diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..fa90d0bf2ea6adf1f8f60a7bddfb79c3603e8fca
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+# Folders
+__pycache__/
+build/
+*.egg-info
+.idea/
+
+
+# Files
+*.weights
+*.pth
+*.pt
+*.t7
+*.mp4
+*.avi
+*.so
+*.txt
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000000000000000000000000000000000..bbbb4fe6e636673a88a877b68447290305c92a7e
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "thirdparty/fast-reid"]
+	path = thirdparty/fast-reid
+	url = https://github.com/JDAI-CV/fast-reid.git
+[submodule "thirdparty/mmdetection"]
+	path = thirdparty/mmdetection
+	url = https://github.com/open-mmlab/mmdetection.git
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..92a1ed5dc27676f33e306463d532e4969fbc42ae
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Ziqiang
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..027b5e0d6bbe2f05a224feeeac5d0eb66fb30f87
--- /dev/null
+++ b/README.md
@@ -0,0 +1,235 @@
+# Deep Sort with PyTorch
+
+![](demo/demo.gif)
+
+## Update(1-1-2020)
+Changes
+- fix bugs
+- refactor code
+- accelerate detection by adding NMS on GPU
+
+## Update(07-22)
+Changes
+- bug fixes (thanks @JieChen91 and @yingsen1 for the bug reports).
+- use batched feature extraction for each frame, which leads to a small speed-up.
+- code improvements.
+
+Further improvement directions
+- Train the detector on a specific dataset rather than the official one.
+- Retrain the ReID model on a pedestrian dataset for better performance.
+- Replace the YOLOv3 detector with more advanced ones.
+
+## Update(23-05-2024)
+
+### tracking
+
+- Added a ResNet option to the appearance feature extraction network in the `deep` folder.
+
+- Fixed the NMS bug in `preprocessing.py` and the covariance calculation bug in `kalman_filter.py` in the `sort` folder.
+
+### detecting
+
+- Added the YOLOv5 detector with an aligned interface, plus YOLOv5-related YAML configuration files. Code references this repo: [YOLOv5-v6.1](https://github.com/ultralytics/yolov5/tree/v6.1).
+
+- The `train.py`, `val.py` and `detect.py` from the original YOLOv5 were removed. This repo only needs **yolov5x.pt**.
+
+### deepsort
+
+- Added the tracked target's category, so both the category and the tracking ID can be displayed simultaneously.
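+
+For reference, here is a minimal sketch of how the category and tracking ID come back from the tracker. The `outputs` row layout `[x1, y1, x2, y2, cls, track_id]` follows `DeepSort.update` in `deep_sort/deep_sort.py`; the drawing code itself is illustrative and assumes `tracker`, `frame` and the detector outputs (`bbox_xywh`, `conf`, `cls`) already exist:
+
+```python
+import json
+import cv2
+
+names = json.load(open("coco_classes.json"))  # class index -> name
+outputs, _ = tracker.update(bbox_xywh, conf, cls, frame)
+for x1, y1, x2, y2, c, track_id in outputs:   # one row per confirmed track
+    label = "{}:{}".format(names[str(c)], track_id)  # e.g. "person:3"
+    cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)
+    cv2.putText(frame, label, (int(x1), int(y1) - 4),
+                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
+```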
+
+## Update(28-05-2024)
+
+### segmentation
+
+* Added the Mask RCNN instance segmentation model. Code references this repo: [mask_rcnn](https://github.com/WZMIAOMIAO/deep-learning-for-image-processing/tree/master/pytorch_object_detection/mask_rcnn). A visual result is saved in `demo/demo2.gif`.
+* Similar to YOLOv5, `train.py`, `validation.py` and `predict.py` were removed. This repo only needs **maskrcnn_resnet50_fpn_coco.pth**.
+
+### deepsort
+
+- Added the tracked target's mask, so the category, tracking ID and mask can all be displayed simultaneously.
+
+## Latest Update(09-06-2024)
+
+### feature extraction network
+
+* Use `nn.parallel.DistributedDataParallel` in PyTorch to support multi-GPU training.
+* Added [GETTING_STARTED.md](deep_sort/deep/GETTING_STARTED.md) as a better guide to `train.py` and `train_multiGPU.py`.
+
+Updated `README.md` for the previously updated content (#Update(23-05-2024) and #Update(28-05-2024)).
+
+**Any contributions to this repository are welcome!**
+
+
+## Introduction
+This is an implementation of the MOT tracking algorithm Deep SORT. Deep SORT is basically the same as SORT, but adds a CNN model to extract appearance features from the image regions of people bounded by a detector. This CNN model is in fact a re-identification (ReID) model; the detector used in the [PAPER](https://arxiv.org/abs/1703.07402) is Faster R-CNN, and the original source code is [HERE](https://github.com/nwojke/deep_sort).
+However, in the original code the CNN model is implemented with TensorFlow, which I'm not familiar with, so I re-implemented the CNN feature extraction model with PyTorch and changed the CNN model a little bit. Also, I use **YOLOv3** to generate bboxes instead of Faster R-CNN.
+
+## Dependencies
+- python 3 **(python 2 untested)**
+- numpy
+- scipy
+- opencv-python
+- sklearn
+- torch >= 1.9
+- torchvision >= 0.13
+- pillow
+- vizer
+- edict
+- matplotlib
+- pycocotools
+- tqdm
+
+## Quick Start
+0. Check that all dependencies are installed
+```bash
+pip install -r requirements.txt
+```
+For users in China, you can specify a PyPI mirror to speed up installation, e.g.:
+```bash
+pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
+```
+
+1. Clone this repository
+```bash
+git clone git@github.com:ZQPei/deep_sort_pytorch.git
+```
+
+2. Download detector parameters
+```bash
+# if you use YOLOv3 as the detector in this repo
+cd detector/YOLOv3/weight/
+wget https://pjreddie.com/media/files/yolov3.weights
+wget https://pjreddie.com/media/files/yolov3-tiny.weights
+cd ../../../
+
+# if you use YOLOv5 as the detector in this repo
+cd detector/YOLOv5
+wget https://github.com/ultralytics/yolov5/releases/download/v6.1/yolov5s.pt
+# or
+wget https://github.com/ultralytics/yolov5/releases/download/v6.1/yolov5m.pt
+cd ../../
+
+# if you use Mask RCNN as the detector in this repo
+cd detector/Mask_RCNN/save_weights
+wget https://download.pytorch.org/models/maskrcnn_resnet50_fpn_coco-bf2d0c1e.pth
+cd ../../../
+```
+
+3. Download the deepsort feature extraction network weights
+```bash
+# if you use the original model from the PAPER
+cd deep_sort/deep/checkpoint
+# download ckpt.t7 from
+# https://drive.google.com/drive/folders/1xhG0kRH1EX5B9_Iz8gQJb7UNnn_riXi6 to this folder
+cd ../../../
+
+# if you use resnet18 in this repo
+cd deep_sort/deep/checkpoint
+wget https://download.pytorch.org/models/resnet18-5c106cde.pth
+cd ../../../
+```
+
+4. **(Optional)** Compile the nms module if you use YOLOv3 as the detector in this repo
+```bash
+cd detector/YOLOv3/nms
+sh build.sh
+cd ../../..
+``` + +Notice: +If compiling failed, the simplist way is to **Upgrade your pytorch >= 1.1 and torchvision >= 0.3" and you can avoid the troublesome compiling problems which are most likely caused by either `gcc version too low` or `libraries missing`. + +5. **(Optional)** Prepare third party submodules + +[fast-reid](https://github.com/JDAI-CV/fast-reid) + +This library supports bagtricks, AGW and other mainstream ReID methods through providing an fast-reid adapter. + +to prepare our bundled fast-reid, then follow instructions in its README to install it. + +Please refer to `configs/fastreid.yaml` for a sample of using fast-reid. See [Model Zoo](https://github.com/JDAI-CV/fast-reid/blob/master/docs/MODEL_ZOO.md) for available methods and trained models. + +[MMDetection](https://github.com/open-mmlab/mmdetection) + +This library supports Faster R-CNN and other mainstream detection methods through providing an MMDetection adapter. + +to prepare our bundled MMDetection, then follow instructions in its README to install it. + +Please refer to `configs/mmdet.yaml` for a sample of using MMDetection. See [Model Zoo](https://github.com/open-mmlab/mmdetection/blob/master/docs/model_zoo.md) for available methods and trained models. + +Run + +``` +git submodule update --init --recursive +``` + + +6. Run demo +```bash +usage: deepsort.py [-h] + [--fastreid] + [--config_fastreid CONFIG_FASTREID] + [--mmdet] + [--config_mmdetection CONFIG_MMDETECTION] + [--config_detection CONFIG_DETECTION] + [--config_deepsort CONFIG_DEEPSORT] [--display] + [--frame_interval FRAME_INTERVAL] + [--display_width DISPLAY_WIDTH] + [--display_height DISPLAY_HEIGHT] [--save_path SAVE_PATH] + [--cpu] [--camera CAM] + VIDEO_PATH + +# yolov3 + deepsort +python deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov3.yaml + +# yolov3_tiny + deepsort +python deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov3_tiny.yaml + +# yolov3 + deepsort on webcam +python3 deepsort.py /dev/video0 --camera 0 + +# yolov3_tiny + deepsort on webcam +python3 deepsort.py /dev/video0 --config_detection ./configs/yolov3_tiny.yaml --camera 0 + +# yolov5s + deepsort +python deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov5s.yaml + +# yolov5m + deepsort +python deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov5m.yaml + +# mask_rcnn + deepsort +python deepsort.py [VIDEO_PATH] --config_detection ./configs/mask_rcnn.yaml --segment + +# fast-reid + deepsort +python deepsort.py [VIDEO_PATH] --fastreid [--config_fastreid ./configs/fastreid.yaml] + +# MMDetection + deepsort +python deepsort.py [VIDEO_PATH] --mmdet [--config_mmdetection ./configs/mmdet.yaml] +``` +Use `--display` to enable display image per frame. +Results will be saved to `./output/results.avi` and `./output/results.txt`. + +All files above can also be accessed from BaiduDisk! +linker:[BaiduDisk](https://pan.baidu.com/s/1YJ1iPpdFTlUyLFoonYvozg) +passwd:fbuw + +## Training the RE-ID model +Check [GETTING_STARTED.md](deep_sort/deep/GETTING_STARTED.md) to start training progress using standard benchmark or **customized dataset**. 
+ +## Demo videos and images +[demo.avi](https://drive.google.com/drive/folders/1xhG0kRH1EX5B9_Iz8gQJb7UNnn_riXi6) +[demo2.avi](https://drive.google.com/drive/folders/1xhG0kRH1EX5B9_Iz8gQJb7UNnn_riXi6) + +![1.jpg](demo/1.jpg) +![2.jpg](demo/2.jpg) + + +## References +- paper: [Simple Online and Realtime Tracking with a Deep Association Metric](https://arxiv.org/abs/1703.07402) +- code: [nwojke/deep_sort](https://github.com/nwojke/deep_sort) +- paper: [YOLOv3: An Incremental Improvement](https://pjreddie.com/media/files/papers/YOLOv3.pdf) +- code: [Joseph Redmon/yolov3](https://pjreddie.com/darknet/yolo/) +- paper: [Mask R-CNN](https://arxiv.org/pdf/1703.06870) +- code: [WZMIAOMIAO/Mask R-CNN](https://github.com/WZMIAOMIAO/deep-learning-for-image-processing/tree/master/pytorch_object_detection/mask_rcnn) +- paper: [YOLOv5](https://github.com/ultralytics/yolov5) +- code: [ultralytics/yolov5](https://github.com/ultralytics/yolov5/tree/v6.1) diff --git a/coco_classes.json b/coco_classes.json new file mode 100644 index 0000000000000000000000000000000000000000..21bdd48ca18a46d790b271711be449039813900a --- /dev/null +++ b/coco_classes.json @@ -0,0 +1,82 @@ +{ + "0": "person", + "1": "bicycle", + "10": "fire hydrant", + "11": "stop sign", + "12": "parking meter", + "13": "bench", + "14": "bird", + "15": "cat", + "16": "dog", + "17": "horse", + "18": "sheep", + "19": "cow", + "2": "car", + "20": "elephant", + "21": "bear", + "22": "zebra", + "23": "giraffe", + "24": "backpack", + "25": "umbrella", + "26": "handbag", + "27": "tie", + "28": "suitcase", + "29": "frisbee", + "3": "motorcycle", + "30": "skis", + "31": "snowboard", + "32": "sports ball", + "33": "kite", + "34": "baseball bat", + "35": "baseball glove", + "36": "skateboard", + "37": "surfboard", + "38": "tennis racket", + "39": "bottle", + "4": "airplane", + "40": "wine glass", + "41": "cup", + "42": "fork", + "43": "knife", + "44": "spoon", + "45": "bowl", + "46": "banana", + "47": "apple", + "48": "sandwich", + "49": "orange", + "5": "bus", + "50": "broccoli", + "51": "carrot", + "52": "hot dog", + "53": "pizza", + "54": "donut", + "55": "cake", + "56": "chair", + "57": "couch", + "58": "potted plant", + "59": "bed", + "6": "train", + "60": "dining table", + "61": "toilet", + "62": "tv", + "63": "laptop", + "64": "mouse", + "65": "remote", + "66": "keyboard", + "67": "cell phone", + "68": "microwave", + "69": "oven", + "7": "truck", + "70": "toaster", + "71": "sink", + "72": "refrigerator", + "73": "book", + "74": "clock", + "75": "vase", + "76": "scissors", + "77": "teddy bear", + "78": "hair drier", + "79": "toothbrush", + "8": "boat", + "9": "traffic light" +} \ No newline at end of file diff --git a/configs/deep_sort.yaml b/configs/deep_sort.yaml new file mode 100644 index 0000000000000000000000000000000000000000..11458090bbbc63d1dadfa690429f4cda8254a055 --- /dev/null +++ b/configs/deep_sort.yaml @@ -0,0 +1,10 @@ +DEEPSORT: + REID_CKPT: "./deep_sort/deep/checkpoint/ckpt.t7" + MAX_DIST: 0.2 + MIN_CONFIDENCE: 0.5 + NMS_MAX_OVERLAP: 0.5 + MAX_IOU_DISTANCE: 0.7 + MAX_AGE: 70 + N_INIT: 3 + NN_BUDGET: 100 + \ No newline at end of file diff --git a/configs/fastreid.yaml b/configs/fastreid.yaml new file mode 100644 index 0000000000000000000000000000000000000000..60b37f7102e59b8e367bfbcaf26f643e4f92fdfd --- /dev/null +++ b/configs/fastreid.yaml @@ -0,0 +1,3 @@ +FASTREID: + CFG: "thirdparty/fast-reid/configs/Market1501/bagtricks_R50.yml" + CHECKPOINT: "deep_sort/deep/checkpoint/market_bot_R50.pth" \ No newline at end of file diff --git 
a/configs/mask_rcnn.yaml b/configs/mask_rcnn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..cb3c736db213231165877aa98800898041bb09e3 --- /dev/null +++ b/configs/mask_rcnn.yaml @@ -0,0 +1,6 @@ +MASKRCNN: + LABEL: "./coco_classes.json" + WEIGHT: "./detector/Mask_RCNN/save_weights/maskrcnn_resnet50_fpn_coco.pth" + + NUM_CLASSES: 90 + BOX_THRESH: 0.5 \ No newline at end of file diff --git a/configs/mmdet.yaml b/configs/mmdet.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e1b7e5882e71f0f940e1026b208b0f300b9fd517 --- /dev/null +++ b/configs/mmdet.yaml @@ -0,0 +1,5 @@ +MMDET: + CFG: "thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py" + CHECKPOINT: "detector/MMDet/weight/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth" + + SCORE_THRESH: 0.5 \ No newline at end of file diff --git a/configs/yolov3.yaml b/configs/yolov3.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a08476ae50168812ce30391520041fe1e4398387 --- /dev/null +++ b/configs/yolov3.yaml @@ -0,0 +1,7 @@ +YOLOV3: + CFG: "./detector/YOLOv3/cfg/yolo_v3.cfg" + WEIGHT: "./detector/YOLOv3/weight/yolov3.weights" + CLASS_NAMES: "./detector/YOLOv3/cfg/coco.names" + + SCORE_THRESH: 0.5 + NMS_THRESH: 0.4 diff --git a/configs/yolov3_tiny.yaml b/configs/yolov3_tiny.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1261e6859ad70072c561c6969886df536812361b --- /dev/null +++ b/configs/yolov3_tiny.yaml @@ -0,0 +1,7 @@ +YOLOV3: + CFG: "./detector/YOLOv3/cfg/yolov3-tiny.cfg" + WEIGHT: "./detector/YOLOv3/weight/yolov3-tiny.weights" + CLASS_NAMES: "./detector/YOLOv3/cfg/coco.names" + + SCORE_THRESH: 0.5 + NMS_THRESH: 0.4 \ No newline at end of file diff --git a/configs/yolov5l.yaml b/configs/yolov5l.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b5b08457a9d42f7d6fe69e0642f829ccbf0275a6 --- /dev/null +++ b/configs/yolov5l.yaml @@ -0,0 +1,9 @@ +YOLOV5: + CFG: "./detector/YOLOv5/models/yolov5l.yaml" + WEIGHT: "./detector/YOLOv5/yolov5l.pt" + DATA: './detector/YOLOv5/data/coco128.yaml' + + IMGSZ: [640, 640] + SCORE_THRESH: 0.25 + NMS_THRESH: 0.45 + MAX_DET: 100 diff --git a/configs/yolov5m.yaml b/configs/yolov5m.yaml new file mode 100644 index 0000000000000000000000000000000000000000..837a33d012c0b2d27392809a7a12f01d4f4318f8 --- /dev/null +++ b/configs/yolov5m.yaml @@ -0,0 +1,9 @@ +YOLOV5: + CFG: "./detector/YOLOv5/models/yolov5m.yaml" + WEIGHT: "./detector/YOLOv5/yolov5m.pt" + DATA: './detector/YOLOv5/data/coco128.yaml' + + IMGSZ: [640, 640] + SCORE_THRESH: 0.25 + NMS_THRESH: 0.45 + MAX_DET: 100 diff --git a/configs/yolov5n.yaml b/configs/yolov5n.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0e80fedcb640ff2b74f306301bc915836feb4790 --- /dev/null +++ b/configs/yolov5n.yaml @@ -0,0 +1,9 @@ +YOLOV5: + CFG: "./detector/YOLOv5/models/yolov5n.yaml" + WEIGHT: "./detector/YOLOv5/yolov5n.pt" + DATA: './detector/YOLOv5/data/coco128.yaml' + + IMGSZ: [640, 640] + SCORE_THRESH: 0.25 + NMS_THRESH: 0.45 + MAX_DET: 100 diff --git a/configs/yolov5s.yaml b/configs/yolov5s.yaml new file mode 100644 index 0000000000000000000000000000000000000000..97c0cfa34f97c3127e1be4522fca948067eb092b --- /dev/null +++ b/configs/yolov5s.yaml @@ -0,0 +1,9 @@ +YOLOV5: + CFG: "./detector/YOLOv5/models/yolov5s.yaml" + WEIGHT: "./detector/YOLOv5/yolov5s.pt" + DATA: './detector/YOLOv5/data/coco128.yaml' + + IMGSZ: [640, 640] + SCORE_THRESH: 0.25 + NMS_THRESH: 0.45 + MAX_DET: 100 diff --git a/configs/yolov5x.yaml 
b/configs/yolov5x.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6728b6034f9abf2b8e9a4ae2a39e64935aeb8c23
--- /dev/null
+++ b/configs/yolov5x.yaml
@@ -0,0 +1,9 @@
+YOLOV5:
+  CFG: "./detector/YOLOv5/models/yolov5x.yaml"
+  WEIGHT: "./detector/YOLOv5/yolov5x.pt"
+  DATA: './detector/YOLOv5/data/coco128.yaml'
+
+  IMGSZ: [640, 640]
+  SCORE_THRESH: 0.25
+  NMS_THRESH: 0.45
+  MAX_DET: 100
diff --git a/deep_sort/README.md b/deep_sort/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e89c9b3ea08691210046fbb9184bf8e44e88f29e
--- /dev/null
+++ b/deep_sort/README.md
@@ -0,0 +1,3 @@
+# Deep Sort
+
+This is the implementation of Deep SORT with PyTorch.
\ No newline at end of file
diff --git a/deep_sort/__init__.py b/deep_sort/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..be3976f44ff121e50284ace021fee8f4c082649b
--- /dev/null
+++ b/deep_sort/__init__.py
@@ -0,0 +1,19 @@
+from .deep_sort import DeepSort
+
+__all__ = ['DeepSort', 'build_tracker']
+
+
+def build_tracker(cfg, use_cuda):
+    if cfg.USE_FASTREID:
+        return DeepSort(model_path=cfg.FASTREID.CHECKPOINT, model_config=cfg.FASTREID.CFG,
+                        max_dist=cfg.DEEPSORT.MAX_DIST, min_confidence=cfg.DEEPSORT.MIN_CONFIDENCE,
+                        nms_max_overlap=cfg.DEEPSORT.NMS_MAX_OVERLAP, max_iou_distance=cfg.DEEPSORT.MAX_IOU_DISTANCE,
+                        max_age=cfg.DEEPSORT.MAX_AGE, n_init=cfg.DEEPSORT.N_INIT, nn_budget=cfg.DEEPSORT.NN_BUDGET,
+                        use_cuda=use_cuda)
+
+    else:
+        return DeepSort(model_path=cfg.DEEPSORT.REID_CKPT,
+                        max_dist=cfg.DEEPSORT.MAX_DIST, min_confidence=cfg.DEEPSORT.MIN_CONFIDENCE,
+                        nms_max_overlap=cfg.DEEPSORT.NMS_MAX_OVERLAP, max_iou_distance=cfg.DEEPSORT.MAX_IOU_DISTANCE,
+                        max_age=cfg.DEEPSORT.MAX_AGE, n_init=cfg.DEEPSORT.N_INIT, nn_budget=cfg.DEEPSORT.NN_BUDGET,
+                        use_cuda=use_cuda)
diff --git a/deep_sort/deep/GETTING_STARTED.md b/deep_sort/deep/GETTING_STARTED.md
new file mode 100644
index 0000000000000000000000000000000000000000..b55ef24502f64a9c404527c485b2b47757e0230e
--- /dev/null
+++ b/deep_sort/deep/GETTING_STARTED.md
@@ -0,0 +1,82 @@
+In the deepsort algorithm, an appearance feature extraction network is used to extract features from **image_crops** for matching purposes. The original model used in the paper is in `model.py`, and its parameters are here: [ckpt.t7](https://drive.google.com/drive/folders/1xhG0kRH1EX5B9_Iz8gQJb7UNnn_riXi6). This repository also provides a `resnet.py` script and its ImageNet pre-trained weights below.
+
+```
+# resnet18
+https://download.pytorch.org/models/resnet18-5c106cde.pth
+# resnet34
+https://download.pytorch.org/models/resnet34-333f7ec4.pth
+# resnet50
+https://download.pytorch.org/models/resnet50-19c8e357.pth
+# resnext50_32x4d
+https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth
+```
+
+## Dataset Preparation
+
+To train the model, you first need to download the [Market1501](http://www.liangzheng.com.cn/Project/project_reid.html) dataset or the [Mars](http://www.liangzheng.com.cn/Project/project_mars.html) dataset.
+
+If you want to train on your **own dataset** (assuming you have already downloaded it), the dataset should be arranged in the following way.
+
+```
+    ├── dataset_root: The root dir of the dataset.
+        ├── class1: Category 1 is located in the folder dir.
+            ├── xxx1.jpg: Image belonging to category 1.
+            ├── xxx2.jpg: Image belonging to category 1.
+        ├── class2: Category 2 is located in the folder dir.
+            ├── xxx3.jpg: Image belonging to category 2.
+            ├── xxx4.jpg: Image belonging to category 2.
+        ├── class3: Category 3 is located in the folder dir.
+        ...
+        ...
+```
+
+## Training the RE-ID model
+
+Assuming you have already prepared the dataset, you can use the following commands to start training.
+
+#### training on a single GPU
+
+```bash
+usage: train.py [--data-dir]
+                [--epochs]
+                [--batch_size]
+                [--lr]
+                [--lrf]
+                [--weights]
+                [--freeze-layers]
+                [--gpu_id]
+
+# defaults to cuda:0 and the Net in `model.py`
+python train.py --data-dir [dataset/root/path] --weights [(optional)pre-train/weight/path]
+# you can pass the `--freeze-layers` option to freeze all convolutional layer parameters and train only the fc layer parameters
+python train.py --data-dir [dataset/root/path] --weights [(optional)pre-train/weight/path] --freeze-layers
+```
+
+#### training on multiple GPUs
+
+```bash
+usage: train_multiGPU.py [--data-dir]
+                         [--epochs]
+                         [--batch_size]
+                         [--lr]
+                         [--lrf]
+                         [--syncBN]
+                         [--weights]
+                         [--freeze-layers]
+                         # do not change the following parameters; the system assigns them automatically
+                         [--device]
+                         [--world_size]
+                         [--dist_url]
+
+# defaults to cuda:0, cuda:1, cuda:2, cuda:3 and the resnet18 in `resnet.py`
+CUDA_VISIBLE_DEVICES=0,1,2,3 torchrun --nproc_per_node=4 train_multiGPU.py --data-dir [dataset/root/path] --weights [(optional)pre-train/weight/path]
+# you can pass the `--freeze-layers` option to freeze all convolutional layer parameters and train only the fc layer parameters
+CUDA_VISIBLE_DEVICES=0,1,2,3 torchrun --nproc_per_node=4 train_multiGPU.py --data-dir [dataset/root/path] --weights [(optional)pre-train/weight/path] --freeze-layers
+```
+
+An example of training progress is as follows:
+
+![train.jpg](./train.jpg)
+
+Finally, you can evaluate the model using [test.py](test.py) and [evaluate.py](evaluate.py).
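+
+Once trained, the checkpoint can be plugged into the tracker's feature extractor. A small sanity-check sketch — the checkpoint name `model_39.pth` and the crop image are placeholders, and `Extractor` defaults to the `Net` from `model.py` (edit `feature_extractor.py` to swap in `resnet18` if that is what you trained):
+
+```python
+import cv2
+from deep_sort.deep.feature_extractor import Extractor
+
+# Extractor reads the 'net_dict' entry that train.py saves.
+# Note: if you trained with a class count other than Net's default, strip the
+# classifier.* weights from the checkpoint first; load_state_dict(strict=False)
+# does not ignore shape mismatches.
+extractor = Extractor("deep_sort/deep/checkpoint/model_39.pth", use_cuda=True)
+
+crop = cv2.imread("pedestrian_crop.jpg")      # any crop of a person
+crop = cv2.cvtColor(crop, cv2.COLOR_BGR2RGB)  # the net expects RGB crops
+features = extractor([crop])                  # __call__ takes a list of crops
+print(features.shape)                         # (1, 512) for the default Net
+```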
+
diff --git a/deep_sort/deep/__init__.py b/deep_sort/deep/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/deep_sort/deep/checkpoint/.gitkeep b/deep_sort/deep/checkpoint/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/deep_sort/deep/datasets.py b/deep_sort/deep/datasets.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e83b37a66be7ed9a0d9a5e7ec9499ce79b1c53c
--- /dev/null
+++ b/deep_sort/deep/datasets.py
@@ -0,0 +1,92 @@
+import json
+import os
+import random
+
+import cv2
+from PIL import Image
+import torch
+from torch.utils.data import Dataset
+import matplotlib.pyplot as plt
+
+
+class ClsDataset(Dataset):
+    def __init__(self, images_path, images_labels, transform=None):
+        self.images_path = images_path
+        self.images_labels = images_labels
+        self.transform = transform
+
+    def __len__(self):
+        return len(self.images_path)
+
+    def __getitem__(self, idx):
+        img = cv2.imread(self.images_path[idx])
+        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+        img = Image.fromarray(img)
+        label = self.images_labels[idx]
+
+        if self.transform is not None:
+            img = self.transform(img)
+        return img, label
+
+    @staticmethod
+    def collate_fn(batch):
+        images, labels = tuple(zip(*batch))
+        images = torch.stack(images, dim=0)
+        labels = torch.as_tensor(labels)
+        return images, labels
+
+
+def read_split_data(root, valid_rate=0.2):
+    assert os.path.exists(root), 'dataset root: {} does not exist.'.format(root)
+
+    class_names = [cls for cls in os.listdir(root) if os.path.isdir(os.path.join(root, cls))]
+    class_names.sort()
+
+    class_indices = {name: i for i, name in enumerate(class_names)}
+    json_str = json.dumps({v: k for k, v in class_indices.items()}, indent=4)
+    with open('class_indices.json', 'w') as f:
+        f.write(json_str)
+
+    train_images_path = []
+    train_labels = []
+    val_images_path = []
+    val_labels = []
+    per_class_num = []
+
+    supported = ['.jpg', '.JPG', '.png', '.PNG']
+    for cls in class_names:
+        cls_path = os.path.join(root, cls)
+        images_path = [os.path.join(cls_path, i) for i in os.listdir(cls_path)
+                       if os.path.splitext(i)[-1] in supported]
+        images_label = class_indices[cls]
+        per_class_num.append(len(images_path))
+
+        val_path = random.sample(images_path, int(len(images_path) * valid_rate))
+        for img_path in images_path:
+            if img_path in val_path:
+                val_images_path.append(img_path)
+                val_labels.append(images_label)
+            else:
+                train_images_path.append(img_path)
+                train_labels.append(images_label)
+
+    print("{} images were found in the dataset.".format(sum(per_class_num)))
+    print("{} images for training.".format(len(train_images_path)))
+    print("{} images for validation.".format(len(val_images_path)))
+
+    assert len(train_images_path) > 0, "number of training images must be greater than zero"
+    assert len(val_images_path) > 0, "number of validation images must be greater than zero"
+
+    plot_distribution = False
+    if plot_distribution:
+        plt.bar(range(len(class_names)), per_class_num, align='center')
+        plt.xticks(range(len(class_names)), class_names)
+
+        for i, v in enumerate(per_class_num):
+            plt.text(x=i, y=v + 5, s=str(v), ha='center')
+
+        plt.xlabel('classes')
+        plt.ylabel('numbers')
+        plt.title('the distribution of dataset')
+        plt.show()
+    return [train_images_path, train_labels], [val_images_path, val_labels], len(class_names)
diff --git a/deep_sort/deep/evaluate.py b/deep_sort/deep/evaluate.py
new file mode
100644
index 0000000000000000000000000000000000000000..85eaa6fc35a61edf966fd4133ea3ead5735111ff
--- /dev/null
+++ b/deep_sort/deep/evaluate.py
@@ -0,0 +1,15 @@
+import torch
+
+features = torch.load("features.pth")
+qf = features["qf"]
+ql = features["ql"]
+gf = features["gf"]
+gl = features["gl"]
+
+scores = qf.mm(gf.t())
+res = scores.topk(5, dim=1)[1][:, 0]
+top1correct = gl[res].eq(ql).sum().item()
+
+print("Acc top1:{:.3f}".format(top1correct / ql.size(0)))
+
+
diff --git a/deep_sort/deep/feature_extractor.py b/deep_sort/deep/feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..b01cdf494310dbad0fdbc17fb90a8d9e22bc7b1a
--- /dev/null
+++ b/deep_sort/deep/feature_extractor.py
@@ -0,0 +1,93 @@
+import torch
+import torchvision.transforms as transforms
+import numpy as np
+import cv2
+import logging
+
+from .model import Net
+from .resnet import resnet18
+try:
+    # fastreid is optional: it is only needed for FastReIDExtractor below
+    from fastreid.config import get_cfg
+    from fastreid.engine import DefaultTrainer
+    from fastreid.utils.checkpoint import Checkpointer
+except ImportError:
+    get_cfg = DefaultTrainer = Checkpointer = None
+
+
+class Extractor(object):
+    def __init__(self, model_path, use_cuda=True):
+        self.net = Net(reid=True)
+        # self.net = resnet18(reid=True)
+        self.device = "cuda" if torch.cuda.is_available() and use_cuda else "cpu"
+        state_dict = torch.load(model_path, map_location=lambda storage, loc: storage)
+        self.net.load_state_dict(state_dict if 'net_dict' not in state_dict else state_dict['net_dict'], strict=False)
+        logger = logging.getLogger("root.tracker")
+        logger.info("Loading weights from {}... Done!".format(model_path))
+        self.net.to(self.device)
+        self.size = (64, 128)
+        self.norm = transforms.Compose([
+            transforms.ToTensor(),
+            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+        ])
+
+    def _preprocess(self, im_crops):
+        """
+        TODO:
+        1. to float with scale from 0 to 1
+        2. resize to (64, 128) as the Market1501 dataset did
+        3. concatenate to a numpy array
+        4. to torch Tensor
+        5. normalize
+        """
+
+        def _resize(im, size):
+            return cv2.resize(im.astype(np.float32) / 255., size)
+
+        im_batch = torch.cat([self.norm(_resize(im, self.size)).unsqueeze(0) for im in im_crops], dim=0).float()
+        return im_batch
+
+    def __call__(self, im_crops):
+        im_batch = self._preprocess(im_crops)
+        with torch.no_grad():
+            im_batch = im_batch.to(self.device)
+            features = self.net(im_batch)
+        return features.cpu().numpy()
+
+
+class FastReIDExtractor(object):
+    def __init__(self, model_config, model_path, use_cuda=True):
+        cfg = get_cfg()
+        cfg.merge_from_file(model_config)
+        cfg.MODEL.BACKBONE.PRETRAIN = False
+        self.net = DefaultTrainer.build_model(cfg)
+        self.device = "cuda" if torch.cuda.is_available() and use_cuda else "cpu"
+
+        Checkpointer(self.net).load(model_path)
+        logger = logging.getLogger("root.tracker")
+        logger.info("Loading weights from {}... Done!".format(model_path))
+        self.net.to(self.device)
+        self.net.eval()
+        height, width = cfg.INPUT.SIZE_TEST
+        self.size = (width, height)
+        self.norm = transforms.Compose([
+            transforms.ToTensor(),
+            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+        ])
+
+    def _preprocess(self, im_crops):
+        def _resize(im, size):
+            return cv2.resize(im.astype(np.float32) / 255., size)
+
+        im_batch = torch.cat([self.norm(_resize(im, self.size)).unsqueeze(0) for im in im_crops], dim=0).float()
+        return im_batch
+
+    def __call__(self, im_crops):
+        im_batch = self._preprocess(im_crops)
+        with torch.no_grad():
+            im_batch = im_batch.to(self.device)
+            features = self.net(im_batch)
+        return features.cpu().numpy()
+
+
+if __name__ == '__main__':
+    img = cv2.imread("demo.jpg")[:, :, (2, 1, 0)]
+    extr = Extractor("checkpoint/ckpt.t7")
+    feature = extr([img])  # __call__ expects a list of crops
+    print(feature.shape)
diff --git a/deep_sort/deep/model.py b/deep_sort/deep/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..a305dd3be080bff4a074a256590d1f8ff41cbef3
--- /dev/null
+++ b/deep_sort/deep/model.py
@@ -0,0 +1,105 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class BasicBlock(nn.Module):
+    def __init__(self, c_in, c_out, is_downsample=False):
+        super(BasicBlock, self).__init__()
+        self.is_downsample = is_downsample
+        if is_downsample:
+            self.conv1 = nn.Conv2d(c_in, c_out, 3, stride=2, padding=1, bias=False)
+        else:
+            self.conv1 = nn.Conv2d(c_in, c_out, 3, stride=1, padding=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(c_out)
+        self.relu = nn.ReLU(True)
+        self.conv2 = nn.Conv2d(c_out, c_out, 3, stride=1, padding=1, bias=False)
+        self.bn2 = nn.BatchNorm2d(c_out)
+        if is_downsample:
+            self.downsample = nn.Sequential(
+                nn.Conv2d(c_in, c_out, 1, stride=2, bias=False),
+                nn.BatchNorm2d(c_out)
+            )
+        elif c_in != c_out:
+            self.downsample = nn.Sequential(
+                nn.Conv2d(c_in, c_out, 1, stride=1, bias=False),
+                nn.BatchNorm2d(c_out)
+            )
+            self.is_downsample = True
+
+    def forward(self, x):
+        y = self.conv1(x)
+        y = self.bn1(y)
+        y = self.relu(y)
+        y = self.conv2(y)
+        y = self.bn2(y)
+        if self.is_downsample:
+            x = self.downsample(x)
+        return F.relu(x.add(y), True)
+
+
+def make_layers(c_in, c_out, repeat_times, is_downsample=False):
+    blocks = []
+    for i in range(repeat_times):
+        if i == 0:
+            blocks += [BasicBlock(c_in, c_out, is_downsample=is_downsample), ]
+        else:
+            blocks += [BasicBlock(c_out, c_out), ]
+    return nn.Sequential(*blocks)
+
+
+class Net(nn.Module):
+    def __init__(self, num_classes=751, reid=False):
+        super(Net, self).__init__()
+        # input: 3 x 128 x 64
+        self.conv = nn.Sequential(
+            nn.Conv2d(3, 64, 3, stride=1, padding=1),
+            nn.BatchNorm2d(64),
+            nn.ReLU(inplace=True),
+            # nn.Conv2d(32,32,3,stride=1,padding=1),
+            # nn.BatchNorm2d(32),
+            # nn.ReLU(inplace=True),
+            nn.MaxPool2d(3, 2, padding=1),
+        )
+        # 64 x 64 x 32
+        self.layer1 = make_layers(64, 64, 2, False)
+        # 64 x 64 x 32
+        self.layer2 = make_layers(64, 128, 2, True)
+        # 128 x 32 x 16
+        self.layer3 = make_layers(128, 256, 2, True)
+        # 256 x 16 x 8
+        self.layer4 = make_layers(256, 512, 2, True)
+        # 512 x 8 x 4
+        self.avgpool = nn.AdaptiveAvgPool2d(1)
+        # 512 x 1 x 1
+        self.reid = reid
+        self.classifier = nn.Sequential(
+            nn.Linear(512, 256),
+            nn.BatchNorm1d(256),
+            nn.ReLU(inplace=True),
+            nn.Dropout(),
+            nn.Linear(256, num_classes),
+        )
+
+    def forward(self, x):
+        x = self.conv(x)
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        # B x 512
+        if self.reid:
+            x =
x.div(x.norm(p=2, dim=1, keepdim=True)) + return x + # classifier + x = self.classifier(x) + return x + + +if __name__ == '__main__': + net = Net() + x = torch.randn(4, 3, 128, 64) + y = net(x) + diff --git a/deep_sort/deep/multi_train_utils/distributed_utils.py b/deep_sort/deep/multi_train_utils/distributed_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4cfc813bb5e3bee0d075dcfb7b1f7e819edd26b7 --- /dev/null +++ b/deep_sort/deep/multi_train_utils/distributed_utils.py @@ -0,0 +1,67 @@ +import os + +import torch +import torch.distributed as dist + + +def init_distributed_mode(args): + if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + args.rank = int(os.environ['RANK']) + args.world_size = int(os.environ['WORLD_SIZE']) + args.gpu = int(os.environ['LOCAL_RANK']) + elif 'SLURM_PROCID' in os.environ: + args.rank = int(os.environ['SLURM_PROCID']) + args.gpu = args.rank % torch.cuda.device_count() + else: + print("Not using distributed mode") + args.distributed = False + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + args.dist_backend = 'nccl' + print('| distributed init (rank {}): {}'.format(args.rank, args.dist_url), flush=True) + dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url, + world_size=args.world_size, rank=args.rank) + dist.barrier() + + +def cleanup(): + dist.destroy_process_group() + + +def is_dist_avail_and_initialized(): + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def is_main_process(): + return get_rank() == 0 + + +def reduce_value(value, average=True): + world_size = get_world_size() + if world_size < 2: + return value + with torch.no_grad(): + dist.all_reduce(value) + if average: + value /= world_size + + return value diff --git a/deep_sort/deep/multi_train_utils/train_eval_utils.py b/deep_sort/deep/multi_train_utils/train_eval_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fdc073bd0c14a92ac3c3dc680d7dbed2c62197c7 --- /dev/null +++ b/deep_sort/deep/multi_train_utils/train_eval_utils.py @@ -0,0 +1,90 @@ +import sys + +from tqdm import tqdm +import torch + +from .distributed_utils import reduce_value, is_main_process + + +def load_model(state_dict, model_state_dict, model): + for k in state_dict: + if k in model_state_dict: + if state_dict[k].shape != model_state_dict[k].shape: + print('Skip loading parameter {}, required shape {}, ' \ + 'loaded shape {}.'.format( + k, model_state_dict[k].shape, state_dict[k].shape)) + state_dict[k] = model_state_dict[k] + else: + print('Drop parameter {}.'.format(k)) + for k in model_state_dict: + if not (k in state_dict): + print('No param {}.'.format(k)) + state_dict[k] = model_state_dict[k] + model.load_state_dict(state_dict, strict=False) + return model + + +def train_one_epoch(model, optimizer, data_loader, device, epoch): + model.train() + criterion = torch.nn.CrossEntropyLoss() + mean_loss = torch.zeros(1).to(device) + sum_num = torch.zeros(1).to(device) + optimizer.zero_grad() + + if is_main_process(): + data_loader = tqdm(data_loader, file=sys.stdout) + + for idx, (images, labels) in enumerate(data_loader): + # forward + images, labels = images.to(device), labels.to(device) + outputs = model(images) + loss = criterion(outputs, labels) + + # 
backward + loss.backward() + loss = reduce_value(loss, average=True) + mean_loss = (mean_loss * idx + loss.detach()) / (idx + 1) + pred = torch.max(outputs, dim=1)[1] + sum_num += torch.eq(pred, labels).sum() + + if is_main_process(): + data_loader.desc = '[epoch {}] mean loss {}'.format(epoch, mean_loss.item()) + + if not torch.isfinite(loss): + print('loss is infinite, ending training') + sys.exit(1) + + optimizer.step() + optimizer.zero_grad() + if device != torch.device('cpu'): + torch.cuda.synchronize(device) + sum_num = reduce_value(sum_num, average=False) + + return sum_num.item(), mean_loss.item() + + +@torch.no_grad() +def evaluate(model, data_loader, device): + model.eval() + criterion = torch.nn.CrossEntropyLoss() + test_loss = torch.zeros(1).to(device) + sum_num = torch.zeros(1).to(device) + if is_main_process(): + data_loader = tqdm(data_loader, file=sys.stdout) + + for idx, (inputs, labels) in enumerate(data_loader): + inputs, labels = inputs.to(device), labels.to(device) + outputs = model(inputs) + loss = criterion(outputs, labels) + loss = reduce_value(loss, average=True) + + test_loss = (test_loss * idx + loss.detach()) / (idx + 1) + pred = torch.max(outputs, dim=1)[1] + sum_num += torch.eq(pred, labels).sum() + + if device != torch.device('cpu'): + torch.cuda.synchronize(device) + + sum_num = reduce_value(sum_num, average=False) + + return sum_num.item(), test_loss.item() diff --git a/deep_sort/deep/resnet.py b/deep_sort/deep/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..6912b1349567f97f4f9efd581ab7aea571b55840 --- /dev/null +++ b/deep_sort/deep/resnet.py @@ -0,0 +1,173 @@ +import torch.nn as nn +import torch + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, in_channel, out_channel, stride=1, downsample=None, **kwargs): + super(BasicBlock, self).__init__() + self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=out_channel, kernel_size=3, + stride=stride, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(out_channel) + self.relu = nn.ReLU() + self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel, kernel_size=3, + stride=1, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(out_channel) + self.downsample = downsample + + def forward(self, x): + identity = x + if self.downsample is not None: + identity = self.downsample(x) + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + out += identity + out = self.relu(out) + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, in_channel, out_channel, stride=1, downsample=None, + groups=1, width_per_group=64): + super(Bottleneck, self).__init__() + width = int(out_channel * (width_per_group / 64.)) * groups + + self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=width, kernel_size=1, + stride=1, bias=False) + self.bn1 = nn.BatchNorm2d(width) + self.conv2 = nn.Conv2d(in_channels=width, out_channels=width, kernel_size=3, + stride=stride, padding=1, bias=False, groups=groups) + self.bn2 = nn.BatchNorm2d(width) + self.conv3 = nn.Conv2d(in_channels=width, out_channels=out_channel * self.expansion, + kernel_size=1, stride=1, bias=False) + self.bn3 = nn.BatchNorm2d(out_channel * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + + def forward(self, x): + identity = x + if self.downsample is not None: + identity = self.downsample(x) + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = 
self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + out += identity + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + + def __init__(self, block, blocks_num, reid=False, num_classes=1000, groups=1, width_per_group=64): + super(ResNet, self).__init__() + self.reid = reid + self.in_channel = 64 + + self.groups = groups + self.width_per_group = width_per_group + + self.conv1 = nn.Conv2d(3, self.in_channel, kernel_size=7, stride=2, + padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(self.in_channel) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layers(block, 64, blocks_num[0]) + self.layer2 = self._make_layers(block, 128, blocks_num[1], stride=2) + self.layer3 = self._make_layers(block, 256, blocks_num[2], stride=2) + # self.layer4 = self._make_layers(block, 512, blocks_num[3], stride=1) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(256 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layers(self, block, channel, block_num, stride=1): + downsample = None + if stride != 1 or self.in_channel != channel * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.in_channel, channel * block.expansion, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(channel * block.expansion) + ) + layers = [] + layers.append(block(self.in_channel, channel, downsample=downsample, stride=stride, + groups=self.groups, width_per_group=self.width_per_group)) + self.in_channel = channel * block.expansion + + for _ in range(1, block_num): + layers.append(block(self.in_channel, channel, groups=self.groups, width_per_group=self.width_per_group)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + # x = self.layer4(x) + x = self.avgpool(x) + x = torch.flatten(x, 1) + + # B x 512 + if self.reid: + x = x.div(x.norm(p=2, dim=1, keepdim=True)) + return x + # classifier + x = self.fc(x) + return x + + +def resnet18(num_classes=1000, reid=False): + # https://download.pytorch.org/models/resnet18-5c106cde.pth + return ResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes, reid=reid) + + +def resnet34(num_classes=1000, reid=False): + # https://download.pytorch.org/models/resnet34-333f7ec4.pth + return ResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes, reid=reid) + + +def resnet50(num_classes=1000, reid=False): + # https://download.pytorch.org/models/resnet50-19c8e357.pth + return ResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes, reid=reid) + + +def resnext50_32x4d(num_classes=1000, reid=False): + # https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth + groups = 32 + width_per_group = 4 + return ResNet(Bottleneck, [3, 4, 6, 3], reid=reid, + num_classes=num_classes, groups=groups, width_per_group=width_per_group) + + +if __name__ == '__main__': + net = resnet18(reid=True) + x = torch.randn(4, 3, 128, 64) + y = net(x) diff --git a/deep_sort/deep/test.py b/deep_sort/deep/test.py new file mode 100644 index 0000000000000000000000000000000000000000..d572472fd147d29b780859751de88f9eefa6d923 --- /dev/null +++ 
b/deep_sort/deep/test.py
@@ -0,0 +1,77 @@
+import torch
+import torch.backends.cudnn as cudnn
+import torchvision
+
+import argparse
+import os
+
+from model import Net
+
+parser = argparse.ArgumentParser(description="Train on market1501")
+parser.add_argument("--data-dir", default='data', type=str)
+parser.add_argument("--no-cuda", action="store_true")
+parser.add_argument("--gpu-id", default=0, type=int)
+args = parser.parse_args()
+
+# device
+device = "cuda:{}".format(args.gpu_id) if torch.cuda.is_available() and not args.no_cuda else "cpu"
+if torch.cuda.is_available() and not args.no_cuda:
+    cudnn.benchmark = True
+
+# data loader
+root = args.data_dir
+query_dir = os.path.join(root, "query")
+gallery_dir = os.path.join(root, "gallery")
+transform = torchvision.transforms.Compose([
+    torchvision.transforms.Resize((128, 64)),
+    torchvision.transforms.ToTensor(),
+    torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
+])
+queryloader = torch.utils.data.DataLoader(
+    torchvision.datasets.ImageFolder(query_dir, transform=transform),
+    batch_size=64, shuffle=False
+)
+galleryloader = torch.utils.data.DataLoader(
+    torchvision.datasets.ImageFolder(gallery_dir, transform=transform),
+    batch_size=64, shuffle=False
+)
+
+# net definition
+net = Net(reid=True)
+assert os.path.isfile("./checkpoint/ckpt.t7"), "Error: no checkpoint file found!"
+print('Loading from checkpoint/ckpt.t7')
+checkpoint = torch.load("./checkpoint/ckpt.t7")
+net_dict = checkpoint['net_dict']
+net.load_state_dict(net_dict, strict=False)
+net.eval()
+net.to(device)
+
+# compute features
+query_features = torch.tensor([]).float()
+query_labels = torch.tensor([]).long()
+gallery_features = torch.tensor([]).float()
+gallery_labels = torch.tensor([]).long()
+
+with torch.no_grad():
+    for idx, (inputs, labels) in enumerate(queryloader):
+        inputs = inputs.to(device)
+        features = net(inputs).cpu()
+        query_features = torch.cat((query_features, features), dim=0)
+        query_labels = torch.cat((query_labels, labels))
+
+    for idx, (inputs, labels) in enumerate(galleryloader):
+        inputs = inputs.to(device)
+        features = net(inputs).cpu()
+        gallery_features = torch.cat((gallery_features, features), dim=0)
+        gallery_labels = torch.cat((gallery_labels, labels))
+
+# the gallery set appears to contain two extra junk/distractor folders that sort
+# ahead of the real IDs, so shift gallery labels to align them with query labels
+gallery_labels -= 2
+
+# save features
+features = {
+    "qf": query_features,
+    "ql": query_labels,
+    "gf": gallery_features,
+    "gl": gallery_labels
+}
+torch.save(features, "features.pth")
diff --git a/deep_sort/deep/train.jpg b/deep_sort/deep/train.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0dc5eae9b02bb1a2f757b46c153361de9c91ceda
--- /dev/null
+++ b/deep_sort/deep/train.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5ea8deb53c3cadc4917a253f06ac3d827843eedbb9ea8c4fb5780acd7e91850
+size 60349
diff --git a/deep_sort/deep/train.py b/deep_sort/deep/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..99220385fefcf76fc943b735887fad81e951f86e
--- /dev/null
+++ b/deep_sort/deep/train.py
@@ -0,0 +1,151 @@
+import argparse
+import os
+import tempfile
+
+import math
+import warnings
+import matplotlib.pyplot as plt
+import torch
+import torchvision
+from torch.optim import lr_scheduler
+
+from multi_train_utils.distributed_utils import init_distributed_mode, cleanup
+from multi_train_utils.train_eval_utils import train_one_epoch, evaluate, load_model
+import torch.distributed as dist
+from datasets import ClsDataset, read_split_data
+
+from model import Net
+from resnet import
resnet18 + +# plot figure +x_epoch = [] +record = {'train_loss': [], 'train_err': [], 'test_loss': [], 'test_err': []} +fig = plt.figure() +ax0 = fig.add_subplot(121, title="loss") +ax1 = fig.add_subplot(122, title="top1_err") + + +def draw_curve(epoch, train_loss, train_err, test_loss, test_err): + global record + record['train_loss'].append(train_loss) + record['train_err'].append(train_err) + record['test_loss'].append(test_loss) + record['test_err'].append(test_err) + + x_epoch.append(epoch) + ax0.plot(x_epoch, record['train_loss'], 'bo-', label='train') + ax0.plot(x_epoch, record['test_loss'], 'ro-', label='val') + ax1.plot(x_epoch, record['train_err'], 'bo-', label='train') + ax1.plot(x_epoch, record['test_err'], 'ro-', label='val') + if epoch == 0: + ax0.legend() + ax1.legend() + fig.savefig("train.jpg") + + +def main(args): + batch_size = args.batch_size + device = 'cuda:{}'.format(args.gpu_id) if torch.cuda.is_available() else 'cpu' + + train_info, val_info, num_classes = read_split_data(args.data_dir, valid_rate=0.2) + train_images_path, train_labels = train_info + val_images_path, val_labels = val_info + + transform_train = torchvision.transforms.Compose([ + torchvision.transforms.RandomCrop((128, 64), padding=4), + torchvision.transforms.RandomHorizontalFlip(), + torchvision.transforms.ToTensor(), + torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + transform_val = torchvision.transforms.Compose([ + torchvision.transforms.Resize((128, 64)), + torchvision.transforms.ToTensor(), + torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + + train_dataset = ClsDataset( + images_path=train_images_path, + images_labels=train_labels, + transform=transform_train + ) + val_dataset = ClsDataset( + images_path=val_images_path, + images_labels=val_labels, + transform=transform_val + ) + + number_workers = min([os.cpu_count(), batch_size if batch_size > 1 else 0, 8]) + print('Using {} dataloader workers every process'.format(number_workers)) + + train_loader = torch.utils.data.DataLoader( + train_dataset, + batch_size=batch_size, + shuffle=True, + pin_memory=True, + num_workers=number_workers + ) + val_loader = torch.utils.data.DataLoader( + val_dataset, + batch_size=batch_size, + shuffle=False, + pin_memory=True, + num_workers=number_workers, + ) + + # net definition + start_epoch = 0 + net = Net(num_classes=num_classes) + if args.weights: + print('Loading from ', args.weights) + checkpoint = torch.load(args.weights, map_location='cpu') + net_dict = checkpoint if 'net_dict' not in checkpoint else checkpoint['net_dict'] + start_epoch = checkpoint['epoch'] if 'epoch' in checkpoint else start_epoch + net = load_model(net_dict, net.state_dict(), net) + + if args.freeze_layers: + for name, param in net.named_parameters(): + if 'classifier' not in name: + param.requires_grad = False + + net.to(device) + + # loss and optimizer + pg = [p for p in net.parameters() if p.requires_grad] + optimizer = torch.optim.SGD(pg, args.lr, momentum=0.9, weight_decay=5e-4) + + lr = lambda x: ((1 + math.cos(x * math.pi / args.epochs)) / 2) * (1 - args.lrf) + args.lrf + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lr) + for epoch in range(start_epoch, start_epoch + args.epochs): + train_positive, train_loss = train_one_epoch(net, optimizer, train_loader, device, epoch) + train_acc = train_positive / len(train_dataset) + scheduler.step() + + test_positive, test_loss = evaluate(net, val_loader, device) + test_acc = test_positive / 
len(val_dataset) + + print('[epoch {}] accuracy: {}'.format(epoch, test_acc)) + + state_dict = { + 'net_dict': net.state_dict(), + 'acc': test_acc, + 'epoch': epoch + } + torch.save(state_dict, './checkpoint/model_{}.pth'.format(epoch)) + draw_curve(epoch, train_loss, 1 - train_acc, test_loss, 1 - test_acc) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description="Train on market1501") + parser.add_argument("--data-dir", default='data', type=str) + parser.add_argument('--epochs', type=int, default=40) + parser.add_argument('--batch_size', type=int, default=32) + parser.add_argument("--lr", default=0.001, type=float) + parser.add_argument('--lrf', default=0.1, type=float) + + parser.add_argument('--weights', type=str, default='./checkpoint/resnet18.pth') + parser.add_argument('--freeze-layers', action='store_true') + + parser.add_argument('--gpu_id', default='0', help='gpu id') + args = parser.parse_args() + + main(args) diff --git a/deep_sort/deep/train_multiGPU.py b/deep_sort/deep/train_multiGPU.py new file mode 100644 index 0000000000000000000000000000000000000000..27d5ab7afe29cb3b17b2ce919ff8bf2aaeced6a7 --- /dev/null +++ b/deep_sort/deep/train_multiGPU.py @@ -0,0 +1,189 @@ +import argparse +import os +import tempfile + +import math +import warnings +import matplotlib.pyplot as plt +import torch +import torchvision +from torch.optim import lr_scheduler + +from multi_train_utils.distributed_utils import init_distributed_mode, cleanup +from multi_train_utils.train_eval_utils import train_one_epoch, evaluate, load_model +import torch.distributed as dist +from datasets import ClsDataset, read_split_data + +from resnet import resnet18 + + +# plot figure +x_epoch = [] +record = {'train_loss': [], 'train_err': [], 'test_loss': [], 'test_err': []} +fig = plt.figure() +ax0 = fig.add_subplot(121, title="loss") +ax1 = fig.add_subplot(122, title="top1_err") + + +def draw_curve(epoch, train_loss, train_err, test_loss, test_err): + global record + record['train_loss'].append(train_loss) + record['train_err'].append(train_err) + record['test_loss'].append(test_loss) + record['test_err'].append(test_err) + + x_epoch.append(epoch) + ax0.plot(x_epoch, record['train_loss'], 'bo-', label='train') + ax0.plot(x_epoch, record['test_loss'], 'ro-', label='val') + ax1.plot(x_epoch, record['train_err'], 'bo-', label='train') + ax1.plot(x_epoch, record['test_err'], 'ro-', label='val') + if epoch == 0: + ax0.legend() + ax1.legend() + fig.savefig("train.jpg") + + +def main(args): + init_distributed_mode(args) + + rank = args.rank + device = torch.device(args.device) + batch_size = args.batch_size + weights_path = args.weights + args.lr *= args.world_size + checkpoint_path = '' + + if rank == 0: + print(args) + if os.path.exists('./checkpoint') is False: + os.mkdir('./checkpoint') + + train_info, val_info, num_classes = read_split_data(args.data_dir, valid_rate=0.2) + train_images_path, train_labels = train_info + val_images_path, val_labels = val_info + + transform_train = torchvision.transforms.Compose([ + torchvision.transforms.RandomCrop((128, 64), padding=4), + torchvision.transforms.RandomHorizontalFlip(), + torchvision.transforms.ToTensor(), + torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + transform_val = torchvision.transforms.Compose([ + torchvision.transforms.Resize((128, 64)), + torchvision.transforms.ToTensor(), + torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + + train_dataset = ClsDataset( + 
images_path=train_images_path, + images_labels=train_labels, + transform=transform_train + ) + val_dataset = ClsDataset( + images_path=val_images_path, + images_labels=val_labels, + transform=transform_val + ) + train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset) + val_sampler = torch.utils.data.distributed.DistributedSampler(val_dataset) + + train_batch_sampler = torch.utils.data.BatchSampler(train_sampler, batch_size, drop_last=True) + + number_workers = min([os.cpu_count(), batch_size if batch_size > 1 else 0, 8]) + + if rank == 0: + print('Using {} dataloader workers every process'.format(number_workers)) + + train_loader = torch.utils.data.DataLoader( + train_dataset, + batch_sampler=train_batch_sampler, + pin_memory=True, + num_workers=number_workers + ) + val_loader = torch.utils.data.DataLoader( + val_dataset, + sampler=val_sampler, + batch_size=batch_size, + pin_memory=True, + num_workers=number_workers, + ) + + # net definition + start_epoch = 0 + net = resnet18(num_classes=num_classes) + if args.weights: + print('Loading from ', args.weights) + checkpoint = torch.load(args.weights, map_location='cpu') + net_dict = checkpoint if 'net_dict' not in checkpoint else checkpoint['net_dict'] + start_epoch = checkpoint['epoch'] if 'epoch' in checkpoint else start_epoch + net = load_model(net_dict, net.state_dict(), net) + else: + warnings.warn("better providing pretraining weights") + checkpoint_path = os.path.join(tempfile.gettempdir(), 'initial_weights.pth') + if rank == 0: + torch.save(net.state_dict(), checkpoint_path) + + dist.barrier() + net.load_state_dict(torch.load(checkpoint_path, map_location='cpu')) + + if args.freeze_layers: + for name, param in net.named_parameters(): + if 'fc' not in name: + param.requires_grad = False + else: + if args.syncBN: + net = torch.nn.SyncBatchNorm.convert_sync_batchnorm(net) + net.to(device) + + net = torch.nn.parallel.DistributedDataParallel(net, device_ids=[args.gpu]) + + # loss and optimizer + pg = [p for p in net.parameters() if p.requires_grad] + optimizer = torch.optim.SGD(pg, args.lr, momentum=0.9, weight_decay=5e-4) + + lr = lambda x: ((1 + math.cos(x * math.pi / args.epochs)) / 2) * (1 - args.lrf) + args.lrf + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lr) + for epoch in range(start_epoch, start_epoch + args.epochs): + train_positive, train_loss = train_one_epoch(net, optimizer, train_loader, device, epoch) + train_acc = train_positive / len(train_dataset) + scheduler.step() + + test_positive, test_loss = evaluate(net, val_loader, device) + test_acc = test_positive / len(val_dataset) + + if rank == 0: + print('[epoch {}] accuracy: {}'.format(epoch, test_acc)) + + state_dict = { + 'net_dict': net.module.state_dict(), + 'acc': test_acc, + 'epoch': epoch + } + torch.save(state_dict, './checkpoint/model_{}.pth'.format(epoch)) + draw_curve(epoch, train_loss, 1 - train_acc, test_loss, 1 - test_acc) + + if rank == 0: + if os.path.exists(checkpoint_path) is True: + os.remove(checkpoint_path) + cleanup() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description="Train on market1501") + parser.add_argument("--data-dir", default='data', type=str) + parser.add_argument('--epochs', type=int, default=40) + parser.add_argument('--batch_size', type=int, default=32) + parser.add_argument("--lr", default=0.001, type=float) + parser.add_argument('--lrf', default=0.1, type=float) + parser.add_argument('--syncBN', type=bool, default=True) + + parser.add_argument('--weights', type=str, 
default='./checkpoint/resnet18.pth')
+    parser.add_argument('--freeze-layers', action='store_true')
+
+    # do not change the following parameters; they are assigned automatically by the distributed launcher
+    parser.add_argument('--device', default='cuda', help='device id (i.e. 0 or 0, 1 or cpu)')
+    parser.add_argument('--world_size', default=4, type=int, help='number of distributed processes')
+    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
+    args = parser.parse_args()
+
+    main(args)
diff --git a/deep_sort/deep_sort.py b/deep_sort/deep_sort.py
new file mode 100644
index 0000000000000000000000000000000000000000..ffca7b30f242b2202d352959568d784ec344cc24
--- /dev/null
+++ b/deep_sort/deep_sort.py
@@ -0,0 +1,121 @@
+import numpy as np
+import torch
+
+from .deep.feature_extractor import Extractor, FastReIDExtractor
+from .sort.nn_matching import NearestNeighborDistanceMetric
+from .sort.preprocessing import non_max_suppression
+from .sort.detection import Detection
+from .sort.tracker import Tracker
+
+__all__ = ['DeepSort']
+
+
+class DeepSort(object):
+    def __init__(self, model_path, model_config=None, max_dist=0.2, min_confidence=0.3, nms_max_overlap=1.0,
+                 max_iou_distance=0.7, max_age=70, n_init=3, nn_budget=100, use_cuda=True):
+        self.min_confidence = min_confidence
+        self.nms_max_overlap = nms_max_overlap
+
+        if model_config is None:
+            self.extractor = Extractor(model_path, use_cuda=use_cuda)
+        else:
+            self.extractor = FastReIDExtractor(model_config, model_path, use_cuda=use_cuda)
+
+        max_cosine_distance = max_dist
+        metric = NearestNeighborDistanceMetric("cosine", max_cosine_distance, nn_budget)
+        self.tracker = Tracker(metric, max_iou_distance=max_iou_distance, max_age=max_age, n_init=n_init)
+
+    def update(self, bbox_xywh, confidences, classes, ori_img, masks=None):
+        self.height, self.width = ori_img.shape[:2]
+        # generate detections
+        features = self._get_features(bbox_xywh, ori_img)
+        bbox_tlwh = self._xywh_to_tlwh(bbox_xywh)
+        detections = [Detection(bbox_tlwh[i], conf, label, features[i], None if masks is None else masks[i])
+                      for i, (conf, label) in enumerate(zip(confidences, classes))
+                      if conf > self.min_confidence]
+
+        # run non-maximum suppression
+        boxes = np.array([d.tlwh for d in detections])
+        scores = np.array([d.confidence for d in detections])
+        indices = non_max_suppression(boxes, self.nms_max_overlap, scores)
+        detections = [detections[i] for i in indices]
+
+        # update tracker
+        self.tracker.predict()
+        self.tracker.update(detections)
+
+        # output bbox identities
+        outputs = []
+        mask_outputs = []
+        for track in self.tracker.tracks:
+            if not track.is_confirmed() or track.time_since_update > 1:
+                continue
+            box = track.to_tlwh()
+            x1, y1, x2, y2 = self._tlwh_to_xyxy(box)
+            track_id = track.track_id
+            track_cls = track.cls
+            outputs.append(np.array([x1, y1, x2, y2, track_cls, track_id], dtype=np.int32))
+            if track.mask is not None:
+                mask_outputs.append(track.mask)
+        if len(outputs) > 0:
+            outputs = np.stack(outputs, axis=0)
+        return outputs, mask_outputs
+
+    """
+    NOTE:
+    Convert bbox from xc_yc_w_h to xtl_ytl_w_h
+    Thanks JieChen91@github.com for reporting this bug!
+    """
+
+    @staticmethod
+    def _xywh_to_tlwh(bbox_xywh):
+        if isinstance(bbox_xywh, np.ndarray):
+            bbox_tlwh = bbox_xywh.copy()
+        elif isinstance(bbox_xywh, torch.Tensor):
+            bbox_tlwh = bbox_xywh.clone()
+        bbox_tlwh[:, 0] = bbox_xywh[:, 0] - bbox_xywh[:, 2] / 2.
+        bbox_tlwh[:, 1] = bbox_xywh[:, 1] - bbox_xywh[:, 3] / 2.
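A hedged usage sketch for `DeepSort.update()` as defined above: inputs are detections in center format `(xc, yc, w, h)` plus per-detection confidences and class ids, and each output row is `[x1, y1, x2, y2, class, track_id]`. The `ckpt.t7` path is an assumption; any valid ReID weight works:

```python
# Illustrative only: outputs stay empty until a track is confirmed (n_init=3 hits).
import numpy as np
from deep_sort.deep_sort import DeepSort

tracker = DeepSort("deep_sort/deep/checkpoint/ckpt.t7", use_cuda=False)  # path assumed
frame = np.zeros((480, 640, 3), dtype=np.uint8)    # stand-in RGB frame
bbox_xywh = np.array([[320., 240., 60., 160.]])    # one person detection, center format
outputs, _ = tracker.update(bbox_xywh, np.array([0.9]), np.array([0]), frame)
```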
+        return bbox_tlwh
+
+    def _xywh_to_xyxy(self, bbox_xywh):
+        x, y, w, h = bbox_xywh
+        x1 = max(int(x - w / 2), 0)
+        x2 = min(int(x + w / 2), self.width - 1)
+        y1 = max(int(y - h / 2), 0)
+        y2 = min(int(y + h / 2), self.height - 1)
+        return x1, y1, x2, y2
+
+    def _tlwh_to_xyxy(self, bbox_tlwh):
+        """
+        Convert bbox from xtl_ytl_w_h to xmin_ymin_xmax_ymax
+        Thanks JieChen91@github.com for reporting this bug!
+        """
+        x, y, w, h = bbox_tlwh
+        x1 = max(int(x), 0)
+        x2 = min(int(x + w), self.width - 1)
+        y1 = max(int(y), 0)
+        y2 = min(int(y + h), self.height - 1)
+        return x1, y1, x2, y2
+
+    @staticmethod
+    def _xyxy_to_tlwh(bbox_xyxy):
+        x1, y1, x2, y2 = bbox_xyxy
+
+        t = x1
+        l = y1
+        w = int(x2 - x1)
+        h = int(y2 - y1)
+        return t, l, w, h
+
+    def _get_features(self, bbox_xywh, ori_img):
+        im_crops = []
+        for box in bbox_xywh:
+            x1, y1, x2, y2 = self._xywh_to_xyxy(box)
+            im = ori_img[y1:y2, x1:x2]
+            im_crops.append(im)
+        if im_crops:
+            features = self.extractor(im_crops)
+        else:
+            features = np.array([])
+        return features
diff --git a/deep_sort/sort/__init__.py b/deep_sort/sort/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/deep_sort/sort/detection.py b/deep_sort/sort/detection.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d84dd2fc3c2ba513f051dff57fcf5f51c1ef172
--- /dev/null
+++ b/deep_sort/sort/detection.py
@@ -0,0 +1,51 @@
+# vim: expandtab:ts=4:sw=4
+import numpy as np
+
+
+class Detection(object):
+    """
+    This class represents a bounding box detection in a single image.
+
+    Parameters
+    ----------
+    tlwh : array_like
+        Bounding box in format `(x, y, w, h)`.
+    confidence : float
+        Detector confidence score.
+    label : int
+        Detected object class id.
+    feature : array_like
+        A feature vector that describes the object contained in this image.
+    mask : Optional[ndarray]
+        Segmentation mask of the detected object, if available.
+
+    Attributes
+    ----------
+    tlwh : ndarray
+        Bounding box in format `(top left x, top left y, width, height)`.
+    confidence : float
+        Detector confidence score.
+    feature : ndarray | NoneType
+        A feature vector that describes the object contained in this image.
+
+    """
+
+    def __init__(self, tlwh, confidence, label, feature, mask=None):
+        self.tlwh = np.asarray(tlwh, dtype=np.float32)
+        self.confidence = float(confidence)
+        self.cls = int(label)
+        self.feature = np.asarray(feature, dtype=np.float32)
+        self.mask = mask
+
+    def to_tlbr(self):
+        """Convert bounding box to format `(min x, min y, max x, max y)`, i.e.,
+        `(top left, bottom right)`.
+        """
+        ret = self.tlwh.copy()
+        ret[2:] += ret[:2]
+        return ret
+
+    def to_xyah(self):
+        """Convert bounding box to format `(center x, center y, aspect ratio,
+        height)`, where the aspect ratio is `width / height`.
+        """
+        ret = self.tlwh.copy()
+        ret[:2] += ret[2:] / 2
+        ret[2] /= ret[3]
+        return ret
diff --git a/deep_sort/sort/iou_matching.py b/deep_sort/sort/iou_matching.py
new file mode 100644
index 0000000000000000000000000000000000000000..481e930da7bcfb2c380d2592be5c0444e8b6152f
--- /dev/null
+++ b/deep_sort/sort/iou_matching.py
@@ -0,0 +1,81 @@
+# vim: expandtab:ts=4:sw=4
+from __future__ import absolute_import
+import numpy as np
+from . import linear_assignment
+
+
+def iou(bbox, candidates):
+    """Compute intersection over union.
+
+    Parameters
+    ----------
+    bbox : ndarray
+        A bounding box in format `(top left x, top left y, width, height)`.
+    candidates : ndarray
+        A matrix of candidate bounding boxes (one per row) in the same format
+        as `bbox`.
+ + Returns + ------- + ndarray + The intersection over union in [0, 1] between the `bbox` and each + candidate. A higher score means a larger fraction of the `bbox` is + occluded by the candidate. + + """ + bbox_tl, bbox_br = bbox[:2], bbox[:2] + bbox[2:] + candidates_tl = candidates[:, :2] + candidates_br = candidates[:, :2] + candidates[:, 2:] + + tl = np.c_[np.maximum(bbox_tl[0], candidates_tl[:, 0])[:, np.newaxis], + np.maximum(bbox_tl[1], candidates_tl[:, 1])[:, np.newaxis]] + br = np.c_[np.minimum(bbox_br[0], candidates_br[:, 0])[:, np.newaxis], + np.minimum(bbox_br[1], candidates_br[:, 1])[:, np.newaxis]] + wh = np.maximum(0., br - tl) + + area_intersection = wh.prod(axis=1) + area_bbox = bbox[2:].prod() + area_candidates = candidates[:, 2:].prod(axis=1) + return area_intersection / (area_bbox + area_candidates - area_intersection) + + +def iou_cost(tracks, detections, track_indices=None, + detection_indices=None): + """An intersection over union distance metric. + + Parameters + ---------- + tracks : List[deep_sort.track.Track] + A list of tracks. + detections : List[deep_sort.detection.Detection] + A list of detections. + track_indices : Optional[List[int]] + A list of indices to tracks that should be matched. Defaults to + all `tracks`. + detection_indices : Optional[List[int]] + A list of indices to detections that should be matched. Defaults + to all `detections`. + + Returns + ------- + ndarray + Returns a cost matrix of shape + len(track_indices), len(detection_indices) where entry (i, j) is + `1 - iou(tracks[track_indices[i]], detections[detection_indices[j]])`. + + """ + if track_indices is None: + track_indices = np.arange(len(tracks)) + if detection_indices is None: + detection_indices = np.arange(len(detections)) + + cost_matrix = np.zeros((len(track_indices), len(detection_indices))) + for row, track_idx in enumerate(track_indices): + if tracks[track_idx].time_since_update > 1: + cost_matrix[row, :] = linear_assignment.INFTY_COST + continue + + bbox = tracks[track_idx].to_tlwh() + candidates = np.asarray([detections[i].tlwh for i in detection_indices]) + cost_matrix[row, :] = 1. - iou(bbox, candidates) + return cost_matrix diff --git a/deep_sort/sort/kalman_filter.py b/deep_sort/sort/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..3ac07463864935e3d975bf96969dab0d3a83beb7 --- /dev/null +++ b/deep_sort/sort/kalman_filter.py @@ -0,0 +1,231 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. 
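A quick numeric check of `iou()` above, with boxes in `(top left x, top left y, width, height)`:

```python
import numpy as np
from deep_sort.sort.iou_matching import iou

bbox = np.array([0., 0., 10., 10.])
candidates = np.array([[0., 0., 10., 10.],    # identical box   -> 1.0
                       [5., 5., 10., 10.],    # quarter overlap -> 25/175 ~ 0.14
                       [20., 20., 5., 5.]])   # disjoint        -> 0.0
print(iou(bbox, candidates))
```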
+ self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + mean = np.dot(self._motion_mat, mean) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. 
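A minimal sketch of one filter cycle with the class above; the measurement is `(center x, center y, aspect ratio, height)`. Since velocities initialize to zero mean, a bare predict step leaves the position unchanged but inflates the positional uncertainty:

```python
import numpy as np
from deep_sort.sort.kalman_filter import KalmanFilter

kf = KalmanFilter()
mean, cov = kf.initiate(np.array([320., 240., 0.5, 160.]))
mean, cov = kf.predict(mean, cov)
print(mean[:4])           # still [320, 240, 0.5, 160]
print(np.diag(cov)[:2])   # position variances have grown
```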
+ + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + # new_covariance = covariance - np.linalg.multi_dot(( + # kalman_gain, projected_cov, kalman_gain.T)) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, self._update_mat, covariance)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False): + """Compute gating distance between state distribution and measurements. + + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. + + """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + cholesky_factor = np.linalg.cholesky(covariance) + d = measurements - mean + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha diff --git a/deep_sort/sort/linear_assignment.py b/deep_sort/sort/linear_assignment.py new file mode 100644 index 0000000000000000000000000000000000000000..f8c78bbd28b5e403d1c115ab843a6666886f4b9c --- /dev/null +++ b/deep_sort/sort/linear_assignment.py @@ -0,0 +1,192 @@ +# vim: expandtab:ts=4:sw=4 +from __future__ import absolute_import +import numpy as np +# from sklearn.utils.linear_assignment_ import linear_assignment +from scipy.optimize import linear_sum_assignment as linear_assignment +from . import kalman_filter + + +INFTY_COST = 1e+5 + + +def min_cost_matching( + distance_metric, max_distance, tracks, detections, track_indices=None, + detection_indices=None): + """Solve linear assignment problem. + + Parameters + ---------- + distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray + The distance metric is given a list of tracks and detections as well as + a list of N track indices and M detection indices. The metric should + return the NxM dimensional cost matrix, where element (i, j) is the + association cost between the i-th track in the given track indices and + the j-th detection in the given detection_indices. + max_distance : float + Gating threshold. Associations with cost larger than this value are + disregarded. + tracks : List[track.Track] + A list of predicted tracks at the current time step. 
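The covariance update above uses the `P - K H P` form rather than the commented-out `P - K S K^T` form; with the optimal gain `K = P H^T S^-1` the two are algebraically identical, which a standalone numeric check confirms:

```python
# Illustrative check with a random SPD P; H and R mirror the filter's shapes.
import numpy as np

rng = np.random.default_rng(0)
A = rng.normal(size=(8, 8))
P = A @ A.T + 8 * np.eye(8)        # state covariance (SPD)
H = np.eye(4, 8)                   # linear observation model
S = H @ P @ H.T + 0.1 * np.eye(4)  # innovation covariance
K = P @ H.T @ np.linalg.inv(S)     # optimal Kalman gain
print(np.allclose(P - K @ S @ K.T, P - K @ H @ P))  # True
```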
+    detections : List[detection.Detection]
+        A list of detections at the current time step.
+    track_indices : List[int]
+        List of track indices that maps rows in `cost_matrix` to tracks in
+        `tracks` (see description above).
+    detection_indices : List[int]
+        List of detection indices that maps columns in `cost_matrix` to
+        detections in `detections` (see description above).
+
+    Returns
+    -------
+    (List[(int, int)], List[int], List[int])
+        Returns a tuple with the following three entries:
+        * A list of matched track and detection indices.
+        * A list of unmatched track indices.
+        * A list of unmatched detection indices.
+
+    """
+    if track_indices is None:
+        track_indices = np.arange(len(tracks))
+    if detection_indices is None:
+        detection_indices = np.arange(len(detections))
+
+    if len(detection_indices) == 0 or len(track_indices) == 0:
+        return [], track_indices, detection_indices  # Nothing to match.
+
+    cost_matrix = distance_metric(
+        tracks, detections, track_indices, detection_indices)
+    cost_matrix[cost_matrix > max_distance] = max_distance + 1e-5
+
+    row_indices, col_indices = linear_assignment(cost_matrix)
+
+    matches, unmatched_tracks, unmatched_detections = [], [], []
+    for col, detection_idx in enumerate(detection_indices):
+        if col not in col_indices:
+            unmatched_detections.append(detection_idx)
+    for row, track_idx in enumerate(track_indices):
+        if row not in row_indices:
+            unmatched_tracks.append(track_idx)
+    for row, col in zip(row_indices, col_indices):
+        track_idx = track_indices[row]
+        detection_idx = detection_indices[col]
+        if cost_matrix[row, col] > max_distance:
+            unmatched_tracks.append(track_idx)
+            unmatched_detections.append(detection_idx)
+        else:
+            matches.append((track_idx, detection_idx))
+    return matches, unmatched_tracks, unmatched_detections
+
+
+def matching_cascade(
+        distance_metric, max_distance, cascade_depth, tracks, detections,
+        track_indices=None, detection_indices=None):
+    """Run matching cascade.
+
+    Parameters
+    ----------
+    distance_metric : Callable[List[Track], List[Detection], List[int], List[int]] -> ndarray
+        The distance metric is given a list of tracks and detections as well as
+        a list of N track indices and M detection indices. The metric should
+        return the NxM dimensional cost matrix, where element (i, j) is the
+        association cost between the i-th track in the given track indices and
+        the j-th detection in the given detection indices.
+    max_distance : float
+        Gating threshold. Associations with cost larger than this value are
+        disregarded.
+    cascade_depth: int
+        The cascade depth, should be set to the maximum track age.
+    tracks : List[track.Track]
+        A list of predicted tracks at the current time step.
+    detections : List[detection.Detection]
+        A list of detections at the current time step.
+    track_indices : Optional[List[int]]
+        List of track indices that maps rows in `cost_matrix` to tracks in
+        `tracks` (see description above). Defaults to all tracks.
+    detection_indices : Optional[List[int]]
+        List of detection indices that maps columns in `cost_matrix` to
+        detections in `detections` (see description above). Defaults to all
+        detections.
+
+    Returns
+    -------
+    (List[(int, int)], List[int], List[int])
+        Returns a tuple with the following three entries:
+        * A list of matched track and detection indices.
+        * A list of unmatched track indices.
+        * A list of unmatched detection indices.
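A toy run of `min_cost_matching()` above; the metric simply closes over a fixed cost matrix, so plain placeholders stand in for the Track and Detection objects (values are illustrative only):

```python
import numpy as np
from deep_sort.sort.linear_assignment import min_cost_matching

cost = np.array([[0.1, 0.9],
                 [0.8, 0.2],
                 [0.9, 0.9]])
metric = lambda tracks, dets, ti, di: cost[np.ix_(ti, di)]
matches, um_t, um_d = min_cost_matching(metric, 0.5, ["t0", "t1", "t2"], ["d0", "d1"],
                                        list(range(3)), list(range(2)))
print(matches, um_t, um_d)   # [(0, 0), (1, 1)], track 2 unmatched, no detections left
```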
+ + """ + if track_indices is None: + track_indices = list(range(len(tracks))) + if detection_indices is None: + detection_indices = list(range(len(detections))) + + unmatched_detections = detection_indices + matches = [] + for level in range(cascade_depth): + if len(unmatched_detections) == 0: # No detections left + break + + track_indices_l = [ + k for k in track_indices + if tracks[k].time_since_update == 1 + level + ] + if len(track_indices_l) == 0: # Nothing to match at this level + continue + + matches_l, _, unmatched_detections = \ + min_cost_matching( + distance_metric, max_distance, tracks, detections, + track_indices_l, unmatched_detections) + matches += matches_l + unmatched_tracks = list(set(track_indices) - set(k for k, _ in matches)) + return matches, unmatched_tracks, unmatched_detections + + +def gate_cost_matrix( + kf, cost_matrix, tracks, detections, track_indices, detection_indices, + gated_cost=INFTY_COST, only_position=False): + """Invalidate infeasible entries in cost matrix based on the state + distributions obtained by Kalman filtering. + + Parameters + ---------- + kf : The Kalman filter. + cost_matrix : ndarray + The NxM dimensional cost matrix, where N is the number of track indices + and M is the number of detection indices, such that entry (i, j) is the + association cost between `tracks[track_indices[i]]` and + `detections[detection_indices[j]]`. + tracks : List[track.Track] + A list of predicted tracks at the current time step. + detections : List[detection.Detection] + A list of detections at the current time step. + track_indices : List[int] + List of track indices that maps rows in `cost_matrix` to tracks in + `tracks` (see description above). + detection_indices : List[int] + List of detection indices that maps columns in `cost_matrix` to + detections in `detections` (see description above). + gated_cost : Optional[float] + Entries in the cost matrix corresponding to infeasible associations are + set this value. Defaults to a very large value. + only_position : Optional[bool] + If True, only the x, y position of the state distribution is considered + during gating. Defaults to False. + + Returns + ------- + ndarray + Returns the modified cost matrix. + + """ + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray( + [detections[i].to_xyah() for i in detection_indices]) + for row, track_idx in enumerate(track_indices): + track = tracks[track_idx] + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = gated_cost + return cost_matrix diff --git a/deep_sort/sort/nn_matching.py b/deep_sort/sort/nn_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..2786f36fd4179bc462f82b5511e684fb9b17a7d5 --- /dev/null +++ b/deep_sort/sort/nn_matching.py @@ -0,0 +1,176 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np + + +def _pdist(a, b): + """Compute pair-wise squared distance between points in `a` and `b`. + + Parameters + ---------- + a : array_like + An NxM matrix of N samples of dimensionality M. + b : array_like + An LxM matrix of L samples of dimensionality M. + + Returns + ------- + ndarray + Returns a matrix of size len(a), len(b) such that eleement (i, j) + contains the squared distance between `a[i]` and `b[j]`. 
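`gate_cost_matrix()` above combines this with the Kalman state: any association whose squared Mahalanobis distance exceeds the 0.95 chi-square quantile for the measurement dimension (`chi2inv95[4] = 9.4877`) is made unmatchable. A standalone sketch of that gate:

```python
import numpy as np
from deep_sort.sort.kalman_filter import KalmanFilter, chi2inv95

kf = KalmanFilter()
mean, cov = kf.initiate(np.array([100., 100., 0.5, 50.]))
measurements = np.array([[102., 101., 0.5, 50.],    # nearby  -> passes the gate
                         [300., 300., 0.5, 50.]])   # far off -> gated out
d2 = kf.gating_distance(mean, cov, measurements)
print(d2 < chi2inv95[4])   # [ True False]
```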
+
+    """
+    a, b = np.asarray(a), np.asarray(b)
+    if len(a) == 0 or len(b) == 0:
+        return np.zeros((len(a), len(b)))
+    a2, b2 = np.square(a).sum(axis=1), np.square(b).sum(axis=1)
+    r2 = -2. * np.dot(a, b.T) + a2[:, None] + b2[None, :]
+    r2 = np.clip(r2, 0., float(np.inf))
+    return r2
+
+
+def _cosine_distance(a, b, data_is_normalized=False):
+    """Compute pair-wise cosine distance between points in `a` and `b`.
+
+    Parameters
+    ----------
+    a : array_like
+        An NxM matrix of N samples of dimensionality M.
+    b : array_like
+        An LxM matrix of L samples of dimensionality M.
+    data_is_normalized : Optional[bool]
+        If True, assumes rows in a and b are unit length vectors.
+        Otherwise, a and b are explicitly normalized to length 1.
+
+    Returns
+    -------
+    ndarray
+        Returns a matrix of size len(a), len(b) such that element (i, j)
+        contains the cosine distance between `a[i]` and `b[j]`.
+
+    """
+    if not data_is_normalized:
+        a = np.asarray(a) / np.linalg.norm(a, axis=1, keepdims=True)
+        b = np.asarray(b) / np.linalg.norm(b, axis=1, keepdims=True)
+    return 1. - np.dot(a, b.T)
+
+
+def _nn_euclidean_distance(x, y):
+    """ Helper function for nearest neighbor distance metric (Euclidean).
+
+    Parameters
+    ----------
+    x : ndarray
+        A matrix of N row-vectors (sample points).
+    y : ndarray
+        A matrix of M row-vectors (query points).
+
+    Returns
+    -------
+    ndarray
+        A vector of length M that contains for each entry in `y` the
+        smallest squared Euclidean distance to a sample in `x`.
+
+    """
+    distances = _pdist(x, y)
+    return np.maximum(0.0, distances.min(axis=0))
+
+
+def _nn_cosine_distance(x, y):
+    """ Helper function for nearest neighbor distance metric (cosine).
+
+    Parameters
+    ----------
+    x : ndarray
+        A matrix of N row-vectors (sample points).
+    y : ndarray
+        A matrix of M row-vectors (query points).
+
+    Returns
+    -------
+    ndarray
+        A vector of length M that contains for each entry in `y` the
+        smallest cosine distance to a sample in `x`.
+
+    """
+    distances = _cosine_distance(x, y)
+    return distances.min(axis=0)
+
+
+class NearestNeighborDistanceMetric(object):
+    """
+    A nearest neighbor distance metric that, for each target, returns
+    the closest distance to any sample that has been observed so far.
+
+    Parameters
+    ----------
+    metric : str
+        Either "euclidean" or "cosine".
+    matching_threshold: float
+        The matching threshold. Samples with larger distance are considered an
+        invalid match.
+    budget : Optional[int]
+        If not None, fix samples per class to at most this number. Removes
+        the oldest samples when the budget is reached.
+
+    Attributes
+    ----------
+    samples : Dict[int -> List[ndarray]]
+        A dictionary that maps from target identities to the list of samples
+        that have been observed so far.
+
+    """
+
+    def __init__(self, metric, matching_threshold, budget=None):
+
+        if metric == "euclidean":
+            self._metric = _nn_euclidean_distance
+        elif metric == "cosine":
+            self._metric = _nn_cosine_distance
+        else:
+            raise ValueError(
+                "Invalid metric; must be either 'euclidean' or 'cosine'")
+        self.matching_threshold = matching_threshold
+        self.budget = budget
+        self.samples = {}
+
+    def partial_fit(self, features, targets, active_targets):
+        """Update the distance metric with new data.
+
+        Parameters
+        ----------
+        features : ndarray
+            An NxM matrix of N features of dimensionality M.
+        targets : ndarray
+            An integer array of associated target identities.
+        active_targets : List[int]
+            A list of targets that are currently present in the scene.
+ + """ + for feature, target in zip(features, targets): + self.samples.setdefault(target, []).append(feature) + if self.budget is not None: + self.samples[target] = self.samples[target][-self.budget:] + self.samples = {k: self.samples[k] for k in active_targets} + + def distance(self, features, targets): + """Compute distance between features and targets. + + Parameters + ---------- + features : ndarray + An NxM matrix of N features of dimensionality M. + targets : List[int] + A list of targets to match the given `features` against. + + Returns + ------- + ndarray + Returns a cost matrix of shape len(targets), len(features), where + element (i, j) contains the closest squared distance between + `targets[i]` and `features[j]`. + + """ + cost_matrix = np.zeros((len(targets), len(features))) + for i, target in enumerate(targets): + cost_matrix[i, :] = self._metric(self.samples[target], features) + return cost_matrix diff --git a/deep_sort/sort/preprocessing.py b/deep_sort/sort/preprocessing.py new file mode 100644 index 0000000000000000000000000000000000000000..02d35b27483dc279b6801f4034fb203338448aac --- /dev/null +++ b/deep_sort/sort/preprocessing.py @@ -0,0 +1,73 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import cv2 + + +def non_max_suppression(boxes, max_bbox_overlap, scores=None): + """Suppress overlapping detections. + + Original code from [1]_ has been adapted to include confidence score. + + .. [1] http://www.pyimagesearch.com/2015/02/16/ + faster-non-maximum-suppression-python/ + + Examples + -------- + + >>> boxes = [d.roi for d in detections] + >>> scores = [d.confidence for d in detections] + >>> indices = non_max_suppression(boxes, max_bbox_overlap, scores) + >>> detections = [detections[i] for i in indices] + + Parameters + ---------- + boxes : ndarray + Array of ROIs (x, y, width, height). + max_bbox_overlap : float + ROIs that overlap more than this values are suppressed. + scores : Optional[array_like] + Detector confidence score. + + Returns + ------- + List[int] + Returns indices of detections that have survived non-maxima suppression. + + """ + if len(boxes) == 0: + return [] + + boxes = boxes.astype(np.float32) + pick = [] + + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + boxes[:, 0] + y2 = boxes[:, 3] + boxes[:, 1] + + area = (x2 - x1 + 1) * (y2 - y1 + 1) + if scores is not None: + idxs = np.argsort(scores) + else: + idxs = np.argsort(y2) + + while len(idxs) > 0: + last = len(idxs) - 1 + i = idxs[last] + pick.append(i) + + xx1 = np.maximum(x1[i], x1[idxs[:last]]) + yy1 = np.maximum(y1[i], y1[idxs[:last]]) + xx2 = np.minimum(x2[i], x2[idxs[:last]]) + yy2 = np.minimum(y2[i], y2[idxs[:last]]) + + w = np.maximum(0, xx2 - xx1 + 1) + h = np.maximum(0, yy2 - yy1 + 1) + + overlap = (w * h) / (area[idxs[:last]] + area[idxs[last]] - w * h) + + idxs = np.delete( + idxs, np.concatenate( + ([last], np.where(overlap > max_bbox_overlap)[0]))) + + return pick diff --git a/deep_sort/sort/track.py b/deep_sort/sort/track.py new file mode 100644 index 0000000000000000000000000000000000000000..8e1d0f2b2eb17e67b59f552cdfefc7b99fbb3423 --- /dev/null +++ b/deep_sort/sort/track.py @@ -0,0 +1,169 @@ +# vim: expandtab:ts=4:sw=4 + + +class TrackState: + """ + Enumeration type for the single target track state. Newly created tracks are + classified as `tentative` until enough evidence has been collected. Then, + the track state is changed to `confirmed`. Tracks that are no longer alive + are classified as `deleted` to mark them for removal from the set of active + tracks. 
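A quick check of `non_max_suppression()` above: two heavily overlapping `(x, y, w, h)` boxes, where only the higher-scoring one survives:

```python
import numpy as np
from deep_sort.sort.preprocessing import non_max_suppression

boxes = np.array([[10., 10., 50., 80.],
                  [12., 12., 50., 80.],     # near-duplicate of box 0, lower score
                  [200., 200., 40., 60.]])
scores = np.array([0.9, 0.6, 0.8])
keep = non_max_suppression(boxes, max_bbox_overlap=0.5, scores=scores)
print(sorted(keep))   # [0, 2]; box 1 is suppressed by box 0
```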
+
+    """
+
+    Tentative = 1
+    Confirmed = 2
+    Deleted = 3
+
+
+class Track:
+    """
+    A single target track with state space `(x, y, a, h)` and associated
+    velocities, where `(x, y)` is the center of the bounding box, `a` is the
+    aspect ratio and `h` is the height.
+
+    Parameters
+    ----------
+    mean : ndarray
+        Mean vector of the initial state distribution.
+    covariance : ndarray
+        Covariance matrix of the initial state distribution.
+    track_id : int
+        A unique track identifier.
+    n_init : int
+        Number of consecutive detections before the track is confirmed. The
+        track state is set to `Deleted` if a miss occurs within the first
+        `n_init` frames.
+    max_age : int
+        The maximum number of consecutive misses before the track state is
+        set to `Deleted`.
+    feature : Optional[ndarray]
+        Feature vector of the detection this track originates from. If not None,
+        this feature is added to the `features` cache.
+
+    Attributes
+    ----------
+    mean : ndarray
+        Mean vector of the initial state distribution.
+    covariance : ndarray
+        Covariance matrix of the initial state distribution.
+    track_id : int
+        A unique track identifier.
+    hits : int
+        Total number of measurement updates.
+    age : int
+        Total number of frames since first occurrence.
+    time_since_update : int
+        Total number of frames since last measurement update.
+    state : TrackState
+        The current track state.
+    features : List[ndarray]
+        A cache of features. On each measurement update, the associated feature
+        vector is added to this list.
+
+    """
+
+    def __init__(self, mean, covariance, track_id, n_init, max_age,
+                 feature=None, cls=None, mask=None):
+        self.mean = mean
+        self.covariance = covariance
+        self.track_id = track_id
+        self.hits = 1
+        self.age = 1
+        self.time_since_update = 0
+
+        self.state = TrackState.Tentative
+        self.cls = cls
+        self.mask = mask
+        self.features = []
+        if feature is not None:
+            self.features.append(feature)
+
+        self._n_init = n_init
+        self._max_age = max_age
+
+    def to_tlwh(self):
+        """Get current position in bounding box format `(top left x, top left y,
+        width, height)`.
+
+        Returns
+        -------
+        ndarray
+            The bounding box.
+
+        """
+        ret = self.mean[:4].copy()
+        ret[2] *= ret[3]
+        ret[:2] -= ret[2:] / 2
+        return ret
+
+    def to_tlbr(self):
+        """Get current position in bounding box format `(min x, min y, max x,
+        max y)`.
+
+        Returns
+        -------
+        ndarray
+            The bounding box.
+
+        """
+        ret = self.to_tlwh()
+        ret[2:] = ret[:2] + ret[2:]
+        return ret
+
+    def predict(self, kf):
+        """Propagate the state distribution to the current time step using a
+        Kalman filter prediction step.
+
+        Parameters
+        ----------
+        kf : kalman_filter.KalmanFilter
+            The Kalman filter.
+
+        """
+        self.mean, self.covariance = kf.predict(self.mean, self.covariance)
+        self.age += 1
+        self.time_since_update += 1
+
+    def update(self, kf, detection):
+        """Perform Kalman filter measurement update step and update the feature
+        cache.
+
+        Parameters
+        ----------
+        kf : kalman_filter.KalmanFilter
+            The Kalman filter.
+        detection : Detection
+            The associated detection.
+
+        """
+        self.mask = detection.mask
+        self.mean, self.covariance = kf.update(
+            self.mean, self.covariance, detection.to_xyah())
+        self.features.append(detection.feature)
+
+        self.hits += 1
+        self.time_since_update = 0
+        if self.state == TrackState.Tentative and self.hits >= self._n_init:
+            self.state = TrackState.Confirmed
+
+    def mark_missed(self):
+        """Mark this track as missed (no association at the current time step).
+ """ + if self.state == TrackState.Tentative: + self.state = TrackState.Deleted + elif self.time_since_update > self._max_age: + self.state = TrackState.Deleted + + def is_tentative(self): + """Returns True if this track is tentative (unconfirmed). + """ + return self.state == TrackState.Tentative + + def is_confirmed(self): + """Returns True if this track is confirmed.""" + return self.state == TrackState.Confirmed + + def is_deleted(self): + """Returns True if this track is dead and should be deleted.""" + return self.state == TrackState.Deleted diff --git a/deep_sort/sort/tracker.py b/deep_sort/sort/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..d7a225616dfa2f1da7277b658bc1fb7b593655d5 --- /dev/null +++ b/deep_sort/sort/tracker.py @@ -0,0 +1,138 @@ +# vim: expandtab:ts=4:sw=4 +from __future__ import absolute_import +import numpy as np +from . import kalman_filter +from . import linear_assignment +from . import iou_matching +from .track import Track + + +class Tracker: + """ + This is the multi-target tracker. + + Parameters + ---------- + metric : nn_matching.NearestNeighborDistanceMetric + A distance metric for measurement-to-track association. + max_age : int + Maximum number of missed misses before a track is deleted. + n_init : int + Number of consecutive detections before the track is confirmed. The + track state is set to `Deleted` if a miss occurs within the first + `n_init` frames. + + Attributes + ---------- + metric : nn_matching.NearestNeighborDistanceMetric + The distance metric used for measurement to track association. + max_age : int + Maximum number of missed misses before a track is deleted. + n_init : int + Number of frames that a track remains in initialization phase. + kf : kalman_filter.KalmanFilter + A Kalman filter to filter target trajectories in image space. + tracks : List[Track] + The list of active tracks at the current time step. + + """ + + def __init__(self, metric, max_iou_distance=0.7, max_age=70, n_init=3): + self.metric = metric + self.max_iou_distance = max_iou_distance + self.max_age = max_age + self.n_init = n_init + + self.kf = kalman_filter.KalmanFilter() + self.tracks = [] + self._next_id = 1 + + def predict(self): + """Propagate track state distributions one time step forward. + + This function should be called once every time step, before `update`. + """ + for track in self.tracks: + track.predict(self.kf) + + def update(self, detections): + """Perform measurement update and track management. + + Parameters + ---------- + detections : List[deep_sort.detection.Detection] + A list of detections at the current time step. + + """ + # Run matching cascade. + matches, unmatched_tracks, unmatched_detections = \ + self._match(detections) + + # Update track set. + for track_idx, detection_idx in matches: + self.tracks[track_idx].update( + self.kf, detections[detection_idx]) + for track_idx in unmatched_tracks: + self.tracks[track_idx].mark_missed() + for detection_idx in unmatched_detections: + self._initiate_track(detections[detection_idx]) + self.tracks = [t for t in self.tracks if not t.is_deleted()] + + # Update distance metric. 
+ active_targets = [t.track_id for t in self.tracks if t.is_confirmed()] + features, targets = [], [] + for track in self.tracks: + if not track.is_confirmed(): + continue + features += track.features + targets += [track.track_id for _ in track.features] + track.features = [] + self.metric.partial_fit( + np.asarray(features), np.asarray(targets), active_targets) + + def _match(self, detections): + + def gated_metric(tracks, dets, track_indices, detection_indices): + features = np.array([dets[i].feature for i in detection_indices]) + targets = np.array([tracks[i].track_id for i in track_indices]) + cost_matrix = self.metric.distance(features, targets) + cost_matrix = linear_assignment.gate_cost_matrix( + self.kf, cost_matrix, tracks, dets, track_indices, + detection_indices) + + return cost_matrix + + # Split track set into confirmed and unconfirmed tracks. + confirmed_tracks = [ + i for i, t in enumerate(self.tracks) if t.is_confirmed()] + unconfirmed_tracks = [ + i for i, t in enumerate(self.tracks) if not t.is_confirmed()] + + # Associate confirmed tracks using appearance features. + matches_a, unmatched_tracks_a, unmatched_detections = \ + linear_assignment.matching_cascade( + gated_metric, self.metric.matching_threshold, self.max_age, + self.tracks, detections, confirmed_tracks) + + # Associate remaining tracks together with unconfirmed tracks using IOU. + iou_track_candidates = unconfirmed_tracks + [ + k for k in unmatched_tracks_a if + self.tracks[k].time_since_update == 1] + unmatched_tracks_a = [ + k for k in unmatched_tracks_a if + self.tracks[k].time_since_update != 1] + matches_b, unmatched_tracks_b, unmatched_detections = \ + linear_assignment.min_cost_matching( + iou_matching.iou_cost, self.max_iou_distance, self.tracks, + detections, iou_track_candidates, unmatched_detections) + + matches = matches_a + matches_b + unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b)) + return matches, unmatched_tracks, unmatched_detections + + def _initiate_track(self, detection): + mean, covariance = self.kf.initiate(detection.to_xyah()) + self.tracks.append(Track( + mean, covariance, self._next_id, self.n_init, self.max_age, + detection.feature, detection.cls, detection.mask)) + self._next_id += 1 diff --git a/deepsort.py b/deepsort.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b6bdde2caa72c4de3cf789de559cd5695da062 --- /dev/null +++ b/deepsort.py @@ -0,0 +1,189 @@ +import os +import cv2 +import time +import argparse +import torch +import warnings +import json +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), 'thirdparty/fast-reid')) + +from detector import build_detector +from deep_sort import build_tracker +from utils.draw import draw_boxes +from utils.parser import get_config +from utils.log import get_logger +from utils.io import write_results + + +class VideoTracker(object): + def __init__(self, cfg, args, video_path): + self.cfg = cfg + self.args = args + self.video_path = video_path + self.logger = get_logger("root") + + use_cuda = args.use_cuda and torch.cuda.is_available() + if not use_cuda: + warnings.warn("Running in cpu mode which maybe very slow!", UserWarning) + + if args.display: + cv2.namedWindow("test", cv2.WINDOW_NORMAL) + cv2.resizeWindow("test", args.display_width, args.display_height) + + if args.cam != -1: + print("Using webcam " + str(args.cam)) + self.vdo = cv2.VideoCapture(args.cam) + else: + self.vdo = cv2.VideoCapture() + self.detector = build_detector(cfg, use_cuda=use_cuda, 
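How the pieces above compose, as a standalone sketch with the same default thresholds as `DeepSort.__init__`: the appearance metric drives the matching cascade for confirmed tracks, and IOU association mops up unconfirmed and just-missed tracks:

```python
from deep_sort.sort.nn_matching import NearestNeighborDistanceMetric
from deep_sort.sort.tracker import Tracker

metric = NearestNeighborDistanceMetric("cosine", matching_threshold=0.2, budget=100)
tracker = Tracker(metric, max_iou_distance=0.7, max_age=70, n_init=3)
# per frame: tracker.predict(), then tracker.update(detections)  # List[Detection]
```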
segment=self.args.segment)
+        self.deepsort = build_tracker(cfg, use_cuda=use_cuda)
+        self.class_names = self.detector.class_names
+
+    def __enter__(self):
+        if self.args.cam != -1:
+            ret, frame = self.vdo.read()
+            assert ret, "Error: Camera error"
+            self.im_height = frame.shape[0]  # frame.shape is (height, width, channels)
+            self.im_width = frame.shape[1]
+
+        else:
+            assert os.path.isfile(self.video_path), "Path error"
+            self.vdo.open(self.video_path)
+            self.im_width = int(self.vdo.get(cv2.CAP_PROP_FRAME_WIDTH))
+            self.im_height = int(self.vdo.get(cv2.CAP_PROP_FRAME_HEIGHT))
+            assert self.vdo.isOpened()
+
+        if self.args.save_path:
+            os.makedirs(self.args.save_path, exist_ok=True)
+            # TODO save masks
+
+            # path of saved video and results
+            self.save_video_path = os.path.join(self.args.save_path, "results.avi")
+            self.save_results_path = os.path.join(self.args.save_path, "results.txt")
+
+            # create video writer
+            fourcc = cv2.VideoWriter_fourcc(*'MJPG')
+            self.writer = cv2.VideoWriter(self.save_video_path, fourcc, 20, (self.im_width, self.im_height))
+
+            # logging
+            self.logger.info("Save results to {}".format(self.args.save_path))
+
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_traceback):
+        if exc_type:
+            print(exc_type, exc_value, exc_traceback)
+
+    def run(self):
+        results = []
+        idx_frame = 0
+        with open('coco_classes.json', 'r') as f:
+            idx_to_class = json.load(f)
+        while self.vdo.grab():
+            idx_frame += 1
+            if idx_frame % self.args.frame_interval:
+                continue
+
+            start = time.time()
+            _, ori_im = self.vdo.retrieve()
+            im = cv2.cvtColor(ori_im, cv2.COLOR_BGR2RGB)
+
+            # do detection
+            if self.args.segment:
+                bbox_xywh, cls_conf, cls_ids, seg_masks = self.detector(im)
+            else:
+                bbox_xywh, cls_conf, cls_ids = self.detector(im)
+
+            # select person class
+            mask = cls_ids == 0
+
+            bbox_xywh = bbox_xywh[mask]
+            # bbox dilation, in case the bbox is too small; delete this line if using a better pedestrian detector
+            bbox_xywh[:, 2:] *= 1.2
+            cls_conf = cls_conf[mask]
+            cls_ids = cls_ids[mask]
+
+            # do tracking
+            if self.args.segment:
+                seg_masks = seg_masks[mask]
+                outputs, mask_outputs = self.deepsort.update(bbox_xywh, cls_conf, cls_ids, im, seg_masks)
+            else:
+                outputs, _ = self.deepsort.update(bbox_xywh, cls_conf, cls_ids, im)
+
+            # draw boxes for visualization
+            if len(outputs) > 0:
+                bbox_tlwh = []
+                bbox_xyxy = outputs[:, :4]
+                identities = outputs[:, -1]
+                cls = outputs[:, -2]
+                names = [idx_to_class[str(label)] for label in cls]
+
+                ori_im = draw_boxes(ori_im, bbox_xyxy, names, identities, None if not self.args.segment else mask_outputs)
+
+                for bb_xyxy in bbox_xyxy:
+                    bbox_tlwh.append(self.deepsort._xyxy_to_tlwh(bb_xyxy))
+
+                results.append((idx_frame - 1, bbox_tlwh, identities, cls))
+
+            end = time.time()
+
+            if self.args.display:
+                cv2.imshow("test", ori_im)
+                cv2.waitKey(1)
+
+            if self.args.save_path:
+                self.writer.write(ori_im)
+
+            # save results
+            write_results(self.save_results_path, results, 'mot')
+
+            # logging
+            self.logger.info("time: {:.03f}s, fps: {:.03f}, detection numbers: {}, tracking numbers: {}" \
+                             .format(end - start, 1 / (end - start), bbox_xywh.shape[0], len(outputs)))
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--VIDEO_PATH", type=str, default='demo.avi')
+    parser.add_argument("--config_mmdetection", type=str, default="./configs/mmdet.yaml")
+    parser.add_argument("--config_detection", type=str, default="./configs/mask_rcnn.yaml")
+    parser.add_argument("--config_deepsort", type=str, default="./configs/deep_sort.yaml")
+
parser.add_argument("--config_fastreid", type=str, default="./configs/fastreid.yaml") + parser.add_argument("--fastreid", action="store_true") + parser.add_argument("--mmdet", action="store_true") + parser.add_argument("--segment", action="store_true") + # parser.add_argument("--ignore_display", dest="display", action="store_false", default=True) + parser.add_argument("--display", action="store_true") + parser.add_argument("--frame_interval", type=int, default=1) + parser.add_argument("--display_width", type=int, default=800) + parser.add_argument("--display_height", type=int, default=600) + parser.add_argument("--save_path", type=str, default="./output/") + parser.add_argument("--cpu", dest="use_cuda", action="store_false", default=True) + parser.add_argument("--camera", action="store", dest="cam", type=int, default="-1") + return parser.parse_args() + + +if __name__ == "__main__": + args = parse_args() + cfg = get_config() + if args.segment: + cfg.USE_SEGMENT = True + else: + cfg.USE_SEGMENT = False + if args.mmdet: + cfg.merge_from_file(args.config_mmdetection) + cfg.USE_MMDET = True + else: + cfg.merge_from_file(args.config_detection) + cfg.USE_MMDET = False + cfg.merge_from_file(args.config_deepsort) + if args.fastreid: + cfg.merge_from_file(args.config_fastreid) + cfg.USE_FASTREID = True + else: + cfg.USE_FASTREID = False + + with VideoTracker(cfg, args, video_path=args.VIDEO_PATH) as vdo_trk: + vdo_trk.run() diff --git a/deepsort_new.py b/deepsort_new.py new file mode 100644 index 0000000000000000000000000000000000000000..2193f1296a7b3b9bb2d8d008c3d0ecc494645921 --- /dev/null +++ b/deepsort_new.py @@ -0,0 +1,229 @@ +# Modified deepsort.py — Target ID recovery with IOU threshold and smooth tracking +import os +import cv2 +import time +import argparse +import torch +import warnings +import json +import sys +import numpy as np + +sys.path.append(os.path.join(os.path.dirname(__file__), 'thirdparty/fast-reid')) + +from detector import build_detector +from deep_sort import build_tracker +from utils.draw import draw_boxes +from utils.parser import get_config +from utils.log import get_logger +from utils.io import write_results + + +def compute_iou(box1, box2): + if box1 is None or box2 is None: + return 0.0 + xi1, yi1 = max(box1[0], box2[0]), max(box1[1], box2[1]) + xi2, yi2 = min(box1[2], box2[2]), min(box1[3], box2[3]) + inter_area = max(0, xi2 - xi1) * max(0, yi2 - yi1) + box1_area = (box1[2] - box1[0]) * (box1[3] - box1[1]) + box2_area = (box2[2] - box2[0]) * (box2[3] - box2[1]) + union = box1_area + box2_area - inter_area + return inter_area / union if union > 0 else 0.0 + +def get_best_iou_track(outputs, target_bbox, return_iou=False): + if target_bbox is None: + return (None, 0.0) if return_iou else None + best_iou = 0 + best_id = None + for det in outputs: + x1, y1, x2, y2 = det[:4] + track_id = int(det[-1]) + iou = compute_iou([x1, y1, x2, y2], target_bbox) + if iou > best_iou: + best_iou = iou + best_id = track_id + if return_iou: + return best_id, best_iou + return best_id + +class VideoTracker: + def __init__(self, cfg, args, video_path): + self.cfg = cfg + self.args = args + self.video_path = video_path + self.logger = get_logger("root") + self.first_frame_flag = True + self.target_id = None + self.last_known_bbox = None + + use_cuda = args.use_cuda and torch.cuda.is_available() + if not use_cuda: + warnings.warn("Running in cpu mode which maybe very slow!", UserWarning) + + if args.display: + cv2.namedWindow("test", cv2.WINDOW_NORMAL) + cv2.resizeWindow("test", 
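A toy check of the ID-recovery helpers defined above, with boxes in `(x1, y1, x2, y2)` and the track id as the last element of each output row, matching `deepsort.update()`; values are illustrative and assume `compute_iou` and `get_best_iou_track` are in scope:

```python
import numpy as np

outputs = np.array([[10, 10, 50, 90, 0, 7],      # track 7
                    [200, 40, 260, 180, 0, 8]])  # track 8
target_bbox = [12, 12, 52, 92]                   # user-selected ROI, xyxy
best_id, best_iou = get_best_iou_track(outputs, target_bbox, return_iou=True)
print(best_id, round(best_iou, 2))   # 7, ~0.86
```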
args.display_width, args.display_height) + + if args.cam != -1: + self.vdo = cv2.VideoCapture(args.cam) + else: + self.vdo = cv2.VideoCapture(video_path) + + self.detector = build_detector(cfg, use_cuda=use_cuda, segment=args.segment) + self.deepsort = build_tracker(cfg, use_cuda=use_cuda) + + def run(self): + results = [] + idx_frame = 0 + with open('coco_classes.json', 'r') as f: + idx_to_class = json.load(f) + + if not self.vdo.isOpened(): + raise IOError("Failed to open video") + + im_width = int(self.vdo.get(cv2.CAP_PROP_FRAME_WIDTH)) + im_height = int(self.vdo.get(cv2.CAP_PROP_FRAME_HEIGHT)) + + if self.args.save_path: + os.makedirs(self.args.save_path, exist_ok=True) + self.writer = cv2.VideoWriter( + os.path.join(self.args.save_path, "results.avi"), + cv2.VideoWriter_fourcc(*'MJPG'), + 20, (im_width, im_height)) + + while self.vdo.grab(): + idx_frame += 1 + if idx_frame % self.args.frame_interval: + continue + + _, ori_im = self.vdo.retrieve() + im = cv2.cvtColor(ori_im, cv2.COLOR_BGR2RGB) + + if self.args.segment: + bbox_xywh, cls_conf, cls_ids, seg_masks = self.detector(im) + else: + bbox_xywh, cls_conf, cls_ids = self.detector(im) + + mask = cls_ids == 0 # person class + bbox_xywh = bbox_xywh[mask] + cls_conf = cls_conf[mask] + cls_ids = cls_ids[mask] + if bbox_xywh.shape[0] == 0: + continue + + bbox_xywh[:, 2:] *= 1.2 + + if self.args.segment: + seg_masks = seg_masks[mask] + outputs, mask_outputs = self.deepsort.update(bbox_xywh, cls_conf, cls_ids, im, seg_masks) + else: + outputs, _ = self.deepsort.update(bbox_xywh, cls_conf, cls_ids, im) + + if self.first_frame_flag and len(outputs) > 0: + cv2.imshow("Select target", ori_im) + cv2.waitKey(1) + target_roi = cv2.selectROI("Select target", ori_im, False, False) + cv2.destroyWindow("Select target") + target_bbox = [target_roi[0], target_roi[1], target_roi[0] + target_roi[2], target_roi[1] + target_roi[3]] + self.target_id = get_best_iou_track(outputs, target_bbox) + self.last_known_bbox = target_bbox + print(f"[INFO] Selected target ID: {self.target_id}") + self.first_frame_flag = False + continue + + bbox_tlwh = [] + filtered_outputs = [] + for det in outputs: + if int(det[-1]) == self.target_id: + filtered_outputs.append(det) + self.last_known_bbox = det[:4] + + if len(filtered_outputs) == 0 and self.last_known_bbox is not None: + new_id, best_iou = get_best_iou_track(outputs, self.last_known_bbox, return_iou=True) + if best_iou > 0.4: + self.target_id = new_id + print(f"[INFO] Target temporarily lost. Reassigned to ID {self.target_id} (IOU={best_iou:.2f})") + for det in outputs: + if int(det[-1]) == self.target_id: + filtered_outputs.append(det) + self.last_known_bbox = det[:4] + else: + print("[INFO] IOU too low to reassign. 
Skipping reassignment.") + + if len(filtered_outputs) > 0: + def box_center(box): + return np.array([(box[0] + box[2]) / 2, (box[1] + box[3]) / 2]) + + smoothed_outputs = [] + for det in filtered_outputs: + if self.last_known_bbox is None: + smoothed_outputs.append(det) + continue + dist = np.linalg.norm(box_center(det[:4]) - box_center(self.last_known_bbox)) + if dist < 300: + smoothed_outputs.append(det) + else: + print(f"[INFO] Skipped jumpy box with dist={dist:.2f}") + + if len(smoothed_outputs) > 0: + bbox_xyxy = np.array([det[:4] for det in smoothed_outputs]) + identities = [int(det[-1]) for det in smoothed_outputs] + cls = [int(det[-2]) for det in smoothed_outputs] + names = [idx_to_class[str(label)] for label in cls] + + ori_im = draw_boxes(ori_im, bbox_xyxy, names, identities) + + for box in bbox_xyxy: + bbox_tlwh.append(self.deepsort._xyxy_to_tlwh(box)) + + results.append((idx_frame - 1, bbox_tlwh, identities, cls)) + + if self.args.display: + cv2.imshow("test", ori_im) + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + if self.args.save_path: + self.writer.write(ori_im) + + if self.args.save_path: + write_results(os.path.join(self.args.save_path, "results.txt"), results, 'mot') + + self.vdo.release() + if self.args.display: + cv2.destroyAllWindows() + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--VIDEO_PATH", type=str, default="demo.avi") + parser.add_argument("--config_mmdetection", type=str, default="./configs/mmdet.yaml") + parser.add_argument("--config_detection", type=str, default="./configs/mask_rcnn.yaml") + parser.add_argument("--config_deepsort", type=str, default="./configs/deep_sort.yaml") + parser.add_argument("--config_fastreid", type=str, default="./configs/fastreid.yaml") + parser.add_argument("--fastreid", action="store_true") + parser.add_argument("--mmdet", action="store_true") + parser.add_argument("--segment", action="store_true") + parser.add_argument("--display", action="store_true") + parser.add_argument("--frame_interval", type=int, default=1) + parser.add_argument("--display_width", type=int, default=800) + parser.add_argument("--display_height", type=int, default=600) + parser.add_argument("--save_path", type=str, default="./output/") + parser.add_argument("--cpu", dest="use_cuda", action="store_false", default=True) + parser.add_argument("--camera", action="store", dest="cam", type=int, default="-1") + return parser.parse_args() + +if __name__ == "__main__": + args = parse_args() + cfg = get_config() + + cfg.USE_SEGMENT = args.segment + cfg.USE_MMDET = args.mmdet + cfg.USE_FASTREID = args.fastreid + + cfg.merge_from_file(args.config_mmdetection if args.mmdet else args.config_detection) + cfg.merge_from_file(args.config_deepsort) + if args.fastreid: + cfg.merge_from_file(args.config_fastreid) + + tracker = VideoTracker(cfg, args, video_path=args.VIDEO_PATH) + tracker.run() \ No newline at end of file diff --git a/demo/1.jpg b/demo/1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e11a0bb51fecef7b36e510c6d634bcd45521f07a --- /dev/null +++ b/demo/1.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3aa029de627b124bc9b25473994ca6fd915f8ee4cea3bfff0f85096eeda562c2 +size 253964 diff --git a/demo/2.jpg b/demo/2.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f5256e394cca2f5b2e2ad0be639a456cf9f2da2b --- /dev/null +++ b/demo/2.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7a87401b28b189ee6397a2aa964b621774246344a31849ac9fcce6ea61613918 +size 281544 diff --git a/demo/demo.gif b/demo/demo.gif new file mode 100644 index 0000000000000000000000000000000000000000..556dd4ad263b627569a1ae2c6e9afd6e571a5003 --- /dev/null +++ b/demo/demo.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fbafb71266180efb6159d3b13e822a34dfba2824e28a9f8b83700c8d3372576a +size 5485012 diff --git a/demo/demo2.gif b/demo/demo2.gif new file mode 100644 index 0000000000000000000000000000000000000000..735fea87d5522310786cd108ef2cfea2dda2a29e --- /dev/null +++ b/demo/demo2.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9dfa3cecfa13cc984d3c58b37484c0480e07eeeaa259f0bd521f55176981a357 +size 4016574 diff --git a/detector/MMDet/__init__.py b/detector/MMDet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..53a482bb00a829db99fd9a0b1593aba7d41b0e9f --- /dev/null +++ b/detector/MMDet/__init__.py @@ -0,0 +1,2 @@ +from .detector import MMDet +__all__ = ['MMDet'] \ No newline at end of file diff --git a/detector/MMDet/detector.py b/detector/MMDet/detector.py new file mode 100644 index 0000000000000000000000000000000000000000..72348aa45f905846162290d136e29f0b7681f257 --- /dev/null +++ b/detector/MMDet/detector.py @@ -0,0 +1,52 @@ +import logging +import numpy as np +import torch + +from mmdet.apis import init_detector, inference_detector +from .mmdet_utils import xyxy_to_xywh + + +class MMDet(object): + def __init__(self, cfg_file, checkpoint_file, score_thresh=0.7, + is_xywh=False, use_cuda=True): + # net definition + self.device = "cuda" if use_cuda else "cpu" + self.net = init_detector(cfg_file, checkpoint_file, device=self.device) + logger = logging.getLogger("root.detector") + logger.info('Loading weights from %s... Done!' % (checkpoint_file)) + + #constants + self.score_thresh = score_thresh + self.use_cuda = use_cuda + self.is_xywh = is_xywh + self.class_names = self.net.CLASSES + self.num_classes = len(self.class_names) + + def __call__(self, ori_img): + # forward + bbox_result = inference_detector(self.net, ori_img) + bboxes = np.vstack(bbox_result) + + if len(bboxes) == 0: + bbox = np.array([]).reshape([0, 4]) + cls_conf = np.array([]) + cls_ids = np.array([]) + return bbox, cls_conf, cls_ids + + bbox = bboxes[:, :4] + cls_conf = bboxes[:, 4] + cls_ids = [ + np.full(bbox.shape[0], i, dtype=np.int32) + for i, bbox in enumerate(bbox_result) + ] + cls_ids = np.concatenate(cls_ids) + + selected_idx = cls_conf > self.score_thresh + bbox = bbox[selected_idx, :] + cls_conf = cls_conf[selected_idx] + cls_ids = cls_ids[selected_idx] + + if self.is_xywh: + bbox = xyxy_to_xywh(bbox) + + return bbox, cls_conf, cls_ids diff --git a/detector/MMDet/mmdet_utils.py b/detector/MMDet/mmdet_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6a44c5800bf3d9e221f1ea9927dfc5d589476cb0 --- /dev/null +++ b/detector/MMDet/mmdet_utils.py @@ -0,0 +1,15 @@ +import torch +import numpy as np + +def xyxy_to_xywh(boxes_xyxy): + if isinstance(boxes_xyxy, torch.Tensor): + boxes_xywh = boxes_xyxy.clone() + elif isinstance(boxes_xyxy, np.ndarray): + boxes_xywh = boxes_xyxy.copy() + + boxes_xywh[:, 0] = (boxes_xyxy[:, 0] + boxes_xyxy[:, 2]) / 2. + boxes_xywh[:, 1] = (boxes_xyxy[:, 1] + boxes_xyxy[:, 3]) / 2. 
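A standalone illustration of how `__call__` above flattens mmdet's per-class output, where `bbox_result` is a list holding one `(N_i, 5)` array of `(x1, y1, x2, y2, score)` rows per class:

```python
import numpy as np

bbox_result = [np.array([[0., 0., 10., 20., 0.9]]),   # class 0: one box
               np.zeros((0, 5)),                      # class 1: no boxes
               np.array([[5., 5., 15., 25., 0.8]])]   # class 2: one box
bboxes = np.vstack(bbox_result)
cls_ids = np.concatenate([np.full(b.shape[0], i, dtype=np.int32)
                          for i, b in enumerate(bbox_result)])
print(bboxes[:, 4], cls_ids)   # [0.9 0.8] [0 2]
```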
+    boxes_xywh[:, 2] = boxes_xyxy[:, 2] - boxes_xyxy[:, 0]
+    boxes_xywh[:, 3] = boxes_xyxy[:, 3] - boxes_xyxy[:, 1]
+
+    return boxes_xywh
\ No newline at end of file
diff --git a/detector/Mask_RCNN/README.md b/detector/Mask_RCNN/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..77f014021eedeef6ffc998bc61b3bce21685a130
--- /dev/null
+++ b/detector/Mask_RCNN/README.md
@@ -0,0 +1,153 @@
+# Mask R-CNN
+
+## This project is based on the source code in PyTorch's official torchvision module (the pycocotools usage differs slightly)
+* https://github.com/pytorch/vision/tree/master/references/detection
+
+## Environment:
+* Python 3.6/3.7/3.8
+* PyTorch 1.10 or later
+* pycocotools (Linux: `pip install pycocotools`; Windows: `pip install pycocotools-windows` (no extra Visual Studio install needed))
+* Ubuntu or CentOS (Windows is not recommended)
+* Training on a GPU is strongly recommended
+* See `requirements.txt` for the detailed environment
+
+## File structure:
+```
+  ├── backbone: feature extraction network
+  ├── network_files: Mask R-CNN network
+  ├── train_utils: training/validation utilities (including COCO evaluation)
+  ├── my_dataset_coco.py: custom dataset for reading COCO2017
+  ├── my_dataset_voc.py: custom dataset for reading Pascal VOC
+  ├── train.py: training script for a single GPU/CPU
+  ├── train_multi_GPU.py: training script for multi-GPU users
+  ├── predict.py: simple prediction script that runs inference with trained weights
+  ├── validation.py: evaluates COCO metrics on validation/test data with trained weights and writes record_mAP.txt
+  └── transforms.py: preprocessing (random horizontal flip of images and bboxes, PIL image to Tensor)
+```
+
+## Pre-trained weights (download them into this folder):
+* ResNet50 weights: https://download.pytorch.org/models/resnet50-0676ba61.pth (note: rename the file after downloading;
+train.py reads `resnet50.pth`, not `resnet50-0676ba61.pth`)
+* Mask R-CNN (ResNet50+FPN) weights: https://download.pytorch.org/models/maskrcnn_resnet50_fpn_coco-bf2d0c1e.pth (note:
+rename the file after downloading; train.py reads `maskrcnn_resnet50_fpn_coco.pth`, not `maskrcnn_resnet50_fpn_coco-bf2d0c1e.pth`)
+
+
+## Datasets: this project uses the COCO2017 and Pascal VOC2012 datasets
+### COCO2017
+* Official site: https://cocodataset.org/
+* If you are not familiar with the dataset, see my blog post: https://blog.csdn.net/qq_37541097/article/details/113247318
+* Taking COCO2017 as an example, download three files:
+    * `2017 Train images [118K/18GB]`: all images used during training
+    * `2017 Val images [5K/1GB]`: all images used during validation
+    * `2017 Train/Val annotations [241MB]`: annotation json files for the train and val splits
+* Extract everything into a `coco2017` folder, giving the following structure:
+```
+├── coco2017: dataset root
+     ├── train2017: all training images (118287)
+     ├── val2017: all validation images (5000)
+     └── annotations: annotation files
+              ├── instances_train2017.json: train annotations for detection/segmentation
+              ├── instances_val2017.json: val annotations for detection/segmentation
+              ├── captions_train2017.json: train annotations for image captioning
+              ├── captions_val2017.json: val annotations for image captioning
+              ├── person_keypoints_train2017.json: train annotations for keypoint detection
+              └── person_keypoints_val2017.json: val annotations for keypoint detection
+```
+
+### Pascal VOC2012
+* Download: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/index.html#devkit
+* If you are not familiar with the dataset, see my blog post: https://blog.csdn.net/qq_37541097/article/details/115787033
+* After extraction the folder structure is:
+```
+VOCdevkit
+    └── VOC2012
+         ├── Annotations               all image annotations (XML files)
+         ├── ImageSets
+         │   ├── Action                person action image lists
+         │   ├── Layout                person layout image lists
+         │   │
+         │   ├── Main                  detection/classification image lists
+         │   │   ├── train.txt         train split (5717)
+         │   │   ├── val.txt           val split (5823)
+         │   │   └── trainval.txt      train + val (11540)
+         │   │
+         │   └── Segmentation          segmentation image lists
+         │       ├── train.txt         train split (1464)
+         │       ├── val.txt           val split (1449)
+         │       └── trainval.txt      train + val (2913)
+         │
+         ├── JPEGImages                all image files
+         ├── SegmentationClass         semantic segmentation PNGs (by class)
+         └── SegmentationObject        instance segmentation PNGs (by object)
+```
+
+## Training
+* Prepare the dataset in advance
+* Download the matching pre-trained model weights in advance
+* Make sure `--num-classes` and `--data-path` are set correctly
+* To train on a single GPU, use the train.py script directly
+* To train on multiple GPUs, use `torchrun --nproc_per_node=8 train_multi_GPU.py`, where `nproc_per_node` is the number of GPUs
+* To restrict which GPUs are used, prefix the command with `CUDA_VISIBLE_DEVICES=0,3` (e.g. to use only the 1st and 4th GPU of the machine)
+* `CUDA_VISIBLE_DEVICES=0,3 torchrun --nproc_per_node=2 train_multi_GPU.py`
+
+## Notes
+1. When using the training scripts, set `--data-path` to the **root directory** of your dataset:
+```
+# To use COCO with the custom CocoDetection dataset, extract the dataset to /data/coco2017
+python train.py --data-path /data/coco2017
+
+# To use Pascal VOC with the custom VOCInstances dataset, extract the dataset to /data/VOCdevkit
+python train.py --data-path /data/VOCdevkit
+```
+
+2. If you double `batch_size`, double the learning rate as well; e.g. going from `batch_size` 4 to 8, raise `lr` from 0.004 to 0.008
+3. When Batch Normalization is used, `batch_size` must not be smaller than 4, otherwise accuracy degrades. **If GPU memory forces batch_size below 4**, set
+`norm_layer` to `FrozenBatchNorm2d` when creating `resnet50_fpn_backbone`, or set `trainable_layers` to 0 (i.e. freeze the whole `backbone`)
+4. `det_results.txt` (detection task) and `seg_results.txt` (instance segmentation task) saved during training hold the per-epoch COCO metrics on the validation set; the first 12 values are the COCO metrics, the last two are the mean training loss and the learning rate
+5. When using the prediction script, set `weights_path` to the path of your own trained weights.
+6. When using the validation script, make sure your validation/test set contains objects of every class, and adjust `--num-classes`, `--data-path`, `--weights-path` and
+`--label-json-path` (which depends on the dataset used for training). Avoid changing other code if possible
+
+
+## Reproduced results
+Reproduced on COCO2017, loading only the ResNet50 pre-trained weights and training for 26 epochs. Training command:
+```
+torchrun --nproc_per_node=8 train_multi_GPU.py --batch-size 8 --lr 0.08 --pretrain False --amp True
+```
+
+Trained weights download: https://pan.baidu.com/s/1qpXUIsvnj8RHY-V05J-mnA password: 63d5
+
+mAP on the COCO2017 validation set (object detection):
+```
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.381
+ Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.588
+ Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.411
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.215
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.420
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.492
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.315
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.499
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.523
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.319
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.565
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.666
+```
+
+mAP on the COCO2017 validation set (instance segmentation):
+```
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.340
+ Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.552
+ Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.361
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.151
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.369
+ Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.500
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.290
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.449
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.468
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.266
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.509
+ Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.619
+```
+
+## If the theory behind Mask R-CNN is unclear, you can check my bilibili videos
+https://www.bilibili.com/video/BV1ZY411774T
diff --git a/detector/Mask_RCNN/__init__.py b/detector/Mask_RCNN/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b5112f9344e8d5b83b4cff2f14c0f5cb599667f
--- /dev/null
+++ b/detector/Mask_RCNN/__init__.py
@@ -0,0 +1 @@
+from .maskrcnn import Mask_RCNN
diff --git a/detector/Mask_RCNN/backbone/__init__.py b/detector/Mask_RCNN/backbone/__init__.py
new file mode 100644
index
0000000000000000000000000000000000000000..314eb748f899d9fc39604dc6ed07b7d1ef26db1b --- /dev/null +++ b/detector/Mask_RCNN/backbone/__init__.py @@ -0,0 +1 @@ +from .resnet50_fpn_model import resnet50_fpn_backbone diff --git a/detector/Mask_RCNN/backbone/feature_pyramid_network.py b/detector/Mask_RCNN/backbone/feature_pyramid_network.py new file mode 100644 index 0000000000000000000000000000000000000000..fc2fc757f9a8bd43ce9116cd30b00dc8a65b992f --- /dev/null +++ b/detector/Mask_RCNN/backbone/feature_pyramid_network.py @@ -0,0 +1,235 @@ +from collections import OrderedDict + +import torch.nn as nn +import torch +from torch import Tensor +import torch.nn.functional as F + +from torch.jit.annotations import Tuple, List, Dict + + +class IntermediateLayerGetter(nn.ModuleDict): + """ + Module wrapper that returns intermediate layers from a model + It has a strong assumption that the modules have been registered + into the model in the same order as they are used. + This means that one should **not** reuse the same nn.Module + twice in the forward if you want this to work. + Additionally, it is only able to query submodules that are directly + assigned to the model. So if `model` is passed, `model.feature1` can + be returned, but not `model.feature1.layer2`. + Arguments: + model (nn.Module): model on which we will extract the features + return_layers (Dict[name, new_name]): a dict containing the names + of the modules for which the activations will be returned as + the key of the dict, and the value of the dict is the name + of the returned activation (which the user can specify). + """ + __annotations__ = { + "return_layers": Dict[str, str], + } + + def __init__(self, model, return_layers): + if not set(return_layers).issubset([name for name, _ in model.named_children()]): + raise ValueError("return_layers are not present in model") + + orig_return_layers = return_layers + return_layers = {str(k): str(v) for k, v in return_layers.items()} + layers = OrderedDict() + + # 遍历模型子模块按顺序存入有序字典 + # 只保存layer4及其之前的结构,舍去之后不用的结构 + for name, module in model.named_children(): + layers[name] = module + if name in return_layers: + del return_layers[name] + if not return_layers: + break + + super().__init__(layers) + self.return_layers = orig_return_layers + + def forward(self, x): + out = OrderedDict() + # 依次遍历模型的所有子模块,并进行正向传播, + # 收集layer1, layer2, layer3, layer4的输出 + for name, module in self.items(): + x = module(x) + if name in self.return_layers: + out_name = self.return_layers[name] + out[out_name] = x + return out + + +class BackboneWithFPN(nn.Module): + """ + Adds a FPN on top of a model. + Internally, it uses torchvision.models._utils.IntermediateLayerGetter to + extract a submodel that returns the feature maps specified in return_layers. + The same limitations of IntermediatLayerGetter apply here. + Arguments: + backbone (nn.Module) + return_layers (Dict[name, new_name]): a dict containing the names + of the modules for which the activations will be returned as + the key of the dict, and the value of the dict is the name + of the returned activation (which the user can specify). + in_channels_list (List[int]): number of channels for each feature map + that is returned, in the order they are present in the OrderedDict + out_channels (int): number of channels in the FPN. 
+ extra_blocks: ExtraFPNBlock + Attributes: + out_channels (int): the number of channels in the FPN + """ + + def __init__(self, + backbone: nn.Module, + return_layers=None, + in_channels_list=None, + out_channels=256, + extra_blocks=None, + re_getter=True): + super().__init__() + + if extra_blocks is None: + extra_blocks = LastLevelMaxPool() + + if re_getter: + assert return_layers is not None + self.body = IntermediateLayerGetter(backbone, return_layers=return_layers) + else: + self.body = backbone + + self.fpn = FeaturePyramidNetwork( + in_channels_list=in_channels_list, + out_channels=out_channels, + extra_blocks=extra_blocks, + ) + + self.out_channels = out_channels + + def forward(self, x): + x = self.body(x) + x = self.fpn(x) + return x + + +class FeaturePyramidNetwork(nn.Module): + """ + Module that adds a FPN from on top of a set of feature maps. This is based on + `"Feature Pyramid Network for Object Detection" `_. + The feature maps are currently supposed to be in increasing depth + order. + The input to the model is expected to be an OrderedDict[Tensor], containing + the feature maps on top of which the FPN will be added. + Arguments: + in_channels_list (list[int]): number of channels for each feature map that + is passed to the module + out_channels (int): number of channels of the FPN representation + extra_blocks (ExtraFPNBlock or None): if provided, extra operations will + be performed. It is expected to take the fpn features, the original + features and the names of the original features as input, and returns + a new list of feature maps and their corresponding names + """ + + def __init__(self, in_channels_list, out_channels, extra_blocks=None): + super().__init__() + # 用来调整resnet特征矩阵(layer1,2,3,4)的channel(kernel_size=1) + self.inner_blocks = nn.ModuleList() + # 对调整后的特征矩阵使用3x3的卷积核来得到对应的预测特征矩阵 + self.layer_blocks = nn.ModuleList() + for in_channels in in_channels_list: + if in_channels == 0: + continue + inner_block_module = nn.Conv2d(in_channels, out_channels, 1) + layer_block_module = nn.Conv2d(out_channels, out_channels, 3, padding=1) + self.inner_blocks.append(inner_block_module) + self.layer_blocks.append(layer_block_module) + + # initialize parameters now to avoid modifying the initialization of top_blocks + for m in self.children(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_uniform_(m.weight, a=1) + nn.init.constant_(m.bias, 0) + + self.extra_blocks = extra_blocks + + def get_result_from_inner_blocks(self, x: Tensor, idx: int) -> Tensor: + """ + This is equivalent to self.inner_blocks[idx](x), + but torchscript doesn't support this yet + """ + num_blocks = len(self.inner_blocks) + if idx < 0: + idx += num_blocks + i = 0 + out = x + for module in self.inner_blocks: + if i == idx: + out = module(x) + i += 1 + return out + + def get_result_from_layer_blocks(self, x: Tensor, idx: int) -> Tensor: + """ + This is equivalent to self.layer_blocks[idx](x), + but torchscript doesn't support this yet + """ + num_blocks = len(self.layer_blocks) + if idx < 0: + idx += num_blocks + i = 0 + out = x + for module in self.layer_blocks: + if i == idx: + out = module(x) + i += 1 + return out + + def forward(self, x: Dict[str, Tensor]) -> Dict[str, Tensor]: + """ + Computes the FPN for a set of feature maps. + Arguments: + x (OrderedDict[Tensor]): feature maps for each feature level. + Returns: + results (OrderedDict[Tensor]): feature maps after FPN layers. + They are ordered from highest resolution first. 
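+            (when LastLevelMaxPool is used as extra_blocks, an extra "pool" level is appended last)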
+ """ + # unpack OrderedDict into two lists for easier handling + names = list(x.keys()) + x = list(x.values()) + + # 将resnet layer4的channel调整到指定的out_channels + # last_inner = self.inner_blocks[-1](x[-1]) + last_inner = self.get_result_from_inner_blocks(x[-1], -1) + # result中保存着每个预测特征层 + results = [] + # 将layer4调整channel后的特征矩阵,通过3x3卷积后得到对应的预测特征矩阵 + # results.append(self.layer_blocks[-1](last_inner)) + results.append(self.get_result_from_layer_blocks(last_inner, -1)) + + for idx in range(len(x) - 2, -1, -1): + inner_lateral = self.get_result_from_inner_blocks(x[idx], idx) + feat_shape = inner_lateral.shape[-2:] + inner_top_down = F.interpolate(last_inner, size=feat_shape, mode="nearest") + last_inner = inner_lateral + inner_top_down + results.insert(0, self.get_result_from_layer_blocks(last_inner, idx)) + + # 在layer4对应的预测特征层基础上生成预测特征矩阵5 + if self.extra_blocks is not None: + results, names = self.extra_blocks(results, x, names) + + # make it back an OrderedDict + out = OrderedDict([(k, v) for k, v in zip(names, results)]) + + return out + + +class LastLevelMaxPool(torch.nn.Module): + """ + Applies a max_pool2d on top of the last feature map + """ + + def forward(self, x: List[Tensor], y: List[Tensor], names: List[str]) -> Tuple[List[Tensor], List[str]]: + names.append("pool") + x.append(F.max_pool2d(x[-1], 1, 2, 0)) + return x, names diff --git a/detector/Mask_RCNN/backbone/resnet50_fpn_model.py b/detector/Mask_RCNN/backbone/resnet50_fpn_model.py new file mode 100644 index 0000000000000000000000000000000000000000..a79502e5b2e3694e5571231fdd4610ed796a2af4 --- /dev/null +++ b/detector/Mask_RCNN/backbone/resnet50_fpn_model.py @@ -0,0 +1,199 @@ +import os + +import torch +import torch.nn as nn +from torchvision.ops.misc import FrozenBatchNorm2d + +from .feature_pyramid_network import BackboneWithFPN, LastLevelMaxPool + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, in_channel, out_channel, stride=1, downsample=None, norm_layer=None): + super().__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + + self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=out_channel, + kernel_size=1, stride=1, bias=False) # squeeze channels + self.bn1 = norm_layer(out_channel) + # ----------------------------------------- + self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel, + kernel_size=3, stride=stride, bias=False, padding=1) + self.bn2 = norm_layer(out_channel) + # ----------------------------------------- + self.conv3 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel * self.expansion, + kernel_size=1, stride=1, bias=False) # unsqueeze channels + self.bn3 = norm_layer(out_channel * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + + def forward(self, x): + identity = x + if self.downsample is not None: + identity = self.downsample(x) + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + out += identity + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + + def __init__(self, block, blocks_num, num_classes=1000, include_top=True, norm_layer=None): + super().__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + self._norm_layer = norm_layer + + self.include_top = include_top + self.in_channel = 64 + + self.conv1 = nn.Conv2d(3, self.in_channel, kernel_size=7, stride=2, + padding=3, bias=False) + self.bn1 = norm_layer(self.in_channel) + 
self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, blocks_num[0]) + self.layer2 = self._make_layer(block, 128, blocks_num[1], stride=2) + self.layer3 = self._make_layer(block, 256, blocks_num[2], stride=2) + self.layer4 = self._make_layer(block, 512, blocks_num[3], stride=2) + if self.include_top: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) # output size = (1, 1) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + + def _make_layer(self, block, channel, block_num, stride=1): + norm_layer = self._norm_layer + downsample = None + if stride != 1 or self.in_channel != channel * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.in_channel, channel * block.expansion, kernel_size=1, stride=stride, bias=False), + norm_layer(channel * block.expansion)) + + layers = [] + layers.append(block(self.in_channel, channel, downsample=downsample, + stride=stride, norm_layer=norm_layer)) + self.in_channel = channel * block.expansion + + for _ in range(1, block_num): + layers.append(block(self.in_channel, channel, norm_layer=norm_layer)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + if self.include_top: + x = self.avgpool(x) + x = torch.flatten(x, 1) + x = self.fc(x) + + return x + + +def overwrite_eps(model, eps): + """ + This method overwrites the default eps values of all the + FrozenBatchNorm2d layers of the model with the provided value. + This is necessary to address the BC-breaking change introduced + by the bug-fix at pytorch/vision#2933. The overwrite is applied + only when the pretrained weights are loaded to maintain compatibility + with previous versions. + + Args: + model (nn.Module): The model on which we perform the overwrite. + eps (float): The new value of eps. 
+ """ + for module in model.modules(): + if isinstance(module, FrozenBatchNorm2d): + module.eps = eps + + +def resnet50_fpn_backbone(pretrain_path="", + norm_layer=nn.BatchNorm2d, + trainable_layers=3, + returned_layers=None, + extra_blocks=None): + """ + 搭建resnet50_fpn——backbone + Args: + pretrain_path: resnet50的预训练权重,如果不使用就默认为空 + norm_layer: 默认是nn.BatchNorm2d,如果GPU显存很小,batch_size不能设置很大, + 建议将norm_layer设置成FrozenBatchNorm2d(默认是nn.BatchNorm2d) + (https://github.com/facebookresearch/maskrcnn-benchmark/issues/267) + trainable_layers: 指定训练哪些层结构 + returned_layers: 指定哪些层的输出需要返回 + extra_blocks: 在输出的特征层基础上额外添加的层结构 + + Returns: + + """ + resnet_backbone = ResNet(Bottleneck, [3, 4, 6, 3], + include_top=False, + norm_layer=norm_layer) + + if isinstance(norm_layer, FrozenBatchNorm2d): + overwrite_eps(resnet_backbone, 0.0) + + if pretrain_path != "": + assert os.path.exists(pretrain_path), "{} is not exist.".format(pretrain_path) + # 载入预训练权重 + print(resnet_backbone.load_state_dict(torch.load(pretrain_path), strict=False)) + + # select layers that wont be frozen + assert 0 <= trainable_layers <= 5 + layers_to_train = ['layer4', 'layer3', 'layer2', 'layer1', 'conv1'][:trainable_layers] + + # 如果要训练所有层结构的话,不要忘了conv1后还有一个bn1 + if trainable_layers == 5: + layers_to_train.append("bn1") + + # freeze layers + for name, parameter in resnet_backbone.named_parameters(): + # 只训练不在layers_to_train列表中的层结构 + if all([not name.startswith(layer) for layer in layers_to_train]): + parameter.requires_grad_(False) + + if extra_blocks is None: + extra_blocks = LastLevelMaxPool() + + if returned_layers is None: + returned_layers = [1, 2, 3, 4] + # 返回的特征层个数肯定大于0小于5 + assert min(returned_layers) > 0 and max(returned_layers) < 5 + + # return_layers = {'layer1': '0', 'layer2': '1', 'layer3': '2', 'layer4': '3'} + return_layers = {f'layer{k}': str(v) for v, k in enumerate(returned_layers)} + + # in_channel 为layer4的输出特征矩阵channel = 2048 + in_channels_stage2 = resnet_backbone.in_channel // 8 # 256 + # 记录resnet50提供给fpn的每个特征层channel + in_channels_list = [in_channels_stage2 * 2 ** (i - 1) for i in returned_layers] + # 通过fpn后得到的每个特征层的channel + out_channels = 256 + return BackboneWithFPN(resnet_backbone, return_layers, in_channels_list, out_channels, extra_blocks=extra_blocks) diff --git a/detector/Mask_RCNN/coco_classes.json b/detector/Mask_RCNN/coco_classes.json new file mode 100644 index 0000000000000000000000000000000000000000..21bdd48ca18a46d790b271711be449039813900a --- /dev/null +++ b/detector/Mask_RCNN/coco_classes.json @@ -0,0 +1,82 @@ +{ + "0": "person", + "1": "bicycle", + "10": "fire hydrant", + "11": "stop sign", + "12": "parking meter", + "13": "bench", + "14": "bird", + "15": "cat", + "16": "dog", + "17": "horse", + "18": "sheep", + "19": "cow", + "2": "car", + "20": "elephant", + "21": "bear", + "22": "zebra", + "23": "giraffe", + "24": "backpack", + "25": "umbrella", + "26": "handbag", + "27": "tie", + "28": "suitcase", + "29": "frisbee", + "3": "motorcycle", + "30": "skis", + "31": "snowboard", + "32": "sports ball", + "33": "kite", + "34": "baseball bat", + "35": "baseball glove", + "36": "skateboard", + "37": "surfboard", + "38": "tennis racket", + "39": "bottle", + "4": "airplane", + "40": "wine glass", + "41": "cup", + "42": "fork", + "43": "knife", + "44": "spoon", + "45": "bowl", + "46": "banana", + "47": "apple", + "48": "sandwich", + "49": "orange", + "5": "bus", + "50": "broccoli", + "51": "carrot", + "52": "hot dog", + "53": "pizza", + "54": "donut", + "55": "cake", + "56": "chair", + "57": "couch", + "58": 
"potted plant", + "59": "bed", + "6": "train", + "60": "dining table", + "61": "toilet", + "62": "tv", + "63": "laptop", + "64": "mouse", + "65": "remote", + "66": "keyboard", + "67": "cell phone", + "68": "microwave", + "69": "oven", + "7": "truck", + "70": "toaster", + "71": "sink", + "72": "refrigerator", + "73": "book", + "74": "clock", + "75": "vase", + "76": "scissors", + "77": "teddy bear", + "78": "hair drier", + "79": "toothbrush", + "8": "boat", + "9": "traffic light" +} \ No newline at end of file diff --git a/detector/Mask_RCNN/draw_box_utils.py b/detector/Mask_RCNN/draw_box_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2d74c95291ed0e2fe11e6d93ef871b542accb648 --- /dev/null +++ b/detector/Mask_RCNN/draw_box_utils.py @@ -0,0 +1,153 @@ +from PIL.Image import Image, fromarray +import PIL.ImageDraw as ImageDraw +import PIL.ImageFont as ImageFont +from PIL import ImageColor +import numpy as np + +STANDARD_COLORS = [ + 'AliceBlue', 'Chartreuse', 'Aqua', 'Aquamarine', 'Azure', 'Beige', 'Bisque', + 'BlanchedAlmond', 'BlueViolet', 'BurlyWood', 'CadetBlue', 'AntiqueWhite', + 'Chocolate', 'Coral', 'CornflowerBlue', 'Cornsilk', 'Crimson', 'Cyan', + 'DarkCyan', 'DarkGoldenRod', 'DarkGrey', 'DarkKhaki', 'DarkOrange', + 'DarkOrchid', 'DarkSalmon', 'DarkSeaGreen', 'DarkTurquoise', 'DarkViolet', + 'DeepPink', 'DeepSkyBlue', 'DodgerBlue', 'FireBrick', 'FloralWhite', + 'ForestGreen', 'Fuchsia', 'Gainsboro', 'GhostWhite', 'Gold', 'GoldenRod', + 'Salmon', 'Tan', 'HoneyDew', 'HotPink', 'IndianRed', 'Ivory', 'Khaki', + 'Lavender', 'LavenderBlush', 'LawnGreen', 'LemonChiffon', 'LightBlue', + 'LightCoral', 'LightCyan', 'LightGoldenRodYellow', 'LightGray', 'LightGrey', + 'LightGreen', 'LightPink', 'LightSalmon', 'LightSeaGreen', 'LightSkyBlue', + 'LightSlateGray', 'LightSlateGrey', 'LightSteelBlue', 'LightYellow', 'Lime', + 'LimeGreen', 'Linen', 'Magenta', 'MediumAquaMarine', 'MediumOrchid', + 'MediumPurple', 'MediumSeaGreen', 'MediumSlateBlue', 'MediumSpringGreen', + 'MediumTurquoise', 'MediumVioletRed', 'MintCream', 'MistyRose', 'Moccasin', + 'NavajoWhite', 'OldLace', 'Olive', 'OliveDrab', 'Orange', 'OrangeRed', + 'Orchid', 'PaleGoldenRod', 'PaleGreen', 'PaleTurquoise', 'PaleVioletRed', + 'PapayaWhip', 'PeachPuff', 'Peru', 'Pink', 'Plum', 'PowderBlue', 'Purple', + 'Red', 'RosyBrown', 'RoyalBlue', 'SaddleBrown', 'Green', 'SandyBrown', + 'SeaGreen', 'SeaShell', 'Sienna', 'Silver', 'SkyBlue', 'SlateBlue', + 'SlateGray', 'SlateGrey', 'Snow', 'SpringGreen', 'SteelBlue', 'GreenYellow', + 'Teal', 'Thistle', 'Tomato', 'Turquoise', 'Violet', 'Wheat', 'White', + 'WhiteSmoke', 'Yellow', 'YellowGreen' +] + + +def draw_text(draw, + box: list, + cls: int, + score: float, + category_index: dict, + color: str, + font: str = 'arial.ttf', + font_size: int = 24): + """ + 将目标边界框和类别信息绘制到图片上 + """ + try: + font = ImageFont.truetype(font, font_size) + except IOError: + font = ImageFont.load_default() + + left, top, right, bottom = box + # If the total height of the display strings added to the top of the bounding + # box exceeds the top of the image, stack the strings below the bounding box + # instead of above. + display_str = f"{category_index[str(cls)]}: {int(100 * score)}%" + display_str_heights = [font.getsize(ds)[1] for ds in display_str] + # Each display_str has a top and bottom margin of 0.05x. 
+ display_str_height = (1 + 2 * 0.05) * max(display_str_heights) + + if top > display_str_height: + text_top = top - display_str_height + text_bottom = top + else: + text_top = bottom + text_bottom = bottom + display_str_height + + for ds in display_str: + text_width, text_height = font.getsize(ds) + margin = np.ceil(0.05 * text_width) + draw.rectangle([(left, text_top), + (left + text_width + 2 * margin, text_bottom)], fill=color) + draw.text((left + margin, text_top), + ds, + fill='black', + font=font) + left += text_width + + +def draw_masks(image, masks, colors, thresh: float = 0.7, alpha: float = 0.5): + np_image = np.array(image) + masks = np.where(masks > thresh, True, False) + + # colors = np.array(colors) + img_to_draw = np.copy(np_image) + # TODO: There might be a way to vectorize this + for mask, color in zip(masks, colors): + img_to_draw[mask] = color + + out = np_image * (1 - alpha) + img_to_draw * alpha + return fromarray(out.astype(np.uint8)) + + +def draw_objs(image: Image, + boxes: np.ndarray = None, + classes: np.ndarray = None, + scores: np.ndarray = None, + masks: np.ndarray = None, + category_index: dict = None, + box_thresh: float = 0.1, + mask_thresh: float = 0.5, + line_thickness: int = 8, + font: str = 'arial.ttf', + font_size: int = 24, + draw_boxes_on_image: bool = True, + draw_masks_on_image: bool = True): + """ + 将目标边界框信息,类别信息,mask信息绘制在图片上 + Args: + image: 需要绘制的图片 + boxes: 目标边界框信息 + classes: 目标类别信息 + scores: 目标概率信息 + masks: 目标mask信息 + category_index: 类别与名称字典 + box_thresh: 过滤的概率阈值 + mask_thresh: + line_thickness: 边界框宽度 + font: 字体类型 + font_size: 字体大小 + draw_boxes_on_image: + draw_masks_on_image: + + Returns: + + """ + + # 过滤掉低概率的目标 + idxs = np.greater(scores, box_thresh) + boxes = boxes[idxs] + classes = classes[idxs] + scores = scores[idxs] + if masks is not None: + masks = masks[idxs] + if len(boxes) == 0: + return image + + colors = [ImageColor.getrgb(STANDARD_COLORS[cls % len(STANDARD_COLORS)]) for cls in classes] + + if draw_boxes_on_image: + # Draw all boxes onto image. + draw = ImageDraw.Draw(image) + for box, cls, score, color in zip(boxes, classes, scores, colors): + left, top, right, bottom = box + # 绘制目标边界框 + draw.line([(left, top), (left, bottom), (right, bottom), + (right, top), (left, top)], width=line_thickness, fill=color) + # 绘制类别和概率信息 + draw_text(draw, box.tolist(), int(cls), float(score), category_index, color, font, font_size) + + if draw_masks_on_image and (masks is not None): + # Draw all mask onto image. 
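+        # draw_masks binarizes each instance mask at mask_thresh and alpha-blends it (alpha=0.5 by default) onto the image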
+        image = draw_masks(image, masks, colors, mask_thresh)
+
+    return image
diff --git a/detector/Mask_RCNN/maskrcnn.py b/detector/Mask_RCNN/maskrcnn.py
new file mode 100644
index 0000000000000000000000000000000000000000..7445895045ffbc47731da9244f568efc1c4955eb
--- /dev/null
+++ b/detector/Mask_RCNN/maskrcnn.py
@@ -0,0 +1,215 @@
+import os
+import json
+import sys
+from pathlib import Path
+
+import cv2
+import numpy as np
+from PIL import Image
+import matplotlib.pyplot as plt
+import torch
+
+FILE = Path(__file__).resolve()
+ROOT = FILE.parents[0]
+if str(ROOT) not in sys.path:
+    sys.path.append(str(ROOT))  # add ROOT to PATH
+
+from network_files import MaskRCNN
+from backbone import resnet50_fpn_backbone
+
+# generated with ChatGPT
+# maps COCO 91-category ids to the contiguous 80-class ids
+# (ids 12, 26, 29, 30, 45, 66, 68, 69, 71 and 83 are unused in COCO2017 and are skipped)
+coco91_to_coco80 = {
+    1: 0,    # person
+    2: 1,    # bicycle
+    3: 2,    # car
+    4: 3,    # motorcycle
+    5: 4,    # airplane
+    6: 5,    # bus
+    7: 6,    # train
+    8: 7,    # truck
+    9: 8,    # boat
+    10: 9,   # traffic light
+    11: 10,  # fire hydrant
+    13: 11,  # stop sign
+    14: 12,  # parking meter
+    15: 13,  # bench
+    16: 14,  # bird
+    17: 15,  # cat
+    18: 16,  # dog
+    19: 17,  # horse
+    20: 18,  # sheep
+    21: 19,  # cow
+    22: 20,  # elephant
+    23: 21,  # bear
+    24: 22,  # zebra
+    25: 23,  # giraffe
+    27: 24,  # backpack
+    28: 25,  # umbrella
+    31: 26,  # handbag
+    32: 27,  # tie
+    33: 28,  # suitcase
+    34: 29,  # frisbee
+    35: 30,  # skis
+    36: 31,  # snowboard
+    37: 32,  # sports ball
+    38: 33,  # kite
+    39: 34,  # baseball bat
+    40: 35,  # baseball glove
+    41: 36,  # skateboard
+    42: 37,  # surfboard
+    43: 38,  # tennis racket
+    44: 39,  # bottle
+    46: 40,  # wine glass
+    47: 41,  # cup
+    48: 42,  # fork
+    49: 43,  # knife
+    50: 44,  # spoon
+    51: 45,  # bowl
+    52: 46,  # banana
+    53: 47,  # apple
+    54: 48,  # sandwich
+    55: 49,  # orange
+    56: 50,  # broccoli
+    57: 51,  # carrot
+    58: 52,  # hot dog
+    59: 53,  # pizza
+    60: 54,  # donut
+    61: 55,  # cake
+    62: 56,  # chair
+    63: 57,  # couch
+    64: 58,  # potted plant
+    65: 59,  # bed
+    67: 60,  # dining table
+    70: 61,  # toilet
+    72: 62,  # tv
+    73: 63,  # laptop
+    74: 64,  # mouse
+    75: 65,  # remote
+    76: 66,  # keyboard
+    77: 67,  # cell phone
+    78: 68,  # microwave
+    79: 69,  # oven
+    80: 70,  # toaster
+    81: 71,  # sink
+    82: 72,  # refrigerator
+    84: 73,  # book
+    85: 74,  # clock
+    86: 75,  # vase
+    87: 76,  # scissors
+    88: 77,  # teddy bear
+    89: 78,  # hair drier
+    90: 79,  # toothbrush
+}
+
+
+def create_model(num_classes, box_thresh=0.5):
+    backbone = resnet50_fpn_backbone()
+    model = MaskRCNN(backbone,
+                     num_classes=num_classes,
+                     rpn_score_thresh=box_thresh,
+                     box_score_thresh=box_thresh)
+
+    return model
+
+
+def xyxy_to_xywh(boxes_xyxy):
+    if isinstance(boxes_xyxy, torch.Tensor):
+        boxes_xywh = boxes_xyxy.clone()
+    elif isinstance(boxes_xyxy, np.ndarray):
+        boxes_xywh = boxes_xyxy.copy()
+
+    boxes_xywh[:, 0] = (boxes_xyxy[:, 0] + boxes_xyxy[:, 2]) / 2
+    boxes_xywh[:, 1] = (boxes_xyxy[:, 1] + boxes_xyxy[:, 3]) / 2
+    boxes_xywh[:, 2] = boxes_xyxy[:, 2] - boxes_xyxy[:, 0]
+    boxes_xywh[:, 3] = boxes_xyxy[:, 3] - boxes_xyxy[:, 1]
+
+    return boxes_xywh
+
+
+def xywh_to_xyxy(boxes_xywh):
+    if isinstance(boxes_xywh, torch.Tensor):
+        boxes_xyxy = boxes_xywh.clone()
+    elif isinstance(boxes_xywh, np.ndarray):
+        boxes_xyxy = boxes_xywh.copy()
+
+    boxes_xyxy[:, 0] = boxes_xywh[:, 0] - boxes_xywh[:, 2] / 2
+    boxes_xyxy[:, 1] = boxes_xywh[:, 1] - boxes_xywh[:, 3] / 2
+    boxes_xyxy[:, 2] = boxes_xywh[:, 0] + boxes_xywh[:, 2] / 2
+    boxes_xyxy[:, 3] = boxes_xywh[:, 1] + boxes_xywh[:, 3] / 2
+
+    return boxes_xyxy
+
+
+class Mask_RCNN:
+    def __init__(self, segment, num_classes, box_thresh, label_json_path='coco_classes.json', weight_path=None):
+        self.segment = segment
+        self.num_classes = num_classes  # excluding background
+        self.box_thresh = box_thresh
+        self.weight_path = weight_path
+        self.label_json_path = label_json_path
+        with open(self.label_json_path, 'r') as f:
+            self.category_index = json.load(f)
+        self.class_names = [value for value in self.category_index.values()]
+
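+        # note: json.load keeps the file's key order ("0", "1", "10", "11", ...), so the
+        # re-indexing below enumerates the names in that string-sorted order rather than numeric order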
self.category_index = {str(k): v for k, v in enumerate(self.class_names)} + # get devices + self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + # print("using {} device.".format(self.device)) + + # create model + self.model = create_model(num_classes=self.num_classes + 1, box_thresh=self.box_thresh) + + # load train weights + assert os.path.exists(self.weight_path), "{} file dose not exist.".format(self.weight_path) + weights_dict = torch.load(self.weight_path, map_location='cpu') + weights_dict = weights_dict["model"] if "model" in weights_dict else weights_dict + self.model.load_state_dict(weights_dict) + self.model.to(self.device) + + def __call__(self, img): + if (img > 1).any(): + img = (img / 255.).astype('float32') + img = torch.from_numpy(img).permute(2, 0, 1) + + self.model.eval() + with torch.no_grad(): + outputs = self.model(img.to(self.device).unsqueeze(0))[0] + + # coco91 to 80 + outputs['labels'] = torch.tensor([coco91_to_coco80[label.item()] for label in outputs['labels']], + device=outputs['boxes'].device) + if self.segment: + return (xyxy_to_xywh(outputs['boxes']).detach().cpu().numpy(), + outputs['scores'].detach().cpu().numpy(), + outputs['labels'].detach().cpu().numpy(), + outputs['masks'].squeeze().detach().cpu().numpy()) + else: + return (xyxy_to_xywh(outputs['boxes']).detach().cpu().numpy(), + outputs['scores'].detach().cpu().numpy(), + outputs['labels'].detach().cpu().numpy()) + + +if __name__ == '__main__': + from draw_box_utils import draw_objs + + num_classes = 90 # 不包含背景 + box_thresh = 0.5 + img_path = "./test.jpg" + weight_path = "./save_weights/maskrcnn_resnet50_fpn_coco.pth" + + mask_rcnn = Mask_RCNN(True, num_classes=90, box_thresh=0.5, label_json_path='coco_classes.json', weight_path=weight_path) + img = cv2.imread(img_path) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + predict_boxes, predict_scores, predict_classes, predict_mask = mask_rcnn(img) + + plot_img = draw_objs(Image.fromarray(img), + boxes=xywh_to_xyxy(predict_boxes), + classes=predict_classes.astype(np.int32), + scores=predict_scores, + masks=predict_mask, + category_index=mask_rcnn.category_index, + box_thresh=box_thresh, + line_thickness=3, + font='arial.ttf', + font_size=20) + plt.imshow(plot_img) + plt.show() diff --git a/detector/Mask_RCNN/network_files/__init__.py b/detector/Mask_RCNN/network_files/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3a2ed22998b2b41c826fd9f0f0535b979aa1b21f --- /dev/null +++ b/detector/Mask_RCNN/network_files/__init__.py @@ -0,0 +1,3 @@ +from .faster_rcnn_framework import FasterRCNN, FastRCNNPredictor +from .rpn_function import AnchorsGenerator +from .mask_rcnn import MaskRCNN diff --git a/detector/Mask_RCNN/network_files/boxes.py b/detector/Mask_RCNN/network_files/boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..8eeca4573fa677e996088ab79fabf57a328afce3 --- /dev/null +++ b/detector/Mask_RCNN/network_files/boxes.py @@ -0,0 +1,181 @@ +import torch +from typing import Tuple +from torch import Tensor +import torchvision + + +def nms(boxes, scores, iou_threshold): + # type: (Tensor, Tensor, float) -> Tensor + """ + Performs non-maximum suppression (NMS) on the boxes according + to their intersection-over-union (IoU). + + NMS iteratively removes lower scoring boxes which have an + IoU greater than iou_threshold with another (higher scoring) + box. + + Parameters + ---------- + boxes : Tensor[N, 4]) + boxes to perform NMS on. 
They + are expected to be in (x1, y1, x2, y2) format + scores : Tensor[N] + scores for each one of the boxes + iou_threshold : float + discards all overlapping + boxes with IoU > iou_threshold + + Returns + ------- + keep : Tensor + int64 tensor with the indices + of the elements that have been kept + by NMS, sorted in decreasing order of scores + """ + return torch.ops.torchvision.nms(boxes, scores, iou_threshold) + + +def batched_nms(boxes, scores, idxs, iou_threshold): + # type: (Tensor, Tensor, Tensor, float) -> Tensor + """ + Performs non-maximum suppression in a batched fashion. + + Each index value correspond to a category, and NMS + will not be applied between elements of different categories. + + Parameters + ---------- + boxes : Tensor[N, 4] + boxes where NMS will be performed. They + are expected to be in (x1, y1, x2, y2) format + scores : Tensor[N] + scores for each one of the boxes + idxs : Tensor[N] + indices of the categories for each one of the boxes. + iou_threshold : float + discards all overlapping boxes + with IoU < iou_threshold + + Returns + ------- + keep : Tensor + int64 tensor with the indices of + the elements that have been kept by NMS, sorted + in decreasing order of scores + """ + if boxes.numel() == 0: + return torch.empty((0,), dtype=torch.int64, device=boxes.device) + + # strategy: in order to perform NMS independently per class. + # we add an offset to all the boxes. The offset is dependent + # only on the class idx, and is large enough so that boxes + # from different classes do not overlap + # 获取所有boxes中最大的坐标值(xmin, ymin, xmax, ymax) + max_coordinate = boxes.max() + + # to(): Performs Tensor dtype and/or device conversion + # 为每一个类别/每一层生成一个很大的偏移量 + # 这里的to只是让生成tensor的dytpe和device与boxes保持一致 + offsets = idxs.to(boxes) * (max_coordinate + 1) + # boxes加上对应层的偏移量后,保证不同类别/层之间boxes不会有重合的现象 + boxes_for_nms = boxes + offsets[:, None] + keep = nms(boxes_for_nms, scores, iou_threshold) + return keep + + +def remove_small_boxes(boxes, min_size): + # type: (Tensor, float) -> Tensor + """ + Remove boxes which contains at least one side smaller than min_size. + 移除宽高小于指定阈值的索引 + Arguments: + boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format + min_size (float): minimum size + + Returns: + keep (Tensor[K]): indices of the boxes that have both sides + larger than min_size + """ + ws, hs = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1] # 预测boxes的宽和高 + # keep = (ws >= min_size) & (hs >= min_size) # 当满足宽,高都大于给定阈值时为True + keep = torch.logical_and(torch.ge(ws, min_size), torch.ge(hs, min_size)) + # nonzero(): Returns a tensor containing the indices of all non-zero elements of input + # keep = keep.nonzero().squeeze(1) + keep = torch.where(keep)[0] + return keep + + +def clip_boxes_to_image(boxes, size): + # type: (Tensor, Tuple[int, int]) -> Tensor + """ + Clip boxes so that they lie inside an image of size `size`. 
+ 裁剪预测的boxes信息,将越界的坐标调整到图片边界上 + + Arguments: + boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format + size (Tuple[height, width]): size of the image + + Returns: + clipped_boxes (Tensor[N, 4]) + """ + dim = boxes.dim() + boxes_x = boxes[..., 0::2] # x1, x2 + boxes_y = boxes[..., 1::2] # y1, y2 + height, width = size + + if torchvision._is_tracing(): + boxes_x = torch.max(boxes_x, torch.tensor(0, dtype=boxes.dtype, device=boxes.device)) + boxes_x = torch.min(boxes_x, torch.tensor(width, dtype=boxes.dtype, device=boxes.device)) + boxes_y = torch.max(boxes_y, torch.tensor(0, dtype=boxes.dtype, device=boxes.device)) + boxes_y = torch.min(boxes_y, torch.tensor(height, dtype=boxes.dtype, device=boxes.device)) + else: + boxes_x = boxes_x.clamp(min=0, max=width) # 限制x坐标范围在[0,width]之间 + boxes_y = boxes_y.clamp(min=0, max=height) # 限制y坐标范围在[0,height]之间 + + clipped_boxes = torch.stack((boxes_x, boxes_y), dim=dim) + return clipped_boxes.reshape(boxes.shape) + + +def box_area(boxes): + """ + Computes the area of a set of bounding boxes, which are specified by its + (x1, y1, x2, y2) coordinates. + + Arguments: + boxes (Tensor[N, 4]): boxes for which the area will be computed. They + are expected to be in (x1, y1, x2, y2) format + + Returns: + area (Tensor[N]): area for each box + """ + return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) + + +def box_iou(boxes1, boxes2): + """ + Return intersection-over-union (Jaccard index) of boxes. + + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + + Arguments: + boxes1 (Tensor[N, 4]) + boxes2 (Tensor[M, 4]) + + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + area1 = box_area(boxes1) + area2 = box_area(boxes2) + + # When the shapes do not match, + # the shape of the returned output tensor follows the broadcasting rules + lt = torch.max(boxes1[:, None, :2], boxes2[:, :2]) # left-top [N,M,2] + rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:]) # right-bottom [N,M,2] + + wh = (rb - lt).clamp(min=0) # [N,M,2] + inter = wh[:, :, 0] * wh[:, :, 1] # [N,M] + + iou = inter / (area1[:, None] + area2 - inter) + return iou + diff --git a/detector/Mask_RCNN/network_files/det_utils.py b/detector/Mask_RCNN/network_files/det_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6b4fe6013a9f1d492192584e602889ffab4d0d40 --- /dev/null +++ b/detector/Mask_RCNN/network_files/det_utils.py @@ -0,0 +1,408 @@ +import torch +import math +from typing import List, Tuple +from torch import Tensor + + +class BalancedPositiveNegativeSampler(object): + """ + This class samples batches, ensuring that they contain a fixed proportion of positives + """ + + def __init__(self, batch_size_per_image, positive_fraction): + # type: (int, float) -> None + """ + Arguments: + batch_size_per_image (int): number of elements to be selected per image + positive_fraction (float): percentage of positive elements per batch + """ + self.batch_size_per_image = batch_size_per_image + self.positive_fraction = positive_fraction + + def __call__(self, matched_idxs): + # type: (List[Tensor]) -> Tuple[List[Tensor], List[Tensor]] + """ + Arguments: + matched idxs: list of tensors containing -1, 0 or positive values. + Each tensor corresponds to a specific image. + -1 values are ignored, 0 are considered as negatives and > 0 as + positives. + + Returns: + pos_idx (list[tensor]) + neg_idx (list[tensor]) + + Returns two lists of binary masks for each image. 
+ The first list contains the positive elements that were selected, + and the second list the negative example. + """ + pos_idx = [] + neg_idx = [] + # 遍历每张图像的matched_idxs + for matched_idxs_per_image in matched_idxs: + # >= 1的为正样本, nonzero返回非零元素索引 + # positive = torch.nonzero(matched_idxs_per_image >= 1).squeeze(1) + positive = torch.where(torch.ge(matched_idxs_per_image, 1))[0] + # = 0的为负样本 + # negative = torch.nonzero(matched_idxs_per_image == 0).squeeze(1) + negative = torch.where(torch.eq(matched_idxs_per_image, 0))[0] + + # 指定正样本的数量 + num_pos = int(self.batch_size_per_image * self.positive_fraction) + # protect against not enough positive examples + # 如果正样本数量不够就直接采用所有正样本 + num_pos = min(positive.numel(), num_pos) + # 指定负样本数量 + num_neg = self.batch_size_per_image - num_pos + # protect against not enough negative examples + # 如果负样本数量不够就直接采用所有负样本 + num_neg = min(negative.numel(), num_neg) + + # randomly select positive and negative examples + # Returns a random permutation of integers from 0 to n - 1. + # 随机选择指定数量的正负样本 + perm1 = torch.randperm(positive.numel(), device=positive.device)[:num_pos] + perm2 = torch.randperm(negative.numel(), device=negative.device)[:num_neg] + + pos_idx_per_image = positive[perm1] + neg_idx_per_image = negative[perm2] + + # create binary mask from indices + pos_idx_per_image_mask = torch.zeros_like( + matched_idxs_per_image, dtype=torch.uint8 + ) + neg_idx_per_image_mask = torch.zeros_like( + matched_idxs_per_image, dtype=torch.uint8 + ) + + pos_idx_per_image_mask[pos_idx_per_image] = 1 + neg_idx_per_image_mask[neg_idx_per_image] = 1 + + pos_idx.append(pos_idx_per_image_mask) + neg_idx.append(neg_idx_per_image_mask) + + return pos_idx, neg_idx + + +@torch.jit._script_if_tracing +def encode_boxes(reference_boxes, proposals, weights): + # type: (torch.Tensor, torch.Tensor, torch.Tensor) -> torch.Tensor + """ + Encode a set of proposals with respect to some + reference boxes + + Arguments: + reference_boxes (Tensor): reference boxes(gt) + proposals (Tensor): boxes to be encoded(anchors) + weights: + """ + + # perform some unpacking to make it JIT-fusion friendly + wx = weights[0] + wy = weights[1] + ww = weights[2] + wh = weights[3] + + # unsqueeze() + # Returns a new tensor with a dimension of size one inserted at the specified position. 
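+    # the code below computes the Faster R-CNN regression targets:
+    #   tx = wx * (gx - ax) / aw,   ty = wy * (gy - ay) / ah
+    #   tw = ww * log(gw / aw),     th = wh * log(gh / ah)
+    # where (ax, ay, aw, ah) are the proposal/anchor centers and sizes
+    # and (gx, gy, gw, gh) the matched ground-truth centers and sizes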
+ proposals_x1 = proposals[:, 0].unsqueeze(1) + proposals_y1 = proposals[:, 1].unsqueeze(1) + proposals_x2 = proposals[:, 2].unsqueeze(1) + proposals_y2 = proposals[:, 3].unsqueeze(1) + + reference_boxes_x1 = reference_boxes[:, 0].unsqueeze(1) + reference_boxes_y1 = reference_boxes[:, 1].unsqueeze(1) + reference_boxes_x2 = reference_boxes[:, 2].unsqueeze(1) + reference_boxes_y2 = reference_boxes[:, 3].unsqueeze(1) + + # implementation starts here + # parse widths and heights + ex_widths = proposals_x2 - proposals_x1 + ex_heights = proposals_y2 - proposals_y1 + # parse coordinate of center point + ex_ctr_x = proposals_x1 + 0.5 * ex_widths + ex_ctr_y = proposals_y1 + 0.5 * ex_heights + + gt_widths = reference_boxes_x2 - reference_boxes_x1 + gt_heights = reference_boxes_y2 - reference_boxes_y1 + gt_ctr_x = reference_boxes_x1 + 0.5 * gt_widths + gt_ctr_y = reference_boxes_y1 + 0.5 * gt_heights + + targets_dx = wx * (gt_ctr_x - ex_ctr_x) / ex_widths + targets_dy = wy * (gt_ctr_y - ex_ctr_y) / ex_heights + targets_dw = ww * torch.log(gt_widths / ex_widths) + targets_dh = wh * torch.log(gt_heights / ex_heights) + + targets = torch.cat((targets_dx, targets_dy, targets_dw, targets_dh), dim=1) + return targets + + +class BoxCoder(object): + """ + This class encodes and decodes a set of bounding boxes into + the representation used for training the regressors. + """ + + def __init__(self, weights, bbox_xform_clip=math.log(1000. / 16)): + # type: (Tuple[float, float, float, float], float) -> None + """ + Arguments: + weights (4-element tuple) + bbox_xform_clip (float) + """ + self.weights = weights + self.bbox_xform_clip = bbox_xform_clip + + def encode(self, reference_boxes, proposals): + # type: (List[Tensor], List[Tensor]) -> List[Tensor] + """ + 结合anchors和与之对应的gt计算regression参数 + Args: + reference_boxes: List[Tensor] 每个proposal/anchor对应的gt_boxes + proposals: List[Tensor] anchors/proposals + + Returns: regression parameters + + """ + # 统计每张图像的anchors个数,方便后面拼接在一起处理后在分开 + # reference_boxes和proposal数据结构相同 + boxes_per_image = [len(b) for b in reference_boxes] + reference_boxes = torch.cat(reference_boxes, dim=0) + proposals = torch.cat(proposals, dim=0) + + # targets_dx, targets_dy, targets_dw, targets_dh + targets = self.encode_single(reference_boxes, proposals) + return targets.split(boxes_per_image, 0) + + def encode_single(self, reference_boxes, proposals): + """ + Encode a set of proposals with respect to some + reference boxes + + Arguments: + reference_boxes (Tensor): reference boxes + proposals (Tensor): boxes to be encoded + """ + dtype = reference_boxes.dtype + device = reference_boxes.device + weights = torch.as_tensor(self.weights, dtype=dtype, device=device) + targets = encode_boxes(reference_boxes, proposals, weights) + + return targets + + def decode(self, rel_codes, boxes): + # type: (Tensor, List[Tensor]) -> Tensor + """ + + Args: + rel_codes: bbox regression parameters + boxes: anchors/proposals + + Returns: + + """ + assert isinstance(boxes, (list, tuple)) + assert isinstance(rel_codes, torch.Tensor) + boxes_per_image = [b.size(0) for b in boxes] + concat_boxes = torch.cat(boxes, dim=0) + + box_sum = 0 + for val in boxes_per_image: + box_sum += val + + # 将预测的bbox回归参数应用到对应anchors上得到预测bbox的坐标 + pred_boxes = self.decode_single( + rel_codes, concat_boxes + ) + + # 防止pred_boxes为空时导致reshape报错 + if box_sum > 0: + pred_boxes = pred_boxes.reshape(box_sum, -1, 4) + + return pred_boxes + + def decode_single(self, rel_codes, boxes): + """ + From a set of original boxes and encoded relative box 
offsets, + get the decoded boxes. + + Arguments: + rel_codes (Tensor): encoded boxes (bbox regression parameters) + boxes (Tensor): reference boxes (anchors/proposals) + """ + boxes = boxes.to(rel_codes.dtype) + + # xmin, ymin, xmax, ymax + widths = boxes[:, 2] - boxes[:, 0] # anchor/proposal宽度 + heights = boxes[:, 3] - boxes[:, 1] # anchor/proposal高度 + ctr_x = boxes[:, 0] + 0.5 * widths # anchor/proposal中心x坐标 + ctr_y = boxes[:, 1] + 0.5 * heights # anchor/proposal中心y坐标 + + wx, wy, ww, wh = self.weights # RPN中为[1,1,1,1], fastrcnn中为[10,10,5,5] + dx = rel_codes[:, 0::4] / wx # 预测anchors/proposals的中心坐标x回归参数 + dy = rel_codes[:, 1::4] / wy # 预测anchors/proposals的中心坐标y回归参数 + dw = rel_codes[:, 2::4] / ww # 预测anchors/proposals的宽度回归参数 + dh = rel_codes[:, 3::4] / wh # 预测anchors/proposals的高度回归参数 + + # limit max value, prevent sending too large values into torch.exp() + # self.bbox_xform_clip=math.log(1000. / 16) 4.135 + dw = torch.clamp(dw, max=self.bbox_xform_clip) + dh = torch.clamp(dh, max=self.bbox_xform_clip) + + pred_ctr_x = dx * widths[:, None] + ctr_x[:, None] + pred_ctr_y = dy * heights[:, None] + ctr_y[:, None] + pred_w = torch.exp(dw) * widths[:, None] + pred_h = torch.exp(dh) * heights[:, None] + + # xmin + pred_boxes1 = pred_ctr_x - torch.tensor(0.5, dtype=pred_ctr_x.dtype, device=pred_w.device) * pred_w + # ymin + pred_boxes2 = pred_ctr_y - torch.tensor(0.5, dtype=pred_ctr_y.dtype, device=pred_h.device) * pred_h + # xmax + pred_boxes3 = pred_ctr_x + torch.tensor(0.5, dtype=pred_ctr_x.dtype, device=pred_w.device) * pred_w + # ymax + pred_boxes4 = pred_ctr_y + torch.tensor(0.5, dtype=pred_ctr_y.dtype, device=pred_h.device) * pred_h + + pred_boxes = torch.stack((pred_boxes1, pred_boxes2, pred_boxes3, pred_boxes4), dim=2).flatten(1) + return pred_boxes + + +class Matcher(object): + BELOW_LOW_THRESHOLD = -1 + BETWEEN_THRESHOLDS = -2 + + __annotations__ = { + 'BELOW_LOW_THRESHOLD': int, + 'BETWEEN_THRESHOLDS': int, + } + + def __init__(self, high_threshold, low_threshold, allow_low_quality_matches=False): + # type: (float, float, bool) -> None + """ + Args: + high_threshold (float): quality values greater than or equal to + this value are candidate matches. + low_threshold (float): a lower quality threshold used to stratify + matches into three levels: + 1) matches >= high_threshold + 2) BETWEEN_THRESHOLDS matches in [low_threshold, high_threshold) + 3) BELOW_LOW_THRESHOLD matches in [0, low_threshold) + allow_low_quality_matches (bool): if True, produce additional matches + for predictions that have only low-quality match candidates. See + set_low_quality_matches_ for more details. 
+ """ + self.BELOW_LOW_THRESHOLD = -1 + self.BETWEEN_THRESHOLDS = -2 + assert low_threshold <= high_threshold + self.high_threshold = high_threshold # 0.7 + self.low_threshold = low_threshold # 0.3 + self.allow_low_quality_matches = allow_low_quality_matches + + def __call__(self, match_quality_matrix): + """ + 计算anchors与每个gtboxes匹配的iou最大值,并记录索引, + iou= self.low_threshold) & ( + matched_vals < self.high_threshold + ) + # iou小于low_threshold的matches索引置为-1 + matches[below_low_threshold] = self.BELOW_LOW_THRESHOLD # -1 + + # iou在[low_threshold, high_threshold]之间的matches索引置为-2 + matches[between_thresholds] = self.BETWEEN_THRESHOLDS # -2 + + if self.allow_low_quality_matches: + assert all_matches is not None + self.set_low_quality_matches_(matches, all_matches, match_quality_matrix) + + return matches + + def set_low_quality_matches_(self, matches, all_matches, match_quality_matrix): + """ + Produce additional matches for predictions that have only low-quality matches. + Specifically, for each ground-truth find the set of predictions that have + maximum overlap with it (including ties); for each prediction in that set, if + it is unmatched, then match it to the ground-truth with which it has the highest + quality value. + """ + # For each gt, find the prediction with which it has highest quality + # 对于每个gt boxes寻找与其iou最大的anchor, + # highest_quality_foreach_gt为匹配到的最大iou值 + highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1) # the dimension to reduce. + + # Find highest quality match available, even if it is low, including ties + # 寻找每个gt boxes与其iou最大的anchor索引,一个gt匹配到的最大iou可能有多个anchor + # gt_pred_pairs_of_highest_quality = torch.nonzero( + # match_quality_matrix == highest_quality_foreach_gt[:, None] + # ) + gt_pred_pairs_of_highest_quality = torch.where( + torch.eq(match_quality_matrix, highest_quality_foreach_gt[:, None]) + ) + # Example gt_pred_pairs_of_highest_quality: + # tensor([[ 0, 39796], + # [ 1, 32055], + # [ 1, 32070], + # [ 2, 39190], + # [ 2, 40255], + # [ 3, 40390], + # [ 3, 41455], + # [ 4, 45470], + # [ 5, 45325], + # [ 5, 46390]]) + # Each row is a (gt index, prediction index) + # Note how gt items 1, 2, 3, and 5 each have two ties + + # gt_pred_pairs_of_highest_quality[:, 0]代表是对应的gt index(不需要) + # pre_inds_to_update = gt_pred_pairs_of_highest_quality[:, 1] + pre_inds_to_update = gt_pred_pairs_of_highest_quality[1] + # 保留该anchor匹配gt最大iou的索引,即使iou低于设定的阈值 + matches[pre_inds_to_update] = all_matches[pre_inds_to_update] + + +def smooth_l1_loss(input, target, beta: float = 1. 
/ 9, size_average: bool = True): + """ + very similar to the smooth_l1_loss from pytorch, but with + the extra beta parameter + """ + n = torch.abs(input - target) + # cond = n < beta + cond = torch.lt(n, beta) + loss = torch.where(cond, 0.5 * n ** 2 / beta, n - 0.5 * beta) + if size_average: + return loss.mean() + return loss.sum() diff --git a/detector/Mask_RCNN/network_files/faster_rcnn_framework.py b/detector/Mask_RCNN/network_files/faster_rcnn_framework.py new file mode 100644 index 0000000000000000000000000000000000000000..827d8c65331d3a3b87d2608fcf3a5a65a08772b7 --- /dev/null +++ b/detector/Mask_RCNN/network_files/faster_rcnn_framework.py @@ -0,0 +1,354 @@ +import warnings +from collections import OrderedDict +from typing import Tuple, List, Dict, Optional, Union + +import torch +from torch import nn, Tensor +import torch.nn.functional as F +from torchvision.ops import MultiScaleRoIAlign + +from .roi_head import RoIHeads +from .transform import GeneralizedRCNNTransform +from .rpn_function import AnchorsGenerator, RPNHead, RegionProposalNetwork + + +class FasterRCNNBase(nn.Module): + """ + Main class for Generalized R-CNN. + + Arguments: + backbone (nn.Module): + rpn (nn.Module): + roi_heads (nn.Module): takes the features + the proposals from the RPN and computes + detections / masks from it. + transform (nn.Module): performs the data transformation from the inputs to feed into + the model + """ + + def __init__(self, backbone, rpn, roi_heads, transform): + super(FasterRCNNBase, self).__init__() + self.transform = transform + self.backbone = backbone + self.rpn = rpn + self.roi_heads = roi_heads + # used only on torchscript mode + self._has_warned = False + + @torch.jit.unused + def eager_outputs(self, losses, detections): + # type: (Dict[str, Tensor], List[Dict[str, Tensor]]) -> Union[Dict[str, Tensor], List[Dict[str, Tensor]]] + if self.training: + return losses + + return detections + + def forward(self, images, targets=None): + # type: (List[Tensor], Optional[List[Dict[str, Tensor]]]) -> Tuple[Dict[str, Tensor], List[Dict[str, Tensor]]] + """ + Arguments: + images (list[Tensor]): images to be processed + targets (list[Dict[Tensor]]): ground-truth boxes present in the image (optional) + + Returns: + result (list[BoxList] or dict[Tensor]): the output from the model. + During training, it returns a dict[Tensor] which contains the losses. + During testing, it returns list[BoxList] contains additional fields + like `scores`, `labels` and `mask` (for Mask R-CNN models). 
+ + """ + if self.training and targets is None: + raise ValueError("In training mode, targets should be passed") + + if self.training: + assert targets is not None + for target in targets: # 进一步判断传入的target的boxes参数是否符合规定 + boxes = target["boxes"] + if isinstance(boxes, torch.Tensor): + if len(boxes.shape) != 2 or boxes.shape[-1] != 4: + raise ValueError("Expected target boxes to be a tensor" + "of shape [N, 4], got {:}.".format( + boxes.shape)) + else: + raise ValueError("Expected target boxes to be of type " + "Tensor, got {:}.".format(type(boxes))) + + original_image_sizes = torch.jit.annotate(List[Tuple[int, int]], []) + for img in images: + val = img.shape[-2:] + assert len(val) == 2 # 防止输入的是个一维向量 + original_image_sizes.append((val[0], val[1])) + # original_image_sizes = [img.shape[-2:] for img in images] + + images, targets = self.transform(images, targets) # 对图像进行预处理 + # print(images.tensors.shape) + features = self.backbone(images.tensors) # 将图像输入backbone得到特征图 + if isinstance(features, torch.Tensor): # 若只在一层特征层上预测,将feature放入有序字典中,并编号为‘0’ + features = OrderedDict([('0', features)]) # 若在多层特征层上预测,传入的就是一个有序字典 + + # 将特征层以及标注target信息传入rpn中 + # proposals: List[Tensor], Tensor_shape: [num_proposals, 4], + # 每个proposals是绝对坐标,且为(x1, y1, x2, y2)格式 + proposals, proposal_losses = self.rpn(images, features, targets) + + # 将rpn生成的数据以及标注target信息传入fast rcnn后半部分 + detections, detector_losses = self.roi_heads(features, proposals, images.image_sizes, targets) + + # 对网络的预测结果进行后处理(主要将bboxes还原到原图像尺度上) + detections = self.transform.postprocess(detections, images.image_sizes, original_image_sizes) + + losses = {} + losses.update(detector_losses) + losses.update(proposal_losses) + + if torch.jit.is_scripting(): + if not self._has_warned: + warnings.warn("RCNN always returns a (Losses, Detections) tuple in scripting") + self._has_warned = True + return losses, detections + else: + return self.eager_outputs(losses, detections) + + # if self.training: + # return losses + # + # return detections + + +class TwoMLPHead(nn.Module): + """ + Standard heads for FPN-based models + + Arguments: + in_channels (int): number of input channels + representation_size (int): size of the intermediate representation + """ + + def __init__(self, in_channels, representation_size): + super(TwoMLPHead, self).__init__() + + self.fc6 = nn.Linear(in_channels, representation_size) + self.fc7 = nn.Linear(representation_size, representation_size) + + def forward(self, x): + x = x.flatten(start_dim=1) + + x = F.relu(self.fc6(x)) + x = F.relu(self.fc7(x)) + + return x + + +class FastRCNNPredictor(nn.Module): + """ + Standard classification + bounding box regression layers + for Fast R-CNN. + + Arguments: + in_channels (int): number of input channels + num_classes (int): number of output classes (including background) + """ + + def __init__(self, in_channels, num_classes): + super(FastRCNNPredictor, self).__init__() + self.cls_score = nn.Linear(in_channels, num_classes) + self.bbox_pred = nn.Linear(in_channels, num_classes * 4) + + def forward(self, x): + if x.dim() == 4: + assert list(x.shape[2:]) == [1, 1] + x = x.flatten(start_dim=1) + scores = self.cls_score(x) + bbox_deltas = self.bbox_pred(x) + + return scores, bbox_deltas + + +class FasterRCNN(FasterRCNNBase): + """ + Implements Faster R-CNN. + + The input to the model is expected to be a list of tensors, each of shape [C, H, W], one for each + image, and should be in 0-1 range. Different images can have different sizes. 
+
+    The behavior of the model changes depending on whether it is in training or evaluation mode.
+
+    During training, the model expects both the input tensors, as well as targets (a list of dictionaries),
+    containing:
+        - boxes (FloatTensor[N, 4]): the ground-truth boxes in [x1, y1, x2, y2] format, with values
+          between 0 and H and 0 and W
+        - labels (Int64Tensor[N]): the class label for each ground-truth box
+
+    The model returns a Dict[Tensor] during training, containing the classification and regression
+    losses for both the RPN and the R-CNN.
+
+    During inference, the model requires only the input tensors, and returns the post-processed
+    predictions as a List[Dict[Tensor]], one for each input image. The fields of the Dict are as
+    follows:
+        - boxes (FloatTensor[N, 4]): the predicted boxes in [x1, y1, x2, y2] format, with values between
+          0 and H and 0 and W
+        - labels (Int64Tensor[N]): the predicted labels for each image
+        - scores (Tensor[N]): the scores of each prediction
+
+    Arguments:
+        backbone (nn.Module): the network used to compute the features for the model.
+            It should contain an out_channels attribute, which indicates the number of output
+            channels that each feature map has (and it should be the same for all feature maps).
+            The backbone should return a single Tensor or an OrderedDict[Tensor].
+        num_classes (int): number of output classes of the model (including the background).
+            If box_predictor is specified, num_classes should be None.
+        min_size (int): minimum size of the image to be rescaled before feeding it to the backbone
+        max_size (int): maximum size of the image to be rescaled before feeding it to the backbone
+        image_mean (Tuple[float, float, float]): mean values used for input normalization.
+            They are generally the mean values of the dataset on which the backbone has been trained
+            on
+        image_std (Tuple[float, float, float]): std values used for input normalization.
+            They are generally the std values of the dataset on which the backbone has been trained on
+        rpn_anchor_generator (AnchorGenerator): module that generates the anchors for a set of feature
+            maps.
+        rpn_head (nn.Module): module that computes the objectness and regression deltas from the RPN
+        rpn_pre_nms_top_n_train (int): number of proposals to keep before applying NMS during training
+        rpn_pre_nms_top_n_test (int): number of proposals to keep before applying NMS during testing
+        rpn_post_nms_top_n_train (int): number of proposals to keep after applying NMS during training
+        rpn_post_nms_top_n_test (int): number of proposals to keep after applying NMS during testing
+        rpn_nms_thresh (float): NMS threshold used for postprocessing the RPN proposals
+        rpn_fg_iou_thresh (float): minimum IoU between the anchor and the GT box so that they can be
+            considered as positive during training of the RPN.
+        rpn_bg_iou_thresh (float): maximum IoU between the anchor and the GT box so that they can be
+            considered as negative during training of the RPN.
+ rpn_batch_size_per_image (int): number of anchors that are sampled during training of the RPN + for computing the loss + rpn_positive_fraction (float): proportion of positive anchors in a mini-batch during training + of the RPN + rpn_score_thresh (float): during inference, only return proposals with a classification score + greater than rpn_score_thresh + box_roi_pool (MultiScaleRoIAlign): the module which crops and resizes the feature maps in + the locations indicated by the bounding boxes + box_head (nn.Module): module that takes the cropped feature maps as input + box_predictor (nn.Module): module that takes the output of box_head and returns the + classification logits and box regression deltas. + box_score_thresh (float): during inference, only return proposals with a classification score + greater than box_score_thresh + box_nms_thresh (float): NMS threshold for the prediction head. Used during inference + box_detections_per_img (int): maximum number of detections per image, for all classes. + box_fg_iou_thresh (float): minimum IoU between the proposals and the GT box so that they can be + considered as positive during training of the classification head + box_bg_iou_thresh (float): maximum IoU between the proposals and the GT box so that they can be + considered as negative during training of the classification head + box_batch_size_per_image (int): number of proposals that are sampled during training of the + classification head + box_positive_fraction (float): proportion of positive proposals in a mini-batch during training + of the classification head + bbox_reg_weights (Tuple[float, float, float, float]): weights for the encoding/decoding of the + bounding boxes + + """ + + def __init__(self, backbone, num_classes=None, + # transform parameter + min_size=800, max_size=1333, # 预处理resize时限制的最小尺寸与最大尺寸 + image_mean=None, image_std=None, # 预处理normalize时使用的均值和方差 + # RPN parameters + rpn_anchor_generator=None, rpn_head=None, + rpn_pre_nms_top_n_train=2000, rpn_pre_nms_top_n_test=1000, # rpn中在nms处理前保留的proposal数(根据score) + rpn_post_nms_top_n_train=2000, rpn_post_nms_top_n_test=1000, # rpn中在nms处理后保留的proposal数 + rpn_nms_thresh=0.7, # rpn中进行nms处理时使用的iou阈值 + rpn_fg_iou_thresh=0.7, rpn_bg_iou_thresh=0.3, # rpn计算损失时,采集正负样本设置的阈值 + rpn_batch_size_per_image=256, rpn_positive_fraction=0.5, # rpn计算损失时采样的样本数,以及正样本占总样本的比例 + rpn_score_thresh=0.0, + # Box parameters + box_roi_pool=None, box_head=None, box_predictor=None, + # 移除低目标概率 fast rcnn中进行nms处理的阈值 对预测结果根据score排序取前100个目标 + box_score_thresh=0.05, box_nms_thresh=0.5, box_detections_per_img=100, + box_fg_iou_thresh=0.5, box_bg_iou_thresh=0.5, # fast rcnn计算误差时,采集正负样本设置的阈值 + box_batch_size_per_image=512, box_positive_fraction=0.25, # fast rcnn计算误差时采样的样本数,以及正样本占所有样本的比例 + bbox_reg_weights=None): + if not hasattr(backbone, "out_channels"): + raise ValueError( + "backbone should contain an attribute out_channels" + "specifying the number of output channels (assumed to be the" + "same for all the levels" + ) + + # assert isinstance(rpn_anchor_generator, (AnchorsGenerator, type(None))) + assert isinstance(box_roi_pool, (MultiScaleRoIAlign, type(None))) + + if num_classes is not None: + if box_predictor is not None: + raise ValueError("num_classes should be None when box_predictor " + "is specified") + else: + if box_predictor is None: + raise ValueError("num_classes should not be None when box_predictor " + "is not specified") + + # 预测特征层的channels + out_channels = backbone.out_channels + + # 若anchor生成器为空,则自动生成针对resnet50_fpn的anchor生成器 + if 
rpn_anchor_generator is None: + anchor_sizes = ((32,), (64,), (128,), (256,), (512,)) + aspect_ratios = ((0.5, 1.0, 2.0),) * len(anchor_sizes) + rpn_anchor_generator = AnchorsGenerator( + anchor_sizes, aspect_ratios + ) + + # 生成RPN通过滑动窗口预测网络部分 + if rpn_head is None: + rpn_head = RPNHead( + out_channels, rpn_anchor_generator.num_anchors_per_location()[0] + ) + + # 默认rpn_pre_nms_top_n_train = 2000, rpn_pre_nms_top_n_test = 1000, + # 默认rpn_post_nms_top_n_train = 2000, rpn_post_nms_top_n_test = 1000, + rpn_pre_nms_top_n = dict(training=rpn_pre_nms_top_n_train, testing=rpn_pre_nms_top_n_test) + rpn_post_nms_top_n = dict(training=rpn_post_nms_top_n_train, testing=rpn_post_nms_top_n_test) + + # 定义整个RPN框架 + rpn = RegionProposalNetwork( + rpn_anchor_generator, rpn_head, + rpn_fg_iou_thresh, rpn_bg_iou_thresh, + rpn_batch_size_per_image, rpn_positive_fraction, + rpn_pre_nms_top_n, rpn_post_nms_top_n, rpn_nms_thresh, + score_thresh=rpn_score_thresh) + + # Multi-scale RoIAlign pooling + if box_roi_pool is None: + box_roi_pool = MultiScaleRoIAlign( + featmap_names=['0', '1', '2', '3'], # 在哪些特征层进行roi pooling + output_size=[7, 7], + sampling_ratio=2) + + # fast RCNN中roi pooling后的展平处理两个全连接层部分 + if box_head is None: + resolution = box_roi_pool.output_size[0] # 默认等于7 + representation_size = 1024 + box_head = TwoMLPHead( + out_channels * resolution ** 2, + representation_size + ) + + # 在box_head的输出上预测部分 + if box_predictor is None: + representation_size = 1024 + box_predictor = FastRCNNPredictor( + representation_size, + num_classes) + + # 将roi pooling, box_head以及box_predictor结合在一起 + roi_heads = RoIHeads( + # box + box_roi_pool, box_head, box_predictor, + box_fg_iou_thresh, box_bg_iou_thresh, # 0.5 0.5 + box_batch_size_per_image, box_positive_fraction, # 512 0.25 + bbox_reg_weights, + box_score_thresh, box_nms_thresh, box_detections_per_img) # 0.05 0.5 100 + + if image_mean is None: + image_mean = [0.485, 0.456, 0.406] + if image_std is None: + image_std = [0.229, 0.224, 0.225] + + # 对数据进行标准化,缩放,打包成batch等处理部分 + transform = GeneralizedRCNNTransform(min_size, max_size, image_mean, image_std) + + super(FasterRCNN, self).__init__(backbone, rpn, roi_heads, transform) diff --git a/detector/Mask_RCNN/network_files/image_list.py b/detector/Mask_RCNN/network_files/image_list.py new file mode 100644 index 0000000000000000000000000000000000000000..a1b36f33426aa5e5cfdb5d3adba5e519ef24f76f --- /dev/null +++ b/detector/Mask_RCNN/network_files/image_list.py @@ -0,0 +1,27 @@ +from typing import List, Tuple +from torch import Tensor + + +class ImageList(object): + """ + Structure that holds a list of images (of possibly + varying sizes) as a single tensor. 
+    This works by padding the images to the same size,
+    and storing in a field the original sizes of each image
+    """
+
+    def __init__(self, tensors, image_sizes):
+        # type: (Tensor, List[Tuple[int, int]]) -> None
+        """
+        Arguments:
+            tensors (tensor): the batched image tensor after padding
+            image_sizes (list[tuple[int, int]]): the size of each image before padding
+        """
+        self.tensors = tensors
+        self.image_sizes = image_sizes
+
+    def to(self, device):
+        # type: (Device) -> ImageList  # noqa
+        cast_tensor = self.tensors.to(device)
+        return ImageList(cast_tensor, self.image_sizes)
+
diff --git a/detector/Mask_RCNN/network_files/mask_rcnn.py b/detector/Mask_RCNN/network_files/mask_rcnn.py
new file mode 100644
index 0000000000000000000000000000000000000000..97a8d7fe99a571c2080e7c71866368860b346e97
--- /dev/null
+++ b/detector/Mask_RCNN/network_files/mask_rcnn.py
@@ -0,0 +1,239 @@
+from collections import OrderedDict
+import torch.nn as nn
+from torchvision.ops import MultiScaleRoIAlign
+
+from .faster_rcnn_framework import FasterRCNN
+
+
+class MaskRCNN(FasterRCNN):
+    """
+    Implements Mask R-CNN.
+
+    The input to the model is expected to be a list of tensors, each of shape [C, H, W], one for each
+    image, and should be in 0-1 range. Different images can have different sizes.
+
+    The behavior of the model changes depending on whether it is in training or evaluation mode.
+
+    During training, the model expects both the input tensors, as well as targets (a list of dictionaries),
+    containing:
+        - boxes (``FloatTensor[N, 4]``): the ground-truth boxes in ``[x1, y1, x2, y2]`` format, with
+          ``0 <= x1 < x2 <= W`` and ``0 <= y1 < y2 <= H``.
+        - labels (Int64Tensor[N]): the class label for each ground-truth box
+        - masks (UInt8Tensor[N, H, W]): the segmentation binary masks for each instance
+
+    The model returns a Dict[Tensor] during training, containing the classification and regression
+    losses for both the RPN and the R-CNN, and the mask loss.
+
+    During inference, the model requires only the input tensors, and returns the post-processed
+    predictions as a List[Dict[Tensor]], one for each input image. The fields of the Dict are as
+    follows:
+        - boxes (``FloatTensor[N, 4]``): the predicted boxes in ``[x1, y1, x2, y2]`` format, with
+          ``0 <= x1 < x2 <= W`` and ``0 <= y1 < y2 <= H``.
+        - labels (Int64Tensor[N]): the predicted labels for each image
+        - scores (Tensor[N]): the scores of each prediction
+        - masks (UInt8Tensor[N, 1, H, W]): the predicted masks for each instance, in 0-1 range. In order to
+          obtain the final segmentation masks, the soft masks can be thresholded, generally
+          with a value of 0.5 (mask >= 0.5)
+
+    Args:
+        backbone (nn.Module): the network used to compute the features for the model.
+            It should contain an out_channels attribute, which indicates the number of output
+            channels that each feature map has (and it should be the same for all feature maps).
+            The backbone should return a single Tensor or an OrderedDict[Tensor].
+        num_classes (int): number of output classes of the model (including the background).
+            If box_predictor is specified, num_classes should be None.
+        min_size (int): minimum size of the image to be rescaled before feeding it to the backbone
+        max_size (int): maximum size of the image to be rescaled before feeding it to the backbone
+        image_mean (Tuple[float, float, float]): mean values used for input normalization.
+            They are generally the mean values of the dataset on which the backbone has been trained
+            on
+        image_std (Tuple[float, float, float]): std values used for input normalization.
+ They are generally the std values of the dataset on which the backbone has been trained on + rpn_anchor_generator (AnchorGenerator): module that generates the anchors for a set of feature + maps. + rpn_head (nn.Module): module that computes the objectness and regression deltas from the RPN + rpn_pre_nms_top_n_train (int): number of proposals to keep before applying NMS during training + rpn_pre_nms_top_n_test (int): number of proposals to keep before applying NMS during testing + rpn_post_nms_top_n_train (int): number of proposals to keep after applying NMS during training + rpn_post_nms_top_n_test (int): number of proposals to keep after applying NMS during testing + rpn_nms_thresh (float): NMS threshold used for postprocessing the RPN proposals + rpn_fg_iou_thresh (float): minimum IoU between the anchor and the GT box so that they can be + considered as positive during training of the RPN. + rpn_bg_iou_thresh (float): maximum IoU between the anchor and the GT box so that they can be + considered as negative during training of the RPN. + rpn_batch_size_per_image (int): number of anchors that are sampled during training of the RPN + for computing the loss + rpn_positive_fraction (float): proportion of positive anchors in a mini-batch during training + of the RPN + rpn_score_thresh (float): during inference, only return proposals with a classification score + greater than rpn_score_thresh + box_roi_pool (MultiScaleRoIAlign): the module which crops and resizes the feature maps in + the locations indicated by the bounding boxes + box_head (nn.Module): module that takes the cropped feature maps as input + box_predictor (nn.Module): module that takes the output of box_head and returns the + classification logits and box regression deltas. + box_score_thresh (float): during inference, only return proposals with a classification score + greater than box_score_thresh + box_nms_thresh (float): NMS threshold for the prediction head. Used during inference + box_detections_per_img (int): maximum number of detections per image, for all classes. + box_fg_iou_thresh (float): minimum IoU between the proposals and the GT box so that they can be + considered as positive during training of the classification head + box_bg_iou_thresh (float): maximum IoU between the proposals and the GT box so that they can be + considered as negative during training of the classification head + box_batch_size_per_image (int): number of proposals that are sampled during training of the + classification head + box_positive_fraction (float): proportion of positive proposals in a mini-batch during training + of the classification head + bbox_reg_weights (Tuple[float, float, float, float]): weights for the encoding/decoding of the + bounding boxes + mask_roi_pool (MultiScaleRoIAlign): the module which crops and resizes the feature maps in + the locations indicated by the bounding boxes, which will be used for the mask head. 
+ mask_head (nn.Module): module that takes the cropped feature maps as input + mask_predictor (nn.Module): module that takes the output of the mask_head and returns the + segmentation mask logits + + """ + + def __init__( + self, + backbone, + num_classes=None, + # transform parameters + min_size=800, + max_size=1333, + image_mean=None, + image_std=None, + # RPN parameters + rpn_anchor_generator=None, + rpn_head=None, + rpn_pre_nms_top_n_train=2000, + rpn_pre_nms_top_n_test=1000, + rpn_post_nms_top_n_train=2000, + rpn_post_nms_top_n_test=1000, + rpn_nms_thresh=0.7, + rpn_fg_iou_thresh=0.7, + rpn_bg_iou_thresh=0.3, + rpn_batch_size_per_image=256, + rpn_positive_fraction=0.5, + rpn_score_thresh=0.0, + # Box parameters + box_roi_pool=None, + box_head=None, + box_predictor=None, + box_score_thresh=0.05, + box_nms_thresh=0.5, + box_detections_per_img=100, + box_fg_iou_thresh=0.5, + box_bg_iou_thresh=0.5, + box_batch_size_per_image=512, + box_positive_fraction=0.25, + bbox_reg_weights=None, + # Mask parameters + mask_roi_pool=None, + mask_head=None, + mask_predictor=None, + ): + + if not isinstance(mask_roi_pool, (MultiScaleRoIAlign, type(None))): + raise TypeError( + f"mask_roi_pool should be of type MultiScaleRoIAlign or None instead of {type(mask_roi_pool)}" + ) + + if num_classes is not None: + if mask_predictor is not None: + raise ValueError("num_classes should be None when mask_predictor is specified") + + out_channels = backbone.out_channels + + if mask_roi_pool is None: + mask_roi_pool = MultiScaleRoIAlign(featmap_names=["0", "1", "2", "3"], output_size=14, sampling_ratio=2) + + if mask_head is None: + mask_layers = (256, 256, 256, 256) + mask_dilation = 1 + mask_head = MaskRCNNHeads(out_channels, mask_layers, mask_dilation) + + if mask_predictor is None: + mask_predictor_in_channels = 256 + mask_dim_reduced = 256 + mask_predictor = MaskRCNNPredictor(mask_predictor_in_channels, mask_dim_reduced, num_classes) + + super().__init__( + backbone, + num_classes, + # transform parameters + min_size, + max_size, + image_mean, + image_std, + # RPN-specific parameters + rpn_anchor_generator, + rpn_head, + rpn_pre_nms_top_n_train, + rpn_pre_nms_top_n_test, + rpn_post_nms_top_n_train, + rpn_post_nms_top_n_test, + rpn_nms_thresh, + rpn_fg_iou_thresh, + rpn_bg_iou_thresh, + rpn_batch_size_per_image, + rpn_positive_fraction, + rpn_score_thresh, + # Box parameters + box_roi_pool, + box_head, + box_predictor, + box_score_thresh, + box_nms_thresh, + box_detections_per_img, + box_fg_iou_thresh, + box_bg_iou_thresh, + box_batch_size_per_image, + box_positive_fraction, + bbox_reg_weights, + ) + + self.roi_heads.mask_roi_pool = mask_roi_pool + self.roi_heads.mask_head = mask_head + self.roi_heads.mask_predictor = mask_predictor + + +class MaskRCNNHeads(nn.Sequential): + def __init__(self, in_channels, layers, dilation): + """ + Args: + in_channels (int): number of input channels + layers (tuple): feature dimensions of each FCN layer + dilation (int): dilation rate of kernel + """ + d = OrderedDict() + next_feature = in_channels + + for layer_idx, layers_features in enumerate(layers, 1): + d[f"mask_fcn{layer_idx}"] = nn.Conv2d(next_feature, + layers_features, + kernel_size=3, + stride=1, + padding=dilation, + dilation=dilation) + d[f"relu{layer_idx}"] = nn.ReLU(inplace=True) + next_feature = layers_features + + super().__init__(d) + # initial params + for name, param in self.named_parameters(): + if "weight" in name: + nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu") + + +class 
MaskRCNNPredictor(nn.Sequential):
+    def __init__(self, in_channels, dim_reduced, num_classes):
+        super().__init__(OrderedDict([
+            ("conv5_mask", nn.ConvTranspose2d(in_channels, dim_reduced, 2, 2, 0)),
+            ("relu", nn.ReLU(inplace=True)),
+            ("mask_fcn_logits", nn.Conv2d(dim_reduced, num_classes, 1, 1, 0))
+        ]))
+        # initialize parameters
+        for name, param in self.named_parameters():
+            if "weight" in name:
+                nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu")
diff --git a/detector/Mask_RCNN/network_files/roi_head.py b/detector/Mask_RCNN/network_files/roi_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..7269f58da0ccb6fed46a3976618f846d0c2dc9a5
--- /dev/null
+++ b/detector/Mask_RCNN/network_files/roi_head.py
@@ -0,0 +1,560 @@
+from typing import Optional, List, Dict, Tuple
+
+import torch
+from torch import Tensor
+import torch.nn.functional as F
+from torchvision.ops import roi_align
+
+from . import det_utils
+from . import boxes as box_ops
+
+
+def fastrcnn_loss(class_logits, box_regression, labels, regression_targets):
+    # type: (Tensor, Tensor, List[Tensor], List[Tensor]) -> Tuple[Tensor, Tensor]
+    """
+    Computes the loss for Faster R-CNN.
+
+    Arguments:
+        class_logits : predicted class logits, shape=[num_anchors, num_classes]
+        box_regression : predicted bounding-box regression deltas
+        labels : ground-truth class labels
+        regression_targets : ground-truth bounding-box regression targets
+
+    Returns:
+        classification_loss (Tensor)
+        box_loss (Tensor)
+    """
+
+    labels = torch.cat(labels, dim=0)
+    regression_targets = torch.cat(regression_targets, dim=0)
+
+    # compute the classification loss
+    classification_loss = F.cross_entropy(class_logits, labels)
+
+    # get indices that correspond to the regression targets for
+    # the corresponding ground truth labels, to be used with
+    # advanced indexing
+    # indices of the samples whose label is greater than 0 (positive samples)
+    # sampled_pos_inds_subset = torch.nonzero(torch.gt(labels, 0)).squeeze(1)
+    sampled_pos_inds_subset = torch.where(torch.gt(labels, 0))[0]
+
+    # class labels at those positive positions
+    labels_pos = labels[sampled_pos_inds_subset]
+
+    # shape=[num_proposal, num_classes]
+    N, num_classes = class_logits.shape
+    box_regression = box_regression.reshape(N, -1, 4)
+
+    # compute the bounding-box regression loss
+    box_loss = det_utils.smooth_l1_loss(
+        # select the predicted box of the matched class for each positive proposal
+        box_regression[sampled_pos_inds_subset, labels_pos],
+        regression_targets[sampled_pos_inds_subset],
+        beta=1 / 9,
+        size_average=False,
+    ) / labels.numel()
+
+    return classification_loss, box_loss
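+
+
+# --- Editor's illustrative sketch (not part of the original file): how the
+# advanced indexing in fastrcnn_loss picks one 4-vector per positive proposal.
+# All tensors below are made-up, minimal shapes for demonstration only.
+#
+#     >>> import torch
+#     >>> box_regression = torch.randn(6, 3, 4)      # [num_proposals, num_classes, 4]
+#     >>> labels = torch.tensor([0, 2, 1, 0, 2, 1])  # matched class per proposal
+#     >>> pos = torch.where(labels > 0)[0]           # tensor([1, 2, 4, 5])
+#     >>> box_regression[pos, labels[pos]].shape     # one box per positive proposal
+#     torch.Size([4, 4])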
+
+
+def maskrcnn_inference(x, labels):
+    # type: (Tensor, List[Tensor]) -> List[Tensor]
+    """
+    From the results of the CNN, post process the masks
+    by taking the mask corresponding to the class with max
+    probability (which are of fixed size and directly output
+    by the CNN) and return the masks in the mask field of the BoxList.
+
+    Args:
+        x (Tensor): the mask logits
+        labels (list[BoxList]): bounding boxes that are used as
+            reference, one for each image
+
+    Returns:
+        results (list[BoxList]): one BoxList for each image, containing
+            the extra field mask
+    """
+    # squash all logits into the 0~1 range with a sigmoid
+    mask_prob = x.sigmoid()
+
+    # select masks corresponding to the predicted classes
+    num_masks = x.shape[0]
+    # first record the number of boxes/masks in each image
+    boxes_per_image = [label.shape[0] for label in labels]
+    # then concatenate the masks of all images (processing them together improves parallelism)
+    labels = torch.cat(labels)
+    index = torch.arange(num_masks, device=labels.device)
+    # for every detection keep only the mask of its finally predicted class
+    mask_prob = mask_prob[index, labels][:, None]
+    # finally split the masks back per image using the recorded counts
+    mask_prob = mask_prob.split(boxes_per_image, dim=0)
+
+    return mask_prob
+
+
+def project_masks_on_boxes(gt_masks, boxes, matched_idxs, M):
+    # type: (Tensor, Tensor, Tensor, int) -> Tensor
+    """
+    Given segmentation masks and the bounding boxes corresponding
+    to the location of the masks in the image, this function
+    crops and resizes the masks in the position defined by the
+    boxes. This prepares the masks for them to be fed to the
+    loss computation as the targets.
+    """
+    matched_idxs = matched_idxs.to(boxes)
+    rois = torch.cat([matched_idxs[:, None], boxes], dim=1)
+    gt_masks = gt_masks[:, None].to(rois)
+    return roi_align(gt_masks, rois, (M, M), 1.0)[:, 0]
+
+
+def maskrcnn_loss(mask_logits, proposals, gt_masks, gt_labels, mask_matched_idxs):
+    # type: (Tensor, List[Tensor], List[Tensor], List[Tensor], List[Tensor]) -> Tensor
+    """
+
+    Args:
+        mask_logits: predicted mask logits from the mask branch
+        proposals: positive proposals for each image
+        gt_masks: ground-truth masks for each image
+        gt_labels: ground-truth labels for each image
+        mask_matched_idxs: index of the GT matched to each proposal
+
+    Returns:
+        mask_loss (Tensor): scalar tensor containing the loss
+    """
+
+    # 28 (spatial size of the masks output by the FCN branch)
+    discretization_size = mask_logits.shape[-1]
+    # get the GT class of each proposal (all proposals here are positive samples)
+    labels = [gt_label[idxs] for gt_label, idxs in zip(gt_labels, mask_matched_idxs)]
+    # crop the region of gt_masks matching each proposal to serve as the actual GT mask for the loss
+    mask_targets = [
+        project_masks_on_boxes(m, p, i, discretization_size) for m, p, i in zip(gt_masks, proposals, mask_matched_idxs)
+    ]
+
+    # concatenate the per-proposal information of the whole batch (processing together improves parallelism)
+    labels = torch.cat(labels, dim=0)
+    mask_targets = torch.cat(mask_targets, dim=0)
+
+    # torch.mean (in binary_cross_entropy_with_logits) doesn't
+    # accept empty tensors, so handle it separately
+    if mask_targets.numel() == 0:
+        return mask_logits.sum() * 0
+
+    # BCE loss between the predicted masks and the GT masks
+    mask_loss = F.binary_cross_entropy_with_logits(
+        mask_logits[torch.arange(labels.shape[0], device=labels.device), labels], mask_targets
+    )
+    return mask_loss
+
+
+class RoIHeads(torch.nn.Module):
+    __annotations__ = {
+        'box_coder': det_utils.BoxCoder,
+        'proposal_matcher': det_utils.Matcher,
+        'fg_bg_sampler': det_utils.BalancedPositiveNegativeSampler,
+    }
+
+    def __init__(self,
+                 box_roi_pool,   # Multi-scale RoIAlign pooling
+                 box_head,       # TwoMLPHead
+                 box_predictor,  # FastRCNNPredictor
+                 # Faster R-CNN training
+                 fg_iou_thresh, bg_iou_thresh,  # default: 0.5, 0.5
+                 batch_size_per_image, positive_fraction,  # default: 512, 0.25
+                 bbox_reg_weights,  # None
+                 # Faster R-CNN inference
+                 score_thresh,       # default: 0.05
+                 nms_thresh,         # default: 0.5
+                 detection_per_img,  # default: 100
+                 # Mask
+                 mask_roi_pool=None,
+                 mask_head=None,
+                 mask_predictor=None,
+                 ):
+        super(RoIHeads, self).__init__()
+
+        self.box_similarity = box_ops.box_iou
+        # assign ground-truth boxes for each proposal
+        self.proposal_matcher = det_utils.Matcher(
+            fg_iou_thresh,  # default: 0.5
+            bg_iou_thresh,  # default: 0.5
+            allow_low_quality_matches=False)
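+
+        # Editor's note (illustrative, not part of the original file; the
+        # numbers are the defaults that FasterRCNN passes in): the sampler
+        # created below draws at most batch_size_per_image proposals per image
+        # with the given positive fraction, e.g. 512 * 0.25 = up to 128
+        # positives, with the remainder of the 512 filled by negatives.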
+ self.fg_bg_sampler = det_utils.BalancedPositiveNegativeSampler( + batch_size_per_image, # default: 512 + positive_fraction) # default: 0.25 + + if bbox_reg_weights is None: + bbox_reg_weights = (10., 10., 5., 5.) + self.box_coder = det_utils.BoxCoder(bbox_reg_weights) + + self.box_roi_pool = box_roi_pool # Multi-scale RoIAlign pooling + self.box_head = box_head # TwoMLPHead + self.box_predictor = box_predictor # FastRCNNPredictor + + self.score_thresh = score_thresh # default: 0.05 + self.nms_thresh = nms_thresh # default: 0.5 + self.detection_per_img = detection_per_img # default: 100 + + self.mask_roi_pool = mask_roi_pool + self.mask_head = mask_head + self.mask_predictor = mask_predictor + + def has_mask(self): + if self.mask_roi_pool is None: + return False + if self.mask_head is None: + return False + if self.mask_predictor is None: + return False + return True + + def assign_targets_to_proposals(self, proposals, gt_boxes, gt_labels): + # type: (List[Tensor], List[Tensor], List[Tensor]) -> Tuple[List[Tensor], List[Tensor]] + """ + 为每个proposal匹配对应的gt_box,并划分到正负样本中 + Args: + proposals: + gt_boxes: + gt_labels: + + Returns: + + """ + matched_idxs = [] + labels = [] + # 遍历每张图像的proposals, gt_boxes, gt_labels信息 + for proposals_in_image, gt_boxes_in_image, gt_labels_in_image in zip(proposals, gt_boxes, gt_labels): + if gt_boxes_in_image.numel() == 0: # 该张图像中没有gt框,为背景 + # background image + device = proposals_in_image.device + clamped_matched_idxs_in_image = torch.zeros( + (proposals_in_image.shape[0],), dtype=torch.int64, device=device + ) + labels_in_image = torch.zeros( + (proposals_in_image.shape[0],), dtype=torch.int64, device=device + ) + else: + # set to self.box_similarity when https://github.com/pytorch/pytorch/issues/27495 lands + # 计算proposal与每个gt_box的iou重合度 + match_quality_matrix = box_ops.box_iou(gt_boxes_in_image, proposals_in_image) + + # 计算proposal与每个gt_box匹配的iou最大值,并记录索引, + # iou < low_threshold索引值为 -1, low_threshold <= iou < high_threshold索引值为 -2 + matched_idxs_in_image = self.proposal_matcher(match_quality_matrix) + + # 限制最小值,防止匹配标签时出现越界的情况 + # 注意-1, -2对应的gt索引会调整到0,获取的标签类别为第0个gt的类别(实际上并不是),后续会进一步处理 + clamped_matched_idxs_in_image = matched_idxs_in_image.clamp(min=0) + # 获取proposal匹配到的gt对应标签 + labels_in_image = gt_labels_in_image[clamped_matched_idxs_in_image] + labels_in_image = labels_in_image.to(dtype=torch.int64) + + # label background (below the low threshold) + # 将gt索引为-1的类别设置为0,即背景,负样本 + bg_inds = matched_idxs_in_image == self.proposal_matcher.BELOW_LOW_THRESHOLD # -1 + labels_in_image[bg_inds] = 0 + + # label ignore proposals (between low and high threshold) + # 将gt索引为-2的类别设置为-1, 即废弃样本 + ignore_inds = matched_idxs_in_image == self.proposal_matcher.BETWEEN_THRESHOLDS # -2 + labels_in_image[ignore_inds] = -1 # -1 is ignored by sampler + + matched_idxs.append(clamped_matched_idxs_in_image) + labels.append(labels_in_image) + return matched_idxs, labels + + def subsample(self, labels): + # type: (List[Tensor]) -> List[Tensor] + # BalancedPositiveNegativeSampler + sampled_pos_inds, sampled_neg_inds = self.fg_bg_sampler(labels) + sampled_inds = [] + # 遍历每张图片的正负样本索引 + for img_idx, (pos_inds_img, neg_inds_img) in enumerate(zip(sampled_pos_inds, sampled_neg_inds)): + # 记录所有采集样本索引(包括正样本和负样本) + # img_sampled_inds = torch.nonzero(pos_inds_img | neg_inds_img).squeeze(1) + img_sampled_inds = torch.where(pos_inds_img | neg_inds_img)[0] + sampled_inds.append(img_sampled_inds) + return sampled_inds + + def add_gt_proposals(self, proposals, gt_boxes): + # type: (List[Tensor], 
List[Tensor]) -> List[Tensor] + """ + 将gt_boxes拼接到proposal后面 + Args: + proposals: 一个batch中每张图像rpn预测的boxes + gt_boxes: 一个batch中每张图像对应的真实目标边界框 + + Returns: + + """ + proposals = [ + torch.cat((proposal, gt_box)) + for proposal, gt_box in zip(proposals, gt_boxes) + ] + return proposals + + def check_targets(self, targets): + # type: (Optional[List[Dict[str, Tensor]]]) -> None + assert targets is not None + assert all(["boxes" in t for t in targets]) + assert all(["labels" in t for t in targets]) + + def select_training_samples(self, + proposals, # type: List[Tensor] + targets # type: Optional[List[Dict[str, Tensor]]] + ): + # type: (...) -> Tuple[List[Tensor], List[Tensor], List[Tensor], List[Tensor]] + """ + 划分正负样本,统计对应gt的标签以及边界框回归信息 + list元素个数为batch_size + Args: + proposals: rpn预测的boxes + targets: + + Returns: + + """ + + # 检查target数据是否为空 + self.check_targets(targets) + if targets is None: + raise ValueError("target should not be None.") + + dtype = proposals[0].dtype + device = proposals[0].device + + # 获取标注好的boxes以及labels信息 + gt_boxes = [t["boxes"].to(dtype) for t in targets] + gt_labels = [t["labels"] for t in targets] + + # append ground-truth bboxes to proposal + # 将gt_boxes拼接到proposal后面 + proposals = self.add_gt_proposals(proposals, gt_boxes) + + # get matching gt indices for each proposal + # 为每个proposal匹配对应的gt_box,并划分到正负样本中 + matched_idxs, labels = self.assign_targets_to_proposals(proposals, gt_boxes, gt_labels) + # sample a fixed proportion of positive-negative proposals + # 按给定数量和比例采样正负样本 + sampled_inds = self.subsample(labels) + matched_gt_boxes = [] + num_images = len(proposals) + + # 遍历每张图像 + for img_id in range(num_images): + # 获取每张图像的正负样本索引 + img_sampled_inds = sampled_inds[img_id] + # 获取对应正负样本的proposals信息 + proposals[img_id] = proposals[img_id][img_sampled_inds] + # 获取对应正负样本的真实类别信息 + labels[img_id] = labels[img_id][img_sampled_inds] + # 获取对应正负样本的gt索引信息 + matched_idxs[img_id] = matched_idxs[img_id][img_sampled_inds] + + gt_boxes_in_image = gt_boxes[img_id] + if gt_boxes_in_image.numel() == 0: + gt_boxes_in_image = torch.zeros((1, 4), dtype=dtype, device=device) + # 获取对应正负样本的gt box信息 + matched_gt_boxes.append(gt_boxes_in_image[matched_idxs[img_id]]) + + # 根据gt和proposal计算边框回归参数(针对gt的) + regression_targets = self.box_coder.encode(matched_gt_boxes, proposals) + return proposals, matched_idxs, labels, regression_targets + + def postprocess_detections(self, + class_logits, # type: Tensor + box_regression, # type: Tensor + proposals, # type: List[Tensor] + image_shapes # type: List[Tuple[int, int]] + ): + # type: (...) 
-> Tuple[List[Tensor], List[Tensor], List[Tensor]]
+        """
+        Post-process the network predictions, including:
+        (1) compute the final bbox coordinates from the proposals and the predicted regression deltas
+        (2) apply softmax to the predicted class logits
+        (3) clip the predicted boxes so out-of-range coordinates fall on the image border
+        (4) remove all background predictions
+        (5) remove low-probability detections
+        (6) remove small boxes
+        (7) run NMS and sort the results by score
+        (8) keep only the top-k detections by score
+        Args:
+            class_logits: predicted class logits
+            box_regression: predicted bounding-box regression deltas
+            proposals: proposals output by the RPN
+            image_shapes: width and height of each image before batching
+
+        Returns:
+
+        """
+        device = class_logits.device
+        # number of predicted classes
+        num_classes = class_logits.shape[-1]
+
+        # number of predicted bboxes for each image
+        boxes_per_image = [boxes_in_image.shape[0] for boxes_in_image in proposals]
+        # decode the final bbox coordinates from the proposals and the predicted regression deltas
+        pred_boxes = self.box_coder.decode(box_regression, proposals)
+
+        # apply softmax to the predicted class logits
+        pred_scores = F.softmax(class_logits, -1)
+
+        # split boxes and scores per image
+        # split the results per image according to the number of predicted bboxes
+        pred_boxes_list = pred_boxes.split(boxes_per_image, 0)
+        pred_scores_list = pred_scores.split(boxes_per_image, 0)
+
+        all_boxes = []
+        all_scores = []
+        all_labels = []
+        # iterate over the predictions of each image
+        for boxes, scores, image_shape in zip(pred_boxes_list, pred_scores_list, image_shapes):
+            # clip the predicted boxes, moving out-of-range coordinates onto the image border
+            boxes = box_ops.clip_boxes_to_image(boxes, image_shape)
+
+            # create labels for each prediction
+            labels = torch.arange(num_classes, device=device)
+            labels = labels.view(1, -1).expand_as(scores)
+
+            # remove prediction with the background label
+            # drop everything at index 0 (index 0 is the background class)
+            boxes = boxes[:, 1:]
+            scores = scores[:, 1:]
+            labels = labels[:, 1:]
+
+            # batch everything, by making every class prediction be a separate instance
+            boxes = boxes.reshape(-1, 4)
+            scores = scores.reshape(-1)
+            labels = labels.reshape(-1)
+
+            # remove low scoring boxes
+            # remove low-probability detections, self.score_thresh=0.05
+            # gt: Computes input > other element-wise.
+            # inds = torch.nonzero(torch.gt(scores, self.score_thresh)).squeeze(1)
+            inds = torch.where(torch.gt(scores, self.score_thresh))[0]
+            boxes, scores, labels = boxes[inds], scores[inds], labels[inds]
+
+            # remove empty boxes
+            # remove tiny boxes
+            keep = box_ops.remove_small_boxes(boxes, min_size=1.)
+            boxes, scores, labels = boxes[keep], scores[keep], labels[keep]
+
+            # non-maximum suppression, independently done per class
+            # run NMS; the kept indices come back sorted by score in descending order
+            keep = box_ops.batched_nms(boxes, scores, labels, self.nms_thresh)
+
+            # keep only topk scoring predictions
+            # keep only the top-k highest-scoring detections
+            keep = keep[:self.detection_per_img]
+            boxes, scores, labels = boxes[keep], scores[keep], labels[keep]
+
+            all_boxes.append(boxes)
+            all_scores.append(scores)
+            all_labels.append(labels)
+
+        return all_boxes, all_scores, all_labels
+
+    def forward(self,
+                features,       # type: Dict[str, Tensor]
+                proposals,      # type: List[Tensor]
+                image_shapes,   # type: List[Tuple[int, int]]
+                targets=None    # type: Optional[List[Dict[str, Tensor]]]
+                ):
+        # type: (...)
-> Tuple[List[Dict[str, Tensor]], Dict[str, Tensor]] + """ + Arguments: + features (List[Tensor]) + proposals (List[Tensor[N, 4]]) + image_shapes (List[Tuple[H, W]]) + targets (List[Dict]) + """ + + # 检查targets的数据类型是否正确 + if targets is not None: + for t in targets: + floating_point_types = (torch.float, torch.double, torch.half) + assert t["boxes"].dtype in floating_point_types, "target boxes must of float type" + assert t["labels"].dtype == torch.int64, "target labels must of int64 type" + + if self.training: + # 划分正负样本,统计对应gt的标签以及边界框回归信息 + proposals, matched_idxs, labels, regression_targets = self.select_training_samples(proposals, targets) + else: + labels = None + regression_targets = None + matched_idxs = None + + # 将采集样本通过Multi-scale RoIAlign pooling层 + # box_features_shape: [num_proposals, channel, height, width] + box_features = self.box_roi_pool(features, proposals, image_shapes) + + # 通过roi_pooling后的两层全连接层 + # box_features_shape: [num_proposals, representation_size] + box_features = self.box_head(box_features) + + # 接着分别预测目标类别和边界框回归参数 + class_logits, box_regression = self.box_predictor(box_features) + + result: List[Dict[str, torch.Tensor]] = [] + losses = {} + if self.training: + assert labels is not None and regression_targets is not None + loss_classifier, loss_box_reg = fastrcnn_loss( + class_logits, box_regression, labels, regression_targets) + losses = { + "loss_classifier": loss_classifier, + "loss_box_reg": loss_box_reg + } + else: + boxes, scores, labels = self.postprocess_detections(class_logits, box_regression, proposals, image_shapes) + num_images = len(boxes) + for i in range(num_images): + result.append( + { + "boxes": boxes[i], + "labels": labels[i], + "scores": scores[i], + } + ) + + if self.has_mask(): + mask_proposals = [p["boxes"] for p in result] # 将最终预测的Boxes信息取出 + if self.training: + # matched_idxs为每个proposal在正负样本匹配过程中得到的gt索引(背景的gt索引也默认设置成了0) + if matched_idxs is None: + raise ValueError("if in training, matched_idxs should not be None") + + # during training, only focus on positive boxes + num_images = len(proposals) + mask_proposals = [] + pos_matched_idxs = [] + for img_id in range(num_images): + pos = torch.where(labels[img_id] > 0)[0] # 寻找对应gt类别大于0,即正样本 + mask_proposals.append(proposals[img_id][pos]) + pos_matched_idxs.append(matched_idxs[img_id][pos]) + else: + pos_matched_idxs = None + + mask_features = self.mask_roi_pool(features, mask_proposals, image_shapes) + mask_features = self.mask_head(mask_features) + mask_logits = self.mask_predictor(mask_features) + + loss_mask = {} + if self.training: + if targets is None or pos_matched_idxs is None or mask_logits is None: + raise ValueError("targets, pos_matched_idxs, mask_logits cannot be None when training") + + gt_masks = [t["masks"] for t in targets] + gt_labels = [t["labels"] for t in targets] + rcnn_loss_mask = maskrcnn_loss(mask_logits, mask_proposals, gt_masks, gt_labels, pos_matched_idxs) + loss_mask = {"loss_mask": rcnn_loss_mask} + else: + labels = [r["labels"] for r in result] + mask_probs = maskrcnn_inference(mask_logits, labels) + for mask_prob, r in zip(mask_probs, result): + r["masks"] = mask_prob + + losses.update(loss_mask) + + return result, losses diff --git a/detector/Mask_RCNN/network_files/rpn_function.py b/detector/Mask_RCNN/network_files/rpn_function.py new file mode 100644 index 0000000000000000000000000000000000000000..b186898847e8ef921ba977ee2b3411698dded356 --- /dev/null +++ b/detector/Mask_RCNN/network_files/rpn_function.py @@ -0,0 +1,643 @@ +from typing import List, 
Optional, Dict, Tuple + +import torch +from torch import nn, Tensor +from torch.nn import functional as F +import torchvision + +from . import det_utils +from . import boxes as box_ops +from .image_list import ImageList + + +@torch.jit.unused +def _onnx_get_num_anchors_and_pre_nms_top_n(ob, orig_pre_nms_top_n): + # type: (Tensor, int) -> Tuple[int, int] + from torch.onnx import operators + num_anchors = operators.shape_as_tensor(ob)[1].unsqueeze(0) + pre_nms_top_n = torch.min(torch.cat( + (torch.tensor([orig_pre_nms_top_n], dtype=num_anchors.dtype), + num_anchors), 0)) + + return num_anchors, pre_nms_top_n + + +class AnchorsGenerator(nn.Module): + __annotations__ = { + "cell_anchors": Optional[List[torch.Tensor]], + "_cache": Dict[str, List[torch.Tensor]] + } + + """ + anchors生成器 + Module that generates anchors for a set of feature maps and + image sizes. + + The module support computing anchors at multiple sizes and aspect ratios + per feature map. + + sizes and aspect_ratios should have the same number of elements, and it should + correspond to the number of feature maps. + + sizes[i] and aspect_ratios[i] can have an arbitrary number of elements, + and AnchorGenerator will output a set of sizes[i] * aspect_ratios[i] anchors + per spatial location for feature map i. + + Arguments: + sizes (Tuple[Tuple[int]]): + aspect_ratios (Tuple[Tuple[float]]): + """ + + def __init__(self, sizes=(128, 256, 512), aspect_ratios=(0.5, 1.0, 2.0)): + super(AnchorsGenerator, self).__init__() + + if not isinstance(sizes[0], (list, tuple)): + # TODO change this + sizes = tuple((s,) for s in sizes) + if not isinstance(aspect_ratios[0], (list, tuple)): + aspect_ratios = (aspect_ratios,) * len(sizes) + + assert len(sizes) == len(aspect_ratios) + + self.sizes = sizes + self.aspect_ratios = aspect_ratios + self.cell_anchors = None + self._cache = {} + + def generate_anchors(self, scales, aspect_ratios, dtype=torch.float32, device=torch.device("cpu")): + # type: (List[int], List[float], torch.dtype, torch.device) -> Tensor + """ + compute anchor sizes + Arguments: + scales: sqrt(anchor_area) + aspect_ratios: h/w ratios + dtype: float32 + device: cpu/gpu + """ + scales = torch.as_tensor(scales, dtype=dtype, device=device) + aspect_ratios = torch.as_tensor(aspect_ratios, dtype=dtype, device=device) + h_ratios = torch.sqrt(aspect_ratios) + w_ratios = 1.0 / h_ratios + + # [r1, r2, r3]' * [s1, s2, s3] + # number of elements is len(ratios)*len(scales) + ws = (w_ratios[:, None] * scales[None, :]).view(-1) + hs = (h_ratios[:, None] * scales[None, :]).view(-1) + + # left-top, right-bottom coordinate relative to anchor center(0, 0) + # 生成的anchors模板都是以(0, 0)为中心的, shape [len(ratios)*len(scales), 4] + base_anchors = torch.stack([-ws, -hs, ws, hs], dim=1) / 2 + + return base_anchors.round() # round 四舍五入 + + def set_cell_anchors(self, dtype, device): + # type: (torch.dtype, torch.device) -> None + if self.cell_anchors is not None: + cell_anchors = self.cell_anchors + assert cell_anchors is not None + # suppose that all anchors have the same device + # which is a valid assumption in the current state of the codebase + if cell_anchors[0].device == device: + return + + # 根据提供的sizes和aspect_ratios生成anchors模板 + # anchors模板都是以(0, 0)为中心的anchor + cell_anchors = [ + self.generate_anchors(sizes, aspect_ratios, dtype, device) + for sizes, aspect_ratios in zip(self.sizes, self.aspect_ratios) + ] + self.cell_anchors = cell_anchors + + def num_anchors_per_location(self): + # 计算每个预测特征层上每个滑动窗口的预测目标数 + return [len(s) * len(a) for s, a in 
zip(self.sizes, self.aspect_ratios)] + + # For every combination of (a, (g, s), i) in (self.cell_anchors, zip(grid_sizes, strides), 0:2), + # output g[i] anchors that are s[i] distance apart in direction i, with the same dimensions as a. + def grid_anchors(self, grid_sizes, strides): + # type: (List[List[int]], List[List[Tensor]]) -> List[Tensor] + """ + anchors position in grid coordinate axis map into origin image + 计算预测特征图对应原始图像上的所有anchors的坐标 + Args: + grid_sizes: 预测特征矩阵的height和width + strides: 预测特征矩阵上一步对应原始图像上的步距 + """ + anchors = [] + cell_anchors = self.cell_anchors + assert cell_anchors is not None + + # 遍历每个预测特征层的grid_size,strides和cell_anchors + for size, stride, base_anchors in zip(grid_sizes, strides, cell_anchors): + grid_height, grid_width = size + stride_height, stride_width = stride + device = base_anchors.device + + # For output anchor, compute [x_center, y_center, x_center, y_center] + # shape: [grid_width] 对应原图上的x坐标(列) + shifts_x = torch.arange(0, grid_width, dtype=torch.float32, device=device) * stride_width + # shape: [grid_height] 对应原图上的y坐标(行) + shifts_y = torch.arange(0, grid_height, dtype=torch.float32, device=device) * stride_height + + # 计算预测特征矩阵上每个点对应原图上的坐标(anchors模板的坐标偏移量) + # torch.meshgrid函数分别传入行坐标和列坐标,生成网格行坐标矩阵和网格列坐标矩阵 + # shape: [grid_height, grid_width] + shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) + shift_x = shift_x.reshape(-1) + shift_y = shift_y.reshape(-1) + + # 计算anchors坐标(xmin, ymin, xmax, ymax)在原图上的坐标偏移量 + # shape: [grid_width*grid_height, 4] + shifts = torch.stack([shift_x, shift_y, shift_x, shift_y], dim=1) + + # For every (base anchor, output anchor) pair, + # offset each zero-centered base anchor by the center of the output anchor. + # 将anchors模板与原图上的坐标偏移量相加得到原图上所有anchors的坐标信息(shape不同时会使用广播机制) + shifts_anchor = shifts.view(-1, 1, 4) + base_anchors.view(1, -1, 4) + anchors.append(shifts_anchor.reshape(-1, 4)) + + return anchors # List[Tensor(all_num_anchors, 4)] + + def cached_grid_anchors(self, grid_sizes, strides): + # type: (List[List[int]], List[List[Tensor]]) -> List[Tensor] + """将计算得到的所有anchors信息进行缓存""" + key = str(grid_sizes) + str(strides) + # self._cache是字典类型 + if key in self._cache: + return self._cache[key] + anchors = self.grid_anchors(grid_sizes, strides) + self._cache[key] = anchors + return anchors + + def forward(self, image_list, feature_maps): + # type: (ImageList, List[Tensor]) -> List[Tensor] + # 获取每个预测特征层的尺寸(height, width) + grid_sizes = list([feature_map.shape[-2:] for feature_map in feature_maps]) + + # 获取输入图像的height和width + image_size = image_list.tensors.shape[-2:] + + # 获取变量类型和设备类型 + dtype, device = feature_maps[0].dtype, feature_maps[0].device + + # one step in feature map equate n pixel stride in origin image + # 计算特征层上的一步等于原始图像上的步长 + strides = [[torch.tensor(image_size[0] // g[0], dtype=torch.int64, device=device), + torch.tensor(image_size[1] // g[1], dtype=torch.int64, device=device)] for g in grid_sizes] + + # 根据提供的sizes和aspect_ratios生成anchors模板 + self.set_cell_anchors(dtype, device) + + # 计算/读取所有anchors的坐标信息(这里的anchors信息是映射到原图上的所有anchors信息,不是anchors模板) + # 得到的是一个list列表,对应每张预测特征图映射回原图的anchors坐标信息 + anchors_over_all_feature_maps = self.cached_grid_anchors(grid_sizes, strides) + + anchors = torch.jit.annotate(List[List[torch.Tensor]], []) + # 遍历一个batch中的每张图像 + for i, (image_height, image_width) in enumerate(image_list.image_sizes): + anchors_in_image = [] + # 遍历每张预测特征图映射回原图的anchors坐标信息 + for anchors_per_feature_map in anchors_over_all_feature_maps: + anchors_in_image.append(anchors_per_feature_map) + 
anchors.append(anchors_in_image) + # 将每一张图像的所有预测特征层的anchors坐标信息拼接在一起 + # anchors是个list,每个元素为一张图像的所有anchors信息 + anchors = [torch.cat(anchors_per_image) for anchors_per_image in anchors] + # Clear the cache in case that memory leaks. + self._cache.clear() + return anchors + + +class RPNHead(nn.Module): + """ + add a RPN head with classification and regression + 通过滑动窗口计算预测目标概率与bbox regression参数 + + Arguments: + in_channels: number of channels of the input feature + num_anchors: number of anchors to be predicted + """ + + def __init__(self, in_channels, num_anchors): + super(RPNHead, self).__init__() + # 3x3 滑动窗口 + self.conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + # 计算预测的目标分数(这里的目标只是指前景或者背景) + self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1) + # 计算预测的目标bbox regression参数 + self.bbox_pred = nn.Conv2d(in_channels, num_anchors * 4, kernel_size=1, stride=1) + + for layer in self.children(): + if isinstance(layer, nn.Conv2d): + torch.nn.init.normal_(layer.weight, std=0.01) + torch.nn.init.constant_(layer.bias, 0) + + def forward(self, x): + # type: (List[Tensor]) -> Tuple[List[Tensor], List[Tensor]] + logits = [] + bbox_reg = [] + for i, feature in enumerate(x): + t = F.relu(self.conv(feature)) + logits.append(self.cls_logits(t)) + bbox_reg.append(self.bbox_pred(t)) + return logits, bbox_reg + + +def permute_and_flatten(layer, N, A, C, H, W): + # type: (Tensor, int, int, int, int, int) -> Tensor + """ + 调整tensor顺序,并进行reshape + Args: + layer: 预测特征层上预测的目标概率或bboxes regression参数 + N: batch_size + A: anchors_num_per_position + C: classes_num or 4(bbox coordinate) + H: height + W: width + + Returns: + layer: 调整tensor顺序,并reshape后的结果[N, -1, C] + """ + # view和reshape功能是一样的,先展平所有元素在按照给定shape排列 + # view函数只能用于内存中连续存储的tensor,permute等操作会使tensor在内存中变得不再连续,此时就不能再调用view函数 + # reshape则不需要依赖目标tensor是否在内存中是连续的 + # [batch_size, anchors_num_per_position * (C or 4), height, width] + layer = layer.view(N, -1, C, H, W) + # 调换tensor维度 + layer = layer.permute(0, 3, 4, 1, 2) # [N, H, W, -1, C] + layer = layer.reshape(N, -1, C) + return layer + + +def concat_box_prediction_layers(box_cls, box_regression): + # type: (List[Tensor], List[Tensor]) -> Tuple[Tensor, Tensor] + """ + 对box_cla和box_regression两个list中的每个预测特征层的预测信息 + 的tensor排列顺序以及shape进行调整 -> [N, -1, C] + Args: + box_cls: 每个预测特征层上的预测目标概率 + box_regression: 每个预测特征层上的预测目标bboxes regression参数 + + Returns: + + """ + box_cls_flattened = [] + box_regression_flattened = [] + + # 遍历每个预测特征层 + for box_cls_per_level, box_regression_per_level in zip(box_cls, box_regression): + # [batch_size, anchors_num_per_position * classes_num, height, width] + # 注意,当计算RPN中的proposal时,classes_num=1,只区分目标和背景 + N, AxC, H, W = box_cls_per_level.shape + # # [batch_size, anchors_num_per_position * 4, height, width] + Ax4 = box_regression_per_level.shape[1] + # anchors_num_per_position + A = Ax4 // 4 + # classes_num + C = AxC // A + + # [N, -1, C] + box_cls_per_level = permute_and_flatten(box_cls_per_level, N, A, C, H, W) + box_cls_flattened.append(box_cls_per_level) + + # [N, -1, C] + box_regression_per_level = permute_and_flatten(box_regression_per_level, N, A, 4, H, W) + box_regression_flattened.append(box_regression_per_level) + + box_cls = torch.cat(box_cls_flattened, dim=1).flatten(0, -2) # start_dim, end_dim + box_regression = torch.cat(box_regression_flattened, dim=1).reshape(-1, 4) + return box_cls, box_regression + + +class RegionProposalNetwork(torch.nn.Module): + """ + Implements Region Proposal Network (RPN). 
+ + Arguments: + anchor_generator (AnchorGenerator): module that generates the anchors for a set of feature + maps. + head (nn.Module): module that computes the objectness and regression deltas + fg_iou_thresh (float): minimum IoU between the anchor and the GT box so that they can be + considered as positive during training of the RPN. + bg_iou_thresh (float): maximum IoU between the anchor and the GT box so that they can be + considered as negative during training of the RPN. + batch_size_per_image (int): number of anchors that are sampled during training of the RPN + for computing the loss + positive_fraction (float): proportion of positive anchors in a mini-batch during training + of the RPN + pre_nms_top_n (Dict[str]): number of proposals to keep before applying NMS. It should + contain two fields: training and testing, to allow for different values depending + on training or evaluation + post_nms_top_n (Dict[str]): number of proposals to keep after applying NMS. It should + contain two fields: training and testing, to allow for different values depending + on training or evaluation + nms_thresh (float): NMS threshold used for postprocessing the RPN proposals + + """ + __annotations__ = { + 'box_coder': det_utils.BoxCoder, + 'proposal_matcher': det_utils.Matcher, + 'fg_bg_sampler': det_utils.BalancedPositiveNegativeSampler, + 'pre_nms_top_n': Dict[str, int], + 'post_nms_top_n': Dict[str, int], + } + + def __init__(self, anchor_generator, head, + fg_iou_thresh, bg_iou_thresh, + batch_size_per_image, positive_fraction, + pre_nms_top_n, post_nms_top_n, nms_thresh, score_thresh=0.0): + super(RegionProposalNetwork, self).__init__() + self.anchor_generator = anchor_generator + self.head = head + self.box_coder = det_utils.BoxCoder(weights=(1.0, 1.0, 1.0, 1.0)) + + # use during training + # 计算anchors与真实bbox的iou + self.box_similarity = box_ops.box_iou + + self.proposal_matcher = det_utils.Matcher( + fg_iou_thresh, # 当iou大于fg_iou_thresh(0.7)时视为正样本 + bg_iou_thresh, # 当iou小于bg_iou_thresh(0.3)时视为负样本 + allow_low_quality_matches=True + ) + + self.fg_bg_sampler = det_utils.BalancedPositiveNegativeSampler( + batch_size_per_image, positive_fraction # 256, 0.5 + ) + + # use during testing + self._pre_nms_top_n = pre_nms_top_n + self._post_nms_top_n = post_nms_top_n + self.nms_thresh = nms_thresh + self.score_thresh = score_thresh + self.min_size = 1. 
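+
+    # Editor's note (illustrative sketch, not part of the original file): the
+    # two dicts stored above let the RPN keep a different number of proposals
+    # in train and eval mode. With the defaults wired in by FasterRCNN:
+    #
+    #     >>> rpn._pre_nms_top_n                   # dict(training=2000, testing=1000)
+    #     >>> rpn.train(); rpn.pre_nms_top_n()     # -> 2000
+    #     >>> rpn.eval();  rpn.pre_nms_top_n()     # -> 1000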
+
+    def pre_nms_top_n(self):
+        if self.training:
+            return self._pre_nms_top_n['training']
+        return self._pre_nms_top_n['testing']
+
+    def post_nms_top_n(self):
+        if self.training:
+            return self._post_nms_top_n['training']
+        return self._post_nms_top_n['testing']
+
+    def assign_targets_to_anchors(self, anchors, targets):
+        # type: (List[Tensor], List[Dict[str, Tensor]]) -> Tuple[List[Tensor], List[Tensor]]
+        """
+        Compute the best-matching GT box for every anchor, and divide the
+        anchors into positive samples, background and discarded samples.
+        Args:
+            anchors: (List[Tensor])
+            targets: (List[Dict[Tensor])
+        Returns:
+            labels: category of each anchor (1, 0, -1 for positive samples,
+                background and discarded samples respectively).
+                Note that the RPN only distinguishes foreground from background,
+                so every positive sample has class 1 and 0 denotes background.
+            matched_gt_boxes: the GT box matched to each anchor
+        """
+        labels = []
+        matched_gt_boxes = []
+        # iterate over the anchors and targets of each image
+        for anchors_per_image, targets_per_image in zip(anchors, targets):
+            gt_boxes = targets_per_image["boxes"]
+            if gt_boxes.numel() == 0:  # this image has no GT boxes, i.e. pure background
+                # background image
+                device = anchors_per_image.device
+                matched_gt_boxes_per_image = torch.zeros(anchors_per_image.shape, dtype=torch.float32, device=device)
+                labels_per_image = torch.zeros((anchors_per_image.shape[0],), dtype=torch.float32, device=device)
+            else:
+                # compute the IoU between the anchors and the GT boxes
+                # set to self.box_similarity when https://github.com/pytorch/pytorch/issues/27495 lands
+                match_quality_matrix = box_ops.box_iou(gt_boxes, anchors_per_image)
+                # index of the best-matching GT for each anchor
+                # (the index is set to -1 when iou < 0.3 and to -2 when 0.3 <= iou < 0.7)
+                matched_idxs = self.proposal_matcher(match_quality_matrix)
+                # get the matching GT box for each anchor
+                # NB: clamp the indices because matched_idxs can be -1 or -2,
+                # which would go out of bounds
+                matched_gt_boxes_per_image = gt_boxes[matched_idxs.clamp(min=0)]
+
+                labels_per_image = matched_idxs >= 0
+                labels_per_image = labels_per_image.to(dtype=torch.float32)
+
+                # background (negative examples)
+                bg_indices = matched_idxs == self.proposal_matcher.BELOW_LOW_THRESHOLD  # -1
+                labels_per_image[bg_indices] = 0.0
+
+                # discard indices that are between thresholds
+                inds_to_discard = matched_idxs == self.proposal_matcher.BETWEEN_THRESHOLDS  # -2
+                labels_per_image[inds_to_discard] = -1.0
+
+            labels.append(labels_per_image)
+            matched_gt_boxes.append(matched_gt_boxes_per_image)
+        return labels, matched_gt_boxes
+
+    def _get_top_n_idx(self, objectness, num_anchors_per_level):
+        # type: (Tensor, List[int]) -> Tensor
+        """
+        Get the indices of the anchors whose predicted objectness ranks in the
+        top pre_nms_top_n on each prediction feature map.
+        Args:
+            objectness: Tensor (predicted objectness per image)
+            num_anchors_per_level: List (number of anchors predicted on each feature level)
+        Returns:
+
+        """
+        r = []  # records the top pre_nms_top_n indices for each feature level
+        offset = 0
+        # iterate over the objectness predictions of each feature level
+        for ob in objectness.split(num_anchors_per_level, 1):
+            if torchvision._is_tracing():
+                num_anchors, pre_nms_top_n = _onnx_get_num_anchors_and_pre_nms_top_n(ob, self.pre_nms_top_n())
+            else:
+                num_anchors = ob.shape[1]  # number of anchors on this feature level
+                pre_nms_top_n = min(self.pre_nms_top_n(), num_anchors)
+
+            # Returns the k largest elements of the given input tensor along a given dimension
+            _, top_n_idx = ob.topk(pre_nms_top_n, dim=1)
+            r.append(top_n_idx + offset)
+            offset += num_anchors
+        return torch.cat(r, dim=1)
+
+    def filter_proposals(self, proposals, objectness, image_shapes, num_anchors_per_level):
+        # type: (Tensor, Tensor, List[Tuple[int, int]], List[int]) -> Tuple[List[Tensor], List[Tensor]]
+        """
+        Filter out small boxes, apply NMS, and keep the top post_nms_top_n
+        proposals by predicted score.
+        Args:
+            proposals: predicted bbox coordinates
+            objectness: predicted objectness scores
+            image_shapes: size of every image in the batch
+            num_anchors_per_level: number of anchors predicted on each feature level
+
+        Returns:
+
+        """
+        num_images = proposals.shape[0]
+        device = proposals.device
+
+        # do not backprop through objectness
+        objectness = objectness.detach()
+        objectness = objectness.reshape(num_images, -1)
+
+        # Returns a tensor of size size filled with fill_value
+        # levels records which prediction feature level each anchor comes from
+        levels = [torch.full((n, ), idx, dtype=torch.int64, device=device)
+                  for idx, n in enumerate(num_anchors_per_level)]
+        levels =
torch.cat(levels, 0) + + # Expand this tensor to the same size as objectness + levels = levels.reshape(1, -1).expand_as(objectness) + + # select top_n boxes independently per level before applying nms + # 获取每张预测特征图上预测概率排前pre_nms_top_n的anchors索引值 + top_n_idx = self._get_top_n_idx(objectness, num_anchors_per_level) + + image_range = torch.arange(num_images, device=device) + batch_idx = image_range[:, None] # [batch_size, 1] + + # 根据每个预测特征层预测概率排前pre_nms_top_n的anchors索引值获取相应概率信息 + objectness = objectness[batch_idx, top_n_idx] + levels = levels[batch_idx, top_n_idx] + # 预测概率排前pre_nms_top_n的anchors索引值获取相应bbox坐标信息 + proposals = proposals[batch_idx, top_n_idx] + + objectness_prob = torch.sigmoid(objectness) + + final_boxes = [] + final_scores = [] + # 遍历每张图像的相关预测信息 + for boxes, scores, lvl, img_shape in zip(proposals, objectness_prob, levels, image_shapes): + # 调整预测的boxes信息,将越界的坐标调整到图片边界上 + boxes = box_ops.clip_boxes_to_image(boxes, img_shape) + + # 返回boxes满足宽,高都大于min_size的索引 + keep = box_ops.remove_small_boxes(boxes, self.min_size) + boxes, scores, lvl = boxes[keep], scores[keep], lvl[keep] + + # 移除小概率boxes,参考下面这个链接 + # https://github.com/pytorch/vision/pull/3205 + keep = torch.where(torch.ge(scores, self.score_thresh))[0] # ge: >= + boxes, scores, lvl = boxes[keep], scores[keep], lvl[keep] + + # non-maximum suppression, independently done per level + keep = box_ops.batched_nms(boxes, scores, lvl, self.nms_thresh) + + # keep only topk scoring predictions + keep = keep[: self.post_nms_top_n()] + boxes, scores = boxes[keep], scores[keep] + + final_boxes.append(boxes) + final_scores.append(scores) + return final_boxes, final_scores + + def compute_loss(self, objectness, pred_bbox_deltas, labels, regression_targets): + # type: (Tensor, Tensor, List[Tensor], List[Tensor]) -> Tuple[Tensor, Tensor] + """ + 计算RPN损失,包括类别损失(前景与背景),bbox regression损失 + Arguments: + objectness (Tensor):预测的前景概率 + pred_bbox_deltas (Tensor):预测的bbox regression + labels (List[Tensor]):真实的标签 1, 0, -1(batch中每一张图片的labels对应List的一个元素中) + regression_targets (List[Tensor]):真实的bbox regression + + Returns: + objectness_loss (Tensor) : 类别损失 + box_loss (Tensor):边界框回归损失 + """ + # 按照给定的batch_size_per_image, positive_fraction选择正负样本 + sampled_pos_inds, sampled_neg_inds = self.fg_bg_sampler(labels) + # 将一个batch中的所有正负样本List(Tensor)分别拼接在一起,并获取非零位置的索引 + # sampled_pos_inds = torch.nonzero(torch.cat(sampled_pos_inds, dim=0)).squeeze(1) + sampled_pos_inds = torch.where(torch.cat(sampled_pos_inds, dim=0))[0] + # sampled_neg_inds = torch.nonzero(torch.cat(sampled_neg_inds, dim=0)).squeeze(1) + sampled_neg_inds = torch.where(torch.cat(sampled_neg_inds, dim=0))[0] + + # 将所有正负样本索引拼接在一起 + sampled_inds = torch.cat([sampled_pos_inds, sampled_neg_inds], dim=0) + objectness = objectness.flatten() + + labels = torch.cat(labels, dim=0) + regression_targets = torch.cat(regression_targets, dim=0) + + # 计算边界框回归损失 + box_loss = det_utils.smooth_l1_loss( + pred_bbox_deltas[sampled_pos_inds], + regression_targets[sampled_pos_inds], + beta=1 / 9, + size_average=False, + ) / (sampled_inds.numel()) + + # 计算目标预测概率损失 + objectness_loss = F.binary_cross_entropy_with_logits( + objectness[sampled_inds], labels[sampled_inds] + ) + + return objectness_loss, box_loss + + def forward(self, + images, # type: ImageList + features, # type: Dict[str, Tensor] + targets=None # type: Optional[List[Dict[str, Tensor]]] + ): + # type: (...) 
-> Tuple[List[Tensor], Dict[str, Tensor]] + """ + Arguments: + images (ImageList): images for which we want to compute the predictions + features (Dict[Tensor]): features computed from the images that are + used for computing the predictions. Each tensor in the list + correspond to different feature levels + targets (List[Dict[Tensor]): ground-truth boxes present in the image (optional). + If provided, each element in the dict should contain a field `boxes`, + with the locations of the ground-truth boxes. + + Returns: + boxes (List[Tensor]): the predicted boxes from the RPN, one Tensor per + image. + losses (Dict[Tensor]): the losses for the model during training. During + testing, it is an empty dict. + """ + # RPN uses all feature maps that are available + # features是所有预测特征层组成的OrderedDict + features = list(features.values()) + + # 计算每个预测特征层上的预测目标概率和bboxes regression参数 + # objectness和pred_bbox_deltas都是list + objectness, pred_bbox_deltas = self.head(features) + + # 生成一个batch图像的所有anchors信息,list(tensor)元素个数等于batch_size + anchors = self.anchor_generator(images, features) + + # batch_size + num_images = len(anchors) + + # numel() Returns the total number of elements in the input tensor. + # 计算每个预测特征层上的对应的anchors数量 + num_anchors_per_level_shape_tensors = [o[0].shape for o in objectness] + num_anchors_per_level = [s[0] * s[1] * s[2] for s in num_anchors_per_level_shape_tensors] + + # 调整内部tensor格式以及shape + objectness, pred_bbox_deltas = concat_box_prediction_layers(objectness, + pred_bbox_deltas) + + # apply pred_bbox_deltas to anchors to obtain the decoded proposals + # note that we detach the deltas because Faster R-CNN do not backprop through + # the proposals + # 将预测的bbox regression参数应用到anchors上得到最终预测bbox坐标 + proposals = self.box_coder.decode(pred_bbox_deltas.detach(), anchors) + proposals = proposals.view(num_images, -1, 4) + + # 筛除小boxes框,nms处理,根据预测概率获取前post_nms_top_n个目标 + boxes, scores = self.filter_proposals(proposals, objectness, images.image_sizes, num_anchors_per_level) + + losses = {} + if self.training: + assert targets is not None + # 计算每个anchors最匹配的gt,并将anchors进行分类,前景,背景以及废弃的anchors + labels, matched_gt_boxes = self.assign_targets_to_anchors(anchors, targets) + # 结合anchors以及对应的gt,计算regression参数 + regression_targets = self.box_coder.encode(matched_gt_boxes, anchors) + loss_objectness, loss_rpn_box_reg = self.compute_loss( + objectness, pred_bbox_deltas, labels, regression_targets + ) + losses = { + "loss_objectness": loss_objectness, + "loss_rpn_box_reg": loss_rpn_box_reg + } + return boxes, losses diff --git a/detector/Mask_RCNN/network_files/transform.py b/detector/Mask_RCNN/network_files/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..420d8ed0ed2f2bdf3e4a6bc84fdc308a82d37489 --- /dev/null +++ b/detector/Mask_RCNN/network_files/transform.py @@ -0,0 +1,490 @@ +import math +from typing import List, Tuple, Dict, Optional + +import torch +from torch import nn, Tensor +import torch.nn.functional as F +import torchvision + +from .image_list import ImageList + + +def _onnx_paste_mask_in_image(mask, box, im_h, im_w): + one = torch.ones(1, dtype=torch.int64) + zero = torch.zeros(1, dtype=torch.int64) + + w = box[2] - box[0] + one + h = box[3] - box[1] + one + w = torch.max(torch.cat((w, one))) + h = torch.max(torch.cat((h, one))) + + # Set shape to [batchxCxHxW] + mask = mask.expand((1, 1, mask.size(0), mask.size(1))) + + # Resize mask + mask = F.interpolate(mask, size=(int(h), int(w)), mode="bilinear", align_corners=False) + mask = mask[0][0] + + x_0 = 
torch.max(torch.cat((box[0].unsqueeze(0), zero))) + x_1 = torch.min(torch.cat((box[2].unsqueeze(0) + one, im_w.unsqueeze(0)))) + y_0 = torch.max(torch.cat((box[1].unsqueeze(0), zero))) + y_1 = torch.min(torch.cat((box[3].unsqueeze(0) + one, im_h.unsqueeze(0)))) + + unpaded_im_mask = mask[(y_0 - box[1]): (y_1 - box[1]), (x_0 - box[0]): (x_1 - box[0])] + + # TODO : replace below with a dynamic padding when support is added in ONNX + + # pad y + zeros_y0 = torch.zeros(y_0, unpaded_im_mask.size(1)) + zeros_y1 = torch.zeros(im_h - y_1, unpaded_im_mask.size(1)) + concat_0 = torch.cat((zeros_y0, unpaded_im_mask.to(dtype=torch.float32), zeros_y1), 0)[0:im_h, :] + # pad x + zeros_x0 = torch.zeros(concat_0.size(0), x_0) + zeros_x1 = torch.zeros(concat_0.size(0), im_w - x_1) + im_mask = torch.cat((zeros_x0, concat_0, zeros_x1), 1)[:, :im_w] + return im_mask + + +@torch.jit._script_if_tracing +def _onnx_paste_mask_in_image_loop(masks, boxes, im_h, im_w): + res_append = torch.zeros(0, im_h, im_w) + for i in range(masks.size(0)): + mask_res = _onnx_paste_mask_in_image(masks[i][0], boxes[i], im_h, im_w) + mask_res = mask_res.unsqueeze(0) + res_append = torch.cat((res_append, mask_res)) + + return res_append + + +@torch.jit.unused +def _get_shape_onnx(image: Tensor) -> Tensor: + from torch.onnx import operators + + return operators.shape_as_tensor(image)[-2:] + + +@torch.jit.unused +def _fake_cast_onnx(v: Tensor) -> float: + # ONNX requires a tensor but here we fake its type for JIT. + return v + + +def _resize_image_and_masks(image: Tensor, + self_min_size: float, + self_max_size: float, + target: Optional[Dict[str, Tensor]] = None, + fixed_size: Optional[Tuple[int, int]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + + if torchvision._is_tracing(): + im_shape = _get_shape_onnx(image) + else: + im_shape = torch.tensor(image.shape[-2:]) + + size: Optional[List[int]] = None + scale_factor: Optional[float] = None + recompute_scale_factor: Optional[bool] = None + if fixed_size is not None: + size = [fixed_size[1], fixed_size[0]] + else: + min_size = torch.min(im_shape).to(dtype=torch.float32) # 获取高宽中的最小值 + max_size = torch.max(im_shape).to(dtype=torch.float32) # 获取高宽中的最大值 + scale = torch.min(self_min_size / min_size, self_max_size / max_size) # 计算缩放比例 + + if torchvision._is_tracing(): + scale_factor = _fake_cast_onnx(scale) + else: + scale_factor = scale.item() + recompute_scale_factor = True + + # interpolate利用插值的方法缩放图片 + # image[None]操作是在最前面添加batch维度[C, H, W] -> [1, C, H, W] + # bilinear只支持4D Tensor + image = torch.nn.functional.interpolate( + image[None], + size=size, + scale_factor=scale_factor, + mode="bilinear", + recompute_scale_factor=recompute_scale_factor, + align_corners=False)[0] + + if target is None: + return image, target + + if "masks" in target: + mask = target["masks"] + mask = torch.nn.functional.interpolate( + mask[:, None].float(), size=size, scale_factor=scale_factor, recompute_scale_factor=recompute_scale_factor + )[:, 0].byte() # self.byte() is equivalent to self.to(torch.uint8). 
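+        # note: F.interpolate has no uint8 kernel and defaults to nearest-neighbour
+        # sampling, so the masks are cast to float for the resize; nearest keeps the
+        # values binary, which makes the .byte() cast above lossless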
+ target["masks"] = mask + + return image, target + + +def _onnx_expand_boxes(boxes, scale): + # type: (Tensor, float) -> Tensor + w_half = (boxes[:, 2] - boxes[:, 0]) * 0.5 + h_half = (boxes[:, 3] - boxes[:, 1]) * 0.5 + x_c = (boxes[:, 2] + boxes[:, 0]) * 0.5 + y_c = (boxes[:, 3] + boxes[:, 1]) * 0.5 + + w_half = w_half.to(dtype=torch.float32) * scale + h_half = h_half.to(dtype=torch.float32) * scale + + boxes_exp0 = x_c - w_half + boxes_exp1 = y_c - h_half + boxes_exp2 = x_c + w_half + boxes_exp3 = y_c + h_half + boxes_exp = torch.stack((boxes_exp0, boxes_exp1, boxes_exp2, boxes_exp3), 1) + return boxes_exp + + +# the next two functions should be merged inside Masker +# but are kept here for the moment while we need them +# temporarily for paste_mask_in_image +def expand_boxes(boxes, scale): + # type: (Tensor, float) -> Tensor + if torchvision._is_tracing(): + return _onnx_expand_boxes(boxes, scale) + w_half = (boxes[:, 2] - boxes[:, 0]) * 0.5 + h_half = (boxes[:, 3] - boxes[:, 1]) * 0.5 + x_c = (boxes[:, 2] + boxes[:, 0]) * 0.5 + y_c = (boxes[:, 3] + boxes[:, 1]) * 0.5 + + w_half *= scale + h_half *= scale + + boxes_exp = torch.zeros_like(boxes) + boxes_exp[:, 0] = x_c - w_half + boxes_exp[:, 2] = x_c + w_half + boxes_exp[:, 1] = y_c - h_half + boxes_exp[:, 3] = y_c + h_half + return boxes_exp + + +@torch.jit.unused +def expand_masks_tracing_scale(M, padding): + # type: (int, int) -> float + return torch.tensor(M + 2 * padding).to(torch.float32) / torch.tensor(M).to(torch.float32) + + +def expand_masks(mask, padding): + # type: (Tensor, int) -> Tuple[Tensor, float] + M = mask.shape[-1] + if torch._C._get_tracing_state(): # could not import is_tracing(), not sure why + scale = expand_masks_tracing_scale(M, padding) + else: + scale = float(M + 2 * padding) / M + padded_mask = F.pad(mask, (padding,) * 4) + return padded_mask, scale + + +def paste_mask_in_image(mask, box, im_h, im_w): + # type: (Tensor, Tensor, int, int) -> Tensor + + # refer to: https://github.com/pytorch/vision/issues/5845 + TO_REMOVE = 1 + w = int(box[2] - box[0] + TO_REMOVE) + h = int(box[3] - box[1] + TO_REMOVE) + w = max(w, 1) + h = max(h, 1) + + # Set shape to [batch, C, H, W] + # 因为后续的bilinear操作只支持4-D的Tensor + mask = mask.expand((1, 1, -1, -1)) # -1 means not changing the size of that dimension + + # Resize mask + mask = F.interpolate(mask, size=(h, w), mode='bilinear', align_corners=False) + mask = mask[0][0] # [batch, C, H, W] -> [H, W] + + im_mask = torch.zeros((im_h, im_w), dtype=mask.dtype, device=mask.device) + # 填入原图的目标区域(防止越界) + x_0 = max(box[0], 0) + x_1 = min(box[2] + 1, im_w) + y_0 = max(box[1], 0) + y_1 = min(box[3] + 1, im_h) + + # 将resize后的mask填入对应目标区域 + im_mask[y_0:y_1, x_0:x_1] = mask[(y_0 - box[1]):(y_1 - box[1]), (x_0 - box[0]):(x_1 - box[0])] + return im_mask + + +def paste_masks_in_image(masks, boxes, img_shape, padding=1): + # type: (Tensor, Tensor, Tuple[int, int], int) -> Tensor + + # pytorch官方说对mask进行expand能够略微提升mAP + # refer to: https://github.com/pytorch/vision/issues/5845 + masks, scale = expand_masks(masks, padding=padding) + boxes = expand_boxes(boxes, scale).to(dtype=torch.int64) + im_h, im_w = img_shape + + if torchvision._is_tracing(): + return _onnx_paste_mask_in_image_loop( + masks, boxes, torch.scalar_tensor(im_h, dtype=torch.int64), torch.scalar_tensor(im_w, dtype=torch.int64) + )[:, None] + res = [paste_mask_in_image(m[0], b, im_h, im_w) for m, b in zip(masks, boxes)] + if len(res) > 0: + ret = torch.stack(res, dim=0)[:, None] # [num_obj, 1, H, W] + else: + ret = 
masks.new_empty((0, 1, im_h, im_w)) + return ret + + +class GeneralizedRCNNTransform(nn.Module): + """ + Performs input / target transformation before feeding the data to a GeneralizedRCNN + model. + + The transformations it perform are: + - input normalization (mean subtraction and std division) + - input / target resizing to match min_size / max_size + + It returns a ImageList for the inputs, and a List[Dict[Tensor]] for the targets + """ + + def __init__(self, + min_size: int, + max_size: int, + image_mean: List[float], + image_std: List[float], + size_divisible: int = 32, + fixed_size: Optional[Tuple[int, int]] = None): + super().__init__() + if not isinstance(min_size, (list, tuple)): + min_size = (min_size,) + self.min_size = min_size # 指定图像的最小边长范围 + self.max_size = max_size # 指定图像的最大边长范围 + self.image_mean = image_mean # 指定图像在标准化处理中的均值 + self.image_std = image_std # 指定图像在标准化处理中的方差 + self.size_divisible = size_divisible + self.fixed_size = fixed_size + + def normalize(self, image): + """标准化处理""" + dtype, device = image.dtype, image.device + mean = torch.as_tensor(self.image_mean, dtype=dtype, device=device) + std = torch.as_tensor(self.image_std, dtype=dtype, device=device) + # [:, None, None]: shape [3] -> [3, 1, 1] + return (image - mean[:, None, None]) / std[:, None, None] + + def torch_choice(self, k): + # type: (List[int]) -> int + """ + Implements `random.choice` via torch ops so it can be compiled with + TorchScript. Remove if https://github.com/pytorch/pytorch/issues/25803 + is fixed. + """ + index = int(torch.empty(1).uniform_(0., float(len(k))).item()) + return k[index] + + def resize(self, image, target): + # type: (Tensor, Optional[Dict[str, Tensor]]) -> Tuple[Tensor, Optional[Dict[str, Tensor]]] + """ + 将图片缩放到指定的大小范围内,并对应缩放bboxes信息 + Args: + image: 输入的图片 + target: 输入图片的相关信息(包括bboxes信息) + + Returns: + image: 缩放后的图片 + target: 缩放bboxes后的图片相关信息 + """ + # image shape is [channel, height, width] + h, w = image.shape[-2:] + + if self.training: + size = float(self.torch_choice(self.min_size)) # 指定输入图片的最小边长,注意是self.min_size不是min_size + else: + # FIXME assume for now that testing uses the largest scale + size = float(self.min_size[-1]) # 指定输入图片的最小边长,注意是self.min_size不是min_size + + image, target = _resize_image_and_masks(image, size, float(self.max_size), target, self.fixed_size) + + if target is None: + return image, target + + bbox = target["boxes"] + # 根据图像的缩放比例来缩放bbox + bbox = resize_boxes(bbox, [h, w], image.shape[-2:]) + target["boxes"] = bbox + + return image, target + + # _onnx_batch_images() is an implementation of + # batch_images() that is supported by ONNX tracing. 
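+    # ONNX tracing cannot record the Python-int shape arithmetic used in
+    # batch_images() below, so this variant keeps every size computation as
+    # torch ops (torch.max / torch.ceil on tensors) that the tracer can follow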
+ @torch.jit.unused + def _onnx_batch_images(self, images, size_divisible=32): + # type: (List[Tensor], int) -> Tensor + max_size = [] + for i in range(images[0].dim()): + max_size_i = torch.max(torch.stack([img.shape[i] for img in images]).to(torch.float32)).to(torch.int64) + max_size.append(max_size_i) + stride = size_divisible + max_size[1] = (torch.ceil((max_size[1].to(torch.float32)) / stride) * stride).to(torch.int64) + max_size[2] = (torch.ceil((max_size[2].to(torch.float32)) / stride) * stride).to(torch.int64) + max_size = tuple(max_size) + + # work around for + # pad_img[: img.shape[0], : img.shape[1], : img.shape[2]].copy_(img) + # which is not yet supported in onnx + padded_imgs = [] + for img in images: + padding = [(s1 - s2) for s1, s2 in zip(max_size, tuple(img.shape))] + padded_img = torch.nn.functional.pad(img, [0, padding[2], 0, padding[1], 0, padding[0]]) + padded_imgs.append(padded_img) + + return torch.stack(padded_imgs) + + def max_by_axis(self, the_list): + # type: (List[List[int]]) -> List[int] + maxes = the_list[0] + for sublist in the_list[1:]: + for index, item in enumerate(sublist): + maxes[index] = max(maxes[index], item) + return maxes + + def batch_images(self, images, size_divisible=32): + # type: (List[Tensor], int) -> Tensor + """ + 将一批图像打包成一个batch返回(注意batch中每个tensor的shape是相同的) + Args: + images: 输入的一批图片 + size_divisible: 将图像高和宽调整到该数的整数倍 + + Returns: + batched_imgs: 打包成一个batch后的tensor数据 + """ + + if torchvision._is_tracing(): + # batch_images() does not export well to ONNX + # call _onnx_batch_images() instead + return self._onnx_batch_images(images, size_divisible) + + # 分别计算一个batch中所有图片中的最大channel, height, width + max_size = self.max_by_axis([list(img.shape) for img in images]) + + stride = float(size_divisible) + # max_size = list(max_size) + # 将height向上调整到stride的整数倍 + max_size[1] = int(math.ceil(float(max_size[1]) / stride) * stride) + # 将width向上调整到stride的整数倍 + max_size[2] = int(math.ceil(float(max_size[2]) / stride) * stride) + + # [batch, channel, height, width] + batch_shape = [len(images)] + max_size + + # 创建shape为batch_shape且值全部为0的tensor + batched_imgs = images[0].new_full(batch_shape, 0) + for img, pad_img in zip(images, batched_imgs): + # 将输入images中的每张图片复制到新的batched_imgs的每张图片中,对齐左上角,保证bboxes的坐标不变 + # 这样保证输入到网络中一个batch的每张图片的shape相同 + # copy_: Copies the elements from src into self tensor and returns self + pad_img[: img.shape[0], : img.shape[1], : img.shape[2]].copy_(img) + + return batched_imgs + + def postprocess(self, + result, # type: List[Dict[str, Tensor]] + image_shapes, # type: List[Tuple[int, int]] + original_image_sizes # type: List[Tuple[int, int]] + ): + # type: (...) 
-> List[Dict[str, Tensor]] + """ + 对网络的预测结果进行后处理(主要将bboxes还原到原图像尺度上) + Args: + result: list(dict), 网络的预测结果, len(result) == batch_size + image_shapes: list(torch.Size), 图像预处理缩放后的尺寸, len(image_shapes) == batch_size + original_image_sizes: list(torch.Size), 图像的原始尺寸, len(original_image_sizes) == batch_size + + Returns: + + """ + if self.training: + return result + + # 遍历每张图片的预测信息,将boxes信息还原回原尺度 + for i, (pred, im_s, o_im_s) in enumerate(zip(result, image_shapes, original_image_sizes)): + boxes = pred["boxes"] + boxes = resize_boxes(boxes, im_s, o_im_s) # 将bboxes缩放回原图像尺度上 + result[i]["boxes"] = boxes + if "masks" in pred: + masks = pred["masks"] + # 将mask映射回原图尺度 + masks = paste_masks_in_image(masks, boxes, o_im_s) + result[i]["masks"] = masks + + return result + + def __repr__(self): + """自定义输出实例化对象的信息,可通过print打印实例信息""" + format_string = self.__class__.__name__ + '(' + _indent = '\n ' + format_string += "{0}Normalize(mean={1}, std={2})".format(_indent, self.image_mean, self.image_std) + format_string += "{0}Resize(min_size={1}, max_size={2}, mode='bilinear')".format(_indent, self.min_size, + self.max_size) + format_string += '\n)' + return format_string + + def forward(self, + images, # type: List[Tensor] + targets=None # type: Optional[List[Dict[str, Tensor]]] + ): + # type: (...) -> Tuple[ImageList, Optional[List[Dict[str, Tensor]]]] + images = [img for img in images] + for i in range(len(images)): + image = images[i] + target_index = targets[i] if targets is not None else None + + if image.dim() != 3: + raise ValueError("images is expected to be a list of 3d tensors " + "of shape [C, H, W], got {}".format(image.shape)) + image = self.normalize(image) # 对图像进行标准化处理 + image, target_index = self.resize(image, target_index) # 对图像和对应的bboxes缩放到指定范围 + images[i] = image + if targets is not None and target_index is not None: + targets[i] = target_index + + # 记录resize后的图像尺寸 + image_sizes = [img.shape[-2:] for img in images] + images = self.batch_images(images, self.size_divisible) # 将images打包成一个batch + image_sizes_list = torch.jit.annotate(List[Tuple[int, int]], []) + + for image_size in image_sizes: + assert len(image_size) == 2 + image_sizes_list.append((image_size[0], image_size[1])) + + image_list = ImageList(images, image_sizes_list) + return image_list, targets + + +def resize_boxes(boxes, original_size, new_size): + # type: (Tensor, List[int], List[int]) -> Tensor + """ + 将boxes参数根据图像的缩放情况进行相应缩放 + + Arguments: + original_size: 图像缩放前的尺寸 + new_size: 图像缩放后的尺寸 + """ + ratios = [ + torch.tensor(s, dtype=torch.float32, device=boxes.device) / + torch.tensor(s_orig, dtype=torch.float32, device=boxes.device) + for s, s_orig in zip(new_size, original_size) + ] + ratios_height, ratios_width = ratios + # Removes a tensor dimension, boxes [minibatch, 4] + # Returns a tuple of all slices along a given dimension, already without it. 
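+    # worked example: scaling from (400, 600) to (800, 1200) yields per-axis
+    # ratios (2.0, 2.0); each ratio is new_size / original_size for that axis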
+ xmin, ymin, xmax, ymax = boxes.unbind(1) + xmin = xmin * ratios_width + xmax = xmax * ratios_width + ymin = ymin * ratios_height + ymax = ymax * ratios_height + return torch.stack((xmin, ymin, xmax, ymax), dim=1) + + + + + + + + diff --git a/detector/Mask_RCNN/test.jpg b/detector/Mask_RCNN/test.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0cd81bc102cf3aeda2ede96de2782fb6e018fb67 --- /dev/null +++ b/detector/Mask_RCNN/test.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f9564cfeca39d8612936ba738256ea540ff9815fd17c1bb830ef5bac17ec453 +size 265000 diff --git a/detector/Mask_RCNN/test_result.jpg b/detector/Mask_RCNN/test_result.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2433891f781a8e429859e521d801b9746e3c8fd3 --- /dev/null +++ b/detector/Mask_RCNN/test_result.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a57c0aefc2435517ed9036d1d8b757df4519e1ad043e236306f9b64497df41a9 +size 428901 diff --git a/detector/Mask_RCNN/train_utils/__init__.py b/detector/Mask_RCNN/train_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3dfa7eadcf4a6f76febce7832675c26d7fdab677 --- /dev/null +++ b/detector/Mask_RCNN/train_utils/__init__.py @@ -0,0 +1,4 @@ +from .group_by_aspect_ratio import GroupedBatchSampler, create_aspect_ratio_groups +from .distributed_utils import init_distributed_mode, save_on_master, mkdir +from .coco_eval import EvalCOCOMetric +from .coco_utils import coco_remove_images_without_annotations, convert_coco_poly_mask, convert_to_coco_api diff --git a/detector/Mask_RCNN/train_utils/coco_eval.py b/detector/Mask_RCNN/train_utils/coco_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..b8df0204df58590e906452ba4f89864f64476d32 --- /dev/null +++ b/detector/Mask_RCNN/train_utils/coco_eval.py @@ -0,0 +1,163 @@ +import json +import copy + +import numpy as np +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +import pycocotools.mask as mask_util +from .distributed_utils import all_gather, is_main_process + + +def merge(img_ids, eval_results): + """将多个进程之间的数据汇总在一起""" + all_img_ids = all_gather(img_ids) + all_eval_results = all_gather(eval_results) + + merged_img_ids = [] + for p in all_img_ids: + merged_img_ids.extend(p) + + merged_eval_results = [] + for p in all_eval_results: + merged_eval_results.extend(p) + + merged_img_ids = np.array(merged_img_ids) + + # keep only unique (and in sorted order) images + # 去除重复的图片索引,多GPU训练时为了保证每个进程的训练图片数量相同,可能将一张图片分配给多个进程 + merged_img_ids, idx = np.unique(merged_img_ids, return_index=True) + merged_eval_results = [merged_eval_results[i] for i in idx] + + return list(merged_img_ids), merged_eval_results + + +class EvalCOCOMetric: + def __init__(self, + coco: COCO = None, + iou_type: str = None, + results_file_name: str = "predict_results.json", + classes_mapping: dict = None): + self.coco = copy.deepcopy(coco) + self.img_ids = [] # 记录每个进程处理图片的ids + self.results = [] + self.aggregation_results = None + self.classes_mapping = classes_mapping + self.coco_evaluator = None + assert iou_type in ["bbox", "segm", "keypoints"] + self.iou_type = iou_type + self.results_file_name = results_file_name + + def prepare_for_coco_detection(self, targets, outputs): + """将预测的结果转换成COCOeval指定的格式,针对目标检测任务""" + # 遍历每张图像的预测结果 + for target, output in zip(targets, outputs): + if len(output) == 0: + continue + + img_id = int(target["image_id"]) + if img_id in self.img_ids: + # 防止出现重复的数据 + 
continue + self.img_ids.append(img_id) + per_image_boxes = output["boxes"] + # 对于coco_eval, 需要的每个box的数据格式为[x_min, y_min, w, h] + # 而我们预测的box格式是[x_min, y_min, x_max, y_max],所以需要转下格式 + per_image_boxes[:, 2:] -= per_image_boxes[:, :2] + per_image_classes = output["labels"].tolist() + per_image_scores = output["scores"].tolist() + + res_list = [] + # 遍历每个目标的信息 + for object_score, object_class, object_box in zip( + per_image_scores, per_image_classes, per_image_boxes): + object_score = float(object_score) + class_idx = int(object_class) + if self.classes_mapping is not None: + class_idx = int(self.classes_mapping[str(class_idx)]) + # We recommend rounding coordinates to the nearest tenth of a pixel + # to reduce resulting JSON file size. + object_box = [round(b, 2) for b in object_box.tolist()] + + res = {"image_id": img_id, + "category_id": class_idx, + "bbox": object_box, + "score": round(object_score, 3)} + res_list.append(res) + self.results.append(res_list) + + def prepare_for_coco_segmentation(self, targets, outputs): + """将预测的结果转换成COCOeval指定的格式,针对实例分割任务""" + # 遍历每张图像的预测结果 + for target, output in zip(targets, outputs): + if len(output) == 0: + continue + + img_id = int(target["image_id"]) + if img_id in self.img_ids: + # 防止出现重复的数据 + continue + + self.img_ids.append(img_id) + per_image_masks = output["masks"] + per_image_classes = output["labels"].tolist() + per_image_scores = output["scores"].tolist() + + masks = per_image_masks > 0.5 + + res_list = [] + # 遍历每个目标的信息 + for mask, label, score in zip(masks, per_image_classes, per_image_scores): + rle = mask_util.encode(np.array(mask[0, :, :, np.newaxis], dtype=np.uint8, order="F"))[0] + rle["counts"] = rle["counts"].decode("utf-8") + + class_idx = int(label) + if self.classes_mapping is not None: + class_idx = int(self.classes_mapping[str(class_idx)]) + + res = {"image_id": img_id, + "category_id": class_idx, + "segmentation": rle, + "score": round(score, 3)} + res_list.append(res) + self.results.append(res_list) + + def update(self, targets, outputs): + if self.iou_type == "bbox": + self.prepare_for_coco_detection(targets, outputs) + elif self.iou_type == "segm": + self.prepare_for_coco_segmentation(targets, outputs) + else: + raise KeyError(f"not support iou_type: {self.iou_type}") + + def synchronize_results(self): + # 同步所有进程中的数据 + eval_ids, eval_results = merge(self.img_ids, self.results) + self.aggregation_results = {"img_ids": eval_ids, "results": eval_results} + + # 主进程上保存即可 + if is_main_process(): + results = [] + [results.extend(i) for i in eval_results] + # write predict results into json file + json_str = json.dumps(results, indent=4) + with open(self.results_file_name, 'w') as json_file: + json_file.write(json_str) + + def evaluate(self): + # 只在主进程上评估即可 + if is_main_process(): + # accumulate predictions from all images + coco_true = self.coco + coco_pre = coco_true.loadRes(self.results_file_name) + + self.coco_evaluator = COCOeval(cocoGt=coco_true, cocoDt=coco_pre, iouType=self.iou_type) + + self.coco_evaluator.evaluate() + self.coco_evaluator.accumulate() + print(f"IoU metric: {self.iou_type}") + self.coco_evaluator.summarize() + + coco_info = self.coco_evaluator.stats.tolist() # numpy to list + return coco_info + else: + return None diff --git a/detector/Mask_RCNN/train_utils/coco_utils.py b/detector/Mask_RCNN/train_utils/coco_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7a3b3122eeb189ceaa3c3455f32b4f83fd72c6df --- /dev/null +++ b/detector/Mask_RCNN/train_utils/coco_utils.py @@ -0,0 +1,98 @@ 
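+# Helpers for dropping images without usable annotations, rasterising COCO
+# polygon annotations into binary masks, and converting a custom dataset into
+# a pycocotools COCO object for evaluation.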
+import torch +import torch.utils.data +from pycocotools import mask as coco_mask +from pycocotools.coco import COCO + + +def coco_remove_images_without_annotations(dataset, ids): + """ + 删除coco数据集中没有目标,或者目标面积非常小的数据 + refer to: + https://github.com/pytorch/vision/blob/master/references/detection/coco_utils.py + :param dataset: + :param cat_list: + :return: + """ + def _has_only_empty_bbox(anno): + return all(any(o <= 1 for o in obj["bbox"][2:]) for obj in anno) + + def _has_valid_annotation(anno): + # if it's empty, there is no annotation + if len(anno) == 0: + return False + # if all boxes have close to zero area, there is no annotation + if _has_only_empty_bbox(anno): + return False + + return True + + valid_ids = [] + for ds_idx, img_id in enumerate(ids): + ann_ids = dataset.getAnnIds(imgIds=img_id, iscrowd=None) + anno = dataset.loadAnns(ann_ids) + + if _has_valid_annotation(anno): + valid_ids.append(img_id) + + return valid_ids + + +def convert_coco_poly_mask(segmentations, height, width): + masks = [] + for polygons in segmentations: + rles = coco_mask.frPyObjects(polygons, height, width) + mask = coco_mask.decode(rles) + if len(mask.shape) < 3: + mask = mask[..., None] + mask = torch.as_tensor(mask, dtype=torch.uint8) + mask = mask.any(dim=2) + masks.append(mask) + if masks: + masks = torch.stack(masks, dim=0) + else: + # 如果mask为空,则说明没有目标,直接返回数值为0的mask + masks = torch.zeros((0, height, width), dtype=torch.uint8) + return masks + + +def convert_to_coco_api(self): + coco_ds = COCO() + # annotation IDs need to start at 1, not 0, see torchvision issue #1530 + ann_id = 1 + dataset = {"images": [], "categories": [], "annotations": []} + categories = set() + for img_idx in range(len(self)): + targets, h, w = self.get_annotations(img_idx) + img_id = targets["image_id"].item() + img_dict = {"id": img_id, + "height": h, + "width": w} + dataset["images"].append(img_dict) + bboxes = targets["boxes"].clone() + # convert (x_min, ymin, xmax, ymax) to (xmin, ymin, w, h) + bboxes[:, 2:] -= bboxes[:, :2] + bboxes = bboxes.tolist() + labels = targets["labels"].tolist() + areas = targets["area"].tolist() + iscrowd = targets["iscrowd"].tolist() + if "masks" in targets: + masks = targets["masks"] + # make masks Fortran contiguous for coco_mask + masks = masks.permute(0, 2, 1).contiguous().permute(0, 2, 1) + num_objs = len(bboxes) + for i in range(num_objs): + ann = {"image_id": img_id, + "bbox": bboxes[i], + "category_id": labels[i], + "area": areas[i], + "iscrowd": iscrowd[i], + "id": ann_id} + categories.add(labels[i]) + if "masks" in targets: + ann["segmentation"] = coco_mask.encode(masks[i].numpy()) + dataset["annotations"].append(ann) + ann_id += 1 + dataset["categories"] = [{"id": i} for i in sorted(categories)] + coco_ds.dataset = dataset + coco_ds.createIndex() + return coco_ds diff --git a/detector/Mask_RCNN/train_utils/distributed_utils.py b/detector/Mask_RCNN/train_utils/distributed_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..80b2412c6929e30013c6c630da3cf788665bf276 --- /dev/null +++ b/detector/Mask_RCNN/train_utils/distributed_utils.py @@ -0,0 +1,299 @@ +from collections import defaultdict, deque +import datetime +import pickle +import time +import errno +import os + +import torch +import torch.distributed as dist + + +class SmoothedValue(object): + """Track a series of values and provide access to smoothed values over a + window or the global series average. 
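+
+    Example (illustrative):
+        >>> v = SmoothedValue(window_size=2)
+        >>> v.update(1.0); v.update(3.0)
+        >>> (v.avg, v.global_avg)
+        (2.0, 2.0)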
+ """ + def __init__(self, window_size=20, fmt=None): + if fmt is None: + fmt = "{value:.4f} ({global_avg:.4f})" + self.deque = deque(maxlen=window_size) # deque简单理解成加强版list + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, n=1): + self.deque.append(value) + self.count += n + self.total += value * n + + def synchronize_between_processes(self): + """ + Warning: does not synchronize the deque! + """ + if not is_dist_avail_and_initialized(): + return + t = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda") + dist.barrier() + dist.all_reduce(t) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): # @property 是装饰器,这里可简单理解为增加median属性(只读) + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, + avg=self.avg, + global_avg=self.global_avg, + max=self.max, + value=self.value) + + +def all_gather(data): + """ + 收集各个进程中的数据 + Run all_gather on arbitrary picklable data (not necessarily tensors) + Args: + data: any picklable object + Returns: + list[data]: list of data gathered from each rank + """ + world_size = get_world_size() # 进程数 + if world_size == 1: + return [data] + + data_list = [None] * world_size + dist.all_gather_object(data_list, data) + + return data_list + + +def reduce_dict(input_dict, average=True): + """ + Args: + input_dict (dict): all the values will be reduced + average (bool): whether to do average or sum + Reduce the values in the dictionary from all processes so that all processes + have the averaged results. Returns a dict with the same fields as + input_dict, after reduction. 
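+
+    Example (illustrative, world_size=2, average=True):
+        rank 0 holds {"loss": tensor(2.)} and rank 1 holds {"loss": tensor(4.)};
+        after the all_reduce both ranks see {"loss": tensor(3.)}.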
+ """ + world_size = get_world_size() + if world_size < 2: # 单GPU的情况 + return input_dict + with torch.no_grad(): # 多GPU的情况 + names = [] + values = [] + # sort the keys so that they are consistent across processes + for k in sorted(input_dict.keys()): + names.append(k) + values.append(input_dict[k]) + values = torch.stack(values, dim=0) + dist.all_reduce(values) + if average: + values /= world_size + + reduced_dict = {k: v for k, v in zip(names, values)} + return reduced_dict + + +class MetricLogger(object): + def __init__(self, delimiter="\t"): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + assert isinstance(v, (float, int)) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError("'{}' object has no attribute '{}'".format( + type(self).__name__, attr)) + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append( + "{}: {}".format(name, str(meter)) + ) + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, meter): + self.meters[name] = meter + + def log_every(self, iterable, print_freq, header=None): + i = 0 + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt='{avg:.4f}') + data_time = SmoothedValue(fmt='{avg:.4f}') + space_fmt = ":" + str(len(str(len(iterable)))) + "d" + if torch.cuda.is_available(): + log_msg = self.delimiter.join([header, + '[{0' + space_fmt + '}/{1}]', + 'eta: {eta}', + '{meters}', + 'time: {time}', + 'data: {data}', + 'max mem: {memory:.0f}']) + else: + log_msg = self.delimiter.join([header, + '[{0' + space_fmt + '}/{1}]', + 'eta: {eta}', + '{meters}', + 'time: {time}', + 'data: {data}']) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if i % print_freq == 0 or i == len(iterable) - 1: + eta_second = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=eta_second)) + if torch.cuda.is_available(): + print(log_msg.format(i, len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB)) + else: + print(log_msg.format(i, len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time))) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('{} Total time: {} ({:.4f} s / it)'.format(header, + total_time_str, + + total_time / len(iterable))) + + +def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor): + + def f(x): + """根据step数返回一个学习率倍率因子""" + if x >= warmup_iters: # 当迭代数大于给定的warmup_iters时,倍率因子为1 + return 1 + alpha = float(x) / warmup_iters + # 迭代过程中倍率因子从warmup_factor -> 1 + return warmup_factor * (1 - alpha) + alpha + + return torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=f) + + +def mkdir(path): + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +def setup_for_distributed(is_master): + """ + This function disables when not in master process + """ + import builtins as __builtin__ + 
builtin_print = __builtin__.print + + def print(*args, **kwargs): + force = kwargs.pop('force', False) + if is_master or force: + builtin_print(*args, **kwargs) + + __builtin__.print = print + + +def is_dist_avail_and_initialized(): + """检查是否支持分布式环境""" + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def is_main_process(): + return get_rank() == 0 + + +def save_on_master(*args, **kwargs): + if is_main_process(): + torch.save(*args, **kwargs) + + +def init_distributed_mode(args): + if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ['WORLD_SIZE']) + args.gpu = int(os.environ['LOCAL_RANK']) + elif 'SLURM_PROCID' in os.environ: + args.rank = int(os.environ['SLURM_PROCID']) + args.gpu = args.rank % torch.cuda.device_count() + else: + print('Not using distributed mode') + args.distributed = False + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + args.dist_backend = 'nccl' + print('| distributed init (rank {}): {}'.format( + args.rank, args.dist_url), flush=True) + torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url, + world_size=args.world_size, rank=args.rank) + torch.distributed.barrier() + setup_for_distributed(args.rank == 0) + diff --git a/detector/Mask_RCNN/train_utils/group_by_aspect_ratio.py b/detector/Mask_RCNN/train_utils/group_by_aspect_ratio.py new file mode 100644 index 0000000000000000000000000000000000000000..e7b8b9e887646bc9927098e45c002d2af4b5d2ad --- /dev/null +++ b/detector/Mask_RCNN/train_utils/group_by_aspect_ratio.py @@ -0,0 +1,201 @@ +import bisect +from collections import defaultdict +import copy +from itertools import repeat, chain +import math +import numpy as np + +import torch +import torch.utils.data +from torch.utils.data.sampler import BatchSampler, Sampler +from torch.utils.model_zoo import tqdm +import torchvision + +from PIL import Image + + +def _repeat_to_at_least(iterable, n): + repeat_times = math.ceil(n / len(iterable)) + repeated = chain.from_iterable(repeat(iterable, repeat_times)) + return list(repeated) + + +class GroupedBatchSampler(BatchSampler): + """ + Wraps another sampler to yield a mini-batch of indices. + It enforces that the batch only contain elements from the same group. + It also tries to provide mini-batches which follows an ordering which is + as close as possible to the ordering from the original sampler. + Arguments: + sampler (Sampler): Base sampler. + group_ids (list[int]): If the sampler produces indices in range [0, N), + `group_ids` must be a list of `N` ints which contains the group id of each sample. + The group ids must be a continuous set of integers starting from + 0, i.e. they must be in the range [0, num_groups). + batch_size (int): Size of mini-batch. 
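+
+    Example (illustrative):
+        with group_ids=[0, 1, 0, 1] and batch_size=2 the sampler only yields
+        single-group batches, e.g. [0, 2] and then [1, 3].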
+ """ + def __init__(self, sampler, group_ids, batch_size): + if not isinstance(sampler, Sampler): + raise ValueError( + "sampler should be an instance of " + "torch.utils.data.Sampler, but got sampler={}".format(sampler) + ) + self.sampler = sampler + self.group_ids = group_ids + self.batch_size = batch_size + + def __iter__(self): + buffer_per_group = defaultdict(list) + samples_per_group = defaultdict(list) + + num_batches = 0 + for idx in self.sampler: + group_id = self.group_ids[idx] + buffer_per_group[group_id].append(idx) + samples_per_group[group_id].append(idx) + if len(buffer_per_group[group_id]) == self.batch_size: + yield buffer_per_group[group_id] + num_batches += 1 + del buffer_per_group[group_id] + assert len(buffer_per_group[group_id]) < self.batch_size + + # now we have run out of elements that satisfy + # the group criteria, let's return the remaining + # elements so that the size of the sampler is + # deterministic + expected_num_batches = len(self) + num_remaining = expected_num_batches - num_batches + if num_remaining > 0: + # for the remaining batches, take first the buffers with largest number + # of elements + for group_id, _ in sorted(buffer_per_group.items(), + key=lambda x: len(x[1]), reverse=True): + remaining = self.batch_size - len(buffer_per_group[group_id]) + samples_from_group_id = _repeat_to_at_least(samples_per_group[group_id], remaining) + buffer_per_group[group_id].extend(samples_from_group_id[:remaining]) + assert len(buffer_per_group[group_id]) == self.batch_size + yield buffer_per_group[group_id] + num_remaining -= 1 + if num_remaining == 0: + break + assert num_remaining == 0 + + def __len__(self): + return len(self.sampler) // self.batch_size + + +def _compute_aspect_ratios_slow(dataset, indices=None): + print("Your dataset doesn't support the fast path for " + "computing the aspect ratios, so will iterate over " + "the full dataset and load every image instead. 
" + "This might take some time...") + if indices is None: + indices = range(len(dataset)) + + class SubsetSampler(Sampler): + def __init__(self, indices): + self.indices = indices + + def __iter__(self): + return iter(self.indices) + + def __len__(self): + return len(self.indices) + + sampler = SubsetSampler(indices) + data_loader = torch.utils.data.DataLoader( + dataset, batch_size=1, sampler=sampler, + num_workers=14, # you might want to increase it for faster processing + collate_fn=lambda x: x[0]) + aspect_ratios = [] + with tqdm(total=len(dataset)) as pbar: + for _i, (img, _) in enumerate(data_loader): + pbar.update(1) + height, width = img.shape[-2:] + aspect_ratio = float(width) / float(height) + aspect_ratios.append(aspect_ratio) + return aspect_ratios + + +def _compute_aspect_ratios_custom_dataset(dataset, indices=None): + if indices is None: + indices = range(len(dataset)) + aspect_ratios = [] + for i in indices: + height, width = dataset.get_height_and_width(i) + aspect_ratio = float(width) / float(height) + aspect_ratios.append(aspect_ratio) + return aspect_ratios + + +def _compute_aspect_ratios_coco_dataset(dataset, indices=None): + if indices is None: + indices = range(len(dataset)) + aspect_ratios = [] + for i in indices: + img_info = dataset.coco.imgs[dataset.ids[i]] + aspect_ratio = float(img_info["width"]) / float(img_info["height"]) + aspect_ratios.append(aspect_ratio) + return aspect_ratios + + +def _compute_aspect_ratios_voc_dataset(dataset, indices=None): + if indices is None: + indices = range(len(dataset)) + aspect_ratios = [] + for i in indices: + # this doesn't load the data into memory, because PIL loads it lazily + width, height = Image.open(dataset.images[i]).size + aspect_ratio = float(width) / float(height) + aspect_ratios.append(aspect_ratio) + return aspect_ratios + + +def _compute_aspect_ratios_subset_dataset(dataset, indices=None): + if indices is None: + indices = range(len(dataset)) + + ds_indices = [dataset.indices[i] for i in indices] + return compute_aspect_ratios(dataset.dataset, ds_indices) + + +def compute_aspect_ratios(dataset, indices=None): + if hasattr(dataset, "get_height_and_width"): + return _compute_aspect_ratios_custom_dataset(dataset, indices) + + if isinstance(dataset, torchvision.datasets.CocoDetection): + return _compute_aspect_ratios_coco_dataset(dataset, indices) + + if isinstance(dataset, torchvision.datasets.VOCDetection): + return _compute_aspect_ratios_voc_dataset(dataset, indices) + + if isinstance(dataset, torch.utils.data.Subset): + return _compute_aspect_ratios_subset_dataset(dataset, indices) + + # slow path + return _compute_aspect_ratios_slow(dataset, indices) + + +def _quantize(x, bins): + bins = copy.deepcopy(bins) + bins = sorted(bins) + # bisect_right:寻找y元素按顺序应该排在bins中哪个元素的右边,返回的是索引 + quantized = list(map(lambda y: bisect.bisect_right(bins, y), x)) + return quantized + + +def create_aspect_ratio_groups(dataset, k=0): + # 计算所有数据集中的图片width/height比例 + aspect_ratios = compute_aspect_ratios(dataset) + # 将[0.5, 2]区间划分成2*k+1等份 + bins = (2 ** np.linspace(-1, 1, 2 * k + 1)).tolist() if k > 0 else [1.0] + + # 统计所有图像比例在bins区间中的位置索引 + groups = _quantize(aspect_ratios, bins) + # count number of elements per group + # 统计每个区间的频次 + counts = np.unique(groups, return_counts=True)[1] + fbins = [0] + bins + [np.inf] + print("Using {} as bins for aspect ratio quantization".format(fbins)) + print("Count of instances per bin: {}".format(counts)) + return groups diff --git a/detector/Mask_RCNN/train_utils/train_eval_utils.py 
b/detector/Mask_RCNN/train_utils/train_eval_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..29bae2fb245f67bf6e2c48117f634f11fdac237f
--- /dev/null
+++ b/detector/Mask_RCNN/train_utils/train_eval_utils.py
@@ -0,0 +1,109 @@
+import math
+import sys
+import time
+
+import torch
+
+import train_utils.distributed_utils as utils
+from .coco_eval import EvalCOCOMetric
+
+
+def train_one_epoch(model, optimizer, data_loader, device, epoch,
+                    print_freq=50, warmup=False, scaler=None):
+    model.train()
+    metric_logger = utils.MetricLogger(delimiter="  ")
+    metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}'))
+    header = 'Epoch: [{}]'.format(epoch)
+
+    lr_scheduler = None
+    if epoch == 0 and warmup is True:  # enable warmup in the first epoch (epoch=0) to ramp the lr up gently
+        warmup_factor = 1.0 / 1000
+        warmup_iters = min(1000, len(data_loader) - 1)
+
+        lr_scheduler = utils.warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor)
+
+    mloss = torch.zeros(1).to(device)  # mean losses
+    for i, [images, targets] in enumerate(metric_logger.log_every(data_loader, print_freq, header)):
+        images = list(image.to(device) for image in images)
+        targets = [{k: v.to(device) for k, v in t.items()} for t in targets]
+
+        # context manager for mixed-precision training; a no-op when running on CPU
+        with torch.cuda.amp.autocast(enabled=scaler is not None):
+            loss_dict = model(images, targets)
+
+            losses = sum(loss for loss in loss_dict.values())
+
+        # reduce losses over all GPUs for logging purpose
+        loss_dict_reduced = utils.reduce_dict(loss_dict)
+        losses_reduced = sum(loss for loss in loss_dict_reduced.values())
+
+        loss_value = losses_reduced.item()
+        # keep a running mean of the training loss
+        mloss = (mloss * i + loss_value) / (i + 1)  # update mean losses
+
+        if not math.isfinite(loss_value):  # stop training when the loss diverges
+            print("Loss is {}, stopping training".format(loss_value))
+            print(loss_dict_reduced)
+            sys.exit(1)
+
+        optimizer.zero_grad()
+        if scaler is not None:
+            scaler.scale(losses).backward()
+            scaler.step(optimizer)
+            scaler.update()
+        else:
+            losses.backward()
+            optimizer.step()
+
+        if lr_scheduler is not None:  # the warmup schedule only runs during the first epoch
+            lr_scheduler.step()
+
+        metric_logger.update(loss=losses_reduced, **loss_dict_reduced)
+        now_lr = optimizer.param_groups[0]["lr"]
+        metric_logger.update(lr=now_lr)
+
+    return mloss, now_lr
+
+
+@torch.no_grad()
+def evaluate(model, data_loader, device):
+    cpu_device = torch.device("cpu")
+    model.eval()
+    metric_logger = utils.MetricLogger(delimiter="  ")
+    header = "Test: "
+
+    det_metric = EvalCOCOMetric(data_loader.dataset.coco, iou_type="bbox", results_file_name="det_results.json")
+    seg_metric = EvalCOCOMetric(data_loader.dataset.coco, iou_type="segm", results_file_name="seg_results.json")
+    for image, targets in metric_logger.log_every(data_loader, 100, header):
+        image = list(img.to(device) for img in image)
+
+        # skip GPU synchronisation when running on CPU
+        if device != torch.device("cpu"):
+            torch.cuda.synchronize(device)
+
+        model_time = time.time()
+        outputs = model(image)
+
+        outputs = [{k: v.to(cpu_device) for k, v in t.items()} for t in outputs]
+        model_time = time.time() - model_time
+
+        det_metric.update(targets, outputs)
+        seg_metric.update(targets, outputs)
+        metric_logger.update(model_time=model_time)
+
+    # gather the stats from all processes
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger)
+
+    # synchronise the prediction results across all processes
+    det_metric.synchronize_results()
+    seg_metric.synchronize_results()
+
+    if utils.is_main_process():
+        coco_info = det_metric.evaluate()
+        seg_info = seg_metric.evaluate()
+    else:
+        coco_info = None
+        seg_info = None
+
+    return coco_info, seg_info
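The two helpers above are the whole training-loop surface of `train_eval_utils.py`. Below is a minimal sketch of how they might be wired together; the data loaders are assumed to be built elsewhere (they must yield `(images, targets)` pairs in the torchvision detection format, and the validation loader's dataset must expose a `.coco` object for `EvalCOCOMetric`), and the model and hyperparameters are illustrative rather than this repo's prescribed settings.
```python
import torch
import torchvision

from train_utils.train_eval_utils import train_one_epoch, evaluate


def run_training(train_loader, val_loader, num_epochs=26):
    # illustrative wiring only: the loaders follow the torchvision detection
    # convention, and the val dataset must expose a `.coco` COCO object
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = torchvision.models.detection.maskrcnn_resnet50_fpn(num_classes=91).to(device)

    params = [p for p in model.parameters() if p.requires_grad]
    optimizer = torch.optim.SGD(params, lr=0.004, momentum=0.9, weight_decay=1e-4)
    # passing a GradScaler turns on the mixed-precision path inside train_one_epoch
    scaler = torch.cuda.amp.GradScaler() if torch.cuda.is_available() else None

    for epoch in range(num_epochs):
        mloss, lr = train_one_epoch(model, optimizer, train_loader, device,
                                    epoch, print_freq=50, warmup=True, scaler=scaler)
        coco_info, seg_info = evaluate(model, val_loader, device)
```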
diff --git a/detector/YOLOv3/README.md b/detector/YOLOv3/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..ef8e168b723a16fe3b3aee4fb8d68934fe5818bd
--- /dev/null
+++ b/detector/YOLOv3/README.md
@@ -0,0 +1,11 @@
+# YOLOv3 for detection
+
+This is an implementation of YOLOv3 that contains only the forward (inference) part.
+
+If you want to train YOLOv3 on your custom dataset, please search for `YOLOv3` on GitHub.
+
+## Quick forward
+```bash
+cd YOLOv3
+python
+```
\ No newline at end of file
diff --git a/detector/YOLOv3/__init__.py b/detector/YOLOv3/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5f8ea86439cdcce3a88291ff1888b6bb8138323
--- /dev/null
+++ b/detector/YOLOv3/__init__.py
@@ -0,0 +1,7 @@
+import sys
+
+sys.path.append("detector/YOLOv3")
+
+from .detector import YOLOv3
+
+__all__ = ['YOLOv3']
diff --git a/detector/YOLOv3/cfg.py b/detector/YOLOv3/cfg.py
new file mode 100644
index 0000000000000000000000000000000000000000..57127eef5b3e08a2de7cdb3815f167df4df92ea1
--- /dev/null
+++ b/detector/YOLOv3/cfg.py
@@ -0,0 +1,248 @@
+import torch
+from .yolo_utils import convert2cpu
+
+
+def parse_cfg(cfgfile):
+    blocks = []
+    fp = open(cfgfile)
+    block = None
+    line = fp.readline()
+    while line != '':
+        line = line.rstrip()
+        if line == '' or line[0] == '#':
+            line = fp.readline()
+            continue
+        elif line[0] == '[':
+            if block:
+                blocks.append(block)
+            block = dict()
+            block['type'] = line.lstrip('[').rstrip(']')
+            # set default value
+            if block['type'] == 'convolutional':
+                block['batch_normalize'] = 0
+        else:
+            key, value = line.split('=')
+            key = key.strip()
+            if key == 'type':
+                key = '_type'
+            value = value.strip()
+            block[key] = value
+        line = fp.readline()
+
+    if block:
+        blocks.append(block)
+    fp.close()
+    return blocks
+
+
+def print_cfg(blocks):
+    print('layer filters size input output')
+    prev_width = 416
+    prev_height = 416
+    prev_filters = 3
+    out_filters = []
+    out_widths = []
+    out_heights = []
+    ind = -2
+    for block in blocks:
+        ind += 1
+        if block['type'] == 'net':
+            prev_width = int(block['width'])
+            prev_height = int(block['height'])
+            continue
+        elif block['type'] == 'convolutional':
+            filters = int(block['filters'])
+            kernel_size = int(block['size'])
+            stride = int(block['stride'])
+            is_pad = int(block['pad'])
+            pad = (kernel_size - 1) // 2 if is_pad else 0
+            width = (prev_width + 2 * pad - kernel_size) // stride + 1
+            height = (prev_height + 2 * pad - kernel_size) // stride + 1
+            print('%5d %-6s %4d %d x %d / %d %3d x %3d x%4d -> %3d x %3d x%4d' % (
+                ind, 'conv', filters, kernel_size, kernel_size, stride, prev_width, prev_height, prev_filters, width,
+                height, filters))
+            prev_width = width
+            prev_height = height
+            prev_filters = filters
+            out_widths.append(prev_width)
+            out_heights.append(prev_height)
+            out_filters.append(prev_filters)
+        elif block['type'] == 'maxpool':
+            pool_size = int(block['size'])
+            stride = int(block['stride'])
+            width = prev_width // stride
+            height = prev_height // stride
+            # pooling keeps the channel count, so report prev_filters instead of the
+            # stale `filters` value left over from the previous convolutional block
+            print('%5d %-6s %d x %d / %d %3d x %3d x%4d -> %3d x %3d x%4d' % (
+                ind, 'max', pool_size, pool_size, stride, prev_width, prev_height, prev_filters, width, height,
+                prev_filters))
+            prev_width = width
+            prev_height = height
+            out_widths.append(prev_width)
+            out_heights.append(prev_height)
+            out_filters.append(prev_filters)
+        elif block['type'] == 'avgpool':
+            width = 1
+            height = 1
+            print('%5d %-6s %3d x %3d x%4d -> %3d' % (
+                ind, 'avg', prev_width, 
prev_height, prev_filters, prev_filters)) + prev_width = width + prev_height = height + prev_filters = filters + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'softmax': + print('%5d %-6s -> %3d' % (ind, 'softmax', prev_filters)) + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'cost': + print('%5d %-6s -> %3d' % (ind, 'cost', prev_filters)) + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'reorg': + stride = int(block['stride']) + filters = stride * stride * prev_filters + width = prev_width // stride + height = prev_height // stride + print('%5d %-6s / %d %3d x %3d x%4d -> %3d x %3d x%4d' % ( + ind, 'reorg', stride, prev_width, prev_height, prev_filters, width, height, filters)) + prev_width = width + prev_height = height + prev_filters = filters + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'upsample': + stride = int(block['stride']) + filters = prev_filters + width = prev_width * stride + height = prev_height * stride + print('%5d %-6s * %d %3d x %3d x%4d -> %3d x %3d x%4d' % ( + ind, 'upsample', stride, prev_width, prev_height, prev_filters, width, height, filters)) + prev_width = width + prev_height = height + prev_filters = filters + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'route': + layers = block['layers'].split(',') + layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers] + if len(layers) == 1: + print('%5d %-6s %d' % (ind, 'route', layers[0])) + prev_width = out_widths[layers[0]] + prev_height = out_heights[layers[0]] + prev_filters = out_filters[layers[0]] + elif len(layers) == 2: + print('%5d %-6s %d %d' % (ind, 'route', layers[0], layers[1])) + prev_width = out_widths[layers[0]] + prev_height = out_heights[layers[0]] + assert (prev_width == out_widths[layers[1]]) + assert (prev_height == out_heights[layers[1]]) + prev_filters = out_filters[layers[0]] + out_filters[layers[1]] + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] in ['region', 'yolo']: + print('%5d %-6s' % (ind, 'detection')) + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'shortcut': + from_id = int(block['from']) + from_id = from_id if from_id > 0 else from_id + ind + print('%5d %-6s %d' % (ind, 'shortcut', from_id)) + prev_width = out_widths[from_id] + prev_height = out_heights[from_id] + prev_filters = out_filters[from_id] + out_widths.append(prev_width) + out_heights.append(prev_height) + out_filters.append(prev_filters) + elif block['type'] == 'connected': + filters = int(block['output']) + print('%5d %-6s %d -> %3d' % (ind, 'connected', prev_filters, filters)) + prev_filters = filters + out_widths.append(1) + out_heights.append(1) + out_filters.append(prev_filters) + else: + print('unknown type %s' % (block['type'])) + + +def load_conv(buf, start, conv_model): + num_w = conv_model.weight.numel() + num_b = conv_model.bias.numel() + # print("start: {}, num_w: {}, num_b: {}".format(start, num_w, num_b)) + # by ysyun, use .view_as() + conv_model.bias.data.copy_(torch.from_numpy(buf[start:start + num_b]).view_as(conv_model.bias.data)); + start = start + num_b + 
conv_model.weight.data.copy_(torch.from_numpy(buf[start:start + num_w]).view_as(conv_model.weight.data)); + start = start + num_w + return start + + +def save_conv(fp, conv_model): + if conv_model.bias.is_cuda: + convert2cpu(conv_model.bias.data).numpy().tofile(fp) + convert2cpu(conv_model.weight.data).numpy().tofile(fp) + else: + conv_model.bias.data.numpy().tofile(fp) + conv_model.weight.data.numpy().tofile(fp) + + +def load_conv_bn(buf, start, conv_model, bn_model): + num_w = conv_model.weight.numel() + num_b = bn_model.bias.numel() + bn_model.bias.data.copy_(torch.from_numpy(buf[start:start + num_b])); + start = start + num_b + bn_model.weight.data.copy_(torch.from_numpy(buf[start:start + num_b])); + start = start + num_b + bn_model.running_mean.copy_(torch.from_numpy(buf[start:start + num_b])); + start = start + num_b + bn_model.running_var.copy_(torch.from_numpy(buf[start:start + num_b])); + start = start + num_b + # conv_model.weight.data.copy_(torch.from_numpy(buf[start:start+num_w])); start = start + num_w + conv_model.weight.data.copy_(torch.from_numpy(buf[start:start + num_w]).view_as(conv_model.weight.data)); + start = start + num_w + return start + + +def save_conv_bn(fp, conv_model, bn_model): + if bn_model.bias.is_cuda: + convert2cpu(bn_model.bias.data).numpy().tofile(fp) + convert2cpu(bn_model.weight.data).numpy().tofile(fp) + convert2cpu(bn_model.running_mean).numpy().tofile(fp) + convert2cpu(bn_model.running_var).numpy().tofile(fp) + convert2cpu(conv_model.weight.data).numpy().tofile(fp) + else: + bn_model.bias.data.numpy().tofile(fp) + bn_model.weight.data.numpy().tofile(fp) + bn_model.running_mean.numpy().tofile(fp) + bn_model.running_var.numpy().tofile(fp) + conv_model.weight.data.numpy().tofile(fp) + + +def load_fc(buf, start, fc_model): + num_w = fc_model.weight.numel() + num_b = fc_model.bias.numel() + fc_model.bias.data.copy_(torch.from_numpy(buf[start:start + num_b])); + start = start + num_b + fc_model.weight.data.copy_(torch.from_numpy(buf[start:start + num_w])); + start = start + num_w + return start + + +def save_fc(fp, fc_model): + fc_model.bias.data.numpy().tofile(fp) + fc_model.weight.data.numpy().tofile(fp) + + +if __name__ == '__main__': + import sys + + blocks = parse_cfg('cfg/yolo.cfg') + if len(sys.argv) == 2: + blocks = parse_cfg(sys.argv[1]) + print_cfg(blocks) diff --git a/detector/YOLOv3/cfg/coco.data b/detector/YOLOv3/cfg/coco.data new file mode 100644 index 0000000000000000000000000000000000000000..b7e31be301b0d59ca5e71e6a458e68978bf91113 --- /dev/null +++ b/detector/YOLOv3/cfg/coco.data @@ -0,0 +1,5 @@ +train = coco_train.txt +valid = coco_test.txt +names = data/coco.names +backup = backup +gpus = 0,1,2,3 diff --git a/detector/YOLOv3/cfg/coco.names b/detector/YOLOv3/cfg/coco.names new file mode 100644 index 0000000000000000000000000000000000000000..ca76c80b5b2cd0b25047f75736656cfebc9da7aa --- /dev/null +++ b/detector/YOLOv3/cfg/coco.names @@ -0,0 +1,80 @@ +person +bicycle +car +motorbike +aeroplane +bus +train +truck +boat +traffic light +fire hydrant +stop sign +parking meter +bench +bird +cat +dog +horse +sheep +cow +elephant +bear +zebra +giraffe +backpack +umbrella +handbag +tie +suitcase +frisbee +skis +snowboard +sports ball +kite +baseball bat +baseball glove +skateboard +surfboard +tennis racket +bottle +wine glass +cup +fork +knife +spoon +bowl +banana +apple +sandwich +orange +broccoli +carrot +hot dog +pizza +donut +cake +chair +sofa +pottedplant +bed +diningtable +toilet +tvmonitor +laptop +mouse +remote +keyboard +cell phone 
+microwave +oven +toaster +sink +refrigerator +book +clock +vase +scissors +teddy bear +hair drier +toothbrush diff --git a/detector/YOLOv3/cfg/darknet19_448.cfg b/detector/YOLOv3/cfg/darknet19_448.cfg new file mode 100644 index 0000000000000000000000000000000000000000..133c688eb1d53feaccf4dc501348f8b3f3611144 --- /dev/null +++ b/detector/YOLOv3/cfg/darknet19_448.cfg @@ -0,0 +1,200 @@ +[net] +batch=128 +subdivisions=4 +height=448 +width=448 +max_crop=512 +channels=3 +momentum=0.9 +decay=0.0005 + +learning_rate=0.001 +policy=poly +power=4 +max_batches=100000 + +angle=7 +hue = .1 +saturation=.75 +exposure=.75 +aspect=.75 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +filters=1000 +size=1 +stride=1 +pad=1 +activation=linear + +[avgpool] + +[softmax] +groups=1 + +[cost] +type=sse + diff --git a/detector/YOLOv3/cfg/tiny-yolo-voc.cfg b/detector/YOLOv3/cfg/tiny-yolo-voc.cfg new file mode 100644 index 0000000000000000000000000000000000000000..ab2c066a216eacbee86e78c28f4d236e5d6b351a --- /dev/null +++ b/detector/YOLOv3/cfg/tiny-yolo-voc.cfg @@ -0,0 +1,134 @@ +[net] +batch=64 +subdivisions=8 +width=416 +height=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +max_batches = 40200 +policy=steps +steps=-1,100,20000,30000 +scales=.1,10,.1,.1 + +[convolutional] +batch_normalize=1 +filters=16 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 
+filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=1 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +########### + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=125 +activation=linear + +[region] +anchors = 1.08,1.19, 3.42,4.41, 6.63,11.38, 9.42,5.11, 16.62,10.52 +bias_match=1 +classes=20 +coords=4 +num=5 +softmax=1 +jitter=.2 +rescore=1 + +object_scale=5 +noobject_scale=1 +class_scale=1 +coord_scale=1 + +absolute=1 +thresh = .6 +random=1 diff --git a/detector/YOLOv3/cfg/tiny-yolo.cfg b/detector/YOLOv3/cfg/tiny-yolo.cfg new file mode 100644 index 0000000000000000000000000000000000000000..ac5770e78fa0645d8f69b5e6233e1925756ebe66 --- /dev/null +++ b/detector/YOLOv3/cfg/tiny-yolo.cfg @@ -0,0 +1,140 @@ +[net] +# Training +# batch=64 +# subdivisions=2 +# Testing +batch=1 +subdivisions=1 +width=416 +height=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +burn_in=1000 +max_batches = 500200 +policy=steps +steps=400000,450000 +scales=.1,.1 + +[convolutional] +batch_normalize=1 +filters=16 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=1 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +########### + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=425 +activation=linear + +[region] +anchors = 0.57273, 0.677385, 1.87446, 2.06253, 3.33843, 5.47434, 7.88282, 3.52778, 9.77052, 9.16828 +bias_match=1 +classes=80 +coords=4 +num=5 +softmax=1 +jitter=.2 +rescore=0 + +object_scale=5 +noobject_scale=1 +class_scale=1 +coord_scale=1 + +absolute=1 +thresh = .6 +random=1 + diff --git a/detector/YOLOv3/cfg/voc.data b/detector/YOLOv3/cfg/voc.data new file mode 100644 index 0000000000000000000000000000000000000000..3329357e72dd17a98732505840ac60a01e39acd0 --- /dev/null +++ b/detector/YOLOv3/cfg/voc.data @@ -0,0 +1,5 @@ +train = data/voc_train.txt +valid = data/2007_test.txt +names = data/voc.names +backup = backup +gpus = 3 diff --git a/detector/YOLOv3/cfg/voc.names b/detector/YOLOv3/cfg/voc.names new file mode 100644 index 0000000000000000000000000000000000000000..8420ab35ede7400974f25836a6bb543024686a0e --- /dev/null +++ b/detector/YOLOv3/cfg/voc.names @@ -0,0 +1,20 @@ +aeroplane +bicycle +bird +boat +bottle +bus +car +cat +chair +cow +diningtable +dog +horse +motorbike +person +pottedplant +sheep +sofa +train +tvmonitor diff --git 
a/detector/YOLOv3/cfg/voc_gaotie.data b/detector/YOLOv3/cfg/voc_gaotie.data new file mode 100644 index 0000000000000000000000000000000000000000..66495ec54d6d9e75e3e5fbdd3c4690ba8d5f6c2d --- /dev/null +++ b/detector/YOLOv3/cfg/voc_gaotie.data @@ -0,0 +1,5 @@ +train = data/gaotie_trainval.txt +valid = data/gaotie_test.txt +names = data/voc.names +backup = backup +gpus = 3 \ No newline at end of file diff --git a/detector/YOLOv3/cfg/yolo-voc.cfg b/detector/YOLOv3/cfg/yolo-voc.cfg new file mode 100644 index 0000000000000000000000000000000000000000..d5bdfc1c5bf2d34885d7614d76d980c90373f89a --- /dev/null +++ b/detector/YOLOv3/cfg/yolo-voc.cfg @@ -0,0 +1,258 @@ +[net] +# Testing +batch=64 +subdivisions=8 +# Training +# batch=64 +# subdivisions=8 +height=416 +width=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +burn_in=1000 +max_batches = 80200 +policy=steps +steps=-1,500,40000,60000 +scales=0.1,10,.1,.1 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + + +####### + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[route] +layers=-9 + +[convolutional] +batch_normalize=1 +size=1 +stride=1 +pad=1 +filters=64 +activation=leaky + +[reorg] +stride=2 + +[route] +layers=-1,-4 + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=125 +activation=linear + + +[region] +anchors = 1.3221, 1.73145, 3.19275, 4.00944, 5.05587, 8.09892, 9.47112, 4.84053, 11.2364, 10.0071 +bias_match=1 +classes=20 +coords=4 +num=5 +softmax=1 +jitter=.3 +rescore=1 + 
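+# rescore=1 regresses objectness toward the predicted box's IoU with its ground truth instead of a constant 1 (see region_layer.py)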
+object_scale=5 +noobject_scale=1 +class_scale=1 +coord_scale=1 + +absolute=1 +thresh = .6 +random=1 diff --git a/detector/YOLOv3/cfg/yolo.cfg b/detector/YOLOv3/cfg/yolo.cfg new file mode 100644 index 0000000000000000000000000000000000000000..2a0cd98fbd07c94aa0840c528a12b1b60a004928 --- /dev/null +++ b/detector/YOLOv3/cfg/yolo.cfg @@ -0,0 +1,258 @@ +[net] +# Testing +batch=1 +subdivisions=1 +# Training +# batch=64 +# subdivisions=8 +width=416 +height=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +burn_in=1000 +max_batches = 500200 +policy=steps +steps=400000,450000 +scales=.1,.1 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + + +####### + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[route] +layers=-9 + +[convolutional] +batch_normalize=1 +size=1 +stride=1 +pad=1 +filters=64 +activation=leaky + +[reorg] +stride=2 + +[route] +layers=-1,-4 + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=425 +activation=linear + + +[region] +anchors = 0.57273, 0.677385, 1.87446, 2.06253, 3.33843, 5.47434, 7.88282, 3.52778, 9.77052, 9.16828 +bias_match=1 +classes=80 +coords=4 +num=5 +softmax=1 +jitter=.3 +rescore=1 + +object_scale=5 +noobject_scale=1 +class_scale=1 +coord_scale=1 + +absolute=1 +thresh = .6 +random=1 diff --git a/detector/YOLOv3/cfg/yolo_v3.cfg b/detector/YOLOv3/cfg/yolo_v3.cfg new file mode 100644 index 0000000000000000000000000000000000000000..938ffff23f106d65290faae217f6a9b0a715c023 --- /dev/null +++ 
b/detector/YOLOv3/cfg/yolo_v3.cfg @@ -0,0 +1,789 @@ +[net] +# Testing +# batch=1 +# subdivisions=1 +# Training +batch=64 +subdivisions=16 +width=608 +height=608 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +burn_in=1000 +max_batches = 500200 +policy=steps +steps=400000,450000 +scales=.1,.1 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +# Downsample + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=32 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +# Downsample + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +# Downsample + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +# Downsample + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 
+filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +# Downsample + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +[shortcut] +from=-3 +activation=linear + +###################### + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + + +[yolo] +mask = 6,7,8 +anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 +classes=80 +num=9 
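+# mask 6,7,8 picks the three largest anchors (116,90  156,198  373,326) for this lowest-resolution detection head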
+jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 + + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = -1, 61 + + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + + +[yolo] +mask = 3,4,5 +anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 +classes=80 +num=9 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 + + + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = -1, 36 + + + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + + +[yolo] +mask = 0,1,2 +anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 +classes=80 +num=9 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 + diff --git a/detector/YOLOv3/cfg/yolov3-tiny.cfg b/detector/YOLOv3/cfg/yolov3-tiny.cfg new file mode 100644 index 0000000000000000000000000000000000000000..cfca3cfa6415b7b61eae238aa71107dedbe5d607 --- /dev/null +++ b/detector/YOLOv3/cfg/yolov3-tiny.cfg @@ -0,0 +1,182 @@ +[net] +# Testing +batch=1 +subdivisions=1 +# Training +# batch=64 +# subdivisions=2 +width=416 +height=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.001 +burn_in=1000 +max_batches = 500200 +policy=steps +steps=400000,450000 +scales=.1,.1 + +[convolutional] +batch_normalize=1 +filters=16 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[maxpool] +size=2 +stride=1 + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=1 +pad=1 +activation=leaky + +########### + +[convolutional] +batch_normalize=1 +filters=256 +size=1 
+stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + + + +[yolo] +mask = 3,4,5 +anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319 +classes=80 +num=6 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = -1, 8 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + +[yolo] +mask = 0,1,2 +anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319 +classes=80 +num=6 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 diff --git a/detector/YOLOv3/darknet.py b/detector/YOLOv3/darknet.py new file mode 100644 index 0000000000000000000000000000000000000000..dadc290f823a5e440e31e8e2e3f962a6a9d3b235 --- /dev/null +++ b/detector/YOLOv3/darknet.py @@ -0,0 +1,445 @@ +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +from .cfg import * +from .region_layer import RegionLayer +from .yolo_layer import YoloLayer + + +class MaxPoolStride1(nn.Module): + def __init__(self): + super(MaxPoolStride1, self).__init__() + + def forward(self, x): + x = F.max_pool2d(F.pad(x, (0, 1, 0, 1), mode='replicate'), 2, stride=1) + return x + + +class Upsample(nn.Module): + def __init__(self, stride=2): + super(Upsample, self).__init__() + self.stride = stride + + def forward(self, x): + stride = self.stride + assert (x.data.dim() == 4) + B = x.data.size(0) + C = x.data.size(1) + H = x.data.size(2) + W = x.data.size(3) + ws = stride + hs = stride + x = x.view(B, C, H, 1, W, 1).expand(B, C, H, hs, W, ws).contiguous().view(B, C, H * hs, W * ws) + return x + + +class Reorg(nn.Module): + def __init__(self, stride=2): + super(Reorg, self).__init__() + self.stride = stride + + def forward(self, x): + stride = self.stride + assert (x.data.dim() == 4) + B = x.data.size(0) + C = x.data.size(1) + H = x.data.size(2) + W = x.data.size(3) + assert (H % stride == 0) + assert (W % stride == 0) + ws = stride + hs = stride + x = x.view(B, C, H // hs, hs, W // ws, ws).transpose(3, 4).contiguous() + x = x.view(B, C, (H // hs) * (W // ws), hs * ws).transpose(2, 3).contiguous() + x = x.view(B, C, hs * ws, H // hs, W // ws).transpose(1, 2).contiguous() + x = x.view(B, hs * ws * C, H // hs, W // ws) + return x + + +class GlobalAvgPool2d(nn.Module): + def __init__(self): + super(GlobalAvgPool2d, self).__init__() + + def forward(self, x): + N = x.data.size(0) + C = x.data.size(1) + H = x.data.size(2) + W = x.data.size(3) + x = F.avg_pool2d(x, (H, W)) + x = x.view(N, C) + return x + + +# for route and shortcut +class EmptyModule(nn.Module): + def __init__(self): + super(EmptyModule, self).__init__() + + def forward(self, x): + return x + + +# support route shortcut and reorg + +class Darknet(nn.Module): + def getLossLayers(self): + loss_layers = [] + for m in self.models: + if isinstance(m, RegionLayer) or isinstance(m, YoloLayer): + loss_layers.append(m) + return loss_layers + + def __init__(self, cfgfile, use_cuda=True): + super(Darknet, self).__init__() + self.use_cuda = use_cuda + self.blocks = parse_cfg(cfgfile) + self.models = self.create_network(self.blocks) # merge conv, bn,leaky + self.loss_layers = self.getLossLayers() + + #self.width = int(self.blocks[0]['width']) + #self.height 
= int(self.blocks[0]['height']) + + if len(self.loss_layers) > 0: + last = len(self.loss_layers) - 1 + self.anchors = self.loss_layers[last].anchors + self.num_anchors = self.loss_layers[last].num_anchors + self.anchor_step = self.loss_layers[last].anchor_step + self.num_classes = self.loss_layers[last].num_classes + + # default format : major=0, minor=1 + self.header = torch.IntTensor([0, 1, 0, 0]) + self.seen = 0 + + def forward(self, x): + ind = -2 + self.loss_layers = None + outputs = dict() + out_boxes = dict() + outno = 0 + for block in self.blocks: + ind = ind + 1 + + if block['type'] == 'net': + continue + elif block['type'] in ['convolutional', 'maxpool', 'reorg', 'upsample', 'avgpool', 'softmax', 'connected']: + x = self.models[ind](x) + outputs[ind] = x + elif block['type'] == 'route': + layers = block['layers'].split(',') + layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers] + if len(layers) == 1: + x = outputs[layers[0]] + elif len(layers) == 2: + x1 = outputs[layers[0]] + x2 = outputs[layers[1]] + x = torch.cat((x1, x2), 1) + outputs[ind] = x + elif block['type'] == 'shortcut': + from_layer = int(block['from']) + activation = block['activation'] + from_layer = from_layer if from_layer > 0 else from_layer + ind + x1 = outputs[from_layer] + x2 = outputs[ind - 1] + x = x1 + x2 + if activation == 'leaky': + x = F.leaky_relu(x, 0.1, inplace=True) + elif activation == 'relu': + x = F.relu(x, inplace=True) + outputs[ind] = x + elif block['type'] in ['region', 'yolo']: + boxes = self.models[ind].get_mask_boxes(x) + out_boxes[outno] = boxes + outno += 1 + outputs[ind] = None + elif block['type'] == 'cost': + continue + else: + print('unknown type %s' % (block['type'])) + return x if outno == 0 else out_boxes + + def print_network(self): + print_cfg(self.blocks) + + def create_network(self, blocks): + models = nn.ModuleList() + + prev_filters = 3 + out_filters = [] + prev_stride = 1 + out_strides = [] + conv_id = 0 + ind = -2 + for block in blocks: + ind += 1 + if block['type'] == 'net': + prev_filters = int(block['channels']) + self.width = int(block['width']) + self.height = int(block['height']) + continue + elif block['type'] == 'convolutional': + conv_id = conv_id + 1 + batch_normalize = int(block['batch_normalize']) + filters = int(block['filters']) + kernel_size = int(block['size']) + stride = int(block['stride']) + is_pad = int(block['pad']) + pad = (kernel_size - 1) // 2 if is_pad else 0 + activation = block['activation'] + model = nn.Sequential() + if batch_normalize: + model.add_module('conv{0}'.format(conv_id), + nn.Conv2d(prev_filters, filters, kernel_size, stride, pad, bias=False)) + model.add_module('bn{0}'.format(conv_id), nn.BatchNorm2d(filters)) + #model.add_module('bn{0}'.format(conv_id), BN2d(filters)) + else: + model.add_module('conv{0}'.format(conv_id), + nn.Conv2d(prev_filters, filters, kernel_size, stride, pad)) + if activation == 'leaky': + model.add_module('leaky{0}'.format(conv_id), nn.LeakyReLU(0.1, inplace=True)) + elif activation == 'relu': + model.add_module('relu{0}'.format(conv_id), nn.ReLU(inplace=True)) + prev_filters = filters + out_filters.append(prev_filters) + prev_stride = stride * prev_stride + out_strides.append(prev_stride) + models.append(model) + elif block['type'] == 'maxpool': + pool_size = int(block['size']) + stride = int(block['stride']) + if stride > 1: + model = nn.MaxPool2d(pool_size, stride) + else: + model = MaxPoolStride1() + out_filters.append(prev_filters) + prev_stride = stride * prev_stride + 
out_strides.append(prev_stride) + models.append(model) + elif block['type'] == 'avgpool': + model = GlobalAvgPool2d() + out_filters.append(prev_filters) + models.append(model) + elif block['type'] == 'softmax': + model = nn.Softmax() + out_strides.append(prev_stride) + out_filters.append(prev_filters) + models.append(model) + elif block['type'] == 'cost': + if block['_type'] == 'sse': + model = nn.MSELoss(size_average=True) + elif block['_type'] == 'L1': + model = nn.L1Loss(size_average=True) + elif block['_type'] == 'smooth': + model = nn.SmoothL1Loss(size_average=True) + out_filters.append(1) + out_strides.append(prev_stride) + models.append(model) + elif block['type'] == 'reorg': + stride = int(block['stride']) + prev_filters = stride * stride * prev_filters + out_filters.append(prev_filters) + prev_stride = prev_stride * stride + out_strides.append(prev_stride) + models.append(Reorg(stride)) + elif block['type'] == 'upsample': + stride = int(block['stride']) + out_filters.append(prev_filters) + prev_stride = prev_stride / stride + out_strides.append(prev_stride) + #models.append(nn.Upsample(scale_factor=stride, mode='nearest')) + models.append(Upsample(stride)) + elif block['type'] == 'route': + layers = block['layers'].split(',') + ind = len(models) + layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers] + if len(layers) == 1: + prev_filters = out_filters[layers[0]] + prev_stride = out_strides[layers[0]] + elif len(layers) == 2: + assert (layers[0] == ind - 1) + prev_filters = out_filters[layers[0]] + out_filters[layers[1]] + prev_stride = out_strides[layers[0]] + out_filters.append(prev_filters) + out_strides.append(prev_stride) + models.append(EmptyModule()) + elif block['type'] == 'shortcut': + ind = len(models) + prev_filters = out_filters[ind - 1] + out_filters.append(prev_filters) + prev_stride = out_strides[ind - 1] + out_strides.append(prev_stride) + models.append(EmptyModule()) + elif block['type'] == 'connected': + filters = int(block['output']) + if block['activation'] == 'linear': + model = nn.Linear(prev_filters, filters) + elif block['activation'] == 'leaky': + model = nn.Sequential( + nn.Linear(prev_filters, filters), + nn.LeakyReLU(0.1, inplace=True)) + elif block['activation'] == 'relu': + model = nn.Sequential( + nn.Linear(prev_filters, filters), + nn.ReLU(inplace=True)) + prev_filters = filters + out_filters.append(prev_filters) + out_strides.append(prev_stride) + models.append(model) + elif block['type'] == 'region': + region_layer = RegionLayer(use_cuda=self.use_cuda) + anchors = block['anchors'].split(',') + region_layer.anchors = [float(i) for i in anchors] + region_layer.num_classes = int(block['classes']) + region_layer.num_anchors = int(block['num']) + region_layer.anchor_step = len(region_layer.anchors) // region_layer.num_anchors + region_layer.rescore = int(block['rescore']) + region_layer.object_scale = float(block['object_scale']) + region_layer.noobject_scale = float(block['noobject_scale']) + region_layer.class_scale = float(block['class_scale']) + region_layer.coord_scale = float(block['coord_scale']) + region_layer.thresh = float(block['thresh']) + out_filters.append(prev_filters) + out_strides.append(prev_stride) + models.append(region_layer) + elif block['type'] == 'yolo': + yolo_layer = YoloLayer(use_cuda=self.use_cuda) + anchors = block['anchors'].split(',') + anchor_mask = block['mask'].split(',') + yolo_layer.anchor_mask = [int(i) for i in anchor_mask] + yolo_layer.anchors = [float(i) for i in anchors] + yolo_layer.num_classes = 
int(block['classes']) + yolo_layer.num_anchors = int(block['num']) + yolo_layer.anchor_step = len(yolo_layer.anchors) // yolo_layer.num_anchors + try: + yolo_layer.rescore = int(block['rescore']) + except KeyError: + pass + yolo_layer.ignore_thresh = float(block['ignore_thresh']) + yolo_layer.truth_thresh = float(block['truth_thresh']) + yolo_layer.stride = prev_stride + yolo_layer.nth_layer = ind + yolo_layer.net_width = self.width + yolo_layer.net_height = self.height + out_filters.append(prev_filters) + out_strides.append(prev_stride) + models.append(yolo_layer) + else: + print('unknown type %s' % (block['type'])) + + return models + + def load_binfile(self, weightfile): + fp = open(weightfile, 'rb') + + version = np.fromfile(fp, count=3, dtype=np.int32) + version = [int(i) for i in version] + if version[0] * 10 + version[1] >= 2 and version[0] < 1000 and version[1] < 1000: + seen = np.fromfile(fp, count=1, dtype=np.int64) + else: + seen = np.fromfile(fp, count=1, dtype=np.int32) + self.header = torch.from_numpy(np.concatenate((version, seen), axis=0)) + self.seen = int(seen) + body = np.fromfile(fp, dtype=np.float32) + fp.close() + return body + + def load_weights(self, weightfile): + buf = self.load_binfile(weightfile) + + start = 0 + ind = -2 + for block in self.blocks: + if start >= buf.size: + break + ind = ind + 1 + if block['type'] == 'net': + continue + elif block['type'] == 'convolutional': + model = self.models[ind] + batch_normalize = int(block['batch_normalize']) + if batch_normalize: + start = load_conv_bn(buf, start, model[0], model[1]) + else: + start = load_conv(buf, start, model[0]) + elif block['type'] == 'connected': + model = self.models[ind] + if block['activation'] != 'linear': + start = load_fc(buf, start, model[0]) + else: + start = load_fc(buf, start, model) + elif block['type'] == 'maxpool': + pass + elif block['type'] == 'reorg': + pass + elif block['type'] == 'upsample': + pass + elif block['type'] == 'route': + pass + elif block['type'] == 'shortcut': + pass + elif block['type'] == 'region': + pass + elif block['type'] == 'yolo': + pass + elif block['type'] == 'avgpool': + pass + elif block['type'] == 'softmax': + pass + elif block['type'] == 'cost': + pass + else: + print('unknown type %s' % (block['type'])) + + def save_weights(self, outfile, cutoff=0): + if cutoff <= 0: + cutoff = len(self.blocks) - 1 + + fp = open(outfile, 'wb') + self.header[3] = self.seen + header = np.array(self.header[0:3].numpy(), np.int32) + header.tofile(fp) + if (self.header[0] * 10 + self.header[1]) >= 2: + seen = np.array(self.seen, np.int64) + else: + seen = np.array(self.seen, np.int32) + seen.tofile(fp) + + ind = -1 + for blockId in range(1, cutoff + 1): + ind = ind + 1 + block = self.blocks[blockId] + if block['type'] == 'convolutional': + model = self.models[ind] + batch_normalize = int(block['batch_normalize']) + if batch_normalize: + save_conv_bn(fp, model[0], model[1]) + else: + save_conv(fp, model[0]) + elif block['type'] == 'connected': + model = self.models[ind] + if block['activation'] != 'linear': + save_fc(fp, model[0]) + else: + save_fc(fp, model) + elif block['type'] == 'maxpool': + pass + elif block['type'] == 'reorg': + pass + elif block['type'] == 'upsample': + pass + elif block['type'] == 'route': + pass + elif block['type'] == 'shortcut': + pass + elif block['type'] == 'region': + pass + elif block['type'] == 'yolo': + pass + elif block['type'] == 'avgpool': + pass + elif block['type'] == 'softmax': + pass + elif block['type'] == 'cost': + pass + else: + 
print('unknown type %s' % (block['type'])) + fp.close() diff --git a/detector/YOLOv3/demo/004545.jpg b/detector/YOLOv3/demo/004545.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d249dded68682cc2543f2cef074249f1ffd7c1cd --- /dev/null +++ b/detector/YOLOv3/demo/004545.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed22f6b4c8c33e50e391e089ede14e8fa9402c623b09dbcf010e804770698fbb +size 123072 diff --git a/detector/YOLOv3/demo/results/004545.jpg b/detector/YOLOv3/demo/results/004545.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cf0a18c438e148bd8cd78df1b9388657d461d158 --- /dev/null +++ b/detector/YOLOv3/demo/results/004545.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea8275a2474926b0fbfe9a89bd5ddedfde70751459ca7ce2252958442ec95239 +size 125682 diff --git a/detector/YOLOv3/detect.py b/detector/YOLOv3/detect.py new file mode 100644 index 0000000000000000000000000000000000000000..9a091a3474ded452498ea34c7807d076a624083c --- /dev/null +++ b/detector/YOLOv3/detect.py @@ -0,0 +1,131 @@ +import sys +import time +from PIL import Image, ImageDraw +#from models.tiny_yolo import TinyYoloNet +from yolo_utils import * +from darknet import Darknet + +import cv2 + +namesfile=None +def detect(cfgfile, weightfile, imgfolder): + m = Darknet(cfgfile) + + #m.print_network() + m.load_weights(weightfile) + print('Loading weights from %s... Done!' % (weightfile)) + + # if m.num_classes == 20: + # namesfile = 'data/voc.names' + # elif m.num_classes == 80: + # namesfile = 'data/coco.names' + # else: + # namesfile = 'data/names' + + use_cuda = True + if use_cuda: + m.cuda() + + imgfiles = [x for x in os.listdir(imgfolder) if x[-4:] == '.jpg'] + imgfiles.sort() + for imgname in imgfiles: + imgfile = os.path.join(imgfolder,imgname) + + img = Image.open(imgfile).convert('RGB') + sized = img.resize((m.width, m.height)) + + #for i in range(2): + start = time.time() + boxes = do_detect(m, sized, 0.5, 0.4, use_cuda) + finish = time.time() + #if i == 1: + print('%s: Predicted in %f seconds.' % (imgfile, (finish-start))) + + class_names = load_class_names(namesfile) + img = plot_boxes(img, boxes, 'result/{}'.format(os.path.basename(imgfile)), class_names) + img = np.array(img) + cv2.imshow('{}'.format(os.path.basename(imgfolder)), img) + cv2.resizeWindow('{}'.format(os.path.basename(imgfolder)), 1000,800) + cv2.waitKey(1000) + +def detect_cv2(cfgfile, weightfile, imgfile): + import cv2 + m = Darknet(cfgfile) + + m.print_network() + m.load_weights(weightfile) + print('Loading weights from %s... Done!' % (weightfile)) + + if m.num_classes == 20: + namesfile = 'data/voc.names' + elif m.num_classes == 80: + namesfile = 'data/coco.names' + else: + namesfile = 'data/names' + + use_cuda = True + if use_cuda: + m.cuda() + + img = cv2.imread(imgfile) + sized = cv2.resize(img, (m.width, m.height)) + sized = cv2.cvtColor(sized, cv2.COLOR_BGR2RGB) + + for i in range(2): + start = time.time() + boxes = do_detect(m, sized, 0.5, 0.4, use_cuda) + finish = time.time() + if i == 1: + print('%s: Predicted in %f seconds.' % (imgfile, (finish-start))) + + class_names = load_class_names(namesfile) + plot_boxes_cv2(img, boxes, savename='predictions.jpg', class_names=class_names) + +def detect_skimage(cfgfile, weightfile, imgfile): + from skimage import io + from skimage.transform import resize + m = Darknet(cfgfile) + + m.print_network() + m.load_weights(weightfile) + print('Loading weights from %s... Done!' 
% (weightfile)) + + if m.num_classes == 20: + namesfile = 'data/voc.names' + elif m.num_classes == 80: + namesfile = 'data/coco.names' + else: + namesfile = 'data/names' + + use_cuda = True + if use_cuda: + m.cuda() + + img = io.imread(imgfile) + sized = resize(img, (m.width, m.height)) * 255 + + for i in range(2): + start = time.time() + boxes = do_detect(m, sized, 0.5, 0.4, use_cuda) + finish = time.time() + if i == 1: + print('%s: Predicted in %f seconds.' % (imgfile, (finish-start))) + + class_names = load_class_names(namesfile) + plot_boxes_cv2(img, boxes, savename='predictions.jpg', class_names=class_names) + +if __name__ == '__main__': + if len(sys.argv) == 5: + cfgfile = sys.argv[1] + weightfile = sys.argv[2] + imgfolder = sys.argv[3] + cv2.namedWindow('{}'.format(os.path.basename(imgfolder)), cv2.WINDOW_NORMAL ) + cv2.resizeWindow('{}'.format(os.path.basename(imgfolder)), 1000,800) + globals()["namesfile"] = sys.argv[4] + detect(cfgfile, weightfile, imgfolder) + #detect_cv2(cfgfile, weightfile, imgfile) + #detect_skimage(cfgfile, weightfile, imgfile) + else: + print('Usage: ') + print(' python detect.py cfgfile weightfile imgfolder names') + #detect('cfg/tiny-yolo-voc.cfg', 'tiny-yolo-voc.weights', 'data/person.jpg', version=1) diff --git a/detector/YOLOv3/detector.py b/detector/YOLOv3/detector.py new file mode 100644 index 0000000000000000000000000000000000000000..d5a0e530149419d4c6dea04ca38b9c3895dbcaef --- /dev/null +++ b/detector/YOLOv3/detector.py @@ -0,0 +1,102 @@ +import torch +import logging +import numpy as np +import cv2 + +from .darknet import Darknet +from .yolo_utils import get_all_boxes, nms, post_process, xywh_to_xyxy, xyxy_to_xywh +from .nms import boxes_nms + + +class YOLOv3(object): + def __init__(self, cfgfile, weightfile, namesfile, score_thresh=0.7, conf_thresh=0.01, nms_thresh=0.45, + is_xywh=False, use_cuda=True): + # net definition + self.net = Darknet(cfgfile) + self.net.load_weights(weightfile) + logger = logging.getLogger("root.detector") + logger.info('Loading weights from %s... Done!' % (weightfile)) + self.device = "cuda" if use_cuda else "cpu" + self.net.eval() + self.net.to(self.device) + + # constants + self.size = self.net.width, self.net.height + self.score_thresh = score_thresh + self.conf_thresh = conf_thresh + self.nms_thresh = nms_thresh + self.use_cuda = use_cuda + self.is_xywh = is_xywh + self.num_classes = self.net.num_classes + self.class_names = self.load_class_names(namesfile) + + def __call__(self, ori_img): + # img to tensor + assert isinstance(ori_img, np.ndarray), "input must be a numpy array!" + img = ori_img.astype(float) / 255. 
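+        # preprocessing: pixel values are scaled to [0, 1] above, then the image is resized to the network input (width, height) and rearranged HWC -> 1xCxHxW below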
+ + img = cv2.resize(img, self.size) + img = torch.from_numpy(img).float().permute(2, 0, 1).unsqueeze(0) + + # forward + with torch.no_grad(): + img = img.to(self.device) + out_boxes = self.net(img) + boxes = get_all_boxes(out_boxes, self.conf_thresh, self.num_classes, + use_cuda=self.use_cuda) # batch size is 1 + # boxes = nms(boxes, self.nms_thresh) + + boxes = post_process(boxes, self.net.num_classes, self.conf_thresh, self.nms_thresh)[0].cpu() + boxes = boxes[boxes[:, -2] > self.score_thresh, :] # bbox xmin ymin xmax ymax + + if len(boxes) == 0: + bbox = torch.FloatTensor([]).reshape([0, 4]) + cls_conf = torch.FloatTensor([]) + cls_ids = torch.LongTensor([]) + else: + height, width = ori_img.shape[:2] + bbox = boxes[:, :4] + if self.is_xywh: + # bbox x y w h + bbox = xyxy_to_xywh(bbox) + + bbox *= torch.FloatTensor([[width, height, width, height]]) + cls_conf = boxes[:, 5] + cls_ids = boxes[:, 6].long() + return bbox.numpy(), cls_conf.numpy(), cls_ids.numpy() + + def load_class_names(self, namesfile): + with open(namesfile, 'r', encoding='utf8') as fp: + class_names = [line.strip() for line in fp.readlines()] + return class_names + + +def demo(): + import os + from vizer.draw import draw_boxes + + yolo = YOLOv3("cfg/yolo_v3.cfg", "weight/yolov3.weights", "cfg/coco.names") + print("yolo.size =", yolo.size) + root = "./demo" + resdir = os.path.join(root, "results") + os.makedirs(resdir, exist_ok=True) + files = [os.path.join(root, file) for file in os.listdir(root) if file.endswith('.jpg')] + files.sort() + for filename in files: + img = cv2.imread(filename) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + bbox, cls_conf, cls_ids = yolo(img) + + if bbox is not None: + img = draw_boxes(img, bbox, cls_ids, cls_conf, class_name_map=yolo.class_names) + # save results + cv2.imwrite(os.path.join(resdir, os.path.basename(filename)), img[:, :, (2, 1, 0)]) + # imshow + # cv2.namedWindow("yolo", cv2.WINDOW_NORMAL) + # cv2.resizeWindow("yolo", 600,600) + # cv2.imshow("yolo",res[:,:,(2,1,0)]) + # cv2.waitKey(0) + + +if __name__ == "__main__": + demo() diff --git a/detector/YOLOv3/nms/__init__.py b/detector/YOLOv3/nms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4da70072383f47dd0190ff3831a4d946952de55d --- /dev/null +++ b/detector/YOLOv3/nms/__init__.py @@ -0,0 +1 @@ +from .nms import boxes_nms \ No newline at end of file diff --git a/detector/YOLOv3/nms/build.sh b/detector/YOLOv3/nms/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..44766a2e20f8b63b267c1d18c28d22fcfac7fdc4 --- /dev/null +++ b/detector/YOLOv3/nms/build.sh @@ -0,0 +1,5 @@ +cd ext + +python build.py build_ext develop + +cd .. 
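+
+# Building this extension is optional: when the compiled torch_extension module
+# is missing, nms/nms.py falls back to torchvision.ops.nms in its ImportError handler.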
diff --git a/detector/YOLOv3/nms/ext/__init__.py b/detector/YOLOv3/nms/ext/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/detector/YOLOv3/nms/ext/build.py b/detector/YOLOv3/nms/ext/build.py new file mode 100644 index 0000000000000000000000000000000000000000..66973bcf10fceda31ac7280e5f4f6a2b5cba8ae6 --- /dev/null +++ b/detector/YOLOv3/nms/ext/build.py @@ -0,0 +1,58 @@ +import glob +import os + +import torch +from setuptools import setup +from torch.utils.cpp_extension import CUDA_HOME +from torch.utils.cpp_extension import CppExtension +from torch.utils.cpp_extension import CUDAExtension + +requirements = ["torch"] + + +def get_extensions(): + extensions_dir = os.path.dirname(os.path.abspath(__file__)) + + main_file = glob.glob(os.path.join(extensions_dir, "*.cpp")) + source_cpu = glob.glob(os.path.join(extensions_dir, "cpu", "*.cpp")) + source_cuda = glob.glob(os.path.join(extensions_dir, "cuda", "*.cu")) + + sources = main_file + source_cpu + extension = CppExtension + + extra_compile_args = {"cxx": []} + define_macros = [] + + if torch.cuda.is_available() and CUDA_HOME is not None: + extension = CUDAExtension + sources += source_cuda + define_macros += [("WITH_CUDA", None)] + extra_compile_args["nvcc"] = [ + "-DCUDA_HAS_FP16=1", + "-D__CUDA_NO_HALF_OPERATORS__", + "-D__CUDA_NO_HALF_CONVERSIONS__", + "-D__CUDA_NO_HALF2_OPERATORS__", + ] + + sources = [os.path.join(extensions_dir, s) for s in sources] + + include_dirs = [extensions_dir] + + ext_modules = [ + extension( + "torch_extension", + sources, + include_dirs=include_dirs, + define_macros=define_macros, + extra_compile_args=extra_compile_args, + ) + ] + + return ext_modules + + +setup( + name="torch_extension", + version="0.1", + ext_modules=get_extensions(), + cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension}) diff --git a/detector/YOLOv3/nms/ext/cpu/nms_cpu.cpp b/detector/YOLOv3/nms/ext/cpu/nms_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5b3f93ca0c8ee1849997eb3bc3777dd38c359fff --- /dev/null +++ b/detector/YOLOv3/nms/ext/cpu/nms_cpu.cpp @@ -0,0 +1,75 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
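+// Greedy CPU NMS: boxes are visited in descending score order; any later box
+// whose IoU with an already-kept box reaches the threshold is suppressed.
+// Returns the indices of the surviving boxes.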
+#include "cpu/vision.h" + + +template +at::Tensor nms_cpu_kernel(const at::Tensor& dets, + const at::Tensor& scores, + const float threshold) { + AT_ASSERTM(!dets.type().is_cuda(), "dets must be a CPU tensor"); + AT_ASSERTM(!scores.type().is_cuda(), "scores must be a CPU tensor"); + AT_ASSERTM(dets.type() == scores.type(), "dets should have the same type as scores"); + + if (dets.numel() == 0) { + return at::empty({0}, dets.options().dtype(at::kLong).device(at::kCPU)); + } + + auto x1_t = dets.select(1, 0).contiguous(); + auto y1_t = dets.select(1, 1).contiguous(); + auto x2_t = dets.select(1, 2).contiguous(); + auto y2_t = dets.select(1, 3).contiguous(); + + at::Tensor areas_t = (x2_t - x1_t) * (y2_t - y1_t); + + auto order_t = std::get<1>(scores.sort(0, /* descending=*/true)); + + auto ndets = dets.size(0); + at::Tensor suppressed_t = at::zeros({ndets}, dets.options().dtype(at::kByte).device(at::kCPU)); + + auto suppressed = suppressed_t.data(); + auto order = order_t.data(); + auto x1 = x1_t.data(); + auto y1 = y1_t.data(); + auto x2 = x2_t.data(); + auto y2 = y2_t.data(); + auto areas = areas_t.data(); + + for (int64_t _i = 0; _i < ndets; _i++) { + auto i = order[_i]; + if (suppressed[i] == 1) + continue; + auto ix1 = x1[i]; + auto iy1 = y1[i]; + auto ix2 = x2[i]; + auto iy2 = y2[i]; + auto iarea = areas[i]; + + for (int64_t _j = _i + 1; _j < ndets; _j++) { + auto j = order[_j]; + if (suppressed[j] == 1) + continue; + auto xx1 = std::max(ix1, x1[j]); + auto yy1 = std::max(iy1, y1[j]); + auto xx2 = std::min(ix2, x2[j]); + auto yy2 = std::min(iy2, y2[j]); + + auto w = std::max(static_cast(0), xx2 - xx1); + auto h = std::max(static_cast(0), yy2 - yy1); + auto inter = w * h; + auto ovr = inter / (iarea + areas[j] - inter); + if (ovr >= threshold) + suppressed[j] = 1; + } + } + return at::nonzero(suppressed_t == 0).squeeze(1); +} + +at::Tensor nms_cpu(const at::Tensor& dets, + const at::Tensor& scores, + const float threshold) { + at::Tensor result; + AT_DISPATCH_FLOATING_TYPES(dets.type(), "nms", [&] { + result = nms_cpu_kernel(dets, scores, threshold); + }); + return result; +} \ No newline at end of file diff --git a/detector/YOLOv3/nms/ext/cpu/vision.h b/detector/YOLOv3/nms/ext/cpu/vision.h new file mode 100644 index 0000000000000000000000000000000000000000..b3529ad78420c37f13f341ab43411cf0e6dd778a --- /dev/null +++ b/detector/YOLOv3/nms/ext/cpu/vision.h @@ -0,0 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +#pragma once +#include + +at::Tensor nms_cpu(const at::Tensor& dets, + const at::Tensor& scores, + const float threshold); diff --git a/detector/YOLOv3/nms/ext/cuda/nms.cu b/detector/YOLOv3/nms/ext/cuda/nms.cu new file mode 100644 index 0000000000000000000000000000000000000000..2eb452510d8404d63bb9580fa1f76149b19c9b6f --- /dev/null +++ b/detector/YOLOv3/nms/ext/cuda/nms.cu @@ -0,0 +1,131 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
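+// Bitmask-parallel NMS: each kernel block compares one 64-box tile (threadsPerBlock
+// bits per unsigned long long) against another and records suppressed pairs in a
+// boxes_num x col_blocks mask; the host then performs the final greedy sweep.
+// Note: the THC helpers used here (THCCeilDiv, THCudaMalloc, THCudaFree) were
+// removed in later PyTorch releases, so this file may only build against older
+// versions; nms/nms.py falls back to torchvision.ops.nms when the extension is absent.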
+#include +#include + +#include +#include + +#include +#include + +int const threadsPerBlock = sizeof(unsigned long long) * 8; + +__device__ inline float devIoU(float const * const a, float const * const b) { + float left = max(a[0], b[0]), right = min(a[2], b[2]); + float top = max(a[1], b[1]), bottom = min(a[3], b[3]); + float width = max(right - left, 0.f), height = max(bottom - top, 0.f); + float interS = width * height; + float Sa = (a[2] - a[0]) * (a[3] - a[1]); + float Sb = (b[2] - b[0]) * (b[3] - b[1]); + return interS / (Sa + Sb - interS); +} + +__global__ void nms_kernel(const int n_boxes, const float nms_overlap_thresh, + const float *dev_boxes, unsigned long long *dev_mask) { + const int row_start = blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = + min(n_boxes - row_start * threadsPerBlock, threadsPerBlock); + const int col_size = + min(n_boxes - col_start * threadsPerBlock, threadsPerBlock); + + __shared__ float block_boxes[threadsPerBlock * 5]; + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x; + const float *cur_box = dev_boxes + cur_box_idx * 5; + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + if (devIoU(cur_box, block_boxes + i * 5) > nms_overlap_thresh) { + t |= 1ULL << i; + } + } + const int col_blocks = THCCeilDiv(n_boxes, threadsPerBlock); + dev_mask[cur_box_idx * col_blocks + col_start] = t; + } +} + +// boxes is a N x 5 tensor +at::Tensor nms_cuda(const at::Tensor boxes, float nms_overlap_thresh) { + using scalar_t = float; + AT_ASSERTM(boxes.type().is_cuda(), "boxes must be a CUDA tensor"); + auto scores = boxes.select(1, 4); + auto order_t = std::get<1>(scores.sort(0, /* descending=*/true)); + auto boxes_sorted = boxes.index_select(0, order_t); + + int boxes_num = boxes.size(0); + + const int col_blocks = THCCeilDiv(boxes_num, threadsPerBlock); + + scalar_t* boxes_dev = boxes_sorted.data(); + + THCState *state = at::globalContext().lazyInitCUDA(); // TODO replace with getTHCState + + unsigned long long* mask_dev = NULL; + //THCudaCheck(THCudaMalloc(state, (void**) &mask_dev, + // boxes_num * col_blocks * sizeof(unsigned long long))); + + mask_dev = (unsigned long long*) THCudaMalloc(state, boxes_num * col_blocks * sizeof(unsigned long long)); + + dim3 blocks(THCCeilDiv(boxes_num, threadsPerBlock), + THCCeilDiv(boxes_num, threadsPerBlock)); + dim3 threads(threadsPerBlock); + nms_kernel<<>>(boxes_num, + nms_overlap_thresh, + boxes_dev, + mask_dev); + + std::vector mask_host(boxes_num * col_blocks); + THCudaCheck(cudaMemcpy(&mask_host[0], + mask_dev, + sizeof(unsigned long long) * boxes_num * col_blocks, + cudaMemcpyDeviceToHost)); + + std::vector remv(col_blocks); + memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks); + + at::Tensor keep = at::empty({boxes_num}, 
boxes.options().dtype(at::kLong).device(at::kCPU)); + int64_t* keep_out = keep.data(); + + int num_to_keep = 0; + for (int i = 0; i < boxes_num; i++) { + int nblock = i / threadsPerBlock; + int inblock = i % threadsPerBlock; + + if (!(remv[nblock] & (1ULL << inblock))) { + keep_out[num_to_keep++] = i; + unsigned long long *p = &mask_host[0] + i * col_blocks; + for (int j = nblock; j < col_blocks; j++) { + remv[j] |= p[j]; + } + } + } + + THCudaFree(state, mask_dev); + // TODO improve this part + return std::get<0>(order_t.index({ + keep.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep).to( + order_t.device(), keep.scalar_type()) + }).sort(0, false)); +} \ No newline at end of file diff --git a/detector/YOLOv3/nms/ext/cuda/vision.h b/detector/YOLOv3/nms/ext/cuda/vision.h new file mode 100644 index 0000000000000000000000000000000000000000..b5bd90778664373426fcec2c0f56d1f670c2a942 --- /dev/null +++ b/detector/YOLOv3/nms/ext/cuda/vision.h @@ -0,0 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +#pragma once +#include + +at::Tensor nms_cuda(const at::Tensor boxes, float nms_overlap_thresh); + + diff --git a/detector/YOLOv3/nms/ext/nms.h b/detector/YOLOv3/nms/ext/nms.h new file mode 100644 index 0000000000000000000000000000000000000000..312fed4a7cb7c1bc6c2345b5e5d678cc6c1a7141 --- /dev/null +++ b/detector/YOLOv3/nms/ext/nms.h @@ -0,0 +1,28 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +#pragma once +#include "cpu/vision.h" + +#ifdef WITH_CUDA +#include "cuda/vision.h" +#endif + + +at::Tensor nms(const at::Tensor& dets, + const at::Tensor& scores, + const float threshold) { + + if (dets.type().is_cuda()) { +#ifdef WITH_CUDA + // TODO raise error if not compiled with CUDA + if (dets.numel() == 0) + return at::empty({0}, dets.options().dtype(at::kLong).device(at::kCPU)); + auto b = at::cat({dets, scores.unsqueeze(1)}, 1); + return nms_cuda(b, threshold); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + + at::Tensor result = nms_cpu(dets, scores, threshold); + return result; +} diff --git a/detector/YOLOv3/nms/ext/vision.cpp b/detector/YOLOv3/nms/ext/vision.cpp new file mode 100644 index 0000000000000000000000000000000000000000..726b77bd41e3f57b27d1c6119fed106f40696c0d --- /dev/null +++ b/detector/YOLOv3/nms/ext/vision.cpp @@ -0,0 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +#include "nms.h" + + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("nms", &nms, "non-maximum suppression"); +} diff --git a/detector/YOLOv3/nms/nms.py b/detector/YOLOv3/nms/nms.py new file mode 100644 index 0000000000000000000000000000000000000000..1a921a66a4566085dc34c82cd5da15f98b71d27e --- /dev/null +++ b/detector/YOLOv3/nms/nms.py @@ -0,0 +1,35 @@ +import warnings +import torchvision + +try: + import torch + import torch_extension + + _nms = torch_extension.nms +except ImportError: + # if torchvision.__version__ >= '0.3.0': + # _nms = torchvision.ops.nms + # else: + # from .python_nms import python_nms + # + # _nms = python_nms + # warnings.warn('You are using python version NMS, which is very very slow. Try compile c++ NMS ' + # 'using `cd ext & python build.py build_ext develop`') + _nms = torchvision.ops.nms + + +def boxes_nms(boxes, scores, nms_thresh, max_count=-1): + """ Performs non-maximum suppression, run on GPU or CPU according to + boxes's device. 
diff --git a/detector/YOLOv3/nms/nms.py b/detector/YOLOv3/nms/nms.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a921a66a4566085dc34c82cd5da15f98b71d27e
--- /dev/null
+++ b/detector/YOLOv3/nms/nms.py
@@ -0,0 +1,35 @@
+import warnings
+import torchvision
+
+try:
+    import torch
+    import torch_extension
+
+    _nms = torch_extension.nms
+except ImportError:
+    # if torchvision.__version__ >= '0.3.0':
+    #     _nms = torchvision.ops.nms
+    # else:
+    #     from .python_nms import python_nms
+    #
+    #     _nms = python_nms
+    #     warnings.warn('You are using python version NMS, which is very very slow. Try compile c++ NMS '
+    #                   'using `cd ext & python build.py build_ext develop`')
+    _nms = torchvision.ops.nms
+
+
+def boxes_nms(boxes, scores, nms_thresh, max_count=-1):
+    """ Performs non-maximum suppression, run on GPU or CPU according to
+    the boxes' device.
+    Args:
+        boxes (Tensor): `xyxy` mode boxes in absolute or relative coordinates, shape (n, 4)
+        scores (Tensor): confidence scores, shape (n,)
+        nms_thresh (float): IoU threshold above which a lower-scored box is suppressed
+        max_count (int): if > 0, only the top max_count boxes are kept after non-maximum suppression
+    Returns:
+        indices of the kept boxes.
+    """
+    keep = _nms(boxes, scores, nms_thresh)
+    if max_count > 0:
+        keep = keep[:max_count]
+    return keep
diff --git a/detector/YOLOv3/nms/python_nms.py b/detector/YOLOv3/nms/python_nms.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd8a4ba23e7b849f0de540ecf6902a5572a18e49
--- /dev/null
+++ b/detector/YOLOv3/nms/python_nms.py
@@ -0,0 +1,59 @@
+import torch
+import numpy as np
+
+
+def python_nms(boxes, scores, nms_thresh):
+    """ Performs non-maximum suppression using numpy.
+    Args:
+        boxes (Tensor): `xyxy` mode boxes in absolute coordinates (relative
+            coordinates are not supported), shape (n, 4)
+        scores (Tensor): confidence scores, shape (n,)
+        nms_thresh (float): IoU threshold above which a lower-scored box is suppressed
+    Returns:
+        indices of the kept boxes.
+    """
+    if boxes.numel() == 0:
+        return torch.empty((0,), dtype=torch.long)
+    # Use numpy to run nms. Running nms in PyTorch code on CPU is really slow.
+    origin_device = boxes.device
+    cpu_device = torch.device('cpu')
+    boxes = boxes.to(cpu_device).numpy()
+    scores = scores.to(cpu_device).numpy()
+
+    x1 = boxes[:, 0]
+    y1 = boxes[:, 1]
+    x2 = boxes[:, 2]
+    y2 = boxes[:, 3]
+    areas = (x2 - x1) * (y2 - y1)
+    order = np.argsort(scores)[::-1]
+    num_detections = boxes.shape[0]
+    suppressed = np.zeros((num_detections,), dtype=bool)
+    for _i in range(num_detections):
+        i = order[_i]
+        if suppressed[i]:
+            continue
+        ix1 = x1[i]
+        iy1 = y1[i]
+        ix2 = x2[i]
+        iy2 = y2[i]
+        iarea = areas[i]
+
+        for _j in range(_i + 1, num_detections):
+            j = order[_j]
+            if suppressed[j]:
+                continue
+
+            xx1 = max(ix1, x1[j])
+            yy1 = max(iy1, y1[j])
+            xx2 = min(ix2, x2[j])
+            yy2 = min(iy2, y2[j])
+            w = max(0, xx2 - xx1)
+            h = max(0, yy2 - yy1)
+
+            inter = w * h
+            ovr = inter / (iarea + areas[j] - inter)
+            if ovr >= nms_thresh:
+                suppressed[j] = True
+    keep = np.nonzero(suppressed == 0)[0]
+    keep = torch.from_numpy(keep).to(origin_device)
+    return keep
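
A quick, hedged usage sketch of the two entry points (assuming the repository root is on `PYTHONPATH` and the packages are importable; the boxes are made up for illustration):

```python
import torch
from detector.YOLOv3.nms.nms import boxes_nms
from detector.YOLOv3.nms.python_nms import python_nms

boxes = torch.tensor([[0., 0., 10., 10.],     # box 0
                      [1., 1., 10., 10.],     # overlaps box 0 with IoU = 0.81
                      [20., 20., 30., 30.]])  # disjoint from both
scores = torch.tensor([0.9, 0.8, 0.7])

keep_fast = boxes_nms(boxes, scores, nms_thresh=0.5)   # compiled ext or torchvision fallback
keep_slow = python_nms(boxes, scores, nms_thresh=0.5)  # slow numpy reference
# Both keep boxes 0 and 2; box 1 is suppressed by box 0.
```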
diff --git a/detector/YOLOv3/region_layer.py b/detector/YOLOv3/region_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..c55ef376e3177f718956827713897a28216c8530
--- /dev/null
+++ b/detector/YOLOv3/region_layer.py
@@ -0,0 +1,185 @@
+import math
+import sys
+import time
+import torch
+import torch.nn as nn
+from .yolo_utils import bbox_iou, multi_bbox_ious, convert2cpu
+
+
+class RegionLayer(nn.Module):
+    def __init__(self, num_classes=0, anchors=[], num_anchors=1, use_cuda=None):
+        super(RegionLayer, self).__init__()
+        use_cuda = torch.cuda.is_available() and (True if use_cuda is None else use_cuda)
+        self.device = torch.device("cuda" if use_cuda else "cpu")
+        self.num_classes = num_classes
+        self.num_anchors = num_anchors
+        self.anchor_step = len(anchors) // num_anchors
+        # self.anchors = torch.stack(torch.FloatTensor(anchors).split(self.anchor_step)).to(self.device)
+        self.anchors = torch.FloatTensor(anchors).view(self.num_anchors, self.anchor_step).to(self.device)
+        self.rescore = 1
+        self.coord_scale = 1
+        self.noobject_scale = 1
+        self.object_scale = 5
+        self.class_scale = 1
+        self.thresh = 0.6
+        self.seen = 0
+
+    def build_targets(self, pred_boxes, target, nH, nW):
+        nB = target.size(0)
+        nA = self.num_anchors
+        conf_mask = torch.ones(nB, nA, nH, nW) * self.noobject_scale
+        coord_mask = torch.zeros(nB, nA, nH, nW)
+        cls_mask = torch.zeros(nB, nA, nH, nW)
+        tcoord = torch.zeros(4, nB, nA, nH, nW)
+        tconf = torch.zeros(nB, nA, nH, nW)
+        tcls = torch.zeros(nB, nA, nH, nW)
+
+        nAnchors = nA * nH * nW
+        nPixels = nH * nW
+        nGT = 0  # number of ground truths
+        nRecall = 0
+        # it works faster on CPU than on GPU.
+        anchors = self.anchors.to("cpu")
+
+        if self.seen < 12800:
+            tcoord[0].fill_(0.5)
+            tcoord[1].fill_(0.5)
+            coord_mask.fill_(1)
+
+        for b in range(nB):
+            cur_pred_boxes = pred_boxes[b * nAnchors:(b + 1) * nAnchors].t()
+            cur_ious = torch.zeros(nAnchors)
+            tbox = target[b].view(-1, 5).to("cpu")
+            for t in range(50):
+                if tbox[t][1] == 0:
+                    break
+                gx, gw = [i * nW for i in (tbox[t][1], tbox[t][3])]
+                gy, gh = [i * nH for i in (tbox[t][2], tbox[t][4])]
+                cur_gt_boxes = torch.FloatTensor([gx, gy, gw, gh]).repeat(nAnchors, 1).t()
+                cur_ious = torch.max(cur_ious, multi_bbox_ious(cur_pred_boxes, cur_gt_boxes, x1y1x2y2=False))
+            ignore_ix = cur_ious > self.thresh
+            conf_mask[b][ignore_ix.view(nA, nH, nW)] = 0
+
+            for t in range(50):
+                if tbox[t][1] == 0:
+                    break
+                nGT += 1
+                gx, gw = [i * nW for i in (tbox[t][1], tbox[t][3])]
+                gy, gh = [i * nH for i in (tbox[t][2], tbox[t][4])]
+                gw, gh = gw.float(), gh.float()
+                gi, gj = int(gx), int(gy)
+
+                tmp_gt_boxes = torch.FloatTensor([0, 0, gw, gh]).repeat(nA, 1).t()
+                anchor_boxes = torch.cat((torch.zeros(nA, 2), anchors), 1).t()
+                tmp_ious = multi_bbox_ious(tmp_gt_boxes, anchor_boxes, x1y1x2y2=False)
+                best_iou, best_n = torch.max(tmp_ious, 0)
+
+                if self.anchor_step == 4:  # this part is not tested.
+                    tmp_ious_mask = (tmp_ious == best_iou)
+                    if tmp_ious_mask.sum() > 0:
+                        gt_pos = torch.FloatTensor([gi, gj, gx, gy]).repeat(nA, 1).t()
+                        an_pos = anchor_boxes[4:6]  # anchor_boxes consist of [0 0 aw ah ax ay]
+                        dist = pow(((gt_pos[0] + an_pos[0]) - gt_pos[2]), 2) + pow(
+                            ((gt_pos[1] + an_pos[1]) - gt_pos[3]), 2)
+                        dist[~tmp_ious_mask] = 10000  # a large distance for anchors whose IoU is not the best
+                        _, best_n = torch.min(dist, 0)
+
+                gt_box = torch.FloatTensor([gx, gy, gw, gh])
+                pred_box = pred_boxes[b * nAnchors + best_n * nPixels + gj * nW + gi]
+                iou = bbox_iou(gt_box, pred_box, x1y1x2y2=False)
+
+                coord_mask[b][best_n][gj][gi] = 1
+                cls_mask[b][best_n][gj][gi] = 1
+                conf_mask[b][best_n][gj][gi] = self.object_scale
+                tcoord[0][b][best_n][gj][gi] = gx - gi
+                tcoord[1][b][best_n][gj][gi] = gy - gj
+                tcoord[2][b][best_n][gj][gi] = math.log(gw / anchors[best_n][0])
+                tcoord[3][b][best_n][gj][gi] = math.log(gh / anchors[best_n][1])
+                tcls[b][best_n][gj][gi] = tbox[t][0]
+                tconf[b][best_n][gj][gi] = iou if self.rescore else 1.
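+                # Target encoding recap: (gx - gi, gy - gj) are the ground-truth
+                # offsets inside cell (gi, gj), and the log ratios invert the
+                # decode step w = anchor_w * exp(tw), h = anchor_h * exp(th).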
+ if iou > 0.5: + nRecall += 1 + + return nGT, nRecall, coord_mask, conf_mask, cls_mask, tcoord, tconf, tcls + + def get_mask_boxes(self, output): + if not isinstance(self.anchors, torch.Tensor): + self.anchors = torch.FloatTensor(self.anchors).view(self.num_anchors, self.anchor_step).to(self.device) + masked_anchors = self.anchors.view(-1) + num_anchors = torch.IntTensor([self.num_anchors]).to(self.device) + return {'x': output, 'a': masked_anchors, 'n': num_anchors} + + def forward(self, output, target): + # output : BxAs*(4+1+num_classes)*H*W + t0 = time.time() + nB = output.data.size(0) # batch size + nA = self.num_anchors + nC = self.num_classes + nH = output.data.size(2) + nW = output.data.size(3) + cls_anchor_dim = nB * nA * nH * nW + + if not isinstance(self.anchors, torch.Tensor): + self.anchors = torch.FloatTensor(self.anchors).view(self.num_anchors, self.anchor_step).to(self.device) + + output = output.view(nB, nA, (5 + nC), nH, nW) + cls_grid = torch.linspace(5, 5 + nC - 1, nC).long().to(self.device) + ix = torch.LongTensor(range(0, 5)).to(self.device) + pred_boxes = torch.FloatTensor(4, cls_anchor_dim).to(self.device) + + coord = output.index_select(2, ix[0:4]).view(nB * nA, -1, nH * nW).transpose(0, 1).contiguous().view(-1, + cls_anchor_dim) # x, y, w, h + coord[0:2] = coord[0:2].sigmoid() # x, y + conf = output.index_select(2, ix[4]).view(nB, nA, nH, nW).sigmoid() + cls = output.index_select(2, cls_grid) + cls = cls.view(nB * nA, nC, nH * nW).transpose(1, 2).contiguous().view(cls_anchor_dim, nC) + + t1 = time.time() + grid_x = torch.linspace(0, nW - 1, nW).repeat(nB * nA, nH, 1).view(cls_anchor_dim).to(self.device) + grid_y = torch.linspace(0, nH - 1, nH).repeat(nW, 1).t().repeat(nB * nA, 1, 1).view(cls_anchor_dim).to( + self.device) + anchor_w = self.anchors.index_select(1, ix[0]).repeat(1, nB * nH * nW).view(cls_anchor_dim) + anchor_h = self.anchors.index_select(1, ix[1]).repeat(1, nB * nH * nW).view(cls_anchor_dim) + + pred_boxes[0] = coord[0] + grid_x + pred_boxes[1] = coord[1] + grid_y + pred_boxes[2] = coord[2].exp() * anchor_w + pred_boxes[3] = coord[3].exp() * anchor_h + # for build_targets. it works faster on CPU than on GPU + pred_boxes = convert2cpu(pred_boxes.transpose(0, 1).contiguous().view(-1, 4)).detach() + + t2 = time.time() + nGT, nRecall, coord_mask, conf_mask, cls_mask, tcoord, tconf, tcls = \ + self.build_targets(pred_boxes, target.detach(), nH, nW) + + cls_mask = (cls_mask == 1) + tcls = tcls[cls_mask].long().view(-1) + cls_mask = cls_mask.view(-1, 1).repeat(1, nC).to(self.device) + cls = cls[cls_mask].view(-1, nC) + + nProposals = int((conf > 0.25).sum()) + + tcoord = tcoord.view(4, cls_anchor_dim).to(self.device) + tconf, tcls = tconf.to(self.device), tcls.to(self.device) + coord_mask, conf_mask = coord_mask.view(cls_anchor_dim).to(self.device), conf_mask.sqrt().to(self.device) + + t3 = time.time() + loss_coord = self.coord_scale * nn.MSELoss(size_average=False)(coord * coord_mask, tcoord * coord_mask) / 2 + # sqrt(object_scale)/2 is almost equal to 1. 
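+        # conf_mask holds noobject_scale (1) for background cells, object_scale (5)
+        # for assigned cells, and 0 for ignored predictions; it was square-rooted
+        # above, so squaring inside the MSE restores the intended per-cell weighting.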
+ loss_conf = nn.MSELoss(size_average=False)(conf * conf_mask, tconf * conf_mask) / 2 + loss_cls = self.class_scale * nn.CrossEntropyLoss(size_average=False)(cls, tcls) if cls.size(0) > 0 else 0 + loss = loss_coord + loss_conf + loss_cls + t4 = time.time() + if False: + print('-' * 30) + print(' activation : %f' % (t1 - t0)) + print(' create pred_boxes : %f' % (t2 - t1)) + print(' build targets : %f' % (t3 - t2)) + print(' create loss : %f' % (t4 - t3)) + print(' total : %f' % (t4 - t0)) + print('%d: nGT %3d, nRC %3d, nPP %3d, loss: box %6.3f, conf %6.3f, class %6.3f, total %7.3f' + % (self.seen, nGT, nRecall, nProposals, loss_coord, loss_conf, loss_cls, loss)) + if math.isnan(loss.item()): + print(conf, tconf) + sys.exit(0) + return loss diff --git a/detector/YOLOv3/weight/.gitkeep b/detector/YOLOv3/weight/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/detector/YOLOv3/yolo_layer.py b/detector/YOLOv3/yolo_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..578969faf98a06f9148038e74eea7d7ada9c1251 --- /dev/null +++ b/detector/YOLOv3/yolo_layer.py @@ -0,0 +1,181 @@ +import math +import sys +import time +import torch +import torch.nn as nn +from .yolo_utils import bbox_iou, multi_bbox_ious, convert2cpu + + +class YoloLayer(nn.Module): + def __init__(self, anchor_mask=[], num_classes=0, anchors=[], num_anchors=1, use_cuda=None): + super(YoloLayer, self).__init__() + use_cuda = torch.cuda.is_available() and (True if use_cuda is None else use_cuda) + self.device = torch.device("cuda" if use_cuda else "cpu") + + self.anchor_mask = anchor_mask + self.num_classes = num_classes + self.anchors = anchors + self.num_anchors = num_anchors + self.anchor_step = len(anchors) // num_anchors + self.rescore = 0 + self.ignore_thresh = 0.5 + self.truth_thresh = 1. + self.stride = 32 + self.nth_layer = 0 + self.seen = 0 + self.net_width = 0 + self.net_height = 0 + + def get_mask_boxes(self, output): + masked_anchors = [] + for m in self.anchor_mask: + masked_anchors += self.anchors[m * self.anchor_step:(m + 1) * self.anchor_step] + masked_anchors = [anchor / self.stride for anchor in masked_anchors] + + masked_anchors = torch.FloatTensor(masked_anchors).to(self.device) + num_anchors = torch.IntTensor([len(self.anchor_mask)]).to(self.device) + return {'x': output, 'a': masked_anchors, 'n': num_anchors} + + def build_targets(self, pred_boxes, target, anchors, nA, nH, nW): + nB = target.size(0) + anchor_step = anchors.size(1) # anchors[nA][anchor_step] + conf_mask = torch.ones(nB, nA, nH, nW) + coord_mask = torch.zeros(nB, nA, nH, nW) + cls_mask = torch.zeros(nB, nA, nH, nW) + tcoord = torch.zeros(4, nB, nA, nH, nW) + tconf = torch.zeros(nB, nA, nH, nW) + tcls = torch.zeros(nB, nA, nH, nW) + twidth, theight = self.net_width / self.stride, self.net_height / self.stride + + nAnchors = nA * nH * nW + nPixels = nH * nW + nGT = 0 + nRecall = 0 + nRecall75 = 0 + + # it works faster on CPU than on GPU. 
+ anchors = anchors.to("cpu") + + for b in range(nB): + cur_pred_boxes = pred_boxes[b * nAnchors:(b + 1) * nAnchors].t() + cur_ious = torch.zeros(nAnchors) + tbox = target[b].view(-1, 5).to("cpu") + for t in range(50): + if tbox[t][1] == 0: + break + gx, gy = tbox[t][1] * nW, tbox[t][2] * nH + gw, gh = tbox[t][3] * twidth, tbox[t][4] * theight + cur_gt_boxes = torch.FloatTensor([gx, gy, gw, gh]).repeat(nAnchors, 1).t() + cur_ious = torch.max(cur_ious, multi_bbox_ious(cur_pred_boxes, cur_gt_boxes, x1y1x2y2=False)) + ignore_ix = cur_ious > self.ignore_thresh + conf_mask[b][ignore_ix.view(nA, nH, nW)] = 0 + + for t in range(50): + if tbox[t][1] == 0: + break + nGT += 1 + gx, gy = tbox[t][1] * nW, tbox[t][2] * nH + gw, gh = tbox[t][3] * twidth, tbox[t][4] * theight + gw, gh = gw.float(), gh.float() + gi, gj = int(gx), int(gy) + + tmp_gt_boxes = torch.FloatTensor([0, 0, gw, gh]).repeat(nA, 1).t() + anchor_boxes = torch.cat((torch.zeros(nA, anchor_step), anchors), 1).t() + _, best_n = torch.max(multi_bbox_ious(tmp_gt_boxes, anchor_boxes, x1y1x2y2=False), 0) + + gt_box = torch.FloatTensor([gx, gy, gw, gh]) + pred_box = pred_boxes[b * nAnchors + best_n * nPixels + gj * nW + gi] + iou = bbox_iou(gt_box, pred_box, x1y1x2y2=False) + + coord_mask[b][best_n][gj][gi] = 1 + cls_mask[b][best_n][gj][gi] = 1 + conf_mask[b][best_n][gj][gi] = 1 + tcoord[0][b][best_n][gj][gi] = gx - gi + tcoord[1][b][best_n][gj][gi] = gy - gj + tcoord[2][b][best_n][gj][gi] = math.log(gw / anchors[best_n][0]) + tcoord[3][b][best_n][gj][gi] = math.log(gh / anchors[best_n][1]) + tcls[b][best_n][gj][gi] = tbox[t][0] + tconf[b][best_n][gj][gi] = iou if self.rescore else 1. + + if iou > 0.5: + nRecall += 1 + if iou > 0.75: + nRecall75 += 1 + + return nGT, nRecall, nRecall75, coord_mask, conf_mask, cls_mask, tcoord, tconf, tcls + + def forward(self, output, target): + # output : BxAs*(4+1+num_classes)*H*W + mask_tuple = self.get_mask_boxes(output) + t0 = time.time() + nB = output.data.size(0) # batch size + nA = mask_tuple['n'].item() # num_anchors + nC = self.num_classes + nH = output.data.size(2) + nW = output.data.size(3) + anchor_step = mask_tuple['a'].size(0) // nA + anchors = mask_tuple['a'].view(nA, anchor_step).to(self.device) + cls_anchor_dim = nB * nA * nH * nW + + output = output.view(nB, nA, (5 + nC), nH, nW) + cls_grid = torch.linspace(5, 5 + nC - 1, nC).long().to(self.device) + ix = torch.LongTensor(range(0, 5)).to(self.device) + pred_boxes = torch.FloatTensor(4, cls_anchor_dim).to(self.device) + + coord = output.index_select(2, ix[0:4]).view(nB * nA, -1, nH * nW).transpose(0, 1).contiguous().view(-1, + cls_anchor_dim) # x, y, w, h + coord[0:2] = coord[0:2].sigmoid() # x, y + conf = output.index_select(2, ix[4]).view(nB, nA, nH, nW).sigmoid() + cls = output.index_select(2, cls_grid) + cls = cls.view(nB * nA, nC, nH * nW).transpose(1, 2).contiguous().view(cls_anchor_dim, nC) + + t1 = time.time() + grid_x = torch.linspace(0, nW - 1, nW).repeat(nB * nA, nH, 1).view(cls_anchor_dim).to(self.device) + grid_y = torch.linspace(0, nH - 1, nH).repeat(nW, 1).t().repeat(nB * nA, 1, 1).view(cls_anchor_dim).to( + self.device) + anchor_w = anchors.index_select(1, ix[0]).repeat(1, nB * nH * nW).view(cls_anchor_dim) + anchor_h = anchors.index_select(1, ix[1]).repeat(1, nB * nH * nW).view(cls_anchor_dim) + + pred_boxes[0] = coord[0] + grid_x + pred_boxes[1] = coord[1] + grid_y + pred_boxes[2] = coord[2].exp() * anchor_w + pred_boxes[3] = coord[3].exp() * anchor_h + # for build_targets. 
it works faster on CPU than on GPU + pred_boxes = convert2cpu(pred_boxes.transpose(0, 1).contiguous().view(-1, 4)).detach() + + t2 = time.time() + nGT, nRecall, nRecall75, coord_mask, conf_mask, cls_mask, tcoord, tconf, tcls = \ + self.build_targets(pred_boxes, target.detach(), anchors.detach(), nA, nH, nW) + + cls_mask = (cls_mask == 1) + tcls = tcls[cls_mask].long().view(-1) + cls_mask = cls_mask.view(-1, 1).repeat(1, nC).to(self.device) + cls = cls[cls_mask].view(-1, nC) + + nProposals = int((conf > 0.25).sum()) + + tcoord = tcoord.view(4, cls_anchor_dim).to(self.device) + tconf, tcls = tconf.to(self.device), tcls.to(self.device) + coord_mask, conf_mask = coord_mask.view(cls_anchor_dim).to(self.device), conf_mask.to(self.device) + + t3 = time.time() + loss_coord = nn.MSELoss(size_average=False)(coord * coord_mask, tcoord * coord_mask) / 2 + loss_conf = nn.MSELoss(size_average=False)(conf * conf_mask, tconf * conf_mask) + loss_cls = nn.CrossEntropyLoss(size_average=False)(cls, tcls) if cls.size(0) > 0 else 0 + loss = loss_coord + loss_conf + loss_cls + + t4 = time.time() + if False: + print('-' * 30) + print(' activation : %f' % (t1 - t0)) + print(' create pred_boxes : %f' % (t2 - t1)) + print(' build targets : %f' % (t3 - t2)) + print(' create loss : %f' % (t4 - t3)) + print(' total : %f' % (t4 - t0)) + print( + '%d: Layer(%03d) nGT %3d, nRC %3d, nRC75 %3d, nPP %3d, loss: box %6.3f, conf %6.3f, class %6.3f, total %7.3f' + % (self.seen, self.nth_layer, nGT, nRecall, nRecall75, nProposals, loss_coord, loss_conf, loss_cls, loss)) + if math.isnan(loss.item()): + print(conf, tconf) + sys.exit(0) + return loss diff --git a/detector/YOLOv3/yolo_utils.py b/detector/YOLOv3/yolo_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ff15369a1f39f9323805e2c50db1dc29580dfa50 --- /dev/null +++ b/detector/YOLOv3/yolo_utils.py @@ -0,0 +1,589 @@ +import os +import time +import math +import torch +import numpy as np +from PIL import Image, ImageDraw +import struct # get_image_size +import imghdr # get_image_size + + +def sigmoid(x): + return 1.0 / (math.exp(-x) + 1.) 
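+
+# Note: sigmoid above matches torch.sigmoid for scalar inputs, and softmax
+# below subtracts the maximum before exponentiating, the standard trick for
+# numerical stability.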
+
+
+def softmax(x):
+    x = torch.exp(x - torch.max(x))
+    x /= x.sum()
+    return x
+
+
+def bbox_iou(box1, box2, x1y1x2y2=True):
+    if x1y1x2y2:
+        x1_min = min(box1[0], box2[0])
+        x2_max = max(box1[2], box2[2])
+        y1_min = min(box1[1], box2[1])
+        y2_max = max(box1[3], box2[3])
+        w1, h1 = box1[2] - box1[0], box1[3] - box1[1]
+        w2, h2 = box2[2] - box2[0], box2[3] - box2[1]
+    else:
+        w1, h1 = box1[2], box1[3]
+        w2, h2 = box2[2], box2[3]
+        x1_min = min(box1[0] - w1 / 2.0, box2[0] - w2 / 2.0)
+        x2_max = max(box1[0] + w1 / 2.0, box2[0] + w2 / 2.0)
+        y1_min = min(box1[1] - h1 / 2.0, box2[1] - h2 / 2.0)
+        y2_max = max(box1[1] + h1 / 2.0, box2[1] + h2 / 2.0)
+
+    w_union = x2_max - x1_min
+    h_union = y2_max - y1_min
+    w_cross = w1 + w2 - w_union
+    h_cross = h1 + h2 - h_union
+    carea = 0
+    if w_cross <= 0 or h_cross <= 0:
+        return 0.0
+
+    area1 = w1 * h1
+    area2 = w2 * h2
+    carea = w_cross * h_cross
+    uarea = area1 + area2 - carea
+    return float(carea / uarea)
+
+
+def multi_bbox_ious(boxes1, boxes2, x1y1x2y2=True):
+    if x1y1x2y2:
+        x1_min = torch.min(boxes1[0], boxes2[0])
+        x2_max = torch.max(boxes1[2], boxes2[2])
+        y1_min = torch.min(boxes1[1], boxes2[1])
+        y2_max = torch.max(boxes1[3], boxes2[3])
+        w1, h1 = boxes1[2] - boxes1[0], boxes1[3] - boxes1[1]
+        w2, h2 = boxes2[2] - boxes2[0], boxes2[3] - boxes2[1]
+    else:
+        w1, h1 = boxes1[2], boxes1[3]
+        w2, h2 = boxes2[2], boxes2[3]
+        x1_min = torch.min(boxes1[0] - w1 / 2.0, boxes2[0] - w2 / 2.0)
+        x2_max = torch.max(boxes1[0] + w1 / 2.0, boxes2[0] + w2 / 2.0)
+        y1_min = torch.min(boxes1[1] - h1 / 2.0, boxes2[1] - h2 / 2.0)
+        y2_max = torch.max(boxes1[1] + h1 / 2.0, boxes2[1] + h2 / 2.0)
+
+    w_union = x2_max - x1_min
+    h_union = y2_max - y1_min
+    w_cross = w1 + w2 - w_union
+    h_cross = h1 + h2 - h_union
+    mask = (((w_cross <= 0) + (h_cross <= 0)) > 0)
+    area1 = w1 * h1
+    area2 = w2 * h2
+    carea = w_cross * h_cross
+    carea[mask] = 0
+    uarea = area1 + area2 - carea
+    return carea / uarea
+
+
+from torchvision import ops
+
+
+def post_process(boxes, num_classes, conf_thresh=0.01, nms_thresh=0.45, obj_thresh=0.3):
+    batch_size = boxes.size(0)
+
+    # per-class nms
+    results_boxes = []
+    for batch_id in range(batch_size):
+        processed_boxes = []
+        for cls_id in range(num_classes):
+            mask = (boxes[batch_id, :, -1] == cls_id) * (boxes[batch_id, :, 4] > obj_thresh)
+            masked_boxes = boxes[batch_id, mask]
+
+            keep = ops.nms(masked_boxes[:, :4], masked_boxes[:, 5], nms_thresh)
+
+            nmsed_boxes = masked_boxes[keep, :]
+
+            processed_boxes.append(nmsed_boxes)
+        processed_boxes = torch.cat(processed_boxes, dim=0)
+
+        results_boxes.append(processed_boxes)
+
+    return results_boxes
+
+
+def xywh_to_xyxy(boxes_xywh):
+    boxes_xyxy = boxes_xywh.copy()
+    boxes_xyxy[:, 0] = boxes_xywh[:, 0] - boxes_xywh[:, 2] / 2.
+    boxes_xyxy[:, 1] = boxes_xywh[:, 1] - boxes_xywh[:, 3] / 2.
+    boxes_xyxy[:, 2] = boxes_xywh[:, 0] + boxes_xywh[:, 2] / 2.
+    boxes_xyxy[:, 3] = boxes_xywh[:, 1] + boxes_xywh[:, 3] / 2.
+
+    return boxes_xyxy
+
+
+def xyxy_to_xywh(boxes_xyxy):
+    if isinstance(boxes_xyxy, torch.Tensor):
+        boxes_xywh = boxes_xyxy.clone()
+    elif isinstance(boxes_xyxy, np.ndarray):
+        boxes_xywh = boxes_xyxy.copy()
+
+    boxes_xywh[:, 0] = (boxes_xyxy[:, 0] + boxes_xyxy[:, 2]) / 2.
+    boxes_xywh[:, 1] = (boxes_xyxy[:, 1] + boxes_xyxy[:, 3]) / 2.
+ boxes_xywh[:, 2] = boxes_xyxy[:, 2] - boxes_xyxy[:, 0] + boxes_xywh[:, 3] = boxes_xyxy[:, 3] - boxes_xyxy[:, 1] + + return boxes_xywh + + +def nms(boxes, nms_thresh): + if len(boxes) == 0: + return boxes + + det_confs = torch.zeros(len(boxes)) + for i in range(len(boxes)): + det_confs[i] = boxes[i][4] + + _, sortIds = torch.sort(det_confs, descending=True) + out_boxes = [] + for i in range(len(boxes)): + box_i = boxes[sortIds[i]] + if box_i[4] > 0: + out_boxes.append(box_i) + for j in range(i + 1, len(boxes)): + box_j = boxes[sortIds[j]] + if bbox_iou(box_i, box_j, x1y1x2y2=False) > nms_thresh: + # print(box_i, box_j, bbox_iou(box_i, box_j, x1y1x2y2=False)) + box_j[4] = 0 + return out_boxes + + +def convert2cpu(gpu_matrix): + return torch.FloatTensor(gpu_matrix.size()).copy_(gpu_matrix) + + +def convert2cpu_long(gpu_matrix): + return torch.LongTensor(gpu_matrix.size()).copy_(gpu_matrix) + + +def get_all_boxes(output, conf_thresh, num_classes, only_objectness=1, validation=False, use_cuda=True): + # total number of inputs (batch size) + # first element (x) for first tuple (x, anchor_mask, num_anchor) + batchsize = output[0]['x'].data.size(0) + + all_boxes = [] + for i in range(len(output)): + pred, anchors, num_anchors = output[i]['x'].data, output[i]['a'], output[i]['n'].item() + boxes = get_region_boxes(pred, conf_thresh, num_classes, anchors, num_anchors, \ + only_objectness=only_objectness, validation=validation, use_cuda=use_cuda) + + all_boxes.append(boxes) + return torch.cat(all_boxes, dim=1) + + +def get_region_boxes(output, obj_thresh, num_classes, anchors, num_anchors, only_objectness=1, validation=False, + use_cuda=True): + device = torch.device("cuda" if use_cuda else "cpu") + anchors = anchors.to(device) + anchor_step = anchors.size(0) // num_anchors + if output.dim() == 3: + output = output.unsqueeze(0) + batch = output.size(0) + assert (output.size(1) == (5 + num_classes) * num_anchors) + h = output.size(2) + w = output.size(3) + cls_anchor_dim = batch * num_anchors * h * w + + # all_boxes = [] + output = output.view(batch * num_anchors, 5 + num_classes, h * w).transpose(0, 1).contiguous().view(5 + num_classes, + cls_anchor_dim) + + grid_x = torch.linspace(0, w - 1, w).repeat(batch * num_anchors, h, 1).view(cls_anchor_dim).to(device) + grid_y = torch.linspace(0, h - 1, h).repeat(w, 1).t().repeat(batch * num_anchors, 1, 1).view(cls_anchor_dim).to( + device) + ix = torch.LongTensor(range(0, 2)).to(device) + anchor_w = anchors.view(num_anchors, anchor_step).index_select(1, ix[0]).repeat(1, batch, h * w).view( + cls_anchor_dim) + anchor_h = anchors.view(num_anchors, anchor_step).index_select(1, ix[1]).repeat(1, batch, h * w).view( + cls_anchor_dim) + + xs, ys = torch.sigmoid(output[0]) + grid_x, torch.sigmoid(output[1]) + grid_y + ws, hs = torch.exp(output[2]) * anchor_w.detach(), torch.exp(output[3]) * anchor_h.detach() + det_confs = torch.sigmoid(output[4]) + + # by ysyun, dim=1 means input is 2D or even dimension else dim=0 + cls_confs = torch.nn.Softmax(dim=1)(output[5:5 + num_classes].transpose(0, 1)).detach() + cls_max_confs, cls_max_ids = torch.max(cls_confs, 1) + cls_max_confs = cls_max_confs.view(-1) + cls_max_ids = cls_max_ids.view(-1).float() + + # sz_hw = h*w + # sz_hwa = sz_hw*num_anchors + # det_confs = convert2cpu(det_confs) + # cls_max_confs = convert2cpu(cls_max_confs) + # cls_max_ids = convert2cpu_long(cls_max_ids) + # xs, ys = convert2cpu(xs), convert2cpu(ys) + # ws, hs = convert2cpu(ws), convert2cpu(hs) + + cls_confs = det_confs * cls_max_confs + + # boxes = 
[xs/w, ys/h, ws/w, hs/h, det_confs, cls_confs, cls_max_ids] + xs, ys, ws, hs = xs / w, ys / h, ws / w, hs / h + x1, y1, x2, y2 = torch.clamp_min(xs - ws / 2., 0.), torch.clamp_min(ys - hs / 2., 0.), torch.clamp_max(xs + ws / 2., + 1.), torch.clamp_max( + ys + hs / 2., 1.) + boxes = [x1, y1, x2, y2, det_confs, cls_confs, cls_max_ids] + boxes = list(map(lambda x: x.view(batch, -1), boxes)) + boxes = torch.stack(boxes, dim=2) + + # for b in range(batch): + # boxes = [] + # for cy in range(h): + # for cx in range(w): + # for i in range(num_anchors): + # ind = b*sz_hwa + i*sz_hw + cy*w + cx + # det_conf = det_confs[ind] + # if only_objectness: + # conf = det_confs[ind] + # else: + # conf = det_confs[ind] * cls_max_confs[ind] + + # if conf > conf_thresh: + # bcx = xs[ind] + # bcy = ys[ind] + # bw = ws[ind] + # bh = hs[ind] + # cls_max_conf = cls_max_confs[ind] + # cls_max_id = cls_max_ids[ind] + # box = [bcx/w, bcy/h, bw/w, bh/h, det_conf, cls_max_conf, cls_max_id] + + # boxes.append(box) + # all_boxes.append(boxes) + return boxes + + +# def get_all_boxes(output, conf_thresh, num_classes, only_objectness=1, validation=False, use_cuda=True): +# # total number of inputs (batch size) +# # first element (x) for first tuple (x, anchor_mask, num_anchor) +# tot = output[0]['x'].data.size(0) +# all_boxes = [[] for i in range(tot)] +# for i in range(len(output)): +# pred, anchors, num_anchors = output[i]['x'].data, output[i]['a'], output[i]['n'].item() +# b = get_region_boxes(pred, conf_thresh, num_classes, anchors, num_anchors, \ +# only_objectness=only_objectness, validation=validation, use_cuda=use_cuda) +# for t in range(tot): +# all_boxes[t] += b[t] +# return all_boxes + +# def get_region_boxes(output, conf_thresh, num_classes, anchors, num_anchors, only_objectness=1, validation=False, use_cuda=True): +# device = torch.device("cuda" if use_cuda else "cpu") +# anchors = anchors.to(device) +# anchor_step = anchors.size(0)//num_anchors +# if output.dim() == 3: +# output = output.unsqueeze(0) +# batch = output.size(0) +# assert(output.size(1) == (5+num_classes)*num_anchors) +# h = output.size(2) +# w = output.size(3) +# cls_anchor_dim = batch*num_anchors*h*w + +# t0 = time.time() +# all_boxes = [] +# output = output.view(batch*num_anchors, 5+num_classes, h*w).transpose(0,1).contiguous().view(5+num_classes, cls_anchor_dim) + +# grid_x = torch.linspace(0, w-1, w).repeat(batch*num_anchors, h, 1).view(cls_anchor_dim).to(device) +# grid_y = torch.linspace(0, h-1, h).repeat(w,1).t().repeat(batch*num_anchors, 1, 1).view(cls_anchor_dim).to(device) +# ix = torch.LongTensor(range(0,2)).to(device) +# anchor_w = anchors.view(num_anchors, anchor_step).index_select(1, ix[0]).repeat(1, batch, h*w).view(cls_anchor_dim) +# anchor_h = anchors.view(num_anchors, anchor_step).index_select(1, ix[1]).repeat(1, batch, h*w).view(cls_anchor_dim) + +# xs, ys = torch.sigmoid(output[0]) + grid_x, torch.sigmoid(output[1]) + grid_y +# ws, hs = torch.exp(output[2]) * anchor_w.detach(), torch.exp(output[3]) * anchor_h.detach() +# det_confs = torch.sigmoid(output[4]) + +# # by ysyun, dim=1 means input is 2D or even dimension else dim=0 +# cls_confs = torch.nn.Softmax(dim=1)(output[5:5+num_classes].transpose(0,1)).detach() +# cls_max_confs, cls_max_ids = torch.max(cls_confs, 1) +# cls_max_confs = cls_max_confs.view(-1) +# cls_max_ids = cls_max_ids.view(-1) +# t1 = time.time() + +# sz_hw = h*w +# sz_hwa = sz_hw*num_anchors +# det_confs = convert2cpu(det_confs) +# cls_max_confs = convert2cpu(cls_max_confs) +# cls_max_ids = 
convert2cpu_long(cls_max_ids) +# xs, ys = convert2cpu(xs), convert2cpu(ys) +# ws, hs = convert2cpu(ws), convert2cpu(hs) +# if validation: +# cls_confs = convert2cpu(cls_confs.view(-1, num_classes)) + +# t2 = time.time() +# for b in range(batch): +# boxes = [] +# for cy in range(h): +# for cx in range(w): +# for i in range(num_anchors): +# ind = b*sz_hwa + i*sz_hw + cy*w + cx +# det_conf = det_confs[ind] +# if only_objectness: +# conf = det_confs[ind] +# else: +# conf = det_confs[ind] * cls_max_confs[ind] + +# if conf > conf_thresh: +# bcx = xs[ind] +# bcy = ys[ind] +# bw = ws[ind] +# bh = hs[ind] +# cls_max_conf = cls_max_confs[ind] +# cls_max_id = cls_max_ids[ind] +# box = [bcx/w, bcy/h, bw/w, bh/h, det_conf, cls_max_conf, cls_max_id] +# if (not only_objectness) and validation: +# for c in range(num_classes): +# tmp_conf = cls_confs[ind][c] +# if c != cls_max_id and det_confs[ind]*tmp_conf > conf_thresh: +# box.append(tmp_conf) +# box.append(c) +# boxes.append(box) +# all_boxes.append(boxes) +# t3 = time.time() +# if False: +# print('---------------------------------') +# print('matrix computation : %f' % (t1-t0)) +# print(' gpu to cpu : %f' % (t2-t1)) +# print(' boxes filter : %f' % (t3-t2)) +# print('---------------------------------') +# return all_boxes + +def plot_boxes_cv2(img, boxes, savename=None, class_names=None, color=None): + import cv2 + colors = torch.FloatTensor([[1, 0, 1], [0, 0, 1], [0, 1, 1], [0, 1, 0], [1, 1, 0], [1, 0, 0]]) + + def get_color(c, x, max_val): + ratio = float(x) / max_val * 5 + i = int(math.floor(ratio)) + j = int(math.ceil(ratio)) + ratio -= i + r = (1 - ratio) * colors[i][c] + ratio * colors[j][c] + return int(r * 255) + + width = img.shape[1] + height = img.shape[0] + for i in range(len(boxes)): + box = boxes[i] + x1 = int(round((box[0] - box[2] / 2.0) * width)) + y1 = int(round((box[1] - box[3] / 2.0) * height)) + x2 = int(round((box[0] + box[2] / 2.0) * width)) + y2 = int(round((box[1] + box[3] / 2.0) * height)) + + if color: + rgb = color + else: + rgb = (255, 0, 0) + if len(box) >= 7 and class_names: + cls_conf = box[5] + cls_id = box[6] + # print('%s: %f' % (class_names[cls_id], cls_conf)) + classes = len(class_names) + offset = cls_id * 123457 % classes + red = get_color(2, offset, classes) + green = get_color(1, offset, classes) + blue = get_color(0, offset, classes) + if color is None: + rgb = (red, green, blue) + img = cv2.putText(img, class_names[cls_id], (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 1.2, rgb, 1) + img = cv2.rectangle(img, (x1, y1), (x2, y2), rgb, 1) + if savename: + print("save plot results to %s" % savename) + cv2.imwrite(savename, img) + return img + + +def plot_boxes(img, boxes, savename=None, class_names=None): + colors = torch.FloatTensor([[1, 0, 1], [0, 0, 1], [0, 1, 1], [0, 1, 0], [1, 1, 0], [1, 0, 0]]) + + def get_color(c, x, max_val): + ratio = float(x) / max_val * 5 + i = int(math.floor(ratio)) + j = int(math.ceil(ratio)) + ratio -= i + r = (1 - ratio) * colors[i][c] + ratio * colors[j][c] + return int(r * 255) + + width = img.width + height = img.height + draw = ImageDraw.Draw(img) + print("%d box(es) is(are) found" % len(boxes)) + for i in range(len(boxes)): + box = boxes[i] + x1 = (box[0] - box[2] / 2.0) * width + y1 = (box[1] - box[3] / 2.0) * height + x2 = (box[0] + box[2] / 2.0) * width + y2 = (box[1] + box[3] / 2.0) * height + + rgb = (255, 0, 0) + if len(box) >= 7 and class_names: + cls_conf = box[5] + cls_id = box[6] + print('%s: %f' % (class_names[cls_id], cls_conf)) + classes = len(class_names) + offset = cls_id * 
123457 % classes + red = get_color(2, offset, classes) + green = get_color(1, offset, classes) + blue = get_color(0, offset, classes) + rgb = (red, green, blue) + draw.text((x1, y1), class_names[cls_id], fill=rgb) + draw.rectangle([x1, y1, x2, y2], outline=rgb) + if savename: + print("save plot results to %s" % savename) + img.save(savename) + return img + + +def read_truths(lab_path): + if not os.path.exists(lab_path): + return np.array([]) + if os.path.getsize(lab_path): + truths = np.loadtxt(lab_path) + truths = truths.reshape(truths.size // 5, 5) # to avoid single truth problem + return truths + else: + return np.array([]) + + +def read_truths_args(lab_path, min_box_scale): + truths = read_truths(lab_path) + new_truths = [] + for i in range(truths.shape[0]): + if truths[i][3] < min_box_scale: + continue + new_truths.append([truths[i][0], truths[i][1], truths[i][2], truths[i][3], truths[i][4]]) + return np.array(new_truths) + + +def load_class_names(namesfile): + class_names = [] + with open(namesfile, 'r', encoding='utf8') as fp: + lines = fp.readlines() + for line in lines: + class_names.append(line.strip()) + return class_names + + +def image2torch(img): + if isinstance(img, Image.Image): + width = img.width + height = img.height + img = torch.ByteTensor(torch.ByteStorage.from_buffer(img.tobytes())) + img = img.view(height, width, 3).transpose(0, 1).transpose(0, 2).contiguous() + img = img.view(1, 3, height, width) + img = img.float().div(255.0) + elif type(img) == np.ndarray: # cv2 image + img = torch.from_numpy(img.transpose(2, 0, 1)).float().div(255.0).unsqueeze(0) + else: + print("unknown image type") + exit(-1) + return img + + +def do_detect(model, img, conf_thresh, nms_thresh, use_cuda=True): + model.eval() + t0 = time.time() + img = image2torch(img) + t1 = time.time() + + img = img.to(torch.device("cuda" if use_cuda else "cpu")) + t2 = time.time() + + out_boxes = model(img) + boxes = get_all_boxes(out_boxes, conf_thresh, model.num_classes, use_cuda=use_cuda)[0] + + t3 = time.time() + boxes = nms(boxes, nms_thresh) + t4 = time.time() + + if False: + print('-----------------------------------') + print(' image to tensor : %f' % (t1 - t0)) + print(' tensor to cuda : %f' % (t2 - t1)) + print(' predict : %f' % (t3 - t2)) + print(' nms : %f' % (t4 - t3)) + print(' total : %f' % (t4 - t0)) + print('-----------------------------------') + return boxes + + +def read_data_cfg(datacfg): + options = dict() + options['gpus'] = '0,1,2,3' + options['num_workers'] = '10' + with open(datacfg) as fp: + lines = fp.readlines() + + for line in lines: + line = line.strip() + if line == '': + continue + key, value = line.split('=') + key = key.strip() + value = value.strip() + options[key] = value + return options + + +def scale_bboxes(bboxes, width, height): + import copy + dets = copy.deepcopy(bboxes) + for i in range(len(dets)): + dets[i][0] = dets[i][0] * width + dets[i][1] = dets[i][1] * height + dets[i][2] = dets[i][2] * width + dets[i][3] = dets[i][3] * height + return dets + + +def file_lines(thefilepath): + count = 0 + thefile = open(thefilepath, 'rb') + while True: + buffer = thefile.read(8192 * 1024) + if not buffer: + break + count += buffer.count(b'\n') + thefile.close() + return count + + +def get_image_size(fname): + """ + Determine the image type of fhandle and return its size. 
+    from draco
+    """
+    with open(fname, 'rb') as fhandle:
+        head = fhandle.read(24)
+        if len(head) != 24:
+            return
+        if imghdr.what(fname) == 'png':
+            check = struct.unpack('>i', head[4:8])[0]
+            if check != 0x0d0a1a0a:
+                return
+            width, height = struct.unpack('>ii', head[16:24])
+        elif imghdr.what(fname) == 'gif':
+            width, height = struct.unpack('<HH', head[6:10])
+        elif imghdr.what(fname) == 'jpeg':
+            try:
+                fhandle.seek(0)  # Read 0xff next
+                size = 2
+                ftype = 0
+                while not 0xc0 <= ftype <= 0xcf:
+                    fhandle.seek(size, 1)
+                    byte = fhandle.read(1)
+                    while ord(byte) == 0xff:
+                        byte = fhandle.read(1)
+                    ftype = ord(byte)
+                    size = struct.unpack('>H', fhandle.read(2))[0] - 2
+                # We are at a SOFn block
+                fhandle.seek(1, 1)  # Skip `precision' byte.
+                height, width = struct.unpack('>HH', fhandle.read(4))
+            except Exception:  # IGNORE:W0703
+                return
+        else:
+            return
+        return width, height
+
+
+def logging(message):
+    print('%s %s' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), message))
diff --git a/detector/YOLOv5/.dockerignore b/detector/YOLOv5/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..af51ccc3d8df7681ca03ea6f5b669bac37e6baa6
--- /dev/null
+++ b/detector/YOLOv5/.dockerignore
@@ -0,0 +1,222 @@
+# Repo-specific DockerIgnore -------------------------------------------------------------------------------------------
+#.git
+.cache
+.idea
+runs
+output
+coco
+storage.googleapis.com
+
+data/samples/*
+**/results*.csv
+*.jpg
+
+# Neural Network weights -----------------------------------------------------------------------------------------------
+**/*.pt
+**/*.pth
+**/*.onnx
+**/*.engine
+**/*.mlmodel
+**/*.torchscript
+**/*.torchscript.pt
+**/*.tflite
+**/*.h5
+**/*.pb
+*_saved_model/
+*_web_model/
+*_openvino_model/
+
+# Below Copied From .gitignore -----------------------------------------------------------------------------------------
+# Below Copied From .gitignore -----------------------------------------------------------------------------------------
+
+
+# GitHub Python GitIgnore ----------------------------------------------------------------------------------------------
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+wandb/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv*
+venv*/
+ENV*/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+
+# https://github.com/github/gitignore/blob/master/Global/macOS.gitignore -----------------------------------------------
+
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+Icon?
+ +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + + +# https://github.com/github/gitignore/blob/master/Global/JetBrains.gitignore +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff: +.idea/* +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/dictionaries +.html # Bokeh Plots +.pg # TensorFlow Frozen Graphs +.avi # videos + +# Sensitive or high-churn files: +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml + +# Gradle: +.idea/**/gradle.xml +.idea/**/libraries + +# CMake +cmake-build-debug/ +cmake-build-release/ + +# Mongo Explorer plugin: +.idea/**/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties diff --git a/detector/YOLOv5/.gitattributes b/detector/YOLOv5/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..dad4239ebad5b72917cbc4bba95206c1e55d519e --- /dev/null +++ b/detector/YOLOv5/.gitattributes @@ -0,0 +1,2 @@ +# this drop notebooks from GitHub language stats +*.ipynb linguist-vendored diff --git a/detector/YOLOv5/.gitignore b/detector/YOLOv5/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..69a00843ea42547a6e616ff78aff60f0dfdfa9cd --- /dev/null +++ b/detector/YOLOv5/.gitignore @@ -0,0 +1,256 @@ +# Repo-specific GitIgnore ---------------------------------------------------------------------------------------------- +*.jpg +*.jpeg +*.png +*.bmp +*.tif +*.tiff +*.heic +*.JPG +*.JPEG +*.PNG +*.BMP +*.TIF +*.TIFF +*.HEIC +*.mp4 +*.mov +*.MOV +*.avi +*.data +*.json +*.cfg +!setup.cfg +!cfg/yolov3*.cfg + +storage.googleapis.com +runs/* +data/* +data/images/* +!data/*.yaml +!data/hyps +!data/scripts +!data/images +!data/images/zidane.jpg +!data/images/bus.jpg +!data/*.sh + +results*.csv + +# Datasets ------------------------------------------------------------------------------------------------------------- +coco/ +coco128/ +VOC/ + +# MATLAB GitIgnore ----------------------------------------------------------------------------------------------------- +*.m~ +*.mat +!targets*.mat + +# Neural Network weights ----------------------------------------------------------------------------------------------- +*.weights +*.pt +*.pb +*.onnx +*.engine +*.mlmodel +*.torchscript +*.tflite +*.h5 +*_saved_model/ +*_web_model/ +*_openvino_model/ +darknet53.conv.74 +yolov3-tiny.conv.15 + +# GitHub Python GitIgnore ---------------------------------------------------------------------------------------------- +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ 
+var/ +wheels/ +*.egg-info/ +/wandb/ +.installed.cfg +*.egg + + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# dotenv +.env + +# virtualenv +.venv* +venv*/ +ENV*/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + + +# https://github.com/github/gitignore/blob/master/Global/macOS.gitignore ----------------------------------------------- + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon +Icon? + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + + +# https://github.com/github/gitignore/blob/master/Global/JetBrains.gitignore +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff: +.idea/* +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/dictionaries +.html # Bokeh Plots +.pg # TensorFlow Frozen Graphs +.avi # videos + +# Sensitive or high-churn files: +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml + +# Gradle: +.idea/**/gradle.xml +.idea/**/libraries + +# CMake +cmake-build-debug/ +cmake-build-release/ + +# Mongo Explorer plugin: +.idea/**/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties diff --git a/detector/YOLOv5/.pre-commit-config.yaml b/detector/YOLOv5/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..526a5609fdd7e02720ee160c8ffb39813c7a2770 --- /dev/null +++ b/detector/YOLOv5/.pre-commit-config.yaml @@ -0,0 +1,66 @@ +# Define hooks for code formations +# Will be applied on any updated commit files if a user has installed and linked commit hook + +default_language_version: + python: python3.8 + +# Define bot property if installed via https://github.com/marketplace/pre-commit-ci +ci: + autofix_prs: true + autoupdate_commit_msg: '[pre-commit.ci] pre-commit suggestions' + autoupdate_schedule: quarterly + # submodules: true + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: 
v4.1.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + - id: check-yaml + - id: check-toml + - id: pretty-format-json + - id: check-docstring-first + + - repo: https://github.com/asottile/pyupgrade + rev: v2.31.0 + hooks: + - id: pyupgrade + args: [--py36-plus] + name: Upgrade code + + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + name: Sort imports + + # TODO + #- repo: https://github.com/pre-commit/mirrors-yapf + # rev: v0.31.0 + # hooks: + # - id: yapf + # name: formatting + + # TODO + #- repo: https://github.com/executablebooks/mdformat + # rev: 0.7.7 + # hooks: + # - id: mdformat + # additional_dependencies: + # - mdformat-gfm + # - mdformat-black + # - mdformat_frontmatter + + # TODO + #- repo: https://github.com/asottile/yesqa + # rev: v1.2.3 + # hooks: + # - id: yesqa + + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + name: PEP8 diff --git a/detector/YOLOv5/CONTRIBUTING.md b/detector/YOLOv5/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..ebde03a562a0bb263202632a504c8cac0d10cf17 --- /dev/null +++ b/detector/YOLOv5/CONTRIBUTING.md @@ -0,0 +1,94 @@ +## Contributing to YOLOv5 🚀 + +We love your input! We want to make contributing to YOLOv5 as easy and transparent as possible, whether it's: + +- Reporting a bug +- Discussing the current state of the code +- Submitting a fix +- Proposing a new feature +- Becoming a maintainer + +YOLOv5 works so well due to our combined community effort, and for every small improvement you contribute you will be +helping push the frontiers of what's possible in AI 😃! + +## Submitting a Pull Request (PR) 🛠️ + +Submitting a PR is easy! This example shows how to submit a PR for updating `requirements.txt` in 4 steps: + +### 1. Select File to Update + +Select `requirements.txt` to update by clicking on it in GitHub. +
+<p align="center">PR_step1</p>
+ +### 2. Click 'Edit this file' + +Button is in top-right corner. +
+<p align="center">PR_step2</p>
+ +### 3. Make Changes + +Change `matplotlib` version from `3.2.2` to `3.3`. +
+<p align="center">PR_step3</p>
+ +### 4. Preview Changes and Submit PR + +Click on the **Preview changes** tab to verify your updates. At the bottom of the screen select 'Create a **new branch** +for this commit', assign your branch a descriptive name such as `fix/matplotlib_version` and click the green **Propose +changes** button. All done, your PR is now submitted to YOLOv5 for review and approval 😃! +
+<p align="center">PR_step4</p>
+ +### PR recommendations + +To allow your work to be integrated as seamlessly as possible, we advise you to: + +- ✅ Verify your PR is **up-to-date with upstream/master.** If your PR is behind upstream/master an + automatic [GitHub Actions](https://github.com/ultralytics/yolov5/blob/master/.github/workflows/rebase.yml) merge may + be attempted by writing /rebase in a new comment, or by running the following code, replacing 'feature' with the name + of your local branch: + +```bash +git remote add upstream https://github.com/ultralytics/yolov5.git +git fetch upstream +# git checkout feature # <--- replace 'feature' with local branch name +git merge upstream/master +git push -u origin -f +``` + +- ✅ Verify all Continuous Integration (CI) **checks are passing**. +- ✅ Reduce changes to the absolute **minimum** required for your bug fix or feature addition. _"It is not daily increase + but daily decrease, hack away the unessential. The closer to the source, the less wastage there is."_ — Bruce Lee + +## Submitting a Bug Report 🐛 + +If you spot a problem with YOLOv5 please submit a Bug Report! + +For us to start investigating a possible problem we need to be able to reproduce it ourselves first. We've created a few +short guidelines below to help users provide what we need in order to get started. + +When asking a question, people will be better able to provide help if you provide **code** that they can easily +understand and use to **reproduce** the problem. This is referred to by community members as creating +a [minimum reproducible example](https://stackoverflow.com/help/minimal-reproducible-example). Your code that reproduces +the problem should be: + +* ✅ **Minimal** – Use as little code as possible that still produces the same problem +* ✅ **Complete** – Provide **all** parts someone else needs to reproduce your problem in the question itself +* ✅ **Reproducible** – Test the code you're about to provide to make sure it reproduces the problem + +In addition to the above requirements, for [Ultralytics](https://ultralytics.com/) to provide assistance your code +should be: + +* ✅ **Current** – Verify that your code is up-to-date with current + GitHub [master](https://github.com/ultralytics/yolov5/tree/master), and if necessary `git pull` or `git clone` a new + copy to ensure your problem has not already been resolved by previous commits. +* ✅ **Unmodified** – Your problem must be reproducible without any modifications to the codebase in this + repository. [Ultralytics](https://ultralytics.com/) does not provide support for custom code ⚠️. + +If you believe your problem meets all of the above criteria, please close this issue and raise a new one using the 🐛 ** +Bug Report** [template](https://github.com/ultralytics/yolov5/issues/new/choose) and providing +a [minimum reproducible example](https://stackoverflow.com/help/minimal-reproducible-example) to help us better +understand and diagnose your problem. 
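+
+A minimal sketch of what such a reproducible snippet might look like (assuming
+the standard PyTorch Hub entry point; replace it with whatever actually
+triggers your problem):
+
+```python
+import torch
+
+# smallest complete program that shows the issue
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
+results = model('https://ultralytics.com/images/zidane.jpg')
+results.print()  # paste this output into the report
+```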
+ +## License + +By contributing, you agree that your contributions will be licensed under +the [GPL-3.0 license](https://choosealicense.com/licenses/gpl-3.0/) diff --git a/detector/YOLOv5/Dockerfile b/detector/YOLOv5/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..489dd04ce5c9ca2d8f3287b433f988e1626fb61c --- /dev/null +++ b/detector/YOLOv5/Dockerfile @@ -0,0 +1,64 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Start FROM Nvidia PyTorch image https://ngc.nvidia.com/catalog/containers/nvidia:pytorch +FROM nvcr.io/nvidia/pytorch:21.10-py3 + +# Install linux packages +RUN apt update && apt install -y zip htop screen libgl1-mesa-glx + +# Install python dependencies +COPY requirements.txt . +RUN python -m pip install --upgrade pip +RUN pip uninstall -y torch torchvision torchtext +RUN pip install --no-cache -r requirements.txt albumentations wandb gsutil notebook \ + torch==1.10.2+cu113 torchvision==0.11.3+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html +# RUN pip install --no-cache -U torch torchvision + +# Create working directory +RUN mkdir -p /usr/src/app +WORKDIR /usr/src/app + +# Copy contents +COPY . /usr/src/app + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf /root/.config/Ultralytics/ + +# Set environment variables +# ENV HOME=/usr/src/app + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/yolov5:latest && sudo docker build -t $t . && sudo docker push $t + +# Pull and Run +# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all $t + +# Pull and Run with local directory access +# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/datasets:/usr/src/datasets $t + +# Kill all +# sudo docker kill $(sudo docker ps -q) + +# Kill all image-based +# sudo docker kill $(sudo docker ps -qa --filter ancestor=ultralytics/yolov5:latest) + +# Bash into running container +# sudo docker exec -it 5a9b5863d93d bash + +# Bash into stopped container +# id=$(sudo docker ps -qa) && sudo docker start $id && sudo docker exec -it $id bash + +# Clean up +# docker system prune -a --volumes + +# Update Ubuntu drivers +# https://www.maketecheasier.com/install-nvidia-drivers-ubuntu/ + +# DDP test +# python -m torch.distributed.run --nproc_per_node 2 --master_port 1 train.py --epochs 3 + +# GCP VM from Image +# docker.io/ultralytics/yolov5:latest diff --git a/detector/YOLOv5/LICENSE b/detector/YOLOv5/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..92b370f0e0e1b91cf8baf5d0f78c56a9824c39f1 --- /dev/null +++ b/detector/YOLOv5/LICENSE @@ -0,0 +1,674 @@ +GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. 
We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. 
The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. 
+ + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. 
This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). 
+ + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". 
+ + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. 
+ + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/detector/YOLOv5/README.md b/detector/YOLOv5/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b03a7c583c4007d64617964e15ae7ad413b4df9e --- /dev/null +++ b/detector/YOLOv5/README.md @@ -0,0 +1,304 @@ +
+YOLOv5 🚀 is a family of object detection architectures and models pretrained on the COCO dataset, and represents Ultralytics
+open-source research into future vision AI methods, incorporating lessons learned and best practices evolved over thousands of hours of research and development.
+
+## Documentation
+
+See the [YOLOv5 Docs](https://docs.ultralytics.com) for full documentation on training, testing and deployment.
+
+## Quick Start Examples
+
+### Install
+
+Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a
+[**Python>=3.7.0**](https://www.python.org/) environment, including
+[**PyTorch>=1.7**](https://pytorch.org/get-started/locally/).
+
+```bash
+git clone https://github.com/ultralytics/yolov5  # clone
+cd yolov5
+pip install -r requirements.txt  # install
+```
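+After installing, it is worth a quick sanity check that the environment meets the **Python>=3.7.0** / **PyTorch>=1.7** floor above. A minimal sketch (ours, not part of the YOLOv5 repo):
+
+```python
+import torch
+
+# Illustrative environment check: confirm the installed PyTorch version
+# and whether a CUDA-capable GPU is visible to it.
+print(torch.__version__)          # expect >= 1.7
+print(torch.cuda.is_available())  # True if a usable GPU is detected
+```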
+ +
+### Inference
+
+Inference with YOLOv5 and [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36).
+[Models](https://github.com/ultralytics/yolov5/tree/master/models) download automatically from the latest
+YOLOv5 [release](https://github.com/ultralytics/yolov5/releases).
+
+```python
+import torch
+
+# Model
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # or yolov5m, yolov5l, yolov5x, custom
+
+# Images
+img = 'https://ultralytics.com/images/zidane.jpg'  # or file, Path, PIL, OpenCV, numpy, list
+
+# Inference
+results = model(img)
+
+# Results
+results.print()  # or .show(), .save(), .crop(), .pandas(), etc.
+```
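+The `results` object above also exposes detections as pandas DataFrames, which is handy for filtering. A short follow-on sketch (the 'person' filter is only an example):
+
+```python
+# Continuing the snippet above: one DataFrame per input image, with columns
+# xmin, ymin, xmax, ymax, confidence, class, name.
+df = results.pandas().xyxy[0]
+people = df[df['name'] == 'person']  # illustrative filter on the class name
+print(people[['xmin', 'ymin', 'xmax', 'ymax', 'confidence']])
+```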
+ + + +
+### Inference with detect.py
+
+`detect.py` runs inference on a variety of sources, downloading [models](https://github.com/ultralytics/yolov5/tree/master/models) automatically from
+the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases) and saving results to `runs/detect`.
+
+```bash
+python detect.py --source 0  # webcam
+                          img.jpg  # image
+                          vid.mp4  # video
+                          path/  # directory
+                          path/*.jpg  # glob
+                          'https://youtu.be/Zgi9g1ksQHc'  # YouTube
+                          'rtsp://example.com/media.mp4'  # RTSP, RTMP, HTTP stream
+```
+ +
+### Training
+
+The commands below reproduce YOLOv5 [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh)
+results. [Models](https://github.com/ultralytics/yolov5/tree/master/models)
+and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest
+YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). Training times for YOLOv5n/s/m/l/x are
+1/2/4/6/8 days on a V100 GPU ([Multi-GPU](https://github.com/ultralytics/yolov5/issues/475) times faster). Use the
+largest `--batch-size` possible, or pass `--batch-size -1` for
+YOLOv5 [AutoBatch](https://github.com/ultralytics/yolov5/pull/5092). Batch sizes shown are for V100-16GB.
+
+```bash
+python train.py --data coco.yaml --cfg yolov5n.yaml --weights '' --batch-size 128
+                                       yolov5s                                64
+                                       yolov5m                                40
+                                       yolov5l                                24
+                                       yolov5x                                16
+```
+ +
+### Tutorials
+
+* [Train Custom Data](https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data)  🚀 RECOMMENDED
+* [Tips for Best Training Results](https://github.com/ultralytics/yolov5/wiki/Tips-for-Best-Training-Results)  ☘️ RECOMMENDED
+* [Weights & Biases Logging](https://github.com/ultralytics/yolov5/issues/1289)  🌟 NEW
+* [Roboflow for Datasets, Labeling, and Active Learning](https://github.com/ultralytics/yolov5/issues/4975)  🌟 NEW
+* [Multi-GPU Training](https://github.com/ultralytics/yolov5/issues/475)
+* [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36)  ⭐ NEW
+* [TFLite, ONNX, CoreML, TensorRT Export](https://github.com/ultralytics/yolov5/issues/251) 🚀
+* [Test-Time Augmentation (TTA)](https://github.com/ultralytics/yolov5/issues/303)
+* [Model Ensembling](https://github.com/ultralytics/yolov5/issues/318)
+* [Model Pruning/Sparsity](https://github.com/ultralytics/yolov5/issues/304)
+* [Hyperparameter Evolution](https://github.com/ultralytics/yolov5/issues/607)
+* [Transfer Learning with Frozen Layers](https://github.com/ultralytics/yolov5/issues/1314)  ⭐ NEW
+* [TensorRT Deployment](https://github.com/wang-xinyu/tensorrtx)
+
+## Environments
+
+Get started in seconds with our verified environments.
+
+## Integrations
+
+|Weights and Biases|Roboflow ⭐ NEW|
+|:-:|:-:|
+|Automatically track and visualize all your YOLOv5 training runs in the cloud with [Weights & Biases](https://wandb.ai/site?utm_campaign=repo_yolo_readme)|Label and export your custom datasets directly to YOLOv5 for training with [Roboflow](https://roboflow.com/?ref=ultralytics)|
+
+## Why YOLOv5
+
+YOLOv5-P5 640 Figure
+
+### Figure Notes
+
+* **COCO AP val** denotes the mAP@0.5:0.95 metric measured on the 5000-image [COCO val2017](http://cocodataset.org) dataset over various inference sizes from 256 to 1536.
+* **GPU Speed** measures average inference time per image on the [COCO val2017](http://cocodataset.org) dataset using an [AWS p3.2xlarge](https://aws.amazon.com/ec2/instance-types/p3/) V100 instance at batch-size 32.
+* **EfficientDet** data from [google/automl](https://github.com/google/automl) at batch size 8.
+* **Reproduce** by `python val.py --task study --data coco.yaml --iou 0.7 --weights yolov5n6.pt yolov5s6.pt yolov5m6.pt yolov5l6.pt yolov5x6.pt`
+
+### Pretrained Checkpoints
+
+[assets]: https://github.com/ultralytics/yolov5/releases
+
+[TTA]: https://github.com/ultralytics/yolov5/issues/303
+
+|Model |size (pixels) |mAP val 0.5:0.95 |mAP val 0.5 |Speed CPU b1 (ms) |Speed V100 b1 (ms) |Speed V100 b32 (ms) |params (M) |FLOPs @640 (B)
+|--- |--- |--- |--- |--- |--- |--- |--- |---
+|[YOLOv5n][assets] |640 |28.0 |45.7 |**45** |**6.3** |**0.6** |**1.9** |**4.5**
+|[YOLOv5s][assets] |640 |37.4 |56.8 |98 |6.4 |0.9 |7.2 |16.5
+|[YOLOv5m][assets] |640 |45.4 |64.1 |224 |8.2 |1.7 |21.2 |49.0
+|[YOLOv5l][assets] |640 |49.0 |67.3 |430 |10.1 |2.7 |46.5 |109.1
+|[YOLOv5x][assets] |640 |50.7 |68.9 |766 |12.1 |4.8 |86.7 |205.7
+| | | | | | | | |
+|[YOLOv5n6][assets] |1280 |36.0 |54.4 |153 |8.1 |2.1 |3.2 |4.6
+|[YOLOv5s6][assets] |1280 |44.8 |63.7 |385 |8.2 |3.6 |16.8 |12.6
+|[YOLOv5m6][assets] |1280 |51.3 |69.3 |887 |11.1 |6.8 |35.7 |50.0
+|[YOLOv5l6][assets] |1280 |53.7 |71.3 |1784 |15.8 |10.5 |76.8 |111.4
+|[YOLOv5x6][assets] |1280 |55.0 |72.7 |3136 |26.2 |19.4 |140.7 |209.8
+|[YOLOv5x6][assets] + [TTA][TTA] |1536 |**55.8** |**72.7** |- |- |- |- |-
+
+### Table Notes
+
+* All checkpoints are trained to 300 epochs with default settings. Nano and Small models use [hyp.scratch-low.yaml](https://github.com/ultralytics/yolov5/blob/master/data/hyps/hyp.scratch-low.yaml) hyps, all others use [hyp.scratch-high.yaml](https://github.com/ultralytics/yolov5/blob/master/data/hyps/hyp.scratch-high.yaml).
+* **mAP val** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset. Reproduce by `python val.py --data coco.yaml --img 640 --conf 0.001 --iou 0.65`
+* **Speed** averaged over COCO val images using an [AWS p3.2xlarge](https://aws.amazon.com/ec2/instance-types/p3/) instance. NMS times (~1 ms/img) not included. Reproduce by `python val.py --data coco.yaml --img 640 --task speed --batch 1`
+* **TTA** [Test Time Augmentation](https://github.com/ultralytics/yolov5/issues/303) includes reflection and scale augmentations. Reproduce by `python val.py --data coco.yaml --img 1536 --iou 0.7 --augment`
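+As a reading aid for the table, the P6 rows (YOLOv5n6 to YOLOv5x6) are 1280-input models, so it helps to pass the matching inference size when loading one through PyTorch Hub. A minimal sketch; the 'yolov5s6' choice and the image URL are illustrative:
+
+```python
+import torch
+
+# Load a P6 checkpoint from the table and run it at its native 1280 size.
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s6')
+results = model('https://ultralytics.com/images/zidane.jpg', size=1280)  # match the table's eval size
+results.print()
+```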
+
+## Contribute
+
+We love your input! We want to make contributing to YOLOv5 as easy and transparent as possible. Please see our [Contributing Guide](CONTRIBUTING.md) to get started, and fill out the [YOLOv5 Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experiences. Thank you to all our contributors!
+
+## Contact
+
+For YOLOv5 bugs and feature requests please visit [GitHub Issues](https://github.com/ultralytics/yolov5/issues). For business inquiries or
+professional support requests please visit [https://ultralytics.com/contact](https://ultralytics.com/contact).
+
+
diff --git a/detector/YOLOv5/__init__.py b/detector/YOLOv5/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..79344d6235260569b7a13f116b9fe6f8d7798f4b
--- /dev/null
+++ b/detector/YOLOv5/__init__.py
@@ -0,0 +1,2 @@
+from .detector import YOLOv5
+
diff --git a/detector/YOLOv5/data/Argoverse.yaml b/detector/YOLOv5/data/Argoverse.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..312791b33a2d8ef02aceb65cf98985e9b1dd9ef1
--- /dev/null
+++ b/detector/YOLOv5/data/Argoverse.yaml
@@ -0,0 +1,67 @@
+# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
+# Argoverse-HD dataset (ring-front-center camera) http://www.cs.cmu.edu/~mengtial/proj/streaming/ by Argo AI
+# Example usage: python train.py --data Argoverse.yaml
+# parent
+# ├── yolov5
+# └── datasets
+#     └── Argoverse  ← downloads here
+
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: ../datasets/Argoverse  # dataset root dir
+train: Argoverse-1.1/images/train/  # train images (relative to 'path') 39384 images
+val: Argoverse-1.1/images/val/  # val images (relative to 'path') 15062 images
+test: Argoverse-1.1/images/test/  # test images (optional) https://eval.ai/web/challenges/challenge-page/800/overview
+
+# Classes
+nc: 8  # number of classes
+names: ['person', 'bicycle', 'car', 'motorcycle', 'bus', 'truck', 'traffic_light', 'stop_sign']  # class names
+
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import json
+
+  from tqdm import tqdm
+  from utils.general import download, Path
+
+
+  def argoverse2yolo(set):
+      labels = {}
+      a = json.load(open(set, "rb"))
+      for annot in tqdm(a['annotations'], desc=f"Converting {set} to YOLOv5 format..."):
+          img_id = annot['image_id']
+          img_name = a['images'][img_id]['name']
+          img_label_name = img_name[:-3] + "txt"
+
+          cls = annot['category_id']  # instance class id
+          x_center, y_center, width, height = annot['bbox']
+          x_center = (x_center + width / 2) / 1920.0  # offset and scale
+          y_center = (y_center + height / 2) / 1200.0  # offset and scale
+          width /= 1920.0  # scale
+          height /= 1200.0  # scale
+
+          img_dir = set.parents[2] / 'Argoverse-1.1' / 'labels' / a['seq_dirs'][a['images'][annot['image_id']]['sid']]
+          if not img_dir.exists():
+              img_dir.mkdir(parents=True, exist_ok=True)
+
+          k = str(img_dir / img_label_name)
+          if k not in labels:
+              labels[k] = []
+          labels[k].append(f"{cls} {x_center} {y_center} {width} {height}\n")
+
+      for k in labels:
+          with open(k, "w") as f:
+              f.writelines(labels[k])
+
+
+  # Download
+  dir = Path('../datasets/Argoverse')  # dataset root dir
+  urls = ['https://argoverse-hd.s3.us-east-2.amazonaws.com/Argoverse-HD-Full.zip']
+  download(urls, dir=dir, delete=False)
+
+  # Convert
+  annotations_dir = 'Argoverse-HD/annotations/'
+  (dir / 'Argoverse-1.1' / 'tracking').rename(dir / 'Argoverse-1.1' / 'images')  # rename 'tracking' to 'images'
+  for d in "train.json", "val.json":
+      argoverse2yolo(dir / annotations_dir / d)  # convert Argoverse annotations to YOLO labels
diff --git a/detector/YOLOv5/data/GlobalWheat2020.yaml b/detector/YOLOv5/data/GlobalWheat2020.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..869dace0be2b6de91b7fb58eea18e1834321a19a
--- /dev/null
+++ b/detector/YOLOv5/data/GlobalWheat2020.yaml
@@ -0,0 +1,53 @@
+# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
+# Global Wheat 2020 dataset http://www.global-wheat.com/ by University of Saskatchewan
+#
Example usage: python train.py --data GlobalWheat2020.yaml +# parent +# ├── yolov5 +# └── datasets +# └── GlobalWheat2020 ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/GlobalWheat2020 # dataset root dir +train: # train images (relative to 'path') 3422 images + - images/arvalis_1 + - images/arvalis_2 + - images/arvalis_3 + - images/ethz_1 + - images/rres_1 + - images/inrae_1 + - images/usask_1 +val: # val images (relative to 'path') 748 images (WARNING: train set contains ethz_1) + - images/ethz_1 +test: # test images (optional) 1276 images + - images/utokyo_1 + - images/utokyo_2 + - images/nau_1 + - images/uq_1 + +# Classes +nc: 1 # number of classes +names: ['wheat_head'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + from utils.general import download, Path + + # Download + dir = Path(yaml['path']) # dataset root dir + urls = ['https://zenodo.org/record/4298502/files/global-wheat-codalab-official.zip', + 'https://github.com/ultralytics/yolov5/releases/download/v1.0/GlobalWheat2020_labels.zip'] + download(urls, dir=dir) + + # Make Directories + for p in 'annotations', 'images', 'labels': + (dir / p).mkdir(parents=True, exist_ok=True) + + # Move + for p in 'arvalis_1', 'arvalis_2', 'arvalis_3', 'ethz_1', 'rres_1', 'inrae_1', 'usask_1', \ + 'utokyo_1', 'utokyo_2', 'nau_1', 'uq_1': + (dir / p).rename(dir / 'images' / p) # move to /images + f = (dir / p).with_suffix('.json') # json file + if f.exists(): + f.rename((dir / 'annotations' / p).with_suffix('.json')) # move to /annotations diff --git a/detector/YOLOv5/data/Objects365.yaml b/detector/YOLOv5/data/Objects365.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4c7cf3fdb2c8d1526b92aaf4276844f09a73a3d7 --- /dev/null +++ b/detector/YOLOv5/data/Objects365.yaml @@ -0,0 +1,112 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Objects365 dataset https://www.objects365.org/ by Megvii +# Example usage: python train.py --data Objects365.yaml +# parent +# ├── yolov5 +# └── datasets +# └── Objects365 ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
+path: ../datasets/Objects365 # dataset root dir +train: images/train # train images (relative to 'path') 1742289 images +val: images/val # val images (relative to 'path') 80000 images +test: # test images (optional) + +# Classes +nc: 365 # number of classes +names: ['Person', 'Sneakers', 'Chair', 'Other Shoes', 'Hat', 'Car', 'Lamp', 'Glasses', 'Bottle', 'Desk', 'Cup', + 'Street Lights', 'Cabinet/shelf', 'Handbag/Satchel', 'Bracelet', 'Plate', 'Picture/Frame', 'Helmet', 'Book', + 'Gloves', 'Storage box', 'Boat', 'Leather Shoes', 'Flower', 'Bench', 'Potted Plant', 'Bowl/Basin', 'Flag', + 'Pillow', 'Boots', 'Vase', 'Microphone', 'Necklace', 'Ring', 'SUV', 'Wine Glass', 'Belt', 'Monitor/TV', + 'Backpack', 'Umbrella', 'Traffic Light', 'Speaker', 'Watch', 'Tie', 'Trash bin Can', 'Slippers', 'Bicycle', + 'Stool', 'Barrel/bucket', 'Van', 'Couch', 'Sandals', 'Basket', 'Drum', 'Pen/Pencil', 'Bus', 'Wild Bird', + 'High Heels', 'Motorcycle', 'Guitar', 'Carpet', 'Cell Phone', 'Bread', 'Camera', 'Canned', 'Truck', + 'Traffic cone', 'Cymbal', 'Lifesaver', 'Towel', 'Stuffed Toy', 'Candle', 'Sailboat', 'Laptop', 'Awning', + 'Bed', 'Faucet', 'Tent', 'Horse', 'Mirror', 'Power outlet', 'Sink', 'Apple', 'Air Conditioner', 'Knife', + 'Hockey Stick', 'Paddle', 'Pickup Truck', 'Fork', 'Traffic Sign', 'Balloon', 'Tripod', 'Dog', 'Spoon', 'Clock', + 'Pot', 'Cow', 'Cake', 'Dinning Table', 'Sheep', 'Hanger', 'Blackboard/Whiteboard', 'Napkin', 'Other Fish', + 'Orange/Tangerine', 'Toiletry', 'Keyboard', 'Tomato', 'Lantern', 'Machinery Vehicle', 'Fan', + 'Green Vegetables', 'Banana', 'Baseball Glove', 'Airplane', 'Mouse', 'Train', 'Pumpkin', 'Soccer', 'Skiboard', + 'Luggage', 'Nightstand', 'Tea pot', 'Telephone', 'Trolley', 'Head Phone', 'Sports Car', 'Stop Sign', + 'Dessert', 'Scooter', 'Stroller', 'Crane', 'Remote', 'Refrigerator', 'Oven', 'Lemon', 'Duck', 'Baseball Bat', + 'Surveillance Camera', 'Cat', 'Jug', 'Broccoli', 'Piano', 'Pizza', 'Elephant', 'Skateboard', 'Surfboard', + 'Gun', 'Skating and Skiing shoes', 'Gas stove', 'Donut', 'Bow Tie', 'Carrot', 'Toilet', 'Kite', 'Strawberry', + 'Other Balls', 'Shovel', 'Pepper', 'Computer Box', 'Toilet Paper', 'Cleaning Products', 'Chopsticks', + 'Microwave', 'Pigeon', 'Baseball', 'Cutting/chopping Board', 'Coffee Table', 'Side Table', 'Scissors', + 'Marker', 'Pie', 'Ladder', 'Snowboard', 'Cookies', 'Radiator', 'Fire Hydrant', 'Basketball', 'Zebra', 'Grape', + 'Giraffe', 'Potato', 'Sausage', 'Tricycle', 'Violin', 'Egg', 'Fire Extinguisher', 'Candy', 'Fire Truck', + 'Billiards', 'Converter', 'Bathtub', 'Wheelchair', 'Golf Club', 'Briefcase', 'Cucumber', 'Cigar/Cigarette', + 'Paint Brush', 'Pear', 'Heavy Truck', 'Hamburger', 'Extractor', 'Extension Cord', 'Tong', 'Tennis Racket', + 'Folder', 'American Football', 'earphone', 'Mask', 'Kettle', 'Tennis', 'Ship', 'Swing', 'Coffee Machine', + 'Slide', 'Carriage', 'Onion', 'Green beans', 'Projector', 'Frisbee', 'Washing Machine/Drying Machine', + 'Chicken', 'Printer', 'Watermelon', 'Saxophone', 'Tissue', 'Toothbrush', 'Ice cream', 'Hot-air balloon', + 'Cello', 'French Fries', 'Scale', 'Trophy', 'Cabbage', 'Hot dog', 'Blender', 'Peach', 'Rice', 'Wallet/Purse', + 'Volleyball', 'Deer', 'Goose', 'Tape', 'Tablet', 'Cosmetics', 'Trumpet', 'Pineapple', 'Golf Ball', + 'Ambulance', 'Parking meter', 'Mango', 'Key', 'Hurdle', 'Fishing Rod', 'Medal', 'Flute', 'Brush', 'Penguin', + 'Megaphone', 'Corn', 'Lettuce', 'Garlic', 'Swan', 'Helicopter', 'Green Onion', 'Sandwich', 'Nuts', + 'Speed Limit Sign', 'Induction Cooker', 'Broom', 'Trombone', 
'Plum', 'Rickshaw', 'Goldfish', 'Kiwi fruit', + 'Router/modem', 'Poker Card', 'Toaster', 'Shrimp', 'Sushi', 'Cheese', 'Notepaper', 'Cherry', 'Pliers', 'CD', + 'Pasta', 'Hammer', 'Cue', 'Avocado', 'Hamimelon', 'Flask', 'Mushroom', 'Screwdriver', 'Soap', 'Recorder', + 'Bear', 'Eggplant', 'Board Eraser', 'Coconut', 'Tape Measure/Ruler', 'Pig', 'Showerhead', 'Globe', 'Chips', + 'Steak', 'Crosswalk Sign', 'Stapler', 'Camel', 'Formula 1', 'Pomegranate', 'Dishwasher', 'Crab', + 'Hoverboard', 'Meat ball', 'Rice Cooker', 'Tuba', 'Calculator', 'Papaya', 'Antelope', 'Parrot', 'Seal', + 'Butterfly', 'Dumbbell', 'Donkey', 'Lion', 'Urinal', 'Dolphin', 'Electric Drill', 'Hair Dryer', 'Egg tart', + 'Jellyfish', 'Treadmill', 'Lighter', 'Grapefruit', 'Game board', 'Mop', 'Radish', 'Baozi', 'Target', 'French', + 'Spring Rolls', 'Monkey', 'Rabbit', 'Pencil Case', 'Yak', 'Red Cabbage', 'Binoculars', 'Asparagus', 'Barbell', + 'Scallop', 'Noddles', 'Comb', 'Dumpling', 'Oyster', 'Table Tennis paddle', 'Cosmetics Brush/Eyeliner Pencil', + 'Chainsaw', 'Eraser', 'Lobster', 'Durian', 'Okra', 'Lipstick', 'Cosmetics Mirror', 'Curling', 'Table Tennis'] + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + from pycocotools.coco import COCO + from tqdm import tqdm + + from utils.general import Path, download, np, xyxy2xywhn + + # Make Directories + dir = Path(yaml['path']) # dataset root dir + for p in 'images', 'labels': + (dir / p).mkdir(parents=True, exist_ok=True) + for q in 'train', 'val': + (dir / p / q).mkdir(parents=True, exist_ok=True) + + # Train, Val Splits + for split, patches in [('train', 50 + 1), ('val', 43 + 1)]: + print(f"Processing {split} in {patches} patches ...") + images, labels = dir / 'images' / split, dir / 'labels' / split + + # Download + url = f"https://dorc.ks3-cn-beijing.ksyun.com/data-set/2020Objects365%E6%95%B0%E6%8D%AE%E9%9B%86/{split}/" + if split == 'train': + download([f'{url}zhiyuan_objv2_{split}.tar.gz'], dir=dir, delete=False) # annotations json + download([f'{url}patch{i}.tar.gz' for i in range(patches)], dir=images, curl=True, delete=False, threads=8) + elif split == 'val': + download([f'{url}zhiyuan_objv2_{split}.json'], dir=dir, delete=False) # annotations json + download([f'{url}images/v1/patch{i}.tar.gz' for i in range(15 + 1)], dir=images, curl=True, delete=False, threads=8) + download([f'{url}images/v2/patch{i}.tar.gz' for i in range(16, patches)], dir=images, curl=True, delete=False, threads=8) + + # Move + for f in tqdm(images.rglob('*.jpg'), desc=f'Moving {split} images'): + f.rename(images / f.name) # move to /images/{split} + + # Labels + coco = COCO(dir / f'zhiyuan_objv2_{split}.json') + names = [x["name"] for x in coco.loadCats(coco.getCatIds())] + for cid, cat in enumerate(names): + catIds = coco.getCatIds(catNms=[cat]) + imgIds = coco.getImgIds(catIds=catIds) + for im in tqdm(coco.loadImgs(imgIds), desc=f'Class {cid + 1}/{len(names)} {cat}'): + width, height = im["width"], im["height"] + path = Path(im["file_name"]) # image filename + try: + with open(labels / path.with_suffix('.txt').name, 'a') as file: + annIds = coco.getAnnIds(imgIds=im["id"], catIds=catIds, iscrowd=None) + for a in coco.loadAnns(annIds): + x, y, w, h = a['bbox'] # bounding box in xywh (xy top-left corner) + xyxy = np.array([x, y, x + w, y + h])[None] # pixels(1,4) + x, y, w, h = xyxy2xywhn(xyxy, w=width, h=height, clip=True)[0] # normalized and clipped + file.write(f"{cid} {x:.5f} {y:.5f} {w:.5f} {h:.5f}\n") + 
except Exception as e: + print(e) diff --git a/detector/YOLOv5/data/SKU-110K.yaml b/detector/YOLOv5/data/SKU-110K.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9481b7a04aee09b7be5581a80f2671d3194da015 --- /dev/null +++ b/detector/YOLOv5/data/SKU-110K.yaml @@ -0,0 +1,52 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# SKU-110K retail items dataset https://github.com/eg4000/SKU110K_CVPR19 by Trax Retail +# Example usage: python train.py --data SKU-110K.yaml +# parent +# ├── yolov5 +# └── datasets +# └── SKU-110K ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/SKU-110K # dataset root dir +train: train.txt # train images (relative to 'path') 8219 images +val: val.txt # val images (relative to 'path') 588 images +test: test.txt # test images (optional) 2936 images + +# Classes +nc: 1 # number of classes +names: ['object'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + import shutil + from tqdm import tqdm + from utils.general import np, pd, Path, download, xyxy2xywh + + # Download + dir = Path(yaml['path']) # dataset root dir + parent = Path(dir.parent) # download dir + urls = ['http://trax-geometry.s3.amazonaws.com/cvpr_challenge/SKU110K_fixed.tar.gz'] + download(urls, dir=parent, delete=False) + + # Rename directories + if dir.exists(): + shutil.rmtree(dir) + (parent / 'SKU110K_fixed').rename(dir) # rename dir + (dir / 'labels').mkdir(parents=True, exist_ok=True) # create labels dir + + # Convert labels + names = 'image', 'x1', 'y1', 'x2', 'y2', 'class', 'image_width', 'image_height' # column names + for d in 'annotations_train.csv', 'annotations_val.csv', 'annotations_test.csv': + x = pd.read_csv(dir / 'annotations' / d, names=names).values # annotations + images, unique_images = x[:, 0], np.unique(x[:, 0]) + with open((dir / d).with_suffix('.txt').__str__().replace('annotations_', ''), 'w') as f: + f.writelines(f'./images/{s}\n' for s in unique_images) + for im in tqdm(unique_images, desc=f'Converting {dir / d}'): + cls = 0 # single-class dataset + with open((dir / 'labels' / im).with_suffix('.txt'), 'a') as f: + for r in x[images == im]: + w, h = r[6], r[7] # image width, height + xywh = xyxy2xywh(np.array([[r[1] / w, r[2] / h, r[3] / w, r[4] / h]]))[0] # instance + f.write(f"{cls} {xywh[0]:.5f} {xywh[1]:.5f} {xywh[2]:.5f} {xywh[3]:.5f}\n") # write label diff --git a/detector/YOLOv5/data/VOC.yaml b/detector/YOLOv5/data/VOC.yaml new file mode 100644 index 0000000000000000000000000000000000000000..975d56466de13b6568b83423847b6b85c933fe4c --- /dev/null +++ b/detector/YOLOv5/data/VOC.yaml @@ -0,0 +1,80 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# PASCAL VOC dataset http://host.robots.ox.ac.uk/pascal/VOC by University of Oxford +# Example usage: python train.py --data VOC.yaml +# parent +# ├── yolov5 +# └── datasets +# └── VOC ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
+path: ../datasets/VOC +train: # train images (relative to 'path') 16551 images + - images/train2012 + - images/train2007 + - images/val2012 + - images/val2007 +val: # val images (relative to 'path') 4952 images + - images/test2007 +test: # test images (optional) + - images/test2007 + +# Classes +nc: 20 # number of classes +names: ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', + 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + import xml.etree.ElementTree as ET + + from tqdm import tqdm + from utils.general import download, Path + + + def convert_label(path, lb_path, year, image_id): + def convert_box(size, box): + dw, dh = 1. / size[0], 1. / size[1] + x, y, w, h = (box[0] + box[1]) / 2.0 - 1, (box[2] + box[3]) / 2.0 - 1, box[1] - box[0], box[3] - box[2] + return x * dw, y * dh, w * dw, h * dh + + in_file = open(path / f'VOC{year}/Annotations/{image_id}.xml') + out_file = open(lb_path, 'w') + tree = ET.parse(in_file) + root = tree.getroot() + size = root.find('size') + w = int(size.find('width').text) + h = int(size.find('height').text) + + for obj in root.iter('object'): + cls = obj.find('name').text + if cls in yaml['names'] and not int(obj.find('difficult').text) == 1: + xmlbox = obj.find('bndbox') + bb = convert_box((w, h), [float(xmlbox.find(x).text) for x in ('xmin', 'xmax', 'ymin', 'ymax')]) + cls_id = yaml['names'].index(cls) # class id + out_file.write(" ".join([str(a) for a in (cls_id, *bb)]) + '\n') + + + # Download + dir = Path(yaml['path']) # dataset root dir + url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/' + urls = [url + 'VOCtrainval_06-Nov-2007.zip', # 446MB, 5012 images + url + 'VOCtest_06-Nov-2007.zip', # 438MB, 4953 images + url + 'VOCtrainval_11-May-2012.zip'] # 1.95GB, 17126 images + download(urls, dir=dir / 'images', delete=False) + + # Convert + path = dir / f'images/VOCdevkit' + for year, image_set in ('2012', 'train'), ('2012', 'val'), ('2007', 'train'), ('2007', 'val'), ('2007', 'test'): + imgs_path = dir / 'images' / f'{image_set}{year}' + lbs_path = dir / 'labels' / f'{image_set}{year}' + imgs_path.mkdir(exist_ok=True, parents=True) + lbs_path.mkdir(exist_ok=True, parents=True) + + image_ids = open(path / f'VOC{year}/ImageSets/Main/{image_set}.txt').read().strip().split() + for id in tqdm(image_ids, desc=f'{image_set}{year}'): + f = path / f'VOC{year}/JPEGImages/{id}.jpg' # old img path + lb_path = (lbs_path / f.name).with_suffix('.txt') # new label path + f.rename(imgs_path / f.name) # move image + convert_label(path, lb_path, year, id) # convert labels to YOLO format diff --git a/detector/YOLOv5/data/VisDrone.yaml b/detector/YOLOv5/data/VisDrone.yaml new file mode 100644 index 0000000000000000000000000000000000000000..83a5c7d55e063092141a2430b49d9017d56e72cd --- /dev/null +++ b/detector/YOLOv5/data/VisDrone.yaml @@ -0,0 +1,61 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# VisDrone2019-DET dataset https://github.com/VisDrone/VisDrone-Dataset by Tianjin University +# Example usage: python train.py --data VisDrone.yaml +# parent +# ├── yolov5 +# └── datasets +# └── VisDrone ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
+path: ../datasets/VisDrone # dataset root dir +train: VisDrone2019-DET-train/images # train images (relative to 'path') 6471 images +val: VisDrone2019-DET-val/images # val images (relative to 'path') 548 images +test: VisDrone2019-DET-test-dev/images # test images (optional) 1610 images + +# Classes +nc: 10 # number of classes +names: ['pedestrian', 'people', 'bicycle', 'car', 'van', 'truck', 'tricycle', 'awning-tricycle', 'bus', 'motor'] + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + from utils.general import download, os, Path + + def visdrone2yolo(dir): + from PIL import Image + from tqdm import tqdm + + def convert_box(size, box): + # Convert VisDrone box to YOLO xywh box + dw = 1. / size[0] + dh = 1. / size[1] + return (box[0] + box[2] / 2) * dw, (box[1] + box[3] / 2) * dh, box[2] * dw, box[3] * dh + + (dir / 'labels').mkdir(parents=True, exist_ok=True) # make labels directory + pbar = tqdm((dir / 'annotations').glob('*.txt'), desc=f'Converting {dir}') + for f in pbar: + img_size = Image.open((dir / 'images' / f.name).with_suffix('.jpg')).size + lines = [] + with open(f, 'r') as file: # read annotation.txt + for row in [x.split(',') for x in file.read().strip().splitlines()]: + if row[4] == '0': # VisDrone 'ignored regions' class 0 + continue + cls = int(row[5]) - 1 + box = convert_box(img_size, tuple(map(int, row[:4]))) + lines.append(f"{cls} {' '.join(f'{x:.6f}' for x in box)}\n") + with open(str(f).replace(os.sep + 'annotations' + os.sep, os.sep + 'labels' + os.sep), 'w') as fl: + fl.writelines(lines) # write label.txt + + + # Download + dir = Path(yaml['path']) # dataset root dir + urls = ['https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-train.zip', + 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-val.zip', + 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-dev.zip', + 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-challenge.zip'] + download(urls, dir=dir) + + # Convert + for d in 'VisDrone2019-DET-train', 'VisDrone2019-DET-val', 'VisDrone2019-DET-test-dev': + visdrone2yolo(dir / d) # convert VisDrone annotations to YOLO labels diff --git a/detector/YOLOv5/data/coco.yaml b/detector/YOLOv5/data/coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3ed7e48a218583ff579c2b1e406cf4ec5f03e5f5 --- /dev/null +++ b/detector/YOLOv5/data/coco.yaml @@ -0,0 +1,44 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# COCO 2017 dataset http://cocodataset.org by Microsoft +# Example usage: python train.py --data coco.yaml +# parent +# ├── yolov5 +# └── datasets +# └── coco ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
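+# Label convention shared by these configs: every image path/images/xxx.jpg
+# pairs with a path/labels/xxx.txt holding one
+#   <class> <x_center> <y_center> <width> <height>
+# line per object, all values normalized to 0-1, e.g. (made-up numbers)
+#   0 0.481 0.634 0.119 0.335   # one 'person' box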
+path: ../datasets/coco # dataset root dir +train: train2017.txt # train images (relative to 'path') 118287 images +val: val2017.txt # val images (relative to 'path') 5000 images +test: test-dev2017.txt # 20288 of 40670 images, submit to https://competitions.codalab.org/competitions/20794 + +# Classes +nc: 80 # number of classes +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] # class names + + +# Download script/URL (optional) +download: | + from utils.general import download, Path + + # Download labels + segments = False # segment or box labels + dir = Path(yaml['path']) # dataset root dir + url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/' + urls = [url + ('coco2017labels-segments.zip' if segments else 'coco2017labels.zip')] # labels + download(urls, dir=dir.parent) + + # Download data + urls = ['http://images.cocodataset.org/zips/train2017.zip', # 19G, 118k images + 'http://images.cocodataset.org/zips/val2017.zip', # 1G, 5k images + 'http://images.cocodataset.org/zips/test2017.zip'] # 7G, 41k images (optional) + download(urls, dir=dir / 'images', threads=3) diff --git a/detector/YOLOv5/data/coco128.yaml b/detector/YOLOv5/data/coco128.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d07c704407a183117387da322cba1d3bd5602889 --- /dev/null +++ b/detector/YOLOv5/data/coco128.yaml @@ -0,0 +1,30 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# COCO128 dataset https://www.kaggle.com/ultralytics/coco128 (first 128 images from COCO train2017) by Ultralytics +# Example usage: python train.py --data coco128.yaml +# parent +# ├── yolov5 +# └── datasets +# └── coco128 ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
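+# train and val below intentionally point at the same 128 images; being able
+# to overfit coco128 quickly is a handy sanity check for a training setup.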
+path: ../datasets/coco128 # dataset root dir +train: images/train2017 # train images (relative to 'path') 128 images +val: images/train2017 # val images (relative to 'path') 128 images +test: # test images (optional) + +# Classes +nc: 80 # number of classes +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] # class names + + +# Download script/URL (optional) +download: https://ultralytics.com/assets/coco128.zip diff --git a/detector/YOLOv5/data/hyps/hyp.Objects365.yaml b/detector/YOLOv5/data/hyps/hyp.Objects365.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74971740f7c73bf661950f339792b790a26b2b1c --- /dev/null +++ b/detector/YOLOv5/data/hyps/hyp.Objects365.yaml @@ -0,0 +1,34 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Hyperparameters for Objects365 training +# python train.py --weights yolov5m.pt --data Objects365.yaml --evolve +# See Hyperparameter Evolution tutorial for details https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.00258 +lrf: 0.17 +momentum: 0.779 +weight_decay: 0.00058 +warmup_epochs: 1.33 +warmup_momentum: 0.86 +warmup_bias_lr: 0.0711 +box: 0.0539 +cls: 0.299 +cls_pw: 0.825 +obj: 0.632 +obj_pw: 1.0 +iou_t: 0.2 +anchor_t: 3.44 +anchors: 3.2 +fl_gamma: 0.0 +hsv_h: 0.0188 +hsv_s: 0.704 +hsv_v: 0.36 +degrees: 0.0 +translate: 0.0902 +scale: 0.491 +shear: 0.0 +perspective: 0.0 +flipud: 0.0 +fliplr: 0.5 +mosaic: 1.0 +mixup: 0.0 +copy_paste: 0.0 diff --git a/detector/YOLOv5/data/hyps/hyp.VOC.yaml b/detector/YOLOv5/data/hyps/hyp.VOC.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aa952c501969c48686be9e5a8fa2c2bc0c4514eb --- /dev/null +++ b/detector/YOLOv5/data/hyps/hyp.VOC.yaml @@ -0,0 +1,40 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Hyperparameters for VOC training +# python train.py --batch 128 --weights yolov5m6.pt --data VOC.yaml --epochs 50 --img 512 --hyp hyp.scratch-med.yaml --evolve +# See Hyperparameter Evolution tutorial for details https://github.com/ultralytics/yolov5#tutorials + +# YOLOv5 Hyperparameter Evolution Results +# Best generation: 319 +# Last generation: 434 +# metrics/precision, metrics/recall, metrics/mAP_0.5, metrics/mAP_0.5:0.95, val/box_loss, val/obj_loss, val/cls_loss +# 0.86236, 0.86184, 0.91274, 0.72647, 0.0077056, 0.0042449, 0.0013846 + +lr0: 0.0033 +lrf: 0.15184 +momentum: 0.74747 +weight_decay: 0.00025 +warmup_epochs: 3.4278 +warmup_momentum: 0.59032 +warmup_bias_lr: 0.18742 +box: 0.02 +cls: 0.21563 +cls_pw: 0.5 +obj: 0.50843 +obj_pw: 0.6729 +iou_t: 0.2 +anchor_t: 3.4172 +fl_gamma: 0.0 +hsv_h: 0.01032 +hsv_s: 0.5562 +hsv_v: 0.28255 +degrees: 0.0 +translate: 0.04575 +scale: 0.73711 +shear: 0.0 +perspective: 0.0 +flipud: 0.0 +fliplr: 0.5 +mosaic: 0.87158 
+mixup: 0.04294 +copy_paste: 0.0 +anchors: 3.3556 diff --git a/detector/YOLOv5/data/hyps/hyp.scratch-high.yaml b/detector/YOLOv5/data/hyps/hyp.scratch-high.yaml new file mode 100644 index 0000000000000000000000000000000000000000..123cc8407413e9c130e21a3b5dd8ed33a3632db5 --- /dev/null +++ b/detector/YOLOv5/data/hyps/hyp.scratch-high.yaml @@ -0,0 +1,34 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Hyperparameters for high-augmentation COCO training from scratch +# python train.py --batch 32 --cfg yolov5m6.yaml --weights '' --data coco.yaml --img 1280 --epochs 300 +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.1 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.3 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 0.7 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.9 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.1 # image mixup (probability) +copy_paste: 0.1 # segment copy-paste (probability) diff --git a/detector/YOLOv5/data/hyps/hyp.scratch-low.yaml b/detector/YOLOv5/data/hyps/hyp.scratch-low.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b9ef1d55a3b6ec8873ac87d6f4aa0ca081868bd6 --- /dev/null +++ b/detector/YOLOv5/data/hyps/hyp.scratch-low.yaml @@ -0,0 +1,34 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Hyperparameters for low-augmentation COCO training from scratch +# python train.py --batch 64 --cfg yolov5n6.yaml --weights '' --data coco.yaml --img 640 --epochs 300 --linear +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.01 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.5 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 1.0 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value 
augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.5 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.0 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) diff --git a/detector/YOLOv5/data/hyps/hyp.scratch-med.yaml b/detector/YOLOv5/data/hyps/hyp.scratch-med.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d6867d7557bac73db7f8787db60cff4c4c64b440 --- /dev/null +++ b/detector/YOLOv5/data/hyps/hyp.scratch-med.yaml @@ -0,0 +1,34 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Hyperparameters for medium-augmentation COCO training from scratch +# python train.py --batch 32 --cfg yolov5m6.yaml --weights '' --data coco.yaml --img 1280 --epochs 300 +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.1 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.3 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 0.7 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.9 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.1 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) diff --git a/detector/YOLOv5/data/images/bus.jpg b/detector/YOLOv5/data/images/bus.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cf0dab1214b3c06668e2c6e3a1666463acfe88c --- /dev/null +++ b/detector/YOLOv5/data/images/bus.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33b198a1d2839bb9ac4c65d61f9e852196793cae9a0781360859425f6022b69c +size 487438 diff --git a/detector/YOLOv5/data/images/zidane.jpg b/detector/YOLOv5/data/images/zidane.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6d86f9edfce6353b027f16b9df7a973c72e598ba --- /dev/null +++ b/detector/YOLOv5/data/images/zidane.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:356dad2107bb0254e4e4a81bc1d9c7140043e88569d546e5b404b19bffa77d0a +size 168949 diff --git a/detector/YOLOv5/data/scripts/download_weights.sh b/detector/YOLOv5/data/scripts/download_weights.sh new file mode 100644 index 0000000000000000000000000000000000000000..e9fa65394178005ba42ad02b91fed2873effb66b --- /dev/null +++ 
b/detector/YOLOv5/data/scripts/download_weights.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Download latest models from https://github.com/ultralytics/yolov5/releases +# Example usage: bash path/to/download_weights.sh +# parent +# └── yolov5 +# ├── yolov5s.pt ← downloads here +# ├── yolov5m.pt +# └── ... + +python - <= cls >= 0, f'incorrect class index {cls}' + + # Write YOLO label + if id not in shapes: + shapes[id] = Image.open(file).size + box = xyxy2xywhn(box[None].astype(np.float), w=shapes[id][0], h=shapes[id][1], clip=True) + with open((labels / id).with_suffix('.txt'), 'a') as f: + f.write(f"{cls} {' '.join(f'{x:.6f}' for x in box[0])}\n") # write label.txt + except Exception as e: + print(f'WARNING: skipping one label for {file}: {e}') + + + # Download manually from https://challenge.xviewdataset.org + dir = Path(yaml['path']) # dataset root dir + # urls = ['https://d307kc0mrhucc3.cloudfront.net/train_labels.zip', # train labels + # 'https://d307kc0mrhucc3.cloudfront.net/train_images.zip', # 15G, 847 train images + # 'https://d307kc0mrhucc3.cloudfront.net/val_images.zip'] # 5G, 282 val images (no labels) + # download(urls, dir=dir, delete=False) + + # Convert labels + convert_labels(dir / 'xView_train.geojson') + + # Move images + images = Path(dir / 'images') + images.mkdir(parents=True, exist_ok=True) + Path(dir / 'train_images').rename(dir / 'images' / 'train') + Path(dir / 'val_images').rename(dir / 'images' / 'val') + + # Split + autosplit(dir / 'images' / 'train') diff --git a/detector/YOLOv5/detector.py b/detector/YOLOv5/detector.py new file mode 100644 index 0000000000000000000000000000000000000000..ef9542f66b184264a637d2fe9447bb711aa5afb0 --- /dev/null +++ b/detector/YOLOv5/detector.py @@ -0,0 +1,94 @@ +import argparse +import os +import sys +from pathlib import Path + +import cv2 +import torch +import torch.backends.cudnn as cudnn +import numpy as np + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +from models.common import DetectMultiBackend +from detector.YOLOv5.utils.augmentations import letterbox +from detector.YOLOv5.utils.datasets import IMG_FORMATS, VID_FORMATS, LoadImages, LoadStreams +from detector.YOLOv5.utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, + increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from detector.YOLOv5.utils.plots import Annotator, colors, save_one_box +from detector.YOLOv5.utils.torch_utils import select_device, time_sync + + +class YOLOv5(object): + def __init__(self, weight='yolov5s.pt', data='data/coco128.yaml', imgsz=[640, 640], + conf_thres=0.25, nms_thres=0.45, max_det=1000, device='cuda:0', dnn=False): + super().__init__() + self.device = select_device(device) + self.net = DetectMultiBackend(weight, device=self.device, dnn=dnn, data=data) + self.stride, self.class_names, self.pt = self.net.stride, self.net.names, self.net.pt + self.imgsz = check_img_size(imgsz, s=self.stride) # check image size + self.conf_thres = conf_thres + self.nms_thres = nms_thres + self.max_det = max_det + + def __call__(self, im0, augment=False, save_result=False): + # im shape is [H, W, 3] and RGB + # read image + bs = 1 + img = letterbox(im0, self.imgsz, stride=self.stride, auto=True)[0] + img = img.transpose((2, 0, 1)) + img = np.ascontiguousarray(img) + 
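+        # note: letterbox keeps the aspect ratio and pads both sides to a
+        # stride multiple (e.g. a 1080x1920 frame at imgsz=640 comes out
+        # 384x640); the transpose converts HWC to the CHW layout PyTorch expects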
+
+        # preprocess image
+        img = torch.from_numpy(img).to(self.device)
+        img = img.float()
+        img /= 255.
+        if len(img.shape) == 3:
+            img = img[None]
+
+        # model inference
+        # self.net.warmup(imgsz=(1 if self.pt else bs, 3, *self.imgsz))  # warmup
+        pred = self.net(img, augment=augment)
+        pred = non_max_suppression(pred, self.conf_thres, self.nms_thres,
+                                   classes=None, agnostic=False, max_det=self.max_det)[0]
+
+        # postprocess det
+        gn = torch.tensor(im0.shape)[[1, 0, 1, 0]]  # normalization gain whwh
+        pred[:, :4] = scale_coords(img.shape[2:], pred[:, :4], im0.shape).round()
+
+        if save_result is True:
+            annotator = Annotator(im0, line_width=3, example=str(self.class_names))
+            for *xyxy, conf, cls in pred:
+                c = int(cls)  # integer class
+                label = '{:} {:.2f}'.format(self.class_names[c], conf)
+                annotator.box_label(xyxy, label, color=colors(c, False))
+            im0 = annotator.result()
+
+        pred[:, :4] = xyxy2xywh(pred[:, :4])
+        xywh = pred[:, :4].cpu().numpy()
+        conf = pred[:, 4].cpu().numpy()
+        cls = pred[:, 5].cpu().numpy()
+        return (xywh, conf, cls) if not save_result else (xywh, conf, cls, im0)
+
+
+def demo():
+    yolo = YOLOv5(weight='yolov5s.pt', data='data/coco128.yaml')
+    root = "./data/images"
+    files = [os.path.join(root, file) for file in os.listdir(root) if file.endswith('.jpg')]
+    for filename in files:
+        img = cv2.imread(filename)
+        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+        bbox, cls_conf, cls_ids, img_ = yolo(img, save_result=True)
+        # imshow
+        cv2.namedWindow("yolo")
+        cv2.imshow("yolo", img_[:, :, ::-1])
+        cv2.waitKey(0)
+        cv2.destroyAllWindows()
+
+
+if __name__ == "__main__":
+    demo()
diff --git a/detector/YOLOv5/export.py b/detector/YOLOv5/export.py
new file mode 100644
index 0000000000000000000000000000000000000000..88af474845a18f6ad85c0a9ecfea6d7f0342d859
--- /dev/null
+++ b/detector/YOLOv5/export.py
@@ -0,0 +1,559 @@
+# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
+"""
+Export a YOLOv5 PyTorch model to other formats. TensorFlow exports authored by https://github.com/zldrobit
+
+Format                | `export.py --include` | Model
+---                   | ---                   | ---
+PyTorch               | -                     | yolov5s.pt
+TorchScript           | `torchscript`         | yolov5s.torchscript
+ONNX                  | `onnx`                | yolov5s.onnx
+OpenVINO              | `openvino`            | yolov5s_openvino_model/
+TensorRT              | `engine`              | yolov5s.engine
+CoreML                | `coreml`              | yolov5s.mlmodel
+TensorFlow SavedModel | `saved_model`         | yolov5s_saved_model/
+TensorFlow GraphDef   | `pb`                  | yolov5s.pb
+TensorFlow Lite       | `tflite`              | yolov5s.tflite
+TensorFlow Edge TPU   | `edgetpu`             | yolov5s_edgetpu.tflite
+TensorFlow.js         | `tfjs`                | yolov5s_web_model/
+
+Requirements:
+    $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime openvino-dev tensorflow-cpu  # CPU
+    $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime-gpu openvino-dev tensorflow  # GPU
+
+Usage:
+    $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx openvino engine coreml tflite ...
+
+Inference:
+    $ python path/to/detect.py --weights yolov5s.pt              # PyTorch
+                                         yolov5s.torchscript     # TorchScript
+                                         yolov5s.onnx            # ONNX Runtime or OpenCV DNN with --dnn
+                                         yolov5s.xml             # OpenVINO
+                                         yolov5s.engine          # TensorRT
+                                         yolov5s.mlmodel         # CoreML (MacOS-only)
+                                         yolov5s_saved_model     # TensorFlow SavedModel
+                                         yolov5s.pb               # TensorFlow GraphDef
+                                         yolov5s.tflite           # TensorFlow Lite
+                                         yolov5s_edgetpu.tflite   # TensorFlow Edge TPU
+
+TensorFlow.js:
+    $ cd ..
&& git clone https://github.com/zldrobit/tfjs-yolov5-example.git && cd tfjs-yolov5-example + $ npm install + $ ln -s ../../yolov5/yolov5s_web_model public/yolov5s_web_model + $ npm start +""" + +import argparse +import json +import os +import platform +import subprocess +import sys +import time +import warnings +from pathlib import Path + +import pandas as pd +import torch +import torch.nn as nn +from torch.utils.mobile_optimizer import optimize_for_mobile + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +from models.common import Conv +from models.experimental import attempt_load +from models.yolo import Detect +from detector.YOLOv5.utils.activations import SiLU +from detector.YOLOv5.utils.datasets import LoadImages +from detector.YOLOv5.utils.general import (LOGGER, check_dataset, check_img_size, check_requirements, check_version, colorstr, + file_size, print_args, url2file) +from detector.YOLOv5.utils.torch_utils import select_device + + +def export_formats(): + # YOLOv5 export formats + x = [['PyTorch', '-', '.pt'], + ['TorchScript', 'torchscript', '.torchscript'], + ['ONNX', 'onnx', '.onnx'], + ['OpenVINO', 'openvino', '_openvino_model'], + ['TensorRT', 'engine', '.engine'], + ['CoreML', 'coreml', '.mlmodel'], + ['TensorFlow SavedModel', 'saved_model', '_saved_model'], + ['TensorFlow GraphDef', 'pb', '.pb'], + ['TensorFlow Lite', 'tflite', '.tflite'], + ['TensorFlow Edge TPU', 'edgetpu', '_edgetpu.tflite'], + ['TensorFlow.js', 'tfjs', '_web_model']] + return pd.DataFrame(x, columns=['Format', 'Argument', 'Suffix']) + + +def export_torchscript(model, im, file, optimize, prefix=colorstr('TorchScript:')): + # YOLOv5 TorchScript model export + try: + LOGGER.info(f'\n{prefix} starting export with torch {torch.__version__}...') + f = file.with_suffix('.torchscript') + + ts = torch.jit.trace(model, im, strict=False) + d = {"shape": im.shape, "stride": int(max(model.stride)), "names": model.names} + extra_files = {'config.txt': json.dumps(d)} # torch._C.ExtraFilesMap() + if optimize: # https://pytorch.org/tutorials/recipes/mobile_interpreter.html + optimize_for_mobile(ts)._save_for_lite_interpreter(str(f), _extra_files=extra_files) + else: + ts.save(str(f), _extra_files=extra_files) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'{prefix} export failure: {e}') + + +def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorstr('ONNX:')): + # YOLOv5 ONNX export + try: + check_requirements(('onnx',)) + import onnx + + LOGGER.info(f'\n{prefix} starting export with onnx {onnx.__version__}...') + f = file.with_suffix('.onnx') + + torch.onnx.export(model, im, f, verbose=False, opset_version=opset, + training=torch.onnx.TrainingMode.TRAINING if train else torch.onnx.TrainingMode.EVAL, + do_constant_folding=not train, + input_names=['images'], + output_names=['output'], + dynamic_axes={'images': {0: 'batch', 2: 'height', 3: 'width'}, # shape(1,3,640,640) + 'output': {0: 'batch', 1: 'anchors'} # shape(1,25200,85) + } if dynamic else None) + + # Checks + model_onnx = onnx.load(f) # load onnx model + onnx.checker.check_model(model_onnx) # check onnx model + # LOGGER.info(onnx.helper.printable_graph(model_onnx.graph)) # print + + # Simplify + if simplify: + try: + check_requirements(('onnx-simplifier',)) + import onnxsim + + 
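+                # note: onnx-simplifier folds constant subgraphs and removes
+                # redundant ops; with dynamic axes the concrete input shape is
+                # passed below so shape inference can still resolve the graph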
LOGGER.info(f'{prefix} simplifying with onnx-simplifier {onnxsim.__version__}...') + model_onnx, check = onnxsim.simplify( + model_onnx, + dynamic_input_shape=dynamic, + input_shapes={'images': list(im.shape)} if dynamic else None) + assert check, 'assert check failed' + onnx.save(model_onnx, f) + except Exception as e: + LOGGER.info(f'{prefix} simplifier failure: {e}') + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'{prefix} export failure: {e}') + + +def export_openvino(model, im, file, prefix=colorstr('OpenVINO:')): + # YOLOv5 OpenVINO export + try: + check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ + import openvino.inference_engine as ie + + LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...') + f = str(file).replace('.pt', '_openvino_model' + os.sep) + + cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f}" + subprocess.check_output(cmd, shell=True) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_coreml(model, im, file, prefix=colorstr('CoreML:')): + # YOLOv5 CoreML export + try: + check_requirements(('coremltools',)) + import coremltools as ct + + LOGGER.info(f'\n{prefix} starting export with coremltools {ct.__version__}...') + f = file.with_suffix('.mlmodel') + + ts = torch.jit.trace(model, im, strict=False) # TorchScript model + ct_model = ct.convert(ts, inputs=[ct.ImageType('image', shape=im.shape, scale=1 / 255, bias=[0, 0, 0])]) + ct_model.save(f) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return ct_model, f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + return None, None + + +def export_engine(model, im, file, train, half, simplify, workspace=4, verbose=False, prefix=colorstr('TensorRT:')): + # YOLOv5 TensorRT export https://developer.nvidia.com/tensorrt + try: + check_requirements(('tensorrt',)) + import tensorrt as trt + + if trt.__version__[0] == '7': # TensorRT 7 handling https://github.com/ultralytics/yolov5/issues/6012 + grid = model.model[-1].anchor_grid + model.model[-1].anchor_grid = [a[..., :1, :1, :] for a in grid] + export_onnx(model, im, file, 12, train, False, simplify) # opset 12 + model.model[-1].anchor_grid = grid + else: # TensorRT >= 8 + check_version(trt.__version__, '8.0.0', hard=True) # require tensorrt>=8.0.0 + export_onnx(model, im, file, 13, train, False, simplify) # opset 13 + onnx = file.with_suffix('.onnx') + + LOGGER.info(f'\n{prefix} starting export with TensorRT {trt.__version__}...') + assert im.device.type != 'cpu', 'export running on CPU but must be on GPU, i.e. 
`python export.py --device 0`' + assert onnx.exists(), f'failed to export ONNX file: {onnx}' + f = file.with_suffix('.engine') # TensorRT engine file + logger = trt.Logger(trt.Logger.INFO) + if verbose: + logger.min_severity = trt.Logger.Severity.VERBOSE + + builder = trt.Builder(logger) + config = builder.create_builder_config() + config.max_workspace_size = workspace * 1 << 30 + + flag = (1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)) + network = builder.create_network(flag) + parser = trt.OnnxParser(network, logger) + if not parser.parse_from_file(str(onnx)): + raise RuntimeError(f'failed to load ONNX file: {onnx}') + + inputs = [network.get_input(i) for i in range(network.num_inputs)] + outputs = [network.get_output(i) for i in range(network.num_outputs)] + LOGGER.info(f'{prefix} Network Description:') + for inp in inputs: + LOGGER.info(f'{prefix}\tinput "{inp.name}" with shape {inp.shape} and dtype {inp.dtype}') + for out in outputs: + LOGGER.info(f'{prefix}\toutput "{out.name}" with shape {out.shape} and dtype {out.dtype}') + + half &= builder.platform_has_fast_fp16 + LOGGER.info(f'{prefix} building FP{16 if half else 32} engine in {f}') + if half: + config.set_flag(trt.BuilderFlag.FP16) + with builder.build_engine(network, config) as engine, open(f, 'wb') as t: + t.write(engine.serialize()) + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_saved_model(model, im, file, dynamic, + tf_nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45, + conf_thres=0.25, keras=False, prefix=colorstr('TensorFlow SavedModel:')): + # YOLOv5 TensorFlow SavedModel export + try: + import tensorflow as tf + from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2 + + from models.tf import TFDetect, TFModel + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + f = str(file).replace('.pt', '_saved_model') + batch_size, ch, *imgsz = list(im.shape) # BCHW + + tf_model = TFModel(cfg=model.yaml, model=model, nc=model.nc, imgsz=imgsz) + im = tf.zeros((batch_size, *imgsz, 3)) # BHWC order for TensorFlow + _ = tf_model.predict(im, tf_nms, agnostic_nms, topk_per_class, topk_all, iou_thres, conf_thres) + inputs = tf.keras.Input(shape=(*imgsz, 3), batch_size=None if dynamic else batch_size) + outputs = tf_model.predict(inputs, tf_nms, agnostic_nms, topk_per_class, topk_all, iou_thres, conf_thres) + keras_model = tf.keras.Model(inputs=inputs, outputs=outputs) + keras_model.trainable = False + keras_model.summary() + if keras: + keras_model.save(f, save_format='tf') + else: + m = tf.function(lambda x: keras_model(x)) # full model + spec = tf.TensorSpec(keras_model.inputs[0].shape, keras_model.inputs[0].dtype) + m = m.get_concrete_function(spec) + frozen_func = convert_variables_to_constants_v2(m) + tfm = tf.Module() + tfm.__call__ = tf.function(lambda x: frozen_func(x), [spec]) + tfm.__call__(im) + tf.saved_model.save( + tfm, + f, + options=tf.saved_model.SaveOptions(experimental_custom_gradients=False) if + check_version(tf.__version__, '2.6') else tf.saved_model.SaveOptions()) + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return keras_model, f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + return None, None + + +def export_pb(keras_model, im, file, prefix=colorstr('TensorFlow GraphDef:')): + # YOLOv5 TensorFlow GraphDef 
*.pb export https://github.com/leimao/Frozen_Graph_TensorFlow + try: + import tensorflow as tf + from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2 + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + f = file.with_suffix('.pb') + + m = tf.function(lambda x: keras_model(x)) # full model + m = m.get_concrete_function(tf.TensorSpec(keras_model.inputs[0].shape, keras_model.inputs[0].dtype)) + frozen_func = convert_variables_to_constants_v2(m) + frozen_func.graph.as_graph_def() + tf.io.write_graph(graph_or_graph_def=frozen_func.graph, logdir=str(f.parent), name=f.name, as_text=False) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_tflite(keras_model, im, file, int8, data, ncalib, prefix=colorstr('TensorFlow Lite:')): + # YOLOv5 TensorFlow Lite export + try: + import tensorflow as tf + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + batch_size, ch, *imgsz = list(im.shape) # BCHW + f = str(file).replace('.pt', '-fp16.tflite') + + converter = tf.lite.TFLiteConverter.from_keras_model(keras_model) + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS] + converter.target_spec.supported_types = [tf.float16] + converter.optimizations = [tf.lite.Optimize.DEFAULT] + if int8: + from models.tf import representative_dataset_gen + dataset = LoadImages(check_dataset(data)['train'], img_size=imgsz, auto=False) # representative data + converter.representative_dataset = lambda: representative_dataset_gen(dataset, ncalib) + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + converter.target_spec.supported_types = [] + converter.inference_input_type = tf.uint8 # or tf.int8 + converter.inference_output_type = tf.uint8 # or tf.int8 + converter.experimental_new_quantizer = False + f = str(file).replace('.pt', '-int8.tflite') + + tflite_model = converter.convert() + open(f, "wb").write(tflite_model) + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_edgetpu(keras_model, im, file, prefix=colorstr('Edge TPU:')): + # YOLOv5 Edge TPU export https://coral.ai/docs/edgetpu/models-intro/ + try: + cmd = 'edgetpu_compiler --version' + help_url = 'https://coral.ai/docs/edgetpu/compiler/' + assert platform.system() == 'Linux', f'export only supported on Linux. See {help_url}' + if subprocess.run(cmd + ' >/dev/null', shell=True).returncode != 0: + LOGGER.info(f'\n{prefix} export requires Edge TPU compiler. 
Attempting install from {help_url}') + sudo = subprocess.run('sudo --version >/dev/null', shell=True).returncode == 0 # sudo installed on system + for c in ['curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -', + 'echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | sudo tee /etc/apt/sources.list.d/coral-edgetpu.list', + 'sudo apt-get update', + 'sudo apt-get install edgetpu-compiler']: + subprocess.run(c if sudo else c.replace('sudo ', ''), shell=True, check=True) + ver = subprocess.run(cmd, shell=True, capture_output=True, check=True).stdout.decode().split()[-1] + + LOGGER.info(f'\n{prefix} starting export with Edge TPU compiler {ver}...') + f = str(file).replace('.pt', '-int8_edgetpu.tflite') # Edge TPU model + f_tfl = str(file).replace('.pt', '-int8.tflite') # TFLite model + + cmd = f"edgetpu_compiler -s {f_tfl}" + subprocess.run(cmd, shell=True, check=True) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_tfjs(keras_model, im, file, prefix=colorstr('TensorFlow.js:')): + # YOLOv5 TensorFlow.js export + try: + check_requirements(('tensorflowjs',)) + import re + + import tensorflowjs as tfjs + + LOGGER.info(f'\n{prefix} starting export with tensorflowjs {tfjs.__version__}...') + f = str(file).replace('.pt', '_web_model') # js dir + f_pb = file.with_suffix('.pb') # *.pb path + f_json = f + '/model.json' # *.json path + + cmd = f'tensorflowjs_converter --input_format=tf_frozen_model ' \ + f'--output_node_names="Identity,Identity_1,Identity_2,Identity_3" {f_pb} {f}' + subprocess.run(cmd, shell=True) + + json = open(f_json).read() + with open(f_json, 'w') as j: # sort JSON Identity_* in ascending order + subst = re.sub( + r'{"outputs": {"Identity.?.?": {"name": "Identity.?.?"}, ' + r'"Identity.?.?": {"name": "Identity.?.?"}, ' + r'"Identity.?.?": {"name": "Identity.?.?"}, ' + r'"Identity.?.?": {"name": "Identity.?.?"}}}', + r'{"outputs": {"Identity": {"name": "Identity"}, ' + r'"Identity_1": {"name": "Identity_1"}, ' + r'"Identity_2": {"name": "Identity_2"}, ' + r'"Identity_3": {"name": "Identity_3"}}}', + json) + j.write(subst) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +@torch.no_grad() +def run(data=ROOT / 'data/coco128.yaml', # 'dataset.yaml path' + weights=ROOT / 'yolov5s.pt', # weights path + imgsz=(640, 640), # image (height, width) + batch_size=1, # batch size + device='cpu', # cuda device, i.e. 
0 or 0,1,2,3 or cpu + include=('torchscript', 'onnx'), # include formats + half=False, # FP16 half-precision export + inplace=False, # set YOLOv5 Detect() inplace=True + train=False, # model.train() mode + optimize=False, # TorchScript: optimize for mobile + int8=False, # CoreML/TF INT8 quantization + dynamic=False, # ONNX/TF: dynamic axes + simplify=False, # ONNX: simplify model + opset=12, # ONNX: opset version + verbose=False, # TensorRT: verbose log + workspace=4, # TensorRT: workspace size (GB) + nms=False, # TF: add NMS to model + agnostic_nms=False, # TF: add agnostic NMS to model + topk_per_class=100, # TF.js NMS: topk per class to keep + topk_all=100, # TF.js NMS: topk for all classes to keep + iou_thres=0.45, # TF.js NMS: IoU threshold + conf_thres=0.25 # TF.js NMS: confidence threshold + ): + t = time.time() + include = [x.lower() for x in include] # to lowercase + formats = tuple(export_formats()['Argument'][1:]) # --include arguments + flags = [x in include for x in formats] + assert sum(flags) == len(include), f'ERROR: Invalid --include {include}, valid --include arguments are {formats}' + jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = flags # export booleans + file = Path(url2file(weights) if str(weights).startswith(('http:/', 'https:/')) else weights) # PyTorch weights + + # Load PyTorch model + device = select_device(device) + assert not (device.type == 'cpu' and half), '--half only compatible with GPU export, i.e. use --device 0' + model = attempt_load(weights, map_location=device, inplace=True, fuse=True) # load FP32 model + nc, names = model.nc, model.names # number of classes, class names + + # Checks + imgsz *= 2 if len(imgsz) == 1 else 1 # expand + opset = 12 if ('openvino' in include) else opset # OpenVINO requires opset <= 12 + assert nc == len(names), f'Model class count {nc} != len(names) {len(names)}' + + # Input + gs = int(max(model.stride)) # grid size (max stride) + imgsz = [check_img_size(x, gs) for x in imgsz] # verify img_size are gs-multiples + im = torch.zeros(batch_size, 3, *imgsz).to(device) # image size(1,3,320,192) BCHW iDetection + + # Update model + if half: + im, model = im.half(), model.half() # to FP16 + model.train() if train else model.eval() # training mode = no Detect() layer grid construction + for k, m in model.named_modules(): + if isinstance(m, Conv): # assign export-friendly activations + if isinstance(m.act, nn.SiLU): + m.act = SiLU() + elif isinstance(m, Detect): + m.inplace = inplace + m.onnx_dynamic = dynamic + if hasattr(m, 'forward_export'): + m.forward = m.forward_export # assign custom forward (optional) + + for _ in range(2): + y = model(im) # dry runs + shape = tuple(y[0].shape) # model output shape + LOGGER.info(f"\n{colorstr('PyTorch:')} starting from {file} with output shape {shape} ({file_size(file):.1f} MB)") + + # Exports + f = [''] * 10 # exported filenames + warnings.filterwarnings(action='ignore', category=torch.jit.TracerWarning) # suppress TracerWarning + if jit: + f[0] = export_torchscript(model, im, file, optimize) + if engine: # TensorRT required before ONNX + f[1] = export_engine(model, im, file, train, half, simplify, workspace, verbose) + if onnx or xml: # OpenVINO requires ONNX + f[2] = export_onnx(model, im, file, opset, train, dynamic, simplify) + if xml: # OpenVINO + f[3] = export_openvino(model, im, file) + if coreml: + _, f[4] = export_coreml(model, im, file) + + # TensorFlow Exports + if any((saved_model, pb, tflite, edgetpu, tfjs)): + if int8 or edgetpu: # TFLite --int8 bug 
https://github.com/ultralytics/yolov5/issues/5707 + check_requirements(('flatbuffers==1.12',)) # required before `import tensorflow` + assert not (tflite and tfjs), 'TFLite and TF.js models must be exported separately, please pass only one type.' + model, f[5] = export_saved_model(model, im, file, dynamic, tf_nms=nms or agnostic_nms or tfjs, + agnostic_nms=agnostic_nms or tfjs, topk_per_class=topk_per_class, + topk_all=topk_all, conf_thres=conf_thres, iou_thres=iou_thres) # keras model + if pb or tfjs: # pb prerequisite to tfjs + f[6] = export_pb(model, im, file) + if tflite or edgetpu: + f[7] = export_tflite(model, im, file, int8=int8 or edgetpu, data=data, ncalib=100) + if edgetpu: + f[8] = export_edgetpu(model, im, file) + if tfjs: + f[9] = export_tfjs(model, im, file) + + # Finish + f = [str(x) for x in f if x] # filter out '' and None + if any(f): + LOGGER.info(f'\nExport complete ({time.time() - t:.2f}s)' + f"\nResults saved to {colorstr('bold', file.parent.resolve())}" + f"\nDetect: python detect.py --weights {f[-1]}" + f"\nPyTorch Hub: model = torch.hub.load('ultralytics/yolov5', 'custom', '{f[-1]}')" + f"\nValidate: python val.py --weights {f[-1]}" + f"\nVisualize: https://netron.app") + return f # return list of exported files/dirs + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + parser.add_argument('--weights', nargs='+', type=str, default=ROOT / 'yolov5s.pt', help='model.pt path(s)') + parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640, 640], help='image (h, w)') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--device', default='cpu', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') + parser.add_argument('--half', action='store_true', help='FP16 half-precision export') + parser.add_argument('--inplace', action='store_true', help='set YOLOv5 Detect() inplace=True') + parser.add_argument('--train', action='store_true', help='model.train() mode') + parser.add_argument('--optimize', action='store_true', help='TorchScript: optimize for mobile') + parser.add_argument('--int8', action='store_true', help='CoreML/TF INT8 quantization') + parser.add_argument('--dynamic', action='store_true', help='ONNX/TF: dynamic axes') + parser.add_argument('--simplify', action='store_true', help='ONNX: simplify model') + parser.add_argument('--opset', type=int, default=12, help='ONNX: opset version') + parser.add_argument('--verbose', action='store_true', help='TensorRT: verbose log') + parser.add_argument('--workspace', type=int, default=4, help='TensorRT: workspace size (GB)') + parser.add_argument('--nms', action='store_true', help='TF: add NMS to model') + parser.add_argument('--agnostic-nms', action='store_true', help='TF: add agnostic NMS to model') + parser.add_argument('--topk-per-class', type=int, default=100, help='TF.js NMS: topk per class to keep') + parser.add_argument('--topk-all', type=int, default=100, help='TF.js NMS: topk for all classes to keep') + parser.add_argument('--iou-thres', type=float, default=0.45, help='TF.js NMS: IoU threshold') + parser.add_argument('--conf-thres', type=float, default=0.25, help='TF.js NMS: confidence threshold') + parser.add_argument('--include', nargs='+', + default=['torchscript', 'onnx'], + help='torchscript, onnx, openvino, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs') + opt = parser.parse_args() + print_args(FILE.stem, opt) + return opt + + +def main(opt): + for opt.weights in (opt.weights if isinstance(opt.weights, list) else [opt.weights]): + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/detector/YOLOv5/hubconf.py b/detector/YOLOv5/hubconf.py new file mode 100644 index 0000000000000000000000000000000000000000..39fa614b2e34a41a7eedbdcbba7fa486abb706f3 --- /dev/null +++ b/detector/YOLOv5/hubconf.py @@ -0,0 +1,143 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +PyTorch Hub models https://pytorch.org/hub/ultralytics_yolov5/ + +Usage: + import torch + model = torch.hub.load('ultralytics/yolov5', 'yolov5s') + model = torch.hub.load('ultralytics/yolov5:master', 'custom', 'path/to/yolov5s.onnx') # file from branch +""" + +import torch + + +def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + """Creates or loads a YOLOv5 model + + Arguments: + name (str): model name 'yolov5s' or path 'path/to/best.pt' + pretrained (bool): load pretrained weights into the model + channels (int): number of input channels + classes (int): number of model classes + autoshape (bool): apply YOLOv5 .autoshape() wrapper to model + verbose (bool): print all information to screen + device (str, torch.device, None): device to use for model parameters + + Returns: + YOLOv5 model + """ + from pathlib import Path + + from models.common import AutoShape, DetectMultiBackend + from models.yolo import Model + from utils.downloads import attempt_download + from utils.general import LOGGER, check_requirements, intersect_dicts, logging + from utils.torch_utils import select_device + + if not verbose: + LOGGER.setLevel(logging.WARNING) + check_requirements(exclude=('tensorboard', 'thop', 'opencv-python')) + name = Path(name) + path = 
name.with_suffix('.pt') if name.suffix == '' else name # checkpoint path + try: + device = select_device(('0' if torch.cuda.is_available() else 'cpu') if device is None else device) + + if pretrained and channels == 3 and classes == 80: + model = DetectMultiBackend(path, device=device) # download/load FP32 model + # model = models.experimental.attempt_load(path, map_location=device) # download/load FP32 model + else: + cfg = list((Path(__file__).parent / 'models').rglob(f'{path.stem}.yaml'))[0] # model.yaml path + model = Model(cfg, channels, classes) # create model + if pretrained: + ckpt = torch.load(attempt_download(path), map_location=device) # load + csd = ckpt['model'].float().state_dict() # checkpoint state_dict as FP32 + csd = intersect_dicts(csd, model.state_dict(), exclude=['anchors']) # intersect + model.load_state_dict(csd, strict=False) # load + if len(ckpt['model'].names) == classes: + model.names = ckpt['model'].names # set class names attribute + if autoshape: + model = AutoShape(model) # for file/URI/PIL/cv2/np inputs and NMS + return model.to(device) + + except Exception as e: + help_url = 'https://github.com/ultralytics/yolov5/issues/36' + s = f'{e}. Cache may be out of date, try `force_reload=True` or see {help_url} for help.' + raise Exception(s) from e + + +def custom(path='path/to/model.pt', autoshape=True, verbose=True, device=None): + # YOLOv5 custom or local model + return _create(path, autoshape=autoshape, verbose=verbose, device=device) + + +def yolov5n(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-nano model https://github.com/ultralytics/yolov5 + return _create('yolov5n', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5s(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-small model https://github.com/ultralytics/yolov5 + return _create('yolov5s', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5m(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-medium model https://github.com/ultralytics/yolov5 + return _create('yolov5m', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5l(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-large model https://github.com/ultralytics/yolov5 + return _create('yolov5l', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5x(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-xlarge model https://github.com/ultralytics/yolov5 + return _create('yolov5x', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5n6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-nano-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5n6', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5s6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-small-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5s6', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5m6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-medium-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5m6', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5l6(pretrained=True, channels=3, 
classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-large-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5l6', pretrained, channels, classes, autoshape, verbose, device) + + +def yolov5x6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-xlarge-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5x6', pretrained, channels, classes, autoshape, verbose, device) + + +if __name__ == '__main__': + model = _create(name='yolov5s', pretrained=True, channels=3, classes=80, autoshape=True, verbose=True) # pretrained + # model = custom(path='path/to/model.pt') # custom + + # Verify inference + from pathlib import Path + + import cv2 + import numpy as np + from PIL import Image + + imgs = ['data/images/zidane.jpg', # filename + Path('data/images/zidane.jpg'), # Path + 'https://ultralytics.com/images/zidane.jpg', # URI + cv2.imread('data/images/bus.jpg')[:, :, ::-1], # OpenCV + Image.open('data/images/bus.jpg'), # PIL + np.zeros((320, 640, 3))] # numpy + + results = model(imgs, size=320) # batched inference + results.print() + results.save() diff --git a/detector/YOLOv5/models/__init__.py b/detector/YOLOv5/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/detector/YOLOv5/models/common.py b/detector/YOLOv5/models/common.py new file mode 100644 index 0000000000000000000000000000000000000000..f073a5588ee43f68a4245cfaaf355cb94a778ab6 --- /dev/null +++ b/detector/YOLOv5/models/common.py @@ -0,0 +1,679 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Common modules +""" + +import json +import sys +import math +import platform +import warnings +from collections import OrderedDict, namedtuple +from copy import copy +from pathlib import Path + +import cv2 +import numpy as np +import pandas as pd +import requests +import torch +import torch.nn as nn +import yaml +from PIL import Image +from torch.cuda import amp + + +from detector.YOLOv5.utils.datasets import exif_transpose, letterbox +from detector.YOLOv5.utils.general import (LOGGER, check_requirements, check_suffix, check_version, colorstr, increment_path, + make_divisible, non_max_suppression, scale_coords, xywh2xyxy, xyxy2xywh) +from detector.YOLOv5.utils.plots import Annotator, colors, save_one_box +from detector.YOLOv5.utils.torch_utils import copy_attr, time_sync + + +def autopad(k, p=None): # kernel, padding + # Pad to 'same' + if p is None: + p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad + return p + + +class Conv(nn.Module): + # Standard convolution + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super().__init__() + self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) + self.bn = nn.BatchNorm2d(c2) + self.act = nn.SiLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity()) + + def forward(self, x): + return self.act(self.bn(self.conv(x))) + + def forward_fuse(self, x): + return self.act(self.conv(x)) + + +class DWConv(Conv): + # Depth-wise convolution class + def __init__(self, c1, c2, k=1, s=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), act=act) + + +class TransformerLayer(nn.Module): + # Transformer layer https://arxiv.org/abs/2010.11929 (LayerNorm layers removed for better performance) + def __init__(self, c, num_heads): + super().__init__() + self.q = 
nn.Linear(c, c, bias=False) + self.k = nn.Linear(c, c, bias=False) + self.v = nn.Linear(c, c, bias=False) + self.ma = nn.MultiheadAttention(embed_dim=c, num_heads=num_heads) + self.fc1 = nn.Linear(c, c, bias=False) + self.fc2 = nn.Linear(c, c, bias=False) + + def forward(self, x): + x = self.ma(self.q(x), self.k(x), self.v(x))[0] + x + x = self.fc2(self.fc1(x)) + x + return x + + +class TransformerBlock(nn.Module): + # Vision Transformer https://arxiv.org/abs/2010.11929 + def __init__(self, c1, c2, num_heads, num_layers): + super().__init__() + self.conv = None + if c1 != c2: + self.conv = Conv(c1, c2) + self.linear = nn.Linear(c2, c2) # learnable position embedding + self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) for _ in range(num_layers))) + self.c2 = c2 + + def forward(self, x): + if self.conv is not None: + x = self.conv(x) + b, _, w, h = x.shape + p = x.flatten(2).permute(2, 0, 1) + return self.tr(p + self.linear(p)).permute(1, 2, 0).reshape(b, self.c2, w, h) + + +class Bottleneck(nn.Module): + # Standard bottleneck + def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_, c2, 3, 1, g=g) + self.add = shortcut and c1 == c2 + + def forward(self, x): + return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) + + +class BottleneckCSP(nn.Module): + # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False) + self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False) + self.cv4 = Conv(2 * c_, c2, 1, 1) + self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) + self.act = nn.SiLU() + self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) + + def forward(self, x): + y1 = self.cv3(self.m(self.cv1(x))) + y2 = self.cv2(x) + return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1)))) + + +class C3(nn.Module): + # CSP Bottleneck with 3 convolutions + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c1, c_, 1, 1) + self.cv3 = Conv(2 * c_, c2, 1) # act=FReLU(c2) + self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) + # self.m = nn.Sequential(*[CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n)]) + + def forward(self, x): + return self.cv3(torch.cat((self.m(self.cv1(x)), self.cv2(x)), dim=1)) + + +class C3TR(C3): + # C3 module with TransformerBlock() + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): + super().__init__(c1, c2, n, shortcut, g, e) + c_ = int(c2 * e) + self.m = TransformerBlock(c_, c_, 4, n) + + +class C3SPP(C3): + # C3 module with SPP() + def __init__(self, c1, c2, k=(5, 9, 13), n=1, shortcut=True, g=1, e=0.5): + super().__init__(c1, c2, n, shortcut, g, e) + c_ = int(c2 * e) + self.m = SPP(c_, c_, k) + + +class C3Ghost(C3): + # C3 module with GhostBottleneck() + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): + super().__init__(c1, c2, n, shortcut, g, e) + c_ = int(c2 * e) # hidden channels + self.m = nn.Sequential(*(GhostBottleneck(c_, c_) for _ in range(n))) + + +class SPP(nn.Module): 
+ # Spatial Pyramid Pooling (SPP) layer https://arxiv.org/abs/1406.4729 + def __init__(self, c1, c2, k=(5, 9, 13)): + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1) + self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k]) + + def forward(self, x): + x = self.cv1(x) + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1)) + + +class SPPF(nn.Module): + # Spatial Pyramid Pooling - Fast (SPPF) layer for YOLOv5 by Glenn Jocher + def __init__(self, c1, c2, k=5): # equivalent to SPP(k=(5, 9, 13)) + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_ * 4, c2, 1, 1) + self.m = nn.MaxPool2d(kernel_size=k, stride=1, padding=k // 2) + + def forward(self, x): + x = self.cv1(x) + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + y1 = self.m(x) + y2 = self.m(y1) + return self.cv2(torch.cat([x, y1, y2, self.m(y2)], 1)) + + +class Focus(nn.Module): + # Focus wh information into c-space + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super().__init__() + self.conv = Conv(c1 * 4, c2, k, s, p, g, act) + # self.contract = Contract(gain=2) + + def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2) + return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1)) + # return self.conv(self.contract(x)) + + +class GhostConv(nn.Module): + # Ghost Convolution https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups + super().__init__() + c_ = c2 // 2 # hidden channels + self.cv1 = Conv(c1, c_, k, s, None, g, act) + self.cv2 = Conv(c_, c_, 5, 1, None, c_, act) + + def forward(self, x): + y = self.cv1(x) + return torch.cat([y, self.cv2(y)], 1) + + +class GhostBottleneck(nn.Module): + # Ghost Bottleneck https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k=3, s=1): # ch_in, ch_out, kernel, stride + super().__init__() + c_ = c2 // 2 + self.conv = nn.Sequential(GhostConv(c1, c_, 1, 1), # pw + DWConv(c_, c_, k, s, act=False) if s == 2 else nn.Identity(), # dw + GhostConv(c_, c2, 1, 1, act=False)) # pw-linear + self.shortcut = nn.Sequential(DWConv(c1, c1, k, s, act=False), + Conv(c1, c2, 1, 1, act=False)) if s == 2 else nn.Identity() + + def forward(self, x): + return self.conv(x) + self.shortcut(x) + + +class Contract(nn.Module): + # Contract width-height into channels, i.e. x(1,64,80,80) to x(1,256,40,40) + def __init__(self, gain=2): + super().__init__() + self.gain = gain + + def forward(self, x): + b, c, h, w = x.size() # assert (h / s == 0) and (W / s == 0), 'Indivisible gain' + s = self.gain + x = x.view(b, c, h // s, s, w // s, s) # x(1,64,40,2,40,2) + x = x.permute(0, 3, 5, 1, 2, 4).contiguous() # x(1,2,2,64,40,40) + return x.view(b, c * s * s, h // s, w // s) # x(1,256,40,40) + + +class Expand(nn.Module): + # Expand channels into width-height, i.e. 
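
SPPF is commented as equivalent to `SPP(k=(5, 9, 13))`; the reason is that stride-1 max-pools compose, so two cascaded 5x5 pools cover a 9x9 window and three cover 13x13. A quick check on a toy tensor (shapes are assumptions):

```python
import torch
import torch.nn as nn

x = torch.randn(1, 8, 32, 32)
m5 = nn.MaxPool2d(kernel_size=5, stride=1, padding=2)
m9 = nn.MaxPool2d(kernel_size=9, stride=1, padding=4)
m13 = nn.MaxPool2d(kernel_size=13, stride=1, padding=6)

y1 = m5(x)
y2 = m5(y1)
assert torch.equal(y2, m9(x))        # 5x5 twice == 9x9 once
assert torch.equal(m5(y2), m13(x))   # 5x5 three times == 13x13 once
```

This is why SPPF reuses one pooling module three times instead of running three large kernels: same outputs, less compute.
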
x(1,64,80,80) to x(1,16,160,160) + def __init__(self, gain=2): + super().__init__() + self.gain = gain + + def forward(self, x): + b, c, h, w = x.size() # assert C / s ** 2 == 0, 'Indivisible gain' + s = self.gain + x = x.view(b, s, s, c // s ** 2, h, w) # x(1,2,2,16,80,80) + x = x.permute(0, 3, 4, 1, 5, 2).contiguous() # x(1,16,80,2,80,2) + return x.view(b, c // s ** 2, h * s, w * s) # x(1,16,160,160) + + +class Concat(nn.Module): + # Concatenate a list of tensors along dimension + def __init__(self, dimension=1): + super().__init__() + self.d = dimension + + def forward(self, x): + return torch.cat(x, self.d) + + +class DetectMultiBackend(nn.Module): + # YOLOv5 MultiBackend class for python inference on various backends + def __init__(self, weights='yolov5s.pt', device=None, dnn=False, data=None): + # Usage: + # PyTorch: weights = *.pt + # TorchScript: *.torchscript + # ONNX Runtime: *.onnx + # ONNX OpenCV DNN: *.onnx with --dnn + # OpenVINO: *.xml + # CoreML: *.mlmodel + # TensorRT: *.engine + # TensorFlow SavedModel: *_saved_model + # TensorFlow GraphDef: *.pb + # TensorFlow Lite: *.tflite + # TensorFlow Edge TPU: *_edgetpu.tflite + from models.experimental import attempt_download, attempt_load # scoped to avoid circular import + + super().__init__() + w = str(weights[0] if isinstance(weights, list) else weights) + pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = self.model_type(w) # get backend + stride, names = 64, [f'class{i}' for i in range(1000)] # assign defaults + w = attempt_download(w) # download if not local + if data: # data.yaml path (optional) + with open(data, errors='ignore') as f: + names = yaml.safe_load(f)['names'] # class names + + if pt: # PyTorch + model = attempt_load(weights if isinstance(weights, list) else w, map_location=device) + stride = max(int(model.stride.max()), 32) # model stride + names = model.module.names if hasattr(model, 'module') else model.names # get class names + self.model = model # explicitly assign for to(), cpu(), cuda(), half() + elif jit: # TorchScript + LOGGER.info(f'Loading {w} for TorchScript inference...') + extra_files = {'config.txt': ''} # model metadata + model = torch.jit.load(w, _extra_files=extra_files) + if extra_files['config.txt']: + d = json.loads(extra_files['config.txt']) # extra_files dict + stride, names = int(d['stride']), d['names'] + elif dnn: # ONNX OpenCV DNN + LOGGER.info(f'Loading {w} for ONNX OpenCV DNN inference...') + check_requirements(('opencv-python>=4.5.4',)) + net = cv2.dnn.readNetFromONNX(w) + elif onnx: # ONNX Runtime + LOGGER.info(f'Loading {w} for ONNX Runtime inference...') + cuda = torch.cuda.is_available() + check_requirements(('onnx', 'onnxruntime-gpu' if cuda else 'onnxruntime')) + import onnxruntime + providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider'] + session = onnxruntime.InferenceSession(w, providers=providers) + elif xml: # OpenVINO + LOGGER.info(f'Loading {w} for OpenVINO inference...') + check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ + import openvino.inference_engine as ie + core = ie.IECore() + if not Path(w).is_file(): # if not *.xml + w = next(Path(w).glob('*.xml')) # get *.xml file from *_openvino_model dir + network = core.read_network(model=w, weights=Path(w).with_suffix('.bin')) # *.xml, *.bin paths + executable_network = core.load_network(network, device_name='CPU', num_requests=1) + elif engine: # TensorRT + LOGGER.info(f'Loading {w} for TensorRT 
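
Contract and Expand (defined above) are exact inverses for the same gain; a round-trip sketch with assumed shapes:

```python
import torch

s = 2                                  # gain (assumed)
x = torch.randn(1, 64, 80, 80)

# Contract: fold each 2x2 spatial block into channels -> (1, 256, 40, 40)
c = x.view(1, 64, 40, s, 40, s).permute(0, 3, 5, 1, 2, 4).contiguous().view(1, 64 * s * s, 40, 40)

# Expand: unfold channels back into space -> (1, 64, 80, 80)
e = c.view(1, s, s, 64, 40, 40).permute(0, 3, 4, 1, 5, 2).contiguous().view(1, 64, 80, 80)
assert torch.equal(x, e)
```
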
inference...') + import tensorrt as trt # https://developer.nvidia.com/nvidia-tensorrt-download + check_version(trt.__version__, '7.0.0', hard=True) # require tensorrt>=7.0.0 + Binding = namedtuple('Binding', ('name', 'dtype', 'shape', 'data', 'ptr')) + logger = trt.Logger(trt.Logger.INFO) + with open(w, 'rb') as f, trt.Runtime(logger) as runtime: + model = runtime.deserialize_cuda_engine(f.read()) + bindings = OrderedDict() + for index in range(model.num_bindings): + name = model.get_binding_name(index) + dtype = trt.nptype(model.get_binding_dtype(index)) + shape = tuple(model.get_binding_shape(index)) + data = torch.from_numpy(np.empty(shape, dtype=np.dtype(dtype))).to(device) + bindings[name] = Binding(name, dtype, shape, data, int(data.data_ptr())) + binding_addrs = OrderedDict((n, d.ptr) for n, d in bindings.items()) + context = model.create_execution_context() + batch_size = bindings['images'].shape[0] + elif coreml: # CoreML + LOGGER.info(f'Loading {w} for CoreML inference...') + import coremltools as ct + model = ct.models.MLModel(w) + else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) + if saved_model: # SavedModel + LOGGER.info(f'Loading {w} for TensorFlow SavedModel inference...') + import tensorflow as tf + keras = False # assume TF1 saved_model + model = tf.keras.models.load_model(w) if keras else tf.saved_model.load(w) + elif pb: # GraphDef https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt + LOGGER.info(f'Loading {w} for TensorFlow GraphDef inference...') + import tensorflow as tf + + def wrap_frozen_graph(gd, inputs, outputs): + x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), []) # wrapped + ge = x.graph.as_graph_element + return x.prune(tf.nest.map_structure(ge, inputs), tf.nest.map_structure(ge, outputs)) + + gd = tf.Graph().as_graph_def() # graph_def + gd.ParseFromString(open(w, 'rb').read()) + frozen_func = wrap_frozen_graph(gd, inputs="x:0", outputs="Identity:0") + elif tflite or edgetpu: # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python + try: # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu + from tflite_runtime.interpreter import Interpreter, load_delegate + except ImportError: + import tensorflow as tf + Interpreter, load_delegate = tf.lite.Interpreter, tf.lite.experimental.load_delegate, + if edgetpu: # Edge TPU https://coral.ai/software/#edgetpu-runtime + LOGGER.info(f'Loading {w} for TensorFlow Lite Edge TPU inference...') + delegate = {'Linux': 'libedgetpu.so.1', + 'Darwin': 'libedgetpu.1.dylib', + 'Windows': 'edgetpu.dll'}[platform.system()] + interpreter = Interpreter(model_path=w, experimental_delegates=[load_delegate(delegate)]) + else: # Lite + LOGGER.info(f'Loading {w} for TensorFlow Lite inference...') + interpreter = Interpreter(model_path=w) # load TFLite model + interpreter.allocate_tensors() # allocate + input_details = interpreter.get_input_details() # inputs + output_details = interpreter.get_output_details() # outputs + elif tfjs: + raise Exception('ERROR: YOLOv5 TF.js inference is not supported') + self.__dict__.update(locals()) # assign all variables to self + + def forward(self, im, augment=False, visualize=False, val=False): + # YOLOv5 MultiBackend inference + b, ch, h, w = im.shape # batch, channel, height, width + if self.pt or self.jit: # PyTorch + y = self.model(im) if self.jit else self.model(im, augment=augment, visualize=visualize) + return y if val else y[0] + elif self.dnn: # ONNX OpenCV DNN + im = 
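
A hypothetical end-to-end use of `DetectMultiBackend` (the weight path, device, and dummy input are assumptions; the backend is resolved from the file suffix as shown in `model_type` further down):

```python
import torch

model = DetectMultiBackend('yolov5s.pt', device=torch.device('cpu'))
model.warmup(imgsz=(1, 3, 640, 640))   # no-op on CPU; one dummy pass on GPU backends
im = torch.zeros(1, 3, 640, 640)       # BCHW, float32, values in [0, 1]
pred = model(im)                       # raw predictions; NMS still to be applied
```
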
im.cpu().numpy() # torch to numpy + self.net.setInput(im) + y = self.net.forward() + elif self.onnx: # ONNX Runtime + im = im.cpu().numpy() # torch to numpy + y = self.session.run([self.session.get_outputs()[0].name], {self.session.get_inputs()[0].name: im})[0] + elif self.xml: # OpenVINO + im = im.cpu().numpy() # FP32 + desc = self.ie.TensorDesc(precision='FP32', dims=im.shape, layout='NCHW') # Tensor Description + request = self.executable_network.requests[0] # inference request + request.set_blob(blob_name='images', blob=self.ie.Blob(desc, im)) # name=next(iter(request.input_blobs)) + request.infer() + y = request.output_blobs['output'].buffer # name=next(iter(request.output_blobs)) + elif self.engine: # TensorRT + assert im.shape == self.bindings['images'].shape, (im.shape, self.bindings['images'].shape) + self.binding_addrs['images'] = int(im.data_ptr()) + self.context.execute_v2(list(self.binding_addrs.values())) + y = self.bindings['output'].data + elif self.coreml: # CoreML + im = im.permute(0, 2, 3, 1).cpu().numpy() # torch BCHW to numpy BHWC shape(1,320,192,3) + im = Image.fromarray((im[0] * 255).astype('uint8')) + # im = im.resize((192, 320), Image.ANTIALIAS) + y = self.model.predict({'image': im}) # coordinates are xywh normalized + if 'confidence' in y: + box = xywh2xyxy(y['coordinates'] * [[w, h, w, h]]) # xyxy pixels + conf, cls = y['confidence'].max(1), y['confidence'].argmax(1).astype(float) + y = np.concatenate((box, conf.reshape(-1, 1), cls.reshape(-1, 1)), 1) + else: + k = 'var_' + str(sorted(int(k.replace('var_', '')) for k in y)[-1]) # output key + y = y[k] # output + else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) + im = im.permute(0, 2, 3, 1).cpu().numpy() # torch BCHW to numpy BHWC shape(1,320,192,3) + if self.saved_model: # SavedModel + y = (self.model(im, training=False) if self.keras else self.model(im)[0]).numpy() + elif self.pb: # GraphDef + y = self.frozen_func(x=self.tf.constant(im)).numpy() + else: # Lite or Edge TPU + input, output = self.input_details[0], self.output_details[0] + int8 = input['dtype'] == np.uint8 # is TFLite quantized uint8 model + if int8: + scale, zero_point = input['quantization'] + im = (im / scale + zero_point).astype(np.uint8) # de-scale + self.interpreter.set_tensor(input['index'], im) + self.interpreter.invoke() + y = self.interpreter.get_tensor(output['index']) + if int8: + scale, zero_point = output['quantization'] + y = (y.astype(np.float32) - zero_point) * scale # re-scale + y[..., :4] *= [w, h, w, h] # xywh normalized to pixels + + y = torch.tensor(y) if isinstance(y, np.ndarray) else y + return (y, []) if val else y + + def warmup(self, imgsz=(1, 3, 640, 640), half=False): + # Warmup model by running inference once + if self.pt or self.jit or self.onnx or self.engine: # warmup types + if isinstance(self.device, torch.device) and self.device.type != 'cpu': # only warmup GPU models + im = torch.zeros(*imgsz).to(self.device).type(torch.half if half else torch.float) # input image + self.forward(im) # warmup + + @staticmethod + def model_type(p='path/to/model.pt'): + # Return model type from model path, i.e.
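
The Lite/Edge TPU branch above converts between float and uint8 with the standard affine quantization rule: `q = r/scale + zero_point` on the way in, `r = (q - zero_point) * scale` on the way out. A tiny numeric sketch with made-up quantization parameters:

```python
import numpy as np

scale, zero_point = 1 / 255, 0                       # assumed quantization params
r = np.array([0.25, 0.5], dtype=np.float32)

q = (r / scale + zero_point).astype(np.uint8)        # "de-scale" before set_tensor
r2 = (q.astype(np.float32) - zero_point) * scale     # "re-scale" after get_tensor
print(q, r2)   # [ 63 127]  [0.24705882 0.49803922]
```
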
path='path/to/model.onnx' -> type=onnx + from export import export_formats + suffixes = list(export_formats().Suffix) + ['.xml'] # export suffixes + check_suffix(p, suffixes) # checks + p = Path(p).name # eliminate trailing separators + pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, xml2 = (s in p for s in suffixes) + xml |= xml2 # *_openvino_model or *.xml + tflite &= not edgetpu # *.tflite + return pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs + + +class AutoShape(nn.Module): + # YOLOv5 input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS + conf = 0.25 # NMS confidence threshold + iou = 0.45 # NMS IoU threshold + agnostic = False # NMS class-agnostic + multi_label = False # NMS multiple labels per box + classes = None # (optional list) filter by class, i.e. = [0, 15, 16] for COCO persons, cats and dogs + max_det = 1000 # maximum number of detections per image + amp = False # Automatic Mixed Precision (AMP) inference + + def __init__(self, model): + super().__init__() + LOGGER.info('Adding AutoShape... ') + copy_attr(self, model, include=('yaml', 'nc', 'hyp', 'names', 'stride', 'abc'), exclude=()) # copy attributes + self.dmb = isinstance(model, DetectMultiBackend) # DetectMultiBackend() instance + self.pt = not self.dmb or model.pt # PyTorch model + self.model = model.eval() + + def _apply(self, fn): + # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers + self = super()._apply(fn) + if self.pt: + m = self.model.model.model[-1] if self.dmb else self.model.model[-1] # Detect() + m.stride = fn(m.stride) + m.grid = list(map(fn, m.grid)) + if isinstance(m.anchor_grid, list): + m.anchor_grid = list(map(fn, m.anchor_grid)) + return self + + @torch.no_grad() + def forward(self, imgs, size=640, augment=False, profile=False): + # Inference from various sources. For height=640, width=1280, RGB images example inputs are: + # file: imgs = 'data/images/zidane.jpg' # str or PosixPath + # URI: = 'https://ultralytics.com/images/zidane.jpg' + # OpenCV: = cv2.imread('image.jpg')[:,:,::-1] # HWC BGR to RGB x(640,1280,3) + # PIL: = Image.open('image.jpg') or ImageGrab.grab() # HWC x(640,1280,3) + # numpy: = np.zeros((640,1280,3)) # HWC + # torch: = torch.zeros(16,3,320,640) # BCHW (scaled to size=640, 0-1 values) + # multiple: = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] 
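
`model_type` reduces backend detection to substring tests against the export suffix list; a stripped-down sketch with an abbreviated, assumed suffix list:

```python
from pathlib import Path

suffixes = ['.pt', '.torchscript', '.onnx', '.engine', '.tflite']  # abbreviated
p = Path('path/to/model.onnx').name   # 'model.onnx', trailing separators gone
pt, jit, onnx, engine, tflite = (s in p for s in suffixes)
print(pt, onnx)   # False True
```
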
# list of images + + t = [time_sync()] + p = next(self.model.parameters()) if self.pt else torch.zeros(1) # for device and type + autocast = self.amp and (p.device.type != 'cpu') # Automatic Mixed Precision (AMP) inference + if isinstance(imgs, torch.Tensor): # torch + with amp.autocast(enabled=autocast): + return self.model(imgs.to(p.device).type_as(p), augment, profile) # inference + + # Pre-process + n, imgs = (len(imgs), imgs) if isinstance(imgs, list) else (1, [imgs]) # number of images, list of images + shape0, shape1, files = [], [], [] # image and inference shapes, filenames + for i, im in enumerate(imgs): + f = f'image{i}' # filename + if isinstance(im, (str, Path)): # filename or uri + im, f = Image.open(requests.get(im, stream=True).raw if str(im).startswith('http') else im), im + im = np.asarray(exif_transpose(im)) + elif isinstance(im, Image.Image): # PIL Image + im, f = np.asarray(exif_transpose(im)), getattr(im, 'filename', f) or f + files.append(Path(f).with_suffix('.jpg').name) + if im.shape[0] < 5: # image in CHW + im = im.transpose((1, 2, 0)) # reverse dataloader .transpose(2, 0, 1) + im = im[..., :3] if im.ndim == 3 else np.tile(im[..., None], 3) # enforce 3ch input + s = im.shape[:2] # HWC + shape0.append(s) # image shape + g = (size / max(s)) # gain + shape1.append([y * g for y in s]) + imgs[i] = im if im.data.contiguous else np.ascontiguousarray(im) # update + shape1 = [make_divisible(x, self.stride) for x in np.stack(shape1, 0).max(0)] # inference shape + x = [letterbox(im, new_shape=shape1 if self.pt else size, auto=False)[0] for im in imgs] # pad + x = np.stack(x, 0) if n > 1 else x[0][None] # stack + x = np.ascontiguousarray(x.transpose((0, 3, 1, 2))) # BHWC to BCHW + x = torch.from_numpy(x).to(p.device).type_as(p) / 255 # uint8 to fp16/32 + t.append(time_sync()) + + with amp.autocast(enabled=autocast): + # Inference + y = self.model(x, augment, profile) # forward + t.append(time_sync()) + + # Post-process + y = non_max_suppression(y if self.dmb else y[0], self.conf, iou_thres=self.iou, classes=self.classes, + agnostic=self.agnostic, multi_label=self.multi_label, max_det=self.max_det) # NMS + for i in range(n): + scale_coords(shape1, y[i][:, :4], shape0[i]) + + t.append(time_sync()) + return Detections(imgs, y, files, t, self.names, x.shape) + + +class Detections: + # YOLOv5 detections class for inference results + def __init__(self, imgs, pred, files, times=(0, 0, 0, 0), names=None, shape=None): + super().__init__() + d = pred[0].device # device + gn = [torch.tensor([*(im.shape[i] for i in [1, 0, 1, 0]), 1, 1], device=d) for im in imgs] # normalizations + self.imgs = imgs # list of images as numpy arrays + self.pred = pred # list of tensors pred[0] = (xyxy, conf, cls) + self.names = names # class names + self.files = files # image filenames + self.times = times # profiling times + self.xyxy = pred # xyxy pixels + self.xywh = [xyxy2xywh(x) for x in pred] # xywh pixels + self.xyxyn = [x / g for x, g in zip(self.xyxy, gn)] # xyxy normalized + self.xywhn = [x / g for x, g in zip(self.xywh, gn)] # xywh normalized + self.n = len(self.pred) # number of images (batch size) + self.t = tuple((times[i + 1] - times[i]) * 1000 / self.n for i in range(3)) # timestamps (ms) + self.s = shape # inference BCHW shape + + def display(self, pprint=False, show=False, save=False, crop=False, render=False, save_dir=Path('')): + crops = [] + for i, (im, pred) in enumerate(zip(self.imgs, self.pred)): + s = f'image {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} ' # string + if 
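
`Detections.__init__` below precomputes a per-image vector `gn` so that `xyxyn`/`xywhn` are plain element-wise divisions; a worked example with an assumed 640x480 image:

```python
import torch

im_h, im_w = 480, 640                                # numpy HWC image: shape[0]=h, shape[1]=w
gn = torch.tensor([im_w, im_h, im_w, im_h, 1, 1])    # divide the box, keep conf/cls
det = torch.tensor([64., 48., 320., 240., 0.9, 0.])  # xyxy, confidence, class
print(det / gn)   # tensor([0.1000, 0.1000, 0.5000, 0.5000, 0.9000, 0.0000])
```
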
pred.shape[0]: + for c in pred[:, -1].unique(): + n = (pred[:, -1] == c).sum() # detections per class + s += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, " # add to string + if show or save or render or crop: + annotator = Annotator(im, example=str(self.names)) + for *box, conf, cls in reversed(pred): # xyxy, confidence, class + label = f'{self.names[int(cls)]} {conf:.2f}' + if crop: + file = save_dir / 'crops' / self.names[int(cls)] / self.files[i] if save else None + crops.append({'box': box, 'conf': conf, 'cls': cls, 'label': label, + 'im': save_one_box(box, im, file=file, save=save)}) + else: # all others + annotator.box_label(box, label, color=colors(cls)) + im = annotator.im + else: + s += '(no detections)' + + im = Image.fromarray(im.astype(np.uint8)) if isinstance(im, np.ndarray) else im # from np + if pprint: + LOGGER.info(s.rstrip(', ')) + if show: + im.show(self.files[i]) # show + if save: + f = self.files[i] + im.save(save_dir / f) # save + if i == self.n - 1: + LOGGER.info(f"Saved {self.n} image{'s' * (self.n > 1)} to {colorstr('bold', save_dir)}") + if render: + self.imgs[i] = np.asarray(im) + if crop: + if save: + LOGGER.info(f'Saved results to {save_dir}\n') + return crops + + def print(self): + self.display(pprint=True) # print results + LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {tuple(self.s)}' % + self.t) + + def show(self): + self.display(show=True) # show results + + def save(self, save_dir='runs/detect/exp'): + save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) # increment save_dir + self.display(save=True, save_dir=save_dir) # save results + + def crop(self, save=True, save_dir='runs/detect/exp'): + save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) if save else None + return self.display(crop=True, save=save, save_dir=save_dir) # crop results + + def render(self): + self.display(render=True) # render results + return self.imgs + + def pandas(self): + # return detections as pandas DataFrames, i.e. print(results.pandas().xyxy[0]) + new = copy(self) # return copy + ca = 'xmin', 'ymin', 'xmax', 'ymax', 'confidence', 'class', 'name' # xyxy columns + cb = 'xcenter', 'ycenter', 'width', 'height', 'confidence', 'class', 'name' # xywh columns + for k, c in zip(['xyxy', 'xyxyn', 'xywh', 'xywhn'], [ca, ca, cb, cb]): + a = [[x[:5] + [int(x[5]), self.names[int(x[5])]] for x in x.tolist()] for x in getattr(self, k)] # update + setattr(new, k, [pd.DataFrame(x, columns=c) for x in a]) + return new + + def tolist(self): + # return a list of Detections objects, i.e. 'for result in results.tolist():' + r = range(self.n) # iterable + x = [Detections([self.imgs[i]], [self.pred[i]], [self.files[i]], self.times, self.names, self.s) for i in r] + # for d in x: + # for k in ['imgs', 'pred', 'xyxy', 'xyxyn', 'xywh', 'xywhn']: + # setattr(d, k, getattr(d, k)[0]) # pop out of list + return x + + def __len__(self): + return self.n + + +class Classify(nn.Module): + # Classification head, i.e. 
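
A hypothetical hub-style session showing how the Detections helpers chain together (the `model` object and image file are assumptions, not part of this diff):

```python
# model = AutoShape-wrapped detector, 'zidane.jpg' a local image (both assumed)
results = model('zidane.jpg')        # AutoShape.forward -> Detections
results.print()                      # per-class counts plus timing
df = results.pandas().xyxy[0]        # DataFrame: xmin..ymax, confidence, class, name
people = df[df['name'] == 'person']  # filter rows like any DataFrame
for r in results.tolist():           # one single-image Detections per element
    r.save()
```
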
x(b,c1,20,20) to x(b,c2) + def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups + super().__init__() + self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1) + self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g) # to x(b,c2,1,1) + self.flat = nn.Flatten() + + def forward(self, x): + z = torch.cat([self.aap(y) for y in (x if isinstance(x, list) else [x])], 1) # cat if list + return self.flat(self.conv(z)) # flatten to x(b,c2) diff --git a/detector/YOLOv5/models/experimental.py b/detector/YOLOv5/models/experimental.py new file mode 100644 index 0000000000000000000000000000000000000000..e0333d8f062fa784f686fdb653b378faacad0569 --- /dev/null +++ b/detector/YOLOv5/models/experimental.py @@ -0,0 +1,120 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Experimental modules +""" +import math + +import numpy as np +import torch +import torch.nn as nn + +from models.common import Conv +from detector.YOLOv5.utils.downloads import attempt_download + + +class CrossConv(nn.Module): + # Cross Convolution Downsample + def __init__(self, c1, c2, k=3, s=1, g=1, e=1.0, shortcut=False): + # ch_in, ch_out, kernel, stride, groups, expansion, shortcut + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, (1, k), (1, s)) + self.cv2 = Conv(c_, c2, (k, 1), (s, 1), g=g) + self.add = shortcut and c1 == c2 + + def forward(self, x): + return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) + + +class Sum(nn.Module): + # Weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, n, weight=False): # n: number of inputs + super().__init__() + self.weight = weight # apply weights boolean + self.iter = range(n - 1) # iter object + if weight: + self.w = nn.Parameter(-torch.arange(1.0, n) / 2, requires_grad=True) # layer weights + + def forward(self, x): + y = x[0] # no weight + if self.weight: + w = torch.sigmoid(self.w) * 2 + for i in self.iter: + y = y + x[i + 1] * w[i] + else: + for i in self.iter: + y = y + x[i + 1] + return y + + +class MixConv2d(nn.Module): + # Mixed Depth-wise Conv https://arxiv.org/abs/1907.09595 + def __init__(self, c1, c2, k=(1, 3), s=1, equal_ch=True): # ch_in, ch_out, kernel, stride, ch_strategy + super().__init__() + n = len(k) # number of convolutions + if equal_ch: # equal c_ per group + i = torch.linspace(0, n - 1E-6, c2).floor() # c2 indices + c_ = [(i == g).sum() for g in range(n)] # intermediate channels + else: # equal weight.numel() per group + b = [c2] + [0] * n + a = np.eye(n + 1, n, k=-1) + a -= np.roll(a, 1, axis=1) + a *= np.array(k) ** 2 + a[0] = 1 + c_ = np.linalg.lstsq(a, b, rcond=None)[0].round() # solve for equal weight indices, ax = b + + self.m = nn.ModuleList( + [nn.Conv2d(c1, int(c_), k, s, k // 2, groups=math.gcd(c1, int(c_)), bias=False) for k, c_ in zip(k, c_)]) + self.bn = nn.BatchNorm2d(c2) + self.act = nn.SiLU() + + def forward(self, x): + return self.act(self.bn(torch.cat([m(x) for m in self.m], 1))) + + +class Ensemble(nn.ModuleList): + # Ensemble of models + def __init__(self): + super().__init__() + + def forward(self, x, augment=False, profile=False, visualize=False): + y = [] + for module in self: + y.append(module(x, augment, profile, visualize)[0]) + # y = torch.stack(y).max(0)[0] # max ensemble + # y = torch.stack(y).mean(0) # mean ensemble + y = torch.cat(y, 1) # nms ensemble + return y, None # inference, train output + + +def attempt_load(weights, map_location=None, inplace=True, fuse=True): + from models.yolo import Detect, 
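
`Sum` (above) stores unconstrained weights and squashes them through `2 * sigmoid` at call time, so each extra input contributes with a factor in (0, 2). A sketch of the weighted n=3 path using the module's default initialization:

```python
import torch

x = [torch.full((4,), v) for v in (1.0, 2.0, 3.0)]  # three toy inputs
w0 = -torch.arange(1.0, 3) / 2                      # default init for n=3: [-0.5, -1.0]
w = torch.sigmoid(w0) * 2                           # squashed into (0, 2)
y = x[0] + x[1] * w[0] + x[2] * w[1]                # first input always unweighted
print(w, y)
```
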
Model + + # Loads an ensemble of models weights=[a,b,c] or a single model weights=[a] or weights=a + model = Ensemble() + for w in weights if isinstance(weights, list) else [weights]: + ckpt = torch.load(attempt_download(w), map_location=map_location) # load + if fuse: + model.append(ckpt['ema' if ckpt.get('ema') else 'model'].float().fuse().eval()) # FP32 model + else: + model.append(ckpt['ema' if ckpt.get('ema') else 'model'].float().eval()) # without layer fuse + + # Compatibility updates + for m in model.modules(): + if type(m) in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU, Detect, Model]: + m.inplace = inplace # pytorch 1.7.0 compatibility + if type(m) is Detect: + if not isinstance(m.anchor_grid, list): # new Detect Layer compatibility + delattr(m, 'anchor_grid') + setattr(m, 'anchor_grid', [torch.zeros(1)] * m.nl) + elif type(m) is Conv: + m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility + + if len(model) == 1: + return model[-1] # return model + else: + print(f'Ensemble created with {weights}\n') + for k in ['names']: + setattr(model, k, getattr(model[-1], k)) + model.stride = model[torch.argmax(torch.tensor([m.stride.max() for m in model])).int()].stride # max stride + return model # return ensemble diff --git a/detector/YOLOv5/models/hub/anchors.yaml b/detector/YOLOv5/models/hub/anchors.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e4d7beb06e07f295eaf58b1ebb2430a67997d2d4 --- /dev/null +++ b/detector/YOLOv5/models/hub/anchors.yaml @@ -0,0 +1,59 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +# Default anchors for COCO data + + +# P5 ------------------------------------------------------------------------------------------------------------------- +# P5-640: +anchors_p5_640: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + + +# P6 ------------------------------------------------------------------------------------------------------------------- +# P6-640: thr=0.25: 0.9964 BPR, 5.54 anchors past thr, n=12, img_size=640, metric_all=0.281/0.716-mean/best, past_thr=0.469-mean: 9,11, 21,19, 17,41, 43,32, 39,70, 86,64, 65,131, 134,130, 120,265, 282,180, 247,354, 512,387 +anchors_p6_640: + - [9,11, 21,19, 17,41] # P3/8 + - [43,32, 39,70, 86,64] # P4/16 + - [65,131, 134,130, 120,265] # P5/32 + - [282,180, 247,354, 512,387] # P6/64 + +# P6-1280: thr=0.25: 0.9950 BPR, 5.55 anchors past thr, n=12, img_size=1280, metric_all=0.281/0.714-mean/best, past_thr=0.468-mean: 19,27, 44,40, 38,94, 96,68, 86,152, 180,137, 140,301, 303,264, 238,542, 436,615, 739,380, 925,792 +anchors_p6_1280: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# P6-1920: thr=0.25: 0.9950 BPR, 5.55 anchors past thr, n=12, img_size=1920, metric_all=0.281/0.714-mean/best, past_thr=0.468-mean: 28,41, 67,59, 57,141, 144,103, 129,227, 270,205, 209,452, 455,396, 358,812, 653,922, 1109,570, 1387,1187 +anchors_p6_1920: + - [28,41, 67,59, 57,141] # P3/8 + - [144,103, 129,227, 270,205] # P4/16 + - [209,452, 455,396, 358,812] # P5/32 + - [653,922, 1109,570, 1387,1187] # P6/64 + + +# P7 ------------------------------------------------------------------------------------------------------------------- +# P7-640: thr=0.25: 0.9962 BPR, 6.76 anchors past thr, n=15, img_size=640, metric_all=0.275/0.733-mean/best, past_thr=0.466-mean: 11,11, 13,30, 29,20, 30,46, 61,38, 39,92, 78,80, 146,66, 79,163, 149,150, 321,143, 
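
Each `anchors_*` row lists (w, h) pixel pairs at the stated training resolution, three per output layer; the Detect head consumes them divided by the layer stride (the standard YOLOv5 convention, assumed here). For the P3 row of `anchors_p5_640`:

```python
anchors_p3 = [(10, 13), (16, 30), (33, 23)]   # P3/8 row of anchors_p5_640
stride = 8
grid_anchors = [(w / stride, h / stride) for w, h in anchors_p3]
print(grid_anchors)   # [(1.25, 1.625), (2.0, 3.75), (4.125, 2.875)]
```
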
157,303, 257,402, 359,290, 524,372 +anchors_p7_640: + - [11,11, 13,30, 29,20] # P3/8 + - [30,46, 61,38, 39,92] # P4/16 + - [78,80, 146,66, 79,163] # P5/32 + - [149,150, 321,143, 157,303] # P6/64 + - [257,402, 359,290, 524,372] # P7/128 + +# P7-1280: thr=0.25: 0.9968 BPR, 6.71 anchors past thr, n=15, img_size=1280, metric_all=0.273/0.732-mean/best, past_thr=0.463-mean: 19,22, 54,36, 32,77, 70,83, 138,71, 75,173, 165,159, 148,334, 375,151, 334,317, 251,626, 499,474, 750,326, 534,814, 1079,818 +anchors_p7_1280: + - [19,22, 54,36, 32,77] # P3/8 + - [70,83, 138,71, 75,173] # P4/16 + - [165,159, 148,334, 375,151] # P5/32 + - [334,317, 251,626, 499,474] # P6/64 + - [750,326, 534,814, 1079,818] # P7/128 + +# P7-1920: thr=0.25: 0.9968 BPR, 6.71 anchors past thr, n=15, img_size=1920, metric_all=0.273/0.732-mean/best, past_thr=0.463-mean: 29,34, 81,55, 47,115, 105,124, 207,107, 113,259, 247,238, 222,500, 563,227, 501,476, 376,939, 749,711, 1126,489, 801,1222, 1618,1227 +anchors_p7_1920: + - [29,34, 81,55, 47,115] # P3/8 + - [105,124, 207,107, 113,259] # P4/16 + - [247,238, 222,500, 563,227] # P5/32 + - [501,476, 376,939, 749,711] # P6/64 + - [1126,489, 801,1222, 1618,1227] # P7/128 diff --git a/detector/YOLOv5/models/hub/yolov3-spp.yaml b/detector/YOLOv5/models/hub/yolov3-spp.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c66982158ce82d4e4ed7241c469b6f0166f0db49 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov3-spp.yaml @@ -0,0 +1,51 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# darknet53 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [32, 3, 1]], # 0 + [-1, 1, Conv, [64, 3, 2]], # 1-P1/2 + [-1, 1, Bottleneck, [64]], + [-1, 1, Conv, [128, 3, 2]], # 3-P2/4 + [-1, 2, Bottleneck, [128]], + [-1, 1, Conv, [256, 3, 2]], # 5-P3/8 + [-1, 8, Bottleneck, [256]], + [-1, 1, Conv, [512, 3, 2]], # 7-P4/16 + [-1, 8, Bottleneck, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P5/32 + [-1, 4, Bottleneck, [1024]], # 10 + ] + +# YOLOv3-SPP head +head: + [[-1, 1, Bottleneck, [1024, False]], + [-1, 1, SPP, [512, [5, 9, 13]]], + [-1, 1, Conv, [1024, 3, 1]], + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, Conv, [1024, 3, 1]], # 15 (P5/32-large) + + [-2, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, Conv, [512, 3, 1]], # 22 (P4/16-medium) + + [-2, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P3 + [-1, 1, Bottleneck, [256, False]], + [-1, 2, Bottleneck, [256, False]], # 27 (P3/8-small) + + [[27, 22, 15], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov3-tiny.yaml b/detector/YOLOv5/models/hub/yolov3-tiny.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b28b443152485e39dcf690d18c403780c898bfab --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov3-tiny.yaml @@ -0,0 +1,41 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,14, 23,27, 37,58] # P4/16 + - [81,82, 135,169, 
344,319] # P5/32 + +# YOLOv3-tiny backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [16, 3, 1]], # 0 + [-1, 1, nn.MaxPool2d, [2, 2, 0]], # 1-P1/2 + [-1, 1, Conv, [32, 3, 1]], + [-1, 1, nn.MaxPool2d, [2, 2, 0]], # 3-P2/4 + [-1, 1, Conv, [64, 3, 1]], + [-1, 1, nn.MaxPool2d, [2, 2, 0]], # 5-P3/8 + [-1, 1, Conv, [128, 3, 1]], + [-1, 1, nn.MaxPool2d, [2, 2, 0]], # 7-P4/16 + [-1, 1, Conv, [256, 3, 1]], + [-1, 1, nn.MaxPool2d, [2, 2, 0]], # 9-P5/32 + [-1, 1, Conv, [512, 3, 1]], + [-1, 1, nn.ZeroPad2d, [[0, 1, 0, 1]]], # 11 + [-1, 1, nn.MaxPool2d, [2, 1, 0]], # 12 + ] + +# YOLOv3-tiny head +head: + [[-1, 1, Conv, [1024, 3, 1]], + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, Conv, [512, 3, 1]], # 15 (P5/32-large) + + [-2, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Conv, [256, 3, 1]], # 19 (P4/16-medium) + + [[19, 15], 1, Detect, [nc, anchors]], # Detect(P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov3.yaml b/detector/YOLOv5/models/hub/yolov3.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d1ef91290a8d261ccaf3a9663802e78b6b4e7542 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov3.yaml @@ -0,0 +1,51 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# darknet53 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [32, 3, 1]], # 0 + [-1, 1, Conv, [64, 3, 2]], # 1-P1/2 + [-1, 1, Bottleneck, [64]], + [-1, 1, Conv, [128, 3, 2]], # 3-P2/4 + [-1, 2, Bottleneck, [128]], + [-1, 1, Conv, [256, 3, 2]], # 5-P3/8 + [-1, 8, Bottleneck, [256]], + [-1, 1, Conv, [512, 3, 2]], # 7-P4/16 + [-1, 8, Bottleneck, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P5/32 + [-1, 4, Bottleneck, [1024]], # 10 + ] + +# YOLOv3 head +head: + [[-1, 1, Bottleneck, [1024, False]], + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, Conv, [1024, 3, 1]], + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, Conv, [1024, 3, 1]], # 15 (P5/32-large) + + [-2, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, Conv, [512, 3, 1]], # 22 (P4/16-medium) + + [-2, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P3 + [-1, 1, Bottleneck, [256, False]], + [-1, 2, Bottleneck, [256, False]], # 27 (P3/8-small) + + [[27, 22, 15], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-bifpn.yaml b/detector/YOLOv5/models/hub/yolov5-bifpn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..504815f5cfa03329618c4a1801f16ce68ec666e0 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-bifpn.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 
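
The `# k-Pn/s` comments running through these backbones track layer index, pyramid level, and cumulative stride: each stride-2 layer halves resolution, so Pn sits at stride 2**n. The bookkeeping in miniature (layer list reduced to the stride-2 entries of the v6.0 backbone):

```python
strides = [2, 2, 2, 2, 2]        # the five stride-2 convs of the v6.0 backbone
s, levels = 1, []
for k, st in enumerate(strides):
    s *= st
    levels.append((f'P{k + 1}', s))
print(levels)   # [('P1', 2), ('P2', 4), ('P3', 8), ('P4', 16), ('P5', 32)]
```
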
3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 BiFPN head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14, 6], 1, Concat, [1]], # cat P4 <--- BiFPN change + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-fpn.yaml b/detector/YOLOv5/models/hub/yolov5-fpn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a23e9c6fbf9f7f00c9e7f2a24bc8513a9d5717ea --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-fpn.yaml @@ -0,0 +1,42 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 FPN head +head: + [[-1, 3, C3, [1024, False]], # 10 (P5/32-large) + + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Conv, [512, 1, 1]], + [-1, 3, C3, [512, False]], # 14 (P4/16-medium) + + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 1, Conv, [256, 1, 1]], + [-1, 3, C3, [256, False]], # 18 (P3/8-small) + + [[18, 14, 10], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-p2.yaml b/detector/YOLOv5/models/hub/yolov5-p2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..554117dda59aca4a016b2ff42851d39cdc34f714 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-p2.yaml @@ -0,0 +1,54 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head with (P2, P3, P4, P5) outputs +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 
1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 2], 1, Concat, [1]], # cat backbone P2 + [-1, 1, C3, [128, False]], # 21 (P2/4-xsmall) + + [-1, 1, Conv, [128, 3, 2]], + [[-1, 18], 1, Concat, [1]], # cat head P3 + [-1, 3, C3, [256, False]], # 24 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 27 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 30 (P5/32-large) + + [[21, 24, 27, 30], 1, Detect, [nc, anchors]], # Detect(P2, P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-p34.yaml b/detector/YOLOv5/models/hub/yolov5-p34.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dbf0f850083ebf546ae7fc367be029297c174da1 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-p34.yaml @@ -0,0 +1,41 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [ [ -1, 1, Conv, [ 64, 6, 2, 2 ] ], # 0-P1/2 + [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 + [ -1, 3, C3, [ 128 ] ], + [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 + [ -1, 6, C3, [ 256 ] ], + [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 + [ -1, 9, C3, [ 512 ] ], + [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 7-P5/32 + [ -1, 3, C3, [ 1024 ] ], + [ -1, 1, SPPF, [ 1024, 5 ] ], # 9 + ] + +# YOLOv5 v6.0 head with (P3, P4) outputs +head: + [ [ -1, 1, Conv, [ 512, 1, 1 ] ], + [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], + [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 + [ -1, 3, C3, [ 512, False ] ], # 13 + + [ -1, 1, Conv, [ 256, 1, 1 ] ], + [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], + [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 + [ -1, 3, C3, [ 256, False ] ], # 17 (P3/8-small) + + [ -1, 1, Conv, [ 256, 3, 2 ] ], + [ [ -1, 14 ], 1, Concat, [ 1 ] ], # cat head P4 + [ -1, 3, C3, [ 512, False ] ], # 20 (P4/16-medium) + + [ [ 17, 20 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-p6.yaml b/detector/YOLOv5/models/hub/yolov5-p6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a17202f22044c0546bd9373ea58bd21c06b1d334 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-p6.yaml @@ -0,0 +1,56 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head with (P3, P4, P5, P6) outputs +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, 
[768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-p7.yaml b/detector/YOLOv5/models/hub/yolov5-p7.yaml new file mode 100644 index 0000000000000000000000000000000000000000..edd7d13a34a6c40e94d900ecce8ca64ae11bf5a1 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-p7.yaml @@ -0,0 +1,67 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, Conv, [1280, 3, 2]], # 11-P7/128 + [-1, 3, C3, [1280]], + [-1, 1, SPPF, [1280, 5]], # 13 + ] + +# YOLOv5 v6.0 head with (P3, P4, P5, P6, P7) outputs +head: + [[-1, 1, Conv, [1024, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 10], 1, Concat, [1]], # cat backbone P6 + [-1, 3, C3, [1024, False]], # 17 + + [-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 21 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 25 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 29 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 26], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 32 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 22], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 35 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 18], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 38 (P6/64-xlarge) + + [-1, 1, Conv, [1024, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P7 + [-1, 3, C3, [1280, False]], # 41 (P7/128-xxlarge) + + [[29, 32, 35, 38, 41], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6, P7) + ] diff --git a/detector/YOLOv5/models/hub/yolov5-panet.yaml b/detector/YOLOv5/models/hub/yolov5-panet.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ccfbf900691c5738b4705d2ce7944171b6152c98 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5-panet.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes 
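
`depth_multiple` and `width_multiple` are the only knobs separating the n/s/m/l/x variants: per-row repeat counts scale with `depth_multiple`, channel counts with `width_multiple` rounded up to a multiple of 8. A sketch of that scaling for a row like `[-1, 9, C3, [512]]` under yolov5s-style multiples (this mirrors YOLOv5's model parser, which lives outside this diff, so treat it as an assumption):

```python
import math

depth_multiple, width_multiple = 0.33, 0.50     # yolov5s-style multiples

def make_divisible(x, divisor=8):
    # round channel counts up to the nearest multiple of `divisor`
    return math.ceil(x / divisor) * divisor

n = max(round(9 * depth_multiple), 1)           # 9 repeats -> 3
c2 = make_divisible(512 * width_multiple)       # 512 channels -> 256
print(n, c2)   # 3 256
```
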
+depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 PANet head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5l6.yaml b/detector/YOLOv5/models/hub/yolov5l6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..632c2cb699e3cf261da462ec7dd20c0ffb7aaad3 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5l6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/hub/yolov5m6.yaml b/detector/YOLOv5/models/hub/yolov5m6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ecc53fd68ba6421b4fe63d6693b6563ecaa0e981 --- 
/dev/null +++ b/detector/YOLOv5/models/hub/yolov5m6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.67 # model depth multiple +width_multiple: 0.75 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/hub/yolov5n6.yaml b/detector/YOLOv5/models/hub/yolov5n6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0c0c71d32551789d57e5f44fd936636ecb4e3414 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5n6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.25 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 
26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/hub/yolov5s-ghost.yaml b/detector/YOLOv5/models/hub/yolov5s-ghost.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ff9519c3f1aa354f512ddab8b23e861d0f3de6c6 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5s-ghost.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, GhostConv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3Ghost, [128]], + [-1, 1, GhostConv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3Ghost, [256]], + [-1, 1, GhostConv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3Ghost, [512]], + [-1, 1, GhostConv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3Ghost, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, GhostConv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3Ghost, [512, False]], # 13 + + [-1, 1, GhostConv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3Ghost, [256, False]], # 17 (P3/8-small) + + [-1, 1, GhostConv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3Ghost, [512, False]], # 20 (P4/16-medium) + + [-1, 1, GhostConv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3Ghost, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5s-transformer.yaml b/detector/YOLOv5/models/hub/yolov5s-transformer.yaml new file mode 100644 index 0000000000000000000000000000000000000000..100d7c447527f1116e0edb3e1c096904fe3302f1 --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5s-transformer.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3TR, [1024]], # 9 <--- C3TR() Transformer module + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, 
Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/hub/yolov5s6.yaml b/detector/YOLOv5/models/hub/yolov5s6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a28fb559482b25a41531517a68f08253f08edb0f --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5s6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/hub/yolov5x6.yaml b/detector/YOLOv5/models/hub/yolov5x6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ba795c4aad319b94db0fb4fd6961e9ef0cac207a --- /dev/null +++ b/detector/YOLOv5/models/hub/yolov5x6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.33 # model depth multiple +width_multiple: 1.25 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat 
backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/detector/YOLOv5/models/tf.py b/detector/YOLOv5/models/tf.py new file mode 100644 index 0000000000000000000000000000000000000000..5f9bb217ed63714537d26a2b38ff5aa102911e42 --- /dev/null +++ b/detector/YOLOv5/models/tf.py @@ -0,0 +1,464 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +TensorFlow, Keras and TFLite versions of YOLOv5 +Authored by https://github.com/zldrobit in PR https://github.com/ultralytics/yolov5/pull/1127 + +Usage: + $ python models/tf.py --weights yolov5s.pt + +Export: + $ python path/to/export.py --weights yolov5s.pt --include saved_model pb tflite tfjs +""" + +import argparse +import sys +from copy import deepcopy +from pathlib import Path + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative + +import numpy as np +import tensorflow as tf +import torch +import torch.nn as nn +from tensorflow import keras + +from models.common import C3, SPP, SPPF, Bottleneck, BottleneckCSP, Concat, Conv, DWConv, Focus, autopad +from models.experimental import CrossConv, MixConv2d, attempt_load +from models.yolo import Detect +from detector.YOLOv5.utils.activations import SiLU +from detector.YOLOv5.utils.general import LOGGER, make_divisible, print_args + + +class TFBN(keras.layers.Layer): + # TensorFlow BatchNormalization wrapper + def __init__(self, w=None): + super().__init__() + self.bn = keras.layers.BatchNormalization( + beta_initializer=keras.initializers.Constant(w.bias.numpy()), + gamma_initializer=keras.initializers.Constant(w.weight.numpy()), + moving_mean_initializer=keras.initializers.Constant(w.running_mean.numpy()), + moving_variance_initializer=keras.initializers.Constant(w.running_var.numpy()), + epsilon=w.eps) + + def call(self, inputs): + return self.bn(inputs) + + +class TFPad(keras.layers.Layer): + def __init__(self, pad): + super().__init__() + self.pad = tf.constant([[0, 0], [pad, pad], [pad, pad], [0, 0]]) + + def call(self, inputs): + return tf.pad(inputs, self.pad, mode='constant', constant_values=0) + + +class TFConv(keras.layers.Layer): + # Standard convolution + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True, w=None): + # ch_in, ch_out, weights, kernel, stride, padding, groups + super().__init__() + assert g == 1, "TF v2.2 Conv2D does not support 'groups' argument" + assert isinstance(k, int), "Convolution with multiple kernels is not allowed." + # TensorFlow convolution padding is inconsistent with PyTorch (e.g.
k=3 s=2 'SAME' padding) + # see https://stackoverflow.com/questions/52975843/comparing-conv2d-with-padding-between-tensorflow-and-pytorch + + conv = keras.layers.Conv2D( + c2, k, s, 'SAME' if s == 1 else 'VALID', use_bias=False if hasattr(w, 'bn') else True, + kernel_initializer=keras.initializers.Constant(w.conv.weight.permute(2, 3, 1, 0).numpy()), + bias_initializer='zeros' if hasattr(w, 'bn') else keras.initializers.Constant(w.conv.bias.numpy())) + self.conv = conv if s == 1 else keras.Sequential([TFPad(autopad(k, p)), conv]) + self.bn = TFBN(w.bn) if hasattr(w, 'bn') else tf.identity + + # YOLOv5 activations + if isinstance(w.act, nn.LeakyReLU): + self.act = (lambda x: keras.activations.relu(x, alpha=0.1)) if act else tf.identity + elif isinstance(w.act, nn.Hardswish): + self.act = (lambda x: x * tf.nn.relu6(x + 3) * 0.166666667) if act else tf.identity + elif isinstance(w.act, (nn.SiLU, SiLU)): + self.act = (lambda x: keras.activations.swish(x)) if act else tf.identity + else: + raise Exception(f'no matching TensorFlow activation found for {w.act}') + + def call(self, inputs): + return self.act(self.bn(self.conv(inputs))) + + +class TFFocus(keras.layers.Layer): + # Focus wh information into c-space + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True, w=None): + # ch_in, ch_out, kernel, stride, padding, groups + super().__init__() + self.conv = TFConv(c1 * 4, c2, k, s, p, g, act, w.conv) + + def call(self, inputs): # x(b,w,h,c) -> y(b,w/2,h/2,4c) + # inputs = inputs / 255 # normalize 0-255 to 0-1 + return self.conv(tf.concat([inputs[:, ::2, ::2, :], + inputs[:, 1::2, ::2, :], + inputs[:, ::2, 1::2, :], + inputs[:, 1::2, 1::2, :]], 3)) + + +class TFBottleneck(keras.layers.Layer): + # Standard bottleneck + def __init__(self, c1, c2, shortcut=True, g=1, e=0.5, w=None): # ch_in, ch_out, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_, c2, 3, 1, g=g, w=w.cv2) + self.add = shortcut and c1 == c2 + + def call(self, inputs): + return inputs + self.cv2(self.cv1(inputs)) if self.add else self.cv2(self.cv1(inputs)) + + +class TFConv2d(keras.layers.Layer): + # Substitution for PyTorch nn.Conv2D + def __init__(self, c1, c2, k, s=1, g=1, bias=True, w=None): + super().__init__() + assert g == 1, "TF v2.2 Conv2D does not support 'groups' argument" + self.conv = keras.layers.Conv2D( + c2, k, s, 'VALID', use_bias=bias, + kernel_initializer=keras.initializers.Constant(w.weight.permute(2, 3, 1, 0).numpy()), + bias_initializer=keras.initializers.Constant(w.bias.numpy()) if bias else None, ) + + def call(self, inputs): + return self.conv(inputs) + + +class TFBottleneckCSP(keras.layers.Layer): + # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5, w=None): + # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv2d(c1, c_, 1, 1, bias=False, w=w.cv2) + self.cv3 = TFConv2d(c_, c_, 1, 1, bias=False, w=w.cv3) + self.cv4 = TFConv(2 * c_, c2, 1, 1, w=w.cv4) + self.bn = TFBN(w.bn) + self.act = lambda x: keras.activations.relu(x, alpha=0.1) + self.m = keras.Sequential([TFBottleneck(c_, c_, shortcut, g, e=1.0, w=w.m[j]) for j in range(n)]) + + def call(self, inputs): + y1 = self.cv3(self.m(self.cv1(inputs))) + y2 = self.cv2(inputs) + return self.cv4(self.act(self.bn(tf.concat((y1, y2), axis=3)))) + + +class 
TFC3(keras.layers.Layer): + # CSP Bottleneck with 3 convolutions + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5, w=None): + # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c1, c_, 1, 1, w=w.cv2) + self.cv3 = TFConv(2 * c_, c2, 1, 1, w=w.cv3) + self.m = keras.Sequential([TFBottleneck(c_, c_, shortcut, g, e=1.0, w=w.m[j]) for j in range(n)]) + + def call(self, inputs): + return self.cv3(tf.concat((self.m(self.cv1(inputs)), self.cv2(inputs)), axis=3)) + + +class TFSPP(keras.layers.Layer): + # Spatial pyramid pooling layer used in YOLOv3-SPP + def __init__(self, c1, c2, k=(5, 9, 13), w=None): + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_ * (len(k) + 1), c2, 1, 1, w=w.cv2) + self.m = [keras.layers.MaxPool2D(pool_size=x, strides=1, padding='SAME') for x in k] + + def call(self, inputs): + x = self.cv1(inputs) + return self.cv2(tf.concat([x] + [m(x) for m in self.m], 3)) + + +class TFSPPF(keras.layers.Layer): + # Spatial pyramid pooling-Fast layer + def __init__(self, c1, c2, k=5, w=None): + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_ * 4, c2, 1, 1, w=w.cv2) + self.m = keras.layers.MaxPool2D(pool_size=k, strides=1, padding='SAME') + + def call(self, inputs): + x = self.cv1(inputs) + y1 = self.m(x) + y2 = self.m(y1) + return self.cv2(tf.concat([x, y1, y2, self.m(y2)], 3)) + + +class TFDetect(keras.layers.Layer): + def __init__(self, nc=80, anchors=(), ch=(), imgsz=(640, 640), w=None): # detection layer + super().__init__() + self.stride = tf.convert_to_tensor(w.stride.numpy(), dtype=tf.float32) + self.nc = nc # number of classes + self.no = nc + 5 # number of outputs per anchor + self.nl = len(anchors) # number of detection layers + self.na = len(anchors[0]) // 2 # number of anchors + self.grid = [tf.zeros(1)] * self.nl # init grid + self.anchors = tf.convert_to_tensor(w.anchors.numpy(), dtype=tf.float32) + self.anchor_grid = tf.reshape(self.anchors * tf.reshape(self.stride, [self.nl, 1, 1]), + [self.nl, 1, -1, 1, 2]) + self.m = [TFConv2d(x, self.no * self.na, 1, w=w.m[i]) for i, x in enumerate(ch)] + self.training = False # set to False after building model + self.imgsz = imgsz + for i in range(self.nl): + ny, nx = self.imgsz[0] // self.stride[i], self.imgsz[1] // self.stride[i] + self.grid[i] = self._make_grid(nx, ny) + + def call(self, inputs): + z = [] # inference output + x = [] + for i in range(self.nl): + x.append(self.m[i](inputs[i])) + # x(bs,20,20,255) to x(bs,3,20,20,85) + ny, nx = self.imgsz[0] // self.stride[i], self.imgsz[1] // self.stride[i] + x[i] = tf.transpose(tf.reshape(x[i], [-1, ny * nx, self.na, self.no]), [0, 2, 1, 3]) + + if not self.training: # inference + y = tf.sigmoid(x[i]) + xy = (y[..., 0:2] * 2 - 0.5 + self.grid[i]) * self.stride[i] # xy + wh = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] + # Normalize xywh to 0-1 to reduce calibration error + xy /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) + wh /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) + y = tf.concat([xy, wh, y[..., 4:]], -1) + z.append(tf.reshape(y, [-1, self.na * ny * nx, self.no])) + + return x if self.training else (tf.concat(z, 1), x) + + @staticmethod + def _make_grid(nx=20, ny=20): + # yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) + # return torch.stack((xv, yv), 
2).view((1, 1, ny, nx, 2)).float() + xv, yv = tf.meshgrid(tf.range(nx), tf.range(ny)) + return tf.cast(tf.reshape(tf.stack([xv, yv], 2), [1, 1, ny * nx, 2]), dtype=tf.float32) + + +class TFUpsample(keras.layers.Layer): + def __init__(self, size, scale_factor, mode, w=None): # warning: all arguments needed including 'w' + super().__init__() + assert scale_factor == 2, "scale_factor must be 2" + self.upsample = lambda x: tf.image.resize(x, (x.shape[1] * 2, x.shape[2] * 2), method=mode) + # self.upsample = keras.layers.UpSampling2D(size=scale_factor, interpolation=mode) + # with default arguments: align_corners=False, half_pixel_centers=False + # self.upsample = lambda x: tf.raw_ops.ResizeNearestNeighbor(images=x, + # size=(x.shape[1] * 2, x.shape[2] * 2)) + + def call(self, inputs): + return self.upsample(inputs) + + +class TFConcat(keras.layers.Layer): + def __init__(self, dimension=1, w=None): + super().__init__() + assert dimension == 1, "convert only NCHW to NHWC concat" + self.d = 3 + + def call(self, inputs): + return tf.concat(inputs, self.d) + + +def parse_model(d, ch, model, imgsz): # model_dict, input_channels(3) + LOGGER.info(f"\n{'':>3}{'from':>18}{'n':>3}{'params':>10} {'module':<40}{'arguments':<30}") + anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] + na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors + no = na * (nc + 5) # number of outputs = anchors * (classes + 5) + + layers, save, c2 = [], [], ch[-1] # layers, savelist, ch out + for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']): # from, number, module, args + m_str = m + m = eval(m) if isinstance(m, str) else m # eval strings + for j, a in enumerate(args): + try: + args[j] = eval(a) if isinstance(a, str) else a # eval strings + except NameError: + pass + + n = max(round(n * gd), 1) if n > 1 else n # depth gain + if m in [nn.Conv2d, Conv, Bottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, BottleneckCSP, C3]: + c1, c2 = ch[f], args[0] + c2 = make_divisible(c2 * gw, 8) if c2 != no else c2 + + args = [c1, c2, *args[1:]] + if m in [BottleneckCSP, C3]: + args.insert(2, n) + n = 1 + elif m is nn.BatchNorm2d: + args = [ch[f]] + elif m is Concat: + c2 = sum(ch[-1 if x == -1 else x + 1] for x in f) + elif m is Detect: + args.append([ch[x + 1] for x in f]) + if isinstance(args[1], int): # number of anchors + args[1] = [list(range(args[1] * 2))] * len(f) + args.append(imgsz) + else: + c2 = ch[f] + + tf_m = eval('TF' + m_str.replace('nn.', '')) + m_ = keras.Sequential([tf_m(*args, w=model.model[i][j]) for j in range(n)]) if n > 1 \ + else tf_m(*args, w=model.model[i]) # module + + torch_m_ = nn.Sequential(*(m(*args) for _ in range(n))) if n > 1 else m(*args) # module + t = str(m)[8:-2].replace('__main__.', '') # module type + np = sum(x.numel() for x in torch_m_.parameters()) # number params + m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params + LOGGER.info(f'{i:>3}{str(f):>18}{str(n):>3}{np:>10} {t:<40}{str(args):<30}') # print + save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist + layers.append(m_) + ch.append(c2) + return keras.Sequential(layers), sorted(save) + + +class TFModel: + def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None, model=None, imgsz=(640, 640)): # model, channels, classes + super().__init__() + if isinstance(cfg, dict): + self.yaml = cfg # model dict + else: # is *.yaml + import yaml # for torch hub + self.yaml_file = Path(cfg).name 
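+ # read the architecture dict (nc, depth_multiple, width_multiple, anchors, backbone/head rows) that parse_model() consumes below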
+ with open(cfg) as f: + self.yaml = yaml.load(f, Loader=yaml.FullLoader) # model dict + + # Define model + if nc and nc != self.yaml['nc']: + LOGGER.info(f"Overriding {cfg} nc={self.yaml['nc']} with nc={nc}") + self.yaml['nc'] = nc # override yaml value + self.model, self.savelist = parse_model(deepcopy(self.yaml), ch=[ch], model=model, imgsz=imgsz) + + def predict(self, inputs, tf_nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45, + conf_thres=0.25): + y = [] # outputs + x = inputs + for i, m in enumerate(self.model.layers): + if m.f != -1: # if not from previous layer + x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers + + x = m(x) # run + y.append(x if m.i in self.savelist else None) # save output + + # Add TensorFlow NMS + if tf_nms: + boxes = self._xywh2xyxy(x[0][..., :4]) + probs = x[0][:, :, 4:5] + classes = x[0][:, :, 5:] + scores = probs * classes + if agnostic_nms: + nms = AgnosticNMS()((boxes, classes, scores), topk_all, iou_thres, conf_thres) + return nms, x[1] + else: + boxes = tf.expand_dims(boxes, 2) + nms = tf.image.combined_non_max_suppression( + boxes, scores, topk_per_class, topk_all, iou_thres, conf_thres, clip_boxes=False) + return nms, x[1] + + return x[0] # output only first tensor [1,6300,85] = [xywh, conf, class0, class1, ...] + # x = x[0][0] # [x(1,6300,85), ...] to x(6300,85) + # xywh = x[..., :4] # x(6300,4) boxes + # conf = x[..., 4:5] # x(6300,1) confidences + # cls = tf.reshape(tf.cast(tf.argmax(x[..., 5:], axis=1), tf.float32), (-1, 1)) # x(6300,1) classes + # return tf.concat([conf, cls, xywh], 1) + + @staticmethod + def _xywh2xyxy(xywh): + # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right + x, y, w, h = tf.split(xywh, num_or_size_splits=4, axis=-1) + return tf.concat([x - w / 2, y - h / 2, x + w / 2, y + h / 2], axis=-1) + + +class AgnosticNMS(keras.layers.Layer): + # TF Agnostic NMS + def call(self, input, topk_all, iou_thres, conf_thres): + # wrap map_fn to avoid TypeSpec related error https://stackoverflow.com/a/65809989/3036450 + return tf.map_fn(lambda x: self._nms(x, topk_all, iou_thres, conf_thres), input, + fn_output_signature=(tf.float32, tf.float32, tf.float32, tf.int32), + name='agnostic_nms') + + @staticmethod + def _nms(x, topk_all=100, iou_thres=0.45, conf_thres=0.25): # agnostic NMS + boxes, classes, scores = x + class_inds = tf.cast(tf.argmax(classes, axis=-1), tf.float32) + scores_inp = tf.reduce_max(scores, -1) + selected_inds = tf.image.non_max_suppression( + boxes, scores_inp, max_output_size=topk_all, iou_threshold=iou_thres, score_threshold=conf_thres) + selected_boxes = tf.gather(boxes, selected_inds) + padded_boxes = tf.pad(selected_boxes, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]], [0, 0]], + mode="CONSTANT", constant_values=0.0) + selected_scores = tf.gather(scores_inp, selected_inds) + padded_scores = tf.pad(selected_scores, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]], + mode="CONSTANT", constant_values=-1.0) + selected_classes = tf.gather(class_inds, selected_inds) + padded_classes = tf.pad(selected_classes, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]], + mode="CONSTANT", constant_values=-1.0) + valid_detections = tf.shape(selected_inds)[0] + return padded_boxes, padded_scores, padded_classes, valid_detections + + +def representative_dataset_gen(dataset, ncalib=100): + # Representative dataset generator for use with converter.representative_dataset, returns a 
generator of np arrays + for n, (path, img, im0s, vid_cap, string) in enumerate(dataset): + input = np.transpose(img, [1, 2, 0]) + input = np.expand_dims(input, axis=0).astype(np.float32) + input /= 255 + yield [input] + if n >= ncalib: + break + + +def run(weights=ROOT / 'yolov5s.pt', # weights path + imgsz=(640, 640), # inference size h,w + batch_size=1, # batch size + dynamic=False, # dynamic batch size + ): + # PyTorch model + im = torch.zeros((batch_size, 3, *imgsz)) # BCHW image + model = attempt_load(weights, map_location=torch.device('cpu'), inplace=True, fuse=False) + _ = model(im) # inference + model.info() + + # TensorFlow model + im = tf.zeros((batch_size, *imgsz, 3)) # BHWC image + tf_model = TFModel(cfg=model.yaml, model=model, nc=model.nc, imgsz=imgsz) + _ = tf_model.predict(im) # inference + + # Keras model + im = keras.Input(shape=(*imgsz, 3), batch_size=None if dynamic else batch_size) + keras_model = keras.Model(inputs=im, outputs=tf_model.predict(im)) + keras_model.summary() + + LOGGER.info('PyTorch, TensorFlow and Keras models successfully verified.\nUse export.py for TF model export.') + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='weights path') + parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640], help='inference size h,w') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--dynamic', action='store_true', help='dynamic batch size') + opt = parser.parse_args() + opt.imgsz *= 2 if len(opt.imgsz) == 1 else 1 # expand + print_args(FILE.stem, opt) + return opt + + +def main(opt): + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/detector/YOLOv5/models/yolo.py b/detector/YOLOv5/models/yolo.py new file mode 100644 index 0000000000000000000000000000000000000000..904d16c605fabcceb7d795e02d1837b58221d62c --- /dev/null +++ b/detector/YOLOv5/models/yolo.py @@ -0,0 +1,329 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +YOLO-specific modules + +Usage: + $ python path/to/models/yolo.py --cfg yolov5s.yaml +""" + +import argparse +import sys +from copy import deepcopy +from pathlib import Path + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative + +from models.common import * +from models.experimental import * +from detector.YOLOv5.utils.autoanchor import check_anchor_order +from detector.YOLOv5.utils.general import LOGGER, check_version, check_yaml, make_divisible, print_args +from detector.YOLOv5.utils.plots import feature_visualization +from detector.YOLOv5.utils.torch_utils import fuse_conv_and_bn, initialize_weights, model_info, scale_img, select_device, time_sync + +try: + import thop # for FLOPs computation +except ImportError: + thop = None + + +class Detect(nn.Module): + stride = None # strides computed during build + onnx_dynamic = False # ONNX export parameter + + def __init__(self, nc=80, anchors=(), ch=(), inplace=True): # detection layer + super().__init__() + self.nc = nc # number of classes + self.no = nc + 5 # number of outputs per anchor + self.nl = len(anchors) # number of detection layers + self.na = len(anchors[0]) // 2 # number of anchors + self.grid = [torch.zeros(1)] * self.nl # init grid + self.anchor_grid = [torch.zeros(1)] * self.nl # init anchor grid + 
self.register_buffer('anchors', torch.tensor(anchors).float().view(self.nl, -1, 2)) # shape(nl,na,2) + self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv + self.inplace = inplace # use in-place ops (e.g. slice assignment) + + def forward(self, x): + z = [] # inference output + for i in range(self.nl): + x[i] = self.m[i](x[i]) # conv + bs, _, ny, nx = x[i].shape # x(bs,255,20,20) to x(bs,3,20,20,85) + x[i] = x[i].view(bs, self.na, self.no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() + + if not self.training: # inference + if self.onnx_dynamic or self.grid[i].shape[2:4] != x[i].shape[2:4]: + self.grid[i], self.anchor_grid[i] = self._make_grid(nx, ny, i) + + y = x[i].sigmoid() + if self.inplace: + y[..., 0:2] = (y[..., 0:2] * 2 - 0.5 + self.grid[i]) * self.stride[i] # xy + y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh + else: # for YOLOv5 on AWS Inferentia https://github.com/ultralytics/yolov5/pull/2953 + xy = (y[..., 0:2] * 2 - 0.5 + self.grid[i]) * self.stride[i] # xy + wh = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh + y = torch.cat((xy, wh, y[..., 4:]), -1) + z.append(y.view(bs, -1, self.no)) + + return x if self.training else (torch.cat(z, 1), x) + + def _make_grid(self, nx=20, ny=20, i=0): + d = self.anchors[i].device + if check_version(torch.__version__, '1.10.0'): # torch>=1.10.0 meshgrid workaround for torch>=0.7 compatibility + yv, xv = torch.meshgrid([torch.arange(ny, device=d), torch.arange(nx, device=d)], indexing='ij') + else: + yv, xv = torch.meshgrid([torch.arange(ny, device=d), torch.arange(nx, device=d)]) + grid = torch.stack((xv, yv), 2).expand((1, self.na, ny, nx, 2)).float() + anchor_grid = (self.anchors[i].clone() * self.stride[i]) \ + .view((1, self.na, 1, 1, 2)).expand((1, self.na, ny, nx, 2)).float() + return grid, anchor_grid + + +class Model(nn.Module): + def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None, anchors=None): # model, input channels, number of classes + super().__init__() + if isinstance(cfg, dict): + self.yaml = cfg # model dict + else: # is *.yaml + import yaml # for torch hub + self.yaml_file = Path(cfg).name + with open(cfg, encoding='ascii', errors='ignore') as f: + self.yaml = yaml.safe_load(f) # model dict + + # Define model + ch = self.yaml['ch'] = self.yaml.get('ch', ch) # input channels + if nc and nc != self.yaml['nc']: + LOGGER.info(f"Overriding model.yaml nc={self.yaml['nc']} with nc={nc}") + self.yaml['nc'] = nc # override yaml value + if anchors: + LOGGER.info(f'Overriding model.yaml anchors with anchors={anchors}') + self.yaml['anchors'] = round(anchors) # override yaml value + self.model, self.save = parse_model(deepcopy(self.yaml), ch=[ch]) # model, savelist + self.names = [str(i) for i in range(self.yaml['nc'])] # default names + self.inplace = self.yaml.get('inplace', True) + + # Build strides, anchors + m = self.model[-1] # Detect() + if isinstance(m, Detect): + s = 256 # 2x min stride + m.inplace = self.inplace + m.stride = torch.tensor([s / x.shape[-2] for x in self.forward(torch.zeros(1, ch, s, s))]) # forward + m.anchors /= m.stride.view(-1, 1, 1) + check_anchor_order(m) + self.stride = m.stride + self._initialize_biases() # only run once + + # Init weights, biases + initialize_weights(self) + self.info() + LOGGER.info('') + + def forward(self, x, augment=False, profile=False, visualize=False): + if augment: + return self._forward_augment(x) # augmented inference, None + return self._forward_once(x, profile, visualize) # single-scale inference, train + + def 
_forward_augment(self, x): + img_size = x.shape[-2:] # height, width + s = [1, 0.83, 0.67] # scales + f = [None, 3, None] # flips (2-ud, 3-lr) + y = [] # outputs + for si, fi in zip(s, f): + xi = scale_img(x.flip(fi) if fi else x, si, gs=int(self.stride.max())) + yi = self._forward_once(xi)[0] # forward + # cv2.imwrite(f'img_{si}.jpg', 255 * xi[0].cpu().numpy().transpose((1, 2, 0))[:, :, ::-1]) # save + yi = self._descale_pred(yi, fi, si, img_size) + y.append(yi) + y = self._clip_augmented(y) # clip augmented tails + return torch.cat(y, 1), None # augmented inference, train + + def _forward_once(self, x, profile=False, visualize=False): + y, dt = [], [] # outputs + for m in self.model: + if m.f != -1: # if not from previous layer + x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers + if profile: + self._profile_one_layer(m, x, dt) + x = m(x) # run + y.append(x if m.i in self.save else None) # save output + if visualize: + feature_visualization(x, m.type, m.i, save_dir=visualize) + return x + + def _descale_pred(self, p, flips, scale, img_size): + # de-scale predictions following augmented inference (inverse operation) + if self.inplace: + p[..., :4] /= scale # de-scale + if flips == 2: + p[..., 1] = img_size[0] - p[..., 1] # de-flip ud + elif flips == 3: + p[..., 0] = img_size[1] - p[..., 0] # de-flip lr + else: + x, y, wh = p[..., 0:1] / scale, p[..., 1:2] / scale, p[..., 2:4] / scale # de-scale + if flips == 2: + y = img_size[0] - y # de-flip ud + elif flips == 3: + x = img_size[1] - x # de-flip lr + p = torch.cat((x, y, wh, p[..., 4:]), -1) + return p + + def _clip_augmented(self, y): + # Clip YOLOv5 augmented inference tails + nl = self.model[-1].nl # number of detection layers (P3-P5) + g = sum(4 ** x for x in range(nl)) # grid points + e = 1 # exclude layer count + i = (y[0].shape[1] // g) * sum(4 ** x for x in range(e)) # indices + y[0] = y[0][:, :-i] # large + i = (y[-1].shape[1] // g) * sum(4 ** (nl - 1 - x) for x in range(e)) # indices + y[-1] = y[-1][:, i:] # small + return y + + def _profile_one_layer(self, m, x, dt): + c = isinstance(m, Detect) # is final layer, copy input as inplace fix + o = thop.profile(m, inputs=(x.copy() if c else x,), verbose=False)[0] / 1E9 * 2 if thop else 0 # FLOPs + t = time_sync() + for _ in range(10): + m(x.copy() if c else x) + dt.append((time_sync() - t) * 100) + if m == self.model[0]: + LOGGER.info(f"{'time (ms)':>10s} {'GFLOPs':>10s} {'params':>10s} {'module'}") + LOGGER.info(f'{dt[-1]:10.2f} {o:10.2f} {m.np:10.0f} {m.type}') + if c: + LOGGER.info(f"{sum(dt):10.2f} {'-':>10s} {'-':>10s} Total") + + def _initialize_biases(self, cf=None): # initialize biases into Detect(), cf is class frequency + # https://arxiv.org/abs/1708.02002 section 3.3 + # cf = torch.bincount(torch.tensor(np.concatenate(dataset.labels, 0)[:, 0]).long(), minlength=nc) + 1. 
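+ # Bias priors rather than zeros: the objectness bias is shifted by log(8 / (640 / s) ** 2), i.e. roughly 8 objects expected per 640x640 image at stride s, and each class bias by log(0.6 / (nc - 0.999999)) (a near-uniform class prior) unless class frequencies cf are given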
+ m = self.model[-1] # Detect() module + for mi, s in zip(m.m, m.stride): # from + b = mi.bias.view(m.na, -1) # conv.bias(255) to (3,85) + b.data[:, 4] += math.log(8 / (640 / s) ** 2) # obj (8 objects per 640 image) + b.data[:, 5:] += math.log(0.6 / (m.nc - 0.999999)) if cf is None else torch.log(cf / cf.sum()) # cls + mi.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) + + def _print_biases(self): + m = self.model[-1] # Detect() module + for mi in m.m: # from + b = mi.bias.detach().view(m.na, -1).T # conv.bias(255) to (3,85) + LOGGER.info( + ('%6g Conv2d.bias:' + '%10.3g' * 6) % (mi.weight.shape[1], *b[:5].mean(1).tolist(), b[5:].mean())) + + # def _print_weights(self): + # for m in self.model.modules(): + # if type(m) is Bottleneck: + # LOGGER.info('%10.3g' % (m.w.detach().sigmoid() * 2)) # shortcut weights + + def fuse(self): # fuse model Conv2d() + BatchNorm2d() layers + LOGGER.info('Fusing layers... ') + for m in self.model.modules(): + if isinstance(m, (Conv, DWConv)) and hasattr(m, 'bn'): + m.conv = fuse_conv_and_bn(m.conv, m.bn) # update conv + delattr(m, 'bn') # remove batchnorm + m.forward = m.forward_fuse # update forward + self.info() + return self + + def info(self, verbose=False, img_size=640): # print model information + model_info(self, verbose, img_size) + + def _apply(self, fn): + # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers + self = super()._apply(fn) + m = self.model[-1] # Detect() + if isinstance(m, Detect): + m.stride = fn(m.stride) + m.grid = list(map(fn, m.grid)) + if isinstance(m.anchor_grid, list): + m.anchor_grid = list(map(fn, m.anchor_grid)) + return self + + +def parse_model(d, ch): # model_dict, input_channels(3) + LOGGER.info(f"\n{'':>3}{'from':>18}{'n':>3}{'params':>10} {'module':<40}{'arguments':<30}") + anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] + na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors + no = na * (nc + 5) # number of outputs = anchors * (classes + 5) + + layers, save, c2 = [], [], ch[-1] # layers, savelist, ch out + for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']): # from, number, module, args + m = eval(m) if isinstance(m, str) else m # eval strings + for j, a in enumerate(args): + try: + args[j] = eval(a) if isinstance(a, str) else a # eval strings + except NameError: + pass + + n = n_ = max(round(n * gd), 1) if n > 1 else n # depth gain + if m in [Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, + BottleneckCSP, C3, C3TR, C3SPP, C3Ghost]: + c1, c2 = ch[f], args[0] + if c2 != no: # if not output + c2 = make_divisible(c2 * gw, 8) + + args = [c1, c2, *args[1:]] + if m in [BottleneckCSP, C3, C3TR, C3Ghost]: + args.insert(2, n) # number of repeats + n = 1 + elif m is nn.BatchNorm2d: + args = [ch[f]] + elif m is Concat: + c2 = sum(ch[x] for x in f) + elif m is Detect: + args.append([ch[x] for x in f]) + if isinstance(args[1], int): # number of anchors + args[1] = [list(range(args[1] * 2))] * len(f) + elif m is Contract: + c2 = ch[f] * args[0] ** 2 + elif m is Expand: + c2 = ch[f] // args[0] ** 2 + else: + c2 = ch[f] + + m_ = nn.Sequential(*(m(*args) for _ in range(n))) if n > 1 else m(*args) # module + t = str(m)[8:-2].replace('__main__.', '') # module type + np = sum(x.numel() for x in m_.parameters()) # number params + m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params + 
LOGGER.info(f'{i:>3}{str(f):>18}{n_:>3}{np:10.0f} {t:<40}{str(args):<30}') # print + save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist + layers.append(m_) + if i == 0: + ch = [] + ch.append(c2) + return nn.Sequential(*layers), sorted(save) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--cfg', type=str, default='yolov5s.yaml', help='model.yaml') + parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + parser.add_argument('--profile', action='store_true', help='profile model speed') + parser.add_argument('--test', action='store_true', help='test all yolo*.yaml') + opt = parser.parse_args() + opt.cfg = check_yaml(opt.cfg) # check YAML + print_args(FILE.stem, opt) + device = select_device(opt.device) + + # Create model + model = Model(opt.cfg).to(device) + model.train() + + # Profile + if opt.profile: + img = torch.rand(8 if torch.cuda.is_available() else 1, 3, 640, 640).to(device) + y = model(img, profile=True) + + # Test all models + if opt.test: + for cfg in Path(ROOT / 'models').rglob('yolo*.yaml'): + try: + _ = Model(cfg) + except Exception as e: + print(f'Error in {cfg}: {e}') + + # Tensorboard (not working https://github.com/ultralytics/yolov5/issues/2898) + # from torch.utils.tensorboard import SummaryWriter + # tb_writer = SummaryWriter('.') + # LOGGER.info("Run 'tensorboard --logdir=models' to view tensorboard at http://localhost:6006/") + # tb_writer.add_graph(torch.jit.trace(model, img, strict=False), []) # add model graph diff --git a/detector/YOLOv5/models/yolov5l.yaml b/detector/YOLOv5/models/yolov5l.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ce8a5de46a2785f5537c09fe27f3077c057bb4f3 --- /dev/null +++ b/detector/YOLOv5/models/yolov5l.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/yolov5m.yaml b/detector/YOLOv5/models/yolov5m.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ad13ab370ff6532931284a0193959afba214f6f4 --- /dev/null +++ b/detector/YOLOv5/models/yolov5m.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters 
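+# NOTE: every yolov5 variant shares these rows; parse_model() in models/yolo.py scales C3 repeat counts by depth_multiple and channel widths by width_multiple (rounded to a multiple of 8), which is all that distinguishes yolov5m from s/l/x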
+nc: 80 # number of classes +depth_multiple: 0.67 # model depth multiple +width_multiple: 0.75 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/yolov5n.yaml b/detector/YOLOv5/models/yolov5n.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8a28a40d6e20383727da1a9eed180c9e13ee89fd --- /dev/null +++ b/detector/YOLOv5/models/yolov5n.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.25 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/yolov5s.yaml b/detector/YOLOv5/models/yolov5s.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f35beabb1e1c76f9ec2cad0cb7adbce76f6b7c4c --- /dev/null +++ b/detector/YOLOv5/models/yolov5s.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone 
+backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/models/yolov5x.yaml b/detector/YOLOv5/models/yolov5x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f617a027d8a20a2b7c2a4b415da0941c02aeb3a3 --- /dev/null +++ b/detector/YOLOv5/models/yolov5x.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.33 # model depth multiple +width_multiple: 1.25 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/detector/YOLOv5/setup.cfg b/detector/YOLOv5/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..20ea49a8b4d6d90ae839da22f85828bf1b31900d --- /dev/null +++ b/detector/YOLOv5/setup.cfg @@ -0,0 +1,45 @@ +# Project-wide configuration file, can be used for package metadata and other tool configurations +# Example usage: global configuration for PEP8 (via flake8) settings or default pytest arguments + +[metadata] +license_file = LICENSE +description-file = README.md + + +[tool:pytest] +norecursedirs = + .git + dist + build +addopts = + --doctest-modules + --durations=25 + --color=yes + + +[flake8] +max-line-length = 120 +exclude = .tox,*.egg,build,temp +select = E,W,F +doctests = True +verbose = 2 +# https://pep8.readthedocs.io/en/latest/intro.html#error-codes +format = pylint +# see: https://www.flake8rules.com/
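+# every code listed under ignore below is suppressed repo-wide when flake8 runs; the trailing comments describe the disabled rules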
+ignore = + E731 # Do not assign a lambda expression, use a def + F405 # name may be undefined, or defined from star imports: module + E402 # module level import not at top of file + F401 # module imported but unused + W504 # line break after binary operator + E127 # continuation line over-indented for visual indent + W504 # line break after binary operator + E231 # missing whitespace after ‘,’, ‘;’, or ‘:’ + E501 # line too long + F403 # ‘from module import *’ used; unable to detect undefined names + + +[isort] +# https://pycqa.github.io/isort/docs/configuration/options.html +line_length = 120 +multi_line_output = 0 diff --git a/detector/YOLOv5/utils/__init__.py b/detector/YOLOv5/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..295aebfbc20ffd889fdeac44d97a44cac14c6dc4 --- /dev/null +++ b/detector/YOLOv5/utils/__init__.py @@ -0,0 +1,37 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +utils/initialization +""" + + +def notebook_init(verbose=True): + # Check system software and hardware + print('Checking setup...') + + import os + import shutil + + from utils.general import check_requirements, emojis, is_colab + from utils.torch_utils import select_device # imports + + check_requirements(('psutil', 'IPython')) + import psutil + from IPython import display # to display images and clear console output + + if is_colab(): + shutil.rmtree('/content/sample_data', ignore_errors=True) # remove colab /sample_data directory + + if verbose: + # System info + # gb = 1 / 1000 ** 3 # bytes to GB + gib = 1 / 1024 ** 3 # bytes to GiB + ram = psutil.virtual_memory().total + total, used, free = shutil.disk_usage("/") + display.clear_output() + s = f'({os.cpu_count()} CPUs, {ram * gib:.1f} GB RAM, {(total - free) * gib:.1f}/{total * gib:.1f} GB disk)' + else: + s = '' + + select_device(newline=False) + print(emojis(f'Setup complete ✅ {s}')) + return display diff --git a/detector/YOLOv5/utils/activations.py b/detector/YOLOv5/utils/activations.py new file mode 100644 index 0000000000000000000000000000000000000000..a4ff789cf336b4564e99198e0995bf39b8c79c15 --- /dev/null +++ b/detector/YOLOv5/utils/activations.py @@ -0,0 +1,101 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Activation functions +""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +# SiLU https://arxiv.org/pdf/1606.08415.pdf ---------------------------------------------------------------------------- +class SiLU(nn.Module): # export-friendly version of nn.SiLU() + @staticmethod + def forward(x): + return x * torch.sigmoid(x) + + +class Hardswish(nn.Module): # export-friendly version of nn.Hardswish() + @staticmethod + def forward(x): + # return x * F.hardsigmoid(x) # for TorchScript and CoreML + return x * F.hardtanh(x + 3, 0.0, 6.0) / 6.0 # for TorchScript, CoreML and ONNX + + +# Mish https://github.com/digantamisra98/Mish -------------------------------------------------------------------------- +class Mish(nn.Module): + @staticmethod + def forward(x): + return x * F.softplus(x).tanh() + + +class MemoryEfficientMish(nn.Module): + class F(torch.autograd.Function): + @staticmethod + def forward(ctx, x): + ctx.save_for_backward(x) + return x.mul(torch.tanh(F.softplus(x))) # x * tanh(ln(1 + exp(x))) + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_tensors[0] + sx = torch.sigmoid(x) + fx = F.softplus(x).tanh() + return grad_output * (fx + x * sx * (1 - fx * fx)) + + def forward(self, x): + return self.F.apply(x) + + +# FReLU https://arxiv.org/abs/2007.11824 
------------------------------------------------------------------------------- +class FReLU(nn.Module): + def __init__(self, c1, k=3): # ch_in, kernel + super().__init__() + self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1, bias=False) + self.bn = nn.BatchNorm2d(c1) + + def forward(self, x): + return torch.max(x, self.bn(self.conv(x))) + + +# ACON https://arxiv.org/pdf/2009.04759.pdf ---------------------------------------------------------------------------- +class AconC(nn.Module): + r""" ACON activation (activate or not). + AconC: (p1*x-p2*x) * sigmoid(beta*(p1*x-p2*x)) + p2*x, beta is a learnable parameter + according to "Activate or Not: Learning Customized Activation" . + """ + + def __init__(self, c1): + super().__init__() + self.p1 = nn.Parameter(torch.randn(1, c1, 1, 1)) + self.p2 = nn.Parameter(torch.randn(1, c1, 1, 1)) + self.beta = nn.Parameter(torch.ones(1, c1, 1, 1)) + + def forward(self, x): + dpx = (self.p1 - self.p2) * x + return dpx * torch.sigmoid(self.beta * dpx) + self.p2 * x + + +class MetaAconC(nn.Module): + r""" ACON activation (activate or not). + MetaAconC: (p1*x-p2*x) * sigmoid(beta*(p1*x-p2*x)) + p2*x, beta is generated by a small network + according to "Activate or Not: Learning Customized Activation" . + """ + + def __init__(self, c1, k=1, s=1, r=16): # ch_in, kernel, stride, r + super().__init__() + c2 = max(r, c1 // r) + self.p1 = nn.Parameter(torch.randn(1, c1, 1, 1)) + self.p2 = nn.Parameter(torch.randn(1, c1, 1, 1)) + self.fc1 = nn.Conv2d(c1, c2, k, s, bias=True) + self.fc2 = nn.Conv2d(c2, c1, k, s, bias=True) + # self.bn1 = nn.BatchNorm2d(c2) + # self.bn2 = nn.BatchNorm2d(c1) + + def forward(self, x): + y = x.mean(dim=2, keepdims=True).mean(dim=3, keepdims=True) + # batch-size 1 bug/instabilities https://github.com/ultralytics/yolov5/issues/2891 + # beta = torch.sigmoid(self.bn2(self.fc2(self.bn1(self.fc1(y))))) # bug/unstable + beta = torch.sigmoid(self.fc2(self.fc1(y))) # bug patch BN layers removed + dpx = (self.p1 - self.p2) * x + return dpx * torch.sigmoid(beta * dpx) + self.p2 * x diff --git a/detector/YOLOv5/utils/augmentations.py b/detector/YOLOv5/utils/augmentations.py new file mode 100644 index 0000000000000000000000000000000000000000..4f28cece045dfdba8ff25186e574284674990e5e --- /dev/null +++ b/detector/YOLOv5/utils/augmentations.py @@ -0,0 +1,280 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Image augmentation functions +""" + +import math +import random + +import cv2 +import numpy as np + +from detector.YOLOv5.utils.general import LOGGER, check_version, colorstr, resample_segments, segment2box +from detector.YOLOv5.utils.metrics import bbox_ioa + + +class Albumentations: + # YOLOv5 Albumentations class (optional, only used if package is installed) + def __init__(self): + self.transform = None + try: + import albumentations as A + check_version(A.__version__, '1.0.3', hard=True) # version requirement + + self.transform = A.Compose([ + A.Blur(p=0.01), + A.MedianBlur(p=0.01), + A.ToGray(p=0.01), + A.CLAHE(p=0.01), + A.RandomBrightnessContrast(p=0.0), + A.RandomGamma(p=0.0), + A.ImageCompression(quality_lower=75, p=0.0)], + bbox_params=A.BboxParams(format='yolo', label_fields=['class_labels'])) + + LOGGER.info(colorstr('albumentations: ') + ', '.join(f'{x}' for x in self.transform.transforms if x.p)) + except ImportError: # package not installed, skip + pass + except Exception as e: + LOGGER.info(colorstr('albumentations: ') + f'{e}') + + def __call__(self, im, labels, p=1.0): + if self.transform and random.random() < p: + new = 
self.transform(image=im, bboxes=labels[:, 1:], class_labels=labels[:, 0]) # transformed + im, labels = new['image'], np.array([[c, *b] for c, b in zip(new['class_labels'], new['bboxes'])]) + return im, labels + + +def augment_hsv(im, hgain=0.5, sgain=0.5, vgain=0.5): + # HSV color-space augmentation + if hgain or sgain or vgain: + r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains + hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) + dtype = im.dtype # uint8 + + x = np.arange(0, 256, dtype=r.dtype) + lut_hue = ((x * r[0]) % 180).astype(dtype) + lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) + lut_val = np.clip(x * r[2], 0, 255).astype(dtype) + + im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))) + cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im) # no return needed + + +def hist_equalize(im, clahe=True, bgr=False): + # Equalize histogram on BGR image 'im' with im.shape(n,m,3) and range 0-255 + yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV) + if clahe: + c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) + yuv[:, :, 0] = c.apply(yuv[:, :, 0]) + else: + yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram + return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV image to RGB + + +def replicate(im, labels): + # Replicate labels + h, w = im.shape[:2] + boxes = labels[:, 1:].astype(int) + x1, y1, x2, y2 = boxes.T + s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels) + for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices + x1b, y1b, x2b, y2b = boxes[i] + bh, bw = y2b - y1b, x2b - x1b + yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset x, y + x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh] + im[y1a:y2a, x1a:x2a] = im[y1b:y2b, x1b:x2b] # im4[ymin:ymax, xmin:xmax] + labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0) + + return im, labels + + +def letterbox(im, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True, stride=32): + # Resize and pad image while meeting stride-multiple constraints + shape = im.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better val mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + if auto: # minimum rectangle + dw, dh = np.mod(dw, stride), np.mod(dh, stride) # wh padding + elif scaleFill: # stretch + dw, dh = 0.0, 0.0 + new_unpad = (new_shape[1], new_shape[0]) + ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + im = cv2.resize(im, new_unpad, interpolation=cv2.INTER_LINEAR) + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + im = cv2.copyMakeBorder(im, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + return im, ratio, (dw, dh) + + +def random_perspective(im, targets=(), segments=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, + border=(0, 0)): + # 
torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(0.1, 0.1), scale=(0.9, 1.1), shear=(-10, 10)) + # targets = [cls, xyxy] + + height = im.shape[0] + border[0] * 2 # shape(h,w,c) + width = im.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -im.shape[1] / 2 # x translation (pixels) + C[1, 2] = -im.shape[0] / 2 # y translation (pixels) + + # Perspective + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(1 - scale, 1 + scale) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation (pixels) + T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels) + # cv2.imwrite('test_origin.jpg', im) + # Combined rotation matrix + M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + im = cv2.warpPerspective(im, M, dsize=(width, height), borderValue=(114, 114, 114)) + else: # affine + im = cv2.warpAffine(im, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) + # cv2.imwrite('test_warped.jpg', im) + + # Visualize + # import matplotlib.pyplot as plt + # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() + # ax[0].imshow(im[:, :, ::-1]) # base + # plt.show() + # ax[1].imshow(im2[:, :, ::-1]) # warped + + + # Transform label coordinates + n = len(targets) + if n: + use_segments = any(x.any() for x in segments) + new = np.zeros((n, 4)) + if use_segments: # warp segments + segments = resample_segments(segments) # upsample + for i, segment in enumerate(segments): + xy = np.ones((len(segment), 3)) + xy[:, :2] = segment + xy = xy @ M.T # transform + xy = xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2] # perspective rescale or affine + + # clip + new[i] = segment2box(xy, width, height) + + else: # warp boxes + xy = np.ones((n * 4, 3)) + xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + xy = (xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]).reshape(n, 8) # perspective rescale or affine + + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + new = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + + # clip + new[:, [0, 2]] = new[:, [0, 2]].clip(0, width) + new[:, [1, 3]] = new[:, [1, 3]].clip(0, height) + + # filter candidates + i = box_candidates(box1=targets[:, 1:5].T * s, box2=new.T, area_thr=0.01 if use_segments else 0.10) + targets = targets[i] + targets[:, 1:5] = new[i] + + return im, targets + + +def copy_paste(im, labels, segments, p=0.5): + # Implement Copy-Paste augmentation https://arxiv.org/abs/2012.07177, labels as nx5 np.array(cls, xyxy) + n = len(segments) + if p and n: + h, w, c = im.shape # height, width, channels + im_new = np.zeros(im.shape, np.uint8) + for j in random.sample(range(n), k=round(p * n)): + l, s = 
labels[j], segments[j] + box = w - l[3], l[2], w - l[1], l[4] + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + if (ioa < 0.30).all(): # allow 30% obscuration of existing labels + labels = np.concatenate((labels, [[l[0], *box]]), 0) + segments.append(np.concatenate((w - s[:, 0:1], s[:, 1:2]), 1)) + cv2.drawContours(im_new, [segments[j].astype(np.int32)], -1, (255, 255, 255), cv2.FILLED) + + result = cv2.bitwise_and(src1=im, src2=im_new) + result = cv2.flip(result, 1) # augment segments (flip left-right) + i = result > 0 # pixels to replace + # i[:, :] = result.max(2).reshape(h, w, 1) # act over ch + im[i] = result[i] # cv2.imwrite('debug.jpg', im) # debug + + return im, labels, segments + + +def cutout(im, labels, p=0.5): + # Applies image cutout augmentation https://arxiv.org/abs/1708.04552 + if random.random() < p: + h, w = im.shape[:2] + scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction + for s in scales: + mask_h = random.randint(1, int(h * s)) # create random masks + mask_w = random.randint(1, int(w * s)) + + # box + xmin = max(0, random.randint(0, w) - mask_w // 2) + ymin = max(0, random.randint(0, h) - mask_h // 2) + xmax = min(w, xmin + mask_w) + ymax = min(h, ymin + mask_h) + + # apply random color mask + im[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)] + + # return unobscured labels + if len(labels) and s > 0.03: + box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + labels = labels[ioa < 0.60] # remove >60% obscured labels + + return labels + + +def mixup(im, labels, im2, labels2): + # Applies MixUp augmentation https://arxiv.org/pdf/1710.09412.pdf + r = np.random.beta(32.0, 32.0) # mixup ratio, alpha=beta=32.0 + im = (im * r + im2 * (1 - r)).astype(np.uint8) + labels = np.concatenate((labels, labels2), 0) + return im, labels + + +def box_candidates(box1, box2, wh_thr=2, ar_thr=100, area_thr=0.1, eps=1e-16): # box1(4,n), box2(4,n) + # Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio + w1, h1 = box1[2] - box1[0], box1[3] - box1[1] + w2, h2 = box2[2] - box2[0], box2[3] - box2[1] + ar = np.maximum(w2 / (h2 + eps), h2 / (w2 + eps)) # aspect ratio + return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + eps) > area_thr) & (ar < ar_thr) # candidates diff --git a/detector/YOLOv5/utils/autoanchor.py b/detector/YOLOv5/utils/autoanchor.py new file mode 100644 index 0000000000000000000000000000000000000000..29f88a73e42ab71a3d48a8c13cfa16ba55636e3e --- /dev/null +++ b/detector/YOLOv5/utils/autoanchor.py @@ -0,0 +1,165 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +AutoAnchor utils +""" + +import random + +import numpy as np +import torch +import yaml +from tqdm import tqdm + +from detector.YOLOv5.utils.general import LOGGER, colorstr, emojis + +PREFIX = colorstr('AutoAnchor: ') + + +def check_anchor_order(m): + # Check anchor order against stride order for YOLOv5 Detect() module m, and correct if necessary + a = m.anchors.prod(-1).view(-1) # anchor area + da = a[-1] - a[0] # delta a + ds = m.stride[-1] - m.stride[0] # delta s + if da.sign() != ds.sign(): # same order + LOGGER.info(f'{PREFIX}Reversing anchor order') + m.anchors[:] = m.anchors.flip(0) + + +def check_anchors(dataset, model, thr=4.0, imgsz=640): + # Check anchor fit to data, recompute if necessary + m = model.module.model[-1] if hasattr(model, 'module') else model.model[-1] # Detect() + shapes = 
imgsz * dataset.shapes / dataset.shapes.max(1, keepdims=True) + scale = np.random.uniform(0.9, 1.1, size=(shapes.shape[0], 1)) # augment scale + wh = torch.tensor(np.concatenate([l[:, 3:5] * s for s, l in zip(shapes * scale, dataset.labels)])).float() # wh + + def metric(k): # compute metric + r = wh[:, None] / k[None] + x = torch.min(r, 1 / r).min(2)[0] # ratio metric + best = x.max(1)[0] # best_x + aat = (x > 1 / thr).float().sum(1).mean() # anchors above threshold + bpr = (best > 1 / thr).float().mean() # best possible recall + return bpr, aat + + anchors = m.anchors.clone() * m.stride.to(m.anchors.device).view(-1, 1, 1) # current anchors + bpr, aat = metric(anchors.cpu().view(-1, 2)) + s = f'\n{PREFIX}{aat:.2f} anchors/target, {bpr:.3f} Best Possible Recall (BPR). ' + if bpr > 0.98: # threshold to recompute + LOGGER.info(emojis(f'{s}Current anchors are a good fit to dataset ✅')) + else: + LOGGER.info(emojis(f'{s}Anchors are a poor fit to dataset ⚠️, attempting to improve...')) + na = m.anchors.numel() // 2 # number of anchors + try: + anchors = kmean_anchors(dataset, n=na, img_size=imgsz, thr=thr, gen=1000, verbose=False) + except Exception as e: + LOGGER.info(f'{PREFIX}ERROR: {e}') + new_bpr = metric(anchors)[0] + if new_bpr > bpr: # replace anchors + anchors = torch.tensor(anchors, device=m.anchors.device).type_as(m.anchors) + m.anchors[:] = anchors.clone().view_as(m.anchors) / m.stride.to(m.anchors.device).view(-1, 1, 1) # loss + check_anchor_order(m) + LOGGER.info(f'{PREFIX}New anchors saved to model. Update model *.yaml to use these anchors in the future.') + else: + LOGGER.info(f'{PREFIX}Original anchors better than new anchors. Proceeding with original anchors.') + + +def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=1000, verbose=True): + """ Creates kmeans-evolved anchors from training dataset + + Arguments: + dataset: path to data.yaml, or a loaded dataset + n: number of anchors + img_size: image size used for training + thr: anchor-label wh ratio threshold hyperparameter hyp['anchor_t'] used for training, default=4.0 + gen: generations to evolve anchors using genetic algorithm + verbose: print all results + + Return: + k: kmeans evolved anchors + + Usage: + from yolov5_utils.autoanchor import *; _ = kmean_anchors() + """ + from scipy.cluster.vq import kmeans + + npr = np.random + thr = 1 / thr + + def metric(k, wh): # compute metrics + r = wh[:, None] / k[None] + x = torch.min(r, 1 / r).min(2)[0] # ratio metric + # x = wh_iou(wh, torch.tensor(k)) # iou metric + return x, x.max(1)[0] # x, best_x + + def anchor_fitness(k): # mutation fitness + _, best = metric(torch.tensor(k, dtype=torch.float32), wh) + return (best * (best > thr).float()).mean() # fitness + + def print_results(k, verbose=True): + k = k[np.argsort(k.prod(1))] # sort small to large + x, best = metric(k, wh0) + bpr, aat = (best > thr).float().mean(), (x > thr).float().mean() * n # best possible recall, anch > thr + s = f'{PREFIX}thr={thr:.2f}: {bpr:.4f} best possible recall, {aat:.2f} anchors past thr\n' \ + f'{PREFIX}n={n}, img_size={img_size}, metric_all={x.mean():.3f}/{best.mean():.3f}-mean/best, ' \ + f'past_thr={x[x > thr].mean():.3f}-mean: ' + for i, x in enumerate(k): + s += '%i,%i, ' % (round(x[0]), round(x[1])) + if verbose: + LOGGER.info(s[:-2]) + return k + + if isinstance(dataset, str): # *.yaml file + with open(dataset, errors='ignore') as f: + data_dict = yaml.safe_load(f) # model dict + from detector.YOLOv5.utils.datasets import LoadImagesAndLabels + dataset =
LoadImagesAndLabels(data_dict['train'], augment=True, rect=True) + + # Get label wh + shapes = img_size * dataset.shapes / dataset.shapes.max(1, keepdims=True) + wh0 = np.concatenate([l[:, 3:5] * s for s, l in zip(shapes, dataset.labels)]) # wh + + # Filter + i = (wh0 < 3.0).any(1).sum() + if i: + LOGGER.info(f'{PREFIX}WARNING: Extremely small objects found. {i} of {len(wh0)} labels are < 3 pixels in size.') + wh = wh0[(wh0 >= 2.0).any(1)] # filter > 2 pixels + # wh = wh * (npr.rand(wh.shape[0], 1) * 0.9 + 0.1) # multiply by random scale 0-1 + + # Kmeans calculation + LOGGER.info(f'{PREFIX}Running kmeans for {n} anchors on {len(wh)} points...') + s = wh.std(0) # sigmas for whitening + k = kmeans(wh / s, n, iter=30)[0] * s # points + if len(k) != n: # kmeans may return fewer points than requested if wh is insufficient or too similar + LOGGER.warning(f'{PREFIX}WARNING: scipy.cluster.vq.kmeans returned only {len(k)} of {n} requested points') + k = np.sort(npr.rand(n * 2)).reshape(n, 2) * img_size # random init + wh = torch.tensor(wh, dtype=torch.float32) # filtered + wh0 = torch.tensor(wh0, dtype=torch.float32) # unfiltered + k = print_results(k, verbose=False) + + # Plot + # k, d = [None] * 20, [None] * 20 + # for i in tqdm(range(1, 21)): + # k[i-1], d[i-1] = kmeans(wh / s, i) # points, mean distance + # fig, ax = plt.subplots(1, 2, figsize=(14, 7), tight_layout=True) + # ax = ax.ravel() + # ax[0].plot(np.arange(1, 21), np.array(d) ** 2, marker='.') + # fig, ax = plt.subplots(1, 2, figsize=(14, 7)) # plot wh + # ax[0].hist(wh[wh[:, 0]<100, 0],400) + # ax[1].hist(wh[wh[:, 1]<100, 1],400) + # fig.savefig('wh.png', dpi=200) + + # Evolve + f, sh, mp, s = anchor_fitness(k), k.shape, 0.9, 0.1 # fitness, generations, mutation prob, sigma + pbar = tqdm(range(gen), desc=f'{PREFIX}Evolving anchors with Genetic Algorithm:') # progress bar + for _ in pbar: + v = np.ones(sh) + while (v == 1).all(): # mutate until a change occurs (prevent duplicates) + v = ((npr.random(sh) < mp) * random.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0) + kg = (k.copy() * v).clip(min=2.0) + fg = anchor_fitness(kg) + if fg > f: + f, k = fg, kg.copy() + pbar.desc = f'{PREFIX}Evolving anchors with Genetic Algorithm: fitness = {f:.4f}' + if verbose: + print_results(k, verbose) + + return print_results(k) diff --git a/detector/YOLOv5/utils/autobatch.py b/detector/YOLOv5/utils/autobatch.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6452100a665e2eff7baf140b53a64d5a4c7edc --- /dev/null +++ b/detector/YOLOv5/utils/autobatch.py @@ -0,0 +1,57 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Auto-batch utils +""" + +from copy import deepcopy + +import numpy as np +import torch +from torch.cuda import amp + +from detector.YOLOv5.utils.general import LOGGER, colorstr +from detector.YOLOv5.utils.torch_utils import profile + + +def check_train_batch_size(model, imgsz=640): + # Check YOLOv5 training batch size + with amp.autocast(): + return autobatch(deepcopy(model).train(), imgsz) # compute optimal batch size + + +def autobatch(model, imgsz=640, fraction=0.9, batch_size=16): + # Automatically estimate best batch size to use `fraction` of available CUDA memory + # Usage: + # import torch + # from yolov5_utils.autobatch import autobatch + # model = torch.hub.load('ultralytics/yolov5', 'yolov5s', autoshape=False) + # print(autobatch(model)) + + prefix = colorstr('AutoBatch: ') + LOGGER.info(f'{prefix}Computing optimal batch size for --imgsz {imgsz}') + device = next(model.parameters()).device # get model 
device + if device.type == 'cpu': + LOGGER.info(f'{prefix}CUDA not detected, using default CPU batch-size {batch_size}') + return batch_size + + d = str(device).upper() # 'CUDA:0' + properties = torch.cuda.get_device_properties(device) # device properties + t = properties.total_memory / 1024 ** 3 # (GiB) + r = torch.cuda.memory_reserved(device) / 1024 ** 3 # (GiB) + a = torch.cuda.memory_allocated(device) / 1024 ** 3 # (GiB) + f = t - (r + a) # free inside reserved + LOGGER.info(f'{prefix}{d} ({properties.name}) {t:.2f}G total, {r:.2f}G reserved, {a:.2f}G allocated, {f:.2f}G free') + + batch_sizes = [1, 2, 4, 8, 16] + try: + img = [torch.zeros(b, 3, imgsz, imgsz) for b in batch_sizes] + y = profile(img, model, n=3, device=device) + except Exception as e: + LOGGER.warning(f'{prefix}{e}') + + y = [x[2] for x in y if x] # memory [2] + batch_sizes = batch_sizes[:len(y)] + p = np.polyfit(batch_sizes, y, deg=1) # first degree polynomial fit + b = int((f * fraction - p[1]) / p[0]) # y intercept (optimal batch size) + LOGGER.info(f'{prefix}Using batch-size {b} for {d} {t * fraction:.2f}G/{t:.2f}G ({fraction * 100:.0f}%)') + return b diff --git a/detector/YOLOv5/utils/aws/__init__.py b/detector/YOLOv5/utils/aws/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/detector/YOLOv5/utils/aws/mime.sh b/detector/YOLOv5/utils/aws/mime.sh new file mode 100644 index 0000000000000000000000000000000000000000..c319a83cfbdf09bea634c3bd9fca737c0b1dd505 --- /dev/null +++ b/detector/YOLOv5/utils/aws/mime.sh @@ -0,0 +1,26 @@ +# AWS EC2 instance startup 'MIME' script https://aws.amazon.com/premiumsupport/knowledge-center/execute-user-data-ec2/ +# This script will run on every instance restart, not only on first start +# --- DO NOT COPY ABOVE COMMENTS WHEN PASTING INTO USERDATA --- + +Content-Type: multipart/mixed; boundary="//" +MIME-Version: 1.0 + +--// +Content-Type: text/cloud-config; charset="us-ascii" +MIME-Version: 1.0 +Content-Transfer-Encoding: 7bit +Content-Disposition: attachment; filename="cloud-config.txt" + +#cloud-config +cloud_final_modules: +- [scripts-user, always] + +--// +Content-Type: text/x-shellscript; charset="us-ascii" +MIME-Version: 1.0 +Content-Transfer-Encoding: 7bit +Content-Disposition: attachment; filename="userdata.txt" + +#!/bin/bash +# --- paste contents of userdata.sh here --- +--// diff --git a/detector/YOLOv5/utils/aws/resume.py b/detector/YOLOv5/utils/aws/resume.py new file mode 100644 index 0000000000000000000000000000000000000000..afdffcfc140d493b3b1900bd11eab82e7c56b7f7 --- /dev/null +++ b/detector/YOLOv5/utils/aws/resume.py @@ -0,0 +1,40 @@ +# Resume all interrupted trainings in yolov5/ dir including DDP trainings +# Usage: $ python yolov5_utils/aws/resume.py + +import os +import sys +from pathlib import Path + +import torch +import yaml + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[2] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +port = 0 # --master_port +path = Path('').resolve() +for last in path.rglob('*/**/last.pt'): + ckpt = torch.load(last) + if ckpt['optimizer'] is None: + continue + + # Load opt.yaml + with open(last.parent.parent / 'opt.yaml', errors='ignore') as f: + opt = yaml.safe_load(f) + + # Get device count + d = opt['device'].split(',') # devices + nd = len(d) # number of devices + ddp = nd > 1 or (nd == 0 and torch.cuda.device_count() > 1) # distributed data parallel + + if ddp: # multi-GPU + port += 1 + 
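# A sketch of the string built on the next line, with illustrative values (nd=2 GPUs, port=1, and an assumed checkpoint path): + #   'python -m torch.distributed.run --nproc_per_node 2 --master_port 1 train.py --resume runs/train/exp/weights/last.pt' +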
cmd = f'python -m torch.distributed.run --nproc_per_node {nd} --master_port {port} train.py --resume {last}' + else: # single-GPU + cmd = f'python train.py --resume {last}' + + cmd += ' > /dev/null 2>&1 &' # redirect output to dev/null and run in daemon thread + print(cmd) + os.system(cmd) diff --git a/detector/YOLOv5/utils/aws/userdata.sh b/detector/YOLOv5/utils/aws/userdata.sh new file mode 100644 index 0000000000000000000000000000000000000000..5fc1332ac1b0d1794cf8f8c5f6918059ae5dc381 --- /dev/null +++ b/detector/YOLOv5/utils/aws/userdata.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# AWS EC2 instance startup script https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html +# This script will run only once on first instance start (for a re-start script see mime.sh) +# /home/ubuntu (ubuntu) or /home/ec2-user (amazon-linux) is working dir +# Use >300 GB SSD + +cd home/ubuntu +if [ ! -d yolov5 ]; then + echo "Running first-time script." # install dependencies, download COCO, pull Docker + git clone https://github.com/ultralytics/yolov5 -b master && sudo chmod -R 777 yolov5 + cd yolov5 + bash data/scripts/get_coco.sh && echo "COCO done." & + sudo docker pull ultralytics/yolov5:latest && echo "Docker done." & + python -m pip install --upgrade pip && pip install -r requirements.txt && python detect.py && echo "Requirements done." & + wait && echo "All tasks done." # finish background tasks +else + echo "Running re-start script." # resume interrupted runs + i=0 + list=$(sudo docker ps -qa) # container list i.e. $'one\ntwo\nthree\nfour' + while IFS= read -r id; do + ((i++)) + echo "restarting container $i: $id" + sudo docker start $id + # sudo docker exec -it $id python train.py --resume # single-GPU + sudo docker exec -d $id python utils/aws/resume.py # multi-scenario + done <<<"$list" +fi diff --git a/detector/YOLOv5/utils/benchmarks.py b/detector/YOLOv5/utils/benchmarks.py new file mode 100644 index 0000000000000000000000000000000000000000..658d4e0cb7b243daba9e1e1c55b0200c30b27270 --- /dev/null +++ b/detector/YOLOv5/utils/benchmarks.py @@ -0,0 +1,92 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Run YOLOv5 benchmarks on all supported export formats + +Format | `export.py --include` | Model +--- | --- | --- +PyTorch | - | yolov5s.pt +TorchScript | `torchscript` | yolov5s.torchscript +ONNX | `onnx` | yolov5s.onnx +OpenVINO | `openvino` | yolov5s_openvino_model/ +TensorRT | `engine` | yolov5s.engine +CoreML | `coreml` | yolov5s.mlmodel +TensorFlow SavedModel | `saved_model` | yolov5s_saved_model/ +TensorFlow GraphDef | `pb` | yolov5s.pb +TensorFlow Lite | `tflite` | yolov5s.tflite +TensorFlow Edge TPU | `edgetpu` | yolov5s_edgetpu.tflite +TensorFlow.js | `tfjs` | yolov5s_web_model/ + +Requirements: + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime openvino-dev tensorflow-cpu # CPU + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime-gpu openvino-dev tensorflow # GPU + +Usage: + $ python utils/benchmarks.py --weights yolov5s.pt --img 640 +""" + +import argparse +import sys +import time +from pathlib import Path + +import pandas as pd + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative + +import export +import val +from detector.YOLOv5.utils import notebook_init +from detector.YOLOv5.utils.general import LOGGER, print_args + + +def run(weights=ROOT / 'yolov5s.pt', # weights
path + imgsz=640, # inference size (pixels) + batch_size=1, # batch size + data=ROOT / 'data/coco128.yaml', # dataset.yaml path + ): + y, t = [], time.time() + formats = export.export_formats() + for i, (name, f, suffix) in formats.iterrows(): # index, (name, file, suffix) + try: + w = weights if f == '-' else export.run(weights=weights, imgsz=[imgsz], include=[f], device='cpu')[-1] + assert suffix in str(w), 'export failed' + result = val.run(data, w, batch_size, imgsz=imgsz, plots=False, device='cpu', task='benchmark') + metrics = result[0] # metrics (mp, mr, map50, map, *losses(box, obj, cls)) + speeds = result[2] # times (preprocess, inference, postprocess) + y.append([name, metrics[3], speeds[1]]) # mAP, t_inference + except Exception as e: + LOGGER.warning(f'WARNING: Benchmark failure for {name}: {e}') + y.append([name, None, None]) # mAP, t_inference + + # Print results + LOGGER.info('\n') + parse_opt() + notebook_init() # print system info + py = pd.DataFrame(y, columns=['Format', 'mAP@0.5:0.95', 'Inference time (ms)']) + LOGGER.info(f'\nBenchmarks complete ({time.time() - t:.2f}s)') + LOGGER.info(str(py)) + return py + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='weights path') + parser.add_argument('--imgsz', '--img', '--img-size', type=int, default=640, help='inference size (pixels)') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + opt = parser.parse_args() + print_args(FILE.stem, opt) + return opt + + +def main(opt): + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/detector/YOLOv5/utils/callbacks.py b/detector/YOLOv5/utils/callbacks.py new file mode 100644 index 0000000000000000000000000000000000000000..c51c268f20d63581014d569671cc5473f112eadc --- /dev/null +++ b/detector/YOLOv5/utils/callbacks.py @@ -0,0 +1,78 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Callback utils +""" + + +class Callbacks: + """" + Handles all registered callbacks for YOLOv5 Hooks + """ + + def __init__(self): + # Define the available callbacks + self._callbacks = { + 'on_pretrain_routine_start': [], + 'on_pretrain_routine_end': [], + + 'on_train_start': [], + 'on_train_epoch_start': [], + 'on_train_batch_start': [], + 'optimizer_step': [], + 'on_before_zero_grad': [], + 'on_train_batch_end': [], + 'on_train_epoch_end': [], + + 'on_val_start': [], + 'on_val_batch_start': [], + 'on_val_image_end': [], + 'on_val_batch_end': [], + 'on_val_end': [], + + 'on_fit_epoch_end': [], # fit = train + val + 'on_model_save': [], + 'on_train_end': [], + 'on_params_update': [], + 'teardown': [], + } + self.stop_training = False # set True to interrupt training + + def register_action(self, hook, name='', callback=None): + """ + Register a new action to a callback hook + + Args: + hook The callback hook name to register the action to + name The name of the action for later reference + callback The callback to fire + """ + assert hook in self._callbacks, f"hook '{hook}' not found in callbacks {self._callbacks}" + assert callable(callback), f"callback '{callback}' is not callable" + self._callbacks[hook].append({'name': name, 'callback': callback}) + + def get_registered_actions(self, hook=None): + """" + Returns all the registered actions by callback hook + + Args: + hook The name of the hook to check, defaults to all + """ + if hook: + return 
self._callbacks[hook] + else: + return self._callbacks + + def run(self, hook, *args, **kwargs): + """ + Loop through the registered actions and fire all callbacks + + Args: + hook The name of the hook to check, defaults to all + args Arguments to receive from YOLOv5 + kwargs Keyword Arguments to receive from YOLOv5 + """ + + assert hook in self._callbacks, f"hook '{hook}' not found in callbacks {self._callbacks}" + + for logger in self._callbacks[hook]: + logger['callback'](*args, **kwargs) diff --git a/detector/YOLOv5/utils/datasets.py b/detector/YOLOv5/utils/datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..4331e07942ebb7826586f959071a46964f3166db --- /dev/null +++ b/detector/YOLOv5/utils/datasets.py @@ -0,0 +1,1037 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Dataloaders and dataset utils +""" + +import glob +import hashlib +import json +import math +import os +import random +import shutil +import time +from itertools import repeat +from multiprocessing.pool import Pool, ThreadPool +from pathlib import Path +from threading import Thread +from zipfile import ZipFile + +import cv2 +import numpy as np +import torch +import torch.nn.functional as F +import yaml +from PIL import ExifTags, Image, ImageOps +from torch.utils.data import DataLoader, Dataset, dataloader, distributed +from tqdm import tqdm + +from detector.YOLOv5.utils.augmentations import Albumentations, augment_hsv, copy_paste, letterbox, mixup, random_perspective +from detector.YOLOv5.utils.general import (DATASETS_DIR, LOGGER, NUM_THREADS, check_dataset, check_requirements, check_yaml, clean_str, + segments2boxes, xyn2xy, xywh2xyxy, xywhn2xyxy, xyxy2xywhn) +from detector.YOLOv5.utils.torch_utils import torch_distributed_zero_first + +# Parameters +HELP_URL = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data' +IMG_FORMATS = ['bmp', 'dng', 'jpeg', 'jpg', 'mpo', 'png', 'tif', 'tiff', 'webp'] # include image suffixes +VID_FORMATS = ['asf', 'avi', 'gif', 'm4v', 'mkv', 'mov', 'mp4', 'mpeg', 'mpg', 'wmv'] # include video suffixes + +# Get orientation exif tag +for orientation in ExifTags.TAGS.keys(): + if ExifTags.TAGS[orientation] == 'Orientation': + break + + +def get_hash(paths): + # Returns a single hash value of a list of paths (files or dirs) + size = sum(os.path.getsize(p) for p in paths if os.path.exists(p)) # sizes + h = hashlib.md5(str(size).encode()) # hash sizes + h.update(''.join(paths).encode()) # hash paths + return h.hexdigest() # return hash + + +def exif_size(img): + # Returns exif-corrected PIL size + s = img.size # (width, height) + try: + rotation = dict(img._getexif().items())[orientation] + if rotation == 6: # rotation 270 + s = (s[1], s[0]) + elif rotation == 8: # rotation 90 + s = (s[1], s[0]) + except Exception: + pass + + return s + + +def exif_transpose(image): + """ + Transpose a PIL image accordingly if it has an EXIF Orientation tag. + Inplace version of https://github.com/python-pillow/Pillow/blob/master/src/PIL/ImageOps.py exif_transpose() + + :param image: The image to transpose. + :return: An image. 
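+ + Example (hypothetical usage; 'photo.jpg' is an illustrative filename): + im = exif_transpose(Image.open('photo.jpg')) # upright image with the EXIF orientation tag removed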
+ """ + exif = image.getexif() + orientation = exif.get(0x0112, 1) # default 1 + if orientation > 1: + method = {2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(orientation) + if method is not None: + image = image.transpose(method) + del exif[0x0112] + image.info["exif"] = exif.tobytes() + return image + + +def create_dataloader(path, imgsz, batch_size, stride, single_cls=False, hyp=None, augment=False, cache=False, pad=0.0, + rect=False, rank=-1, workers=8, image_weights=False, quad=False, prefix='', shuffle=False): + if rect and shuffle: + LOGGER.warning('WARNING: --rect is incompatible with DataLoader shuffle, setting shuffle=False') + shuffle = False + with torch_distributed_zero_first(rank): # init dataset *.cache only once if DDP + dataset = LoadImagesAndLabels(path, imgsz, batch_size, + augment=augment, # augmentation + hyp=hyp, # hyperparameters + rect=rect, # rectangular batches + cache_images=cache, + single_cls=single_cls, + stride=int(stride), + pad=pad, + image_weights=image_weights, + prefix=prefix) + + batch_size = min(batch_size, len(dataset)) + nd = torch.cuda.device_count() # number of CUDA devices + nw = min([os.cpu_count() // max(nd, 1), batch_size if batch_size > 1 else 0, workers]) # number of workers + sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle) + loader = DataLoader if image_weights else InfiniteDataLoader # only DataLoader allows for attribute updates + return loader(dataset, + batch_size=batch_size, + shuffle=shuffle and sampler is None, + num_workers=nw, + sampler=sampler, + pin_memory=True, + collate_fn=LoadImagesAndLabels.collate_fn4 if quad else LoadImagesAndLabels.collate_fn), dataset + + +class InfiniteDataLoader(dataloader.DataLoader): + """ Dataloader that reuses workers + + Uses same syntax as vanilla DataLoader + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + object.__setattr__(self, 'batch_sampler', _RepeatSampler(self.batch_sampler)) + self.iterator = super().__iter__() + + def __len__(self): + return len(self.batch_sampler.sampler) + + def __iter__(self): + for i in range(len(self)): + yield next(self.iterator) + + +class _RepeatSampler: + """ Sampler that repeats forever + + Args: + sampler (Sampler) + """ + + def __init__(self, sampler): + self.sampler = sampler + + def __iter__(self): + while True: + yield from iter(self.sampler) + + +class LoadImages: + # YOLOv5 image/video dataloader, i.e. `python detect.py --source image.jpg/vid.mp4` + def __init__(self, path, img_size=640, stride=32, auto=True): + p = str(Path(path).resolve()) # os-agnostic absolute path + if '*' in p: + files = sorted(glob.glob(p, recursive=True)) # glob + elif os.path.isdir(p): + files = sorted(glob.glob(os.path.join(p, '*.*'))) # dir + elif os.path.isfile(p): + files = [p] # files + else: + raise Exception(f'ERROR: {p} does not exist') + + images = [x for x in files if x.split('.')[-1].lower() in IMG_FORMATS] + videos = [x for x in files if x.split('.')[-1].lower() in VID_FORMATS] + ni, nv = len(images), len(videos) + + self.img_size = img_size + self.stride = stride + self.files = images + videos + self.nf = ni + nv # number of files + self.video_flag = [False] * ni + [True] * nv + self.mode = 'image' + self.auto = auto + if any(videos): + self.new_video(videos[0]) # new video + else: + self.cap = None + assert self.nf > 0, f'No images or videos found in {p}. 
' \ + f'Supported formats are:\nimages: {IMG_FORMATS}\nvideos: {VID_FORMATS}' + + def __iter__(self): + self.count = 0 + return self + + def __next__(self): + if self.count == self.nf: + raise StopIteration + path = self.files[self.count] + + if self.video_flag[self.count]: + # Read video + self.mode = 'video' + ret_val, img0 = self.cap.read() + while not ret_val: + self.count += 1 + self.cap.release() + if self.count == self.nf: # last video + raise StopIteration + else: + path = self.files[self.count] + self.new_video(path) + ret_val, img0 = self.cap.read() + + self.frame += 1 + s = f'video {self.count + 1}/{self.nf} ({self.frame}/{self.frames}) {path}: ' + + else: + # Read image + self.count += 1 + img0 = cv2.imread(path) # BGR + assert img0 is not None, f'Image Not Found {path}' + s = f'image {self.count}/{self.nf} {path}: ' + + # Padded resize + img = letterbox(img0, self.img_size, stride=self.stride, auto=self.auto)[0] + + # Convert + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB + img = np.ascontiguousarray(img) + + return path, img, img0, self.cap, s + + def new_video(self, path): + self.frame = 0 + self.cap = cv2.VideoCapture(path) + self.frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT)) + + def __len__(self): + return self.nf # number of files + + +class LoadWebcam: # for inference + # YOLOv5 local webcam dataloader, i.e. `python detect.py --source 0` + def __init__(self, pipe='0', img_size=640, stride=32): + self.img_size = img_size + self.stride = stride + self.pipe = eval(pipe) if pipe.isnumeric() else pipe + self.cap = cv2.VideoCapture(self.pipe) # video capture object + self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 3) # set buffer size + + def __iter__(self): + self.count = -1 + return self + + def __next__(self): + self.count += 1 + if cv2.waitKey(1) == ord('q'): # q to quit + self.cap.release() + cv2.destroyAllWindows() + raise StopIteration + + # Read frame + ret_val, img0 = self.cap.read() + img0 = cv2.flip(img0, 1) # flip left-right + + # Print + assert ret_val, f'Camera Error {self.pipe}' + img_path = 'webcam.jpg' + s = f'webcam {self.count}: ' + + # Padded resize + img = letterbox(img0, self.img_size, stride=self.stride)[0] + + # Convert + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB + img = np.ascontiguousarray(img) + + return img_path, img, img0, None, s + + def __len__(self): + return 0 + + +class LoadStreams: + # YOLOv5 streamloader, i.e. `python detect.py --source 'rtsp://example.com/media.mp4' # RTSP, RTMP, HTTP streams` + def __init__(self, sources='streams.txt', img_size=640, stride=32, auto=True): + self.mode = 'stream' + self.img_size = img_size + self.stride = stride + + if os.path.isfile(sources): + with open(sources) as f: + sources = [x.strip() for x in f.read().strip().splitlines() if len(x.strip())] + else: + sources = [sources] + + n = len(sources) + self.imgs, self.fps, self.frames, self.threads = [None] * n, [0] * n, [0] * n, [None] * n + self.sources = [clean_str(x) for x in sources] # clean source names for later + self.auto = auto + for i, s in enumerate(sources): # index, source + # Start thread to read frames from video stream + st = f'{i + 1}/{n}: {s}... ' + if 'youtube.com/' in s or 'youtu.be/' in s: # if source is YouTube video + check_requirements(('pafy', 'youtube_dl==2020.12.2')) + import pafy + s = pafy.new(s).getbest(preftype="mp4").url # YouTube URL + s = eval(s) if s.isnumeric() else s # i.e. 
s = '0' local webcam + cap = cv2.VideoCapture(s) + assert cap.isOpened(), f'{st}Failed to open {s}' + w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) + h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) + fps = cap.get(cv2.CAP_PROP_FPS) # warning: may return 0 or nan + self.frames[i] = max(int(cap.get(cv2.CAP_PROP_FRAME_COUNT)), 0) or float('inf') # infinite stream fallback + self.fps[i] = max((fps if math.isfinite(fps) else 0) % 100, 0) or 30 # 30 FPS fallback + + _, self.imgs[i] = cap.read() # guarantee first frame + self.threads[i] = Thread(target=self.update, args=([i, cap, s]), daemon=True) + LOGGER.info(f"{st} Success ({self.frames[i]} frames {w}x{h} at {self.fps[i]:.2f} FPS)") + self.threads[i].start() + LOGGER.info('') # newline + + # check for common shapes + s = np.stack([letterbox(x, self.img_size, stride=self.stride, auto=self.auto)[0].shape for x in self.imgs]) + self.rect = np.unique(s, axis=0).shape[0] == 1 # rect inference if all shapes equal + if not self.rect: + LOGGER.warning('WARNING: Stream shapes differ. For optimal performance supply similarly-shaped streams.') + + def update(self, i, cap, stream): + # Read stream `i` frames in daemon thread + n, f, read = 0, self.frames[i], 1 # frame number, frame array, inference every 'read' frame + while cap.isOpened() and n < f: + n += 1 + # _, self.imgs[index] = cap.read() + cap.grab() + if n % read == 0: + success, im = cap.retrieve() + if success: + self.imgs[i] = im + else: + LOGGER.warning('WARNING: Video stream unresponsive, please check your IP camera connection.') + self.imgs[i] = np.zeros_like(self.imgs[i]) + cap.open(stream) # re-open stream if signal was lost + time.sleep(1 / self.fps[i]) # wait time + + def __iter__(self): + self.count = -1 + return self + + def __next__(self): + self.count += 1 + if not all(x.is_alive() for x in self.threads) or cv2.waitKey(1) == ord('q'): # q to quit + cv2.destroyAllWindows() + raise StopIteration + + # Letterbox + img0 = self.imgs.copy() + img = [letterbox(x, self.img_size, stride=self.stride, auto=self.rect and self.auto)[0] for x in img0] + + # Stack + img = np.stack(img, 0) + + # Convert + img = img[..., ::-1].transpose((0, 3, 1, 2)) # BGR to RGB, BHWC to BCHW + img = np.ascontiguousarray(img) + + return self.sources, img, img0, None, '' + + def __len__(self): + return len(self.sources) # 1E12 frames = 32 streams at 30 FPS for 30 years + + +def img2label_paths(img_paths): + # Define label paths as a function of image paths + sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep # /images/, /labels/ substrings + return [sb.join(x.rsplit(sa, 1)).rsplit('.', 1)[0] + '.txt' for x in img_paths] + + +class LoadImagesAndLabels(Dataset): + # YOLOv5 train_loader/val_loader, loads images and labels for training and validation + cache_version = 0.6 # dataset labels *.cache version + + def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, rect=False, image_weights=False, + cache_images=False, single_cls=False, stride=32, pad=0.0, prefix=''): + self.img_size = img_size + self.augment = augment + self.hyp = hyp + self.image_weights = image_weights + self.rect = False if image_weights else rect + self.mosaic = self.augment and not self.rect # load 4 images at a time into a mosaic (only during training) + self.mosaic_border = [-img_size // 2, -img_size // 2] + self.stride = stride + self.path = path + self.albumentations = Albumentations() if augment else None + + try: + f = [] # image files + for p in path if isinstance(path, list) else [path]: + p = Path(p) # 
os-agnostic + if p.is_dir(): # dir + f += glob.glob(str(p / '**' / '*.*'), recursive=True) + # f = list(p.rglob('*.*')) # pathlib + elif p.is_file(): # file + with open(p) as t: + t = t.read().strip().splitlines() + parent = str(p.parent) + os.sep + f += [x.replace('./', parent) if x.startswith('./') else x for x in t] # local to global path + # f += [p.parent / x.lstrip(os.sep) for x in t] # local to global path (pathlib) + else: + raise Exception(f'{prefix}{p} does not exist') + self.img_files = sorted(x.replace('/', os.sep) for x in f if x.split('.')[-1].lower() in IMG_FORMATS) + # self.img_files = sorted([x for x in f if x.suffix[1:].lower() in IMG_FORMATS]) # pathlib + assert self.img_files, f'{prefix}No images found' + except Exception as e: + raise Exception(f'{prefix}Error loading data from {path}: {e}\nSee {HELP_URL}') + + # Check cache + self.label_files = img2label_paths(self.img_files) # labels + cache_path = (p if p.is_file() else Path(self.label_files[0]).parent).with_suffix('.cache') + try: + cache, exists = np.load(cache_path, allow_pickle=True).item(), True # load dict + assert cache['version'] == self.cache_version # same version + assert cache['hash'] == get_hash(self.label_files + self.img_files) # same hash + except Exception: + cache, exists = self.cache_labels(cache_path, prefix), False # cache + + # Display cache + nf, nm, ne, nc, n = cache.pop('results') # found, missing, empty, corrupt, total + if exists: + d = f"Scanning '{cache_path}' images and labels... {nf} found, {nm} missing, {ne} empty, {nc} corrupt" + tqdm(None, desc=prefix + d, total=n, initial=n) # display cache results + if cache['msgs']: + LOGGER.info('\n'.join(cache['msgs'])) # display warnings + assert nf > 0 or not augment, f'{prefix}No labels in {cache_path}. Can not train without labels. 
See {HELP_URL}' + + # Read cache + [cache.pop(k) for k in ('hash', 'version', 'msgs')] # remove items + labels, shapes, self.segments = zip(*cache.values()) + self.labels = list(labels) + self.shapes = np.array(shapes, dtype=np.float64) + self.img_files = list(cache.keys()) # update + self.label_files = img2label_paths(cache.keys()) # update + n = len(shapes) # number of images + bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index + nb = bi[-1] + 1 # number of batches + self.batch = bi # batch index of image + self.n = n + self.indices = range(n) + + # Update labels + include_class = [] # filter labels to include only these classes (optional) + include_class_array = np.array(include_class).reshape(1, -1) + for i, (label, segment) in enumerate(zip(self.labels, self.segments)): + if include_class: + j = (label[:, 0:1] == include_class_array).any(1) + self.labels[i] = label[j] + if segment: + self.segments[i] = segment[j] + if single_cls: # single-class training, merge all classes into 0 + self.labels[i][:, 0] = 0 + if segment: + self.segments[i][:, 0] = 0 + + # Rectangular Training + if self.rect: + # Sort by aspect ratio + s = self.shapes # wh + ar = s[:, 1] / s[:, 0] # aspect ratio + irect = ar.argsort() + self.img_files = [self.img_files[i] for i in irect] + self.label_files = [self.label_files[i] for i in irect] + self.labels = [self.labels[i] for i in irect] + self.shapes = s[irect] # wh + ar = ar[irect] + + # Set training image shapes + shapes = [[1, 1]] * nb + for i in range(nb): + ari = ar[bi == i] + mini, maxi = ari.min(), ari.max() + if maxi < 1: + shapes[i] = [maxi, 1] + elif mini > 1: + shapes[i] = [1, 1 / mini] + + self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride + + # Cache images into RAM/disk for faster training (WARNING: large datasets may exceed system resources) + self.imgs, self.img_npy = [None] * n, [None] * n + if cache_images: + if cache_images == 'disk': + self.im_cache_dir = Path(Path(self.img_files[0]).parent.as_posix() + '_npy') + self.img_npy = [self.im_cache_dir / Path(f).with_suffix('.npy').name for f in self.img_files] + self.im_cache_dir.mkdir(parents=True, exist_ok=True) + gb = 0 # Gigabytes of cached images + self.img_hw0, self.img_hw = [None] * n, [None] * n + results = ThreadPool(NUM_THREADS).imap(self.load_image, range(n)) + pbar = tqdm(enumerate(results), total=n) + for i, x in pbar: + if cache_images == 'disk': + if not self.img_npy[i].exists(): + np.save(self.img_npy[i].as_posix(), x[0]) + gb += self.img_npy[i].stat().st_size + else: # 'ram' + self.imgs[i], self.img_hw0[i], self.img_hw[i] = x # im, hw_orig, hw_resized = load_image(self, i) + gb += self.imgs[i].nbytes + pbar.desc = f'{prefix}Caching images ({gb / 1E9:.1f}GB {cache_images})' + pbar.close() + + def cache_labels(self, path=Path('./labels.cache'), prefix=''): + # Cache dataset labels, check images and read shapes + x = {} # dict + nm, nf, ne, nc, msgs = 0, 0, 0, 0, [] # number missing, found, empty, corrupt, messages + desc = f"{prefix}Scanning '{path.parent / path.stem}' images and labels..." 
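+ # verify_image_label (defined later in this file) returns one tuple per image: + #   (im_file, lb, shape, segments, nm_f, nf_f, ne_f, nc_f, msg) + # where nm/nf/ne/nc flag a missing/found/empty/corrupt pair; the loop below accumulates these counters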
+ with Pool(NUM_THREADS) as pool: + pbar = tqdm(pool.imap(verify_image_label, zip(self.img_files, self.label_files, repeat(prefix))), + desc=desc, total=len(self.img_files)) + for im_file, lb, shape, segments, nm_f, nf_f, ne_f, nc_f, msg in pbar: + nm += nm_f + nf += nf_f + ne += ne_f + nc += nc_f + if im_file: + x[im_file] = [lb, shape, segments] + if msg: + msgs.append(msg) + pbar.desc = f"{desc}{nf} found, {nm} missing, {ne} empty, {nc} corrupt" + + pbar.close() + if msgs: + LOGGER.info('\n'.join(msgs)) + if nf == 0: + LOGGER.warning(f'{prefix}WARNING: No labels found in {path}. See {HELP_URL}') + x['hash'] = get_hash(self.label_files + self.img_files) + x['results'] = nf, nm, ne, nc, len(self.img_files) + x['msgs'] = msgs # warnings + x['version'] = self.cache_version # cache version + try: + np.save(path, x) # save cache for next time + path.with_suffix('.cache.npy').rename(path) # remove .npy suffix + LOGGER.info(f'{prefix}New cache created: {path}') + except Exception as e: + LOGGER.warning(f'{prefix}WARNING: Cache directory {path.parent} is not writeable: {e}') # not writeable + return x + + def __len__(self): + return len(self.img_files) + + # def __iter__(self): + # self.count = -1 + # print('ran dataset iter') + # #self.shuffled_vector = np.random.permutation(self.nF) if self.augment else np.arange(self.nF) + # return self + + def __getitem__(self, index): + index = self.indices[index] # linear, shuffled, or image_weights + + hyp = self.hyp + mosaic = self.mosaic and random.random() < hyp['mosaic'] + if mosaic: + # Load mosaic + img, labels = self.load_mosaic(index) + shapes = None + + # MixUp augmentation + if random.random() < hyp['mixup']: + img, labels = mixup(img, labels, *self.load_mosaic(random.randint(0, self.n - 1))) + + else: + # Load image + img, (h0, w0), (h, w) = self.load_image(index) + + # Letterbox + shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape + img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment) + shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling + + labels = self.labels[index].copy() + if labels.size: # normalized xywh to pixel xyxy format + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], ratio[0] * w, ratio[1] * h, padw=pad[0], padh=pad[1]) + + if self.augment: + img, labels = random_perspective(img, labels, + degrees=hyp['degrees'], + translate=hyp['translate'], + scale=hyp['scale'], + shear=hyp['shear'], + perspective=hyp['perspective']) + + nl = len(labels) # number of labels + if nl: + labels[:, 1:5] = xyxy2xywhn(labels[:, 1:5], w=img.shape[1], h=img.shape[0], clip=True, eps=1E-3) + + if self.augment: + # Albumentations + img, labels = self.albumentations(img, labels) + nl = len(labels) # update after albumentations + + # HSV color-space + augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v']) + + # Flip up-down + if random.random() < hyp['flipud']: + img = np.flipud(img) + if nl: + labels[:, 2] = 1 - labels[:, 2] + + # Flip left-right + if random.random() < hyp['fliplr']: + img = np.fliplr(img) + if nl: + labels[:, 1] = 1 - labels[:, 1] + + # Cutouts + # labels = cutout(img, labels, p=0.5) + # nl = len(labels) # update after cutout + + labels_out = torch.zeros((nl, 6)) + if nl: + labels_out[:, 1:] = torch.from_numpy(labels) + + # Convert + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB + img = np.ascontiguousarray(img) + + return torch.from_numpy(img), labels_out, self.img_files[index], shapes + + def load_image(self, i): + # 
loads 1 image from dataset index 'i', returns (im, original hw, resized hw) + im = self.imgs[i] + if im is None: # not cached in RAM + npy = self.img_npy[i] + if npy and npy.exists(): # load npy + im = np.load(npy) + else: # read image + f = self.img_files[i] + im = cv2.imread(f) # BGR + assert im is not None, f'Image Not Found {f}' + h0, w0 = im.shape[:2] # orig hw + r = self.img_size / max(h0, w0) # ratio + if r != 1: # if sizes are not equal + im = cv2.resize(im, + (int(w0 * r), int(h0 * r)), + interpolation=cv2.INTER_LINEAR if (self.augment or r > 1) else cv2.INTER_AREA) + return im, (h0, w0), im.shape[:2] # im, hw_original, hw_resized + else: + return self.imgs[i], self.img_hw0[i], self.img_hw[i] # im, hw_original, hw_resized + + def load_mosaic(self, index): + # YOLOv5 4-mosaic loader. Loads 1 image + 3 random images into a 4-image mosaic + labels4, segments4 = [], [] + s = self.img_size + yc, xc = (int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border) # mosaic center x, y + indices = [index] + random.choices(self.indices, k=3) # 3 additional image indices + random.shuffle(indices) + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = self.load_image(index) + + # place img in img4 + if i == 0: # top left + img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) + x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) + elif i == 1: # top right + x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc + x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h + elif i == 2: # bottom left + x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) + x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) + elif i == 3: # bottom right + x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) + x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) + + img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + padw = x1a - x1b + padh = y1a - y1b + + # Labels + labels, segments = self.labels[index].copy(), self.segments[index].copy() + if labels.size: + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padw, padh) # normalized xywh to pixel xyxy format + segments = [xyn2xy(x, w, h, padw, padh) for x in segments] + labels4.append(labels) + segments4.extend(segments) + + # Concat/clip labels + labels4 = np.concatenate(labels4, 0) + for x in (labels4[:, 1:], *segments4): + np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() + # img4, labels4 = replicate(img4, labels4) # replicate + + # Augment + img4, labels4, segments4 = copy_paste(img4, labels4, segments4, p=self.hyp['copy_paste']) + img4, labels4 = random_perspective(img4, labels4, segments4, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + border=self.mosaic_border) # border to remove + + return img4, labels4 + + def load_mosaic9(self, index): + # YOLOv5 9-mosaic loader. 
Loads 1 image + 8 random images into a 9-image mosaic + labels9, segments9 = [], [] + s = self.img_size + indices = [index] + random.choices(self.indices, k=8) # 8 additional image indices + random.shuffle(indices) + hp, wp = -1, -1 # height, width previous + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = self.load_image(index) + + # place img in img9 + if i == 0: # center + img9 = np.full((s * 3, s * 3, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + h0, w0 = h, w + c = s, s, s + w, s + h # xmin, ymin, xmax, ymax (base) coordinates + elif i == 1: # top + c = s, s - h, s + w, s + elif i == 2: # top right + c = s + wp, s - h, s + wp + w, s + elif i == 3: # right + c = s + w0, s, s + w0 + w, s + h + elif i == 4: # bottom right + c = s + w0, s + hp, s + w0 + w, s + hp + h + elif i == 5: # bottom + c = s + w0 - w, s + h0, s + w0, s + h0 + h + elif i == 6: # bottom left + c = s + w0 - wp - w, s + h0, s + w0 - wp, s + h0 + h + elif i == 7: # left + c = s - w, s + h0 - h, s, s + h0 + elif i == 8: # top left + c = s - w, s + h0 - hp - h, s, s + h0 - hp + + padx, pady = c[:2] + x1, y1, x2, y2 = (max(x, 0) for x in c) # allocate coords + + # Labels + labels, segments = self.labels[index].copy(), self.segments[index].copy() + if labels.size: + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padx, pady) # normalized xywh to pixel xyxy format + segments = [xyn2xy(x, w, h, padx, pady) for x in segments] + labels9.append(labels) + segments9.extend(segments) + + # Image + img9[y1:y2, x1:x2] = img[y1 - pady:, x1 - padx:] # img9[ymin:ymax, xmin:xmax] + hp, wp = h, w # height, width previous + + # Offset + yc, xc = (int(random.uniform(0, s)) for _ in self.mosaic_border) # mosaic center x, y + img9 = img9[yc:yc + 2 * s, xc:xc + 2 * s] + + # Concat/clip labels + labels9 = np.concatenate(labels9, 0) + labels9[:, [1, 3]] -= xc + labels9[:, [2, 4]] -= yc + c = np.array([xc, yc]) # centers + segments9 = [x - c for x in segments9] + + for x in (labels9[:, 1:], *segments9): + np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() + # img9, labels9 = replicate(img9, labels9) # replicate + + # Augment + img9, labels9 = random_perspective(img9, labels9, segments9, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + border=self.mosaic_border) # border to remove + + return img9, labels9 + + @staticmethod + def collate_fn(batch): + img, label, path, shapes = zip(*batch) # transposed + for i, lb in enumerate(label): + lb[:, 0] = i # add target image index for build_targets() + return torch.stack(img, 0), torch.cat(label, 0), path, shapes + + @staticmethod + def collate_fn4(batch): + img, label, path, shapes = zip(*batch) # transposed + n = len(shapes) // 4 + img4, label4, path4, shapes4 = [], [], path[:n], shapes[:n] + + ho = torch.tensor([[0.0, 0, 0, 1, 0, 0]]) + wo = torch.tensor([[0.0, 0, 1, 0, 0, 0]]) + s = torch.tensor([[1, 1, 0.5, 0.5, 0.5, 0.5]]) # scale + for i in range(n): # zidane torch.zeros(16,3,720,1280) # BCHW + i *= 4 + if random.random() < 0.5: + im = F.interpolate(img[i].unsqueeze(0).float(), scale_factor=2.0, mode='bilinear', align_corners=False)[ + 0].type(img[i].type()) + lb = label[i] + else: + im = torch.cat((torch.cat((img[i], img[i + 1]), 1), torch.cat((img[i + 2], img[i + 3]), 1)), 2) + lb = torch.cat((label[i], label[i + 1] + ho, label[i + 2] + wo, label[i + 3] + ho + wo), 0) * s + img4.append(im) + label4.append(lb) + + for i, lb in 
enumerate(label4): + lb[:, 0] = i # add target image index for build_targets() + + return torch.stack(img4, 0), torch.cat(label4, 0), path4, shapes4 + + +# Ancillary functions -------------------------------------------------------------------------------------------------- +def create_folder(path='./new'): + # Create folder + if os.path.exists(path): + shutil.rmtree(path) # delete output folder + os.makedirs(path) # make new output folder + + +def flatten_recursive(path=DATASETS_DIR / 'coco128'): + # Flatten a recursive directory by bringing all files to top level + new_path = Path(str(path) + '_flat') + create_folder(new_path) + for file in tqdm(glob.glob(str(Path(path)) + '/**/*.*', recursive=True)): + shutil.copyfile(file, new_path / Path(file).name) + + +def extract_boxes(path=DATASETS_DIR / 'coco128'): # from yolov5_utils.datasets import *; extract_boxes() + # Convert detection dataset into classification dataset, with one directory per class + path = Path(path) # images dir + shutil.rmtree(path / 'classifier') if (path / 'classifier').is_dir() else None # remove existing + files = list(path.rglob('*.*')) + n = len(files) # number of files + for im_file in tqdm(files, total=n): + if im_file.suffix[1:] in IMG_FORMATS: + # image + im = cv2.imread(str(im_file))[..., ::-1] # BGR to RGB + h, w = im.shape[:2] + + # labels + lb_file = Path(img2label_paths([str(im_file)])[0]) + if Path(lb_file).exists(): + with open(lb_file) as f: + lb = np.array([x.split() for x in f.read().strip().splitlines()], dtype=np.float32) # labels + + for j, x in enumerate(lb): + c = int(x[0]) # class + f = (path / 'classifier') / f'{c}' / f'{path.stem}_{im_file.stem}_{j}.jpg' # new filename + if not f.parent.is_dir(): + f.parent.mkdir(parents=True) + + b = x[1:] * [w, h, w, h] # box + # b[2:] = b[2:].max() # rectangle to square + b[2:] = b[2:] * 1.2 + 3 # pad + b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(np.int) + + b[[0, 2]] = np.clip(b[[0, 2]], 0, w) # clip boxes outside of image + b[[1, 3]] = np.clip(b[[1, 3]], 0, h) + assert cv2.imwrite(str(f), im[b[1]:b[3], b[0]:b[2]]), f'box failure in {f}' + + +def autosplit(path=DATASETS_DIR / 'coco128/images', weights=(0.9, 0.1, 0.0), annotated_only=False): + """ Autosplit a dataset into train/val/test splits and save path/autosplit_*.txt files + Usage: from yolov5_utils.datasets import *; autosplit() + Arguments + path: Path to images directory + weights: Train, val, test weights (list, tuple) + annotated_only: Only use images with an annotated txt file + """ + path = Path(path) # images dir + files = sorted(x for x in path.rglob('*.*') if x.suffix[1:].lower() in IMG_FORMATS) # image files only + n = len(files) # number of files + random.seed(0) # for reproducibility + indices = random.choices([0, 1, 2], weights=weights, k=n) # assign each image to a split + + txt = ['autosplit_train.txt', 'autosplit_val.txt', 'autosplit_test.txt'] # 3 txt files + [(path.parent / x).unlink(missing_ok=True) for x in txt] # remove existing + + print(f'Autosplitting images from {path}' + ', using *.txt labeled images only' * annotated_only) + for i, img in tqdm(zip(indices, files), total=n): + if not annotated_only or Path(img2label_paths([str(img)])[0]).exists(): # check label + with open(path.parent / txt[i], 'a') as f: + f.write('./' + img.relative_to(path.parent).as_posix() + '\n') # add image to txt file + + +def verify_image_label(args): + # Verify one image-label pair + im_file, lb_file, prefix = args + nm, nf, ne, nc, msg, segments = 0, 0, 0, 0, '', [] # number (missing, found, 
empty, corrupt), message, segments + try: + # verify images + im = Image.open(im_file) + im.verify() # PIL verify + shape = exif_size(im) # image size + assert (shape[0] > 9) & (shape[1] > 9), f'image size {shape} <10 pixels' + assert im.format.lower() in IMG_FORMATS, f'invalid image format {im.format}' + if im.format.lower() in ('jpg', 'jpeg'): + with open(im_file, 'rb') as f: + f.seek(-2, 2) + if f.read() != b'\xff\xd9': # corrupt JPEG + ImageOps.exif_transpose(Image.open(im_file)).save(im_file, 'JPEG', subsampling=0, quality=100) + msg = f'{prefix}WARNING: {im_file}: corrupt JPEG restored and saved' + + # verify labels + if os.path.isfile(lb_file): + nf = 1 # label found + with open(lb_file) as f: + lb = [x.split() for x in f.read().strip().splitlines() if len(x)] + if any([len(x) > 8 for x in lb]): # is segment + classes = np.array([x[0] for x in lb], dtype=np.float32) + segments = [np.array(x[1:], dtype=np.float32).reshape(-1, 2) for x in lb] # (cls, xy1...) + lb = np.concatenate((classes.reshape(-1, 1), segments2boxes(segments)), 1) # (cls, xywh) + lb = np.array(lb, dtype=np.float32) + nl = len(lb) + if nl: + assert lb.shape[1] == 5, f'labels require 5 columns, {lb.shape[1]} columns detected' + assert (lb >= 0).all(), f'negative label values {lb[lb < 0]}' + assert (lb[:, 1:] <= 1).all(), f'non-normalized or out of bounds coordinates {lb[:, 1:][lb[:, 1:] > 1]}' + _, i = np.unique(lb, axis=0, return_index=True) + if len(i) < nl: # duplicate row check + lb = lb[i] # remove duplicates + if segments: + segments = segments[i] + msg = f'{prefix}WARNING: {im_file}: {nl - len(i)} duplicate labels removed' + else: + ne = 1 # label empty + lb = np.zeros((0, 5), dtype=np.float32) + else: + nm = 1 # label missing + lb = np.zeros((0, 5), dtype=np.float32) + return im_file, lb, shape, segments, nm, nf, ne, nc, msg + except Exception as e: + nc = 1 + msg = f'{prefix}WARNING: {im_file}: ignoring corrupt image/label: {e}' + return [None, None, None, None, nm, nf, ne, nc, msg] + + +def dataset_stats(path='coco128.yaml', autodownload=False, verbose=False, profile=False, hub=False): + """ Return dataset statistics dictionary with images and instances counts per split per class + To run in parent directory: export PYTHONPATH="$PWD/yolov5" + Usage1: from yolov5_utils.datasets import *; dataset_stats('coco128.yaml', autodownload=True) + Usage2: from yolov5_utils.datasets import *; dataset_stats('path/to/coco128_with_yaml.zip') + Arguments + path: Path to data.yaml or data.zip (with data.yaml inside data.zip) + autodownload: Attempt to download dataset if not found locally + verbose: Print stats dictionary + """ + + def round_labels(labels): + # Update labels to integer class and 6 decimal place floats + return [[int(c), *(round(x, 4) for x in points)] for c, *points in labels] + + def unzip(path): + # Unzip data.zip TODO: CONSTRAINT: path/to/abc.zip MUST unzip to 'path/to/abc/' + if str(path).endswith('.zip'): # path is data.zip + assert Path(path).is_file(), f'Error unzipping {path}, file not found' + ZipFile(path).extractall(path=path.parent) # unzip + dir = path.with_suffix('') # dataset directory == zip name + return True, str(dir), next(dir.rglob('*.yaml')) # zipped, data_dir, yaml_path + else: # path is data.yaml + return False, None, path + + def hub_ops(f, max_dim=1920): + # HUB ops for 1 image 'f': resize and save at reduced quality in /dataset-hub for web/app viewing + f_new = im_dir / Path(f).name # dataset-hub image filename + try: # use PIL + im = Image.open(f) + r = max_dim / max(im.height, 
im.width) # ratio + if r < 1.0: # image too large + im = im.resize((int(im.width * r), int(im.height * r))) + im.save(f_new, 'JPEG', quality=75, optimize=True) # save + except Exception as e: # use OpenCV + print(f'WARNING: HUB ops PIL failure {f}: {e}') + im = cv2.imread(f) + im_height, im_width = im.shape[:2] + r = max_dim / max(im_height, im_width) # ratio + if r < 1.0: # image too large + im = cv2.resize(im, (int(im_width * r), int(im_height * r)), interpolation=cv2.INTER_AREA) + cv2.imwrite(str(f_new), im) + + zipped, data_dir, yaml_path = unzip(Path(path)) + with open(check_yaml(yaml_path), errors='ignore') as f: + data = yaml.safe_load(f) # data dict + if zipped: + data['path'] = data_dir # TODO: should this be dir.resolve()? + check_dataset(data, autodownload) # download dataset if missing + hub_dir = Path(data['path'] + ('-hub' if hub else '')) + stats = {'nc': data['nc'], 'names': data['names']} # statistics dictionary + for split in 'train', 'val', 'test': + if data.get(split) is None: + stats[split] = None # i.e. no test set + continue + x = [] + dataset = LoadImagesAndLabels(data[split]) # load dataset + for label in tqdm(dataset.labels, total=dataset.n, desc='Statistics'): + x.append(np.bincount(label[:, 0].astype(int), minlength=data['nc'])) + x = np.array(x) # shape(128x80) + stats[split] = {'instance_stats': {'total': int(x.sum()), 'per_class': x.sum(0).tolist()}, + 'image_stats': {'total': dataset.n, 'unlabelled': int(np.all(x == 0, 1).sum()), + 'per_class': (x > 0).sum(0).tolist()}, + 'labels': [{str(Path(k).name): round_labels(v.tolist())} for k, v in + zip(dataset.img_files, dataset.labels)]} + + if hub: + im_dir = hub_dir / 'images' + im_dir.mkdir(parents=True, exist_ok=True) + for _ in tqdm(ThreadPool(NUM_THREADS).imap(hub_ops, dataset.img_files), total=dataset.n, desc='HUB Ops'): + pass + + # Profile + stats_path = hub_dir / 'stats.json' + if profile: + for _ in range(1): + file = stats_path.with_suffix('.npy') + t1 = time.time() + np.save(file, stats) + t2 = time.time() + x = np.load(file, allow_pickle=True) + print(f'stats.npy times: {time.time() - t2:.3f}s read, {t2 - t1:.3f}s write') + + file = stats_path.with_suffix('.json') + t1 = time.time() + with open(file, 'w') as f: + json.dump(stats, f) # save stats *.json + t2 = time.time() + with open(file) as f: + x = json.load(f) # load hyps dict + print(f'stats.json times: {time.time() - t2:.3f}s read, {t2 - t1:.3f}s write') + + # Save, print and return + if hub: + print(f'Saving {stats_path.resolve()}...') + with open(stats_path, 'w') as f: + json.dump(stats, f) # save stats.json + if verbose: + print(json.dumps(stats, indent=2, sort_keys=False)) + return stats diff --git a/detector/YOLOv5/utils/downloads.py b/detector/YOLOv5/utils/downloads.py new file mode 100644 index 0000000000000000000000000000000000000000..d7b87cb2cadd22fcdfaafc7fd56fc29e14d9a538 --- /dev/null +++ b/detector/YOLOv5/utils/downloads.py @@ -0,0 +1,153 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Download utils +""" + +import os +import platform +import subprocess +import time +import urllib +from pathlib import Path +from zipfile import ZipFile + +import requests +import torch + + +def gsutil_getsize(url=''): + # gs://bucket/file size https://cloud.google.com/storage/docs/gsutil/commands/du + s = subprocess.check_output(f'gsutil du {url}', shell=True).decode('utf-8') + return eval(s.split(' ')[0]) if len(s) else 0 # bytes + + +def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''): + # Attempts to download file from url or 
url2, checks and removes incomplete downloads < min_bytes + file = Path(file) + assert_msg = f"Downloaded file '{file}' does not exist or size is < min_bytes={min_bytes}" + try: # url1 + print(f'Downloading {url} to {file}...') + torch.hub.download_url_to_file(url, str(file)) + assert file.exists() and file.stat().st_size > min_bytes, assert_msg # check + except Exception as e: # url2 + file.unlink(missing_ok=True) # remove partial downloads + print(f'ERROR: {e}\nRe-attempting {url2 or url} to {file}...') + os.system(f"curl -L '{url2 or url}' -o '{file}' --retry 3 -C -") # curl download, retry and resume on fail + finally: + if not file.exists() or file.stat().st_size < min_bytes: # check + file.unlink(missing_ok=True) # remove partial downloads + print(f"ERROR: {assert_msg}\n{error_msg}") + print('') + + +def attempt_download(file, repo='ultralytics/yolov5'): # from utils.downloads import *; attempt_download() + # Attempt file download if does not exist + file = Path(str(file).strip().replace("'", '')) + + if not file.exists(): + # URL specified + name = Path(urllib.parse.unquote(str(file))).name # decode '%2F' to '/' etc. + if str(file).startswith(('http:/', 'https:/')): # download + url = str(file).replace(':/', '://') # Pathlib turns :// -> :/ + file = name.split('?')[0] # parse authentication https://url.com/file.txt?auth... + if Path(file).is_file(): + print(f'Found {url} locally at {file}') # file already exists + else: + safe_download(file=file, url=url, min_bytes=1E5) + return file + + # GitHub assets + file.parent.mkdir(parents=True, exist_ok=True) # make parent dir (if required) + try: + response = requests.get(f'https://api.github.com/repos/{repo}/releases/latest').json() # github api + assets = [x['name'] for x in response['assets']] # release assets, i.e. ['yolov5s.pt', 'yolov5m.pt', ...] + tag = response['tag_name'] # i.e. 'v1.0' + except Exception: # fallback plan + assets = ['yolov5n.pt', 'yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt', + 'yolov5n6.pt', 'yolov5s6.pt', 'yolov5m6.pt', 'yolov5l6.pt', 'yolov5x6.pt'] + try: + tag = subprocess.check_output('git tag', shell=True, stderr=subprocess.STDOUT).decode().split()[-1] + except Exception: + tag = 'v6.0' # current release + + if name in assets: + safe_download(file, + url=f'https://github.com/{repo}/releases/download/{tag}/{name}', + # url2=f'https://storage.googleapis.com/{repo}/ckpt/{name}', # backup url (optional) + min_bytes=1E5, + error_msg=f'{file} missing, try downloading from https://github.com/{repo}/releases/') + + return str(file) + + +def gdrive_download(id='16TiPfZj7htmTyhntwcZyEEAejOUxuT6m', file='tmp.zip'): + # Downloads a file from Google Drive. from yolov5.utils.downloads import *; gdrive_download() + t = time.time() + file = Path(file) + cookie = Path('cookie') # gdrive cookie + print(f'Downloading https://drive.google.com/uc?export=download&id={id} as {file}... 
', end='') + file.unlink(missing_ok=True) # remove existing file + cookie.unlink(missing_ok=True) # remove existing cookie + + # Attempt file download + out = "NUL" if platform.system() == "Windows" else "/dev/null" + os.system(f'curl -c ./cookie -s -L "drive.google.com/uc?export=download&id={id}" > {out}') + if os.path.exists('cookie'): # large file + s = f'curl -Lb ./cookie "drive.google.com/uc?export=download&confirm={get_token()}&id={id}" -o {file}' + else: # small file + s = f'curl -s -L -o {file} "drive.google.com/uc?export=download&id={id}"' + r = os.system(s) # execute, capture return + cookie.unlink(missing_ok=True) # remove existing cookie + + # Error check + if r != 0: + file.unlink(missing_ok=True) # remove partial + print('Download error ') # raise Exception('Download error') + return r + + # Unzip if archive + if file.suffix == '.zip': + print('unzipping... ', end='') + ZipFile(file).extractall(path=file.parent) # unzip + file.unlink() # remove zip + + print(f'Done ({time.time() - t:.1f}s)') + return r + + +def get_token(cookie="./cookie"): + with open(cookie) as f: + for line in f: + if "download" in line: + return line.split()[-1] + return "" + +# Google utils: https://cloud.google.com/storage/docs/reference/libraries ---------------------------------------------- +# +# +# def upload_blob(bucket_name, source_file_name, destination_blob_name): +# # Uploads a file to a bucket +# # https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-python +# +# storage_client = storage.Client() +# bucket = storage_client.get_bucket(bucket_name) +# blob = bucket.blob(destination_blob_name) +# +# blob.upload_from_filename(source_file_name) +# +# print('File {} uploaded to {}.'.format( +# source_file_name, +# destination_blob_name)) +# +# +# def download_blob(bucket_name, source_blob_name, destination_file_name): +# # Uploads a blob from a bucket +# storage_client = storage.Client() +# bucket = storage_client.get_bucket(bucket_name) +# blob = bucket.blob(source_blob_name) +# +# blob.download_to_filename(destination_file_name) +# +# print('Blob {} downloaded to {}.'.format( +# source_blob_name, +# destination_file_name)) diff --git a/detector/YOLOv5/utils/flask_rest_api/README.md b/detector/YOLOv5/utils/flask_rest_api/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a726acbd92043458311dd949cc09c0195cd35400 --- /dev/null +++ b/detector/YOLOv5/utils/flask_rest_api/README.md @@ -0,0 +1,73 @@ +# Flask REST API + +[REST](https://en.wikipedia.org/wiki/Representational_state_transfer) [API](https://en.wikipedia.org/wiki/API)s are +commonly used to expose Machine Learning (ML) models to other services. This folder contains an example REST API +created using Flask to expose the YOLOv5s model from [PyTorch Hub](https://pytorch.org/hub/ultralytics_yolov5/). + +## Requirements + +[Flask](https://palletsprojects.com/p/flask/) is required. 
Install with: + +```shell +$ pip install Flask +``` + +## Run + +After Flask installation run: + +```shell +$ python3 restapi.py --port 5000 +``` + +Then use [curl](https://curl.se/) to perform a request: + +```shell +$ curl -X POST -F image=@zidane.jpg 'http://localhost:5000/v1/object-detection/yolov5s' +``` + +The model inference results are returned as a JSON response: + +```json +[ + { + "class": 0, + "confidence": 0.8900438547, + "height": 0.9318675399, + "name": "person", + "width": 0.3264600933, + "xcenter": 0.7438579798, + "ycenter": 0.5207948685 + }, + { + "class": 0, + "confidence": 0.8440024257, + "height": 0.7155083418, + "name": "person", + "width": 0.6546785235, + "xcenter": 0.427829951, + "ycenter": 0.6334488392 + }, + { + "class": 27, + "confidence": 0.3771208823, + "height": 0.3902671337, + "name": "tie", + "width": 0.0696444362, + "xcenter": 0.3675483763, + "ycenter": 0.7991207838 + }, + { + "class": 27, + "confidence": 0.3527112305, + "height": 0.1540903747, + "name": "tie", + "width": 0.0336618312, + "xcenter": 0.7814827561, + "ycenter": 0.5065554976 + } +] +``` + +An example python script to perform inference using [requests](https://docs.python-requests.org/en/master/) is given +in `example_request.py` diff --git a/detector/YOLOv5/utils/flask_rest_api/example_request.py b/detector/YOLOv5/utils/flask_rest_api/example_request.py new file mode 100644 index 0000000000000000000000000000000000000000..ff21f30f93ca37578ce45366a1ddbe3f3eadaa79 --- /dev/null +++ b/detector/YOLOv5/utils/flask_rest_api/example_request.py @@ -0,0 +1,13 @@ +"""Perform test request""" +import pprint + +import requests + +DETECTION_URL = "http://localhost:5000/v1/object-detection/yolov5s" +TEST_IMAGE = "zidane.jpg" + +image_data = open(TEST_IMAGE, "rb").read() + +response = requests.post(DETECTION_URL, files={"image": image_data}).json() + +pprint.pprint(response) diff --git a/detector/YOLOv5/utils/flask_rest_api/restapi.py b/detector/YOLOv5/utils/flask_rest_api/restapi.py new file mode 100644 index 0000000000000000000000000000000000000000..b93ad16a0f58cf48bfc71afdbd1a548bc5ffe8db --- /dev/null +++ b/detector/YOLOv5/utils/flask_rest_api/restapi.py @@ -0,0 +1,37 @@ +""" +Run a rest API exposing the yolov5s object detection model +""" +import argparse +import io + +import torch +from flask import Flask, request +from PIL import Image + +app = Flask(__name__) + +DETECTION_URL = "/v1/object-detection/yolov5s" + + +@app.route(DETECTION_URL, methods=["POST"]) +def predict(): + if not request.method == "POST": + return + + if request.files.get("image"): + image_file = request.files["image"] + image_bytes = image_file.read() + + img = Image.open(io.BytesIO(image_bytes)) + + results = model(img, size=640) # reduce size=320 for faster inference + return results.pandas().xyxy[0].to_json(orient="records") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Flask API exposing YOLOv5 model") + parser.add_argument("--port", default=5000, type=int, help="port number") + args = parser.parse_args() + + model = torch.hub.load("ultralytics/yolov5", "yolov5s", force_reload=True) # force_reload to recache + app.run(host="0.0.0.0", port=args.port) # debug=True causes Restarting with stat diff --git a/detector/YOLOv5/utils/general.py b/detector/YOLOv5/utils/general.py new file mode 100644 index 0000000000000000000000000000000000000000..effab50be30199c76a46e62db1f143421d7569bc --- /dev/null +++ b/detector/YOLOv5/utils/general.py @@ -0,0 +1,880 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" 
+General utils +""" + +import contextlib +import glob +import logging +import math +import os +import platform +import random +import re +import shutil +import signal +import time +import urllib +from itertools import repeat +from multiprocessing.pool import ThreadPool +from pathlib import Path +from subprocess import check_output +from zipfile import ZipFile + +import cv2 +import numpy as np +import pandas as pd +import pkg_resources as pkg +import torch +import torchvision +import yaml + +from detector.YOLOv5.utils.downloads import gsutil_getsize +from detector.YOLOv5.utils.metrics import box_iou, fitness + +# Settings +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +DATASETS_DIR = ROOT.parent / 'datasets' # YOLOv5 datasets directory +NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLOv5 multiprocessing threads +VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true' # global verbose mode +FONT = 'Arial.ttf' # https://ultralytics.com/assets/Arial.ttf + +torch.set_printoptions(linewidth=320, precision=5, profile='long') +np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5 +pd.options.display.max_columns = 10 +cv2.setNumThreads(0) # prevent OpenCV from multithreading (incompatible with PyTorch DataLoader) +os.environ['NUMEXPR_MAX_THREADS'] = str(NUM_THREADS) # NumExpr max threads + + +def is_kaggle(): + # Is environment a Kaggle Notebook? + try: + assert os.environ.get('PWD') == '/kaggle/working' + assert os.environ.get('KAGGLE_URL_BASE') == 'https://www.kaggle.com' + return True + except AssertionError: + return False + + +def is_writeable(dir, test=False): + # Return True if directory has write permissions, test opening a file with write permissions if test=True + if test: # method 1 + file = Path(dir) / 'tmp.txt' + try: + with open(file, 'w'): # open file with write permissions + pass + file.unlink() # remove file + return True + except OSError: + return False + else: # method 2 + return os.access(dir, os.R_OK) # possible issues on Windows + + +def set_logging(name=None, verbose=VERBOSE): + # Sets level and returns logger + if is_kaggle(): + for h in logging.root.handlers: + logging.root.removeHandler(h) # remove all handlers associated with the root logger object + rank = int(os.getenv('RANK', -1)) # rank in world for Multi-GPU trainings + logging.basicConfig(format="%(message)s", level=logging.INFO if (verbose and rank in (-1, 0)) else logging.WARNING) + return logging.getLogger(name) + + +LOGGER = set_logging('yolov5') # define globally (used in train.py, val.py, detect.py, etc.) + + +def user_config_dir(dir='Ultralytics', env_var='YOLOV5_CONFIG_DIR'): + # Return path of user configuration directory. Prefer environment variable if exists. Make dir if required. 
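+    # lookup order: the YOLOV5_CONFIG_DIR env var, then an OS-specific config dir, then /tmp if that dir is not writeable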
+ env = os.getenv(env_var) + if env: + path = Path(env) # use environment variable + else: + cfg = {'Windows': 'AppData/Roaming', 'Linux': '.config', 'Darwin': 'Library/Application Support'} # 3 OS dirs + path = Path.home() / cfg.get(platform.system(), '') # OS-specific config dir + path = (path if is_writeable(path) else Path('/tmp')) / dir # GCP and AWS lambda fix, only /tmp is writeable + path.mkdir(exist_ok=True) # make if required + return path + + +CONFIG_DIR = user_config_dir() # Ultralytics settings dir + + +class Profile(contextlib.ContextDecorator): + # Usage: @Profile() decorator or 'with Profile():' context manager + def __enter__(self): + self.start = time.time() + + def __exit__(self, type, value, traceback): + print(f'Profile results: {time.time() - self.start:.5f}s') + + +class Timeout(contextlib.ContextDecorator): + # Usage: @Timeout(seconds) decorator or 'with Timeout(seconds):' context manager + def __init__(self, seconds, *, timeout_msg='', suppress_timeout_errors=True): + self.seconds = int(seconds) + self.timeout_message = timeout_msg + self.suppress = bool(suppress_timeout_errors) + + def _timeout_handler(self, signum, frame): + raise TimeoutError(self.timeout_message) + + def __enter__(self): + signal.signal(signal.SIGALRM, self._timeout_handler) # Set handler for SIGALRM + signal.alarm(self.seconds) # start countdown for SIGALRM to be raised + + def __exit__(self, exc_type, exc_val, exc_tb): + signal.alarm(0) # Cancel SIGALRM if it's scheduled + if self.suppress and exc_type is TimeoutError: # Suppress TimeoutError + return True + + +class WorkingDirectory(contextlib.ContextDecorator): + # Usage: @WorkingDirectory(dir) decorator or 'with WorkingDirectory(dir):' context manager + def __init__(self, new_dir): + self.dir = new_dir # new dir + self.cwd = Path.cwd().resolve() # current dir + + def __enter__(self): + os.chdir(self.dir) + + def __exit__(self, exc_type, exc_val, exc_tb): + os.chdir(self.cwd) + + +def try_except(func): + # try-except function. Usage: @try_except decorator + def handler(*args, **kwargs): + try: + func(*args, **kwargs) + except Exception as e: + print(e) + + return handler + + +def methods(instance): + # Get class/instance methods + return [f for f in dir(instance) if callable(getattr(instance, f)) and not f.startswith("__")] + + +def print_args(name, opt): + # Print argparser arguments + LOGGER.info(colorstr(f'{name}: ') + ', '.join(f'{k}={v}' for k, v in vars(opt).items())) + + +def init_seeds(seed=0): + # Initialize random number generator (RNG) seeds https://pytorch.org/docs/stable/notes/randomness.html + # cudnn seed 0 settings are slower and more reproducible, else faster and less reproducible + import torch.backends.cudnn as cudnn + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + cudnn.benchmark, cudnn.deterministic = (False, True) if seed == 0 else (True, False) + + +def intersect_dicts(da, db, exclude=()): + # Dictionary intersection of matching keys and shapes, omitting 'exclude' keys, using da values + return {k: v for k, v in da.items() if k in db and not any(x in k for x in exclude) and v.shape == db[k].shape} + + +def get_latest_run(search_dir='.'): + # Return path to most recent 'last.pt' in /runs (i.e. to --resume from) + last_list = glob.glob(f'{search_dir}/**/last*.pt', recursive=True) + return max(last_list, key=os.path.getctime) if last_list else '' + + +def is_docker(): + # Is environment a Docker container? 
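+    # heuristic only: /workspace exists in common deep-learning containers (e.g. NVIDIA NGC images) but is not Docker-specific; /.dockerenv is the generic marker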
+ return Path('/workspace').exists() # or Path('/.dockerenv').exists() + + +def is_colab(): + # Is environment a Google Colab instance? + try: + import google.colab + return True + except ImportError: + return False + + +def is_pip(): + # Is file in a pip package? + return 'site-packages' in Path(__file__).resolve().parts + + +def is_ascii(s=''): + # Is string composed of all ASCII (no UTF) characters? (note str().isascii() introduced in python 3.7) + s = str(s) # convert list, tuple, None, etc. to str + return len(s.encode().decode('ascii', 'ignore')) == len(s) + + +def is_chinese(s='人工智能'): + # Is string composed of any Chinese characters? + return True if re.search('[\u4e00-\u9fff]', str(s)) else False + + +def emojis(str=''): + # Return platform-dependent emoji-safe version of string + return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str + + +def file_size(path): + # Return file/dir size (MB) + path = Path(path) + if path.is_file(): + return path.stat().st_size / 1E6 + elif path.is_dir(): + return sum(f.stat().st_size for f in path.glob('**/*') if f.is_file()) / 1E6 + else: + return 0.0 + + +def check_online(): + # Check internet connectivity + import socket + try: + socket.create_connection(("1.1.1.1", 443), 5) # check host accessibility + return True + except OSError: + return False + + +@try_except +@WorkingDirectory(ROOT) +def check_git_status(): + # Recommend 'git pull' if code is out of date + msg = ', for updates see https://github.com/ultralytics/yolov5' + s = colorstr('github: ') # string + assert Path('.git').exists(), s + 'skipping check (not a git repository)' + msg + assert not is_docker(), s + 'skipping check (Docker image)' + msg + assert check_online(), s + 'skipping check (offline)' + msg + + cmd = 'git fetch && git config --get remote.origin.url' + url = check_output(cmd, shell=True, timeout=5).decode().strip().rstrip('.git') # git fetch + branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip() # checked out + n = int(check_output(f'git rev-list {branch}..origin/master --count', shell=True)) # commits behind + if n > 0: + s += f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update." + else: + s += f'up to date with {url} ✅' + LOGGER.info(emojis(s)) # emoji-safe + + +def check_python(minimum='3.6.2'): + # Check current python version vs. required python version + check_version(platform.python_version(), minimum, name='Python ', hard=True) + + +def check_version(current='0.0.0', minimum='0.0.0', name='version ', pinned=False, hard=False, verbose=False): + # Check version vs. required version + current, minimum = (pkg.parse_version(x) for x in (current, minimum)) + result = (current == minimum) if pinned else (current >= minimum) # bool + s = f'{name}{minimum} required by YOLOv5, but {name}{current} is currently installed' # string + if hard: + assert result, s # assert min requirements met + if verbose and not result: + LOGGER.warning(s) + return result + + +@try_except +def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), install=True): + # Check installed dependencies meet requirements (pass *.txt file or list of packages) + prefix = colorstr('red', 'bold', 'requirements:') + check_python() # check python version + if isinstance(requirements, (str, Path)): # requirements.txt file + file = Path(requirements) + assert file.exists(), f"{prefix} {file.resolve()} not found, check failed." 
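+        # read requirements.txt into pip-style 'name+specifier' strings (e.g. 'torch>=1.7.0'), skipping excluded packages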
+ with file.open() as f: + requirements = [f'{x.name}{x.specifier}' for x in pkg.parse_requirements(f) if x.name not in exclude] + else: # list or tuple of packages + requirements = [x for x in requirements if x not in exclude] + + n = 0 # number of packages updates + for r in requirements: + try: + pkg.require(r) + except Exception: # DistributionNotFound or VersionConflict if requirements not met + s = f"{prefix} {r} not found and is required by YOLOv5" + if install: + LOGGER.info(f"{s}, attempting auto-update...") + try: + assert check_online(), f"'pip install {r}' skipped (offline)" + LOGGER.info(check_output(f"pip install '{r}'", shell=True).decode()) + n += 1 + except Exception as e: + LOGGER.warning(f'{prefix} {e}') + else: + LOGGER.info(f'{s}. Please install and rerun your command.') + + if n: # if packages updated + source = file.resolve() if 'file' in locals() else requirements + s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \ + f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n" + LOGGER.info(emojis(s)) + + +def check_img_size(imgsz, s=32, floor=0): + # Verify image size is a multiple of stride s in each dimension + if isinstance(imgsz, int): # integer i.e. img_size=640 + new_size = max(make_divisible(imgsz, int(s)), floor) + else: # list i.e. img_size=[640, 480] + new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz] + if new_size != imgsz: + LOGGER.warning(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}') + return new_size + + +def check_imshow(): + # Check if environment supports image displays + try: + assert not is_docker(), 'cv2.imshow() is disabled in Docker environments' + assert not is_colab(), 'cv2.imshow() is disabled in Google Colab environments' + cv2.imshow('test', np.zeros((1, 1, 3))) + cv2.waitKey(1) + cv2.destroyAllWindows() + cv2.waitKey(1) + return True + except Exception as e: + LOGGER.warning(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}') + return False + + +def check_suffix(file='yolov5s.pt', suffix=('.pt',), msg=''): + # Check file(s) for acceptable suffix + if file and suffix: + if isinstance(suffix, str): + suffix = [suffix] + for f in file if isinstance(file, (list, tuple)) else [file]: + s = Path(f).suffix.lower() # file suffix + if len(s): + assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}" + + +def check_yaml(file, suffix=('.yaml', '.yml')): + # Search/download YAML file (if necessary) and return path, checking suffix + return check_file(file, suffix) + + +def check_file(file, suffix=''): + # Search/download file (if necessary) and return path + check_suffix(file, suffix) # optional + file = str(file) # convert to str() + if Path(file).is_file() or file == '': # exists + return file + elif file.startswith(('http:/', 'https:/')): # download + url = str(Path(file)).replace(':/', '://') # Pathlib turns :// -> :/ + file = Path(urllib.parse.unquote(file).split('?')[0]).name # '%2F' to '/', split https://url.com/file.txt?auth + if Path(file).is_file(): + LOGGER.info(f'Found {url} locally at {file}') # file already exists + else: + LOGGER.info(f'Downloading {url} to {file}...') + torch.hub.download_url_to_file(url, file) + assert Path(file).exists() and Path(file).stat().st_size > 0, f'File download failed: {url}' # check + return file + else: # search + files = [] + for d in 'data', 'models', 'yolov5_utils': # search directories + files.extend(glob.glob(str(ROOT / d / '**' / 
file), recursive=True)) # find file + assert len(files), f'File not found: {file}' # assert file was found + assert len(files) == 1, f"Multiple files match '{file}', specify exact path: {files}" # assert unique + return files[0] # return file + + +def check_font(font=FONT): + # Download font to CONFIG_DIR if necessary + font = Path(font) + if not font.exists() and not (CONFIG_DIR / font.name).exists(): + url = "https://ultralytics.com/assets/" + font.name + LOGGER.info(f'Downloading {url} to {CONFIG_DIR / font.name}...') + torch.hub.download_url_to_file(url, str(font), progress=False) + + +def check_dataset(data, autodownload=True): + # Download and/or unzip dataset if not found locally + # Usage: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128_with_yaml.zip + + # Download (optional) + extract_dir = '' + if isinstance(data, (str, Path)) and str(data).endswith('.zip'): # i.e. gs://bucket/dir/coco128.zip + download(data, dir=DATASETS_DIR, unzip=True, delete=False, curl=False, threads=1) + data = next((DATASETS_DIR / Path(data).stem).rglob('*.yaml')) + extract_dir, autodownload = data.parent, False + + # Read yaml (optional) + if isinstance(data, (str, Path)): + with open(data, errors='ignore') as f: + data = yaml.safe_load(f) # dictionary + + # Resolve paths + path = Path(extract_dir or data.get('path') or '') # optional 'path' default to '.' + if not path.is_absolute(): + path = (ROOT / path).resolve() + for k in 'train', 'val', 'test': + if data.get(k): # prepend path + data[k] = str(path / data[k]) if isinstance(data[k], str) else [str(path / x) for x in data[k]] + + # Parse yaml + assert 'nc' in data, "Dataset 'nc' key missing." + if 'names' not in data: + data['names'] = [f'class{i}' for i in range(data['nc'])] # assign class names if missing + train, val, test, s = (data.get(x) for x in ('train', 'val', 'test', 'download')) + if val: + val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])] # val path + if not all(x.exists() for x in val): + LOGGER.info('\nDataset not found, missing paths: %s' % [str(x) for x in val if not x.exists()]) + if s and autodownload: # download script + root = path.parent if 'path' in data else '..' # unzip directory i.e. '../' + if s.startswith('http') and s.endswith('.zip'): # URL + f = Path(s).name # filename + LOGGER.info(f'Downloading {s} to {f}...') + torch.hub.download_url_to_file(s, f) + Path(root).mkdir(parents=True, exist_ok=True) # create root + ZipFile(f).extractall(path=root) # unzip + Path(f).unlink() # remove zip + r = None # success + elif s.startswith('bash '): # bash script + LOGGER.info(f'Running {s} ...') + r = os.system(s) + else: # python script + r = exec(s, {'yaml': data}) # return None + LOGGER.info(f"Dataset autodownload {f'success, saved to {root}' if r in (0, None) else 'failure'}\n") + else: + raise Exception('Dataset not found.') + + return data # dictionary + + +def url2file(url): + # Convert URL to filename, i.e. 
https://url.com/file.txt?auth -> file.txt + url = str(Path(url)).replace(':/', '://') # Pathlib turns :// -> :/ + file = Path(urllib.parse.unquote(url)).name.split('?')[0] # '%2F' to '/', split https://url.com/file.txt?auth + return file + + +def download(url, dir='.', unzip=True, delete=True, curl=False, threads=1): + # Multi-threaded file download and unzip function, used in data.yaml for autodownload + def download_one(url, dir): + # Download 1 file + f = dir / Path(url).name # filename + if Path(url).is_file(): # exists in current path + Path(url).rename(f) # move to dir + elif not f.exists(): + LOGGER.info(f'Downloading {url} to {f}...') + if curl: + os.system(f"curl -L '{url}' -o '{f}' --retry 9 -C -") # curl download, retry and resume on fail + else: + torch.hub.download_url_to_file(url, f, progress=True) # torch download + if unzip and f.suffix in ('.zip', '.gz'): + LOGGER.info(f'Unzipping {f}...') + if f.suffix == '.zip': + ZipFile(f).extractall(path=dir) # unzip + elif f.suffix == '.gz': + os.system(f'tar xfz {f} --directory {f.parent}') # unzip + if delete: + f.unlink() # remove zip + + dir = Path(dir) + dir.mkdir(parents=True, exist_ok=True) # make directory + if threads > 1: + pool = ThreadPool(threads) + pool.imap(lambda x: download_one(*x), zip(url, repeat(dir))) # multi-threaded + pool.close() + pool.join() + else: + for u in [url] if isinstance(url, (str, Path)) else url: + download_one(u, dir) + + +def make_divisible(x, divisor): + # Returns nearest x divisible by divisor + if isinstance(divisor, torch.Tensor): + divisor = int(divisor.max()) # to int + return math.ceil(x / divisor) * divisor + + +def clean_str(s): + # Cleans a string by replacing special characters with underscore _ + return re.sub(pattern="[|@#!¡·$€%&()=?¿^*;:,¨´><+]", repl="_", string=s) + + +def one_cycle(y1=0.0, y2=1.0, steps=100): + # lambda function for sinusoidal ramp from y1 to y2 https://arxiv.org/pdf/1812.01187.pdf + return lambda x: ((1 - math.cos(x * math.pi / steps)) / 2) * (y2 - y1) + y1 + + +def colorstr(*input): + # Colors a string https://en.wikipedia.org/wiki/ANSI_escape_code, i.e. 
colorstr('blue', 'hello world')
+    *args, string = input if len(input) > 1 else ('blue', 'bold', input[0])  # color arguments, string
+    colors = {'black': '\033[30m',  # basic colors
+              'red': '\033[31m',
+              'green': '\033[32m',
+              'yellow': '\033[33m',
+              'blue': '\033[34m',
+              'magenta': '\033[35m',
+              'cyan': '\033[36m',
+              'white': '\033[37m',
+              'bright_black': '\033[90m',  # bright colors
+              'bright_red': '\033[91m',
+              'bright_green': '\033[92m',
+              'bright_yellow': '\033[93m',
+              'bright_blue': '\033[94m',
+              'bright_magenta': '\033[95m',
+              'bright_cyan': '\033[96m',
+              'bright_white': '\033[97m',
+              'end': '\033[0m',  # misc
+              'bold': '\033[1m',
+              'underline': '\033[4m'}
+    return ''.join(colors[x] for x in args) + f'{string}' + colors['end']
+
+
+def labels_to_class_weights(labels, nc=80):
+    # Get class weights (inverse frequency) from training labels
+    if labels[0] is None:  # no labels loaded
+        return torch.Tensor()
+
+    labels = np.concatenate(labels, 0)  # labels.shape = (866643, 5) for COCO
+    classes = labels[:, 0].astype(int)  # labels = [class xywh]; plain int: np.int was removed in NumPy 1.24
+    weights = np.bincount(classes, minlength=nc)  # occurrences per class
+
+    # Prepend gridpoint count (for uCE training)
+    # gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum()  # gridpoints per image
+    # weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5  # prepend gridpoints to start
+
+    weights[weights == 0] = 1  # replace empty bins with 1
+    weights = 1 / weights  # number of targets per class
+    weights /= weights.sum()  # normalize
+    return torch.from_numpy(weights)
+
+
+def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)):
+    # Produces image weights based on class_weights and image contents
+    class_counts = np.array([np.bincount(x[:, 0].astype(int), minlength=nc) for x in labels])
+    image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1)
+    # index = random.choices(range(n), weights=image_weights, k=1)  # weight image sample
+    return image_weights
+
+
+def coco80_to_coco91_class():  # converts 80-index (val2014) to 91-index (paper)
+    # https://tech.amikelive.com/node-718/what-object-categories-labels-are-in-coco-dataset/
+    # a = np.loadtxt('data/coco.names', dtype='str', delimiter='\n')
+    # b = np.loadtxt('data/coco_paper.names', dtype='str', delimiter='\n')
+    # x1 = [list(a[i] == b).index(True) + 1 for i in range(80)]  # darknet to coco
+    # x2 = [list(b[i] == a).index(True) if any(b[i] == a) else None for i in range(91)]  # coco to darknet
+    x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, 34,
+         35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+         64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90]
+    return x
+
+
+def xyxy2xywh(x):
+    # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] where xy1=top-left, xy2=bottom-right
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = (x[:, 0] + x[:, 2]) / 2  # x center
+    y[:, 1] = (x[:, 1] + x[:, 3]) / 2  # y center
+    y[:, 2] = x[:, 2] - x[:, 0]  # width
+    y[:, 3] = x[:, 3] - x[:, 1]  # height
+    return y
+
+
+def xywh2xyxy(x):
+    # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = x[:, 0] - x[:, 2] / 2  # top left x
+    y[:, 1] = x[:, 1] - x[:, 3] / 2  # top left y
+    y[:, 2] = x[:, 0] + x[:, 2] / 2  # bottom right x
+    y[:, 3] = x[:, 1] + x[:, 3] / 2  # bottom right y
+    return y
+
+
+def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0):
+    # Convert nx4 boxes from [x, y, w, h] normalized to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = w * (x[:, 0] - x[:, 2] / 2) + padw  # top left x
+    y[:, 1] = h * (x[:, 1] - x[:, 3] / 2) + padh  # top left y
+    y[:, 2] = w * (x[:, 0] + x[:, 2] / 2) + padw  # bottom right x
+    y[:, 3] = h * (x[:, 1] + x[:, 3] / 2) + padh  # bottom right y
+    return y
+
+
+def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0):
+    # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] normalized where xy1=top-left, xy2=bottom-right
+    if clip:
+        clip_coords(x, (h - eps, w - eps))  # warning: inplace clip
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = ((x[:, 0] + x[:, 2]) / 2) / w  # x center
+    y[:, 1] = ((x[:, 1] + x[:, 3]) / 2) / h  # y center
+    y[:, 2] = (x[:, 2] - x[:, 0]) / w  # width
+    y[:, 3] = (x[:, 3] - x[:, 1]) / h  # height
+    return y
+
+
+def xyn2xy(x, w=640, h=640, padw=0, padh=0):
+    # Convert normalized segments into pixel segments, shape (n,2)
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = w * x[:, 0] + padw  # top left x
+    y[:, 1] = h * x[:, 1] + padh  # top left y
+    return y
+
+
+def segment2box(segment, width=640, height=640):
+    # Convert 1 segment label to 1 box label, applying inside-image constraint, i.e. (xy1, xy2, ...) to (xyxy)
+    x, y = segment.T  # segment xy
+    inside = (x >= 0) & (y >= 0) & (x <= width) & (y <= height)
+    x, y = x[inside], y[inside]
+    return np.array([x.min(), y.min(), x.max(), y.max()]) if any(x) else np.zeros((1, 4))  # xyxy
+
+
+def segments2boxes(segments):
+    # Convert segment labels to box labels, i.e. (cls, xy1, xy2, ...) to (cls, xywh)
+    boxes = []
+    for s in segments:
+        x, y = s.T  # segment xy
+        boxes.append([x.min(), y.min(), x.max(), y.max()])  # cls, xyxy
+    return xyxy2xywh(np.array(boxes))  # cls, xywh
+
+
+def resample_segments(segments, n=1000):
+    # Up-sample an (n,2) segment
+    for i, s in enumerate(segments):
+        x = np.linspace(0, len(s) - 1, n)
+        xp = np.arange(len(s))
+        segments[i] = np.concatenate([np.interp(x, xp, s[:, j]) for j in range(2)]).reshape(2, -1).T  # segment xy
+    return segments
+
+
+def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None):
+    # Rescale coords (xyxy) from img1_shape to img0_shape
+    if ratio_pad is None:  # calculate from img0_shape
+        gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1])  # gain = old / new
+        pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2  # wh padding
+    else:
+        gain = ratio_pad[0][0]
+        pad = ratio_pad[1]
+
+    coords[:, [0, 2]] -= pad[0]  # x padding
+    coords[:, [1, 3]] -= pad[1]  # y padding
+    coords[:, :4] /= gain
+    clip_coords(coords, img0_shape)
+    return coords
+
+
+def clip_coords(boxes, shape):
+    # Clip xyxy bounding boxes to image shape (height, width)
+    if isinstance(boxes, torch.Tensor):  # faster individually
+        boxes[:, 0].clamp_(0, shape[1])  # x1
+        boxes[:, 1].clamp_(0, shape[0])  # y1
+        boxes[:, 2].clamp_(0, shape[1])  # x2
+        boxes[:, 3].clamp_(0, shape[0])  # y2
+    else:  # np.array (faster grouped)
+        boxes[:, [0, 2]] = boxes[:, [0, 2]].clip(0, shape[1])  # x1, x2
+        boxes[:, [1, 3]] = boxes[:, [1, 3]].clip(0, shape[0])  # y1, y2
+
+
+def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=None, agnostic=False, multi_label=False,
+                        labels=(), max_det=300):
+    """Runs Non-Maximum Suppression (NMS) on inference results
+ + Returns: + list of detections, on (n,6) tensor per image [xyxy, conf, cls] + """ + + nc = prediction.shape[2] - 5 # number of classes + xc = prediction[..., 4] > conf_thres # candidates + + # Checks + assert 0 <= conf_thres <= 1, f'Invalid Confidence threshold {conf_thres}, valid values are between 0.0 and 1.0' + assert 0 <= iou_thres <= 1, f'Invalid IoU {iou_thres}, valid values are between 0.0 and 1.0' + + # Settings + min_wh, max_wh = 2, 7680 # (pixels) minimum and maximum box width and height + max_nms = 30000 # maximum number of boxes into torchvision.ops.nms() + time_limit = 10.0 # seconds to quit after + redundant = True # require redundant detections + multi_label &= nc > 1 # multiple labels per box (adds 0.5ms/img) + merge = False # use merge-NMS + + t = time.time() + output = [torch.zeros((0, 6), device=prediction.device)] * prediction.shape[0] + for xi, x in enumerate(prediction): # image index, image inference + # Apply constraints + x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x = x[xc[xi]] # confidence + + # Cat apriori labels if autolabelling + if labels and len(labels[xi]): + lb = labels[xi] + v = torch.zeros((len(lb), nc + 5), device=x.device) + v[:, :4] = lb[:, 1:5] # box + v[:, 4] = 1.0 # conf + v[range(len(lb)), lb[:, 0].long() + 5] = 1.0 # cls + x = torch.cat((x, v), 0) + + # If none remain process next image + if not x.shape[0]: + continue + + # Compute conf + x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf + + # Box (center x, center y, width, height) to (x1, y1, x2, y2) + box = xywh2xyxy(x[:, :4]) + + # Detections matrix nx6 (xyxy, conf, cls) + if multi_label: + i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T + x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + else: # best class only + conf, j = x[:, 5:].max(1, keepdim=True) + x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + + # Filter by class + if classes is not None: + x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)] + + # Apply finite constraint + # if not torch.isfinite(x).all(): + # x = x[torch.isfinite(x).all(1)] + + # Check shape + n = x.shape[0] # number of boxes + if not n: # no boxes + continue + elif n > max_nms: # excess boxes + x = x[x[:, 4].argsort(descending=True)[:max_nms]] # sort by confidence + + # Batched NMS + c = x[:, 5:6] * (0 if agnostic else max_wh) # classes + boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores + i = torchvision.ops.nms(boxes, scores, iou_thres) # NMS + if i.shape[0] > max_det: # limit detections + i = i[:max_det] + if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean) + # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4) + iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix + weights = iou * scores[None] # box weights + x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes + if redundant: + i = i[iou.sum(1) > 1] # require redundancy + + output[xi] = x[i] + if (time.time() - t) > time_limit: + LOGGER.warning(f'WARNING: NMS time limit {time_limit}s exceeded') + break # time limit exceeded + + return output + + +def strip_optimizer(f='best.pt', s=''): # from yolov5_utils.general import *; strip_optimizer() + # Strip optimizer from 'f' to finalize training, optionally save as 's' + x = torch.load(f, map_location=torch.device('cpu')) + if x.get('ema'): + x['model'] = x['ema'] # replace model with ema + for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates': # keys + 
x[k] = None + x['epoch'] = -1 + x['model'].half() # to FP16 + for p in x['model'].parameters(): + p.requires_grad = False + torch.save(x, s or f) + mb = os.path.getsize(s or f) / 1E6 # filesize + LOGGER.info(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB") + + +def print_mutation(results, hyp, save_dir, bucket, prefix=colorstr('evolve: ')): + evolve_csv = save_dir / 'evolve.csv' + evolve_yaml = save_dir / 'hyp_evolve.yaml' + keys = ('metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', + 'val/box_loss', 'val/obj_loss', 'val/cls_loss') + tuple(hyp.keys()) # [results + hyps] + keys = tuple(x.strip() for x in keys) + vals = results + tuple(hyp.values()) + n = len(keys) + + # Download (optional) + if bucket: + url = f'gs://{bucket}/evolve.csv' + if gsutil_getsize(url) > (evolve_csv.stat().st_size if evolve_csv.exists() else 0): + os.system(f'gsutil cp {url} {save_dir}') # download evolve.csv if larger than local + + # Log to evolve.csv + s = '' if evolve_csv.exists() else (('%20s,' * n % keys).rstrip(',') + '\n') # add header + with open(evolve_csv, 'a') as f: + f.write(s + ('%20.5g,' * n % vals).rstrip(',') + '\n') + + # Save yaml + with open(evolve_yaml, 'w') as f: + data = pd.read_csv(evolve_csv) + data = data.rename(columns=lambda x: x.strip()) # strip keys + i = np.argmax(fitness(data.values[:, :4])) # + generations = len(data) + f.write('# YOLOv5 Hyperparameter Evolution Results\n' + + f'# Best generation: {i}\n' + + f'# Last generation: {generations - 1}\n' + + '# ' + ', '.join(f'{x.strip():>20s}' for x in keys[:7]) + '\n' + + '# ' + ', '.join(f'{x:>20.5g}' for x in data.values[i, :7]) + '\n\n') + yaml.safe_dump(data.loc[i][7:].to_dict(), f, sort_keys=False) + + # Print to screen + LOGGER.info(prefix + f'{generations} generations finished, current result:\n' + + prefix + ', '.join(f'{x.strip():>20s}' for x in keys) + '\n' + + prefix + ', '.join(f'{x:20.5g}' for x in vals) + '\n\n') + + if bucket: + os.system(f'gsutil cp {evolve_csv} {evolve_yaml} gs://{bucket}') # upload + + +def apply_classifier(x, model, img, im0): + # Apply a second stage classifier to YOLO outputs + # Example model = torchvision.models.__dict__['efficientnet_b0'](pretrained=True).to(device).eval() + im0 = [im0] if isinstance(im0, np.ndarray) else im0 + for i, d in enumerate(x): # per image + if d is not None and len(d): + d = d.clone() + + # Reshape and pad cutouts + b = xyxy2xywh(d[:, :4]) # boxes + b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # rectangle to square + b[:, 2:] = b[:, 2:] * 1.3 + 30 # pad + d[:, :4] = xywh2xyxy(b).long() + + # Rescale boxes from img_size to im0 size + scale_coords(img.shape[2:], d[:, :4], im0[i].shape) + + # Classes + pred_cls1 = d[:, 5].long() + ims = [] + for j, a in enumerate(d): # per item + cutout = im0[i][int(a[1]):int(a[3]), int(a[0]):int(a[2])] + im = cv2.resize(cutout, (224, 224)) # BGR + # cv2.imwrite('example%i.jpg' % j, cutout) + + im = im[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32 + im /= 255 # 0 - 255 to 0.0 - 1.0 + ims.append(im) + + pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(1) # classifier prediction + x[i] = x[i][pred_cls1 == pred_cls2] # retain matching class detections + + return x + + +def increment_path(path, exist_ok=False, sep='', mkdir=False): + # Increment file or directory path, i.e. runs/exp --> runs/exp{sep}2, runs/exp{sep}3, ... etc. 
+ path = Path(path) # os-agnostic + if path.exists() and not exist_ok: + path, suffix = (path.with_suffix(''), path.suffix) if path.is_file() else (path, '') + dirs = glob.glob(f"{path}{sep}*") # similar paths + matches = [re.search(rf"%s{sep}(\d+)" % path.stem, d) for d in dirs] + i = [int(m.groups()[0]) for m in matches if m] # indices + n = max(i) + 1 if i else 2 # increment number + path = Path(f"{path}{sep}{n}{suffix}") # increment path + if mkdir: + path.mkdir(parents=True, exist_ok=True) # make directory + return path + + +# Variables +NCOLS = 0 if is_docker() else shutil.get_terminal_size().columns # terminal window size for tqdm diff --git a/detector/YOLOv5/utils/google_app_engine/Dockerfile b/detector/YOLOv5/utils/google_app_engine/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..0155618f475104e9858b81470339558156c94e13 --- /dev/null +++ b/detector/YOLOv5/utils/google_app_engine/Dockerfile @@ -0,0 +1,25 @@ +FROM gcr.io/google-appengine/python + +# Create a virtualenv for dependencies. This isolates these packages from +# system-level packages. +# Use -p python3 or -p python3.7 to select python version. Default is version 2. +RUN virtualenv /env -p python3 + +# Setting these environment variables are the same as running +# source /env/bin/activate. +ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH + +RUN apt-get update && apt-get install -y python-opencv + +# Copy the application's requirements.txt and run pip to install all +# dependencies into the virtualenv. +ADD requirements.txt /app/requirements.txt +RUN pip install -r /app/requirements.txt + +# Add the application source code. +ADD . /app + +# Run a WSGI server to serve the application. gunicorn must be declared as +# a dependency in requirements.txt. +CMD gunicorn -b :$PORT main:app diff --git a/detector/YOLOv5/utils/google_app_engine/app.yaml b/detector/YOLOv5/utils/google_app_engine/app.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5056b7c1186d6ad278957bbd6e976c3a0f169a30 --- /dev/null +++ b/detector/YOLOv5/utils/google_app_engine/app.yaml @@ -0,0 +1,14 @@ +runtime: custom +env: flex + +service: yolov5app + +liveness_check: + initial_delay_sec: 600 + +manual_scaling: + instances: 1 +resources: + cpu: 1 + memory_gb: 4 + disk_size_gb: 20 diff --git a/detector/YOLOv5/utils/loggers/__init__.py b/detector/YOLOv5/utils/loggers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f33e0c109027055376ad860c3ff7324eafedaadd --- /dev/null +++ b/detector/YOLOv5/utils/loggers/__init__.py @@ -0,0 +1,168 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Logging yolov5_utils +""" + +import os +import warnings +from threading import Thread + +import pkg_resources as pkg +import torch +from torch.utils.tensorboard import SummaryWriter + +from yolov5_utils.general import colorstr, emojis +from yolov5_utils.loggers.wandb.wandb_utils import WandbLogger +from yolov5_utils.plots import plot_images, plot_results +from yolov5_utils.torch_utils import de_parallel + +LOGGERS = ('csv', 'tb', 'wandb') # text-file, TensorBoard, Weights & Biases +RANK = int(os.getenv('RANK', -1)) + +try: + import wandb + + assert hasattr(wandb, '__version__') # verify package import not local dir + if pkg.parse_version(wandb.__version__) >= pkg.parse_version('0.12.2') and RANK in [0, -1]: + try: + wandb_login_success = wandb.login(timeout=30) + except wandb.errors.UsageError: # known non-TTY terminal issue + wandb_login_success = False + if not wandb_login_success: + wandb = None 
+except (ImportError, AssertionError): + wandb = None + + +class Loggers(): + # YOLOv5 Loggers class + def __init__(self, save_dir=None, weights=None, opt=None, hyp=None, logger=None, include=LOGGERS): + self.save_dir = save_dir + self.weights = weights + self.opt = opt + self.hyp = hyp + self.logger = logger # for printing results to console + self.include = include + self.keys = ['train/box_loss', 'train/obj_loss', 'train/cls_loss', # train loss + 'metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', # metrics + 'val/box_loss', 'val/obj_loss', 'val/cls_loss', # val loss + 'x/lr0', 'x/lr1', 'x/lr2'] # params + self.best_keys = ['best/epoch', 'best/precision', 'best/recall', 'best/mAP_0.5', 'best/mAP_0.5:0.95',] + for k in LOGGERS: + setattr(self, k, None) # init empty logger dictionary + self.csv = True # always log to csv + + # Message + if not wandb: + prefix = colorstr('Weights & Biases: ') + s = f"{prefix}run 'pip install wandb' to automatically track and visualize YOLOv5 🚀 runs (RECOMMENDED)" + print(emojis(s)) + + # TensorBoard + s = self.save_dir + if 'tb' in self.include and not self.opt.evolve: + prefix = colorstr('TensorBoard: ') + self.logger.info(f"{prefix}Start with 'tensorboard --logdir {s.parent}', view at http://localhost:6006/") + self.tb = SummaryWriter(str(s)) + + # W&B + if wandb and 'wandb' in self.include: + wandb_artifact_resume = isinstance(self.opt.resume, str) and self.opt.resume.startswith('wandb-artifact://') + run_id = torch.load(self.weights).get('wandb_id') if self.opt.resume and not wandb_artifact_resume else None + self.opt.hyp = self.hyp # add hyperparameters + self.wandb = WandbLogger(self.opt, run_id) + else: + self.wandb = None + + def on_pretrain_routine_end(self): + # Callback runs on pre-train routine end + paths = self.save_dir.glob('*labels*.jpg') # training labels + if self.wandb: + self.wandb.log({"Labels": [wandb.Image(str(x), caption=x.name) for x in paths]}) + + def on_train_batch_end(self, ni, model, imgs, targets, paths, plots, sync_bn): + # Callback runs on train batch end + if plots: + if ni == 0: + if not sync_bn: # tb.add_graph() --sync known issue https://github.com/ultralytics/yolov5/issues/3754 + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress jit trace warning + self.tb.add_graph(torch.jit.trace(de_parallel(model), imgs[0:1], strict=False), []) + if ni < 3: + f = self.save_dir / f'train_batch{ni}.jpg' # filename + Thread(target=plot_images, args=(imgs, targets, paths, f), daemon=True).start() + if self.wandb and ni == 10: + files = sorted(self.save_dir.glob('train*.jpg')) + self.wandb.log({'Mosaics': [wandb.Image(str(f), caption=f.name) for f in files if f.exists()]}) + + def on_train_epoch_end(self, epoch): + # Callback runs on train epoch end + if self.wandb: + self.wandb.current_epoch = epoch + 1 + + def on_val_image_end(self, pred, predn, path, names, im): + # Callback runs on val image end + if self.wandb: + self.wandb.val_one_image(pred, predn, path, names, im) + + def on_val_end(self): + # Callback runs on val end + if self.wandb: + files = sorted(self.save_dir.glob('val*.jpg')) + self.wandb.log({"Validation": [wandb.Image(str(f), caption=f.name) for f in files]}) + + def on_fit_epoch_end(self, vals, epoch, best_fitness, fi): + # Callback runs at the end of each fit (train+val) epoch + x = {k: v for k, v in zip(self.keys, vals)} # dict + if self.csv: + file = self.save_dir / 'results.csv' + n = len(x) + 1 # number of cols + s = '' if file.exists() else (('%20s,' * n % 
tuple(['epoch'] + self.keys)).rstrip(',') + '\n') # add header + with open(file, 'a') as f: + f.write(s + ('%20.5g,' * n % tuple([epoch] + vals)).rstrip(',') + '\n') + + if self.tb: + for k, v in x.items(): + self.tb.add_scalar(k, v, epoch) + + if self.wandb: + if best_fitness == fi: + best_results = [epoch] + vals[3:7] + for i, name in enumerate(self.best_keys): + self.wandb.wandb_run.summary[name] = best_results[i] # log best results in the summary + self.wandb.log(x) + self.wandb.end_epoch(best_result=best_fitness == fi) + + def on_model_save(self, last, epoch, final_epoch, best_fitness, fi): + # Callback runs on model save event + if self.wandb: + if ((epoch + 1) % self.opt.save_period == 0 and not final_epoch) and self.opt.save_period != -1: + self.wandb.log_model(last.parent, self.opt, epoch, fi, best_model=best_fitness == fi) + + def on_train_end(self, last, best, plots, epoch, results): + # Callback runs on training end + if plots: + plot_results(file=self.save_dir / 'results.csv') # save results.png + files = ['results.png', 'confusion_matrix.png', *(f'{x}_curve.png' for x in ('F1', 'PR', 'P', 'R'))] + files = [(self.save_dir / f) for f in files if (self.save_dir / f).exists()] # filter + + if self.tb: + import cv2 + for f in files: + self.tb.add_image(f.stem, cv2.imread(str(f))[..., ::-1], epoch, dataformats='HWC') + + if self.wandb: + self.wandb.log({k: v for k, v in zip(self.keys[3:10], results)}) # log best.pt val results + self.wandb.log({"Results": [wandb.Image(str(f), caption=f.name) for f in files]}) + # Calling wandb.log. TODO: Refactor this into WandbLogger.log_model + if not self.opt.evolve: + wandb.log_artifact(str(best if best.exists() else last), type='model', + name='run_' + self.wandb.wandb_run.id + '_model', + aliases=['latest', 'best', 'stripped']) + self.wandb.finish_run() + + def on_params_update(self, params): + # Update hyperparams or configs of the experiment + # params: A dict containing {param: value} pairs + if self.wandb: + self.wandb.wandb_run.config.update(params, allow_val_change=True) diff --git a/detector/YOLOv5/utils/loggers/wandb/README.md b/detector/YOLOv5/utils/loggers/wandb/README.md new file mode 100644 index 0000000000000000000000000000000000000000..63d999859e6d97684f6ec4ca46345d2e077c124d --- /dev/null +++ b/detector/YOLOv5/utils/loggers/wandb/README.md @@ -0,0 +1,152 @@ +📚 This guide explains how to use **Weights & Biases** (W&B) with YOLOv5 🚀. UPDATED 29 September 2021. +* [About Weights & Biases](#about-weights-&-biases) +* [First-Time Setup](#first-time-setup) +* [Viewing runs](#viewing-runs) +* [Disabling wandb](#disabling-wandb) +* [Advanced Usage: Dataset Versioning and Evaluation](#advanced-usage) +* [Reports: Share your work with the world!](#reports) + +## About Weights & Biases +Think of [W&B](https://wandb.ai/site?utm_campaign=repo_yolo_wandbtutorial) like GitHub for machine learning models. With a few lines of code, save everything you need to debug, compare and reproduce your models — architecture, hyperparameters, git commits, model weights, GPU usage, and even datasets and predictions. + +Used by top researchers including teams at OpenAI, Lyft, Github, and MILA, W&B is part of the new standard of best practices for machine learning. 
How W&B can help you optimize your machine learning workflows: + + * [Debug](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#Free-2) model performance in real time + * [GPU usage](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#System-4) visualized automatically + * [Custom charts](https://wandb.ai/wandb/customizable-charts/reports/Powerful-Custom-Charts-To-Debug-Model-Peformance--VmlldzoyNzY4ODI) for powerful, extensible visualization + * [Share insights](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#Share-8) interactively with collaborators + * [Optimize hyperparameters](https://docs.wandb.com/sweeps) efficiently + * [Track](https://docs.wandb.com/artifacts) datasets, pipelines, and production models + +## First-Time Setup +
+When you first train, W&B will prompt you to create a new account and will generate an **API key** for you. If you are an existing user you can retrieve your key from https://wandb.ai/authorize. This key is used to tell W&B where to log your data. You only need to supply your key once; it is then remembered on the same device.
+
+W&B will create a cloud **project** (default is 'YOLOv5') for your training runs, and each new training run will be given a unique run **name** within that project as project/name. You can also set your project and run name manually:
+
+```shell
+$ python train.py --project ... --name ...
+```
+
+YOLOv5 notebook examples are available in Colab and Kaggle.
+
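+If you prefer a non-interactive setup (e.g. on a remote machine), the key can also be supplied via W&B's standard `WANDB_API_KEY` environment variable; a minimal sketch, assuming a key copied from https://wandb.ai/authorize:
+
+```shell
+$ export WANDB_API_KEY=your_key_here
+$ python train.py --project my_project --name my_run
+```
+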
+ +## Viewing Runs +
+Run information streams from your environment to the W&B cloud console as you train. This allows you to monitor and even cancel runs in real time. All important information is logged:
+
+ * Training & Validation losses
+ * Metrics: Precision, Recall, mAP@0.5, mAP@0.5:0.95
+ * Learning Rate over time
+ * A bounding box debugging panel, showing the training progress over time
+ * GPU: Type, **GPU Utilization**, power, temperature, **CUDA memory usage**
+ * System: Disk I/O, CPU utilization, RAM usage
+ * Your trained model as a W&B Artifact
+ * Environment: OS and Python types, Git repository and state, **training command**
+

Weights & Biases dashboard

+
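+The run page shows these metrics interactively. To read them programmatically, the standard W&B public API can fetch them after (or during) a run; a small sketch, where the entity/project/run-id path is a placeholder:
+
+ ```python
+ import wandb
+
+ api = wandb.Api()
+ run = api.run("my_entity/YOLOv5/abc123")   # placeholder run path
+ print(run.summary.get("metrics/mAP_0.5"))  # final summary value of a logged metric
+ for row in run.history(keys=["metrics/mAP_0.5"], pandas=False):
+     print(row)                             # per-step values
+ ```
+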
+
+## Disabling wandb
+* Run `wandb disabled` inside the training directory before training; no wandb run will be created
+![Screenshot (84)](https://user-images.githubusercontent.com/15766192/143441777-c780bdd7-7cb4-4404-9559-b4316030a985.png)
+
+* To enable wandb again, run `wandb online`
+![Screenshot (85)](https://user-images.githubusercontent.com/15766192/143441866-7191b2cb-22f0-4e0f-ae64-2dc47dc13078.png)
+
+## Advanced Usage
+You can leverage the W&B artifacts and Tables integration to easily visualize and manage your datasets, models and training evaluations. Here are some quick examples to get you started.
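+The examples below build on two W&B primitives: **artifacts** (versioned file bundles) and **Tables** (interactive datasets). A minimal standalone sketch of both, with placeholder project, file and column names:
+
+ ```python
+ import wandb
+
+ run = wandb.init(project="YOLOv5", job_type="demo")      # placeholder project
+ artifact = wandb.Artifact("my_dataset", type="dataset")  # a versioned bundle of files
+ artifact.add_dir("data/images")                          # placeholder local directory
+ run.log_artifact(artifact)
+
+ table = wandb.Table(columns=["id", "image"])             # an interactive table
+ table.add_data(0, wandb.Image("data/images/0.jpg"))      # placeholder image path
+ run.log({"examples": table})
+ run.finish()
+ ```
+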
+

1: Train and Log Evaluation simultaneously

+ This is an extension of the previous section: in addition to uploading the dataset, training starts immediately and an evaluation Table is logged.
+ The evaluation Table compares your predictions and ground truths across the validation set for each epoch. It uses references to the already uploaded dataset,
+ so no images will be uploaded from your system more than once.
+ Usage:
+ `$ python train.py --upload_data val`
+
+![Screenshot from 2021-11-21 17-40-06](https://user-images.githubusercontent.com/15766192/142761183-c1696d8c-3f38-45ab-991a-bb0dfd98ae7d.png)
+
+ +

2: Visualize and Version Datasets

+ Log, visualize, dynamically query, and understand your data with W&B Tables. You can use the following command to log your dataset as a W&B Table. This will generate a `{dataset}_wandb.yaml` file which can be used to train from the dataset artifact; a sketch of the generated file follows the usage example below.
+
+ Usage:
+ `$ python utils/loggers/wandb/log_dataset.py --project ... --name ... --data ...`
+
+ ![Screenshot (64)](https://user-images.githubusercontent.com/15766192/128486078-d8433890-98a3-4d12-8986-b6c0e3fc64b9.png)
+
+ +
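+ A sketch of what the generated file might contain for a COCO-style dataset (illustrative; the artifact paths depend on your `--project`, and `nc`/`names` are copied from the source yaml):
+
+ ```yaml
+ # {dataset}_wandb.yaml (illustrative)
+ train: wandb-artifact://YOLOv5/train  # points at the uploaded train artifact
+ val: wandb-artifact://YOLOv5/val      # points at the uploaded val artifact
+ nc: 80
+ names: ['person', 'bicycle', 'car']   # class list, truncated here for brevity
+ ```
+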

3: Train using dataset artifact

+ When you upload a dataset as described in the first section, you get a new config file with `_wandb` appended to its name. This file contains the information
+ needed to train a model directly from the dataset artifact. Training this way also logs the evaluation Table described in the first section.
+
+ Usage:
+ `$ python train.py --data {data}_wandb.yaml`
+
+![Screenshot (72)](https://user-images.githubusercontent.com/15766192/128979739-4cf63aeb-a76f-483f-8861-1c0100b938a5.png)
+
+ +

4: Save model checkpoints as artifacts

+ To enable saving and versioning checkpoints of your experiment, pass `--save_period n` with the base command, where `n` is the checkpoint interval in epochs (see the sketch after this section).
+ You can also log both the dataset and model checkpoints simultaneously. If `--save_period` is not passed, only the final model will be logged.
+
+ Usage:
+ `$ python train.py --save_period 1`
+
+![Screenshot (68)](https://user-images.githubusercontent.com/15766192/128726138-ec6c1f60-639d-437d-b4ee-3acd9de47ef3.png)
+
+ +
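+ For example, a run that keeps a checkpoint artifact every 5 epochs might look like this (a sketch; the dataset and weights are the stock YOLOv5 COCO128 example):
+
+ ```shell
+ $ python train.py --data coco128.yaml --weights yolov5s.pt --epochs 50 --save_period 5
+ ```
+
+ Each saved checkpoint is versioned under the run's model artifact with aliases such as `latest`, `last` and `epoch <n>`, and the best checkpoint so far is additionally tagged `best` (see `log_model` in `wandb_utils.py`).
+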
+ +

5: Resume runs from checkpoint artifacts

+Any run can be resumed using artifacts if the `--resume` argument starts with the `wandb-artifact://` prefix followed by the run path, i.e. `wandb-artifact://username/project/runid`. This doesn't require the model checkpoint to be present on the local system; how the prefix is parsed is sketched after the usage example below.
+
+ Usage:
+ `$ python train.py --resume wandb-artifact://{run_path}`
+
+![Screenshot (70)](https://user-images.githubusercontent.com/15766192/128728988-4e84b355-6c87-41ae-a591-14aecf45343e.png)
+
+ +
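+ For reference, the run path is parsed roughly like this inside `wandb_utils.py` (a simplified sketch of `get_run_info`, not a public API):
+
+ ```python
+ from pathlib import Path
+
+ WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'
+
+ def get_run_info(run_path):
+     # 'wandb-artifact://entity/project/run_id' -> its components
+     p = Path(run_path[len(WANDB_ARTIFACT_PREFIX):])
+     entity, project, run_id = p.parent.parent.stem, p.parent.stem, p.stem
+     return entity, project, run_id, f'run_{run_id}_model'
+ ```
+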

6: Resume runs from dataset artifact & checkpoint artifacts

+ Local dataset and model checkpoints are not required, so this can be used to resume runs directly on a different device.
+ The syntax is the same as in the previous section, but you'll need to log both the dataset and model checkpoints as artifacts, i.e. either set `--upload_dataset` or
+ train from a `_wandb.yaml` file, and set `--save_period` (a combined sketch follows this section).
+
+ Usage:
+ `$ python train.py --resume wandb-artifact://{run_path}`
+
+![Screenshot (70)](https://user-images.githubusercontent.com/15766192/128728988-4e84b355-6c87-41ae-a591-14aecf45343e.png)
+
+ + + +
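+A combined sketch of this workflow (flag names as used above; dataset, weights and run path are placeholders):
+
+ ```shell
+ # first device: log the dataset and periodic checkpoints as artifacts
+ $ python train.py --data coco128.yaml --weights yolov5s.pt --upload_dataset --save_period 1
+ # any other device: resume entirely from the cloud
+ $ python train.py --resume wandb-artifact://{run_path}
+ ```
+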

## Reports

+W&B Reports can be created from your saved runs for sharing online. Once a report is created you will receive a link you can use to publicly share your results. Here is an example report created from the COCO128 tutorial trainings of all four YOLOv5 models ([link](https://wandb.ai/glenn-jocher/yolov5_tutorial/reports/YOLOv5-COCO128-Tutorial-Results--VmlldzozMDI5OTY)).
+
+Weights & Biases Reports
+
+
+## Environments
+
+YOLOv5 may be run in any of the following up-to-date verified environments (with all dependencies including [CUDA](https://developer.nvidia.com/cuda)/[CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/) and [PyTorch](https://pytorch.org/) preinstalled):
+
+- **Google Colab and Kaggle** notebooks with free GPU: Open In Colab Open In Kaggle
+- **Google Cloud** Deep Learning VM. See [GCP Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/GCP-Quickstart)
+- **Amazon** Deep Learning AMI. See [AWS Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart)
+- **Docker Image**. See [Docker Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/Docker-Quickstart) Docker Pulls
+
+
+## Status
+
+![CI CPU testing](https://github.com/ultralytics/yolov5/workflows/CI%20CPU%20testing/badge.svg)
+
+If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), validation ([val.py](https://github.com/ultralytics/yolov5/blob/master/val.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/export.py)) on macOS, Windows, and Ubuntu every 24 hours and on every commit.
diff --git a/detector/YOLOv5/utils/loggers/wandb/__init__.py b/detector/YOLOv5/utils/loggers/wandb/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/detector/YOLOv5/utils/loggers/wandb/log_dataset.py b/detector/YOLOv5/utils/loggers/wandb/log_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..06e81fb693072c99703e5c52b169892b7fd9a8cc --- /dev/null +++ b/detector/YOLOv5/utils/loggers/wandb/log_dataset.py @@ -0,0 +1,27 @@ +import argparse + +from wandb_utils import WandbLogger + +from utils.general import LOGGER + +WANDB_ARTIFACT_PREFIX = 'wandb-artifact://' + + +def create_dataset_artifact(opt): + logger = WandbLogger(opt, None, job_type='Dataset Creation') # TODO: return value unused + if not logger.wandb: + LOGGER.info("install wandb using `pip install wandb` to log the dataset") + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--data', type=str, default='data/coco128.yaml', help='data.yaml path') + parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset') + parser.add_argument('--project', type=str, default='YOLOv5', help='name of W&B Project') + parser.add_argument('--entity', default=None, help='W&B entity') + parser.add_argument('--name', type=str, default='log dataset', help='name of W&B run') + + opt = parser.parse_args() + opt.resume = False # Explicitly disallow resume check for dataset upload job + + create_dataset_artifact(opt) diff --git a/detector/YOLOv5/utils/loggers/wandb/sweep.py b/detector/YOLOv5/utils/loggers/wandb/sweep.py new file mode 100644 index 0000000000000000000000000000000000000000..206059bc30bff425fd3a7b2ee83a40a642a8e8c6 --- /dev/null +++ b/detector/YOLOv5/utils/loggers/wandb/sweep.py @@ -0,0 +1,41 @@ +import sys +from pathlib import Path + +import wandb + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +from train import parse_opt, train +from utils.callbacks import Callbacks +from utils.general import increment_path +from utils.torch_utils import select_device + + +def sweep(): + wandb.init() + # Get hyp dict from sweep agent + hyp_dict = vars(wandb.config).get("_items") + + # Workaround: get necessary opt args + opt = parse_opt(known=True) + opt.batch_size = hyp_dict.get("batch_size") + opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok or opt.evolve)) + opt.epochs = hyp_dict.get("epochs") + opt.nosave = True + opt.data = hyp_dict.get("data") + opt.weights = str(opt.weights) + opt.cfg = str(opt.cfg) + opt.data = str(opt.data) + opt.hyp = str(opt.hyp) + opt.project = str(opt.project) + device = select_device(opt.device, batch_size=opt.batch_size) + + # train + train(hyp_dict, opt, device, callbacks=Callbacks()) + + +if __name__ == "__main__": + sweep() diff --git a/detector/YOLOv5/utils/loggers/wandb/sweep.yaml b/detector/YOLOv5/utils/loggers/wandb/sweep.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c7790d75f6b2384b821e577a7159faab4b6ba7a1 --- /dev/null +++ b/detector/YOLOv5/utils/loggers/wandb/sweep.yaml @@ -0,0 +1,143 @@ +# Hyperparameters for training +# To set range- +# Provide min and max values as: +# parameter: +# +# min: scalar +# max: scalar +# OR +# +# Set a specific list of search space- +# parameter: +# values: [scalar1, scalar2, scalar3...] 
+# +# You can use grid, bayesian and hyperopt search strategy +# For more info on configuring sweeps visit - https://docs.wandb.ai/guides/sweeps/configuration + +program: utils/loggers/wandb/sweep.py +method: random +metric: + name: metrics/mAP_0.5 + goal: maximize + +parameters: + # hyperparameters: set either min, max range or values list + data: + value: "data/coco128.yaml" + batch_size: + values: [64] + epochs: + values: [10] + + lr0: + distribution: uniform + min: 1e-5 + max: 1e-1 + lrf: + distribution: uniform + min: 0.01 + max: 1.0 + momentum: + distribution: uniform + min: 0.6 + max: 0.98 + weight_decay: + distribution: uniform + min: 0.0 + max: 0.001 + warmup_epochs: + distribution: uniform + min: 0.0 + max: 5.0 + warmup_momentum: + distribution: uniform + min: 0.0 + max: 0.95 + warmup_bias_lr: + distribution: uniform + min: 0.0 + max: 0.2 + box: + distribution: uniform + min: 0.02 + max: 0.2 + cls: + distribution: uniform + min: 0.2 + max: 4.0 + cls_pw: + distribution: uniform + min: 0.5 + max: 2.0 + obj: + distribution: uniform + min: 0.2 + max: 4.0 + obj_pw: + distribution: uniform + min: 0.5 + max: 2.0 + iou_t: + distribution: uniform + min: 0.1 + max: 0.7 + anchor_t: + distribution: uniform + min: 2.0 + max: 8.0 + fl_gamma: + distribution: uniform + min: 0.0 + max: 0.1 + hsv_h: + distribution: uniform + min: 0.0 + max: 0.1 + hsv_s: + distribution: uniform + min: 0.0 + max: 0.9 + hsv_v: + distribution: uniform + min: 0.0 + max: 0.9 + degrees: + distribution: uniform + min: 0.0 + max: 45.0 + translate: + distribution: uniform + min: 0.0 + max: 0.9 + scale: + distribution: uniform + min: 0.0 + max: 0.9 + shear: + distribution: uniform + min: 0.0 + max: 10.0 + perspective: + distribution: uniform + min: 0.0 + max: 0.001 + flipud: + distribution: uniform + min: 0.0 + max: 1.0 + fliplr: + distribution: uniform + min: 0.0 + max: 1.0 + mosaic: + distribution: uniform + min: 0.0 + max: 1.0 + mixup: + distribution: uniform + min: 0.0 + max: 1.0 + copy_paste: + distribution: uniform + min: 0.0 + max: 1.0 diff --git a/detector/YOLOv5/utils/loggers/wandb/wandb_utils.py b/detector/YOLOv5/utils/loggers/wandb/wandb_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..3835436543d23b22ec694b293458c73b9db15cea --- /dev/null +++ b/detector/YOLOv5/utils/loggers/wandb/wandb_utils.py @@ -0,0 +1,562 @@ +"""Utilities and tools for tracking runs with Weights & Biases.""" + +import logging +import os +import sys +from contextlib import contextmanager +from pathlib import Path +from typing import Dict + +import yaml +from tqdm import tqdm + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +from utils.datasets import LoadImagesAndLabels, img2label_paths +from utils.general import LOGGER, check_dataset, check_file + +try: + import wandb + + assert hasattr(wandb, '__version__') # verify package import not local dir +except (ImportError, AssertionError): + wandb = None + +RANK = int(os.getenv('RANK', -1)) +WANDB_ARTIFACT_PREFIX = 'wandb-artifact://' + + +def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX): + return from_string[len(prefix):] + + +def check_wandb_config_file(data_config_file): + wandb_config = '_wandb.'.join(data_config_file.rsplit('.', 1)) # updated data.yaml path + if Path(wandb_config).is_file(): + return wandb_config + return data_config_file + + +def check_wandb_dataset(data_file): + is_trainset_wandb_artifact = False + 
is_valset_wandb_artifact = False + if check_file(data_file) and data_file.endswith('.yaml'): + with open(data_file, errors='ignore') as f: + data_dict = yaml.safe_load(f) + is_trainset_wandb_artifact = (isinstance(data_dict['train'], str) and + data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX)) + is_valset_wandb_artifact = (isinstance(data_dict['val'], str) and + data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX)) + if is_trainset_wandb_artifact or is_valset_wandb_artifact: + return data_dict + else: + return check_dataset(data_file) + + +def get_run_info(run_path): + run_path = Path(remove_prefix(run_path, WANDB_ARTIFACT_PREFIX)) + run_id = run_path.stem + project = run_path.parent.stem + entity = run_path.parent.parent.stem + model_artifact_name = 'run_' + run_id + '_model' + return entity, project, run_id, model_artifact_name + + +def check_wandb_resume(opt): + process_wandb_config_ddp_mode(opt) if RANK not in [-1, 0] else None + if isinstance(opt.resume, str): + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + if RANK not in [-1, 0]: # For resuming DDP runs + entity, project, run_id, model_artifact_name = get_run_info(opt.resume) + api = wandb.Api() + artifact = api.artifact(entity + '/' + project + '/' + model_artifact_name + ':latest') + modeldir = artifact.download() + opt.weights = str(Path(modeldir) / "last.pt") + return True + return None + + +def process_wandb_config_ddp_mode(opt): + with open(check_file(opt.data), errors='ignore') as f: + data_dict = yaml.safe_load(f) # data dict + train_dir, val_dir = None, None + if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX): + api = wandb.Api() + train_artifact = api.artifact(remove_prefix(data_dict['train']) + ':' + opt.artifact_alias) + train_dir = train_artifact.download() + train_path = Path(train_dir) / 'data/images/' + data_dict['train'] = str(train_path) + + if isinstance(data_dict['val'], str) and data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX): + api = wandb.Api() + val_artifact = api.artifact(remove_prefix(data_dict['val']) + ':' + opt.artifact_alias) + val_dir = val_artifact.download() + val_path = Path(val_dir) / 'data/images/' + data_dict['val'] = str(val_path) + if train_dir or val_dir: + ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml') + with open(ddp_data_path, 'w') as f: + yaml.safe_dump(data_dict, f) + opt.data = ddp_data_path + + +class WandbLogger(): + """Log training runs, datasets, models, and predictions to Weights & Biases. + + This logger sends information to W&B at wandb.ai. By default, this information + includes hyperparameters, system configuration and metrics, model metrics, + and basic data metrics and analyses. + + By providing additional command line arguments to train.py, datasets, + models and predictions can also be logged. 
+ + For more on how this logger is used, see the Weights & Biases documentation: + https://docs.wandb.com/guides/integrations/yolov5 + """ + + def __init__(self, opt, run_id=None, job_type='Training'): + """ + - Initialize WandbLogger instance + - Upload dataset if opt.upload_dataset is True + - Setup trainig processes if job_type is 'Training' + + arguments: + opt (namespace) -- Commandline arguments for this run + run_id (str) -- Run ID of W&B run to be resumed + job_type (str) -- To set the job_type for this run + + """ + # Pre-training routine -- + self.job_type = job_type + self.wandb, self.wandb_run = wandb, None if not wandb else wandb.run + self.val_artifact, self.train_artifact = None, None + self.train_artifact_path, self.val_artifact_path = None, None + self.result_artifact = None + self.val_table, self.result_table = None, None + self.bbox_media_panel_images = [] + self.val_table_path_map = None + self.max_imgs_to_log = 16 + self.wandb_artifact_data_dict = None + self.data_dict = None + # It's more elegant to stick to 1 wandb.init call, + # but useful config data is overwritten in the WandbLogger's wandb.init call + if isinstance(opt.resume, str): # checks resume from artifact + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + entity, project, run_id, model_artifact_name = get_run_info(opt.resume) + model_artifact_name = WANDB_ARTIFACT_PREFIX + model_artifact_name + assert wandb, 'install wandb to resume wandb runs' + # Resume wandb-artifact:// runs here| workaround for not overwriting wandb.config + self.wandb_run = wandb.init(id=run_id, + project=project, + entity=entity, + resume='allow', + allow_val_change=True) + opt.resume = model_artifact_name + elif self.wandb: + self.wandb_run = wandb.init(config=opt, + resume="allow", + project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem, + entity=opt.entity, + name=opt.name if opt.name != 'exp' else None, + job_type=job_type, + id=run_id, + allow_val_change=True) if not wandb.run else wandb.run + if self.wandb_run: + if self.job_type == 'Training': + if opt.upload_dataset: + if not opt.resume: + self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt) + + if opt.resume: + # resume from artifact + if isinstance(opt.resume, str) and opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + self.data_dict = dict(self.wandb_run.config.data_dict) + else: # local resume + self.data_dict = check_wandb_dataset(opt.data) + else: + self.data_dict = check_wandb_dataset(opt.data) + self.wandb_artifact_data_dict = self.wandb_artifact_data_dict or self.data_dict + + # write data_dict to config. useful for resuming from artifacts. Do this only when not resuming. + self.wandb_run.config.update({'data_dict': self.wandb_artifact_data_dict}, + allow_val_change=True) + self.setup_training(opt) + + if self.job_type == 'Dataset Creation': + self.wandb_run.config.update({"upload_dataset": True}) + self.data_dict = self.check_and_upload_dataset(opt) + + def check_and_upload_dataset(self, opt): + """ + Check if the dataset format is compatible and upload it as W&B artifact + + arguments: + opt (namespace)-- Commandline arguments for current run + + returns: + Updated dataset info dictionary where local dataset paths are replaced by WAND_ARFACT_PREFIX links. 
+ """ + assert wandb, 'Install wandb to upload dataset' + config_path = self.log_dataset_artifact(opt.data, + opt.single_cls, + 'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem) + with open(config_path, errors='ignore') as f: + wandb_data_dict = yaml.safe_load(f) + return wandb_data_dict + + def setup_training(self, opt): + """ + Setup the necessary processes for training YOLO models: + - Attempt to download model checkpoint and dataset artifacts if opt.resume stats with WANDB_ARTIFACT_PREFIX + - Update data_dict, to contain info of previous run if resumed and the paths of dataset artifact if downloaded + - Setup log_dict, initialize bbox_interval + + arguments: + opt (namespace) -- commandline arguments for this run + + """ + self.log_dict, self.current_epoch = {}, 0 + self.bbox_interval = opt.bbox_interval + if isinstance(opt.resume, str): + modeldir, _ = self.download_model_artifact(opt) + if modeldir: + self.weights = Path(modeldir) / "last.pt" + config = self.wandb_run.config + opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp, opt.imgsz = str( + self.weights), config.save_period, config.batch_size, config.bbox_interval, config.epochs,\ + config.hyp, config.imgsz + data_dict = self.data_dict + if self.val_artifact is None: # If --upload_dataset is set, use the existing artifact, don't download + self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'), + opt.artifact_alias) + self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'), + opt.artifact_alias) + + if self.train_artifact_path is not None: + train_path = Path(self.train_artifact_path) / 'data/images/' + data_dict['train'] = str(train_path) + if self.val_artifact_path is not None: + val_path = Path(self.val_artifact_path) / 'data/images/' + data_dict['val'] = str(val_path) + + if self.val_artifact is not None: + self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation") + columns = ["epoch", "id", "ground truth", "prediction"] + columns.extend(self.data_dict['names']) + self.result_table = wandb.Table(columns) + self.val_table = self.val_artifact.get("val") + if self.val_table_path_map is None: + self.map_val_table_path() + if opt.bbox_interval == -1: + self.bbox_interval = opt.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1 + if opt.evolve: + self.bbox_interval = opt.bbox_interval = opt.epochs + 1 + train_from_artifact = self.train_artifact_path is not None and self.val_artifact_path is not None + # Update the the data_dict to point to local artifacts dir + if train_from_artifact: + self.data_dict = data_dict + + def download_dataset_artifact(self, path, alias): + """ + download the model checkpoint artifact if the path starts with WANDB_ARTIFACT_PREFIX + + arguments: + path -- path of the dataset to be used for training + alias (str)-- alias of the artifact to be download/used for training + + returns: + (str, wandb.Artifact) -- path of the downladed dataset and it's corresponding artifact object if dataset + is found otherwise returns (None, None) + """ + if isinstance(path, str) and path.startswith(WANDB_ARTIFACT_PREFIX): + artifact_path = Path(remove_prefix(path, WANDB_ARTIFACT_PREFIX) + ":" + alias) + dataset_artifact = wandb.use_artifact(artifact_path.as_posix().replace("\\", "/")) + assert dataset_artifact is not None, "'Error: W&B dataset artifact doesn\'t exist'" + datadir = dataset_artifact.download() + return datadir, dataset_artifact + 
return None, None + + def download_model_artifact(self, opt): + """ + download the model checkpoint artifact if the resume path starts with WANDB_ARTIFACT_PREFIX + + arguments: + opt (namespace) -- Commandline arguments for this run + """ + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + model_artifact = wandb.use_artifact(remove_prefix(opt.resume, WANDB_ARTIFACT_PREFIX) + ":latest") + assert model_artifact is not None, 'Error: W&B model artifact doesn\'t exist' + modeldir = model_artifact.download() + # epochs_trained = model_artifact.metadata.get('epochs_trained') + total_epochs = model_artifact.metadata.get('total_epochs') + is_finished = total_epochs is None + assert not is_finished, 'training is finished, can only resume incomplete runs.' + return modeldir, model_artifact + return None, None + + def log_model(self, path, opt, epoch, fitness_score, best_model=False): + """ + Log the model checkpoint as W&B artifact + + arguments: + path (Path) -- Path of directory containing the checkpoints + opt (namespace) -- Command line arguments for this run + epoch (int) -- Current epoch number + fitness_score (float) -- fitness score for current epoch + best_model (boolean) -- Boolean representing if the current checkpoint is the best yet. + """ + model_artifact = wandb.Artifact('run_' + wandb.run.id + '_model', type='model', metadata={ + 'original_url': str(path), + 'epochs_trained': epoch + 1, + 'save period': opt.save_period, + 'project': opt.project, + 'total_epochs': opt.epochs, + 'fitness_score': fitness_score + }) + model_artifact.add_file(str(path / 'last.pt'), name='last.pt') + wandb.log_artifact(model_artifact, + aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else '']) + LOGGER.info(f"Saving model artifact on epoch {epoch + 1}") + + def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False): + """ + Log the dataset as W&B artifact and return the new data file with W&B links + + arguments: + data_file (str) -- the .yaml file with information about the dataset like - path, classes etc. + single_class (boolean) -- train multi-class data as single-class + project (str) -- project name. Used to construct the artifact path + overwrite_config (boolean) -- overwrites the data.yaml file if set to true otherwise creates a new + file with _wandb postfix. Eg -> data_wandb.yaml + + returns: + the new .yaml file with artifact links. 
it can be used to start training directly from artifacts + """ + upload_dataset = self.wandb_run.config.upload_dataset + log_val_only = isinstance(upload_dataset, str) and upload_dataset == 'val' + self.data_dict = check_dataset(data_file) # parse and check + data = dict(self.data_dict) + nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names']) + names = {k: v for k, v in enumerate(names)} # to index dictionary + + # log train set + if not log_val_only: + self.train_artifact = self.create_dataset_table(LoadImagesAndLabels( + data['train'], rect=True, batch_size=1), names, name='train') if data.get('train') else None + if data.get('train'): + data['train'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'train') + + self.val_artifact = self.create_dataset_table(LoadImagesAndLabels( + data['val'], rect=True, batch_size=1), names, name='val') if data.get('val') else None + if data.get('val'): + data['val'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'val') + + path = Path(data_file) + # create a _wandb.yaml file with artifacts links if both train and test set are logged + if not log_val_only: + path = (path.stem if overwrite_config else path.stem + '_wandb') + '.yaml' # updated data.yaml path + path = ROOT / 'data' / path + data.pop('download', None) + data.pop('path', None) + with open(path, 'w') as f: + yaml.safe_dump(data, f) + LOGGER.info(f"Created dataset config file {path}") + + if self.job_type == 'Training': # builds correct artifact pipeline graph + if not log_val_only: + self.wandb_run.log_artifact( + self.train_artifact) # calling use_artifact downloads the dataset. NOT NEEDED! + self.wandb_run.use_artifact(self.val_artifact) + self.val_artifact.wait() + self.val_table = self.val_artifact.get('val') + self.map_val_table_path() + else: + self.wandb_run.log_artifact(self.train_artifact) + self.wandb_run.log_artifact(self.val_artifact) + return path + + def map_val_table_path(self): + """ + Map the validation dataset Table like name of file -> it's id in the W&B Table. + Useful for - referencing artifacts for evaluation. + """ + self.val_table_path_map = {} + LOGGER.info("Mapping dataset") + for i, data in enumerate(tqdm(self.val_table.data)): + self.val_table_path_map[data[3]] = data[0] + + def create_dataset_table(self, dataset: LoadImagesAndLabels, class_to_id: Dict[int, str], name: str = 'dataset'): + """ + Create and return W&B artifact containing W&B Table of the dataset. 
+ + arguments: + dataset -- instance of LoadImagesAndLabels class used to iterate over the data to build Table + class_to_id -- hash map that maps class ids to labels + name -- name of the artifact + + returns: + dataset artifact to be logged or used + """ + # TODO: Explore multiprocessing to slpit this loop parallely| This is essential for speeding up the the logging + artifact = wandb.Artifact(name=name, type="dataset") + img_files = tqdm([dataset.path]) if isinstance(dataset.path, str) and Path(dataset.path).is_dir() else None + img_files = tqdm(dataset.img_files) if not img_files else img_files + for img_file in img_files: + if Path(img_file).is_dir(): + artifact.add_dir(img_file, name='data/images') + labels_path = 'labels'.join(dataset.path.rsplit('images', 1)) + artifact.add_dir(labels_path, name='data/labels') + else: + artifact.add_file(img_file, name='data/images/' + Path(img_file).name) + label_file = Path(img2label_paths([img_file])[0]) + artifact.add_file(str(label_file), + name='data/labels/' + label_file.name) if label_file.exists() else None + table = wandb.Table(columns=["id", "train_image", "Classes", "name"]) + class_set = wandb.Classes([{'id': id, 'name': name} for id, name in class_to_id.items()]) + for si, (img, labels, paths, shapes) in enumerate(tqdm(dataset)): + box_data, img_classes = [], {} + for cls, *xywh in labels[:, 1:].tolist(): + cls = int(cls) + box_data.append({"position": {"middle": [xywh[0], xywh[1]], "width": xywh[2], "height": xywh[3]}, + "class_id": cls, + "box_caption": "%s" % (class_to_id[cls])}) + img_classes[cls] = class_to_id[cls] + boxes = {"ground_truth": {"box_data": box_data, "class_labels": class_to_id}} # inference-space + table.add_data(si, wandb.Image(paths, classes=class_set, boxes=boxes), list(img_classes.values()), + Path(paths).name) + artifact.add(table, name) + return artifact + + def log_training_progress(self, predn, path, names): + """ + Build evaluation Table. Uses reference from validation dataset table. + + arguments: + predn (list): list of predictions in the native space in the format - [xmin, ymin, xmax, ymax, confidence, class] + path (str): local path of the current evaluation image + names (dict(int, str)): hash map that maps class ids to labels + """ + class_set = wandb.Classes([{'id': id, 'name': name} for id, name in names.items()]) + box_data = [] + avg_conf_per_class = [0] * len(self.data_dict['names']) + pred_class_count = {} + for *xyxy, conf, cls in predn.tolist(): + if conf >= 0.25: + cls = int(cls) + box_data.append( + {"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]}, + "class_id": cls, + "box_caption": f"{names[cls]} {conf:.3f}", + "scores": {"class_score": conf}, + "domain": "pixel"}) + avg_conf_per_class[cls] += conf + + if cls in pred_class_count: + pred_class_count[cls] += 1 + else: + pred_class_count[cls] = 1 + + for pred_class in pred_class_count.keys(): + avg_conf_per_class[pred_class] = avg_conf_per_class[pred_class] / pred_class_count[pred_class] + + boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space + id = self.val_table_path_map[Path(path).name] + self.result_table.add_data(self.current_epoch, + id, + self.val_table.data[id][1], + wandb.Image(self.val_table.data[id][1], boxes=boxes, classes=class_set), + *avg_conf_per_class + ) + + def val_one_image(self, pred, predn, path, names, im): + """ + Log validation data for one image. 
updates the result Table if validation dataset is uploaded and log bbox media panel + + arguments: + pred (list): list of scaled predictions in the format - [xmin, ymin, xmax, ymax, confidence, class] + predn (list): list of predictions in the native space - [xmin, ymin, xmax, ymax, confidence, class] + path (str): local path of the current evaluation image + """ + if self.val_table and self.result_table: # Log Table if Val dataset is uploaded as artifact + self.log_training_progress(predn, path, names) + + if len(self.bbox_media_panel_images) < self.max_imgs_to_log and self.current_epoch > 0: + if self.current_epoch % self.bbox_interval == 0: + box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]}, + "class_id": int(cls), + "box_caption": f"{names[int(cls)]} {conf:.3f}", + "scores": {"class_score": conf}, + "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()] + boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space + self.bbox_media_panel_images.append(wandb.Image(im, boxes=boxes, caption=path.name)) + + def log(self, log_dict): + """ + save the metrics to the logging dictionary + + arguments: + log_dict (Dict) -- metrics/media to be logged in current step + """ + if self.wandb_run: + for key, value in log_dict.items(): + self.log_dict[key] = value + + def end_epoch(self, best_result=False): + """ + commit the log_dict, model artifacts and Tables to W&B and flush the log_dict. + + arguments: + best_result (boolean): Boolean representing if the result of this evaluation is best or not + """ + if self.wandb_run: + with all_logging_disabled(): + if self.bbox_media_panel_images: + self.log_dict["BoundingBoxDebugger"] = self.bbox_media_panel_images + try: + wandb.log(self.log_dict) + except BaseException as e: + LOGGER.info( + f"An error occurred in wandb logger. The training will proceed without interruption. More info\n{e}") + self.wandb_run.finish() + self.wandb_run = None + + self.log_dict = {} + self.bbox_media_panel_images = [] + if self.result_artifact: + self.result_artifact.add(self.result_table, 'result') + wandb.log_artifact(self.result_artifact, aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), + ('best' if best_result else '')]) + + wandb.log({"evaluation": self.result_table}) + columns = ["epoch", "id", "ground truth", "prediction"] + columns.extend(self.data_dict['names']) + self.result_table = wandb.Table(columns) + self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation") + + def finish_run(self): + """ + Log metrics if any and finish the current W&B run + """ + if self.wandb_run: + if self.log_dict: + with all_logging_disabled(): + wandb.log(self.log_dict) + wandb.run.finish() + + +@contextmanager +def all_logging_disabled(highest_level=logging.CRITICAL): + """ source - https://gist.github.com/simon-weber/7853144 + A context manager that will prevent any logging messages triggered during the body from being processed. + :param highest_level: the maximum logging level in use. + This would only need to be changed if a custom level greater than CRITICAL is defined. 
+ """ + previous_level = logging.root.manager.disable + logging.disable(highest_level) + try: + yield + finally: + logging.disable(previous_level) diff --git a/detector/YOLOv5/utils/loss.py b/detector/YOLOv5/utils/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..e15bd756ddc042d1edcb0abf1a31d0974ca57c08 --- /dev/null +++ b/detector/YOLOv5/utils/loss.py @@ -0,0 +1,224 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Loss functions +""" + +import torch +import torch.nn as nn + +from detector.YOLOv5.utils.metrics import bbox_iou +from detector.YOLOv5.utils.torch_utils import de_parallel + + +def smooth_BCE(eps=0.1): # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441 + # return positive, negative label smoothing BCE targets + return 1.0 - 0.5 * eps, 0.5 * eps + + +class BCEBlurWithLogitsLoss(nn.Module): + # BCEwithLogitLoss() with reduced missing label effects. + def __init__(self, alpha=0.05): + super().__init__() + self.loss_fcn = nn.BCEWithLogitsLoss(reduction='none') # must be nn.BCEWithLogitsLoss() + self.alpha = alpha + + def forward(self, pred, true): + loss = self.loss_fcn(pred, true) + pred = torch.sigmoid(pred) # prob from logits + dx = pred - true # reduce only missing label effects + # dx = (pred - true).abs() # reduce missing label and false label effects + alpha_factor = 1 - torch.exp((dx - 1) / (self.alpha + 1e-4)) + loss *= alpha_factor + return loss.mean() + + +class FocalLoss(nn.Module): + # Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5) + def __init__(self, loss_fcn, gamma=1.5, alpha=0.25): + super().__init__() + self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss() + self.gamma = gamma + self.alpha = alpha + self.reduction = loss_fcn.reduction + self.loss_fcn.reduction = 'none' # required to apply FL to each element + + def forward(self, pred, true): + loss = self.loss_fcn(pred, true) + # p_t = torch.exp(-loss) + # loss *= self.alpha * (1.000001 - p_t) ** self.gamma # non-zero power for gradient stability + + # TF implementation https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/losses/focal_loss.py + pred_prob = torch.sigmoid(pred) # prob from logits + p_t = true * pred_prob + (1 - true) * (1 - pred_prob) + alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha) + modulating_factor = (1.0 - p_t) ** self.gamma + loss *= alpha_factor * modulating_factor + + if self.reduction == 'mean': + return loss.mean() + elif self.reduction == 'sum': + return loss.sum() + else: # 'none' + return loss + + +class QFocalLoss(nn.Module): + # Wraps Quality focal loss around existing loss_fcn(), i.e. 
criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5) + def __init__(self, loss_fcn, gamma=1.5, alpha=0.25): + super().__init__() + self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss() + self.gamma = gamma + self.alpha = alpha + self.reduction = loss_fcn.reduction + self.loss_fcn.reduction = 'none' # required to apply FL to each element + + def forward(self, pred, true): + loss = self.loss_fcn(pred, true) + + pred_prob = torch.sigmoid(pred) # prob from logits + alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha) + modulating_factor = torch.abs(true - pred_prob) ** self.gamma + loss *= alpha_factor * modulating_factor + + if self.reduction == 'mean': + return loss.mean() + elif self.reduction == 'sum': + return loss.sum() + else: # 'none' + return loss + + +class ComputeLoss: + # Compute losses + def __init__(self, model, autobalance=False): + self.sort_obj_iou = False + device = next(model.parameters()).device # get model device + h = model.hyp # hyperparameters + + # Define criteria + BCEcls = nn.BCEWithLogitsLoss(pos_weight=torch.tensor([h['cls_pw']], device=device)) + BCEobj = nn.BCEWithLogitsLoss(pos_weight=torch.tensor([h['obj_pw']], device=device)) + + # Class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3 + self.cp, self.cn = smooth_BCE(eps=h.get('label_smoothing', 0.0)) # positive, negative BCE targets + + # Focal loss + g = h['fl_gamma'] # focal loss gamma + if g > 0: + BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g) + + det = de_parallel(model).model[-1] # Detect() module + self.balance = {3: [4.0, 1.0, 0.4]}.get(det.nl, [4.0, 1.0, 0.25, 0.06, 0.02]) # P3-P7 + self.ssi = list(det.stride).index(16) if autobalance else 0 # stride 16 index + self.BCEcls, self.BCEobj, self.gr, self.hyp, self.autobalance = BCEcls, BCEobj, 1.0, h, autobalance + for k in 'na', 'nc', 'nl', 'anchors': + setattr(self, k, getattr(det, k)) + + def __call__(self, p, targets): # predictions, targets, model + device = targets.device + lcls, lbox, lobj = torch.zeros(1, device=device), torch.zeros(1, device=device), torch.zeros(1, device=device) + tcls, tbox, indices, anchors = self.build_targets(p, targets) # targets + + # Losses + for i, pi in enumerate(p): # layer index, layer predictions + b, a, gj, gi = indices[i] # image, anchor, gridy, gridx + tobj = torch.zeros_like(pi[..., 0], device=device) # target obj + + n = b.shape[0] # number of targets + if n: + ps = pi[b, a, gj, gi] # prediction subset corresponding to targets + + # Regression + pxy = ps[:, :2].sigmoid() * 2 - 0.5 + pwh = (ps[:, 2:4].sigmoid() * 2) ** 2 * anchors[i] + pbox = torch.cat((pxy, pwh), 1) # predicted box + iou = bbox_iou(pbox.T, tbox[i], x1y1x2y2=False, CIoU=True) # iou(prediction, target) + lbox += (1.0 - iou).mean() # iou loss + + # Objectness + score_iou = iou.detach().clamp(0).type(tobj.dtype) + if self.sort_obj_iou: + sort_id = torch.argsort(score_iou) + b, a, gj, gi, score_iou = b[sort_id], a[sort_id], gj[sort_id], gi[sort_id], score_iou[sort_id] + tobj[b, a, gj, gi] = (1.0 - self.gr) + self.gr * score_iou # iou ratio + + # Classification + if self.nc > 1: # cls loss (only if multiple classes) + t = torch.full_like(ps[:, 5:], self.cn, device=device) # targets + t[range(n), tcls[i]] = self.cp + lcls += self.BCEcls(ps[:, 5:], t) # BCE + + # Append targets to text file + # with open('targets.txt', 'a') as file: + # [file.write('%11.5g ' * 4 % tuple(x) + '\n') for x in torch.cat((txy[i], twh[i]), 1)] + + obji = self.BCEobj(pi[..., 4], tobj) + lobj += obji * self.balance[i] # obj loss + if 
self.autobalance:
+                    self.balance[i] = self.balance[i] * 0.9999 + 0.0001 / obji.detach().item()
+
+        if self.autobalance:
+            self.balance = [x / self.balance[self.ssi] for x in self.balance]
+        lbox *= self.hyp['box']
+        lobj *= self.hyp['obj']
+        lcls *= self.hyp['cls']
+        bs = tobj.shape[0]  # batch size
+
+        # https://github.com/ultralytics/yolov5/issues/11147
+        # total loss over the whole batch
+        return (lbox + lobj + lcls) * bs, torch.cat((lbox, lobj, lcls)).detach()
+
+    def build_targets(self, p, targets):
+        # Build targets for compute_loss(), input targets(image,class,x,y,w,h)
+        na, nt = self.na, targets.shape[0]  # number of anchors, targets
+        tcls, tbox, indices, anch = [], [], [], []
+        gain = torch.ones(7, device=targets.device).long()  # normalized to gridspace gain
+        ai = torch.arange(na, device=targets.device).float().view(na, 1).repeat(1, nt)  # same as .repeat_interleave(nt)
+        targets = torch.cat((targets.repeat(na, 1, 1), ai[:, :, None]), 2)  # append anchor indices
+
+        g = 0.5  # bias
+        off = torch.tensor([[0, 0],
+                            [1, 0], [0, 1], [-1, 0], [0, -1],  # j,k,l,m
+                            # [1, 1], [1, -1], [-1, 1], [-1, -1],  # jk,jm,lk,lm
+                            ], device=targets.device).float() * g  # offsets
+
+        for i in range(self.nl):
+            anchors = self.anchors[i]
+            gain[2:6] = torch.tensor(p[i].shape)[[3, 2, 3, 2]]  # xyxy gain
+
+            # Match targets to anchors
+            t = targets * gain
+            if nt:
+                # Matches
+                r = t[:, :, 4:6] / anchors[:, None]  # wh ratio
+                j = torch.max(r, 1 / r).max(2)[0] < self.hyp['anchor_t']  # compare
+                # j = wh_iou(anchors, t[:, 4:6]) > model.hyp['iou_t']  # iou(3,n)=wh_iou(anchors(3,2), gwh(n,2))
+                t = t[j]  # filter
+
+                # Offsets
+                gxy = t[:, 2:4]  # grid xy
+                gxi = gain[[2, 3]] - gxy  # inverse
+                j, k = ((gxy % 1 < g) & (gxy > 1)).T
+                l, m = ((gxi % 1 < g) & (gxi > 1)).T
+                j = torch.stack((torch.ones_like(j), j, k, l, m))
+                t = t.repeat((5, 1, 1))[j]
+                offsets = (torch.zeros_like(gxy)[None] + off[:, None])[j]
+            else:
+                t = targets[0]
+                offsets = 0
+
+            # Define
+            b, c = t[:, :2].long().T  # image, class
+            gxy = t[:, 2:4]  # grid xy
+            gwh = t[:, 4:6]  # grid wh
+            gij = (gxy - offsets).long()
+            gi, gj = gij.T  # grid xy indices
+
+            # Append
+            a = t[:, 6].long()  # anchor indices
+            indices.append((b, a, gj.clamp_(0, gain[3] - 1), gi.clamp_(0, gain[2] - 1)))  # image, anchor, grid indices
+            tbox.append(torch.cat((gxy - gij, gwh), 1))  # box
+            anch.append(anchors[a])  # anchors
+            tcls.append(c)  # class
+
+        return tcls, tbox, indices, anch
diff --git a/detector/YOLOv5/utils/metrics.py b/detector/YOLOv5/utils/metrics.py
new file mode 100644
index 0000000000000000000000000000000000000000..b996c5713fc31bff20f18cce4162bfefd8946c7f
--- /dev/null
+++ b/detector/YOLOv5/utils/metrics.py
@@ -0,0 +1,342 @@
+# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
+"""
+Model validation metrics
+"""
+
+import math
+import warnings
+from pathlib import Path
+
+import matplotlib.pyplot as plt
+import numpy as np
+import torch
+
+
+def fitness(x):
+    # Model fitness as a weighted combination of metrics
+    w = [0.0, 0.0, 0.1, 0.9]  # weights for [P, R, mAP@0.5, mAP@0.5:0.95]
+    return (x[:, :4] * w).sum(1)
+
+
+def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16):
+    """ Compute the average precision, given the recall and precision curves.
+    Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
+    # Arguments
+        tp:  True positives (nparray, nx1 or nx10).
+        conf:  Objectness value from 0-1 (nparray).
+        pred_cls:  Predicted object classes (nparray).
+        target_cls:  True object classes (nparray).
+        plot:  Plot precision-recall curve at mAP@0.5
+        save_dir:  Plot save directory
+    # Returns
+        The average precision as computed in py-faster-rcnn.
+    """
+
+    # Sort by objectness
+    i = np.argsort(-conf)
+    tp, conf, pred_cls = tp[i], conf[i], pred_cls[i]
+
+    # Find unique classes
+    unique_classes, nt = np.unique(target_cls, return_counts=True)
+    nc = unique_classes.shape[0]  # number of classes, number of detections
+
+    # Create Precision-Recall curve and compute AP for each class
+    px, py = np.linspace(0, 1, 1000), []  # for plotting
+    ap, p, r = np.zeros((nc, tp.shape[1])), np.zeros((nc, 1000)), np.zeros((nc, 1000))
+    for ci, c in enumerate(unique_classes):
+        i = pred_cls == c
+        n_l = nt[ci]  # number of labels
+        n_p = i.sum()  # number of predictions
+
+        if n_p == 0 or n_l == 0:
+            continue
+        else:
+            # Accumulate FPs and TPs
+            fpc = (1 - tp[i]).cumsum(0)
+            tpc = tp[i].cumsum(0)
+
+            # Recall
+            recall = tpc / (n_l + eps)  # recall curve
+            r[ci] = np.interp(-px, -conf[i], recall[:, 0], left=0)  # negative x, xp because xp decreases
+
+            # Precision
+            precision = tpc / (tpc + fpc)  # precision curve
+            p[ci] = np.interp(-px, -conf[i], precision[:, 0], left=1)  # p at pr_score
+
+            # AP from recall-precision curve
+            for j in range(tp.shape[1]):
+                ap[ci, j], mpre, mrec = compute_ap(recall[:, j], precision[:, j])
+                if plot and j == 0:
+                    py.append(np.interp(px, mrec, mpre))  # precision at mAP@0.5
+
+    # Compute F1 (harmonic mean of precision and recall)
+    f1 = 2 * p * r / (p + r + eps)
+    names = [v for k, v in names.items() if k in unique_classes]  # list: only classes that have data
+    names = {i: v for i, v in enumerate(names)}  # to dict
+    if plot:
+        plot_pr_curve(px, py, ap, Path(save_dir) / 'PR_curve.png', names)
+        plot_mc_curve(px, f1, Path(save_dir) / 'F1_curve.png', names, ylabel='F1')
+        plot_mc_curve(px, p, Path(save_dir) / 'P_curve.png', names, ylabel='Precision')
+        plot_mc_curve(px, r, Path(save_dir) / 'R_curve.png', names, ylabel='Recall')
+
+    i = f1.mean(0).argmax()  # max F1 index
+    p, r, f1 = p[:, i], r[:, i], f1[:, i]
+    tp = (r * nt).round()  # true positives
+    fp = (tp / (p + eps) - tp).round()  # false positives
+    return tp, fp, p, r, f1, ap, unique_classes.astype('int32')
+
+
+def compute_ap(recall, precision):
+    """ Compute the average precision, given the recall and precision curves
+    # Arguments
+        recall:    The recall curve (list)
+        precision: The precision curve (list)
+    # Returns
+        Average precision, precision curve, recall curve
+    """
+
+    # Append sentinel values to beginning and end
+    mrec = np.concatenate(([0.0], recall, [1.0]))
+    mpre = np.concatenate(([1.0], precision, [0.0]))
+
+    # Compute the precision envelope
+    mpre = np.flip(np.maximum.accumulate(np.flip(mpre)))
+
+    # Integrate area under curve
+    method = 'interp'  # methods: 'continuous', 'interp'
+    if method == 'interp':
+        x = np.linspace(0, 1, 101)  # 101-point interp (COCO)
+        ap = np.trapz(np.interp(x, mrec, mpre), x)  # integrate
+    else:  # 'continuous'
+        i = np.where(mrec[1:] != mrec[:-1])[0]  # points where x axis (recall) changes
+        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])  # area under curve
+
+    return ap, mpre, mrec
+
+
+class ConfusionMatrix:
+    # Updated version of https://github.com/kaanakan/object_detection_confusion_matrix
+    def __init__(self, nc, conf=0.25, iou_thres=0.45):
+        self.matrix = np.zeros((nc + 1, nc + 1))
+        self.nc = nc  # number of classes
+        self.conf = conf
+        self.iou_thres = iou_thres
+
+    def process_batch(self, detections, labels):
+        """
+        Return intersection-over-union (Jaccard index) of boxes.
+ Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + detections (Array[N, 6]), x1, y1, x2, y2, conf, class + labels (Array[M, 5]), class, x1, y1, x2, y2 + Returns: + None, updates confusion matrix accordingly + """ + detections = detections[detections[:, 4] > self.conf] + gt_classes = labels[:, 0].int() + detection_classes = detections[:, 5].int() + iou = box_iou(labels[:, 1:], detections[:, :4]) + + x = torch.where(iou > self.iou_thres) + if x[0].shape[0]: + matches = torch.cat((torch.stack(x, 1), iou[x[0], x[1]][:, None]), 1).cpu().numpy() + if x[0].shape[0] > 1: + matches = matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 1], return_index=True)[1]] + matches = matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 0], return_index=True)[1]] + else: + matches = np.zeros((0, 3)) + + n = matches.shape[0] > 0 + m0, m1, _ = matches.transpose().astype(np.int16) + for i, gc in enumerate(gt_classes): + j = m0 == i + if n and sum(j) == 1: + self.matrix[detection_classes[m1[j]], gc] += 1 # correct + else: + self.matrix[self.nc, gc] += 1 # background FP + + if n: + for i, dc in enumerate(detection_classes): + if not any(m1 == i): + self.matrix[dc, self.nc] += 1 # background FN + + def matrix(self): + return self.matrix + + def tp_fp(self): + tp = self.matrix.diagonal() # true positives + fp = self.matrix.sum(1) - tp # false positives + # fn = self.matrix.sum(0) - tp # false negatives (missed detections) + return tp[:-1], fp[:-1] # remove background class + + def plot(self, normalize=True, save_dir='', names=()): + try: + import seaborn as sn + + array = self.matrix / ((self.matrix.sum(0).reshape(1, -1) + 1E-9) if normalize else 1) # normalize columns + array[array < 0.005] = np.nan # don't annotate (would appear as 0.00) + + fig = plt.figure(figsize=(12, 9), tight_layout=True) + nc, nn = self.nc, len(names) # number of classes, names + sn.set(font_scale=1.0 if nc < 50 else 0.8) # for label size + labels = (0 < nn < 99) and (nn == nc) # apply names to ticklabels + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress empty matrix RuntimeWarning: All-NaN slice encountered + sn.heatmap(array, annot=nc < 30, annot_kws={"size": 8}, cmap='Blues', fmt='.2f', square=True, vmin=0.0, + xticklabels=names + ['background FP'] if labels else "auto", + yticklabels=names + ['background FN'] if labels else "auto").set_facecolor((1, 1, 1)) + fig.axes[0].set_xlabel('True') + fig.axes[0].set_ylabel('Predicted') + fig.savefig(Path(save_dir) / 'confusion_matrix.png', dpi=250) + plt.close() + except Exception as e: + print(f'WARNING: ConfusionMatrix plot failure: {e}') + + def print(self): + for i in range(self.nc + 1): + print(' '.join(map(str, self.matrix[i]))) + + +def bbox_iou(box1, box2, x1y1x2y2=True, GIoU=False, DIoU=False, CIoU=False, eps=1e-7): + # Returns the IoU of box1 to box2. 
box1 is 4, box2 is nx4 + box2 = box2.T + + # Get the coordinates of bounding boxes + if x1y1x2y2: # x1, y1, x2, y2 = box1 + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + else: # transform from xywh to xyxy + b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2 + b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2 + b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2 + b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2 + + # Intersection area + inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \ + (torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0) + + # Union Area + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + union = w1 * h1 + w2 * h2 - inter + eps + + iou = inter / union + if CIoU or DIoU or GIoU: + cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width + ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height + if CIoU or DIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1 + c2 = cw ** 2 + ch ** 2 + eps # convex diagonal squared + rho2 = ((b2_x1 + b2_x2 - b1_x1 - b1_x2) ** 2 + + (b2_y1 + b2_y2 - b1_y1 - b1_y2) ** 2) / 4 # center distance squared + if CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47 + v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + with torch.no_grad(): + alpha = v / (v - iou + (1 + eps)) + return iou - (rho2 / c2 + v * alpha) # CIoU + return iou - rho2 / c2 # DIoU + c_area = cw * ch + eps # convex area + return iou - (c_area - union) / c_area # GIoU https://arxiv.org/pdf/1902.09630.pdf + return iou # IoU + + +def box_iou(box1, box2): + # https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py + """ + Return intersection-over-union (Jaccard index) of boxes. + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + box1 (Tensor[N, 4]) + box2 (Tensor[M, 4]) + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + + def box_area(box): + # box = 4xn + return (box[2] - box[0]) * (box[3] - box[1]) + + area1 = box_area(box1.T) + area2 = box_area(box2.T) + + # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) + inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) + return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) + + +def bbox_ioa(box1, box2, eps=1E-7): + """ Returns the intersection over box2 area given box1, box2. Boxes are x1y1x2y2 + box1: np.array of shape(4) + box2: np.array of shape(nx4) + returns: np.array of shape(n) + """ + + box2 = box2.transpose() + + # Get the coordinates of bounding boxes + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + + # Intersection area + inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \ + (np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0) + + # box2 area + box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + eps + + # Intersection over box2 area + return inter_area / box2_area + + +def wh_iou(wh1, wh2): + # Returns the nxm IoU matrix. 
wh1 is nx2, wh2 is mx2 + wh1 = wh1[:, None] # [N,1,2] + wh2 = wh2[None] # [1,M,2] + inter = torch.min(wh1, wh2).prod(2) # [N,M] + return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter) + + +# Plots ---------------------------------------------------------------------------------------------------------------- + +def plot_pr_curve(px, py, ap, save_dir='pr_curve.png', names=()): + # Precision-recall curve + fig, ax = plt.subplots(1, 1, figsize=(9, 6), tight_layout=True) + py = np.stack(py, axis=1) + + if 0 < len(names) < 21: # display per-class legend if < 21 classes + for i, y in enumerate(py.T): + ax.plot(px, y, linewidth=1, label=f'{names[i]} {ap[i, 0]:.3f}') # plot(recall, precision) + else: + ax.plot(px, py, linewidth=1, color='grey') # plot(recall, precision) + + ax.plot(px, py.mean(1), linewidth=3, color='blue', label='all classes %.3f mAP@0.5' % ap[:, 0].mean()) + ax.set_xlabel('Recall') + ax.set_ylabel('Precision') + ax.set_xlim(0, 1) + ax.set_ylim(0, 1) + plt.legend(bbox_to_anchor=(1.04, 1), loc="upper left") + fig.savefig(Path(save_dir), dpi=250) + plt.close() + + +def plot_mc_curve(px, py, save_dir='mc_curve.png', names=(), xlabel='Confidence', ylabel='Metric'): + # Metric-confidence curve + fig, ax = plt.subplots(1, 1, figsize=(9, 6), tight_layout=True) + + if 0 < len(names) < 21: # display per-class legend if < 21 classes + for i, y in enumerate(py): + ax.plot(px, y, linewidth=1, label=f'{names[i]}') # plot(confidence, metric) + else: + ax.plot(px, py.T, linewidth=1, color='grey') # plot(confidence, metric) + + y = py.mean(0) + ax.plot(px, y, linewidth=3, color='blue', label=f'all classes {y.max():.2f} at {px[y.argmax()]:.3f}') + ax.set_xlabel(xlabel) + ax.set_ylabel(ylabel) + ax.set_xlim(0, 1) + ax.set_ylim(0, 1) + plt.legend(bbox_to_anchor=(1.04, 1), loc="upper left") + fig.savefig(Path(save_dir), dpi=250) + plt.close() diff --git a/detector/YOLOv5/utils/plots.py b/detector/YOLOv5/utils/plots.py new file mode 100644 index 0000000000000000000000000000000000000000..ae4844e2ef10161ae609a939c99c38605eed366d --- /dev/null +++ b/detector/YOLOv5/utils/plots.py @@ -0,0 +1,471 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Plotting utils +""" + +import math +import os +from copy import copy +from pathlib import Path + +import cv2 +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sn +import torch +from PIL import Image, ImageDraw, ImageFont + +from detector.YOLOv5.utils.general import (CONFIG_DIR, FONT, LOGGER, Timeout, check_font, check_requirements, clip_coords, + increment_path, is_ascii, is_chinese, try_except, xywh2xyxy, xyxy2xywh) +from detector.YOLOv5.utils.metrics import fitness + +# Settings +RANK = int(os.getenv('RANK', -1)) +matplotlib.rc('font', **{'size': 11}) +matplotlib.use('Agg') # for writing to files only + + +class Colors: + # Ultralytics color palette https://ultralytics.com/ + def __init__(self): + # hex = matplotlib.colors.TABLEAU_COLORS.values() + hex = ('FF3838', 'FF9D97', 'FF701F', 'FFB21D', 'CFD231', '48F90A', '92CC17', '3DDB86', '1A9334', '00D4BB', + '2C99A8', '00C2FF', '344593', '6473FF', '0018EC', '8438FF', '520085', 'CB38FF', 'FF95C8', 'FF37C7') + self.palette = [self.hex2rgb('#' + c) for c in hex] + self.n = len(self.palette) + + def __call__(self, i, bgr=False): + c = self.palette[int(i) % self.n] + return (c[2], c[1], c[0]) if bgr else c + + @staticmethod + def hex2rgb(h): # rgb order (PIL) + return tuple(int(h[1 + i:1 + i + 2], 16) for i in (0, 2, 
4)) + + +colors = Colors() # create instance for 'from yolov5_utils.plots import colors' + + +def check_pil_font(font=FONT, size=10): + # Return a PIL TrueType Font, downloading to CONFIG_DIR if necessary + font = Path(font) + font = font if font.exists() else (CONFIG_DIR / font.name) + try: + return ImageFont.truetype(str(font) if font.exists() else font.name, size) + except Exception: # download if missing + check_font(font) + try: + return ImageFont.truetype(str(font), size) + except TypeError: + check_requirements('Pillow>=8.4.0') # known issue https://github.com/ultralytics/yolov5/issues/5374 + + +class Annotator: + if RANK in (-1, 0): + check_pil_font() # download TTF if necessary + + # YOLOv5 Annotator for train/val mosaics and jpgs and detect/hub inference annotations + def __init__(self, im, line_width=None, font_size=None, font='Arial.ttf', pil=False, example='abc'): + assert im.data.contiguous, 'Image not contiguous. Apply np.ascontiguousarray(im) to Annotator() input images.' + self.pil = pil or not is_ascii(example) or is_chinese(example) + if self.pil: # use PIL + self.im = im if isinstance(im, Image.Image) else Image.fromarray(im) + self.draw = ImageDraw.Draw(self.im) + self.font = check_pil_font(font='Arial.Unicode.ttf' if is_chinese(example) else font, + size=font_size or max(round(sum(self.im.size) / 2 * 0.035), 12)) + else: # use cv2 + self.im = im + self.lw = line_width or max(round(sum(im.shape) / 2 * 0.003), 2) # line width + + def box_label(self, box, label='', color=(128, 128, 128), txt_color=(255, 255, 255)): + # Add one xyxy box to image with label + if self.pil or not is_ascii(label): + self.draw.rectangle(box, width=self.lw, outline=color) # box + if label: + w, h = self.font.getsize(label) # text width, height + outside = box[1] - h >= 0 # label fits outside box + self.draw.rectangle((box[0], + box[1] - h if outside else box[1], + box[0] + w + 1, + box[1] + 1 if outside else box[1] + h + 1), fill=color) + # self.draw.text((box[0], box[1]), label, fill=txt_color, font=self.font, anchor='ls') # for PIL>8.0 + self.draw.text((box[0], box[1] - h if outside else box[1]), label, fill=txt_color, font=self.font) + else: # cv2 + p1, p2 = (int(box[0]), int(box[1])), (int(box[2]), int(box[3])) + cv2.rectangle(self.im, p1, p2, color, thickness=self.lw, lineType=cv2.LINE_AA) + if label: + tf = max(self.lw - 1, 1) # font thickness + w, h = cv2.getTextSize(label, 0, fontScale=self.lw / 3, thickness=tf)[0] # text width, height + outside = p1[1] - h - 3 >= 0 # label fits outside box + p2 = p1[0] + w, p1[1] - h - 3 if outside else p1[1] + h + 3 + cv2.rectangle(self.im, p1, p2, color, -1, cv2.LINE_AA) # filled + cv2.putText(self.im, label, (p1[0], p1[1] - 2 if outside else p1[1] + h + 2), 0, self.lw / 3, txt_color, + thickness=tf, lineType=cv2.LINE_AA) + + def rectangle(self, xy, fill=None, outline=None, width=1): + # Add rectangle to image (PIL-only) + self.draw.rectangle(xy, fill, outline, width) + + def text(self, xy, text, txt_color=(255, 255, 255)): + # Add text to image (PIL-only) + w, h = self.font.getsize(text) # text width, height + self.draw.text((xy[0], xy[1] - h + 1), text, fill=txt_color, font=self.font) + + def result(self): + # Return annotated image as array + return np.asarray(self.im) + + +def feature_visualization(x, module_type, stage, n=32, save_dir=Path('runs/detect/exp')): + """ + x: Features to be visualized + module_type: Module type + stage: Module stage within model + n: Maximum number of feature maps to plot + save_dir: Directory to save results + """ + 
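# Usage sketch (hypothetical call site; the variable names below are illustrative, not part of this file):
+    #   feats = ...  # an intermediate activation captured during the forward pass, shape (B, C, H, W)
+    #   feature_visualization(feats, module_type='model.2.C3', stage=2)  # would save stage2_C3_features.png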
+    if 'Detect' not in module_type:
+        batch, channels, height, width = x.shape  # batch, channels, height, width
+        if height > 1 and width > 1:
+            f = save_dir / f"stage{stage}_{module_type.split('.')[-1]}_features.png"  # filename
+
+            blocks = torch.chunk(x[0].cpu(), channels, dim=0)  # select batch index 0, block by channels
+            n = min(n, channels)  # number of plots
+            fig, ax = plt.subplots(math.ceil(n / 8), 8, tight_layout=True)  # ceil(n/8) rows x 8 cols
+            ax = ax.ravel()
+            plt.subplots_adjust(wspace=0.05, hspace=0.05)
+            for i in range(n):
+                ax[i].imshow(blocks[i].squeeze())  # cmap='gray'
+                ax[i].axis('off')
+
+            LOGGER.info(f'Saving {f}... ({n}/{channels})')
+            plt.savefig(f, dpi=300, bbox_inches='tight')
+            plt.close()
+            np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy())  # npy save
+
+
+def hist2d(x, y, n=100):
+    # 2d histogram used in labels.png and evolve.png
+    xedges, yedges = np.linspace(x.min(), x.max(), n), np.linspace(y.min(), y.max(), n)
+    hist, xedges, yedges = np.histogram2d(x, y, (xedges, yedges))
+    xidx = np.clip(np.digitize(x, xedges) - 1, 0, hist.shape[0] - 1)
+    yidx = np.clip(np.digitize(y, yedges) - 1, 0, hist.shape[1] - 1)
+    return np.log(hist[xidx, yidx])
+
+
+def butter_lowpass_filtfilt(data, cutoff=1500, fs=50000, order=5):
+    from scipy.signal import butter, filtfilt
+
+    # https://stackoverflow.com/questions/28536191/how-to-filter-smooth-with-scipy-numpy
+    def butter_lowpass(cutoff, fs, order):
+        nyq = 0.5 * fs
+        normal_cutoff = cutoff / nyq
+        return butter(order, normal_cutoff, btype='low', analog=False)
+
+    b, a = butter_lowpass(cutoff, fs, order=order)
+    return filtfilt(b, a, data)  # forward-backward filter
+
+
+def output_to_target(output):
+    # Convert model output to target format [batch_id, class_id, x, y, w, h, conf]
+    targets = []
+    for i, o in enumerate(output):
+        for *box, conf, cls in o.cpu().numpy():
+            targets.append([i, cls, *list(*xyxy2xywh(np.array(box)[None])), conf])
+    return np.array(targets)
+
+
+def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=1920, max_subplots=16):
+    # Plot image grid with labels
+    if isinstance(images, torch.Tensor):
+        images = images.cpu().float().numpy()
+    if isinstance(targets, torch.Tensor):
+        targets = targets.cpu().numpy()
+    if np.max(images[0]) <= 1:
+        images *= 255  # de-normalise (optional)
+    bs, _, h, w = images.shape  # batch size, _, height, width
+    bs = min(bs, max_subplots)  # limit plot images
+    ns = np.ceil(bs ** 0.5)  # number of subplots (square)
+
+    # Build Image
+    mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8)  # init
+    for i, im in enumerate(images):
+        if i == max_subplots:  # cap the grid at max_subplots images
+            break
+        x, y = int(w * (i // ns)), int(h * (i % ns))  # block origin
+        im = im.transpose(1, 2, 0)
+        mosaic[y:y + h, x:x + w, :] = im
+
+    # Resize (optional)
+    scale = max_size / ns / max(h, w)
+    if scale < 1:
+        h = math.ceil(scale * h)
+        w = math.ceil(scale * w)
+        mosaic = cv2.resize(mosaic, tuple(int(x * ns) for x in (w, h)))
+
+    # Annotate
+    fs = int((h + w) * ns * 0.01)  # font size
+    annotator = Annotator(mosaic, line_width=round(fs / 10), font_size=fs, pil=True, example=names)
+    for i in range(i + 1):  # iterate over the i + 1 images actually placed in the mosaic
+        x, y = int(w * (i // ns)), int(h * (i % ns))  # block origin
+        annotator.rectangle([x, y, x + w, y + h], None, (255, 255, 255), width=2)  # borders
+        if paths:
+            annotator.text((x + 5, y + 5 + h), text=Path(paths[i]).name[:40], txt_color=(220, 220, 220))  # filenames
+        if len(targets) > 0:
+            ti = targets[targets[:, 0] == i]  # image targets
+            boxes
= xywh2xyxy(ti[:, 2:6]).T + classes = ti[:, 1].astype('int') + labels = ti.shape[1] == 6 # labels if no conf column + conf = None if labels else ti[:, 6] # check for confidence presence (label vs pred) + + if boxes.shape[1]: + if boxes.max() <= 1.01: # if normalized with tolerance 0.01 + boxes[[0, 2]] *= w # scale to pixels + boxes[[1, 3]] *= h + elif scale < 1: # absolute coords need scale if image scales + boxes *= scale + boxes[[0, 2]] += x + boxes[[1, 3]] += y + for j, box in enumerate(boxes.T.tolist()): + cls = classes[j] + color = colors(cls) + cls = names[cls] if names else cls + if labels or conf[j] > 0.25: # 0.25 conf thresh + label = f'{cls}' if labels else f'{cls} {conf[j]:.1f}' + annotator.box_label(box, label, color=color) + annotator.im.save(fname) # save + + +def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=''): + # Plot LR simulating training for full epochs + optimizer, scheduler = copy(optimizer), copy(scheduler) # do not modify originals + y = [] + for _ in range(epochs): + scheduler.step() + y.append(optimizer.param_groups[0]['lr']) + plt.plot(y, '.-', label='LR') + plt.xlabel('epoch') + plt.ylabel('LR') + plt.grid() + plt.xlim(0, epochs) + plt.ylim(0) + plt.savefig(Path(save_dir) / 'LR.png', dpi=200) + plt.close() + + +def plot_val_txt(): # from utils.plots import *; plot_val() + # Plot val.txt histograms + x = np.loadtxt('val.txt', dtype=np.float32) + box = xyxy2xywh(x[:, :4]) + cx, cy = box[:, 0], box[:, 1] + + fig, ax = plt.subplots(1, 1, figsize=(6, 6), tight_layout=True) + ax.hist2d(cx, cy, bins=600, cmax=10, cmin=0) + ax.set_aspect('equal') + plt.savefig('hist2d.png', dpi=300) + + fig, ax = plt.subplots(1, 2, figsize=(12, 6), tight_layout=True) + ax[0].hist(cx, bins=600) + ax[1].hist(cy, bins=600) + plt.savefig('hist1d.png', dpi=200) + + +def plot_targets_txt(): # from utils.plots import *; plot_targets_txt() + # Plot targets.txt histograms + x = np.loadtxt('targets.txt', dtype=np.float32).T + s = ['x targets', 'y targets', 'width targets', 'height targets'] + fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) + ax = ax.ravel() + for i in range(4): + ax[i].hist(x[i], bins=100, label=f'{x[i].mean():.3g} +/- {x[i].std():.3g}') + ax[i].legend() + ax[i].set_title(s[i]) + plt.savefig('targets.jpg', dpi=200) + + +def plot_val_study(file='', dir='', x=None): # from utils.plots import *; plot_val_study() + # Plot file=study.txt generated by val.py (or plot all study*.txt in dir) + save_dir = Path(file).parent if file else Path(dir) + plot2 = False # plot additional results + if plot2: + ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True)[1].ravel() + + fig2, ax2 = plt.subplots(1, 1, figsize=(8, 4), tight_layout=True) + # for f in [save_dir / f'study_coco_{x}.txt' for x in ['yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'yolov5x6']]: + for f in sorted(save_dir.glob('study*.txt')): + y = np.loadtxt(f, dtype=np.float32, usecols=[0, 1, 2, 3, 7, 8, 9], ndmin=2).T + x = np.arange(y.shape[1]) if x is None else np.array(x) + if plot2: + s = ['P', 'R', 'mAP@.5', 'mAP@.5:.95', 't_preprocess (ms/img)', 't_inference (ms/img)', 't_NMS (ms/img)'] + for i in range(7): + ax[i].plot(x, y[i], '.-', linewidth=2, markersize=8) + ax[i].set_title(s[i]) + + j = y[3].argmax() + 1 + ax2.plot(y[5, 1:j], y[3, 1:j] * 1E2, '.-', linewidth=2, markersize=8, + label=f.stem.replace('study_coco_', '').replace('yolo', 'YOLO')) + + ax2.plot(1E3 / np.array([209, 140, 97, 58, 35, 18]), [34.6, 40.5, 43.0, 47.5, 49.7, 51.5], + 'k.-', linewidth=2, markersize=8, alpha=.25, 
label='EfficientDet') + + ax2.grid(alpha=0.2) + ax2.set_yticks(np.arange(20, 60, 5)) + ax2.set_xlim(0, 57) + ax2.set_ylim(25, 55) + ax2.set_xlabel('GPU Speed (ms/img)') + ax2.set_ylabel('COCO AP val') + ax2.legend(loc='lower right') + f = save_dir / 'study.png' + print(f'Saving {f}...') + plt.savefig(f, dpi=300) + + +@try_except # known issue https://github.com/ultralytics/yolov5/issues/5395 +@Timeout(30) # known issue https://github.com/ultralytics/yolov5/issues/5611 +def plot_labels(labels, names=(), save_dir=Path('')): + # plot dataset labels + LOGGER.info(f"Plotting labels to {save_dir / 'labels.jpg'}... ") + c, b = labels[:, 0], labels[:, 1:].transpose() # classes, boxes + nc = int(c.max() + 1) # number of classes + x = pd.DataFrame(b.transpose(), columns=['x', 'y', 'width', 'height']) + + # seaborn correlogram + sn.pairplot(x, corner=True, diag_kind='auto', kind='hist', diag_kws=dict(bins=50), plot_kws=dict(pmax=0.9)) + plt.savefig(save_dir / 'labels_correlogram.jpg', dpi=200) + plt.close() + + # matplotlib labels + matplotlib.use('svg') # faster + ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel() + y = ax[0].hist(c, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8) + try: # color histogram bars by class + [y[2].patches[i].set_color([x / 255 for x in colors(i)]) for i in range(nc)] # known issue #3195 + except Exception: + pass + ax[0].set_ylabel('instances') + if 0 < len(names) < 30: + ax[0].set_xticks(range(len(names))) + ax[0].set_xticklabels(names, rotation=90, fontsize=10) + else: + ax[0].set_xlabel('classes') + sn.histplot(x, x='x', y='y', ax=ax[2], bins=50, pmax=0.9) + sn.histplot(x, x='width', y='height', ax=ax[3], bins=50, pmax=0.9) + + # rectangles + labels[:, 1:3] = 0.5 # center + labels[:, 1:] = xywh2xyxy(labels[:, 1:]) * 2000 + img = Image.fromarray(np.ones((2000, 2000, 3), dtype=np.uint8) * 255) + for cls, *box in labels[:1000]: + ImageDraw.Draw(img).rectangle(box, width=1, outline=colors(cls)) # plot + ax[1].imshow(img) + ax[1].axis('off') + + for a in [0, 1, 2, 3]: + for s in ['top', 'right', 'left', 'bottom']: + ax[a].spines[s].set_visible(False) + + plt.savefig(save_dir / 'labels.jpg', dpi=200) + matplotlib.use('Agg') + plt.close() + + +def plot_evolve(evolve_csv='path/to/evolve.csv'): # from utils.plots import *; plot_evolve() + # Plot evolve.csv hyp evolution results + evolve_csv = Path(evolve_csv) + data = pd.read_csv(evolve_csv) + keys = [x.strip() for x in data.columns] + x = data.values + f = fitness(x) + j = np.argmax(f) # max fitness index + plt.figure(figsize=(10, 12), tight_layout=True) + matplotlib.rc('font', **{'size': 8}) + print(f'Best results from row {j} of {evolve_csv}:') + for i, k in enumerate(keys[7:]): + v = x[:, 7 + i] + mu = v[j] # best single result + plt.subplot(6, 5, i + 1) + plt.scatter(v, f, c=hist2d(v, f, 20), cmap='viridis', alpha=.8, edgecolors='none') + plt.plot(mu, f.max(), 'k+', markersize=15) + plt.title(f'{k} = {mu:.3g}', fontdict={'size': 9}) # limit to 40 characters + if i % 5 != 0: + plt.yticks([]) + print(f'{k:>15}: {mu:.3g}') + f = evolve_csv.with_suffix('.png') # filename + plt.savefig(f, dpi=200) + plt.close() + print(f'Saved {f}') + + +def plot_results(file='path/to/results.csv', dir=''): + # Plot training results.csv. 
Usage: from utils.plots import *; plot_results('path/to/results.csv') + save_dir = Path(file).parent if file else Path(dir) + fig, ax = plt.subplots(2, 5, figsize=(12, 6), tight_layout=True) + ax = ax.ravel() + files = list(save_dir.glob('results*.csv')) + assert len(files), f'No results.csv files found in {save_dir.resolve()}, nothing to plot.' + for fi, f in enumerate(files): + try: + data = pd.read_csv(f) + s = [x.strip() for x in data.columns] + x = data.values[:, 0] + for i, j in enumerate([1, 2, 3, 4, 5, 8, 9, 10, 6, 7]): + y = data.values[:, j] + # y[y == 0] = np.nan # don't show zero values + ax[i].plot(x, y, marker='.', label=f.stem, linewidth=2, markersize=8) + ax[i].set_title(s[j], fontsize=12) + # if j in [8, 9, 10]: # share train and val loss y axes + # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) + except Exception as e: + LOGGER.info(f'Warning: Plotting error for {f}: {e}') + ax[1].legend() + fig.savefig(save_dir / 'results.png', dpi=200) + plt.close() + + +def profile_idetection(start=0, stop=0, labels=(), save_dir=''): + # Plot iDetection '*.txt' per-image logs. from utils.plots import *; profile_idetection() + ax = plt.subplots(2, 4, figsize=(12, 6), tight_layout=True)[1].ravel() + s = ['Images', 'Free Storage (GB)', 'RAM Usage (GB)', 'Battery', 'dt_raw (ms)', 'dt_smooth (ms)', 'real-world FPS'] + files = list(Path(save_dir).glob('frames*.txt')) + for fi, f in enumerate(files): + try: + results = np.loadtxt(f, ndmin=2).T[:, 90:-30] # clip first and last rows + n = results.shape[1] # number of rows + x = np.arange(start, min(stop, n) if stop else n) + results = results[:, x] + t = (results[0] - results[0].min()) # set t0=0s + results[0] = x + for i, a in enumerate(ax): + if i < len(results): + label = labels[fi] if len(labels) else f.stem.replace('frames_', '') + a.plot(t, results[i], marker='.', label=label, linewidth=1, markersize=5) + a.set_title(s[i]) + a.set_xlabel('time (s)') + # if fi == len(files) - 1: + # a.set_ylim(bottom=0) + for side in ['top', 'right']: + a.spines[side].set_visible(False) + else: + a.remove() + except Exception as e: + print(f'Warning: Plotting error for {f}; {e}') + ax[1].legend() + plt.savefig(Path(save_dir) / 'idetection_profile.png', dpi=200) + + +def save_one_box(xyxy, im, file='image.jpg', gain=1.02, pad=10, square=False, BGR=False, save=True): + # Save image crop as {file} with crop size multiple {gain} and {pad} pixels. 
Save and/or return crop
+    xyxy = torch.tensor(xyxy).view(-1, 4)
+    b = xyxy2xywh(xyxy)  # boxes
+    if square:
+        b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1)  # attempt rectangle to square
+    b[:, 2:] = b[:, 2:] * gain + pad  # box wh * gain + pad
+    xyxy = xywh2xyxy(b).long()
+    clip_coords(xyxy, im.shape)
+    crop = im[int(xyxy[0, 1]):int(xyxy[0, 3]), int(xyxy[0, 0]):int(xyxy[0, 2]), ::(1 if BGR else -1)]
+    if save:
+        file = Path(file)  # the default is a str, so convert before using Path attributes
+        file.parent.mkdir(parents=True, exist_ok=True)  # make directory
+        cv2.imwrite(str(increment_path(file).with_suffix('.jpg')), crop)
+    return crop
diff --git a/detector/YOLOv5/utils/torch_utils.py b/detector/YOLOv5/utils/torch_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bb6f556d2ce25e346e36922c737e1f46375602b
--- /dev/null
+++ b/detector/YOLOv5/utils/torch_utils.py
@@ -0,0 +1,329 @@
+# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
+"""
+PyTorch utils
+"""
+
+import datetime
+import math
+import os
+import platform
+import subprocess
+import time
+import warnings
+from contextlib import contextmanager
+from copy import deepcopy
+from pathlib import Path
+
+import torch
+import torch.distributed as dist
+import torch.nn as nn
+import torch.nn.functional as F
+
+from detector.YOLOv5.utils.general import LOGGER
+
+try:
+    import thop  # for FLOPs computation
+except ImportError:
+    thop = None
+
+# Suppress PyTorch warnings
+warnings.filterwarnings('ignore', message='User provided device_type of \'cuda\', but CUDA is not available. Disabling')
+
+
+@contextmanager
+def torch_distributed_zero_first(local_rank: int):
+    """
+    Context manager to make all processes in distributed training wait for each local_master to do something.
+    """
+    if local_rank not in [-1, 0]:
+        dist.barrier(device_ids=[local_rank])
+    yield
+    if local_rank == 0:
+        dist.barrier(device_ids=[0])
+
+
+def date_modified(path=__file__):
+    # return human-readable file modification date, i.e. '2021-3-26'
+    t = datetime.datetime.fromtimestamp(Path(path).stat().st_mtime)
+    return f'{t.year}-{t.month}-{t.day}'
+
+
+def git_describe(path=Path(__file__).parent):  # path must be a directory
+    # return human-readable git description, i.e. v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe
+    s = f'git -C {path} describe --tags --long --always'
+    try:
+        return subprocess.check_output(s, shell=True, stderr=subprocess.STDOUT).decode()[:-1]
+    except subprocess.CalledProcessError:
+        return ''  # not a git repository
+
+
+def device_count():
+    # Returns number of CUDA devices available. Safe version of torch.cuda.device_count(). Only works on Linux.
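+    # Illustrative behaviour (a note, not upstream documentation): on a two-GPU Linux host this
+    # returns 2 by counting the lines of `nvidia-smi -L`; on non-Linux systems the assert below
+    # raises, and any failure of the nvidia-smi call yields 0.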
+ assert platform.system() == 'Linux', 'device_count() function only works on Linux' + try: + cmd = 'nvidia-smi -L | wc -l' + return int(subprocess.run(cmd, shell=True, capture_output=True, check=True).stdout.decode().split()[-1]) + except Exception: + return 0 + + +def select_device(device='', batch_size=0, newline=True): + # device = 'cpu' or '0' or '0,1,2,3' + s = f'YOLOv5 🚀 {git_describe() or date_modified()} torch {torch.__version__} ' # string + device = str(device).strip().lower().replace('cuda:', '') # to string, 'cuda:0' to '0' + cpu = device == 'cpu' + if cpu: + os.environ['CUDA_VISIBLE_DEVICES'] = '-1' # force torch.cuda.is_available() = False + elif device: # non-cpu device requested + os.environ['CUDA_VISIBLE_DEVICES'] = device # set environment variable - must be before assert is_available() + assert torch.cuda.is_available() and torch.cuda.device_count() >= len(device.replace(',', '')), \ + f"Invalid CUDA '--device {device}' requested, use '--device cpu' or pass valid CUDA device(s)" + + cuda = not cpu and torch.cuda.is_available() + if cuda: + devices = device.split(',') if device else '0' # range(torch.cuda.device_count()) # i.e. 0,1,6,7 + n = len(devices) # device count + if n > 1 and batch_size > 0: # check batch_size is divisible by device_count + assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}' + space = ' ' * (len(s) + 1) + for i, d in enumerate(devices): + p = torch.cuda.get_device_properties(i) + s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / 1024 ** 2:.0f}MiB)\n" # bytes to MB + else: + s += 'CPU\n' + + if not newline: + s = s.rstrip() + LOGGER.info(s.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else s) # emoji-safe + return torch.device('cuda:0' if cuda else 'cpu') + + +def time_sync(): + # pytorch-accurate time + if torch.cuda.is_available(): + torch.cuda.synchronize() + return time.time() + + +def profile(input, ops, n=10, device=None): + # YOLOv5 speed/memory/FLOPs profiler + # + # Usage: + # input = torch.randn(16, 3, 640, 640) + # m1 = lambda x: x * torch.sigmoid(x) + # m2 = nn.SiLU() + # profile(input, [m1, m2], n=100) # profile over 100 iterations + + results = [] + device = device or select_device() + print(f"{'Params':>12s}{'GFLOPs':>12s}{'GPU_mem (GB)':>14s}{'forward (ms)':>14s}{'backward (ms)':>14s}" + f"{'input':>24s}{'output':>24s}") + + for x in input if isinstance(input, list) else [input]: + x = x.to(device) + x.requires_grad = True + for m in ops if isinstance(ops, list) else [ops]: + m = m.to(device) if hasattr(m, 'to') else m # device + m = m.half() if hasattr(m, 'half') and isinstance(x, torch.Tensor) and x.dtype is torch.float16 else m + tf, tb, t = 0, 0, [0, 0, 0] # dt forward, backward + try: + flops = thop.profile(m, inputs=(x,), verbose=False)[0] / 1E9 * 2 # GFLOPs + except Exception: + flops = 0 + + try: + for _ in range(n): + t[0] = time_sync() + y = m(x) + t[1] = time_sync() + try: + _ = (sum(yi.sum() for yi in y) if isinstance(y, list) else y).sum().backward() + t[2] = time_sync() + except Exception: # no backward method + # print(e) # for debug + t[2] = float('nan') + tf += (t[1] - t[0]) * 1000 / n # ms per op forward + tb += (t[2] - t[1]) * 1000 / n # ms per op backward + mem = torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0 # (GB) + s_in = tuple(x.shape) if isinstance(x, torch.Tensor) else 'list' + s_out = tuple(y.shape) if isinstance(y, torch.Tensor) else 'list' + p = sum(list(x.numel() for x in m.parameters())) if 
isinstance(m, nn.Module) else 0 # parameters + print(f'{p:12}{flops:12.4g}{mem:>14.3f}{tf:14.4g}{tb:14.4g}{str(s_in):>24s}{str(s_out):>24s}') + results.append([p, flops, mem, tf, tb, s_in, s_out]) + except Exception as e: + print(e) + results.append(None) + torch.cuda.empty_cache() + return results + + +def is_parallel(model): + # Returns True if model is of type DP or DDP + return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel) + + +def de_parallel(model): + # De-parallelize a model: returns single-GPU model if model is of type DP or DDP + return model.module if is_parallel(model) else model + + +def initialize_weights(model): + for m in model.modules(): + t = type(m) + if t is nn.Conv2d: + pass # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif t is nn.BatchNorm2d: + m.eps = 1e-3 + m.momentum = 0.03 + elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU]: + m.inplace = True + + +def find_modules(model, mclass=nn.Conv2d): + # Finds layer indices matching module class 'mclass' + return [i for i, m in enumerate(model.module_list) if isinstance(m, mclass)] + + +def sparsity(model): + # Return global model sparsity + a, b = 0, 0 + for p in model.parameters(): + a += p.numel() + b += (p == 0).sum() + return b / a + + +def prune(model, amount=0.3): + # Prune model to requested global sparsity + import torch.nn.utils.prune as prune + print('Pruning model... ', end='') + for name, m in model.named_modules(): + if isinstance(m, nn.Conv2d): + prune.l1_unstructured(m, name='weight', amount=amount) # prune + prune.remove(m, 'weight') # make permanent + print(' %.3g global sparsity' % sparsity(model)) + + +def fuse_conv_and_bn(conv, bn): + # Fuse convolution and batchnorm layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/ + fusedconv = nn.Conv2d(conv.in_channels, + conv.out_channels, + kernel_size=conv.kernel_size, + stride=conv.stride, + padding=conv.padding, + groups=conv.groups, + bias=True).requires_grad_(False).to(conv.weight.device) + + # prepare filters + w_conv = conv.weight.clone().view(conv.out_channels, -1) + w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var))) + fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.shape)) + + # prepare spatial bias + b_conv = torch.zeros(conv.weight.size(0), device=conv.weight.device) if conv.bias is None else conv.bias + b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps)) + fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn) + + return fusedconv + + +def model_info(model, verbose=False, img_size=640): + # Model information. img_size may be int or list, i.e. 
img_size=640 or img_size=[640, 320] + n_p = sum(x.numel() for x in model.parameters()) # number parameters + n_g = sum(x.numel() for x in model.parameters() if x.requires_grad) # number gradients + if verbose: + print(f"{'layer':>5} {'name':>40} {'gradient':>9} {'parameters':>12} {'shape':>20} {'mu':>10} {'sigma':>10}") + for i, (name, p) in enumerate(model.named_parameters()): + name = name.replace('module_list.', '') + print('%5g %40s %9s %12g %20s %10.3g %10.3g' % + (i, name, p.requires_grad, p.numel(), list(p.shape), p.mean(), p.std())) + + try: # FLOPs + from thop import profile + stride = max(int(model.stride.max()), 32) if hasattr(model, 'stride') else 32 + img = torch.zeros((1, model.yaml.get('ch', 3), stride, stride), device=next(model.parameters()).device) # input + flops = profile(deepcopy(model), inputs=(img,), verbose=False)[0] / 1E9 * 2 # stride GFLOPs + img_size = img_size if isinstance(img_size, list) else [img_size, img_size] # expand if int/float + fs = ', %.1f GFLOPs' % (flops * img_size[0] / stride * img_size[1] / stride) # 640x640 GFLOPs + except (ImportError, Exception): + fs = '' + + LOGGER.info(f"Model Summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}") + + +def scale_img(img, ratio=1.0, same_shape=False, gs=32): # img(16,3,256,416) + # scales img(bs,3,y,x) by ratio constrained to gs-multiple + if ratio == 1.0: + return img + else: + h, w = img.shape[2:] + s = (int(h * ratio), int(w * ratio)) # new size + img = F.interpolate(img, size=s, mode='bilinear', align_corners=False) # resize + if not same_shape: # pad/crop img + h, w = (math.ceil(x * ratio / gs) * gs for x in (h, w)) + return F.pad(img, [0, w - s[1], 0, h - s[0]], value=0.447) # value = imagenet mean + + +def copy_attr(a, b, include=(), exclude=()): + # Copy attributes from b to a, options to only include [...] and to exclude [...] + for k, v in b.__dict__.items(): + if (len(include) and k not in include) or k.startswith('_') or k in exclude: + continue + else: + setattr(a, k, v) + + +class EarlyStopping: + # YOLOv5 simple early stopper + def __init__(self, patience=30): + self.best_fitness = 0.0 # i.e. mAP + self.best_epoch = 0 + self.patience = patience or float('inf') # epochs to wait after fitness stops improving to stop + self.possible_stop = False # possible stop may occur next epoch + + def __call__(self, epoch, fitness): + if fitness >= self.best_fitness: # >= 0 to allow for early zero-fitness stage of training + self.best_epoch = epoch + self.best_fitness = fitness + delta = epoch - self.best_epoch # epochs without improvement + self.possible_stop = delta >= (self.patience - 1) # possible stop may occur next epoch + stop = delta >= self.patience # stop training if patience exceeded + if stop: + LOGGER.info(f'Stopping training early as no improvement observed in last {self.patience} epochs. ' + f'Best results observed at epoch {self.best_epoch}, best model saved as best.pt.\n' + f'To update EarlyStopping(patience={self.patience}) pass a new patience value, ' + f'i.e. 
`python train.py --patience 300` or use `--patience 0` to disable EarlyStopping.')
+        return stop
+
+
+class ModelEMA:
+    """ Updated Exponential Moving Average (EMA) from https://github.com/rwightman/pytorch-image-models
+    Keeps a moving average of everything in the model state_dict (parameters and buffers)
+    For EMA details see https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage
+    """
+
+    def __init__(self, model, decay=0.9999, updates=0):
+        # Create EMA
+        self.ema = deepcopy(de_parallel(model)).eval()  # FP32 EMA
+        # if next(model.parameters()).device.type != 'cpu':
+        #     self.ema.half()  # FP16 EMA
+        self.updates = updates  # number of EMA updates
+        self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
+        for p in self.ema.parameters():
+            p.requires_grad_(False)
+
+    def update(self, model):
+        # Update EMA parameters
+        with torch.no_grad():
+            self.updates += 1
+            d = self.decay(self.updates)
+
+            msd = de_parallel(model).state_dict()  # model state_dict
+            for k, v in self.ema.state_dict().items():
+                if v.dtype.is_floating_point:
+                    v *= d
+                    v += (1 - d) * msd[k].detach()
+
+    def update_attr(self, model, include=(), exclude=('process_group', 'reducer')):
+        # Update EMA attributes
+        copy_attr(self.ema, model, include, exclude)
diff --git a/detector/__init__.py b/detector/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3392dea1d03acf9a171ce4d2fc47d5b00c020ee
--- /dev/null
+++ b/detector/__init__.py
@@ -0,0 +1,24 @@
+from .YOLOv3 import YOLOv3
+from .MMDet import MMDet
+from .YOLOv5 import YOLOv5
+from .Mask_RCNN import Mask_RCNN
+
+__all__ = ['build_detector']
+
+
+def build_detector(cfg, use_cuda, segment=False):
+    if cfg.USE_MMDET:
+        return MMDet(cfg.MMDET.CFG, cfg.MMDET.CHECKPOINT,
+                     score_thresh=cfg.MMDET.SCORE_THRESH,
+                     is_xywh=True, use_cuda=use_cuda)
+    elif cfg.USE_SEGMENT:
+        return Mask_RCNN(segment, num_classes=cfg.MASKRCNN.NUM_CLASSES, box_thresh=cfg.MASKRCNN.BOX_THRESH,
+                         label_json_path=cfg.MASKRCNN.LABEL, weight_path=cfg.MASKRCNN.WEIGHT)
+    else:
+        if 'YOLOV5' in cfg:
+            return YOLOv5(cfg.YOLOV5.WEIGHT, cfg.YOLOV5.DATA, cfg.YOLOV5.IMGSZ,
+                          cfg.YOLOV5.SCORE_THRESH, cfg.YOLOV5.NMS_THRESH, cfg.YOLOV5.MAX_DET)
+        elif 'YOLOV3' in cfg:
+            return YOLOv3(cfg.YOLOV3.CFG, cfg.YOLOV3.WEIGHT, cfg.YOLOV3.CLASS_NAMES,
+                          score_thresh=cfg.YOLOV3.SCORE_THRESH, nms_thresh=cfg.YOLOV3.NMS_THRESH,
+                          is_xywh=True, use_cuda=use_cuda)
+        # NOTE: falls through and returns None when the config defines no supported detector
diff --git a/input.mp4:Zone.Identifier b/input.mp4:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..d5f34ac228482c3e782989019f2db4128ec518ef
--- /dev/null
+++ b/input.mp4:Zone.Identifier
@@ -0,0 +1,4 @@
+[ZoneTransfer]
+ZoneId=3
+ReferrerUrl=https://www.pexels.com/
+HostUrl=https://videos.pexels.com/video-files/2932301/2932301-uhd_4096_2160_24fps.mp4
diff --git a/ped_det_server.py b/ped_det_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d67916e963db3475c0f029373b7e894fb0ef2db
--- /dev/null
+++ b/ped_det_server.py
@@ -0,0 +1,155 @@
+"""
+This module takes a video as input and outputs a
+JSON file with the coordinates of the bounding boxes in the video.
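+
+Usage sketch (the paths shown are just the argparse defaults below; adjust as needed):
+    python ped_det_server.py --VIDEO_PATH ./demo/ped.avi --save_path ./output
+This writes an annotated <video>.avi plus a <video>.json of tracked boxes under ./output.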
+ +""" +from os.path import basename, splitext, join, isfile, isdir, dirname +from os import makedirs + +from tqdm import tqdm +import cv2 +import argparse +import torch + +from detector import build_detector +from deep_sort import build_tracker +from utils.tools import tik_tok, is_video +from utils.draw import compute_color_for_labels +from utils.parser import get_config +from utils.json_logger import BboxToJsonLogger +import warnings + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--VIDEO_PATH", type=str, default="./demo/ped.avi") + parser.add_argument("--config_detection", type=str, default="./configs/yolov3.yaml") + parser.add_argument("--config_deepsort", type=str, default="./configs/deep_sort.yaml") + parser.add_argument("--write-fps", type=int, default=20) + parser.add_argument("--frame_interval", type=int, default=1) + parser.add_argument("--save_path", type=str, default="./output") + parser.add_argument("--cpu", dest="use_cuda", action="store_false", default=True) + args = parser.parse_args() + + assert isfile(args.VIDEO_PATH), "Error: Video not found" + assert is_video(args.VIDEO_PATH), "Error: Not Supported format" + if args.frame_interval < 1: args.frame_interval = 1 + + return args + + +class VideoTracker(object): + def __init__(self, cfg, args): + self.cfg = cfg + self.args = args + use_cuda = args.use_cuda and torch.cuda.is_available() + if not use_cuda: + warnings.warn("Running in cpu mode!") + + self.vdo = cv2.VideoCapture() + self.detector = build_detector(cfg, use_cuda=use_cuda) + self.deepsort = build_tracker(cfg, use_cuda=use_cuda) + self.class_names = self.detector.class_names + + # Configure output video and json + self.logger = BboxToJsonLogger() + filename, extension = splitext(basename(self.args.VIDEO_PATH)) + self.output_file = join(self.args.save_path, f'{filename}.avi') + self.json_output = join(self.args.save_path, f'{filename}.json') + if not isdir(dirname(self.json_output)): + makedirs(dirname(self.json_output)) + + def __enter__(self): + self.vdo.open(self.args.VIDEO_PATH) + self.total_frames = int(cv2.VideoCapture.get(self.vdo, cv2.CAP_PROP_FRAME_COUNT)) + self.im_width = int(self.vdo.get(cv2.CAP_PROP_FRAME_WIDTH)) + self.im_height = int(self.vdo.get(cv2.CAP_PROP_FRAME_HEIGHT)) + + video_details = {'frame_width': self.im_width, + 'frame_height': self.im_height, + 'frame_rate': self.args.write_fps, + 'video_name': self.args.VIDEO_PATH} + codec = cv2.VideoWriter_fourcc(*'XVID') + self.writer = cv2.VideoWriter(self.output_file, codec, self.args.write_fps, + (self.im_width, self.im_height)) + self.logger.add_video_details(**video_details) + + assert self.vdo.isOpened() + return self + + def __exit__(self, exc_type, exc_value, exc_traceback): + if exc_type: + print(exc_type, exc_value, exc_traceback) + + def run(self): + idx_frame = 0 + pbar = tqdm(total=self.total_frames + 1) + while self.vdo.grab(): + if idx_frame % args.frame_interval == 0: + _, ori_im = self.vdo.retrieve() + timestamp = self.vdo.get(cv2.CAP_PROP_POS_MSEC) + frame_id = int(self.vdo.get(cv2.CAP_PROP_POS_FRAMES)) + self.logger.add_frame(frame_id=frame_id, timestamp=timestamp) + self.detection(frame=ori_im, frame_id=frame_id) + self.save_frame(ori_im) + idx_frame += 1 + pbar.update() + self.logger.json_output(self.json_output) + + @tik_tok + def detection(self, frame, frame_id): + im = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + # do detection + bbox_xywh, cls_conf, cls_ids = self.detector(im) + if bbox_xywh is not None: + # select person class + mask = cls_ids 
+            mask = cls_ids == 0
+
+            bbox_xywh = bbox_xywh[mask]
+            bbox_xywh[:, 3:] *= 1.2  # bbox dilation just in case bbox too small
+            cls_conf = cls_conf[mask]
+
+            # do tracking
+            outputs = self.deepsort.update(bbox_xywh, cls_conf, im)
+
+            # draw boxes for visualization
+            if len(outputs) > 0:
+                frame = self.draw_boxes(img=frame, frame_id=frame_id, output=outputs)
+
+    def draw_boxes(self, img, frame_id, output, offset=(0, 0)):
+        for i, box in enumerate(output):
+            x1, y1, x2, y2, identity = [int(ii) for ii in box]
+            self.logger.add_bbox_to_frame(frame_id=frame_id,
+                                          bbox_id=identity,
+                                          top=y1,
+                                          left=x1,
+                                          width=x2 - x1,
+                                          height=y2 - y1)
+            x1 += offset[0]
+            x2 += offset[0]
+            y1 += offset[1]
+            y2 += offset[1]
+
+            # box text and bar
+            self.logger.add_label_to_bbox(frame_id=frame_id, bbox_id=identity, category='pedestrian', confidence=0.9)
+            color = compute_color_for_labels(identity)
+            label = '{}{:d}'.format("", identity)
+            t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 2, 2)[0]
+            cv2.rectangle(img, (x1, y1), (x2, y2), color, 3)
+            cv2.rectangle(img, (x1, y1), (x1 + t_size[0] + 3, y1 + t_size[1] + 4), color, -1)
+            cv2.putText(img, label, (x1, y1 + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 2, [255, 255, 255], 2)
+        return img
+
+    def save_frame(self, frame) -> None:
+        if frame is not None:
+            self.writer.write(frame)
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    cfg = get_config()
+    cfg.merge_from_file(args.config_detection)
+    cfg.merge_from_file(args.config_deepsort)
+
+    with VideoTracker(cfg, args) as vdo_trk:
+        vdo_trk.run()
diff --git a/scripts/yolov3_deepsort.sh b/scripts/yolov3_deepsort.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ed1311e8b3d51ccbdce7bff5551313d1d6a618b5
--- /dev/null
+++ b/scripts/yolov3_deepsort.sh
@@ -0,0 +1 @@
+python yolov3_deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov3.yaml
\ No newline at end of file
diff --git a/scripts/yolov3_tiny_deepsort.sh b/scripts/yolov3_tiny_deepsort.sh
new file mode 100644
index 0000000000000000000000000000000000000000..24316f178345651351c286c75b77e6583f298ba4
--- /dev/null
+++ b/scripts/yolov3_tiny_deepsort.sh
@@ -0,0 +1 @@
+python yolov3_deepsort.py [VIDEO_PATH] --config_detection ./configs/yolov3_tiny.yaml
\ No newline at end of file
diff --git a/test.png b/test.png
new file mode 100644
index 0000000000000000000000000000000000000000..f8e4685bf5bb83ceef4b099b1c427d915a361837
--- /dev/null
+++ b/test.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08b4f3b0cfbb9f8d092cbeb44f460e41ae59fc8d6e046d965750ca0af6dffab8
+size 2720895
diff --git a/test.png:Zone.Identifier b/test.png:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..c20eb4d3993dba85e2b55879ab6d0e1623015e37
--- /dev/null
+++ b/test.png:Zone.Identifier
@@ -0,0 +1,3 @@
+[ZoneTransfer]
+ZoneId=3
+HostUrl=https://chatgpt.com/
diff --git a/test_roi.py b/test_roi.py
new file mode 100644
index 0000000000000000000000000000000000000000..beb3849cc671e28ccc3264e33ec3a1bdb181adc7
--- /dev/null
+++ b/test_roi.py
@@ -0,0 +1,8 @@
+import cv2
+
+img = cv2.imread("test.png")  # any image
+cv2.imshow("img", img)
+cv2.waitKey(1)
+roi = cv2.selectROI("img", img, False, False)
+print("Selected ROI:", roi)
+cv2.destroyAllWindows()
diff --git a/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/bugs.md b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/bugs.md
new file mode 100644
index 0000000000000000000000000000000000000000..3b51cbd6a18ffbf13457c4933f432b35b2f062da
--- /dev/null
+++ b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/bugs.md
@@ -0,0
+1,36 @@ +--- +name: "🐛 Bugs" +about: Report bugs in fastreid +title: Please read & provide the following + +--- + +## Instructions To Reproduce the 🐛 Bug: + +1. what changes you made (`git diff`) or what code you wrote +``` + +``` +2. what exact command you run: +3. what you observed (including __full logs__): +``` + +``` +4. please simplify the steps as much as possible so they do not require additional resources to + run, such as a private dataset. + +## Expected behavior: + +If there are no obvious error in "what you observed" provided above, +please tell us the expected behavior. + +## Environment: + +Provide your environment information using the following command: + +``` +wget -nc -q https://github.com/facebookresearch/detectron2/raw/master/detectron2/utils/collect_env.py && python collect_env.py +``` + +If your issue looks like an installation issue / environment issue, +please first try to solve it yourself with the instructions in diff --git a/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/config.yml b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..88e5778a807a65c0c5bcf3c56c4c687572f8f155 --- /dev/null +++ b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,9 @@ +# require an issue template to be chosen +blank_issues_enabled: false + +# Unexpected behaviors & bugs are split to two templates. +# When they are one template, users think "it's not a bug" and don't choose the template. +# +# But the file name is still "unexpected-problems-bugs.md" so that old references +# to this issue template still works. +# It's ok since this template should be a superset of "bugs.md" (unexpected behaviors is a superset of bugs) \ No newline at end of file diff --git a/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/questions-help-support.md b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/questions-help-support.md new file mode 100644 index 0000000000000000000000000000000000000000..1220eb4b845221e77ad36b30f810f33897cd13d0 --- /dev/null +++ b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/questions-help-support.md @@ -0,0 +1,22 @@ +--- +name: "How to do something❓" +about: How to do something using fastreid? What does an API do? + +--- + +## ❓ How to do something using fastreid + +Describe what you want to do, including: + +1. what inputs you will provide, if any: +2. what outputs you are expecting: + +NOTE: + +1. Only general answers are provided. + If you want to ask about "why X did not work", please use the + [Unexpected behaviors](https://github.com/JDAI-CV/fast-reid/issues/new/choose) issue template. + +2. About how to implement new models / new dataloader / new training logic, etc., check documentation first. + +3. We do not answer general machine learning / computer vision questions that are not specific to fastreid, such as how a model works, how to improve your training/make it converge, or what algorithm/methods can be used to achieve X. 
diff --git a/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md new file mode 100644 index 0000000000000000000000000000000000000000..36de5b9818085797e8bf4b81caf6a40ff1d041c1 --- /dev/null +++ b/thirdparty/fast-reid/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md @@ -0,0 +1,35 @@ +--- +name: "Unexpected behaviors" +about: Run into unexpected behaviors when using fastreid +title: Please read & provide the following + +--- + +If you do not know the root cause of the problem, and wish someone to help you, please +post according to this template: + +## Instructions To Reproduce the Issue: + +Check https://stackoverflow.com/help/minimal-reproducible-example for how to ask good questions. +Simplify the steps to reproduce the issue using suggestions from the above link, and provide them below: + +1. full code you wrote or full changes you made (`git diff`) +``` + +``` +2. what exact command you run: +3. __full logs__ you observed: +``` + +``` + +## Expected behavior: + +If there are no obvious error in "what you observed" provided above, +please tell us the expected behavior. + +If you expect the model to converge / work better, note that we do not give suggestions +on how to train a new model. +Only in one of the two conditions we will help with it: +(1) You're unable to reproduce the results in fastreid model zoo. +(2) It indicates a fastreid bug. diff --git a/thirdparty/fast-reid/.gitignore b/thirdparty/fast-reid/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..41d06a514cf1589a7ae571422caaf337856e3ecb --- /dev/null +++ b/thirdparty/fast-reid/.gitignore @@ -0,0 +1,8 @@ +.idea +__pycache__ +.DS_Store +.vscode +*.so +logs/ +.ipynb_checkpoints +logs \ No newline at end of file diff --git a/thirdparty/fast-reid/LICENSE b/thirdparty/fast-reid/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f632668fe3fa3d7727567d0324975f3e5c501413 --- /dev/null +++ b/thirdparty/fast-reid/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2019 JD.com Inc. JD AI
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/thirdparty/fast-reid/README.md b/thirdparty/fast-reid/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c18e259305ff0e87b43e15fc3b93e3b2b7155abd
--- /dev/null
+++ b/thirdparty/fast-reid/README.md
@@ -0,0 +1,55 @@
+# FastReID
+
+FastReID is a research platform that implements state-of-the-art re-identification algorithms. It is a ground-up rewrite of the previous version, [reid strong baseline](https://github.com/michuanhaohao/reid-strong-baseline).
+
+## What's New
+
+- [Oct 2020] Added the [Hyper-Parameter Optimization](https://github.com/JDAI-CV/fast-reid/tree/master/projects/HPOReID) based on fastreid. See `projects/HPOReID`.
+- [Sep 2020] Added the [person attribute recognition](https://github.com/JDAI-CV/fast-reid/tree/master/projects/attribute_recognition) based on fastreid. See `projects/attribute_recognition`.
+- [Sep 2020] Automatic Mixed Precision training is supported with pytorch1.6 built-in `torch.cuda.amp`.
Set `cfg.SOLVER.AMP_ENABLED=True` to switch it on.
+- [Aug 2020] [Model Distillation](https://github.com/JDAI-CV/fast-reid/tree/master/projects/DistillReID) is supported; thanks to [guan'an wang](https://github.com/wangguanan) for the contribution.
+- [Aug 2020] ONNX/TensorRT converter is supported.
+- [Jul 2020] Distributed training with multiple GPUs is supported, which makes training much faster.
+- [Jul 2020] `MAX_ITER` in the config now means `epoch`; it is scaled to the corresponding number of iterations automatically.
+- Includes more features such as circle loss, abundant visualization methods and evaluation metrics, SoTA results on conventional, cross-domain, partial and vehicle re-id, and testing on multiple datasets simultaneously.
+- Can be used as a library to support [different projects](https://github.com/JDAI-CV/fast-reid/tree/master/projects) on top of it. We'll open source more research projects in this way.
+- Removed the [ignite](https://github.com/pytorch/ignite) (a high-level library) dependency; now powered directly by [PyTorch](https://pytorch.org/).
+
+We wrote a [Chinese blog](https://l1aoxingyu.github.io/blogpages/reid/2020/05/29/fastreid.html) about this toolbox.
+
+## Installation
+
+See [INSTALL.md](https://github.com/JDAI-CV/fast-reid/blob/master/docs/INSTALL.md).
+
+## Quick Start
+
+The designed architecture follows the [PyTorch-Project-Template](https://github.com/L1aoXingyu/PyTorch-Project-Template) guide; you can check each folder's purpose yourself.
+
+See [GETTING_STARTED.md](https://github.com/JDAI-CV/fast-reid/blob/master/docs/GETTING_STARTED.md).
+
+Learn more in our [documentation](). And see [projects/](https://github.com/JDAI-CV/fast-reid/tree/master/projects) for some projects that are built on top of fastreid.
+
+## Model Zoo and Baselines
+
+We provide a large set of baseline results and trained models available for download in the [Fastreid Model Zoo](https://github.com/JDAI-CV/fast-reid/blob/master/docs/MODEL_ZOO.md).
+
+## Deployment
+
+We provide some examples and scripts to convert fastreid models to Caffe, ONNX and TensorRT format in [Fastreid deploy](https://github.com/JDAI-CV/fast-reid/blob/master/tools/deploy).
+
+## License
+
+Fastreid is released under the [Apache 2.0 license](https://github.com/JDAI-CV/fast-reid/blob/master/LICENSE).
+
+## Citing Fastreid
+
+If you use Fastreid in your research or wish to refer to the baseline results published in the Model Zoo, please use the following BibTeX entry.
+ +```BibTeX +@article{he2020fastreid, + title={FastReID: A Pytorch Toolbox for General Instance Re-identification}, + author={He, Lingxiao and Liao, Xingyu and Liu, Wu and Liu, Xinchen and Cheng, Peng and Mei, Tao}, + journal={arXiv preprint arXiv:2006.02631}, + year={2020} +} +``` diff --git a/thirdparty/fast-reid/configs/Base-AGW.yml b/thirdparty/fast-reid/configs/Base-AGW.yml new file mode 100644 index 0000000000000000000000000000000000000000..7d5dcf56d75e4f7560298893be94e867e5a06cd4 --- /dev/null +++ b/thirdparty/fast-reid/configs/Base-AGW.yml @@ -0,0 +1,19 @@ +_BASE_: "Base-bagtricks.yml" + +MODEL: + BACKBONE: + WITH_NL: True + + HEADS: + POOL_LAYER: "gempool" + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss") + CE: + EPSILON: 0.1 + SCALE: 1.0 + + TRI: + MARGIN: 0.0 + HARD_MINING: False + SCALE: 1.0 diff --git a/thirdparty/fast-reid/configs/Base-MGN.yml b/thirdparty/fast-reid/configs/Base-MGN.yml new file mode 100644 index 0000000000000000000000000000000000000000..8c80832dd846db7a7e1624ac1877ce0a077472ce --- /dev/null +++ b/thirdparty/fast-reid/configs/Base-MGN.yml @@ -0,0 +1,25 @@ +_BASE_: "Base-Strongerbaseline.yml" + +MODEL: + META_ARCHITECTURE: 'MGN' + + FREEZE_LAYERS: ["backbone", "b1", "b2", "b3",] + + BACKBONE: + WITH_NL: False + + HEADS: + EMBEDDING_DIM: 256 + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss",) + CE: + EPSILON: 0.1 + SCALE: 1.0 + + TRI: + MARGIN: 0.0 + HARD_MINING: True + NORM_FEAT: False + SCALE: 1.0 + diff --git a/thirdparty/fast-reid/configs/Base-Strongerbaseline.yml b/thirdparty/fast-reid/configs/Base-Strongerbaseline.yml new file mode 100644 index 0000000000000000000000000000000000000000..36cc455df18af07ecbe442cd7ca3d5255a662a53 --- /dev/null +++ b/thirdparty/fast-reid/configs/Base-Strongerbaseline.yml @@ -0,0 +1,59 @@ +_BASE_: "Base-bagtricks.yml" + +MODEL: + FREEZE_LAYERS: ["backbone"] + + BACKBONE: + WITH_NL: True + + HEADS: + NECK_FEAT: "after" + POOL_LAYER: "gempoolP" + CLS_LAYER: "circleSoftmax" + SCALE: 64 + MARGIN: 0.35 + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss",) + CE: + EPSILON: 0.1 + SCALE: 1.0 + TRI: + MARGIN: 0.0 + HARD_MINING: True + NORM_FEAT: False + SCALE: 1.0 + +INPUT: + SIZE_TRAIN: [384, 128] + SIZE_TEST: [384, 128] + DO_AUTOAUG: True + +DATALOADER: + NUM_INSTANCE: 16 + +SOLVER: + OPT: "Adam" + MAX_ITER: 60 + BASE_LR: 0.00035 + BIAS_LR_FACTOR: 1. + WEIGHT_DECAY: 0.0005 + WEIGHT_DECAY_BIAS: 0.0005 + IMS_PER_BATCH: 64 + + SCHED: "WarmupCosineAnnealingLR" + DELAY_ITERS: 30 + ETA_MIN_LR: 0.00000077 + + WARMUP_FACTOR: 0.01 + WARMUP_ITERS: 10 + FREEZE_ITERS: 10 + + CHECKPOINT_PERIOD: 30 + +TEST: + EVAL_PERIOD: 30 + IMS_PER_BATCH: 128 + +CUDNN_BENCHMARK: True + diff --git a/thirdparty/fast-reid/configs/Base-bagtricks.yml b/thirdparty/fast-reid/configs/Base-bagtricks.yml new file mode 100644 index 0000000000000000000000000000000000000000..43d2af221fdd20bfe4874055075500795c5fde54 --- /dev/null +++ b/thirdparty/fast-reid/configs/Base-bagtricks.yml @@ -0,0 +1,73 @@ +MODEL: + META_ARCHITECTURE: "Baseline" + + BACKBONE: + NAME: "build_resnet_backbone" + NORM: "BN" + DEPTH: "50x" + LAST_STRIDE: 1 + FEAT_DIM: 2048 + WITH_IBN: False + PRETRAIN: True + PRETRAIN_PATH: "/export/home/lxy/.cache/torch/checkpoints/resnet50-19c8e357.pth" + + HEADS: + NAME: "EmbeddingHead" + NORM: "BN" + WITH_BNNECK: True + POOL_LAYER: "avgpool" + NECK_FEAT: "before" + CLS_LAYER: "linear" + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss",) + + CE: + EPSILON: 0.1 + SCALE: 1. + + TRI: + MARGIN: 0.3 + HARD_MINING: True + NORM_FEAT: False + SCALE: 1. 
+ +INPUT: + SIZE_TRAIN: [256, 128] + SIZE_TEST: [256, 128] + REA: + ENABLED: True + PROB: 0.5 + MEAN: [123.675, 116.28, 103.53] + DO_PAD: True + +DATALOADER: + PK_SAMPLER: True + NAIVE_WAY: True + NUM_INSTANCE: 4 + NUM_WORKERS: 8 + +SOLVER: + OPT: "Adam" + MAX_ITER: 120 + BASE_LR: 0.00035 + BIAS_LR_FACTOR: 2. + WEIGHT_DECAY: 0.0005 + WEIGHT_DECAY_BIAS: 0.0005 + IMS_PER_BATCH: 64 + + SCHED: "WarmupMultiStepLR" + STEPS: [40, 90] + GAMMA: 0.1 + + WARMUP_FACTOR: 0.01 + WARMUP_ITERS: 10 + + CHECKPOINT_PERIOD: 60 + +TEST: + EVAL_PERIOD: 30 + IMS_PER_BATCH: 128 + +CUDNN_BENCHMARK: True + diff --git a/thirdparty/fast-reid/configs/DukeMTMC/AGW_R101-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..592ba0fe804efda0d61bb0fc2aec85db6ec8715b --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/agw_R101-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..648660fbf267f4197371ea9b0703084b10af108c --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/agw_R50-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50.yml b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..c2cceb8d7b37bb9db247b7a6a2430ad0e086d6ce --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/AGW_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-AGW.yml" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/agw_R50" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/AGW_S50.yml b/thirdparty/fast-reid/configs/DukeMTMC/AGW_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..f166d6e8add172e3d5d8ad0b398d278417a3ac19 --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/AGW_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/agw_S50" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R101-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..0865131aa55fba26032bc5b6ff7789808cee0818 --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/bagtricks_R101-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..cb469295a2899ecfe8926e8433074c180d311794 --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: 
"logs/dukemtmc/bagtricks_R50-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50.yml b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..c564e99e3df9c13342e4e2594a7cc2f7c723c40a --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-bagtricks.yml" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/bagtricks_R50" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_S50.yml b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..03735e8fd7ccf5c0ab38ae54f822e4ee519eaccc --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/bagtricks_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/bagtricks_S50" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/mgn_R50-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/mgn_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..ab6bf7cdf547921891f293704d87246b0c29ba1b --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/mgn_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-MGN.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/mgn_R50-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/sbs_R101-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..6e2c93fa0ec27b657e2f0957d8c68192ff558e73 --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/sbs_R101-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50-ibn.yml b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..cbca66a2abcc2a1af12d46c8ae6ef784d71a3b5a --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/sbs_R50-ibn" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50.yml b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..b568f8dc4438eaadac99ac8a8b0b2eddc33bd6db --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/sbs_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/sbs_R50" diff --git a/thirdparty/fast-reid/configs/DukeMTMC/sbs_S50.yml b/thirdparty/fast-reid/configs/DukeMTMC/sbs_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..1074d862838b33a2a6923f7ee341801c2be9532b --- /dev/null +++ b/thirdparty/fast-reid/configs/DukeMTMC/sbs_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "logs/dukemtmc/sbs_S50" diff --git 
a/thirdparty/fast-reid/configs/MSMT17/AGW_R101-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/AGW_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..a122f6ca20ea6300a90c40cb8875b14cc8078e80 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/AGW_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/agw_R101-ibn" diff --git a/thirdparty/fast-reid/configs/MSMT17/AGW_R50-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/AGW_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..6104ed6744ffb397f38a35be8d098083aaf39d51 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/AGW_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/agw_R50-ibn" diff --git a/thirdparty/fast-reid/configs/MSMT17/AGW_R50.yml b/thirdparty/fast-reid/configs/MSMT17/AGW_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..a43e32f06a7b46ff132c13bf5bd12aaa42268ea2 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/AGW_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-AGW.yml" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/agw_R50" diff --git a/thirdparty/fast-reid/configs/MSMT17/AGW_S50.yml b/thirdparty/fast-reid/configs/MSMT17/AGW_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..8ec8ccbe888ad116da7a35db48393d6dcedcd53f --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/AGW_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/agw_S50" diff --git a/thirdparty/fast-reid/configs/MSMT17/bagtricks_R101-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..693269f05e805603da2fb39deff3d0bf89685b8c --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R101-ibn.yml @@ -0,0 +1,13 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/bagtricks_R101-ibn" + diff --git a/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..fac921e92b3bf93911b2a3acb6c524e6f6b87c44 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/bagtricks_R50-ibn" + diff --git a/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50.yml b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..313e93e28c175b71ebc24f9e27e243c8d59f0f05 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/bagtricks_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-bagtricks.yml" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/bagtricks_R50" diff --git a/thirdparty/fast-reid/configs/MSMT17/bagtricks_S50.yml b/thirdparty/fast-reid/configs/MSMT17/bagtricks_S50.yml new file mode 100644 index 
0000000000000000000000000000000000000000..b855bfd04029613dd96c1089c4b35301172bb828 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/bagtricks_S50.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/bagtricks_S50" + diff --git a/thirdparty/fast-reid/configs/MSMT17/mgn_R50-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/mgn_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..07f18ddf6b54d210c02a93b65df73813ff4b1b89 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/mgn_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-MGN.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/mgn_R50-ibn" diff --git a/thirdparty/fast-reid/configs/MSMT17/sbs_R101-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/sbs_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..f3b6b7e881eee9fe972613ce69ac580df270fcd3 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/sbs_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/sbs_R101-ibn" diff --git a/thirdparty/fast-reid/configs/MSMT17/sbs_R50-ibn.yml b/thirdparty/fast-reid/configs/MSMT17/sbs_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..d90ce4a5028a6cf99f710703dd263e27ee938321 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/sbs_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/sbs_R50-ibn" diff --git a/thirdparty/fast-reid/configs/MSMT17/sbs_R50.yml b/thirdparty/fast-reid/configs/MSMT17/sbs_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..e0ed26e43aabe05260258de49bf4aa15c759d605 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/sbs_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/sbs_R50" diff --git a/thirdparty/fast-reid/configs/MSMT17/sbs_S50.yml b/thirdparty/fast-reid/configs/MSMT17/sbs_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..e6ca1b0e5ba92edf18a50e6c89e3c0d81beaafe9 --- /dev/null +++ b/thirdparty/fast-reid/configs/MSMT17/sbs_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("MSMT17",) + TESTS: ("MSMT17",) + +OUTPUT_DIR: "logs/msmt17/sbs_S50" diff --git a/thirdparty/fast-reid/configs/Market1501/AGW_R101-ibn.yml b/thirdparty/fast-reid/configs/Market1501/AGW_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..94d3d85b27726375e967f4569c6351af1c547a4d --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/AGW_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/agw_R101-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/AGW_R50-ibn.yml b/thirdparty/fast-reid/configs/Market1501/AGW_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..4ec8154e32185f5f9305a7133f1e4e33a839fccb --- /dev/null 
+++ b/thirdparty/fast-reid/configs/Market1501/AGW_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/agw_R50-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/AGW_R50.yml b/thirdparty/fast-reid/configs/Market1501/AGW_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..99af75769dafc1a2d9fcfa3fd7a68c077550eda5 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/AGW_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-AGW.yml" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/agw_R50" diff --git a/thirdparty/fast-reid/configs/Market1501/AGW_S50.yml b/thirdparty/fast-reid/configs/Market1501/AGW_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..ff870bdcf6b1822e5b168247b1c74f742cfe7886 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/AGW_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-AGW.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/agw_S50" diff --git a/thirdparty/fast-reid/configs/Market1501/bagtricks_R101-ibn.yml b/thirdparty/fast-reid/configs/Market1501/bagtricks_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..de11e3f0dc5a3680e864b398972bccf60d5540f3 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/bagtricks_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/bagtricks_R101-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/bagtricks_R50-ibn.yml b/thirdparty/fast-reid/configs/Market1501/bagtricks_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..0a5a032b2062b1908ffe3299ae089b948654042a --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/bagtricks_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/bagtricks_R50-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/bagtricks_R50.yml b/thirdparty/fast-reid/configs/Market1501/bagtricks_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..d814d8c5e35e318d50ea96c0a5fbefd0ec00b9f6 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/bagtricks_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-bagtricks.yml" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/bagtricks_R50" diff --git a/thirdparty/fast-reid/configs/Market1501/bagtricks_S50.yml b/thirdparty/fast-reid/configs/Market1501/bagtricks_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..69f51c7a7adb991e3c14dd6dd3e495e8162f4c48 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/bagtricks_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-bagtricks.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/bagtricks_S50" diff --git a/thirdparty/fast-reid/configs/Market1501/mgn_R50-ibn.yml b/thirdparty/fast-reid/configs/Market1501/mgn_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..2444c4447ef051fc05d8edc46fdd3286e4259e51 --- 
/dev/null +++ b/thirdparty/fast-reid/configs/Market1501/mgn_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-MGN.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/mgn_R50-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/sbs_R101-ibn.yml b/thirdparty/fast-reid/configs/Market1501/sbs_R101-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..13b9df039bf415a17ac1a67f2562eac6b957d35d --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/sbs_R101-ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/sbs_R101-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/sbs_R50-ibn.yml b/thirdparty/fast-reid/configs/Market1501/sbs_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..7302591e0073ee9d1c35f7ddcdefd8ea3e30e83c --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/sbs_R50-ibn.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + WITH_IBN: True + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/sbs_R50-ibn" diff --git a/thirdparty/fast-reid/configs/Market1501/sbs_R50.yml b/thirdparty/fast-reid/configs/Market1501/sbs_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..2f50564ba5a6fe2e8931309ad631f0df9c26aec7 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/sbs_R50.yml @@ -0,0 +1,7 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/sbs_R50" diff --git a/thirdparty/fast-reid/configs/Market1501/sbs_S50.yml b/thirdparty/fast-reid/configs/Market1501/sbs_S50.yml new file mode 100644 index 0000000000000000000000000000000000000000..332d1566d2af5bfe29911ac42768da69eadaaca6 --- /dev/null +++ b/thirdparty/fast-reid/configs/Market1501/sbs_S50.yml @@ -0,0 +1,11 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + NAME: "build_resnest_backbone" + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("Market1501",) + +OUTPUT_DIR: "logs/market1501/sbs_S50" diff --git a/thirdparty/fast-reid/configs/VERIWild/bagtricks_R50-ibn.yml b/thirdparty/fast-reid/configs/VERIWild/bagtricks_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..20fc87782bea381292e0ad258dce6b1492f4fa76 --- /dev/null +++ b/thirdparty/fast-reid/configs/VERIWild/bagtricks_R50-ibn.yml @@ -0,0 +1,33 @@ +_BASE_: "../Base-bagtricks.yml" + +INPUT: + SIZE_TRAIN: [256, 256] + SIZE_TEST: [256, 256] + +MODEL: + BACKBONE: + WITH_IBN: True + HEADS: + POOL_LAYER: gempool + LOSSES: + TRI: + HARD_MINING: False + MARGIN: 0.0 + +DATASETS: + NAMES: ("VeRiWild",) + TESTS: ("SmallVeRiWild", "MediumVeRiWild", "LargeVeRiWild",) + +SOLVER: + IMS_PER_BATCH: 128 + MAX_ITER: 60 + STEPS: [30, 50] + WARMUP_ITERS: 10 + + CHECKPOINT_PERIOD: 20 + +TEST: + EVAL_PERIOD: 20 + IMS_PER_BATCH: 128 + +OUTPUT_DIR: "logs/veriwild/bagtricks_R50-ibn_4gpu" diff --git a/thirdparty/fast-reid/configs/VeRi/sbs_R50-ibn.yml b/thirdparty/fast-reid/configs/VeRi/sbs_R50-ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..5324cde4379da9a63ce797c5bf25ee74722fb181 --- /dev/null +++ b/thirdparty/fast-reid/configs/VeRi/sbs_R50-ibn.yml @@ -0,0 +1,32 @@ +_BASE_: "../Base-Strongerbaseline.yml" + +INPUT: + SIZE_TRAIN: [256, 256] + 
SIZE_TEST: [256, 256]
+
+MODEL:
+  BACKBONE:
+    WITH_IBN: True
+
+SOLVER:
+  OPT: "SGD"
+  BASE_LR: 0.01
+  ETA_MIN_LR: 7.7e-5
+
+  IMS_PER_BATCH: 64
+  MAX_ITER: 60
+  DELAY_ITERS: 30
+  WARMUP_ITERS: 10
+  FREEZE_ITERS: 10
+
+  CHECKPOINT_PERIOD: 20
+
+DATASETS:
+  NAMES: ("VeRi",)
+  TESTS: ("VeRi",)
+
+TEST:
+  EVAL_PERIOD: 20
+  IMS_PER_BATCH: 128
+
+OUTPUT_DIR: "logs/veri/sbs_R50-ibn"
diff --git a/thirdparty/fast-reid/configs/VehicleID/bagtricks_R50-ibn.yml b/thirdparty/fast-reid/configs/VehicleID/bagtricks_R50-ibn.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0020c4f5bfe2bc8b4515a51eeb045c9ad615deaa
--- /dev/null
+++ b/thirdparty/fast-reid/configs/VehicleID/bagtricks_R50-ibn.yml
@@ -0,0 +1,35 @@
+_BASE_: "../Base-bagtricks.yml"
+
+INPUT:
+  SIZE_TRAIN: [256, 256]
+  SIZE_TEST: [256, 256]
+
+MODEL:
+  BACKBONE:
+    WITH_IBN: True
+  HEADS:
+    POOL_LAYER: gempool
+  LOSSES:
+    TRI:
+      HARD_MINING: False
+      MARGIN: 0.0
+
+DATASETS:
+  NAMES: ("VehicleID",)
+  TESTS: ("SmallVehicleID", "MediumVehicleID", "LargeVehicleID",)
+
+SOLVER:
+  BIAS_LR_FACTOR: 1.
+
+  IMS_PER_BATCH: 512
+  MAX_ITER: 60
+  STEPS: [30, 50]
+  WARMUP_ITERS: 10
+
+  CHECKPOINT_PERIOD: 20
+
+TEST:
+  EVAL_PERIOD: 20
+  IMS_PER_BATCH: 128
+
+OUTPUT_DIR: "logs/vehicleid/bagtricks_R50-ibn_4gpu"
diff --git a/thirdparty/fast-reid/datasets/README.md b/thirdparty/fast-reid/datasets/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f89014498d3312ff3f8c087a7f569128463ce593
--- /dev/null
+++ b/thirdparty/fast-reid/datasets/README.md
@@ -0,0 +1,43 @@
+# Setup Builtin Datasets
+
+Fastreid has builtin support for a few datasets. The datasets are assumed to exist in a directory specified by the environment variable `FASTREID_DATASETS`. Under this directory, fastreid expects to find the datasets in the structure described below.
+
+You can set the location for builtin datasets with `export FASTREID_DATASETS=/path/to/datasets/`. If left unset, the default is `datasets/` relative to your current working directory.
+
+The [model zoo](https://github.com/JDAI-CV/fast-reid/blob/master/MODEL_ZOO.md) contains configs and models that use these builtin datasets.
+
+## Expected dataset structure for Market1501
+
+1. Download the dataset to `datasets/` from [Baidu Pan](https://pan.baidu.com/s/1ntIi2Op) or [Google Drive](https://drive.google.com/file/d/0B8-rUzbwVRk0c054eEozWG9COHM/view)
+2. Extract the dataset. The dataset structure should look like:
+
+```bash
+datasets/
+    Market-1501-v15.09.15/
+        bounding_box_test/
+        bounding_box_train/
+```
+
+## Expected dataset structure for DukeMTMC
+
+1. Download the dataset to `datasets/`
+2. Extract the dataset. The dataset structure should look like:
+
+```bash
+datasets/
+    DukeMTMC-reID/
+        bounding_box_train/
+        bounding_box_test/
+```
+
+## Expected dataset structure for MSMT17
+
+1. Download the dataset to `datasets/`
+2. Extract the dataset. The dataset structure should look like:
+
+```bash
+datasets/
+    MSMT17_V2/
+        mask_train_v2/
+        mask_test_v2/
+```
diff --git a/thirdparty/fast-reid/demo/README.md b/thirdparty/fast-reid/demo/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..572e51f2ece16b5d711c4f2cd811467551584414
--- /dev/null
+++ b/thirdparty/fast-reid/demo/README.md
@@ -0,0 +1,10 @@
+# FastReID Demo
+
+We provide a command line tool to run a simple demo of builtin models.
+
+You can run this command to get cosine similarities between different images:
+
+```bash
+cd demo/
+sh run_demo.sh
+```
\ No newline at end of file
diff --git a/thirdparty/fast-reid/demo/demo.py b/thirdparty/fast-reid/demo/demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3ba9df80dae2ac1dfea0b2bb1e0152ff67a2975
--- /dev/null
+++ b/thirdparty/fast-reid/demo/demo.py
@@ -0,0 +1,87 @@
+# encoding: utf-8
+"""
+@author: liaoxingyu
+@contact: sherlockliao01@gmail.com
+"""
+
+import argparse
+import glob
+import os
+import sys
+
+import cv2
+import numpy as np
+import tqdm
+from torch.backends import cudnn
+
+sys.path.append('..')
+
+from fastreid.config import get_cfg
+from fastreid.utils.logger import setup_logger
+from fastreid.utils.file_io import PathManager
+
+from predictor import FeatureExtractionDemo
+# import modules added in projects like the example below
+# from projects.PartialReID.partialreid import *
+
+cudnn.benchmark = True
+setup_logger(name="fastreid")
+
+
+def setup_cfg(args):
+    # load config from file and command-line arguments
+    cfg = get_cfg()
+    # add_partialreid_config(cfg)
+    cfg.merge_from_file(args.config_file)
+    cfg.merge_from_list(args.opts)
+    cfg.freeze()
+    return cfg
+
+
+def get_parser():
+    parser = argparse.ArgumentParser(description="Feature extraction with reid models")
+    parser.add_argument(
+        "--config-file",
+        metavar="FILE",
+        help="path to config file",
+    )
+    parser.add_argument(
+        "--parallel",
+        action='store_true',
+        help='whether to use multiprocessing for feature extraction.'
+    )
+    parser.add_argument(
+        "--input",
+        nargs="+",
+        help="A list of space separated input images; "
+             "or a single glob pattern such as 'directory/*.jpg'",
+    )
+    parser.add_argument(
+        "--output",
+        default='demo_output',
+        help='path to save features'
+    )
+    parser.add_argument(
+        "--opts",
+        help="Modify config options using the command-line 'KEY VALUE' pairs",
+        default=[],
+        nargs=argparse.REMAINDER,
+    )
+    return parser
+
+
+if __name__ == '__main__':
+    args = get_parser().parse_args()
+    cfg = setup_cfg(args)
+    demo = FeatureExtractionDemo(cfg, parallel=args.parallel)
+
+    PathManager.mkdirs(args.output)
+    if args.input:
+        if PathManager.isdir(args.input[0]):
+            args.input = glob.glob(os.path.expanduser(args.input[0]))
+            assert args.input, "The input path(s) was not found"
+        for path in tqdm.tqdm(args.input):
+            img = cv2.imread(path)
+            feat = demo.run_on_image(img)
+            feat = feat.numpy()
+            np.save(os.path.join(args.output, path.replace('.jpg', '.npy').split('/')[-1]), feat)
diff --git a/thirdparty/fast-reid/demo/plot_roc_with_pickle.py b/thirdparty/fast-reid/demo/plot_roc_with_pickle.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4e9aadbb606eea6a05e695bfcc7caf551aade96
--- /dev/null
+++ b/thirdparty/fast-reid/demo/plot_roc_with_pickle.py
@@ -0,0 +1,23 @@
+# encoding: utf-8
+"""
+@author: xingyu liao
+@contact: sherlockliao01@gmail.com
+"""
+
+import matplotlib.pyplot as plt
+import sys
+
+sys.path.append('.')
+from fastreid.utils.visualizer import Visualizer
+
+if __name__ == "__main__":
+    baseline_res = Visualizer.load_roc_info("logs/duke_vis/roc_info.pickle")
+    mgn_res = Visualizer.load_roc_info("logs/mgn_duke_vis/roc_info.pickle")
+
+    fig = Visualizer.plot_roc_curve(baseline_res['fpr'], baseline_res['tpr'], name='baseline')
+    Visualizer.plot_roc_curve(mgn_res['fpr'], mgn_res['tpr'], name='mgn', fig=fig)
+    plt.savefig('roc.jpg')
+
+    fig = Visualizer.plot_distribution(baseline_res['pos'], baseline_res['neg'], name='baseline')
+    Visualizer.plot_distribution(mgn_res['pos'], mgn_res['neg'], name='mgn', fig=fig)
+    plt.savefig('dist.jpg')
diff --git a/thirdparty/fast-reid/demo/predictor.py b/thirdparty/fast-reid/demo/predictor.py
new file mode 100644
index 0000000000000000000000000000000000000000..7acb7e3ca2477668e43bb5185f36d7053a4b1a36
--- /dev/null
+++ b/thirdparty/fast-reid/demo/predictor.py
@@ -0,0 +1,174 @@
+# encoding: utf-8
+"""
+@author: xingyu liao
+@contact: sherlockliao01@gmail.com
+"""
+
+import atexit
+import bisect
+
+import cv2
+import torch
+import torch.multiprocessing as mp
+from collections import deque
+
+from fastreid.engine import DefaultPredictor
+
+try:
+    mp.set_start_method('spawn')
+except RuntimeError:
+    pass
+
+
+class FeatureExtractionDemo(object):
+    def __init__(self, cfg, parallel=False):
+        """
+        Args:
+            cfg (CfgNode):
+            parallel (bool): whether to run the model in separate processes from visualization.
+                Useful since the visualization logic can be slow.
+        """
+        self.cfg = cfg
+        self.parallel = parallel
+
+        if parallel:
+            self.num_gpus = torch.cuda.device_count()
+            self.predictor = AsyncPredictor(cfg, self.num_gpus)
+        else:
+            self.predictor = DefaultPredictor(cfg)
+
+    def run_on_image(self, original_image):
+        """
+        Args:
+            original_image (np.ndarray): an image of shape (H, W, C) (in BGR order).
+                This is the format used by OpenCV.
+        Returns:
+            predictions (np.ndarray): normalized feature of the model.
+        """
+        # the model expects RGB inputs
+        original_image = original_image[:, :, ::-1]
+        # Apply pre-processing to the image.
+        image = cv2.resize(original_image, tuple(self.cfg.INPUT.SIZE_TEST[::-1]), interpolation=cv2.INTER_CUBIC)
+        # Add a batch dimension so the shape matches the network input.
+        image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1))[None]
+        predictions = self.predictor(image)
+        return predictions
+
+    def run_on_loader(self, data_loader):
+        if self.parallel:
+            buffer_size = self.predictor.default_buffer_size
+
+            batch_data = deque()
+
+            for cnt, batch in enumerate(data_loader):
+                batch_data.append(batch)
+                self.predictor.put(batch["images"])
+
+                if cnt >= buffer_size:
+                    batch = batch_data.popleft()
+                    predictions = self.predictor.get()
+                    yield predictions, batch["targets"].numpy(), batch["camids"].numpy()
+
+            while len(batch_data):
+                batch = batch_data.popleft()
+                predictions = self.predictor.get()
+                yield predictions, batch["targets"].numpy(), batch["camids"].numpy()
+        else:
+            for batch in data_loader:
+                predictions = self.predictor(batch["images"])
+                yield predictions, batch["targets"].numpy(), batch["camids"].numpy()
+
+
+class AsyncPredictor:
+    """
+    A predictor that runs the model asynchronously, possibly on more than one GPU.
+    Useful when the amount of data is large.
+ """ + + class _StopToken: + pass + + class _PredictWorker(mp.Process): + def __init__(self, cfg, task_queue, result_queue): + self.cfg = cfg + self.task_queue = task_queue + self.result_queue = result_queue + super().__init__() + + def run(self): + predictor = DefaultPredictor(self.cfg) + + while True: + task = self.task_queue.get() + if isinstance(task, AsyncPredictor._StopToken): + break + idx, data = task + result = predictor(data) + self.result_queue.put((idx, result)) + + def __init__(self, cfg, num_gpus: int = 1): + """ + + Args: + cfg (CfgNode): + num_gpus (int): if 0, will run on CPU + """ + num_workers = max(num_gpus, 1) + self.task_queue = mp.Queue(maxsize=num_workers * 3) + self.result_queue = mp.Queue(maxsize=num_workers * 3) + self.procs = [] + for gpuid in range(max(num_gpus, 1)): + cfg = cfg.clone() + cfg.defrost() + cfg.MODEL.DEVICE = "cuda:{}".format(gpuid) if num_gpus > 0 else "cpu" + self.procs.append( + AsyncPredictor._PredictWorker(cfg, self.task_queue, self.result_queue) + ) + + self.put_idx = 0 + self.get_idx = 0 + self.result_rank = [] + self.result_data = [] + + for p in self.procs: + p.start() + + atexit.register(self.shutdown) + + def put(self, image): + self.put_idx += 1 + self.task_queue.put((self.put_idx, image)) + + def get(self): + self.get_idx += 1 + if len(self.result_rank) and self.result_rank[0] == self.get_idx: + res = self.result_data[0] + del self.result_data[0], self.result_rank[0] + return res + + while True: + # Make sure the results are returned in the correct order + idx, res = self.result_queue.get() + if idx == self.get_idx: + return res + insert = bisect.bisect(self.result_rank, idx) + self.result_rank.insert(insert, idx) + self.result_data.insert(insert, res) + + def __len__(self): + return self.put_idx - self.get_idx + + def __call__(self, image): + self.put(image) + return self.get() + + def shutdown(self): + for _ in self.procs: + self.task_queue.put(AsyncPredictor._StopToken()) + + @property + def default_buffer_size(self): + return len(self.procs) * 5 diff --git a/thirdparty/fast-reid/demo/run_demo.sh b/thirdparty/fast-reid/demo/run_demo.sh new file mode 100644 index 0000000000000000000000000000000000000000..6f3f28078f335efebe54b03c06f7b3eeca47922b --- /dev/null +++ b/thirdparty/fast-reid/demo/run_demo.sh @@ -0,0 +1,3 @@ +python demo/visualize_result.py --config-file logs/dukemtmc/mgn_R50-ibn/config.yaml \ +--parallel --vis-label --dataset-name 'DukeMTMC' --output logs/mgn_duke_vis \ +--opts MODEL.WEIGHTS logs/dukemtmc/mgn_R50-ibn/model_final.pth diff --git a/thirdparty/fast-reid/demo/visualize_result.py b/thirdparty/fast-reid/demo/visualize_result.py new file mode 100644 index 0000000000000000000000000000000000000000..c86f7e10d8de1305c349646650d8018177012064 --- /dev/null +++ b/thirdparty/fast-reid/demo/visualize_result.py @@ -0,0 +1,142 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import argparse +import logging +import sys + +import numpy as np +import torch +import tqdm +from torch.backends import cudnn + +sys.path.append('.') + +from fastreid.evaluation import evaluate_rank +from fastreid.config import get_cfg +from fastreid.utils.logger import setup_logger +from fastreid.data import build_reid_test_loader +from predictor import FeatureExtractionDemo +from fastreid.utils.visualizer import Visualizer + +# import some modules added in project +# for example, add partial reid like this below +# from projects.PartialReID.partialreid import * + +cudnn.benchmark = True 
+setup_logger(name="fastreid")
+
+logger = logging.getLogger('fastreid.visualize_result')
+
+
+def setup_cfg(args):
+    # load config from file and command-line arguments
+    cfg = get_cfg()
+    cfg.merge_from_file(args.config_file)
+    cfg.merge_from_list(args.opts)
+    cfg.freeze()
+    return cfg
+
+
+def get_parser():
+    parser = argparse.ArgumentParser(description="Feature extraction with reid models")
+    parser.add_argument(
+        "--config-file",
+        metavar="FILE",
+        help="path to config file",
+    )
+    parser.add_argument(
+        '--parallel',
+        action='store_true',
+        help='whether to use multiprocessing for feature extraction.'
+    )
+    parser.add_argument(
+        "--dataset-name",
+        help="a test dataset name for visualizing the ranking list."
+    )
+    parser.add_argument(
+        "--output",
+        default="./vis_rank_list",
+        help="a file or directory to save the ranking list result.",
+    )
+    parser.add_argument(
+        "--vis-label",
+        action='store_true',
+        help="whether to visualize the label of each query instance"
+    )
+    parser.add_argument(
+        "--num-vis",
+        default=100,
+        type=int,
+        help="number of query images to be visualized",
+    )
+    parser.add_argument(
+        "--rank-sort",
+        default="ascending",
+        help="rank order of visualization images by AP metric",
+    )
+    parser.add_argument(
+        "--label-sort",
+        default="ascending",
+        help="label order of visualization images by cosine similarity metric",
+    )
+    parser.add_argument(
+        "--max-rank",
+        default=10,
+        type=int,
+        help="maximum number of rank list entries to be visualized",
+    )
+    parser.add_argument(
+        "--opts",
+        help="Modify config options using the command-line 'KEY VALUE' pairs",
+        default=[],
+        nargs=argparse.REMAINDER,
+    )
+    return parser
+
+
+if __name__ == '__main__':
+    args = get_parser().parse_args()
+    cfg = setup_cfg(args)
+    test_loader, num_query = build_reid_test_loader(cfg, args.dataset_name)
+    demo = FeatureExtractionDemo(cfg, parallel=args.parallel)
+
+    logger.info("Start extracting image features")
+    feats = []
+    pids = []
+    camids = []
+    for (feat, pid, camid) in tqdm.tqdm(demo.run_on_loader(test_loader), total=len(test_loader)):
+        feats.append(feat)
+        pids.extend(pid)
+        camids.extend(camid)
+
+    feats = torch.cat(feats, dim=0)
+    q_feat = feats[:num_query]
+    g_feat = feats[num_query:]
+    q_pids = np.asarray(pids[:num_query])
+    g_pids = np.asarray(pids[num_query:])
+    q_camids = np.asarray(camids[:num_query])
+    g_camids = np.asarray(camids[num_query:])
+
+    # compute cosine distance
+    distmat = 1 - torch.mm(q_feat, g_feat.t())
+    distmat = distmat.numpy()
+
+    logger.info("Computing APs for all query images ...")
+    cmc, all_ap, all_inp = evaluate_rank(distmat, q_pids, g_pids, q_camids, g_camids)
+    logger.info("Finish computing APs for all query images!")
+
+    visualizer = Visualizer(test_loader.dataset)
+    visualizer.get_model_output(all_ap, distmat, q_pids, g_pids, q_camids, g_camids)
+
+    logger.info("Start saving ROC curve ...")
+    fpr, tpr, pos, neg = visualizer.vis_roc_curve(args.output)
+    visualizer.save_roc_info(args.output, fpr, tpr, pos, neg)
+    logger.info("Finish saving ROC curve!")
+
+    logger.info("Saving rank list result ...")
+    query_indices = visualizer.vis_rank_list(args.output, args.vis_label, args.num_vis,
+                                             args.rank_sort, args.label_sort, args.max_rank)
+    logger.info("Finish saving rank list results!")
diff --git a/thirdparty/fast-reid/docs/GETTING_STARTED.md b/thirdparty/fast-reid/docs/GETTING_STARTED.md
new file mode 100644
index 0000000000000000000000000000000000000000..800441ad154f4195491ccc0572387d69a4b7d583
--- /dev/null
+++ b/thirdparty/fast-reid/docs/GETTING_STARTED.md
@@ -0,0 +1,42 @@
+# Getting Started with Fastreid
+
+## Prepare pretrained models
+
+If you use a backbone supported by fastreid, you do not need to do anything: the pretrained model will be downloaded automatically. If you have no network connection, you can download the pretrained models manually and put them in `~/.cache/torch/checkpoints`.
+
+If you want to use other pretrained models, such as a MoCo pretrained model, download them yourself and set the pretrained model path in `configs/Base-bagtricks.yml`.
+
+## Compile with Cython to accelerate evaluation
+
+```bash
+cd fastreid/evaluation/rank_cylib; make all
+```
+
+## Training & Evaluation in Command Line
+
+We provide a script, `tools/train_net.py`, that trains all the configs provided in fastreid. You may want to use it as a reference to write your own training script.
+
+To train a model with `train_net.py`, first set up the corresponding datasets following [datasets/README.md](https://github.com/JDAI-CV/fast-reid/tree/master/datasets), then run:
+
+```bash
+./tools/train_net.py --config-file ./configs/Market1501/bagtricks_R50.yml MODEL.DEVICE "cuda:0"
+```
+
+The configs are made for 1-GPU training.
+
+If you want to train a model with 4 GPUs, you can run:
+
+```bash
+python tools/train_net.py --config-file ./configs/Market1501/bagtricks_R50.yml --num-gpus 4
+```
+
+To evaluate a model's performance, use:
+
+```bash
+python tools/train_net.py --config-file ./configs/Market1501/bagtricks_R50.yml --eval-only \
+MODEL.WEIGHTS /path/to/checkpoint_file MODEL.DEVICE "cuda:0"
+```
+
+For more options, see `./tools/train_net.py -h`.
diff --git a/thirdparty/fast-reid/docs/INSTALL.md b/thirdparty/fast-reid/docs/INSTALL.md
new file mode 100644
index 0000000000000000000000000000000000000000..d7045d7ab6a58a6c71ed18ff9c0674f675573ba1
--- /dev/null
+++ b/thirdparty/fast-reid/docs/INSTALL.md
@@ -0,0 +1,28 @@
+# Installation
+
+## Requirements
+
+- Linux or macOS with Python ≥ 3.6
+- PyTorch ≥ 1.6
+- torchvision that matches the PyTorch installation. You can install them together at [pytorch.org](https://pytorch.org/) to make sure of this.
+- [yacs](https://github.com/rbgirshick/yacs)
+- Cython (optional, to compile the evaluation code)
+- tensorboard (needed for visualization): `pip install tensorboard`
+- gdown (for automatically downloading pretrained models)
+- sklearn
+- termcolor
+- tabulate
+- [faiss](https://github.com/facebookresearch/faiss) `pip install faiss-cpu`
+
+## Set up with Conda
+
+```shell script
+conda create -n fastreid python=3.7
+conda activate fastreid
+conda install pytorch==1.6.0 torchvision tensorboard -c pytorch
+pip install -r requirements
+```
+
+## Set up with Docker
+
+Coming soon.
diff --git a/thirdparty/fast-reid/docs/MODEL_ZOO.md b/thirdparty/fast-reid/docs/MODEL_ZOO.md
new file mode 100644
index 0000000000000000000000000000000000000000..d1bf509e504df2755634b9d6383d993ac3981ad7
--- /dev/null
+++ b/thirdparty/fast-reid/docs/MODEL_ZOO.md
@@ -0,0 +1,247 @@
+# FastReID Model Zoo and Baselines
+
+## Introduction
+
+This file documents a collection of baselines trained with fastreid. All numbers were obtained with 1 NVIDIA P40 GPU; the software in use was PyTorch 1.4 with CUDA 10.1.
+
+In addition to these official baseline models, you can find more models in [projects/](https://github.com/JDAI-CV/fast-reid/tree/master/projects).
+
+### How to Read the Tables
+
+- The "Name" column contains a link to the config file.
+Running `tools/train_net.py` with this config file and 1 GPU will reproduce the model, as in the sketch below.
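+
+For example, a minimal sketch for reproducing the Market1501 BoT(R50) baseline (assuming the dataset has been set up as described in [datasets/README.md](https://github.com/JDAI-CV/fast-reid/tree/master/datasets)):
+
+```bash
+python tools/train_net.py --config-file ./configs/Market1501/bagtricks_R50.yml MODEL.DEVICE "cuda:0"
+```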
+ +### Common Settings for all Person reid models + +**BoT**: + +[Bag of Tricks and A Strong Baseline for Deep Person Re-identification](http://openaccess.thecvf.com/content_CVPRW_2019/papers/TRMTMCT/Luo_Bag_of_Tricks_and_a_Strong_Baseline_for_Deep_Person_CVPRW_2019_paper.pdf). CVPRW2019, Oral. + +**AGW**: + +[ReID-Survey with a Powerful AGW Baseline](https://github.com/mangye16/ReID-Survey). + +**MGN**: + +[Learning Discriminative Features with Multiple Granularities for Person Re-Identification](https://arxiv.org/abs/1804.01438v1) + +**SBS**: + +stronger baseline on top of BoT: + +Bag of Freebies(BoF): + +1. Circle loss +2. Freeze backbone training +3. Cutout data augmentation & Auto Augmentation +4. Cosine annealing learning rate decay +5. Soft margin triplet loss + +Bag of Specials(BoS): + +1. Non-local block +2. GeM pooling + +### Market1501 Baselines + +**BoT**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---: | +| [BoT(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/bagtricks_R50.yml) | ImageNet | 94.4% | 86.1% | 59.4% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_bot_R50.pth) | +| [BoT(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/bagtricks_R50-ibn.yml) | ImageNet | 94.9% | 87.6% | 64.1% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_bot_R50-ibn.pth) | +| [BoT(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/bagtricks_S50.yml) | ImageNet | 95.2% | 88.7% | 66.9% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_bot_S50.pth) | +| [BoT(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/bagtricks_R101-ibn.yml) | ImageNet| 95.4% | 88.9% | 67.4% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_bot_R101-ibn.pth) | + +**AGW**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: |:---: | +| [AGW(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/AGW_R50.yml) | ImageNet | 95.3% | 88.2% | 66.3% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_agw_R50.pth) | +| [AGW(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/AGW_R50-ibn.yml) | ImageNet | 95.1% | 88.7% | 67.1% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_agw_R50-ibn.pth) | +| [AGW(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/AGW_S50.yml) | ImageNet | 95.3% | 89.3% | 68.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_agw_S50.pth) | +| [AGW(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/AGW_R101-ibn.yml) | ImageNet | 95.5% | 89.5% | 69.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_agw_R101-ibn.pth) | + +**SBS**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: |:---:| +| [SBS(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/sbs_R50.yml) | ImageNet | 95.4% | 88.2% | 64.8% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_sbs_R50.pth) | +| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/sbs_R50-ibn.yml) | ImageNet | 95.7% | 89.3% | 67.5% | 
[model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_sbs_R50-ibn.pth) | +| [SBS(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/sbs_S50.yml) | ImageNet | 95.8% | 89.4% | 67.6% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_sbs_S50.pth) | +| [SBS(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/sbs_R101-ibn.yml) | ImageNet | 96.3% | 90.3% | 70.0% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_sbs_R101-ibn.pth) | + +**MGN**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/Market1501/mgn_R50-ibn.yml) | ImageNet | 95.8% | 89.8% | 67.7% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/market_mgn_R50-ibn.pth) | + +### DukeMTMC Baseline + +**BoT**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---: | +| [BoT(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/bagtricks_R50.yml) | ImageNet | 87.2% | 77.0% | 42.1% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_bot_R50.pth) | +| [BoT(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/bagtricks_R50-ibn.yml) | ImageNet | 89.3% | 79.6% | 45.2% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_bot_R50-ibn.pth) | +| [BoT(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/bagtricks_S50.yml) | ImageNet | 90.0% | 80.13% | 45.8% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_bot_S50.pth) | +| [BoT(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/bagtricks_R101-ibn.yml) | ImageNet| 91.2% | 81.2% | 47.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_bot_R101-ibn.pth) | + +**AGW**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [AGW(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/AGW_R50.yml) | ImageNet | 89.0% | 79.9% | 46.1% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_agw_R50.pth) | +| [AGW(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/AGW_R50-ibn.yml) | ImageNet | 90.5% | 80.8% | 47.6% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_agw_R50-ibn.pth) | +| [AGW(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/AGW_S50.yml) | ImageNet | 90.9% | 82.4% | 49.2% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_agw_S50.pth) | +| [AGW(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/AGW_R101-ibn.yml) | ImageNet | 91.7% | 82.3% | 50.0% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_agw_R101-ibn.pth) | + +**SBS**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [SBS(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/sbs_R50.yml) | ImageNet | 90.3% | 80.3% | 46.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_sbs_R50.pth) | +| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/sbs_R50-ibn.yml) | ImageNet | 90.8% | 81.2% | 47.0% | 
[model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_sbs_R50-ibn.pth) | +| [SBS(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/sbs_S50.yml) | ImageNet | 91.0% | 81.4% | 47.6% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_sbs_S50.pth) | +| [SBS(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/sbs_R101-ibn.yml) | ImageNet | 91.9% | 83.6% | 51.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_sbs_R101-ibn.pth) | + +**MGN**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/DukeMTMC/mgn_R50-ibn.yml) | ImageNet | 91.1% | 82.0% | 46.8% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/duke_mgn_R50-ibn.pth) | + +### MSMT17 Baseline + +**BoT**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [BoT(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/bagtricks_R50.yml) | ImageNet | 74.1% | 50.2% | 10.4% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_bot_R50.pth) | +| [BoT(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/bagtricks_R50-ibn.yml) | ImageNet | 77.0% | 54.4% | 12.5% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_bot_R50-ibn.pth) | +| [BoT(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/bagtricks_S50.yml) | ImageNet | 80.8% | 59.9% | 16.3% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_bot_S50.pth) | +| [BoT(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/bagtricks_R101-ibn.yml) | ImageNet| 81.0% | 59.4% | 15.6% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_bot_R101-ibn.pth) | + +**AGW**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [AGW(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/AGW_R50.yml) | ImageNet | 78.3% | 55.6% | 12.9% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_agw_R50.pth) | +| [AGW(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/AGW_R50-ibn.yml) | ImageNet | 81.2% | 59.7% | 15.3% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_agw_R50-ibn.pth) | +| [AGW(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/AGW_S50.yml) | ImageNet | 82.6% | 62.6% | 17.7% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_agw_S50.pth) | +| [AGW(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/AGW_R101-ibn.yml) | ImageNet | 82.0% | 61.4% | 17.3% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_agw_R101-ibn.pth) | + +**SBS**: + +| Method | Pretrained | Rank@1 | mAP | mINP | download | +| :---: | :---: | :---: |:---: | :---: | :---:| +| [SBS(R50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/sbs_R50.yml) | ImageNet | 81.8% | 58.4% | 13.9% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_sbs_R50.pth) | +| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/sbs_R50-ibn.yml) | ImageNet | 83.9% | 60.6% | 15.2% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_sbs_R50-ibn.pth) 
|
+| [SBS(S50)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/sbs_S50.yml) | ImageNet | 84.1% | 61.7% | 15.2% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_sbs_S50.pth) |
+| [SBS(R101-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/sbs_R101-ibn.yml) | ImageNet | 84.8% | 62.8% | 16.3% | [model](https://github.com/JDAI-CV/fast-reid/releases/download/v0.1.1/msmt_sbs_R101-ibn.pth) |
+
+**MGN**:
+
+| Method | Pretrained | Rank@1 | mAP | mINP | download |
+| :---: | :---: | :---: | :---: | :---: | :---: |
+| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/MSMT17/mgn_R50-ibn.yml) | ImageNet | 85.1% | 65.4% | 18.4% | - |
+
+### VeRi Baseline
+
+**SBS**:
+
+| Method | Pretrained | Rank@1 | mAP | mINP | download |
+| :---: | :---: | :---: | :---: | :---: | :---: |
+| [SBS(R50-ibn)](https://github.com/JDAI-CV/fast-reid/blob/master/configs/VeRi/sbs_R50-ibn.yml) | ImageNet | 97.0% | 81.9% | 46.3% | - |
+
+### VehicleID Baseline
+
+**BoT**:
+Test protocol: 10-fold cross-validation; trained on 4 NVIDIA P40 GPUs.
+
+| Method | Pretrained | Small (Rank@1 / Rank@5) | Medium (Rank@1 / Rank@5) | Large (Rank@1 / Rank@5) | download |
+| :---: | :---: | :---: | :---: | :---: | :---: |
+| BoT(R50-ibn) | ImageNet | 86.6% / 97.9% | 82.9% / 96.0% | 80.6% / 93.9% | - |
+
+### VERI-Wild Baseline
+
+**BoT**:
+Test protocol: Trained on 4 NVIDIA P40 GPUs.
+
MethodPretrainedTestset sizedownload
SmallMediumLarge
Rank@1mAPmINPRank@1mAPmINPRank@1mAPmINP
BoT(R50-ibn)ImageNet96.4%87.7%69.2%95.1%83.5%61.2%92.5%77.3%49.8%-
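+
+To consume any of the checkpoints above, the config system vendored below (`fastreid/config/config.py`) can resolve the linked YAML files, including their `_BASE_` inheritance. A minimal sketch, assuming fast-reid is importable (e.g. `thirdparty/fast-reid` is on `PYTHONPATH`) and using the DukeMTMC MGN entry as an example; the local config and weight paths are assumptions based on the table links:
+
+```python
+# Sketch: load a model-zoo config and point it at a downloaded checkpoint.
+from fastreid.config import get_cfg
+
+cfg = get_cfg()  # copy of the defaults defined in fastreid/config/defaults.py
+cfg.merge_from_file("configs/DukeMTMC/mgn_R50-ibn.yml")  # resolves _BASE_ recursively
+cfg.MODEL.WEIGHTS = "duke_mgn_R50-ibn.pth"  # checkpoint downloaded from the table above
+cfg.freeze()
+print(cfg.MODEL.META_ARCHITECTURE, cfg.MODEL.BACKBONE.NAME)
+```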
diff --git a/thirdparty/fast-reid/docs/requirements b/thirdparty/fast-reid/docs/requirements new file mode 100644 index 0000000000000000000000000000000000000000..f9f6594a0c895b0565431fce4b835851140e73e6 --- /dev/null +++ b/thirdparty/fast-reid/docs/requirements @@ -0,0 +1,20 @@ +matplotlib +scipy +Pillow +numpy +prettytable +easydict +scikit-learn +pyyaml +yacs +termcolor +tabulate +tensorboard +opencv-python +pyyaml +yacs +termcolor +scikit-learn +tabulate +gdown +faiss-cpu \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/__init__.py b/thirdparty/fast-reid/fastreid/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..94eab6e9fa310a912476ae4e4157411be1fb5e91 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/__init__.py @@ -0,0 +1,8 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + + +__version__ = "0.2.0" diff --git a/thirdparty/fast-reid/fastreid/config/__init__.py b/thirdparty/fast-reid/fastreid/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..13e6acd6ac780e98ccbc87ac382bed13613534f2 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/config/__init__.py @@ -0,0 +1,8 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .config import CfgNode, get_cfg +from .defaults import _C as cfg diff --git a/thirdparty/fast-reid/fastreid/config/config.py b/thirdparty/fast-reid/fastreid/config/config.py new file mode 100644 index 0000000000000000000000000000000000000000..6f79a4bad4bd1afed077df2d3b1754875017e648 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/config/config.py @@ -0,0 +1,159 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import logging +import os +from typing import Any + +import yaml +from yacs.config import CfgNode as _CfgNode + +from ..utils.file_io import PathManager + +BASE_KEY = "_BASE_" + + +class CfgNode(_CfgNode): + """ + Our own extended version of :class:`yacs.config.CfgNode`. + It contains the following extra features: + 1. The :meth:`merge_from_file` method supports the "_BASE_" key, + which allows the new CfgNode to inherit all the attributes from the + base configuration file. + 2. Keys that start with "COMPUTED_" are treated as insertion-only + "computed" attributes. They can be inserted regardless of whether + the CfgNode is frozen or not. + 3. With "allow_unsafe=True", it supports pyyaml tags that evaluate + expressions in config. See examples in + https://pyyaml.org/wiki/PyYAMLDocumentation#yaml-tags-and-python-types + Note that this may lead to arbitrary code execution: you must not + load a config file from untrusted sources before manually inspecting + the content of the file. + """ + + @staticmethod + def load_yaml_with_base(filename: str, allow_unsafe: bool = False): + """ + Just like `yaml.load(open(filename))`, but inherit attributes from its + `_BASE_`. + Args: + filename (str): the file name of the current config. Will be used to + find the base config file. + allow_unsafe (bool): whether to allow loading the config file with + `yaml.unsafe_load`. + Returns: + (dict): the loaded yaml + """ + with PathManager.open(filename, "r") as f: + try: + cfg = yaml.safe_load(f) + except yaml.constructor.ConstructorError: + if not allow_unsafe: + raise + logger = logging.getLogger(__name__) + logger.warning( + "Loading config {} with yaml.unsafe_load. 
Your machine may " + "be at risk if the file contains malicious content.".format( + filename + ) + ) + f.close() + with open(filename, "r") as f: + cfg = yaml.unsafe_load(f) + + def merge_a_into_b(a, b): + # merge dict a into dict b. values in a will overwrite b. + for k, v in a.items(): + if isinstance(v, dict) and k in b: + assert isinstance( + b[k], dict + ), "Cannot inherit key '{}' from base!".format(k) + merge_a_into_b(v, b[k]) + else: + b[k] = v + + if BASE_KEY in cfg: + base_cfg_file = cfg[BASE_KEY] + if base_cfg_file.startswith("~"): + base_cfg_file = os.path.expanduser(base_cfg_file) + if not any( + map(base_cfg_file.startswith, ["/", "https://", "http://"]) + ): + # the path to base cfg is relative to the config file itself. + base_cfg_file = os.path.join( + os.path.dirname(filename), base_cfg_file + ) + base_cfg = CfgNode.load_yaml_with_base( + base_cfg_file, allow_unsafe=allow_unsafe + ) + del cfg[BASE_KEY] + + merge_a_into_b(cfg, base_cfg) + return base_cfg + return cfg + + def merge_from_file(self, cfg_filename: str, allow_unsafe: bool = False): + """ + Merge configs from a given yaml file. + Args: + cfg_filename: the file name of the yaml config. + allow_unsafe: whether to allow loading the config file with + `yaml.unsafe_load`. + """ + loaded_cfg = CfgNode.load_yaml_with_base( + cfg_filename, allow_unsafe=allow_unsafe + ) + loaded_cfg = type(self)(loaded_cfg) + self.merge_from_other_cfg(loaded_cfg) + + # Forward the following calls to base, but with a check on the BASE_KEY. + def merge_from_other_cfg(self, cfg_other): + """ + Args: + cfg_other (CfgNode): configs to merge from. + """ + assert ( + BASE_KEY not in cfg_other + ), "The reserved key '{}' can only be used in files!".format(BASE_KEY) + return super().merge_from_other_cfg(cfg_other) + + def merge_from_list(self, cfg_list: list): + """ + Args: + cfg_list (list): list of configs to merge from. + """ + keys = set(cfg_list[0::2]) + assert ( + BASE_KEY not in keys + ), "The reserved key '{}' can only be used in files!".format(BASE_KEY) + return super().merge_from_list(cfg_list) + + def __setattr__(self, name: str, val: Any): + if name.startswith("COMPUTED_"): + if name in self: + old_val = self[name] + if old_val == val: + return + raise KeyError( + "Computed attributed '{}' already exists " + "with a different value! old={}, new={}.".format( + name, old_val, val + ) + ) + self[name] = val + else: + super().__setattr__(name, val) + + +def get_cfg() -> CfgNode: + """ + Get a copy of the default config. + Returns: + a fastreid CfgNode instance. + """ + from .defaults import _C + + return _C.clone() diff --git a/thirdparty/fast-reid/fastreid/config/defaults.py b/thirdparty/fast-reid/fastreid/config/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..282634740b8918a2b5d32bd4e9c70b629e9e8e07 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/config/defaults.py @@ -0,0 +1,273 @@ +from .config import CfgNode as CN + +# ----------------------------------------------------------------------------- +# Convention about Training / Test specific parameters +# ----------------------------------------------------------------------------- +# Whenever an argument can be either used for training or for testing, the +# corresponding name will be post-fixed by a _TRAIN for a training parameter, +# or _TEST for a test-specific parameter. 
+# For example, the number of images during training will be +# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be +# IMAGES_PER_BATCH_TEST + +# ----------------------------------------------------------------------------- +# Config definition +# ----------------------------------------------------------------------------- + +_C = CN() + +# ----------------------------------------------------------------------------- +# MODEL +# ----------------------------------------------------------------------------- +_C.MODEL = CN() +_C.MODEL.DEVICE = "cuda" +_C.MODEL.META_ARCHITECTURE = 'Baseline' +_C.MODEL.FREEZE_LAYERS = [''] + +# ---------------------------------------------------------------------------- # +# Backbone options +# ---------------------------------------------------------------------------- # +_C.MODEL.BACKBONE = CN() + +_C.MODEL.BACKBONE.NAME = "build_resnet_backbone" +_C.MODEL.BACKBONE.DEPTH = "50x" +_C.MODEL.BACKBONE.LAST_STRIDE = 1 +# Backbone feature dimension +_C.MODEL.BACKBONE.FEAT_DIM = 2048 +# Normalization method for the convolution layers. +_C.MODEL.BACKBONE.NORM = "BN" +# If use IBN block in backbone +_C.MODEL.BACKBONE.WITH_IBN = False +# If use SE block in backbone +_C.MODEL.BACKBONE.WITH_SE = False +# If use Non-local block in backbone +_C.MODEL.BACKBONE.WITH_NL = False +# If use ImageNet pretrain model +_C.MODEL.BACKBONE.PRETRAIN = True +# Pretrain model path +_C.MODEL.BACKBONE.PRETRAIN_PATH = '' + +# ---------------------------------------------------------------------------- # +# REID HEADS options +# ---------------------------------------------------------------------------- # +_C.MODEL.HEADS = CN() +_C.MODEL.HEADS.NAME = "EmbeddingHead" +# Normalization method for the convolution layers. +_C.MODEL.HEADS.NORM = "BN" +# Number of identity +_C.MODEL.HEADS.NUM_CLASSES = 0 +# Embedding dimension in head +_C.MODEL.HEADS.EMBEDDING_DIM = 0 +# If use BNneck in embedding +_C.MODEL.HEADS.WITH_BNNECK = True +# Triplet feature using feature before(after) bnneck +_C.MODEL.HEADS.NECK_FEAT = "before" # options: before, after +# Pooling layer type +_C.MODEL.HEADS.POOL_LAYER = "avgpool" + +# Classification layer type +_C.MODEL.HEADS.CLS_LAYER = "linear" # "arcSoftmax" or "circleSoftmax" + +# Margin and Scale for margin-based classification layer +_C.MODEL.HEADS.MARGIN = 0.15 +_C.MODEL.HEADS.SCALE = 128 + +# ---------------------------------------------------------------------------- # +# REID LOSSES options +# ---------------------------------------------------------------------------- # +_C.MODEL.LOSSES = CN() +_C.MODEL.LOSSES.NAME = ("CrossEntropyLoss",) + +# Cross Entropy Loss options +_C.MODEL.LOSSES.CE = CN() +# if epsilon == 0, it means no label smooth regularization, +# if epsilon == -1, it means adaptive label smooth regularization +_C.MODEL.LOSSES.CE.EPSILON = 0.0 +_C.MODEL.LOSSES.CE.ALPHA = 0.2 +_C.MODEL.LOSSES.CE.SCALE = 1.0 + +# Triplet Loss options +_C.MODEL.LOSSES.TRI = CN() +_C.MODEL.LOSSES.TRI.MARGIN = 0.3 +_C.MODEL.LOSSES.TRI.NORM_FEAT = False +_C.MODEL.LOSSES.TRI.HARD_MINING = True +_C.MODEL.LOSSES.TRI.SCALE = 1.0 + +# Circle Loss options +_C.MODEL.LOSSES.CIRCLE = CN() +_C.MODEL.LOSSES.CIRCLE.MARGIN = 0.25 +_C.MODEL.LOSSES.CIRCLE.ALPHA = 128 +_C.MODEL.LOSSES.CIRCLE.SCALE = 1.0 + +# Focal Loss options +_C.MODEL.LOSSES.FL = CN() +_C.MODEL.LOSSES.FL.ALPHA = 0.25 +_C.MODEL.LOSSES.FL.GAMMA = 2 +_C.MODEL.LOSSES.FL.SCALE = 1.0 + +# Path to a checkpoint file to be loaded to the model. You can find available models in the model zoo. 
+_C.MODEL.WEIGHTS = "" + +# Values to be used for image normalization +_C.MODEL.PIXEL_MEAN = [0.485*255, 0.456*255, 0.406*255] +# Values to be used for image normalization +_C.MODEL.PIXEL_STD = [0.229*255, 0.224*255, 0.225*255] + + +# ----------------------------------------------------------------------------- +# INPUT +# ----------------------------------------------------------------------------- +_C.INPUT = CN() +# Size of the image during training +_C.INPUT.SIZE_TRAIN = [256, 128] +# Size of the image during test +_C.INPUT.SIZE_TEST = [256, 128] + +# Random probability for image horizontal flip +_C.INPUT.DO_FLIP = True +_C.INPUT.FLIP_PROB = 0.5 + +# Value of padding size +_C.INPUT.DO_PAD = True +_C.INPUT.PADDING_MODE = 'constant' +_C.INPUT.PADDING = 10 +# Random color jitter +_C.INPUT.CJ = CN() +_C.INPUT.CJ.ENABLED = False +_C.INPUT.CJ.PROB = 0.5 +_C.INPUT.CJ.BRIGHTNESS = 0.15 +_C.INPUT.CJ.CONTRAST = 0.15 +_C.INPUT.CJ.SATURATION = 0.1 +_C.INPUT.CJ.HUE = 0.1 +# Auto augmentation +_C.INPUT.DO_AUTOAUG = False +# Augmix augmentation +_C.INPUT.DO_AUGMIX = False +# Random Erasing +_C.INPUT.REA = CN() +_C.INPUT.REA.ENABLED = False +_C.INPUT.REA.PROB = 0.5 +_C.INPUT.REA.MEAN = [0.596*255, 0.558*255, 0.497*255] # [0.485*255, 0.456*255, 0.406*255] +# Random Patch +_C.INPUT.RPT = CN() +_C.INPUT.RPT.ENABLED = False +_C.INPUT.RPT.PROB = 0.5 + +# ----------------------------------------------------------------------------- +# Dataset +# ----------------------------------------------------------------------------- +_C.DATASETS = CN() +# List of the dataset names for training +_C.DATASETS.NAMES = ("Market1501",) +# List of the dataset names for testing +_C.DATASETS.TESTS = ("Market1501",) +# Combine trainset and testset joint training +_C.DATASETS.COMBINEALL = False + +# ----------------------------------------------------------------------------- +# DataLoader +# ----------------------------------------------------------------------------- +_C.DATALOADER = CN() +# P/K Sampler for data loading +_C.DATALOADER.PK_SAMPLER = True +# Naive sampler which don't consider balanced identity sampling +_C.DATALOADER.NAIVE_WAY = False +# Number of instance for each person +_C.DATALOADER.NUM_INSTANCE = 4 +_C.DATALOADER.NUM_WORKERS = 8 + +# ---------------------------------------------------------------------------- # +# Solver +# ---------------------------------------------------------------------------- # +_C.SOLVER = CN() + +# AUTOMATIC MIXED PRECISION +_C.SOLVER.AMP_ENABLED = False + +# Optimizer +_C.SOLVER.OPT = "Adam" + +_C.SOLVER.MAX_ITER = 120 + +_C.SOLVER.BASE_LR = 3e-4 +_C.SOLVER.BIAS_LR_FACTOR = 1. +_C.SOLVER.HEADS_LR_FACTOR = 1. + +_C.SOLVER.MOMENTUM = 0.9 + +_C.SOLVER.WEIGHT_DECAY = 0.0005 +_C.SOLVER.WEIGHT_DECAY_BIAS = 0. + +# Multi-step learning rate options +_C.SOLVER.SCHED = "WarmupMultiStepLR" +_C.SOLVER.GAMMA = 0.1 +_C.SOLVER.STEPS = [30, 55] + +# Cosine annealing learning rate options +_C.SOLVER.DELAY_ITERS = 0 +_C.SOLVER.ETA_MIN_LR = 3e-7 + +# Warmup options +_C.SOLVER.WARMUP_FACTOR = 0.1 +_C.SOLVER.WARMUP_ITERS = 10 +_C.SOLVER.WARMUP_METHOD = "linear" + +_C.SOLVER.FREEZE_ITERS = 0 + +# SWA options +_C.SOLVER.SWA = CN() +_C.SOLVER.SWA.ENABLED = False +_C.SOLVER.SWA.ITER = 10 +_C.SOLVER.SWA.PERIOD = 2 +_C.SOLVER.SWA.LR_FACTOR = 10. +_C.SOLVER.SWA.ETA_MIN_LR = 3.5e-6 +_C.SOLVER.SWA.LR_SCHED = False + +_C.SOLVER.CHECKPOINT_PERIOD = 20 + +# Number of images per batch across all machines. 
+# This is global, so if we have 8 GPUs and IMS_PER_BATCH = 16, each GPU will +# see 2 images per batch +_C.SOLVER.IMS_PER_BATCH = 64 + +# This is global, so if we have 8 GPUs and IMS_PER_BATCH = 16, each GPU will +# see 2 images per batch +_C.TEST = CN() + +_C.TEST.EVAL_PERIOD = 20 + +# Number of images per batch in one process. +_C.TEST.IMS_PER_BATCH = 64 +_C.TEST.METRIC = "cosine" +_C.TEST.ROC_ENABLED = False + +# Average query expansion +_C.TEST.AQE = CN() +_C.TEST.AQE.ENABLED = False +_C.TEST.AQE.ALPHA = 3.0 +_C.TEST.AQE.QE_TIME = 1 +_C.TEST.AQE.QE_K = 5 + +# Re-rank +_C.TEST.RERANK = CN() +_C.TEST.RERANK.ENABLED = False +_C.TEST.RERANK.K1 = 20 +_C.TEST.RERANK.K2 = 6 +_C.TEST.RERANK.LAMBDA = 0.3 + +# Precise batchnorm +_C.TEST.PRECISE_BN = CN() +_C.TEST.PRECISE_BN.ENABLED = False +_C.TEST.PRECISE_BN.DATASET = 'Market1501' +_C.TEST.PRECISE_BN.NUM_ITER = 300 + +# ---------------------------------------------------------------------------- # +# Misc options +# ---------------------------------------------------------------------------- # +_C.OUTPUT_DIR = "logs/" + +# Benchmark different cudnn algorithms. +# If input images have very different sizes, this option will have large overhead +# for about 10k iterations. It usually hurts total time, but can benefit for certain models. +# If input images have the same or similar sizes, benchmark is often helpful. +_C.CUDNN_BENCHMARK = False diff --git a/thirdparty/fast-reid/fastreid/data/__init__.py b/thirdparty/fast-reid/fastreid/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d138908decfe336e3f015fdc3093e3a1e756e6c9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/__init__.py @@ -0,0 +1,7 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +from .build import build_reid_train_loader, build_reid_test_loader diff --git a/thirdparty/fast-reid/fastreid/data/build.py b/thirdparty/fast-reid/fastreid/data/build.py new file mode 100644 index 0000000000000000000000000000000000000000..46b3e5741a6621a300a3407c3d5f61dba24a3de2 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/build.py @@ -0,0 +1,113 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import os +import torch +from torch._six import container_abcs, string_classes, int_classes +from torch.utils.data import DataLoader +from fastreid.utils import comm + +from . 
import samplers +from .common import CommDataset +from .datasets import DATASET_REGISTRY +from .transforms import build_transforms + +_root = os.getenv("FASTREID_DATASETS", "datasets") + + +def build_reid_train_loader(cfg): + cfg = cfg.clone() + cfg.defrost() + + train_items = list() + for d in cfg.DATASETS.NAMES: + dataset = DATASET_REGISTRY.get(d)(root=_root, combineall=cfg.DATASETS.COMBINEALL) + if comm.is_main_process(): + dataset.show_train() + train_items.extend(dataset.train) + + iters_per_epoch = len(train_items) // cfg.SOLVER.IMS_PER_BATCH + cfg.SOLVER.MAX_ITER *= iters_per_epoch + train_transforms = build_transforms(cfg, is_train=True) + train_set = CommDataset(train_items, train_transforms, relabel=True) + + num_workers = cfg.DATALOADER.NUM_WORKERS + num_instance = cfg.DATALOADER.NUM_INSTANCE + mini_batch_size = cfg.SOLVER.IMS_PER_BATCH // comm.get_world_size() + + if cfg.DATALOADER.PK_SAMPLER: + if cfg.DATALOADER.NAIVE_WAY: + data_sampler = samplers.NaiveIdentitySampler(train_set.img_items, + cfg.SOLVER.IMS_PER_BATCH, num_instance) + else: + data_sampler = samplers.BalancedIdentitySampler(train_set.img_items, + cfg.SOLVER.IMS_PER_BATCH, num_instance) + else: + data_sampler = samplers.TrainingSampler(len(train_set)) + batch_sampler = torch.utils.data.sampler.BatchSampler(data_sampler, mini_batch_size, True) + + train_loader = torch.utils.data.DataLoader( + train_set, + num_workers=num_workers, + batch_sampler=batch_sampler, + collate_fn=fast_batch_collator, + pin_memory=True, + ) + return train_loader + + +def build_reid_test_loader(cfg, dataset_name): + cfg = cfg.clone() + cfg.defrost() + + dataset = DATASET_REGISTRY.get(dataset_name)(root=_root) + if comm.is_main_process(): + dataset.show_test() + test_items = dataset.query + dataset.gallery + + test_transforms = build_transforms(cfg, is_train=False) + test_set = CommDataset(test_items, test_transforms, relabel=False) + + mini_batch_size = cfg.TEST.IMS_PER_BATCH // comm.get_world_size() + data_sampler = samplers.InferenceSampler(len(test_set)) + batch_sampler = torch.utils.data.BatchSampler(data_sampler, mini_batch_size, False) + test_loader = DataLoader( + test_set, + batch_sampler=batch_sampler, + num_workers=2, # save some memory + collate_fn=fast_batch_collator, + pin_memory=True, + ) + return test_loader, len(dataset.query) + + +def trivial_batch_collator(batch): + """ + A batch collator that does nothing. 
+ """ + return batch + + +def fast_batch_collator(batched_inputs): + """ + A simple batch collator for most common reid tasks + """ + elem = batched_inputs[0] + if isinstance(elem, torch.Tensor): + out = torch.zeros((len(batched_inputs), *elem.size()), dtype=elem.dtype) + for i, tensor in enumerate(batched_inputs): + out[i] += tensor + return out + + elif isinstance(elem, container_abcs.Mapping): + return {key: fast_batch_collator([d[key] for d in batched_inputs]) for key in elem} + + elif isinstance(elem, float): + return torch.tensor(batched_inputs, dtype=torch.float64) + elif isinstance(elem, int_classes): + return torch.tensor(batched_inputs) + elif isinstance(elem, string_classes): + return batched_inputs diff --git a/thirdparty/fast-reid/fastreid/data/common.py b/thirdparty/fast-reid/fastreid/data/common.py new file mode 100644 index 0000000000000000000000000000000000000000..82feb3b850081ac38fe30869759db932966aac4f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/common.py @@ -0,0 +1,55 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from torch.utils.data import Dataset + +from .data_utils import read_image + + +class CommDataset(Dataset): + """Image Person ReID Dataset""" + + def __init__(self, img_items, transform=None, relabel=True): + self.img_items = img_items + self.transform = transform + self.relabel = relabel + + pid_set = set() + cam_set = set() + for i in img_items: + pid_set.add(i[1]) + cam_set.add(i[2]) + + self.pids = sorted(list(pid_set)) + self.cams = sorted(list(cam_set)) + if relabel: + self.pid_dict = dict([(p, i) for i, p in enumerate(self.pids)]) + self.cam_dict = dict([(p, i) for i, p in enumerate(self.cams)]) + + def __len__(self): + return len(self.img_items) + + def __getitem__(self, index): + img_path, pid, camid = self.img_items[index] + img = read_image(img_path) + if self.transform is not None: img = self.transform(img) + if self.relabel: + pid = self.pid_dict[pid] + camid = self.cam_dict[camid] + return { + "images": img, + "targets": pid, + "camids": camid, + "img_paths": img_path, + } + + @property + def num_classes(self): + return len(self.pids) + + @property + def num_cameras(self): + return len(self.cams) diff --git a/thirdparty/fast-reid/fastreid/data/data_utils.py b/thirdparty/fast-reid/fastreid/data/data_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b1cb565ad2a1dec6338eb7b032118559ba6a44f7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/data_utils.py @@ -0,0 +1,45 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import numpy as np +from PIL import Image, ImageOps + +from fastreid.utils.file_io import PathManager + + +def read_image(file_name, format=None): + """ + Read an image into the given format. + Will apply rotation and flipping if the image has such exif information. 
+ Args: + file_name (str): image file path + format (str): one of the supported image modes in PIL, or "BGR" + Returns: + image (np.ndarray): an HWC image + """ + with PathManager.open(file_name, "rb") as f: + image = Image.open(f) + + # capture and ignore this bug: https://github.com/python-pillow/Pillow/issues/3973 + try: + image = ImageOps.exif_transpose(image) + except Exception: + pass + + if format is not None: + # PIL only supports RGB, so convert to RGB and flip channels over below + conversion_format = format + if format == "BGR": + conversion_format = "RGB" + image = image.convert(conversion_format) + image = np.asarray(image) + if format == "BGR": + # flip channels if needed + image = image[:, :, ::-1] + # PIL squeezes out the channel dimension for "L", so make it HWC + if format == "L": + image = np.expand_dims(image, -1) + image = Image.fromarray(image) + return image diff --git a/thirdparty/fast-reid/fastreid/data/datasets/AirportALERT.py b/thirdparty/fast-reid/fastreid/data/datasets/AirportALERT.py new file mode 100644 index 0000000000000000000000000000000000000000..0ffece6b04ff1733b74c318f1a46dfb4283d3b75 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/AirportALERT.py @@ -0,0 +1,46 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['AirportALERT', ] + + +@DATASET_REGISTRY.register() +class AirportALERT(ImageDataset): + dataset_dir = "AirportALERT" + dataset_name = "airport" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + self.train_file = os.path.join(self.root, self.dataset_dir, 'filepath.txt') + + required_files = [self.train_file, self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path, self.train_file) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, dir_path, train_file): + data = [] + with open(train_file, "r") as f: + img_paths = [line.strip('\n') for line in f.readlines()] + + for path in img_paths: + split_path = path.split('\\') + img_path = '/'.join(split_path) + camid = self.dataset_name + "_" + split_path[0] + pid = self.dataset_name + "_" + split_path[1] + img_path = os.path.join(dir_path, img_path) + if 11001 <= int(split_path[1]) <= 401999: + data.append([img_path, pid, camid]) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/__init__.py b/thirdparty/fast-reid/fastreid/data/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2977c51bf1447e7f89c662a4c5e95bd171633a1e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/__init__.py @@ -0,0 +1,41 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from ...utils.registry import Registry + +DATASET_REGISTRY = Registry("DATASET") +DATASET_REGISTRY.__doc__ = """ +Registry for datasets +It must returns an instance of :class:`Backbone`. 
+""" + +# Person re-id datasets +from .cuhk03 import CUHK03 +from .dukemtmcreid import DukeMTMC +from .market1501 import Market1501 +from .msmt17 import MSMT17 +from .AirportALERT import AirportALERT +from .iLIDS import iLIDS +from .pku import PKU +from .prai import PRAI +from .sensereid import SenseReID +from .sysu_mm import SYSU_mm +from .thermalworld import Thermalworld +from .pes3d import PeS3D +from .caviara import CAVIARa +from .viper import VIPeR +from .lpw import LPW +from .shinpuhkan import Shinpuhkan +from .wildtracker import WildTrackCrop +from .cuhk_sysu import cuhkSYSU + +# Vehicle re-id datasets +from .veri import VeRi +from .vehicleid import VehicleID, SmallVehicleID, MediumVehicleID, LargeVehicleID +from .veriwild import VeRiWild, SmallVeRiWild, MediumVeRiWild, LargeVeRiWild + + +__all__ = [k for k in globals().keys() if "builtin" not in k and not k.startswith("_")] diff --git a/thirdparty/fast-reid/fastreid/data/datasets/bases.py b/thirdparty/fast-reid/fastreid/data/datasets/bases.py new file mode 100644 index 0000000000000000000000000000000000000000..e9122e3eb63b7e70dfa3b9fb70214b1e7882a211 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/bases.py @@ -0,0 +1,170 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +import copy +import logging +import os +from tabulate import tabulate +from termcolor import colored + +logger = logging.getLogger(__name__) + + +class Dataset(object): + """An abstract class representing a Dataset. + This is the base class for ``ImageDataset`` and ``VideoDataset``. + Args: + train (list): contains tuples of (img_path(s), pid, camid). + query (list): contains tuples of (img_path(s), pid, camid). + gallery (list): contains tuples of (img_path(s), pid, camid). + transform: transform function. + mode (str): 'train', 'query' or 'gallery'. + combineall (bool): combines train, query and gallery in a + dataset for training. + verbose (bool): show information. + """ + _junk_pids = [] # contains useless person IDs, e.g. background, false detections + + def __init__(self, train, query, gallery, transform=None, mode='train', + combineall=False, verbose=True, **kwargs): + self.train = train + self.query = query + self.gallery = gallery + self.transform = transform + self.mode = mode + self.combineall = combineall + self.verbose = verbose + + self.num_train_pids = self.get_num_pids(self.train) + self.num_train_cams = self.get_num_cams(self.train) + + if self.combineall: + self.combine_all() + + if self.mode == 'train': + self.data = self.train + elif self.mode == 'query': + self.data = self.query + elif self.mode == 'gallery': + self.data = self.gallery + else: + raise ValueError('Invalid mode. Got {}, but expected to be ' + 'one of [train | query | gallery]'.format(self.mode)) + + def __getitem__(self, index): + raise NotImplementedError + + def __len__(self): + return len(self.data) + + def __radd__(self, other): + """Supports sum([dataset1, dataset2, dataset3]).""" + if other == 0: + return self + else: + return self.__add__(other) + + def parse_data(self, data): + """Parses data list and returns the number of person IDs + and the number of camera views. 
+ Args: + data (list): contains tuples of (img_path(s), pid, camid) + """ + pids = set() + cams = set() + for _, pid, camid in data: + pids.add(pid) + cams.add(camid) + return len(pids), len(cams) + + def get_num_pids(self, data): + """Returns the number of training person identities.""" + return self.parse_data(data)[0] + + def get_num_cams(self, data): + """Returns the number of training cameras.""" + return self.parse_data(data)[1] + + def show_summary(self): + """Shows dataset statistics.""" + pass + + def combine_all(self): + """Combines train, query and gallery in a dataset for training.""" + combined = copy.deepcopy(self.train) + + def _combine_data(data): + for img_path, pid, camid in data: + if pid in self._junk_pids: + continue + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + combined.append((img_path, pid, camid)) + + _combine_data(self.query) + _combine_data(self.gallery) + + self.train = combined + self.num_train_pids = self.get_num_pids(self.train) + + def check_before_run(self, required_files): + """Checks if required files exist before going deeper. + Args: + required_files (str or list): string file name(s). + """ + if isinstance(required_files, str): + required_files = [required_files] + + for fpath in required_files: + if not os.path.exists(fpath): + raise RuntimeError('"{}" is not found'.format(fpath)) + + +class ImageDataset(Dataset): + """A base class representing ImageDataset. + All other image datasets should subclass it. + ``__getitem__`` returns an image given index. + It will return ``img``, ``pid``, ``camid`` and ``img_path`` + where ``img`` has shape (channel, height, width). As a result, + data in each batch has shape (batch_size, channel, height, width). + """ + + def __init__(self, train, query, gallery, **kwargs): + super(ImageDataset, self).__init__(train, query, gallery, **kwargs) + + def show_train(self): + num_train_pids, num_train_cams = self.parse_data(self.train) + + headers = ['subset', '# ids', '# images', '# cameras'] + csv_results = [['train', num_train_pids, len(self.train), num_train_cams]] + + # tabulate it + table = tabulate( + csv_results, + tablefmt="pipe", + headers=headers, + numalign="left", + ) + logger.info(f"=> Loaded {self.__class__.__name__} in csv format: \n" + colored(table, "cyan")) + + def show_test(self): + num_query_pids, num_query_cams = self.parse_data(self.query) + num_gallery_pids, num_gallery_cams = self.parse_data(self.gallery) + + headers = ['subset', '# ids', '# images', '# cameras'] + csv_results = [ + ['query', num_query_pids, len(self.query), num_query_cams], + ['gallery', num_gallery_pids, len(self.gallery), num_gallery_cams], + ] + + # tabulate it + table = tabulate( + csv_results, + tablefmt="pipe", + headers=headers, + numalign="left", + ) + logger.info(f"=> Loaded {self.__class__.__name__} in csv format: \n" + colored(table, "cyan")) diff --git a/thirdparty/fast-reid/fastreid/data/datasets/caviara.py b/thirdparty/fast-reid/fastreid/data/datasets/caviara.py new file mode 100644 index 0000000000000000000000000000000000000000..9403671ed935db28ac457bc9439258149c593f37 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/caviara.py @@ -0,0 +1,46 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from scipy.io import loadmat +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset +import pdb +import random +import numpy as np + +__all__ 
= ['CAVIARa',] + + +@DATASET_REGISTRY.register() +class CAVIARa(ImageDataset): + dataset_dir = "CAVIARa" + dataset_name = "caviara" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + img_list = glob(os.path.join(train_path, "*.jpg")) + for img_path in img_list: + img_name = img_path.split('/')[-1] + pid = self.dataset_name + "_" + img_name[:4] + camid = self.dataset_name + "_cam0" + data.append([img_path, pid, camid]) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/cuhk03.py b/thirdparty/fast-reid/fastreid/data/datasets/cuhk03.py new file mode 100644 index 0000000000000000000000000000000000000000..96fdaa58222c8c014a6baa1aafaeab92a5ccfc2d --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/cuhk03.py @@ -0,0 +1,274 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: liaoxingyu2@jd.com +""" + +import json +import os.path as osp + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.utils.file_io import PathManager +from .bases import ImageDataset + + +@DATASET_REGISTRY.register() +class CUHK03(ImageDataset): + """CUHK03. + + Reference: + Li et al. DeepReID: Deep Filter Pairing Neural Network for Person Re-identification. CVPR 2014. + + URL: ``_ + + Dataset statistics: + - identities: 1360. + - images: 13164. + - cameras: 6. + - splits: 20 (classic). + """ + dataset_dir = 'cuhk03' + dataset_url = None + dataset_name = "cuhk03" + + def __init__(self, root='datasets', split_id=0, cuhk03_labeled=True, cuhk03_classic_split=False, **kwargs): + self.root = root + self.dataset_dir = osp.join(self.root, self.dataset_dir) + + self.data_dir = osp.join(self.dataset_dir, 'cuhk03_release') + self.raw_mat_path = osp.join(self.data_dir, 'cuhk-03.mat') + + self.imgs_detected_dir = osp.join(self.dataset_dir, 'images_detected') + self.imgs_labeled_dir = osp.join(self.dataset_dir, 'images_labeled') + + self.split_classic_det_json_path = osp.join(self.dataset_dir, 'splits_classic_detected.json') + self.split_classic_lab_json_path = osp.join(self.dataset_dir, 'splits_classic_labeled.json') + + self.split_new_det_json_path = osp.join(self.dataset_dir, 'splits_new_detected.json') + self.split_new_lab_json_path = osp.join(self.dataset_dir, 'splits_new_labeled.json') + + self.split_new_det_mat_path = osp.join(self.dataset_dir, 'cuhk03_new_protocol_config_detected.mat') + self.split_new_lab_mat_path = osp.join(self.dataset_dir, 'cuhk03_new_protocol_config_labeled.mat') + + required_files = [ + self.dataset_dir, + self.data_dir, + self.raw_mat_path, + self.split_new_det_mat_path, + self.split_new_lab_mat_path + ] + self.check_before_run(required_files) + + self.preprocess_split() + + if cuhk03_labeled: + split_path = self.split_classic_lab_json_path if cuhk03_classic_split else self.split_new_lab_json_path + else: + split_path = self.split_classic_det_json_path if cuhk03_classic_split else self.split_new_det_json_path + + with PathManager.open(split_path) as f: + splits = json.load(f) + assert split_id < len(splits), 'Condition split_id ({}) < len(splits) ({}) is false'.format(split_id, + len(splits)) + split = splits[split_id] + + train = split['train'] + tmp_train = [] + for img_path, pid, camid in train: + new_pid = self.dataset_name + "_" + str(pid) + 
new_camid = self.dataset_name + "_" + str(camid) + tmp_train.append((img_path, new_pid, new_camid)) + train = tmp_train + del tmp_train + query = split['query'] + gallery = split['gallery'] + + super(CUHK03, self).__init__(train, query, gallery, **kwargs) + + def preprocess_split(self): + # This function is a bit complex and ugly, what it does is + # 1. extract data from cuhk-03.mat and save as png images + # 2. create 20 classic splits (Li et al. CVPR'14) + # 3. create new split (Zhong et al. CVPR'17) + if osp.exists(self.imgs_labeled_dir) \ + and osp.exists(self.imgs_detected_dir) \ + and osp.exists(self.split_classic_det_json_path) \ + and osp.exists(self.split_classic_lab_json_path) \ + and osp.exists(self.split_new_det_json_path) \ + and osp.exists(self.split_new_lab_json_path): + return + + import h5py + from imageio import imwrite + from scipy.io import loadmat + + PathManager.mkdirs(self.imgs_detected_dir) + PathManager.mkdirs(self.imgs_labeled_dir) + + print('Extract image data from "{}" and save as png'.format(self.raw_mat_path)) + mat = h5py.File(self.raw_mat_path, 'r') + + def _deref(ref): + return mat[ref][:].T + + def _process_images(img_refs, campid, pid, save_dir): + img_paths = [] # Note: some persons only have images for one view + for imgid, img_ref in enumerate(img_refs): + img = _deref(img_ref) + if img.size == 0 or img.ndim < 3: + continue # skip empty cell + # images are saved with the following format, index-1 (ensure uniqueness) + # campid: index of camera pair (1-5) + # pid: index of person in 'campid'-th camera pair + # viewid: index of view, {1, 2} + # imgid: index of image, (1-10) + viewid = 1 if imgid < 5 else 2 + img_name = '{:01d}_{:03d}_{:01d}_{:02d}.png'.format(campid + 1, pid + 1, viewid, imgid + 1) + img_path = osp.join(save_dir, img_name) + if not osp.isfile(img_path): + imwrite(img_path, img) + img_paths.append(img_path) + return img_paths + + def _extract_img(image_type): + print('Processing {} images ...'.format(image_type)) + meta_data = [] + imgs_dir = self.imgs_detected_dir if image_type == 'detected' else self.imgs_labeled_dir + for campid, camp_ref in enumerate(mat[image_type][0]): + camp = _deref(camp_ref) + num_pids = camp.shape[0] + for pid in range(num_pids): + img_paths = _process_images(camp[pid, :], campid, pid, imgs_dir) + assert len(img_paths) > 0, 'campid{}-pid{} has no images'.format(campid, pid) + meta_data.append((campid + 1, pid + 1, img_paths)) + print('- done camera pair {} with {} identities'.format(campid + 1, num_pids)) + return meta_data + + meta_detected = _extract_img('detected') + meta_labeled = _extract_img('labeled') + + def _extract_classic_split(meta_data, test_split): + train, test = [], [] + num_train_pids, num_test_pids = 0, 0 + num_train_imgs, num_test_imgs = 0, 0 + for i, (campid, pid, img_paths) in enumerate(meta_data): + + if [campid, pid] in test_split: + for img_path in img_paths: + camid = int(osp.basename(img_path).split('_')[2]) - 1 # make it 0-based + test.append((img_path, num_test_pids, camid)) + num_test_pids += 1 + num_test_imgs += len(img_paths) + else: + for img_path in img_paths: + camid = int(osp.basename(img_path).split('_')[2]) - 1 # make it 0-based + train.append((img_path, num_train_pids, camid)) + num_train_pids += 1 + num_train_imgs += len(img_paths) + return train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs + + print('Creating classic splits (# = 20) ...') + splits_classic_det, splits_classic_lab = [], [] + for split_ref in mat['testsets'][0]: + test_split = 
_deref(split_ref).tolist() + + # create split for detected images + train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \ + _extract_classic_split(meta_detected, test_split) + splits_classic_det.append({ + 'train': train, + 'query': test, + 'gallery': test, + 'num_train_pids': num_train_pids, + 'num_train_imgs': num_train_imgs, + 'num_query_pids': num_test_pids, + 'num_query_imgs': num_test_imgs, + 'num_gallery_pids': num_test_pids, + 'num_gallery_imgs': num_test_imgs + }) + + # create split for labeled images + train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \ + _extract_classic_split(meta_labeled, test_split) + splits_classic_lab.append({ + 'train': train, + 'query': test, + 'gallery': test, + 'num_train_pids': num_train_pids, + 'num_train_imgs': num_train_imgs, + 'num_query_pids': num_test_pids, + 'num_query_imgs': num_test_imgs, + 'num_gallery_pids': num_test_pids, + 'num_gallery_imgs': num_test_imgs + }) + + with PathManager.open(self.split_classic_det_json_path, 'w') as f: + json.dump(splits_classic_det, f, indent=4, separators=(',', ': ')) + with PathManager.open(self.split_classic_lab_json_path, 'w') as f: + json.dump(splits_classic_lab, f, indent=4, separators=(',', ': ')) + + def _extract_set(filelist, pids, pid2label, idxs, img_dir, relabel): + tmp_set = [] + unique_pids = set() + for idx in idxs: + img_name = filelist[idx][0] + camid = int(img_name.split('_')[2]) - 1 # make it 0-based + pid = pids[idx] + if relabel: + pid = pid2label[pid] + img_path = osp.join(img_dir, img_name) + tmp_set.append((img_path, int(pid), camid)) + unique_pids.add(pid) + return tmp_set, len(unique_pids), len(idxs) + + def _extract_new_split(split_dict, img_dir): + train_idxs = split_dict['train_idx'].flatten() - 1 # index-0 + pids = split_dict['labels'].flatten() + train_pids = set(pids[train_idxs]) + pid2label = {pid: label for label, pid in enumerate(train_pids)} + query_idxs = split_dict['query_idx'].flatten() - 1 + gallery_idxs = split_dict['gallery_idx'].flatten() - 1 + filelist = split_dict['filelist'].flatten() + train_info = _extract_set(filelist, pids, pid2label, train_idxs, img_dir, relabel=True) + query_info = _extract_set(filelist, pids, pid2label, query_idxs, img_dir, relabel=False) + gallery_info = _extract_set(filelist, pids, pid2label, gallery_idxs, img_dir, relabel=False) + return train_info, query_info, gallery_info + + print('Creating new split for detected images (767/700) ...') + train_info, query_info, gallery_info = _extract_new_split( + loadmat(self.split_new_det_mat_path), + self.imgs_detected_dir + ) + split = [{ + 'train': train_info[0], + 'query': query_info[0], + 'gallery': gallery_info[0], + 'num_train_pids': train_info[1], + 'num_train_imgs': train_info[2], + 'num_query_pids': query_info[1], + 'num_query_imgs': query_info[2], + 'num_gallery_pids': gallery_info[1], + 'num_gallery_imgs': gallery_info[2] + }] + + with PathManager.open(self.split_new_det_json_path, 'w') as f: + json.dump(split, f, indent=4, separators=(',', ': ')) + + print('Creating new split for labeled images (767/700) ...') + train_info, query_info, gallery_info = _extract_new_split( + loadmat(self.split_new_lab_mat_path), + self.imgs_labeled_dir + ) + split = [{ + 'train': train_info[0], + 'query': query_info[0], + 'gallery': gallery_info[0], + 'num_train_pids': train_info[1], + 'num_train_imgs': train_info[2], + 'num_query_pids': query_info[1], + 'num_query_imgs': query_info[2], + 'num_gallery_pids': gallery_info[1], + 'num_gallery_imgs': 
gallery_info[2] + }] + with PathManager.open(self.split_new_lab_json_path, 'w') as f: + json.dump(split, f, indent=4, separators=(',', ': ')) diff --git a/thirdparty/fast-reid/fastreid/data/datasets/cuhk_sysu.py b/thirdparty/fast-reid/fastreid/data/datasets/cuhk_sysu.py new file mode 100644 index 0000000000000000000000000000000000000000..75a14889f1ec03a043cb0ed9a0ed163ee764e0b5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/cuhk_sysu.py @@ -0,0 +1,58 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import glob +import os.path as osp +import re +import warnings + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class cuhkSYSU(ImageDataset): + r"""CUHK SYSU datasets. + + The dataset is collected from two sources: street snap and movie. + In street snap, 12,490 images and 6,057 query persons were collected + with movable cameras across hundreds of scenes while 5,694 images and + 2,375 query persons were selected from movies and TV dramas. + + Dataset statistics: + - identities: xxx. + - images: 12936 (train). + """ + dataset_dir = 'cuhk_sysu' + dataset_name = "cuhksysu" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.dataset_dir = osp.join(self.root, self.dataset_dir) + + self.data_dir = osp.join(self.dataset_dir, "cropped_images") + + required_files = [self.data_dir] + self.check_before_run(required_files) + + train = self.process_dir(self.data_dir) + query = [] + gallery = [] + + super(cuhkSYSU, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path): + img_paths = glob.glob(osp.join(dir_path, '*.jpg')) + pattern = re.compile(r'p([-\d]+)_s(\d)') + + data = [] + for img_path in img_paths: + pid, _ = map(int, pattern.search(img_path).groups()) + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_0" + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/dukemtmcreid.py b/thirdparty/fast-reid/fastreid/data/datasets/dukemtmcreid.py new file mode 100644 index 0000000000000000000000000000000000000000..cd90ce2c1b35a73db5eddc4c0099c446a48d5631 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/dukemtmcreid.py @@ -0,0 +1,70 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: liaoxingyu2@jd.com +""" + +import glob +import os.path as osp +import re + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class DukeMTMC(ImageDataset): + """DukeMTMC-reID. + + Reference: + - Ristani et al. Performance Measures and a Data Set for Multi-Target, Multi-Camera Tracking. ECCVW 2016. + - Zheng et al. Unlabeled Samples Generated by GAN Improve the Person Re-identification Baseline in vitro. ICCV 2017. + + URL: ``_ + + Dataset statistics: + - identities: 1404 (train + query). + - images:16522 (train) + 2228 (query) + 17661 (gallery). + - cameras: 8. 
+ """ + dataset_dir = 'DukeMTMC-reID' + dataset_url = 'http://vision.cs.duke.edu/DukeMTMC/data/misc/DukeMTMC-reID.zip' + dataset_name = "dukemtmc" + + def __init__(self, root='datasets', **kwargs): + # self.root = osp.abspath(osp.expanduser(root)) + self.root = root + self.dataset_dir = osp.join(self.root, self.dataset_dir) + self.train_dir = osp.join(self.dataset_dir, 'bounding_box_train') + self.query_dir = osp.join(self.dataset_dir, 'query') + self.gallery_dir = osp.join(self.dataset_dir, 'bounding_box_test') + + required_files = [ + self.dataset_dir, + self.train_dir, + self.query_dir, + self.gallery_dir, + ] + self.check_before_run(required_files) + + train = self.process_dir(self.train_dir) + query = self.process_dir(self.query_dir, is_train=False) + gallery = self.process_dir(self.gallery_dir, is_train=False) + + super(DukeMTMC, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path, is_train=True): + img_paths = glob.glob(osp.join(dir_path, '*.jpg')) + pattern = re.compile(r'([-\d]+)_c(\d)') + + data = [] + for img_path in img_paths: + pid, camid = map(int, pattern.search(img_path).groups()) + assert 1 <= camid <= 8 + camid -= 1 # index starts from 0 + if is_train: + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/iLIDS.py b/thirdparty/fast-reid/fastreid/data/datasets/iLIDS.py new file mode 100644 index 0000000000000000000000000000000000000000..9d9a4128375b5f08d3ef747b3f2befddddacf261 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/iLIDS.py @@ -0,0 +1,43 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['iLIDS', ] + + +@DATASET_REGISTRY.register() +class iLIDS(ImageDataset): + dataset_dir = "iLIDS" + dataset_name = "ilids" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + file_path = os.listdir(train_path) + for pid_dir in file_path: + img_file = os.path.join(train_path, pid_dir) + img_paths = glob(os.path.join(img_file, "*.png")) + for img_path in img_paths: + split_path = img_path.split('/') + pid = self.dataset_name + "_" + split_path[-2] + camid = self.dataset_name + "_" + split_path[-1].split('_')[0] + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/lpw.py b/thirdparty/fast-reid/fastreid/data/datasets/lpw.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f6117f1a996d12bf5dd5ccb4278ecfa182c315 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/lpw.py @@ -0,0 +1,47 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['LPW', ] + + +@DATASET_REGISTRY.register() +class LPW(ImageDataset): + dataset_dir = "pep_256x128" + dataset_name = "lpw" + + def __init__(self, root='datasets', **kwargs): + self.root = 
root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + file_path_list = ['scen1', 'scen2', 'scen3'] + + for scene in file_path_list: + cam_list = os.listdir(os.path.join(train_path, scene)) + for cam in cam_list: + camid = self.dataset_name + "_" + cam + pid_list = os.listdir(os.path.join(train_path, scene, cam)) + for pid_dir in pid_list: + img_paths = glob(os.path.join(train_path, scene, cam, pid_dir, "*.jpg")) + for img_path in img_paths: + pid = self.dataset_name + "_" + scene + "-" + pid_dir + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/market1501.py b/thirdparty/fast-reid/fastreid/data/datasets/market1501.py new file mode 100644 index 0000000000000000000000000000000000000000..d1968afe58d857d138c0cd1ddb45a5c87f945f2c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/market1501.py @@ -0,0 +1,90 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +import glob +import os.path as osp +import re +import warnings + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class Market1501(ImageDataset): + """Market1501. + + Reference: + Zheng et al. Scalable Person Re-identification: A Benchmark. ICCV 2015. + + URL: ``_ + + Dataset statistics: + - identities: 1501 (+1 for background). + - images: 12936 (train) + 3368 (query) + 15913 (gallery). + """ + _junk_pids = [0, -1] + dataset_dir = '' + dataset_url = 'http://188.138.127.15:81/Datasets/Market-1501-v15.09.15.zip' + dataset_name = "market1501" + + def __init__(self, root='datasets', market1501_500k=False, **kwargs): + # self.root = osp.abspath(osp.expanduser(root)) + self.root = root + self.dataset_dir = osp.join(self.root, self.dataset_dir) + + # allow alternative directory structure + self.data_dir = self.dataset_dir + data_dir = osp.join(self.data_dir, 'Market-1501-v15.09.15') + if osp.isdir(data_dir): + self.data_dir = data_dir + else: + warnings.warn('The current data structure is deprecated. 
Please ' + 'put data folders such as "bounding_box_train" under ' + '"Market-1501-v15.09.15".') + + self.train_dir = osp.join(self.data_dir, 'bounding_box_train') + self.query_dir = osp.join(self.data_dir, 'query') + self.gallery_dir = osp.join(self.data_dir, 'bounding_box_test') + self.extra_gallery_dir = osp.join(self.data_dir, 'images') + self.market1501_500k = market1501_500k + + required_files = [ + self.data_dir, + self.train_dir, + self.query_dir, + self.gallery_dir, + ] + if self.market1501_500k: + required_files.append(self.extra_gallery_dir) + self.check_before_run(required_files) + + train = self.process_dir(self.train_dir) + query = self.process_dir(self.query_dir, is_train=False) + gallery = self.process_dir(self.gallery_dir, is_train=False) + if self.market1501_500k: + gallery += self.process_dir(self.extra_gallery_dir, is_train=False) + + super(Market1501, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path, is_train=True): + img_paths = glob.glob(osp.join(dir_path, '*.jpg')) + pattern = re.compile(r'([-\d]+)_c(\d)') + + data = [] + for img_path in img_paths: + pid, camid = map(int, pattern.search(img_path).groups()) + if pid == -1: + continue # junk images are just ignored + assert 0 <= pid <= 1501 # pid == 0 means background + assert 1 <= camid <= 6 + camid -= 1 # index starts from 0 + if is_train: + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/msmt17.py b/thirdparty/fast-reid/fastreid/data/datasets/msmt17.py new file mode 100644 index 0000000000000000000000000000000000000000..3d77d2b68798aacebc86f24a643f33ef21217b26 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/msmt17.py @@ -0,0 +1,114 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import sys +import os +import os.path as osp + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY +##### Log ##### +# 22.01.2019 +# - add v2 +# - v1 and v2 differ in dir names +# - note that faces in v2 are blurred +TRAIN_DIR_KEY = 'train_dir' +TEST_DIR_KEY = 'test_dir' +VERSION_DICT = { + 'MSMT17_V1': { + TRAIN_DIR_KEY: 'train', + TEST_DIR_KEY: 'test', + }, + 'MSMT17_V2': { + TRAIN_DIR_KEY: 'mask_train_v2', + TEST_DIR_KEY: 'mask_test_v2', + } +} + + +@DATASET_REGISTRY.register() +class MSMT17(ImageDataset): + """MSMT17. + Reference: + Wei et al. Person Transfer GAN to Bridge Domain Gap for Person Re-Identification. CVPR 2018. + URL: ``_ + + Dataset statistics: + - identities: 4101. + - images: 32621 (train) + 11659 (query) + 82161 (gallery). + - cameras: 15. 
+ """ + # dataset_dir = 'MSMT17_V2' + dataset_url = None + dataset_name = 'msmt17' + + def __init__(self, root='datasets', **kwargs): + self.dataset_dir = root + + has_main_dir = False + for main_dir in VERSION_DICT: + if osp.exists(osp.join(self.dataset_dir, main_dir)): + train_dir = VERSION_DICT[main_dir][TRAIN_DIR_KEY] + test_dir = VERSION_DICT[main_dir][TEST_DIR_KEY] + has_main_dir = True + break + assert has_main_dir, 'Dataset folder not found' + + self.train_dir = osp.join(self.dataset_dir, main_dir, train_dir) + self.test_dir = osp.join(self.dataset_dir, main_dir, test_dir) + self.list_train_path = osp.join(self.dataset_dir, main_dir, 'list_train.txt') + self.list_val_path = osp.join(self.dataset_dir, main_dir, 'list_val.txt') + self.list_query_path = osp.join(self.dataset_dir, main_dir, 'list_query.txt') + self.list_gallery_path = osp.join(self.dataset_dir, main_dir, 'list_gallery.txt') + + required_files = [ + self.dataset_dir, + self.train_dir, + self.test_dir + ] + self.check_before_run(required_files) + + train = self.process_dir(self.train_dir, self.list_train_path) + val = self.process_dir(self.train_dir, self.list_val_path) + query = self.process_dir(self.test_dir, self.list_query_path, is_train=False) + gallery = self.process_dir(self.test_dir, self.list_gallery_path, is_train=False) + + num_train_pids = self.get_num_pids(train) + query_tmp = [] + for img_path, pid, camid in query: + query_tmp.append((img_path, pid+num_train_pids, camid)) + del query + query = query_tmp + + gallery_temp = [] + for img_path, pid, camid in gallery: + gallery_temp.append((img_path, pid+num_train_pids, camid)) + del gallery + gallery = gallery_temp + + # Note: to fairly compare with published methods on the conventional ReID setting, + # do not add val images to the training set. 
+ if 'combineall' in kwargs and kwargs['combineall']: + train += val + super(MSMT17, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path, list_path, is_train=True): + with open(list_path, 'r') as txt: + lines = txt.readlines() + + data = [] + + for img_idx, img_info in enumerate(lines): + img_path, pid = img_info.split(' ') + pid = int(pid) # no need to relabel + camid = int(img_path.split('_')[2]) - 1 # index starts from 0 + img_path = osp.join(dir_path, img_path) + if is_train: + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/pes3d.py b/thirdparty/fast-reid/fastreid/data/datasets/pes3d.py new file mode 100644 index 0000000000000000000000000000000000000000..4b4897eb7f5db6aa265ea90564f508b52b7a40de --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/pes3d.py @@ -0,0 +1,46 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from scipy.io import loadmat +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset +import pdb +import random +import numpy as np + +__all__ = ['PeS3D',] + + +@DATASET_REGISTRY.register() +class PeS3D(ImageDataset): + dataset_dir = "3DPeS" + dataset_name = "pes3d" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + pid_list = os.listdir(train_path) + for pid_dir in pid_list: + pid = self.dataset_name + "_" + pid_dir + img_list = glob(os.path.join(train_path, pid_dir, "*.bmp")) + for img_path in img_list: + camid = self.dataset_name + "_cam0" + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/pku.py b/thirdparty/fast-reid/fastreid/data/datasets/pku.py new file mode 100644 index 0000000000000000000000000000000000000000..0082d29ad40580960fd21ba1a076e55296c47491 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/pku.py @@ -0,0 +1,42 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['PKU', ] + + +@DATASET_REGISTRY.register() +class PKU(ImageDataset): + dataset_dir = "PKUv1a_128x48" + dataset_name = 'pku' + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + img_paths = glob(os.path.join(train_path, "*.png")) + + for img_path in img_paths: + split_path = img_path.split('/') + img_info = split_path[-1].split('_') + pid = self.dataset_name + "_" + img_info[0] + camid = self.dataset_name + "_" + img_info[1] + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/prai.py b/thirdparty/fast-reid/fastreid/data/datasets/prai.py new file mode 
100644 index 0000000000000000000000000000000000000000..42a02234a90a970832d0bb76e90a8a7d67a81db7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/prai.py @@ -0,0 +1,44 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from scipy.io import loadmat +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset +import pdb + +__all__ = ['PRAI',] + + +@DATASET_REGISTRY.register() +class PRAI(ImageDataset): + dataset_dir = "PRAI-1581" + dataset_name = 'prai' + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir, 'images') + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + img_paths = glob(os.path.join(train_path, "*.jpg")) + for img_path in img_paths: + split_path = img_path.split('/') + img_info = split_path[-1].split('_') + pid = self.dataset_name + "_" + img_info[0] + camid = self.dataset_name + "_" + img_info[1] + data.append([img_path, pid, camid]) + return data + diff --git a/thirdparty/fast-reid/fastreid/data/datasets/sensereid.py b/thirdparty/fast-reid/fastreid/data/datasets/sensereid.py new file mode 100644 index 0000000000000000000000000000000000000000..92d2c1632828f73fbccfb19dad1ff5607eed9edb --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/sensereid.py @@ -0,0 +1,45 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['SenseReID', ] + + +@DATASET_REGISTRY.register() +class SenseReID(ImageDataset): + dataset_dir = "SenseReID" + dataset_name = "senseid" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + file_path_list = ['test_gallery', 'test_prob'] + + for file_path in file_path_list: + sub_file = os.path.join(train_path, file_path) + img_name = glob(os.path.join(sub_file, "*.jpg")) + for img_path in img_name: + img_name = img_path.split('/')[-1] + img_info = img_name.split('_') + pid = self.dataset_name + "_" + img_info[0] + camid = self.dataset_name + "_" + img_info[1].split('.')[0] + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/shinpuhkan.py b/thirdparty/fast-reid/fastreid/data/datasets/shinpuhkan.py new file mode 100644 index 0000000000000000000000000000000000000000..ae240c1ec578c4bd64a30e1b296e440f74a0daa3 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/shinpuhkan.py @@ -0,0 +1,46 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['Shinpuhkan', ] + + +@DATASET_REGISTRY.register() +class Shinpuhkan(ImageDataset): + dataset_dir = "shinpuhkan" + dataset_name = 'shinpuhkan' + + def __init__(self, root='datasets', **kwargs): + self.root = 
root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + for root, dirs, files in os.walk(train_path): + img_names = list(filter(lambda x: x.endswith(".jpg"), files)) + # fmt: off + if len(img_names) == 0: continue + # fmt: on + for img_name in img_names: + img_path = os.path.join(root, img_name) + split_path = img_name.split('_') + pid = self.dataset_name + "_" + split_path[0] + camid = self.dataset_name + "_" + split_path[2] + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/sysu_mm.py b/thirdparty/fast-reid/fastreid/data/datasets/sysu_mm.py new file mode 100644 index 0000000000000000000000000000000000000000..bf67fff34894c10cc5b2866d9adb7a2a3ead0827 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/sysu_mm.py @@ -0,0 +1,48 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from scipy.io import loadmat +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset +import pdb + +__all__ = ['SYSU_mm', ] + + +@DATASET_REGISTRY.register() +class SYSU_mm(ImageDataset): + dataset_dir = "SYSU-MM01" + dataset_name = "sysumm01" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + file_path_list = ['cam1', 'cam2', 'cam4', 'cam5'] + + for file_path in file_path_list: + camid = self.dataset_name + "_" + file_path + pid_list = os.listdir(os.path.join(train_path, file_path)) + for pid_dir in pid_list: + pid = self.dataset_name + "_" + pid_dir + img_list = glob(os.path.join(train_path, file_path, pid_dir, "*.jpg")) + for img_path in img_list: + data.append([img_path, pid, camid]) + return data + diff --git a/thirdparty/fast-reid/fastreid/data/datasets/thermalworld.py b/thirdparty/fast-reid/fastreid/data/datasets/thermalworld.py new file mode 100644 index 0000000000000000000000000000000000000000..b6df954824d5711e66324454579e746140b789fa --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/thermalworld.py @@ -0,0 +1,45 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from scipy.io import loadmat +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset +import pdb +import random +import numpy as np + +__all__ = ['Thermalworld',] + + +@DATASET_REGISTRY.register() +class Thermalworld(ImageDataset): + dataset_dir = "thermalworld_rgb" + dataset_name = "thermalworld" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + pid_list = os.listdir(train_path) + for pid_dir in pid_list: + pid = self.dataset_name + "_" + pid_dir + img_list = 
glob(os.path.join(train_path, pid_dir, "*.jpg")) + for img_path in img_list: + camid = self.dataset_name + "_cam0" + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/vehicleid.py b/thirdparty/fast-reid/fastreid/data/datasets/vehicleid.py new file mode 100644 index 0000000000000000000000000000000000000000..5b42d0eec7a9576421b1cb36b0fdfaa330b65e2f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/vehicleid.py @@ -0,0 +1,124 @@ +# encoding: utf-8 +""" +@author: Jinkai Zheng +@contact: 1315673509@qq.com +""" + +import os.path as osp +import random + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class VehicleID(ImageDataset): + """VehicleID. + + Reference: + Liu et al. Deep relative distance learning: Tell the difference between similar vehicles. CVPR 2016. + + URL: ``_ + + Train dataset statistics: + - identities: 13164. + - images: 113346. + """ + dataset_dir = "vehicleid" + dataset_name = "vehicleid" + + def __init__(self, root='datasets', test_list='', **kwargs): + self.dataset_dir = osp.join(root, self.dataset_dir) + + self.image_dir = osp.join(self.dataset_dir, 'image') + self.train_list = osp.join(self.dataset_dir, 'train_test_split/train_list.txt') + if test_list: + self.test_list = test_list + else: + self.test_list = osp.join(self.dataset_dir, 'train_test_split/test_list_13164.txt') + + required_files = [ + self.dataset_dir, + self.image_dir, + self.train_list, + self.test_list, + ] + self.check_before_run(required_files) + + train = self.process_dir(self.train_list, is_train=True) + query, gallery = self.process_dir(self.test_list, is_train=False) + + super(VehicleID, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, list_file, is_train=True): + img_list_lines = open(list_file, 'r').readlines() + + dataset = [] + for idx, line in enumerate(img_list_lines): + line = line.strip() + vid = int(line.split(' ')[1]) + imgid = line.split(' ')[0] + img_path = osp.join(self.image_dir, imgid + '.jpg') + if is_train: + vid = self.dataset_name + "_" + str(vid) + dataset.append((img_path, vid, int(imgid))) + + if is_train: return dataset + else: + random.shuffle(dataset) + vid_container = set() + query = [] + gallery = [] + for sample in dataset: + if sample[1] not in vid_container: + vid_container.add(sample[1]) + gallery.append(sample) + else: + query.append(sample) + + return query, gallery + + +@DATASET_REGISTRY.register() +class SmallVehicleID(VehicleID): + """VehicleID. + Small test dataset statistics: + - identities: 800. + - images: 6493. + """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.test_list = osp.join(dataset_dir, 'train_test_split/test_list_800.txt') + + super(SmallVehicleID, self).__init__(root, self.test_list, **kwargs) + + +@DATASET_REGISTRY.register() +class MediumVehicleID(VehicleID): + """VehicleID. + Medium test dataset statistics: + - identities: 1600. + - images: 13377. + """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.test_list = osp.join(dataset_dir, 'train_test_split/test_list_1600.txt') + + super(MediumVehicleID, self).__init__(root, self.test_list, **kwargs) + + +@DATASET_REGISTRY.register() +class LargeVehicleID(VehicleID): + """VehicleID. + Large test dataset statistics: + - identities: 2400. + - images: 19777. 
+ """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.test_list = osp.join(dataset_dir, 'train_test_split/test_list_2400.txt') + + super(LargeVehicleID, self).__init__(root, self.test_list, **kwargs) diff --git a/thirdparty/fast-reid/fastreid/data/datasets/veri.py b/thirdparty/fast-reid/fastreid/data/datasets/veri.py new file mode 100644 index 0000000000000000000000000000000000000000..b585c1e27aba9c6e9b84c0e22dc20eea516c0b5e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/veri.py @@ -0,0 +1,68 @@ +# encoding: utf-8 +""" +@author: Jinkai Zheng +@contact: 1315673509@qq.com +""" + +import glob +import os.path as osp +import re + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class VeRi(ImageDataset): + """VeRi. + + Reference: + Liu et al. A Deep Learning based Approach for Progressive Vehicle Re-Identification. ECCV 2016. + + URL: ``_ + + Dataset statistics: + - identities: 775. + - images: 37778 (train) + 1678 (query) + 11579 (gallery). + """ + dataset_dir = "veri" + dataset_name = "veri" + + def __init__(self, root='datasets', **kwargs): + self.dataset_dir = osp.join(root, self.dataset_dir) + + self.train_dir = osp.join(self.dataset_dir, 'image_train') + self.query_dir = osp.join(self.dataset_dir, 'image_query') + self.gallery_dir = osp.join(self.dataset_dir, 'image_test') + + required_files = [ + self.dataset_dir, + self.train_dir, + self.query_dir, + self.gallery_dir, + ] + self.check_before_run(required_files) + + train = self.process_dir(self.train_dir) + query = self.process_dir(self.query_dir, is_train=False) + gallery = self.process_dir(self.gallery_dir, is_train=False) + + super(VeRi, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path, is_train=True): + img_paths = glob.glob(osp.join(dir_path, '*.jpg')) + pattern = re.compile(r'([\d]+)_c(\d\d\d)') + + data = [] + for img_path in img_paths: + pid, camid = map(int, pattern.search(img_path).groups()) + if pid == -1: continue # junk images are just ignored + assert 1 <= pid <= 776 + assert 1 <= camid <= 20 + camid -= 1 # index starts from 0 + if is_train: + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + data.append((img_path, pid, camid)) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/veriwild.py b/thirdparty/fast-reid/fastreid/data/datasets/veriwild.py new file mode 100644 index 0000000000000000000000000000000000000000..5902acbc79f14a3a73d3fba5dc8fe00c163a5838 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/veriwild.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +""" +@author: Jinkai Zheng +@contact: 1315673509@qq.com +""" + +import os.path as osp + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class VeRiWild(ImageDataset): + """VeRi-Wild. + + Reference: + Lou et al. A Large-Scale Dataset for Vehicle Re-Identification in the Wild. CVPR 2019. + + URL: ``_ + + Train dataset statistics: + - identities: 30671. + - images: 277797. 
+ """ + dataset_dir = "VERI-Wild" + dataset_name = "veriwild" + + def __init__(self, root='datasets', query_list='', gallery_list='', **kwargs): + self.dataset_dir = osp.join(root, self.dataset_dir) + + self.image_dir = osp.join(self.dataset_dir, 'images') + self.train_list = osp.join(self.dataset_dir, 'train_test_split/train_list.txt') + self.vehicle_info = osp.join(self.dataset_dir, 'train_test_split/vehicle_info.txt') + if query_list and gallery_list: + self.query_list = query_list + self.gallery_list = gallery_list + else: + self.query_list = osp.join(self.dataset_dir, 'train_test_split/test_10000_query.txt') + self.gallery_list = osp.join(self.dataset_dir, 'train_test_split/test_10000.txt') + + required_files = [ + self.image_dir, + self.train_list, + self.query_list, + self.gallery_list, + self.vehicle_info, + ] + self.check_before_run(required_files) + + self.imgid2vid, self.imgid2camid, self.imgid2imgpath = self.process_vehicle(self.vehicle_info) + + train = self.process_dir(self.train_list) + query = self.process_dir(self.query_list, is_train=False) + gallery = self.process_dir(self.gallery_list, is_train=False) + + super(VeRiWild, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, img_list, is_train=True): + img_list_lines = open(img_list, 'r').readlines() + + dataset = [] + for idx, line in enumerate(img_list_lines): + line = line.strip() + vid = int(line.split('/')[0]) + imgid = line.split('/')[1] + if is_train: + vid = self.dataset_name + "_" + str(vid) + dataset.append((self.imgid2imgpath[imgid], vid, int(self.imgid2camid[imgid]))) + + assert len(dataset) == len(img_list_lines) + return dataset + + def process_vehicle(self, vehicle_info): + imgid2vid = {} + imgid2camid = {} + imgid2imgpath = {} + vehicle_info_lines = open(vehicle_info, 'r').readlines() + + for idx, line in enumerate(vehicle_info_lines[1:]): + vid = line.strip().split('/')[0] + imgid = line.strip().split(';')[0].split('/')[1] + camid = line.strip().split(';')[1] + img_path = osp.join(self.image_dir, vid, imgid + '.jpg') + imgid2vid[imgid] = vid + imgid2camid[imgid] = camid + imgid2imgpath[imgid] = img_path + + assert len(imgid2vid) == len(vehicle_info_lines) - 1 + return imgid2vid, imgid2camid, imgid2imgpath + + +@DATASET_REGISTRY.register() +class SmallVeRiWild(VeRiWild): + """VeRi-Wild. + Small test dataset statistics: + - identities: 3000. + - images: 41861. + """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.query_list = osp.join(dataset_dir, 'train_test_split/test_3000_query.txt') + self.gallery_list = osp.join(dataset_dir, 'train_test_split/test_3000.txt') + + super(SmallVeRiWild, self).__init__(root, self.query_list, self.gallery_list, **kwargs) + + +@DATASET_REGISTRY.register() +class MediumVeRiWild(VeRiWild): + """VeRi-Wild. + Medium test dataset statistics: + - identities: 5000. + - images: 69389. + """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.query_list = osp.join(dataset_dir, 'train_test_split/test_5000_query.txt') + self.gallery_list = osp.join(dataset_dir, 'train_test_split/test_5000.txt') + + super(MediumVeRiWild, self).__init__(root, self.query_list, self.gallery_list, **kwargs) + + +@DATASET_REGISTRY.register() +class LargeVeRiWild(VeRiWild): + """VeRi-Wild. + Large test dataset statistics: + - identities: 10000. + - images: 138517. 
+ """ + + def __init__(self, root='datasets', **kwargs): + dataset_dir = osp.join(root, self.dataset_dir) + self.query_list = osp.join(dataset_dir, 'train_test_split/test_10000_query.txt') + self.gallery_list = osp.join(dataset_dir, 'train_test_split/test_10000.txt') + + super(LargeVeRiWild, self).__init__(root, self.query_list, self.gallery_list, **kwargs) diff --git a/thirdparty/fast-reid/fastreid/data/datasets/viper.py b/thirdparty/fast-reid/fastreid/data/datasets/viper.py new file mode 100644 index 0000000000000000000000000000000000000000..730264fcff34072919bfcdb8d2dccca7e651af93 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/viper.py @@ -0,0 +1,45 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os +from glob import glob + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['VIPeR', ] + + +@DATASET_REGISTRY.register() +class VIPeR(ImageDataset): + dataset_dir = "VIPeR" + dataset_name = "viper" + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.train_path = os.path.join(self.root, self.dataset_dir) + + required_files = [self.train_path] + self.check_before_run(required_files) + + train = self.process_train(self.train_path) + + super().__init__(train, [], [], **kwargs) + + def process_train(self, train_path): + data = [] + + file_path_list = ['cam_a', 'cam_b'] + + for file_path in file_path_list: + camid = self.dataset_name + "_" + file_path + img_list = glob(os.path.join(train_path, file_path, "*.bmp")) + for img_path in img_list: + img_name = img_path.split('/')[-1] + pid = self.dataset_name + "_" + img_name.split('_')[0] + data.append([img_path, pid, camid]) + + return data diff --git a/thirdparty/fast-reid/fastreid/data/datasets/wildtracker.py b/thirdparty/fast-reid/fastreid/data/datasets/wildtracker.py new file mode 100644 index 0000000000000000000000000000000000000000..d163d5db3aa674c8dd40010e1f9b86ec3ed623cb --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/datasets/wildtracker.py @@ -0,0 +1,59 @@ +# encoding: utf-8 +""" +@author: wangguanan +@contact: guan.wang0706@gmail.com +""" + +import glob +import os + +from .bases import ImageDataset +from ..datasets import DATASET_REGISTRY + + +@DATASET_REGISTRY.register() +class WildTrackCrop(ImageDataset): + """WildTrack. + Reference: + WILDTRACK: A Multi-camera HD Dataset for Dense Unscripted Pedestrian Detection + T. Chavdarova; P. Baqué; A. Maksai; S. Bouquet; C. Jose et al. 
+ URL: ``_ + Dataset statistics: + - identities: 313 + - images: 33979 (train only) + - cameras: 7 + Args: + data_path(str): path to WildTrackCrop dataset + combineall(bool): combine train and test sets as train set if True + """ + dataset_url = None + dataset_dir = 'Wildtrack_crop_dataset' + dataset_name = 'wildtrack' + + def __init__(self, root='datasets', **kwargs): + self.root = root + self.dataset_dir = os.path.join(self.root, self.dataset_dir) + + self.train_dir = os.path.join(self.dataset_dir, "crop") + + train = self.process_dir(self.train_dir) + query = [] + gallery = [] + + super(WildTrackCrop, self).__init__(train, query, gallery, **kwargs) + + def process_dir(self, dir_path): + r""" + :param dir_path: directory path saving images + Returns + data(list) = [img_path, pid, camid] + """ + data = [] + for dir_name in os.listdir(dir_path): + img_lists = glob.glob(os.path.join(dir_path, dir_name, "*.png")) + for img_path in img_lists: + pid = self.dataset_name + "_" + dir_name + camid = img_path.split('/')[-1].split('_')[0] + camid = self.dataset_name + "_" + camid + data.append([img_path, pid, camid]) + return data diff --git a/thirdparty/fast-reid/fastreid/data/samplers/__init__.py b/thirdparty/fast-reid/fastreid/data/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d0494569e97230312a70ba1082bc97ed0c0042ea --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/samplers/__init__.py @@ -0,0 +1,8 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .triplet_sampler import BalancedIdentitySampler, NaiveIdentitySampler +from .data_sampler import TrainingSampler, InferenceSampler diff --git a/thirdparty/fast-reid/fastreid/data/samplers/data_sampler.py b/thirdparty/fast-reid/fastreid/data/samplers/data_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..df6902fd9c836c18a1f5b5076aa74babeb9564d1 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/samplers/data_sampler.py @@ -0,0 +1,85 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" +import itertools +from typing import Optional + +import numpy as np +from torch.utils.data import Sampler + +from fastreid.utils import comm + + +class TrainingSampler(Sampler): + """ + In training, we only care about the "infinite stream" of training data. + So this sampler produces an infinite stream of indices and + all workers cooperate to correctly shuffle the indices and sample different indices. + The samplers in each worker effectively produces `indices[worker_id::num_workers]` + where `indices` is an infinite stream of indices consisting of + `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True) + or `range(size) + range(size) + ...` (if shuffle is False) + """ + + def __init__(self, size: int, shuffle: bool = True, seed: Optional[int] = None): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + shuffle (bool): whether to shuffle the indices or not + seed (int): the initial seed of the shuffle. Must be the same + across all workers. If None, will use a random seed shared + among workers (require synchronization among all workers). 
+ """ + self._size = size + assert size > 0 + self._shuffle = shuffle + if seed is None: + seed = comm.shared_random_seed() + self._seed = int(seed) + + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + def __iter__(self): + start = self._rank + yield from itertools.islice(self._infinite_indices(), start, None, self._world_size) + + def _infinite_indices(self): + np.random.seed(self._seed) + while True: + if self._shuffle: + yield from np.random.permutation(self._size) + else: + yield from np.arange(self._size) + + +class InferenceSampler(Sampler): + """ + Produce indices for inference. + Inference needs to run on the __exact__ set of samples, + therefore when the total number of samples is not divisible by the number of workers, + this sampler produces different number of samples on different workers. + """ + + def __init__(self, size: int): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + """ + self._size = size + assert size > 0 + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + shard_size = (self._size - 1) // self._world_size + 1 + begin = shard_size * self._rank + end = min(shard_size * (self._rank + 1), self._size) + self._local_indices = range(begin, end) + + def __iter__(self): + yield from self._local_indices + + def __len__(self): + return len(self._local_indices) diff --git a/thirdparty/fast-reid/fastreid/data/samplers/triplet_sampler.py b/thirdparty/fast-reid/fastreid/data/samplers/triplet_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..a9bcb3407c5e82b17b4c58672561f1038ad6b7c9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/samplers/triplet_sampler.py @@ -0,0 +1,169 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: liaoxingyu2@jd.com +""" + +import copy +import itertools +from collections import defaultdict +from typing import Optional + +import numpy as np +from torch.utils.data.sampler import Sampler + +from fastreid.utils import comm + + +def no_index(a, b): + assert isinstance(a, list) + return [i for i, j in enumerate(a) if j != b] + + +class BalancedIdentitySampler(Sampler): + def __init__(self, data_source: str, batch_size: int, num_instances: int, seed: Optional[int] = None): + self.data_source = data_source + self.batch_size = batch_size + self.num_instances = num_instances + self.num_pids_per_batch = batch_size // self.num_instances + + self.index_pid = defaultdict(list) + self.pid_cam = defaultdict(list) + self.pid_index = defaultdict(list) + + for index, info in enumerate(data_source): + pid = info[1] + camid = info[2] + self.index_pid[index] = pid + self.pid_cam[pid].append(camid) + self.pid_index[pid].append(index) + + self.pids = sorted(list(self.pid_index.keys())) + self.num_identities = len(self.pids) + + if seed is None: + seed = comm.shared_random_seed() + self._seed = int(seed) + + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + def __iter__(self): + start = self._rank + yield from itertools.islice(self._infinite_indices(), start, None, self._world_size) + + def _infinite_indices(self): + np.random.seed(self._seed) + while True: + # Shuffle identity list + identities = np.random.permutation(self.num_identities) + + # If remaining identities cannot be enough for a batch, + # just drop the remaining parts + drop_indices = self.num_identities % self.num_pids_per_batch + if drop_indices: identities = identities[:-drop_indices] + + ret = [] + for kid in identities: + i = 
np.random.choice(self.pid_index[self.pids[kid]]) + _, i_pid, i_cam = self.data_source[i] + ret.append(i) + pid_i = self.index_pid[i] + cams = self.pid_cam[pid_i] + index = self.pid_index[pid_i] + select_cams = no_index(cams, i_cam) + + if select_cams: + if len(select_cams) >= self.num_instances: + cam_indexes = np.random.choice(select_cams, size=self.num_instances - 1, replace=False) + else: + cam_indexes = np.random.choice(select_cams, size=self.num_instances - 1, replace=True) + for kk in cam_indexes: + ret.append(index[kk]) + else: + select_indexes = no_index(index, i) + if not select_indexes: + # Only one image for this identity + ind_indexes = [0] * (self.num_instances - 1) + elif len(select_indexes) >= self.num_instances: + ind_indexes = np.random.choice(select_indexes, size=self.num_instances - 1, replace=False) + else: + ind_indexes = np.random.choice(select_indexes, size=self.num_instances - 1, replace=True) + + for kk in ind_indexes: + ret.append(index[kk]) + + if len(ret) == self.batch_size: + yield from ret + ret = [] + + +class NaiveIdentitySampler(Sampler): + """ + Randomly sample N identities, then for each identity, + randomly sample K instances, therefore batch size is N*K. + Args: + - data_source (list): list of (img_path, pid, camid). + - num_instances (int): number of instances per identity in a batch. + - batch_size (int): number of examples in a batch. + """ + + def __init__(self, data_source: str, batch_size: int, num_instances: int, seed: Optional[int] = None): + self.data_source = data_source + self.batch_size = batch_size + self.num_instances = num_instances + self.num_pids_per_batch = batch_size // self.num_instances + + self.index_pid = defaultdict(list) + self.pid_cam = defaultdict(list) + self.pid_index = defaultdict(list) + + for index, info in enumerate(data_source): + pid = info[1] + camid = info[2] + self.index_pid[index] = pid + self.pid_cam[pid].append(camid) + self.pid_index[pid].append(index) + + self.pids = sorted(list(self.pid_index.keys())) + self.num_identities = len(self.pids) + + if seed is None: + seed = comm.shared_random_seed() + self._seed = int(seed) + + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + def __iter__(self): + start = self._rank + yield from itertools.islice(self._infinite_indices(), start, None, self._world_size) + + def _infinite_indices(self): + np.random.seed(self._seed) + while True: + avai_pids = copy.deepcopy(self.pids) + batch_idxs_dict = {} + + batch_indices = [] + while len(avai_pids) >= self.num_pids_per_batch: + selected_pids = np.random.choice(avai_pids, self.num_pids_per_batch, replace=False).tolist() + for pid in selected_pids: + # Register pid in batch_idxs_dict if not + if pid not in batch_idxs_dict: + idxs = copy.deepcopy(self.pid_index[pid]) + if len(idxs) < self.num_instances: + idxs = np.random.choice(idxs, size=self.num_instances, replace=True).tolist() + np.random.shuffle(idxs) + batch_idxs_dict[pid] = idxs + + avai_idxs = batch_idxs_dict[pid] + for _ in range(self.num_instances): + batch_indices.append(avai_idxs.pop(0)) + + if len(avai_idxs) < self.num_instances: avai_pids.remove(pid) + + assert len(batch_indices) == self.batch_size, f"batch indices have wrong " \ + f"length with {len(batch_indices)}!" 
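+            # For example (hypothetical config): batch_size=64 with num_instances=4
+            # gives num_pids_per_batch=16, so each yield emits 16 identities x 4 images.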
+ yield from batch_indices + batch_indices = [] diff --git a/thirdparty/fast-reid/fastreid/data/transforms/__init__.py b/thirdparty/fast-reid/fastreid/data/transforms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a4bea9f3e2eca817359fc97141fe81936cb24108 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/transforms/__init__.py @@ -0,0 +1,10 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + + +from .build import build_transforms +from .transforms import * +from .autoaugment import * diff --git a/thirdparty/fast-reid/fastreid/data/transforms/autoaugment.py b/thirdparty/fast-reid/fastreid/data/transforms/autoaugment.py new file mode 100644 index 0000000000000000000000000000000000000000..63c354b70dc0ba1df0b4b169365dc3ea20d7f5a5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/transforms/autoaugment.py @@ -0,0 +1,812 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +""" AutoAugment, RandAugment, and AugMix for PyTorch +This code implements the searched ImageNet policies with various tweaks and improvements and +does not include any of the search code. +AA and RA Implementation adapted from: + https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py +AugMix adapted from: + https://github.com/google-research/augmix +Papers: + AutoAugment: Learning Augmentation Policies from Data - https://arxiv.org/abs/1805.09501 + Learning Data Augmentation Strategies for Object Detection - https://arxiv.org/abs/1906.11172 + RandAugment: Practical automated data augmentation... - https://arxiv.org/abs/1909.13719 + AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty - https://arxiv.org/abs/1912.02781 +Hacked together by Ross Wightman +""" +import math +import random +import re + +import PIL +import numpy as np +from PIL import Image, ImageOps, ImageEnhance + +_PIL_VER = tuple([int(x) for x in PIL.__version__.split('.')[:2]]) + +_FILL = (128, 128, 128) + +# This signifies the max integer that the controller RNN could predict for the +# augmentation scheme. +_MAX_LEVEL = 10. 
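+# Levels are scaled into per-op argument ranges by the level_to_arg helpers below;
+# e.g. a hypothetical magnitude of 5 maps to (5 / _MAX_LEVEL) * 30. = 15 degrees for 'Rotate'.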
+ +_HPARAMS_DEFAULT = dict( + translate_const=57, + img_mean=_FILL, +) + +_RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC) + + +def _interpolation(kwargs): + interpolation = kwargs.pop('resample', Image.BILINEAR) + if isinstance(interpolation, (list, tuple)): + return random.choice(interpolation) + else: + return interpolation + + +def _check_args_tf(kwargs): + if 'fillcolor' in kwargs and _PIL_VER < (5, 0): + kwargs.pop('fillcolor') + kwargs['resample'] = _interpolation(kwargs) + + +def shear_x(img, factor, **kwargs): + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, factor, 0, 0, 1, 0), **kwargs) + + +def shear_y(img, factor, **kwargs): + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, 0, 0, factor, 1, 0), **kwargs) + + +def translate_x_rel(img, pct, **kwargs): + pixels = pct * img.size[0] + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs) + + +def translate_y_rel(img, pct, **kwargs): + pixels = pct * img.size[1] + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs) + + +def translate_x_abs(img, pixels, **kwargs): + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs) + + +def translate_y_abs(img, pixels, **kwargs): + _check_args_tf(kwargs) + return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs) + + +def rotate(img, degrees, **kwargs): + _check_args_tf(kwargs) + if _PIL_VER >= (5, 2): + return img.rotate(degrees, **kwargs) + elif _PIL_VER >= (5, 0): + w, h = img.size + post_trans = (0, 0) + rotn_center = (w / 2.0, h / 2.0) + angle = -math.radians(degrees) + matrix = [ + round(math.cos(angle), 15), + round(math.sin(angle), 15), + 0.0, + round(-math.sin(angle), 15), + round(math.cos(angle), 15), + 0.0, + ] + + def transform(x, y, matrix): + (a, b, c, d, e, f) = matrix + return a * x + b * y + c, d * x + e * y + f + + matrix[2], matrix[5] = transform( + -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix + ) + matrix[2] += rotn_center[0] + matrix[5] += rotn_center[1] + return img.transform(img.size, Image.AFFINE, matrix, **kwargs) + else: + return img.rotate(degrees, resample=kwargs['resample']) + + +def auto_contrast(img, **__): + return ImageOps.autocontrast(img) + + +def invert(img, **__): + return ImageOps.invert(img) + + +def equalize(img, **__): + return ImageOps.equalize(img) + + +def solarize(img, thresh, **__): + return ImageOps.solarize(img, thresh) + + +def solarize_add(img, add, thresh=128, **__): + lut = [] + for i in range(256): + if i < thresh: + lut.append(min(255, i + add)) + else: + lut.append(i) + if img.mode in ("L", "RGB"): + if img.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return img.point(lut) + else: + return img + + +def posterize(img, bits_to_keep, **__): + if bits_to_keep >= 8: + return img + return ImageOps.posterize(img, bits_to_keep) + + +def contrast(img, factor, **__): + return ImageEnhance.Contrast(img).enhance(factor) + + +def color(img, factor, **__): + return ImageEnhance.Color(img).enhance(factor) + + +def brightness(img, factor, **__): + return ImageEnhance.Brightness(img).enhance(factor) + + +def sharpness(img, factor, **__): + return ImageEnhance.Sharpness(img).enhance(factor) + + +def _randomly_negate(v): + """With 50% prob, negate the value""" + return -v if random.random() > 0.5 else v + + +def _rotate_level_to_arg(level, _hparams): + # range [-30, 30] + level = 
(level / _MAX_LEVEL) * 30. + level = _randomly_negate(level) + return level, + + +def _enhance_level_to_arg(level, _hparams): + # range [0.1, 1.9] + return (level / _MAX_LEVEL) * 1.8 + 0.1, + + +def _enhance_increasing_level_to_arg(level, _hparams): + # the 'no change' level is 1.0, moving away from that towards 0. or 2.0 increases the enhancement blend + # range [0.1, 1.9] + level = (level / _MAX_LEVEL) * .9 + level = 1.0 + _randomly_negate(level) + return level, + + +def _shear_level_to_arg(level, _hparams): + # range [-0.3, 0.3] + level = (level / _MAX_LEVEL) * 0.3 + level = _randomly_negate(level) + return level, + + +def _translate_abs_level_to_arg(level, hparams): + translate_const = hparams['translate_const'] + level = (level / _MAX_LEVEL) * float(translate_const) + level = _randomly_negate(level) + return level, + + +def _translate_rel_level_to_arg(level, hparams): + # default range [-0.45, 0.45] + translate_pct = hparams.get('translate_pct', 0.45) + level = (level / _MAX_LEVEL) * translate_pct + level = _randomly_negate(level) + return level, + + +def _posterize_level_to_arg(level, _hparams): + # As per Tensorflow TPU EfficientNet impl + # range [0, 4], 'keep 0 up to 4 MSB of original image' + # intensity/severity of augmentation decreases with level + return int((level / _MAX_LEVEL) * 4), + + +def _posterize_increasing_level_to_arg(level, hparams): + # As per Tensorflow models research and UDA impl + # range [4, 0], 'keep 4 down to 0 MSB of original image', + # intensity/severity of augmentation increases with level + return 4 - _posterize_level_to_arg(level, hparams)[0], + + +def _posterize_original_level_to_arg(level, _hparams): + # As per original AutoAugment paper description + # range [4, 8], 'keep 4 up to 8 MSB of image' + # intensity/severity of augmentation decreases with level + return int((level / _MAX_LEVEL) * 4) + 4, + + +def _solarize_level_to_arg(level, _hparams): + # range [0, 256] + # intensity/severity of augmentation decreases with level + return int((level / _MAX_LEVEL) * 256), + + +def _solarize_increasing_level_to_arg(level, _hparams): + # range [0, 256] + # intensity/severity of augmentation increases with level + return 256 - _solarize_level_to_arg(level, _hparams)[0], + + +def _solarize_add_level_to_arg(level, _hparams): + # range [0, 110] + return int((level / _MAX_LEVEL) * 110), + + +LEVEL_TO_ARG = { + 'AutoContrast': None, + 'Equalize': None, + 'Invert': None, + 'Rotate': _rotate_level_to_arg, + # There are several variations of the posterize level scaling in various Tensorflow/Google repositories/papers + 'Posterize': _posterize_level_to_arg, + 'PosterizeIncreasing': _posterize_increasing_level_to_arg, + 'PosterizeOriginal': _posterize_original_level_to_arg, + 'Solarize': _solarize_level_to_arg, + 'SolarizeIncreasing': _solarize_increasing_level_to_arg, + 'SolarizeAdd': _solarize_add_level_to_arg, + 'Color': _enhance_level_to_arg, + 'ColorIncreasing': _enhance_increasing_level_to_arg, + 'Contrast': _enhance_level_to_arg, + 'ContrastIncreasing': _enhance_increasing_level_to_arg, + 'Brightness': _enhance_level_to_arg, + 'BrightnessIncreasing': _enhance_increasing_level_to_arg, + 'Sharpness': _enhance_level_to_arg, + 'SharpnessIncreasing': _enhance_increasing_level_to_arg, + 'ShearX': _shear_level_to_arg, + 'ShearY': _shear_level_to_arg, + 'TranslateX': _translate_abs_level_to_arg, + 'TranslateY': _translate_abs_level_to_arg, + 'TranslateXRel': _translate_rel_level_to_arg, + 'TranslateYRel': _translate_rel_level_to_arg, +} + +NAME_TO_OP = { + 
'AutoContrast': auto_contrast, + 'Equalize': equalize, + 'Invert': invert, + 'Rotate': rotate, + 'Posterize': posterize, + 'PosterizeIncreasing': posterize, + 'PosterizeOriginal': posterize, + 'Solarize': solarize, + 'SolarizeIncreasing': solarize, + 'SolarizeAdd': solarize_add, + 'Color': color, + 'ColorIncreasing': color, + 'Contrast': contrast, + 'ContrastIncreasing': contrast, + 'Brightness': brightness, + 'BrightnessIncreasing': brightness, + 'Sharpness': sharpness, + 'SharpnessIncreasing': sharpness, + 'ShearX': shear_x, + 'ShearY': shear_y, + 'TranslateX': translate_x_abs, + 'TranslateY': translate_y_abs, + 'TranslateXRel': translate_x_rel, + 'TranslateYRel': translate_y_rel, +} + + +class AugmentOp: + + def __init__(self, name, prob=0.5, magnitude=10, hparams=None): + hparams = hparams or _HPARAMS_DEFAULT + self.aug_fn = NAME_TO_OP[name] + self.level_fn = LEVEL_TO_ARG[name] + self.prob = prob + self.magnitude = magnitude + self.hparams = hparams.copy() + self.kwargs = dict( + fillcolor=hparams['img_mean'] if 'img_mean' in hparams else _FILL, + resample=hparams['interpolation'] if 'interpolation' in hparams else _RANDOM_INTERPOLATION, + ) + + # If magnitude_std is > 0, we introduce some randomness + # in the usually fixed policy and sample magnitude from a normal distribution + # with mean `magnitude` and std-dev of `magnitude_std`. + # NOTE This is my own hack, being tested, not in papers or reference impls. + self.magnitude_std = self.hparams.get('magnitude_std', 0) + + def __call__(self, img): + if self.prob < 1.0 and random.random() > self.prob: + return img + magnitude = self.magnitude + if self.magnitude_std and self.magnitude_std > 0: + magnitude = random.gauss(magnitude, self.magnitude_std) + magnitude = min(_MAX_LEVEL, max(0, magnitude)) # clip to valid range + level_args = self.level_fn(magnitude, self.hparams) if self.level_fn is not None else tuple() + return self.aug_fn(img, *level_args, **self.kwargs) + + +def auto_augment_policy_v0(hparams): + # ImageNet v0 policy from TPU EfficientNet impl, cannot find a paper reference. 
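+    # Each sub-policy below is a pair of (op_name, probability, magnitude) tuples
+    # consumed by AugmentOp; e.g. ('Equalize', 0.8, 1) applies Equalize with
+    # probability 0.8 at magnitude 1 (illustrative reading of the tuple format).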
+ policy = [ + [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)], + [('Color', 0.4, 9), ('Equalize', 0.6, 3)], + [('Color', 0.4, 1), ('Rotate', 0.6, 8)], + [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)], + [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)], + [('Color', 0.2, 0), ('Equalize', 0.8, 8)], + [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)], + [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)], + [('Color', 0.6, 1), ('Equalize', 1.0, 2)], + [('Invert', 0.4, 9), ('Rotate', 0.6, 0)], + [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)], + [('Color', 0.4, 7), ('Equalize', 0.6, 0)], + [('Posterize', 0.4, 6), ('AutoContrast', 0.4, 7)], + [('Solarize', 0.6, 8), ('Color', 0.6, 9)], + [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)], + [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)], + [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)], + [('ShearY', 0.8, 0), ('Color', 0.6, 4)], + [('Color', 1.0, 0), ('Rotate', 0.6, 2)], + [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)], + [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)], + [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)], + [('Posterize', 0.8, 2), ('Solarize', 0.6, 10)], # This results in black image with Tpu posterize + [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)], + [('Color', 0.8, 6), ('Rotate', 0.4, 5)], + ] + pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy] + return pc + + +def auto_augment_policy_v0r(hparams): + # ImageNet v0 policy from TPU EfficientNet impl, with variation of Posterize used + # in Google research implementation (number of bits discarded increases with magnitude) + policy = [ + [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)], + [('Color', 0.4, 9), ('Equalize', 0.6, 3)], + [('Color', 0.4, 1), ('Rotate', 0.6, 8)], + [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)], + [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)], + [('Color', 0.2, 0), ('Equalize', 0.8, 8)], + [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)], + [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)], + [('Color', 0.6, 1), ('Equalize', 1.0, 2)], + [('Invert', 0.4, 9), ('Rotate', 0.6, 0)], + [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)], + [('Color', 0.4, 7), ('Equalize', 0.6, 0)], + [('PosterizeIncreasing', 0.4, 6), ('AutoContrast', 0.4, 7)], + [('Solarize', 0.6, 8), ('Color', 0.6, 9)], + [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)], + [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)], + [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)], + [('ShearY', 0.8, 0), ('Color', 0.6, 4)], + [('Color', 1.0, 0), ('Rotate', 0.6, 2)], + [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)], + [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)], + [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)], + [('PosterizeIncreasing', 0.8, 2), ('Solarize', 0.6, 10)], + [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)], + [('Color', 0.8, 6), ('Rotate', 0.4, 5)], + ] + pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy] + return pc + + +def auto_augment_policy_original(hparams): + # ImageNet policy from https://arxiv.org/abs/1805.09501 + policy = [ + [('PosterizeOriginal', 0.4, 8), ('Rotate', 0.6, 9)], + [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)], + [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)], + [('PosterizeOriginal', 0.6, 7), ('PosterizeOriginal', 0.6, 6)], + [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)], + [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)], + [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)], + [('PosterizeOriginal', 0.8, 5), ('Equalize', 1.0, 2)], + [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)], + [('Equalize', 0.6, 8), ('PosterizeOriginal', 0.4, 6)], + [('Rotate', 0.8, 8), ('Color', 0.4, 0)], + [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)], + 
[('Equalize', 0.0, 7), ('Equalize', 0.8, 8)], + [('Invert', 0.6, 4), ('Equalize', 1.0, 8)], + [('Color', 0.6, 4), ('Contrast', 1.0, 8)], + [('Rotate', 0.8, 8), ('Color', 1.0, 2)], + [('Color', 0.8, 8), ('Solarize', 0.8, 7)], + [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)], + [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)], + [('Color', 0.4, 0), ('Equalize', 0.6, 3)], + [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)], + [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)], + [('Invert', 0.6, 4), ('Equalize', 1.0, 8)], + [('Color', 0.6, 4), ('Contrast', 1.0, 8)], + [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)], + ] + pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy] + return pc + + +def auto_augment_policy_originalr(hparams): + # ImageNet policy from https://arxiv.org/abs/1805.09501 with research posterize variation + policy = [ + [('PosterizeIncreasing', 0.4, 8), ('Rotate', 0.6, 9)], + [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)], + [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)], + [('PosterizeIncreasing', 0.6, 7), ('PosterizeIncreasing', 0.6, 6)], + [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)], + [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)], + [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)], + [('PosterizeIncreasing', 0.8, 5), ('Equalize', 1.0, 2)], + [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)], + [('Equalize', 0.6, 8), ('PosterizeIncreasing', 0.4, 6)], + [('Rotate', 0.8, 8), ('Color', 0.4, 0)], + [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)], + [('Equalize', 0.0, 7), ('Equalize', 0.8, 8)], + [('Invert', 0.6, 4), ('Equalize', 1.0, 8)], + [('Color', 0.6, 4), ('Contrast', 1.0, 8)], + [('Rotate', 0.8, 8), ('Color', 1.0, 2)], + [('Color', 0.8, 8), ('Solarize', 0.8, 7)], + [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)], + [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)], + [('Color', 0.4, 0), ('Equalize', 0.6, 3)], + [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)], + [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)], + [('Invert', 0.6, 4), ('Equalize', 1.0, 8)], + [('Color', 0.6, 4), ('Contrast', 1.0, 8)], + [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)], + ] + pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy] + return pc + + +def auto_augment_policy(name="original"): + hparams = _HPARAMS_DEFAULT + if name == 'original': + return auto_augment_policy_original(hparams) + elif name == 'originalr': + return auto_augment_policy_originalr(hparams) + elif name == 'v0': + return auto_augment_policy_v0(hparams) + elif name == 'v0r': + return auto_augment_policy_v0r(hparams) + else: + assert False, 'Unknown AA policy (%s)' % name + + +class AutoAugment: + + def __init__(self, total_iter): + self.total_iter = total_iter + self.gamma = 0 + self.policy = auto_augment_policy() + + def __call__(self, img): + if random.uniform(0, 1) > self.gamma: + sub_policy = random.choice(self.policy) + self.gamma = min(1.0, self.gamma + 1.0 / self.total_iter) + for op in sub_policy: + img = op(img) + return img + else: + return img + + +def auto_augment_transform(config_str, hparams): + """ + Create a AutoAugment transform + :param config_str: String defining configuration of auto augmentation. Consists of multiple sections separated by + dashes ('-'). The first section defines the AutoAugment policy (one of 'v0', 'v0r', 'original', 'originalr'). 
+    The remaining sections, not order specific, determine
+        'mstd' - float std deviation of magnitude noise applied
+    Ex 'original-mstd0.5' results in AutoAugment with original policy, magnitude_std 0.5
+    :param hparams: Other hparams (kwargs) for the AutoAugmentation scheme
+    :return: A PyTorch compatible Transform
+    """
+    config = config_str.split('-')
+    policy_name = config[0]
+    config = config[1:]
+    for c in config:
+        cs = re.split(r'(\d.*)', c)
+        if len(cs) < 2:
+            continue
+        key, val = cs[:2]
+        if key == 'mstd':
+            # noise param injected via hparams for now
+            hparams.setdefault('magnitude_std', float(val))
+        else:
+            assert False, 'Unknown AutoAugment config section'
+    aa_policy = auto_augment_policy(policy_name)
+    return AutoAugment(aa_policy)
+
+
+_RAND_TRANSFORMS = [
+    'AutoContrast',
+    'Equalize',
+    'Invert',
+    'Rotate',
+    'Posterize',
+    'Solarize',
+    'SolarizeAdd',
+    'Color',
+    'Contrast',
+    'Brightness',
+    'Sharpness',
+    'ShearX',
+    'ShearY',
+    'TranslateXRel',
+    'TranslateYRel',
+    # 'Cutout'  # NOTE I've implemented this as random erasing separately
+]
+
+_RAND_INCREASING_TRANSFORMS = [
+    'AutoContrast',
+    'Equalize',
+    'Invert',
+    'Rotate',
+    'PosterizeIncreasing',
+    'SolarizeIncreasing',
+    'SolarizeAdd',
+    'ColorIncreasing',
+    'ContrastIncreasing',
+    'BrightnessIncreasing',
+    'SharpnessIncreasing',
+    'ShearX',
+    'ShearY',
+    'TranslateXRel',
+    'TranslateYRel',
+    # 'Cutout'  # NOTE I've implemented this as random erasing separately
+]
+
+# These experimental weights are based loosely on the relative improvements mentioned in the paper.
+# They may not result in increased performance, but could likely be tuned to do so.
+_RAND_CHOICE_WEIGHTS_0 = {
+    'Rotate': 0.3,
+    'ShearX': 0.2,
+    'ShearY': 0.2,
+    'TranslateXRel': 0.1,
+    'TranslateYRel': 0.1,
+    'Color': .025,
+    'Sharpness': 0.025,
+    'AutoContrast': 0.025,
+    'Solarize': .005,
+    'SolarizeAdd': .005,
+    'Contrast': .005,
+    'Brightness': .005,
+    'Equalize': .005,
+    'Posterize': 0,
+    'Invert': 0,
+}
+
+
+def _select_rand_weights(weight_idx=0, transforms=None):
+    transforms = transforms or _RAND_TRANSFORMS
+    assert weight_idx == 0  # only one set of weights currently
+    rand_weights = _RAND_CHOICE_WEIGHTS_0
+    probs = [rand_weights[k] for k in transforms]
+    probs /= np.sum(probs)
+    return probs
+
+
+def rand_augment_ops(magnitude=10, hparams=None, transforms=None):
+    hparams = hparams or _HPARAMS_DEFAULT
+    transforms = transforms or _RAND_TRANSFORMS
+    return [AugmentOp(
+        name, prob=0.5, magnitude=magnitude, hparams=hparams) for name in transforms]
+
+
+class RandAugment:
+    def __init__(self, ops, num_layers=2, choice_weights=None):
+        self.ops = ops
+        self.num_layers = num_layers
+        self.choice_weights = choice_weights
+
+    def __call__(self, img):
+        # no replacement when using weighted choice
+        ops = np.random.choice(
+            self.ops, self.num_layers, replace=self.choice_weights is None, p=self.choice_weights)
+        for op in ops:
+            img = op(img)
+        return img
+
+
+def rand_augment_transform(config_str, hparams):
+    """
+    Create a RandAugment transform
+    :param config_str: String defining configuration of random augmentation. Consists of multiple sections separated by
+        dashes ('-'). The first section defines the specific variant of rand augment (currently only 'rand'). The remaining
+        sections, not order specific, determine
+            'm' - integer magnitude of rand augment
+            'n' - integer num layers (number of transform ops selected per image)
+            'w' - integer probability weight index (index of a set of weights to influence choice of op)
+            'mstd' - float std deviation of magnitude noise applied
+            'inc' - integer (bool), use augmentations that increase in severity with magnitude (default: 0)
+        Ex 'rand-m9-n3-mstd0.5' results in RandAugment with magnitude 9, num_layers 3, magnitude_std 0.5
+        'rand-mstd1-w0' results in magnitude_std 1.0, weights 0, default magnitude of 10 and num_layers 2
+    :param hparams: Other hparams (kwargs) for the RandAugmentation scheme
+    :return: A PyTorch compatible Transform
+    """
+    magnitude = _MAX_LEVEL  # default to _MAX_LEVEL for magnitude (currently 10)
+    num_layers = 2  # default to 2 ops per image
+    weight_idx = None  # default to no probability weights for op choice
+    transforms = _RAND_TRANSFORMS
+    config = config_str.split('-')
+    assert config[0] == 'rand'
+    config = config[1:]
+    for c in config:
+        cs = re.split(r'(\d.*)', c)
+        if len(cs) < 2:
+            continue
+        key, val = cs[:2]
+        if key == 'mstd':
+            # noise param injected via hparams for now
+            hparams.setdefault('magnitude_std', float(val))
+        elif key == 'inc':
+            if bool(val):
+                transforms = _RAND_INCREASING_TRANSFORMS
+        elif key == 'm':
+            magnitude = int(val)
+        elif key == 'n':
+            num_layers = int(val)
+        elif key == 'w':
+            weight_idx = int(val)
+        else:
+            assert False, 'Unknown RandAugment config section'
+    ra_ops = rand_augment_ops(magnitude=magnitude, hparams=hparams, transforms=transforms)
+    choice_weights = None if weight_idx is None else _select_rand_weights(weight_idx)
+    return RandAugment(ra_ops, num_layers, choice_weights=choice_weights)
+
+
+_AUGMIX_TRANSFORMS = [
+    'AutoContrast',
+    'ColorIncreasing',  # not in paper
+    'ContrastIncreasing',  # not in paper
+    'BrightnessIncreasing',  # not in paper
+    'SharpnessIncreasing',  # not in paper
+    'Equalize',
+    'Rotate',
+    'PosterizeIncreasing',
+    'SolarizeIncreasing',
+    'ShearX',
+    'ShearY',
+    'TranslateXRel',
+    'TranslateYRel',
+]
+
+
+def augmix_ops(magnitude=10, hparams=None, transforms=None):
+    hparams = hparams or _HPARAMS_DEFAULT
+    transforms = transforms or _AUGMIX_TRANSFORMS
+    return [AugmentOp(
+        name, prob=1.0, magnitude=magnitude, hparams=hparams) for name in transforms]
+
+
+class AugMixAugment:
+    """ AugMix Transform
+    Adapted and improved from impl here: https://github.com/google-research/augmix/blob/master/imagenet.py
+    From paper: 'AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty -
+    https://arxiv.org/abs/1912.02781
+    """
+
+    def __init__(self, ops, alpha=1., width=3, depth=-1, blended=False):
+        self.ops = ops
+        self.alpha = alpha
+        self.width = width
+        self.depth = depth
+        self.blended = blended  # blended mode is faster but not well tested
+
+    def _calc_blended_weights(self, ws, m):
+        ws = ws * m
+        cump = 1.
+        rws = []
+        for w in ws[::-1]:
+            alpha = w / cump
+            cump *= (1 - alpha)
+            rws.append(alpha)
+        return np.array(rws[::-1], dtype=np.float32)
+
+    def _apply_blended(self, img, mixing_weights, m):
+        # This is my first crack at implementing a slightly faster mixed augmentation. Instead
+        # of accumulating the mix for each chain in a Numpy array and then blending with the original,
+        # it recomputes the blending coefficients and applies one PIL image blend per chain.
+        # TODO the results appear in the right ballpark but they differ by more than rounding.
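+        # Worked example (hypothetical inputs): for mixing_weights [0.5, 0.5] and
+        # m = 0.8, _calc_blended_weights returns ~[0.667, 0.4]; blending sequentially
+        # with those alphas leaves 0.2*orig + 0.4*aug1 + 0.4*aug2, matching the basic
+        # (1 - m)*orig + m*mix formulation.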
+        img_orig = img.copy()
+        ws = self._calc_blended_weights(mixing_weights, m)
+        for w in ws:
+            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)
+            ops = np.random.choice(self.ops, depth, replace=True)
+            img_aug = img_orig  # no ops are in-place, deep copy not necessary
+            for op in ops:
+                img_aug = op(img_aug)
+            img = Image.blend(img, img_aug, w)
+        return img
+
+    def _apply_basic(self, img, mixing_weights, m):
+        # This is a literal adaptation of the paper/official implementation without normalizations and
+        # PIL <-> Numpy conversions between every op. It is still quite CPU compute heavy compared to the
+        # typical augmentation transforms, could use a GPU / Kornia implementation.
+        img_shape = img.size[0], img.size[1], len(img.getbands())
+        mixed = np.zeros(img_shape, dtype=np.float32)
+        for mw in mixing_weights:
+            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)
+            ops = np.random.choice(self.ops, depth, replace=True)
+            img_aug = img  # no ops are in-place, deep copy not necessary
+            for op in ops:
+                img_aug = op(img_aug)
+            mixed += mw * np.asarray(img_aug, dtype=np.float32)
+        np.clip(mixed, 0, 255., out=mixed)
+        mixed = Image.fromarray(mixed.astype(np.uint8))
+        return Image.blend(img, mixed, m)
+
+    def __call__(self, img):
+        mixing_weights = np.float32(np.random.dirichlet([self.alpha] * self.width))
+        m = np.float32(np.random.beta(self.alpha, self.alpha))
+        if self.blended:
+            mixed = self._apply_blended(img, mixing_weights, m)
+        else:
+            mixed = self._apply_basic(img, mixing_weights, m)
+        return mixed
+
+
+def augment_and_mix_transform(config_str, hparams):
+    """ Create AugMix PyTorch transform
+    :param config_str: String defining configuration of random augmentation. Consists of multiple sections separated by
+        dashes ('-'). The first section defines the specific variant of augmentation mix (currently only 'augmix'). The remaining
+        sections, not order specific, determine
+            'm' - integer magnitude (severity) of augmentation mix (default: 3)
+            'w' - integer width of augmentation chain (default: 3)
+            'd' - integer depth of augmentation chain (-1 is random [1, 3], default: -1)
+            'b' - integer (bool), blend each branch of chain into end result without a final blend, less CPU (default: 0)
+            'mstd' - float std deviation of magnitude noise applied (default: 0)
+        Ex 'augmix-m5-w4-d2' results in AugMix with severity 5, chain width 4, chain depth 2
+    :param hparams: Other hparams (kwargs) for the Augmentation transforms
+    :return: A PyTorch compatible Transform
+    """
+    magnitude = 3
+    width = 3
+    depth = -1
+    alpha = 1.
+ blended = False + config = config_str.split('-') + assert config[0] == 'augmix' + config = config[1:] + for c in config: + cs = re.split(r'(\d.*)', c) + if len(cs) < 2: + continue + key, val = cs[:2] + if key == 'mstd': + # noise param injected via hparams for now + hparams.setdefault('magnitude_std', float(val)) + elif key == 'm': + magnitude = int(val) + elif key == 'w': + width = int(val) + elif key == 'd': + depth = int(val) + elif key == 'a': + alpha = float(val) + elif key == 'b': + blended = bool(val) + else: + assert False, 'Unknown AugMix config section' + ops = augmix_ops(magnitude=magnitude, hparams=hparams) + return AugMixAugment(ops, alpha=alpha, width=width, depth=depth, blended=blended) diff --git a/thirdparty/fast-reid/fastreid/data/transforms/build.py b/thirdparty/fast-reid/fastreid/data/transforms/build.py new file mode 100644 index 0000000000000000000000000000000000000000..52b87825788131efef8347e991c9e3b4d39b48d7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/transforms/build.py @@ -0,0 +1,71 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torchvision.transforms as T + +from .transforms import * +from .autoaugment import AutoAugment + + +def build_transforms(cfg, is_train=True): + res = [] + + if is_train: + size_train = cfg.INPUT.SIZE_TRAIN + + # augmix augmentation + do_augmix = cfg.INPUT.DO_AUGMIX + + # auto augmentation + do_autoaug = cfg.INPUT.DO_AUTOAUG + total_iter = cfg.SOLVER.MAX_ITER + + # horizontal filp + do_flip = cfg.INPUT.DO_FLIP + flip_prob = cfg.INPUT.FLIP_PROB + + # padding + do_pad = cfg.INPUT.DO_PAD + padding = cfg.INPUT.PADDING + padding_mode = cfg.INPUT.PADDING_MODE + + # color jitter + do_cj = cfg.INPUT.CJ.ENABLED + cj_prob = cfg.INPUT.CJ.PROB + cj_brightness = cfg.INPUT.CJ.BRIGHTNESS + cj_contrast = cfg.INPUT.CJ.CONTRAST + cj_saturation = cfg.INPUT.CJ.SATURATION + cj_hue = cfg.INPUT.CJ.HUE + + # random erasing + do_rea = cfg.INPUT.REA.ENABLED + rea_prob = cfg.INPUT.REA.PROB + rea_mean = cfg.INPUT.REA.MEAN + # random patch + do_rpt = cfg.INPUT.RPT.ENABLED + rpt_prob = cfg.INPUT.RPT.PROB + + if do_autoaug: + res.append(AutoAugment(total_iter)) + res.append(T.Resize(size_train, interpolation=3)) + if do_flip: + res.append(T.RandomHorizontalFlip(p=flip_prob)) + if do_pad: + res.extend([T.Pad(padding, padding_mode=padding_mode), + T.RandomCrop(size_train)]) + if do_cj: + res.append(T.RandomApply([T.ColorJitter(cj_brightness, cj_contrast, cj_saturation, cj_hue)], p=cj_prob)) + if do_augmix: + res.append(AugMix()) + if do_rea: + res.append(RandomErasing(probability=rea_prob, mean=rea_mean)) + if do_rpt: + res.append(RandomPatch(prob_happen=rpt_prob)) + else: + size_test = cfg.INPUT.SIZE_TEST + res.append(T.Resize(size_test, interpolation=3)) + res.append(ToTensor()) + return T.Compose(res) diff --git a/thirdparty/fast-reid/fastreid/data/transforms/functional.py b/thirdparty/fast-reid/fastreid/data/transforms/functional.py new file mode 100644 index 0000000000000000000000000000000000000000..6e96c114abd9768ec07756edbb73248e7a644ddb --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/transforms/functional.py @@ -0,0 +1,190 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import numpy as np +import torch +from PIL import Image, ImageOps, ImageEnhance + + +def to_tensor(pic): + """Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor. + + See ``ToTensor`` for more details. + + Args: + pic (PIL Image or numpy.ndarray): Image to be converted to tensor. 
+ + Returns: + Tensor: Converted image. + """ + if isinstance(pic, np.ndarray): + assert len(pic.shape) in (2, 3) + # handle numpy array + if pic.ndim == 2: + pic = pic[:, :, None] + + img = torch.from_numpy(pic.transpose((2, 0, 1))) + # backward compatibility + if isinstance(img, torch.ByteTensor): + return img.float() + else: + return img + + # handle PIL Image + if pic.mode == 'I': + img = torch.from_numpy(np.array(pic, np.int32, copy=False)) + elif pic.mode == 'I;16': + img = torch.from_numpy(np.array(pic, np.int16, copy=False)) + elif pic.mode == 'F': + img = torch.from_numpy(np.array(pic, np.float32, copy=False)) + elif pic.mode == '1': + img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False)) + else: + img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes())) + # PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK + if pic.mode == 'YCbCr': + nchannel = 3 + elif pic.mode == 'I;16': + nchannel = 1 + else: + nchannel = len(pic.mode) + img = img.view(pic.size[1], pic.size[0], nchannel) + # put it from HWC to CHW format + # yikes, this transpose takes 80% of the loading time/CPU + img = img.transpose(0, 1).transpose(0, 2).contiguous() + if isinstance(img, torch.ByteTensor): + return img.float() + else: + return img + + +def int_parameter(level, maxval): + """Helper function to scale `val` between 0 and maxval . + Args: + level: Level of the operation that will be between [0, `PARAMETER_MAX`]. + maxval: Maximum value that the operation can have. This will be scaled to + level/PARAMETER_MAX. + Returns: + An int that results from scaling `maxval` according to `level`. + """ + return int(level * maxval / 10) + + +def float_parameter(level, maxval): + """Helper function to scale `val` between 0 and maxval. + Args: + level: Level of the operation that will be between [0, `PARAMETER_MAX`]. + maxval: Maximum value that the operation can have. This will be scaled to + level/PARAMETER_MAX. + Returns: + A float that results from scaling `maxval` according to `level`. + """ + return float(level) * maxval / 10. 
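+
+
+# Worked example of the scaling helpers above (PARAMETER_MAX is assumed to be 10):
+#   int_parameter(7, 30)     -> int(7 * 30 / 10) = 21
+#   float_parameter(7, 0.3)  -> 7 * 0.3 / 10     = 0.21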
+ + +def sample_level(n): + return np.random.uniform(low=0.1, high=n) + + +def autocontrast(pil_img, *args): + return ImageOps.autocontrast(pil_img) + + +def equalize(pil_img, *args): + return ImageOps.equalize(pil_img) + + +def posterize(pil_img, level, *args): + level = int_parameter(sample_level(level), 4) + return ImageOps.posterize(pil_img, 4 - level) + + +def rotate(pil_img, level, *args): + degrees = int_parameter(sample_level(level), 30) + if np.random.uniform() > 0.5: + degrees = -degrees + return pil_img.rotate(degrees, resample=Image.BILINEAR) + + +def solarize(pil_img, level, *args): + level = int_parameter(sample_level(level), 256) + return ImageOps.solarize(pil_img, 256 - level) + + +def shear_x(pil_img, level, image_size): + level = float_parameter(sample_level(level), 0.3) + if np.random.uniform() > 0.5: + level = -level + return pil_img.transform(image_size, + Image.AFFINE, (1, level, 0, 0, 1, 0), + resample=Image.BILINEAR) + + +def shear_y(pil_img, level, image_size): + level = float_parameter(sample_level(level), 0.3) + if np.random.uniform() > 0.5: + level = -level + return pil_img.transform(image_size, + Image.AFFINE, (1, 0, 0, level, 1, 0), + resample=Image.BILINEAR) + + +def translate_x(pil_img, level, image_size): + level = int_parameter(sample_level(level), image_size[0] / 3) + if np.random.random() > 0.5: + level = -level + return pil_img.transform(image_size, + Image.AFFINE, (1, 0, level, 0, 1, 0), + resample=Image.BILINEAR) + + +def translate_y(pil_img, level, image_size): + level = int_parameter(sample_level(level), image_size[1] / 3) + if np.random.random() > 0.5: + level = -level + return pil_img.transform(image_size, + Image.AFFINE, (1, 0, 0, 0, 1, level), + resample=Image.BILINEAR) + + +# operation that overlaps with ImageNet-C's test set +def color(pil_img, level, *args): + level = float_parameter(sample_level(level), 1.8) + 0.1 + return ImageEnhance.Color(pil_img).enhance(level) + + +# operation that overlaps with ImageNet-C's test set +def contrast(pil_img, level, *args): + level = float_parameter(sample_level(level), 1.8) + 0.1 + return ImageEnhance.Contrast(pil_img).enhance(level) + + +# operation that overlaps with ImageNet-C's test set +def brightness(pil_img, level, *args): + level = float_parameter(sample_level(level), 1.8) + 0.1 + return ImageEnhance.Brightness(pil_img).enhance(level) + + +# operation that overlaps with ImageNet-C's test set +def sharpness(pil_img, level, *args): + level = float_parameter(sample_level(level), 1.8) + 0.1 + return ImageEnhance.Sharpness(pil_img).enhance(level) + + +augmentations_reid = [ + autocontrast, equalize, posterize, shear_x, shear_y, + color, contrast, brightness, sharpness +] + +augmentations = [ + autocontrast, equalize, posterize, rotate, solarize, shear_x, shear_y, + translate_x, translate_y +] + +augmentations_all = [ + autocontrast, equalize, posterize, rotate, solarize, shear_x, shear_y, + translate_x, translate_y, color, contrast, brightness, sharpness +] diff --git a/thirdparty/fast-reid/fastreid/data/transforms/transforms.py b/thirdparty/fast-reid/fastreid/data/transforms/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..0de5562892913a6e838d075adf151c931a8f39c7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/data/transforms/transforms.py @@ -0,0 +1,204 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +__all__ = ['ToTensor', 'RandomErasing', 'RandomPatch', 'AugMix',] + +import math +import random +from collections import 
deque
+
+import numpy as np
+from PIL import Image
+
+from .functional import to_tensor, augmentations_reid
+
+
+class ToTensor(object):
+    """Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor.
+
+    Converts a PIL Image or numpy.ndarray (H x W x C) in the range
+    [0, 255] to a torch.FloatTensor of shape (C x H x W) in the range [0.0, 255.0]
+    if the PIL Image belongs to one of the modes (L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK, 1)
+    or if the numpy.ndarray has dtype = np.uint8
+
+    In the other cases, tensors are returned without scaling.
+    """
+
+    def __call__(self, pic):
+        """
+        Args:
+            pic (PIL Image or numpy.ndarray): Image to be converted to tensor.
+
+        Returns:
+            Tensor: Converted image.
+        """
+        return to_tensor(pic)
+
+    def __repr__(self):
+        return self.__class__.__name__ + '()'
+
+
+class RandomErasing(object):
+    """ Randomly selects a rectangle region in an image and erases its pixels.
+        'Random Erasing Data Augmentation' by Zhong et al.
+        See https://arxiv.org/pdf/1708.04896.pdf
+    Args:
+        probability: The probability that the Random Erasing operation will be performed.
+        sl: Minimum proportion of erased area against input image.
+        sh: Maximum proportion of erased area against input image.
+        r1: Minimum aspect ratio of erased area.
+        mean: Erasing value (per channel, on the 0-255 scale).
+    """
+
+    # NOTE: the previous default, `255 * (0.49735, 0.4822, 0.4465)`, replicated the tuple
+    # 255 times instead of scaling each channel mean; the evident intent is a per-channel
+    # mean pixel value, so each component is scaled by 255 individually here.
+    def __init__(self, probability=0.5, sl=0.02, sh=0.4, r1=0.3,
+                 mean=(255 * 0.49735, 255 * 0.4822, 255 * 0.4465)):
+        self.probability = probability
+        self.mean = mean
+        self.sl = sl
+        self.sh = sh
+        self.r1 = r1
+
+    def __call__(self, img):
+        img = np.asarray(img, dtype=np.float32).copy()
+        if random.uniform(0, 1) > self.probability:
+            return img
+
+        for attempt in range(100):
+            area = img.shape[0] * img.shape[1]
+            target_area = random.uniform(self.sl, self.sh) * area
+            aspect_ratio = random.uniform(self.r1, 1 / self.r1)
+
+            h = int(round(math.sqrt(target_area * aspect_ratio)))
+            w = int(round(math.sqrt(target_area / aspect_ratio)))
+
+            if w < img.shape[1] and h < img.shape[0]:
+                x1 = random.randint(0, img.shape[0] - h)
+                y1 = random.randint(0, img.shape[1] - w)
+                if img.shape[2] == 3:
+                    img[x1:x1 + h, y1:y1 + w, 0] = self.mean[0]
+                    img[x1:x1 + h, y1:y1 + w, 1] = self.mean[1]
+                    img[x1:x1 + h, y1:y1 + w, 2] = self.mean[2]
+                else:
+                    img[x1:x1 + h, y1:y1 + w, 0] = self.mean[0]
+                return img
+        return img
+
+
+class RandomPatch(object):
+    """Random patch data augmentation.
+    There is a patch pool that stores randomly extracted patches from person images.
+    For each input image, RandomPatch
+        1) extracts a random patch and stores the patch in the patch pool;
+        2) randomly selects a patch from the patch pool and pastes it on the
+           input (at random position) to simulate occlusion.
+    Reference:
+        - Zhou et al. Omni-Scale Feature Learning for Person Re-Identification. ICCV, 2019.
+        - Zhou et al. Learning Generalisable Omni-Scale Representations
+          for Person Re-Identification. arXiv preprint, 2019.
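+
+    Example (illustrative)::
+
+        transform = RandomPatch(prob_happen=0.5, min_sample_size=100)
+        out = transform(img)  # returns img unchanged until the pool holds min_sample_size patches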
+ """ + + def __init__(self, prob_happen=0.5, pool_capacity=50000, min_sample_size=100, + patch_min_area=0.01, patch_max_area=0.5, patch_min_ratio=0.1, + prob_rotate=0.5, prob_flip_leftright=0.5, + ): + self.prob_happen = prob_happen + + self.patch_min_area = patch_min_area + self.patch_max_area = patch_max_area + self.patch_min_ratio = patch_min_ratio + + self.prob_rotate = prob_rotate + self.prob_flip_leftright = prob_flip_leftright + + self.patchpool = deque(maxlen=pool_capacity) + self.min_sample_size = min_sample_size + + def generate_wh(self, W, H): + area = W * H + for attempt in range(100): + target_area = random.uniform(self.patch_min_area, self.patch_max_area) * area + aspect_ratio = random.uniform(self.patch_min_ratio, 1. / self.patch_min_ratio) + h = int(round(math.sqrt(target_area * aspect_ratio))) + w = int(round(math.sqrt(target_area / aspect_ratio))) + if w < W and h < H: + return w, h + return None, None + + def transform_patch(self, patch): + if random.uniform(0, 1) > self.prob_flip_leftright: + patch = patch.transpose(Image.FLIP_LEFT_RIGHT) + if random.uniform(0, 1) > self.prob_rotate: + patch = patch.rotate(random.randint(-10, 10)) + return patch + + def __call__(self, img): + if isinstance(img, np.ndarray): + img = Image.fromarray(img.astype(np.uint8)) + + W, H = img.size # original image size + + # collect new patch + w, h = self.generate_wh(W, H) + if w is not None and h is not None: + x1 = random.randint(0, W - w) + y1 = random.randint(0, H - h) + new_patch = img.crop((x1, y1, x1 + w, y1 + h)) + self.patchpool.append(new_patch) + + if len(self.patchpool) < self.min_sample_size: + return img + + if random.uniform(0, 1) > self.prob_happen: + return img + + # paste a randomly selected patch on a random position + patch = random.sample(self.patchpool, 1)[0] + patchW, patchH = patch.size + x1 = random.randint(0, W - patchW) + y1 = random.randint(0, H - patchH) + patch = self.transform_patch(patch) + img.paste(patch, (x1, y1)) + + return img + + +class AugMix(object): + """ Perform AugMix augmentation and compute mixture. + Args: + aug_prob_coeff: Probability distribution coefficients. + mixture_width: Number of augmentation chains to mix per augmented example. + mixture_depth: Depth of augmentation chains. -1 denotes stochastic depth in [1, 3]' + severity: Severity of underlying augmentation operators (between 1 to 10). + """ + + def __init__(self, aug_prob_coeff=1, mixture_width=3, mixture_depth=-1, severity=1): + self.aug_prob_coeff = aug_prob_coeff + self.mixture_width = mixture_width + self.mixture_depth = mixture_depth + self.severity = severity + self.aug_list = augmentations_reid + + def __call__(self, image): + """Perform AugMix augmentations and compute mixture. + Returns: + mixed: Augmented and mixed image. 
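+
+        Example (illustrative)::
+
+            augmix = AugMix(mixture_width=3, mixture_depth=-1, severity=1)
+            mixed = augmix(pil_image)  # note: returns a float32 numpy array in [0, 255], not a PIL Image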
+ """ + ws = np.float32( + np.random.dirichlet([self.aug_prob_coeff] * self.mixture_width)) + m = np.float32(np.random.beta(self.aug_prob_coeff, self.aug_prob_coeff)) + + image = np.asarray(image, dtype=np.float32).copy() + mix = np.zeros_like(image) + h, w = image.shape[0], image.shape[1] + for i in range(self.mixture_width): + image_aug = Image.fromarray(image.copy().astype(np.uint8)) + depth = self.mixture_depth if self.mixture_depth > 0 else np.random.randint(1, 4) + for _ in range(depth): + op = np.random.choice(self.aug_list) + image_aug = op(image_aug, self.severity, (w, h)) + mix += ws[i] * np.asarray(image_aug, dtype=np.float32) + + mixed = (1 - m) * image + m * mix + return mixed diff --git a/thirdparty/fast-reid/fastreid/engine/__init__.py b/thirdparty/fast-reid/fastreid/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..95e829969731467823ce679eb07774fae71b2f61 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/engine/__init__.py @@ -0,0 +1,15 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +from .train_loop import * + +__all__ = [k for k in globals().keys() if not k.startswith("_")] + + +# prefer to let hooks and defaults live in separate namespaces (therefore not in __all__) +# but still make them available here +from .hooks import * +from .defaults import * +from .launch import * diff --git a/thirdparty/fast-reid/fastreid/engine/defaults.py b/thirdparty/fast-reid/fastreid/engine/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..69482be12872f5747d6825801ca0fc068ec298cf --- /dev/null +++ b/thirdparty/fast-reid/fastreid/engine/defaults.py @@ -0,0 +1,515 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +This file contains components with some default boilerplate logic user may need +in training / testing. They will not work for everyone, but many users may find them useful. +The behavior of functions/classes in this file is subject to change, +since they are meant to represent the "common default behavior" people need in their projects. +""" + +import argparse +import logging +import os +import sys +from collections import OrderedDict + +import torch +import torch.nn.functional as F +from torch.nn.parallel import DistributedDataParallel + +from fastreid.data import build_reid_test_loader, build_reid_train_loader +from fastreid.evaluation import (DatasetEvaluator, ReidEvaluator, + inference_on_dataset, print_csv_format) +from fastreid.modeling.meta_arch import build_model +from fastreid.solver import build_lr_scheduler, build_optimizer +from fastreid.utils import comm +from fastreid.utils.checkpoint import Checkpointer +from fastreid.utils.collect_env import collect_env_info +from fastreid.utils.env import seed_all_rng +from fastreid.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter +from fastreid.utils.file_io import PathManager +from fastreid.utils.logger import setup_logger +from . import hooks +from .train_loop import SimpleTrainer + +__all__ = ["default_argument_parser", "default_setup", "DefaultPredictor", "DefaultTrainer"] + + +def default_argument_parser(): + """ + Create a parser with some common arguments used by fastreid users. 
+ Returns: + argparse.ArgumentParser: + """ + parser = argparse.ArgumentParser(description="fastreid Training") + parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file") + parser.add_argument( + "--resume", + action="store_true", + help="whether to attempt to resume from the checkpoint directory", + ) + parser.add_argument("--eval-only", action="store_true", help="perform evaluation only") + parser.add_argument("--num-gpus", type=int, default=1, help="number of gpus *per machine*") + parser.add_argument("--num-machines", type=int, default=1, help="total number of machines") + parser.add_argument( + "--machine-rank", type=int, default=0, help="the rank of this machine (unique per machine)" + ) + + # PyTorch still may leave orphan processes in multi-gpu training. + # Therefore we use a deterministic way to obtain port, + # so that users are aware of orphan processes by seeing the port occupied. + port = 2 ** 15 + 2 ** 14 + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14 + parser.add_argument("--dist-url", default="tcp://127.0.0.1:{}".format(port)) + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + return parser + + +def default_setup(cfg, args): + """ + Perform some basic common setups at the beginning of a job, including: + 1. Set up the detectron2 logger + 2. Log basic information about environment, cmdline arguments, and config + 3. Backup the config to the output directory + Args: + cfg (CfgNode): the full config to be used + args (argparse.NameSpace): the command line arguments to be logged + """ + output_dir = cfg.OUTPUT_DIR + if comm.is_main_process() and output_dir: + PathManager.mkdirs(output_dir) + + rank = comm.get_rank() + setup_logger(output_dir, distributed_rank=rank, name="fvcore") + logger = setup_logger(output_dir, distributed_rank=rank) + + logger.info("Rank of current process: {}. World size: {}".format(rank, comm.get_world_size())) + logger.info("Environment info:\n" + collect_env_info()) + + logger.info("Command line arguments: " + str(args)) + if hasattr(args, "config_file") and args.config_file != "": + logger.info( + "Contents of args.config_file={}:\n{}".format( + args.config_file, PathManager.open(args.config_file, "r").read() + ) + ) + + logger.info("Running with full config:\n{}".format(cfg)) + if comm.is_main_process() and output_dir: + # Note: some of our scripts may expect the existence of + # config.yaml in output directory + path = os.path.join(output_dir, "config.yaml") + with PathManager.open(path, "w") as f: + f.write(cfg.dump()) + logger.info("Full config saved to {}".format(os.path.abspath(path))) + + # make sure each worker has a different, yet deterministic seed if specified + seed_all_rng() + + # cudnn benchmark has large overhead. It shouldn't be used considering the small size of + # typical validation set. + if not (hasattr(args, "eval_only") and args.eval_only): + torch.backends.cudnn.benchmark = cfg.CUDNN_BENCHMARK + + +class DefaultPredictor: + """ + Create a simple end-to-end predictor with the given config. + The predictor takes an BGR image, resizes it to the specified resolution, + runs the model and produces a dict of predictions. + This predictor takes care of model loading and input preprocessing for you. + If you'd like to do anything more fancy, please refer to its source code + as examples to build and use the model manually. + Attributes: + Examples: + .. 
code-block:: python + pred = DefaultPredictor(cfg) + inputs = cv2.imread("input.jpg") + outputs = pred(inputs) + """ + + def __init__(self, cfg): + self.cfg = cfg.clone() # cfg can be modified by model + self.cfg.defrost() + self.cfg.MODEL.BACKBONE.PRETRAIN = False + self.model = build_model(self.cfg) + self.model.eval() + + Checkpointer(self.model).load(cfg.MODEL.WEIGHTS) + + def __call__(self, image): + """ + Args: + image (torch.tensor): an image tensor of shape (B, C, H, W). + Returns: + predictions (torch.tensor): the output features of the model + """ + inputs = {"images": image} + with torch.no_grad(): # https://github.com/sphinx-doc/sphinx/issues/4258 + predictions = self.model(inputs) + # Normalize feature to compute cosine distance + features = F.normalize(predictions) + features = features.cpu().data + return features + + +class DefaultTrainer(SimpleTrainer): + """ + A trainer with default training logic. Compared to `SimpleTrainer`, it + contains the following logic in addition: + 1. Create model, optimizer, scheduler, dataloader from the given config. + 2. Load a checkpoint or `cfg.MODEL.WEIGHTS`, if exists. + 3. Register a few common hooks. + It is created to simplify the **standard model training workflow** and reduce code boilerplate + for users who only need the standard training workflow, with standard features. + It means this class makes *many assumptions* about your training logic that + may easily become invalid in a new research. In fact, any assumptions beyond those made in the + :class:`SimpleTrainer` are too much for research. + The code of this class has been annotated about restrictive assumptions it mades. + When they do not work for you, you're encouraged to: + 1. Overwrite methods of this class, OR: + 2. Use :class:`SimpleTrainer`, which only does minimal SGD training and + nothing else. You can then add your own hooks if needed. OR: + 3. Write your own training loop similar to `tools/plain_train_net.py`. + Also note that the behavior of this class, like other functions/classes in + this file, is not stable, since it is meant to represent the "common default behavior". + It is only guaranteed to work well with the standard models and training workflow in fastreid. + To obtain more stable behavior, write your own training logic with other public APIs. + Attributes: + scheduler: + checkpointer: + cfg (CfgNode): + Examples: + .. code-block:: python + trainer = DefaultTrainer(cfg) + trainer.resume_or_load() # load last checkpoint or MODEL.WEIGHTS + trainer.train() + """ + + def __init__(self, cfg): + """ + Args: + cfg (CfgNode): + """ + logger = logging.getLogger("fastreid") + if not logger.isEnabledFor(logging.INFO): # setup_logger is not called for fastreid + setup_logger() + + # Assume these objects must be constructed in this order. + data_loader = self.build_train_loader(cfg) + cfg = self.auto_scale_hyperparams(cfg, data_loader) + model = self.build_model(cfg) + optimizer = self.build_optimizer(cfg, model) + + # For training, wrap with DDP. But don't need this for inference. + if comm.get_world_size() > 1: + # ref to https://github.com/pytorch/pytorch/issues/22049 to set `find_unused_parameters=True` + # for part of the parameters is not updated. + model = DistributedDataParallel( + model, device_ids=[comm.get_local_rank()], broadcast_buffers=False + ) + + super().__init__(model, data_loader, optimizer, cfg.SOLVER.AMP_ENABLED) + + self.scheduler = self.build_lr_scheduler(cfg, optimizer) + # Assume no other objects need to be checkpointed. 
+ # We can later make it checkpoint the stateful hooks + self.checkpointer = Checkpointer( + # Assume you want to save checkpoints together with logs/statistics + model, + cfg.OUTPUT_DIR, + save_to_disk=comm.is_main_process(), + optimizer=optimizer, + scheduler=self.scheduler, + ) + self.start_iter = 0 + if cfg.SOLVER.SWA.ENABLED: + self.max_iter = cfg.SOLVER.MAX_ITER + cfg.SOLVER.SWA.ITER + else: + self.max_iter = cfg.SOLVER.MAX_ITER + + self.cfg = cfg + + self.register_hooks(self.build_hooks()) + + def resume_or_load(self, resume=True): + """ + If `resume==True` and `cfg.OUTPUT_DIR` contains the last checkpoint (defined by + a `last_checkpoint` file), resume from the file. Resuming means loading all + available states (eg. optimizer and scheduler) and update iteration counter + from the checkpoint. ``cfg.MODEL.WEIGHTS`` will not be used. + Otherwise, this is considered as an independent training. The method will load model + weights from the file `cfg.MODEL.WEIGHTS` (but will not load other states) and start + from iteration 0. + Args: + resume (bool): whether to do resume or not + """ + # The checkpoint stores the training iteration that just finished, thus we start + # at the next iteration (or iter zero if there's no checkpoint). + checkpoint = self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume) + + if resume and self.checkpointer.has_checkpoint(): + self.start_iter = checkpoint.get("iteration", -1) + 1 + # The checkpoint stores the training iteration that just finished, thus we start + # at the next iteration (or iter zero if there's no checkpoint). + + def build_hooks(self): + """ + Build a list of default hooks, including timing, evaluation, + checkpointing, lr scheduling, precise BN, writing events. + Returns: + list[HookBase]: + """ + logger = logging.getLogger(__name__) + cfg = self.cfg.clone() + cfg.defrost() + cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN + cfg.DATASETS.NAMES = tuple([cfg.TEST.PRECISE_BN.DATASET]) # set dataset name for PreciseBN + + ret = [ + hooks.IterationTimer(), + hooks.LRScheduler(self.optimizer, self.scheduler), + ] + + if cfg.SOLVER.SWA.ENABLED: + ret.append( + hooks.SWA( + cfg.SOLVER.MAX_ITER, + cfg.SOLVER.SWA.PERIOD, + cfg.SOLVER.SWA.LR_FACTOR, + cfg.SOLVER.SWA.ETA_MIN_LR, + cfg.SOLVER.SWA.LR_SCHED, + ) + ) + + if cfg.TEST.PRECISE_BN.ENABLED and hooks.get_bn_modules(self.model): + logger.info("Prepare precise BN dataset") + ret.append(hooks.PreciseBN( + # Run at the same freq as (but before) evaluation. + self.model, + # Build a new data loader to not affect training + self.build_train_loader(cfg), + cfg.TEST.PRECISE_BN.NUM_ITER, + )) + + if cfg.MODEL.FREEZE_LAYERS != [''] and cfg.SOLVER.FREEZE_ITERS > 0: + freeze_layers = ",".join(cfg.MODEL.FREEZE_LAYERS) + logger.info(f'Freeze layer group "{freeze_layers}" training for {cfg.SOLVER.FREEZE_ITERS:d} iterations') + ret.append(hooks.FreezeLayer( + self.model, + self.optimizer, + cfg.MODEL.FREEZE_LAYERS, + cfg.SOLVER.FREEZE_ITERS, + )) + # Do PreciseBN before checkpointer, because it updates the model and need to + # be saved by checkpointer. + # This is not always the best: if checkpointing has a different frequency, + # some checkpoints may have more precise statistics than others. 
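+        # Checkpointing and the periodic writers registered below run on the main
+        # process only; under DDP every worker holds the same replicated model state.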
+ if comm.is_main_process(): + ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD)) + + def test_and_save_results(): + self._last_eval_results = self.test(self.cfg, self.model) + return self._last_eval_results + + # Do evaluation after checkpointer, because then if it fails, + # we can use the saved checkpoint to debug. + ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results)) + + if comm.is_main_process(): + # run writers in the end, so that evaluation metrics are written + ret.append(hooks.PeriodicWriter(self.build_writers(), 200)) + + return ret + + def build_writers(self): + """ + Build a list of writers to be used. By default it contains + writers that write metrics to the screen, + a json file, and a tensorboard event file respectively. + If you'd like a different list of writers, you can overwrite it in + your trainer. + Returns: + list[EventWriter]: a list of :class:`EventWriter` objects. + It is now implemented by: + .. code-block:: python + return [ + CommonMetricPrinter(self.max_iter), + JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")), + TensorboardXWriter(self.cfg.OUTPUT_DIR), + ] + """ + # Assume the default print/log frequency. + return [ + # It may not always print what you want to see, since it prints "common" metrics only. + CommonMetricPrinter(self.max_iter), + JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")), + TensorboardXWriter(self.cfg.OUTPUT_DIR), + ] + + def train(self): + """ + Run training. + Returns: + OrderedDict of results, if evaluation is enabled. Otherwise None. + """ + super().train(self.start_iter, self.max_iter) + if comm.is_main_process(): + assert hasattr( + self, "_last_eval_results" + ), "No evaluation results obtained during training!" + # verify_results(self.cfg, self._last_eval_results) + return self._last_eval_results + + @classmethod + def build_model(cls, cfg): + """ + Returns: + torch.nn.Module: + It now calls :func:`fastreid.modeling.build_model`. + Overwrite it if you'd like a different model. + """ + model = build_model(cfg) + logger = logging.getLogger(__name__) + logger.info("Model:\n{}".format(model)) + return model + + @classmethod + def build_optimizer(cls, cfg, model): + """ + Returns: + torch.optim.Optimizer: + It now calls :func:`fastreid.solver.build_optimizer`. + Overwrite it if you'd like a different optimizer. + """ + return build_optimizer(cfg, model) + + @classmethod + def build_lr_scheduler(cls, cfg, optimizer): + """ + It now calls :func:`fastreid.solver.build_lr_scheduler`. + Overwrite it if you'd like a different scheduler. + """ + return build_lr_scheduler(cfg, optimizer) + + @classmethod + def build_train_loader(cls, cfg): + """ + Returns: + iterable + It now calls :func:`fastreid.data.build_detection_train_loader`. + Overwrite it if you'd like a different data loader. + """ + logger = logging.getLogger(__name__) + logger.info("Prepare training set") + return build_reid_train_loader(cfg) + + @classmethod + def build_test_loader(cls, cfg, dataset_name): + """ + Returns: + iterable + It now calls :func:`fastreid.data.build_detection_test_loader`. + Overwrite it if you'd like a different data loader. 
+        """
+        return build_reid_test_loader(cfg, dataset_name)
+
+    @classmethod
+    def build_evaluator(cls, cfg, dataset_name, output_dir=None):
+        data_loader, num_query = cls.build_test_loader(cfg, dataset_name)
+        return data_loader, ReidEvaluator(cfg, num_query, output_dir)
+
+    @classmethod
+    def test(cls, cfg, model):
+        """
+        Args:
+            cfg (CfgNode):
+            model (nn.Module):
+        Returns:
+            dict: a dict of result metrics
+        """
+        logger = logging.getLogger(__name__)
+
+        results = OrderedDict()
+        for idx, dataset_name in enumerate(cfg.DATASETS.TESTS):
+            logger.info("Prepare testing set")
+            try:
+                data_loader, evaluator = cls.build_evaluator(cfg, dataset_name)
+            except NotImplementedError:
+                logger.warning(
+                    "No evaluator found. Implement its `build_evaluator` method."
+                )
+                results[dataset_name] = {}
+                continue
+            results_i = inference_on_dataset(model, data_loader, evaluator)
+            results[dataset_name] = results_i
+
+        if comm.is_main_process():
+            assert isinstance(
+                results, dict
+            ), "Evaluator must return a dict on the main process. Got {} instead.".format(
+                results
+            )
+            print_csv_format(results)
+
+        if len(results) == 1: results = list(results.values())[0]
+
+        return results
+
+    @staticmethod
+    def auto_scale_hyperparams(cfg, data_loader):
+        r"""
+        This is used to automatically compute the actual number of training iterations,
+        because some hyper-params, such as MAX_ITER, are specified in training epochs
+        rather than iterations, so they need to be converted to iterations here.
+        """
+        cfg = cfg.clone()
+        frozen = cfg.is_frozen()
+        cfg.defrost()
+
+        # If you don't hard-code the number of classes, it will compute the number automatically
+        if cfg.MODEL.HEADS.NUM_CLASSES == 0:
+            output_dir = cfg.OUTPUT_DIR
+            cfg.MODEL.HEADS.NUM_CLASSES = data_loader.dataset.num_classes
+            # Update the saved config file to make the number of classes valid
+            if comm.is_main_process() and output_dir:
+                # Note: some of our scripts may expect the existence of
+                # config.yaml in output directory
+                path = os.path.join(output_dir, "config.yaml")
+                with PathManager.open(path, "w") as f:
+                    f.write(cfg.dump())
+
+        iters_per_epoch = len(data_loader.dataset) // cfg.SOLVER.IMS_PER_BATCH
+        cfg.SOLVER.MAX_ITER *= iters_per_epoch
+        cfg.SOLVER.WARMUP_ITERS *= iters_per_epoch
+        cfg.SOLVER.FREEZE_ITERS *= iters_per_epoch
+        cfg.SOLVER.DELAY_ITERS *= iters_per_epoch
+        for i in range(len(cfg.SOLVER.STEPS)):
+            cfg.SOLVER.STEPS[i] *= iters_per_epoch
+        cfg.SOLVER.SWA.ITER *= iters_per_epoch
+        cfg.SOLVER.SWA.PERIOD *= iters_per_epoch
+
+        ckpt_multiple = cfg.SOLVER.CHECKPOINT_PERIOD / cfg.TEST.EVAL_PERIOD
+        # The evaluation period must be divisible by 200 so results get written to tensorboard
+        # (the PeriodicWriter hook runs every 200 iterations).
+        eval_num_mod = (200 - cfg.TEST.EVAL_PERIOD * iters_per_epoch) % 200
+        cfg.TEST.EVAL_PERIOD = cfg.TEST.EVAL_PERIOD * iters_per_epoch + eval_num_mod
+        # Keep the checkpoint saving period consistent with the evaluation period.
+        cfg.SOLVER.CHECKPOINT_PERIOD = int(cfg.TEST.EVAL_PERIOD * ckpt_multiple)
+
+        logger = logging.getLogger(__name__)
+        logger.info(
+            f"Auto-scaling the config to num_classes={cfg.MODEL.HEADS.NUM_CLASSES}, "
+            f"max_Iter={cfg.SOLVER.MAX_ITER}, warmup_Iter={cfg.SOLVER.WARMUP_ITERS}, "
+            f"freeze_Iter={cfg.SOLVER.FREEZE_ITERS}, delay_Iter={cfg.SOLVER.DELAY_ITERS}, "
+            f"step_Iter={cfg.SOLVER.STEPS}, ckpt_Iter={cfg.SOLVER.CHECKPOINT_PERIOD}, "
+            f"eval_Iter={cfg.TEST.EVAL_PERIOD}."
+ ) + + if frozen: cfg.freeze() + + return cfg diff --git a/thirdparty/fast-reid/fastreid/engine/hooks.py b/thirdparty/fast-reid/fastreid/engine/hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..e63921a8388d7ff654b83efc39d299a76f25d6bd --- /dev/null +++ b/thirdparty/fast-reid/fastreid/engine/hooks.py @@ -0,0 +1,503 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import datetime +import itertools +import logging +import os +import tempfile +import time +from collections import Counter + +import torch +from torch import nn +from torch.nn.parallel import DistributedDataParallel + +from fastreid.evaluation.testing import flatten_results_dict +from fastreid.solver import optim +from fastreid.utils import comm +from fastreid.utils.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer +from fastreid.utils.events import EventStorage, EventWriter +from fastreid.utils.file_io import PathManager +from fastreid.utils.precision_bn import update_bn_stats, get_bn_modules +from fastreid.utils.timer import Timer +from .train_loop import HookBase + +__all__ = [ + "CallbackHook", + "IterationTimer", + "PeriodicWriter", + "PeriodicCheckpointer", + "LRScheduler", + "AutogradProfiler", + "EvalHook", + "PreciseBN", + "FreezeLayer", +] + +""" +Implement some common hooks. +""" + + +class CallbackHook(HookBase): + """ + Create a hook using callback functions provided by the user. + """ + + def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None): + """ + Each argument is a function that takes one argument: the trainer. + """ + self._before_train = before_train + self._before_step = before_step + self._after_step = after_step + self._after_train = after_train + + def before_train(self): + if self._before_train: + self._before_train(self.trainer) + + def after_train(self): + if self._after_train: + self._after_train(self.trainer) + # The functions may be closures that hold reference to the trainer + # Therefore, delete them to avoid circular reference. + del self._before_train, self._after_train + del self._before_step, self._after_step + + def before_step(self): + if self._before_step: + self._before_step(self.trainer) + + def after_step(self): + if self._after_step: + self._after_step(self.trainer) + + +class IterationTimer(HookBase): + """ + Track the time spent for each iteration (each run_step call in the trainer). + Print a summary in the end of training. + This hook uses the time between the call to its :meth:`before_step` + and :meth:`after_step` methods. + Under the convention that :meth:`before_step` of all hooks should only + take negligible amount of time, the :class:`IterationTimer` hook should be + placed at the beginning of the list of hooks to obtain accurate timing. + """ + + def __init__(self, warmup_iter=3): + """ + Args: + warmup_iter (int): the number of iterations at the beginning to exclude + from timing. 
+ """ + self._warmup_iter = warmup_iter + self._step_timer = Timer() + + def before_train(self): + self._start_time = time.perf_counter() + self._total_timer = Timer() + self._total_timer.pause() + + def after_train(self): + logger = logging.getLogger(__name__) + total_time = time.perf_counter() - self._start_time + total_time_minus_hooks = self._total_timer.seconds() + hook_time = total_time - total_time_minus_hooks + + num_iter = self.trainer.iter + 1 - self.trainer.start_iter - self._warmup_iter + + if num_iter > 0 and total_time_minus_hooks > 0: + # Speed is meaningful only after warmup + # NOTE this format is parsed by grep in some scripts + logger.info( + "Overall training speed: {} iterations in {} ({:.4f} s / it)".format( + num_iter, + str(datetime.timedelta(seconds=int(total_time_minus_hooks))), + total_time_minus_hooks / num_iter, + ) + ) + + logger.info( + "Total training time: {} ({} on hooks)".format( + str(datetime.timedelta(seconds=int(total_time))), + str(datetime.timedelta(seconds=int(hook_time))), + ) + ) + + def before_step(self): + self._step_timer.reset() + self._total_timer.resume() + + def after_step(self): + # +1 because we're in after_step + iter_done = self.trainer.iter - self.trainer.start_iter + 1 + if iter_done >= self._warmup_iter: + sec = self._step_timer.seconds() + self.trainer.storage.put_scalars(time=sec) + else: + self._start_time = time.perf_counter() + self._total_timer.reset() + + self._total_timer.pause() + + +class PeriodicWriter(HookBase): + """ + Write events to EventStorage periodically. + It is executed every ``period`` iterations and after the last iteration. + """ + + def __init__(self, writers, period=20): + """ + Args: + writers (list[EventWriter]): a list of EventWriter objects + period (int): + """ + self._writers = writers + for w in writers: + assert isinstance(w, EventWriter), w + self._period = period + + def after_step(self): + if (self.trainer.iter + 1) % self._period == 0 or ( + self.trainer.iter == self.trainer.max_iter - 1 + ): + for writer in self._writers: + writer.write() + + def after_train(self): + for writer in self._writers: + writer.close() + + +class PeriodicCheckpointer(_PeriodicCheckpointer, HookBase): + """ + Same as :class:`fastreid.utils.checkpoint.PeriodicCheckpointer`, but as a hook. + Note that when used as a hook, + it is unable to save additional data other than what's defined + by the given `checkpointer`. + It is executed every ``period`` iterations and after the last iteration. + """ + + def before_train(self): + self.max_iter = self.trainer.max_iter + + def after_step(self): + # No way to use **kwargs + self.step(self.trainer.iter) + + +class LRScheduler(HookBase): + """ + A hook which executes a torch builtin LR scheduler and summarizes the LR. + It is executed after every iteration. 
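+
+    Example (illustrative)::
+
+        hooks.LRScheduler(self.optimizer, self.scheduler)
+
+    After each step the summarized "lr" is written to the trainer storage and
+    ``scheduler.step()`` is called.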
+ """ + + def __init__(self, optimizer, scheduler): + """ + Args: + optimizer (torch.optim.Optimizer): + scheduler (torch.optim._LRScheduler) + """ + self._optimizer = optimizer + self._scheduler = scheduler + + # NOTE: some heuristics on what LR to summarize + # summarize the param group with most parameters + largest_group = max(len(g["params"]) for g in optimizer.param_groups) + + if largest_group == 1: + # If all groups have one parameter, + # then find the most common initial LR, and use it for summary + lr_count = Counter([g["lr"] for g in optimizer.param_groups]) + lr = lr_count.most_common()[0][0] + for i, g in enumerate(optimizer.param_groups): + if g["lr"] == lr: + self._best_param_group_id = i + break + else: + for i, g in enumerate(optimizer.param_groups): + if len(g["params"]) == largest_group: + self._best_param_group_id = i + break + + def after_step(self): + lr = self._optimizer.param_groups[self._best_param_group_id]["lr"] + self.trainer.storage.put_scalar("lr", lr, smoothing_hint=False) + self._scheduler.step() + + +class AutogradProfiler(HookBase): + """ + A hook which runs `torch.autograd.profiler.profile`. + Examples: + .. code-block:: python + hooks.AutogradProfiler( + lambda trainer: trainer.iter > 10 and trainer.iter < 20, self.cfg.OUTPUT_DIR + ) + The above example will run the profiler for iteration 10~20 and dump + results to ``OUTPUT_DIR``. We did not profile the first few iterations + because they are typically slower than the rest. + The result files can be loaded in the ``chrome://tracing`` page in chrome browser. + Note: + When used together with NCCL on older version of GPUs, + autograd profiler may cause deadlock because it unnecessarily allocates + memory on every device it sees. The memory management calls, if + interleaved with NCCL calls, lead to deadlock on GPUs that do not + support `cudaLaunchCooperativeKernelMultiDevice`. + """ + + def __init__(self, enable_predicate, output_dir, *, use_cuda=True): + """ + Args: + enable_predicate (callable[trainer -> bool]): a function which takes a trainer, + and returns whether to enable the profiler. + It will be called once every step, and can be used to select which steps to profile. + output_dir (str): the output directory to dump tracing files. + use_cuda (bool): same as in `torch.autograd.profiler.profile`. + """ + self._enable_predicate = enable_predicate + self._use_cuda = use_cuda + self._output_dir = output_dir + + def before_step(self): + if self._enable_predicate(self.trainer): + self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda) + self._profiler.__enter__() + else: + self._profiler = None + + def after_step(self): + if self._profiler is None: + return + self._profiler.__exit__(None, None, None) + out_file = os.path.join( + self._output_dir, "profiler-trace-iter{}.json".format(self.trainer.iter) + ) + if "://" not in out_file: + self._profiler.export_chrome_trace(out_file) + else: + # Support non-posix filesystems + with tempfile.TemporaryDirectory(prefix="fastreid_profiler") as d: + tmp_file = os.path.join(d, "tmp.json") + self._profiler.export_chrome_trace(tmp_file) + with open(tmp_file) as f: + content = f.read() + with PathManager.open(out_file, "w") as f: + f.write(content) + + +class EvalHook(HookBase): + """ + Run an evaluation function periodically, and at the end of training. + It is executed every ``eval_period`` iterations and after the last iteration. 
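+
+    Example (illustrative, dataset/metric names are placeholders)::
+
+        def _eval():
+            return {"Market1501": {"Rank-1": 0.95}}  # nested dict of float metrics
+        hooks.EvalHook(cfg.TEST.EVAL_PERIOD, _eval)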
+ """ + + def __init__(self, eval_period, eval_function): + """ + Args: + eval_period (int): the period to run `eval_function`. + eval_function (callable): a function which takes no arguments, and + returns a nested dict of evaluation metrics. + Note: + This hook must be enabled in all or none workers. + If you would like only certain workers to perform evaluation, + give other workers a no-op function (`eval_function=lambda: None`). + """ + self._period = eval_period + self._func = eval_function + + def _do_eval(self): + results = self._func() + + if results: + assert isinstance( + results, dict + ), "Eval function must return a dict. Got {} instead.".format(results) + + flattened_results = flatten_results_dict(results) + for k, v in flattened_results.items(): + try: + v = float(v) + except Exception: + raise ValueError( + "[EvalHook] eval_function should return a nested dict of float. " + "Got '{}: {}' instead.".format(k, v) + ) + self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False) + + # Remove extra memory cache of main process due to evaluation + torch.cuda.empty_cache() + + def after_step(self): + next_iter = self.trainer.iter + 1 + is_final = next_iter == self.trainer.max_iter + if is_final or (self._period > 0 and next_iter % self._period == 0): + self._do_eval() + # Evaluation may take different time among workers. + # A barrier make them start the next iteration together. + comm.synchronize() + + def after_train(self): + # func is likely a closure that holds reference to the trainer + # therefore we clean it to avoid circular reference in the end + del self._func + + +class PreciseBN(HookBase): + """ + The standard implementation of BatchNorm uses EMA in inference, which is + sometimes suboptimal. + This class computes the true average of statistics rather than the moving average, + and put true averages to every BN layer in the given model. + It is executed after the last iteration. + """ + + def __init__(self, model, data_loader, num_iter): + """ + Args: + model (nn.Module): a module whose all BN layers in training mode will be + updated by precise BN. + Note that user is responsible for ensuring the BN layers to be + updated are in training mode when this hook is triggered. + data_loader (iterable): it will produce data to be run by `model(data)`. + num_iter (int): number of iterations used to compute the precise + statistics. + """ + self._logger = logging.getLogger(__name__) + if len(get_bn_modules(model)) == 0: + self._logger.info( + "PreciseBN is disabled because model does not contain BN layers in training mode." + ) + self._disabled = True + return + + self._model = model + self._data_loader = data_loader + self._num_iter = num_iter + self._disabled = False + + self._data_iter = None + + def after_step(self): + next_iter = self.trainer.iter + 1 + is_final = next_iter == self.trainer.max_iter + if is_final: + self.update_stats() + + def update_stats(self): + """ + Update the model with precise statistics. Users can manually call this method. + """ + if self._disabled: + return + + if self._data_iter is None: + self._data_iter = iter(self._data_loader) + + def data_loader(): + for num_iter in itertools.count(1): + if num_iter % 100 == 0: + self._logger.info( + "Running precise-BN ... {}/{} iterations.".format(num_iter, self._num_iter) + ) + # This way we can reuse the same iterator + yield next(self._data_iter) + + with EventStorage(): # capture events in a new storage to discard them + self._logger.info( + "Running precise-BN for {} iterations... 
".format(self._num_iter) + + "Note that this could produce different statistics every time." + ) + update_bn_stats(self._model, data_loader(), self._num_iter) + + +class FreezeLayer(HookBase): + def __init__(self, model, optimizer, freeze_layers, freeze_iters): + self._logger = logging.getLogger(__name__) + + if isinstance(model, DistributedDataParallel): + model = model.module + self.model = model + self.optimizer = optimizer + + self.freeze_layers = freeze_layers + self.freeze_iters = freeze_iters + + # Previous parameters freeze status + param_freeze = {} + for param_group in self.optimizer.param_groups: + param_name = param_group['name'] + param_freeze[param_name] = param_group['freeze'] + self.param_freeze = param_freeze + + self.is_frozen = False + + def before_step(self): + # Freeze specific layers + if self.trainer.iter <= self.freeze_iters and not self.is_frozen: + self.freeze_specific_layer() + + # Recover original layers status + if self.trainer.iter > self.freeze_iters and self.is_frozen: + self.open_all_layer() + + def freeze_specific_layer(self): + for layer in self.freeze_layers: + if not hasattr(self.model, layer): + self._logger.info(f'{layer} is not an attribute of the model, will skip this layer') + + for param_group in self.optimizer.param_groups: + param_name = param_group['name'] + if param_name.split('.')[0] in self.freeze_layers: + param_group['freeze'] = True + + # Change BN in freeze layers to eval mode + for name, module in self.model.named_children(): + if name in self.freeze_layers: module.eval() + + self.is_frozen = True + + def open_all_layer(self): + self.model.train() + for param_group in self.optimizer.param_groups: + param_name = param_group['name'] + param_group['freeze'] = self.param_freeze[param_name] + + self.is_frozen = False + + +class SWA(HookBase): + def __init__(self, swa_start: int, swa_freq: int, swa_lr_factor: float, eta_min: float, lr_sched=False, ): + self.swa_start = swa_start + self.swa_freq = swa_freq + self.swa_lr_factor = swa_lr_factor + self.eta_min = eta_min + self.lr_sched = lr_sched + + def before_step(self): + is_swa = self.trainer.iter == self.swa_start + if is_swa: + # Wrapper optimizer with SWA + self.trainer.optimizer = optim.SWA(self.trainer.optimizer, self.swa_freq, self.swa_lr_factor) + self.trainer.optimizer.reset_lr_to_swa() + + if self.lr_sched: + self.scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts( + optimizer=self.trainer.optimizer, + T_0=self.swa_freq, + eta_min=self.eta_min, + ) + + def after_step(self): + next_iter = self.trainer.iter + 1 + + # Use Cyclic learning rate scheduler + if next_iter > self.swa_start and self.lr_sched: + self.scheduler.step() + + is_final = next_iter == self.trainer.max_iter + if is_final: + self.trainer.optimizer.swap_swa_param() diff --git a/thirdparty/fast-reid/fastreid/engine/launch.py b/thirdparty/fast-reid/fastreid/engine/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f5c9bf7f06f1e1b8beda222af079abe05d248b --- /dev/null +++ b/thirdparty/fast-reid/fastreid/engine/launch.py @@ -0,0 +1,103 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on: +# https://github.com/facebookresearch/detectron2/blob/master/detectron2/engine/launch.py + + +import logging + +import torch +import torch.distributed as dist +import torch.multiprocessing as mp + +from fastreid.utils import comm + +__all__ = ["launch"] + + +def _find_free_port(): + import socket + + sock = socket.socket(socket.AF_INET, 
socket.SOCK_STREAM) + # Binding to port 0 will cause the OS to find an available port for us + sock.bind(("", 0)) + port = sock.getsockname()[1] + sock.close() + # NOTE: there is still a chance the port could be taken by other processes. + return port + + +def launch(main_func, num_gpus_per_machine, num_machines=1, machine_rank=0, dist_url=None, args=()): + """ + Launch multi-gpu or distributed training. + This function must be called on all machines involved in the training. + It will spawn child processes (defined by ``num_gpus_per_machine`) on each machine. + Args: + main_func: a function that will be called by `main_func(*args)` + num_gpus_per_machine (int): number of GPUs per machine + num_machines (int): the total number of machines + machine_rank (int): the rank of this machine + dist_url (str): url to connect to for distributed jobs, including protocol + e.g. "tcp://127.0.0.1:8686". + Can be set to "auto" to automatically select a free port on localhost + args (tuple): arguments passed to main_func + """ + world_size = num_machines * num_gpus_per_machine + if world_size > 1: + # https://github.com/pytorch/pytorch/pull/14391 + # TODO prctl in spawned processes + + if dist_url == "auto": + assert num_machines == 1, "dist_url=auto not supported in multi-machine jobs." + port = _find_free_port() + dist_url = f"tcp://127.0.0.1:{port}" + if num_machines > 1 and dist_url.startswith("file://"): + logger = logging.getLogger(__name__) + logger.warning( + "file:// is not a reliable init_method in multi-machine jobs. Prefer tcp://" + ) + + mp.spawn( + _distributed_worker, + nprocs=num_gpus_per_machine, + args=(main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args), + daemon=False, + ) + else: + main_func(*args) + + +def _distributed_worker( + local_rank, main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args +): + assert torch.cuda.is_available(), "cuda is not available. Please check your installation." 
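+    # Rank arithmetic (illustrative): with num_gpus_per_machine=4, machine_rank=1 and
+    # local_rank=2, this worker gets global_rank = 1 * 4 + 2 = 6.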
+ global_rank = machine_rank * num_gpus_per_machine + local_rank + try: + dist.init_process_group( + backend="NCCL", init_method=dist_url, world_size=world_size, rank=global_rank + ) + except Exception as e: + logger = logging.getLogger(__name__) + logger.error("Process group URL: {}".format(dist_url)) + raise e + # synchronize is needed here to prevent a possible timeout after calling init_process_group + # See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172 + comm.synchronize() + + assert num_gpus_per_machine <= torch.cuda.device_count() + torch.cuda.set_device(local_rank) + + # Setup the local process group (which contains ranks within the same machine) + assert comm._LOCAL_PROCESS_GROUP is None + num_machines = world_size // num_gpus_per_machine + for i in range(num_machines): + ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine)) + pg = dist.new_group(ranks_on_i) + if i == machine_rank: + comm._LOCAL_PROCESS_GROUP = pg + + main_func(*args) diff --git a/thirdparty/fast-reid/fastreid/engine/train_loop.py b/thirdparty/fast-reid/fastreid/engine/train_loop.py new file mode 100644 index 0000000000000000000000000000000000000000..59cc70b65c68857fcdd5094821671ce35d18dc53 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/engine/train_loop.py @@ -0,0 +1,283 @@ +# encoding: utf-8 +""" +credit: +https://github.com/facebookresearch/detectron2/blob/master/detectron2/engine/train_loop.py +""" + +import logging +import time +import weakref + +import numpy as np +import torch +from torch.cuda import amp +from torch.nn.parallel import DistributedDataParallel + +import fastreid.utils.comm as comm +from fastreid.utils.events import EventStorage + +__all__ = ["HookBase", "TrainerBase", "SimpleTrainer"] + + +class HookBase: + """ + Base class for hooks that can be registered with :class:`TrainerBase`. + Each hook can implement 4 methods. The way they are called is demonstrated + in the following snippet: + .. code-block:: python + hook.before_train() + for iter in range(start_iter, max_iter): + hook.before_step() + trainer.run_step() + hook.after_step() + hook.after_train() + Notes: + 1. In the hook method, users can access `self.trainer` to access more + properties about the context (e.g., current iteration). + 2. A hook that does something in :meth:`before_step` can often be + implemented equivalently in :meth:`after_step`. + If the hook takes non-trivial time, it is strongly recommended to + implement the hook in :meth:`after_step` instead of :meth:`before_step`. + The convention is that :meth:`before_step` should only take negligible time. + Following this convention will allow hooks that do care about the difference + between :meth:`before_step` and :meth:`after_step` (e.g., timer) to + function properly. + Attributes: + trainer: A weak reference to the trainer object. Set by the trainer when the hook is + registered. + """ + + def before_train(self): + """ + Called before the first iteration. + """ + pass + + def after_train(self): + """ + Called after the last iteration. + """ + pass + + def before_step(self): + """ + Called before each iteration. + """ + pass + + def after_step(self): + """ + Called after each iteration. + """ + pass + + +class TrainerBase: + """ + Base class for iterative trainer with hooks. + The only assumption we made here is: the training runs in a loop. + A subclass can implement what the loop is. + We made no assumptions about the existence of dataloader, optimizer, model, etc. + Attributes: + iter(int): the current iteration. 
+ start_iter(int): The iteration to start with. + By convention the minimum possible value is 0. + max_iter(int): The iteration to end training. + storage(EventStorage): An EventStorage that's opened during the course of training. + """ + + def __init__(self): + self._hooks = [] + + def register_hooks(self, hooks): + """ + Register hooks to the trainer. The hooks are executed in the order + they are registered. + Args: + hooks (list[Optional[HookBase]]): list of hooks + """ + hooks = [h for h in hooks if h is not None] + for h in hooks: + assert isinstance(h, HookBase) + # To avoid circular reference, hooks and trainer cannot own each other. + # This normally does not matter, but will cause memory leak if the + # involved objects contain __del__: + # See http://engineering.hearsaysocial.com/2013/06/16/circular-references-in-python/ + h.trainer = weakref.proxy(self) + self._hooks.extend(hooks) + + def train(self, start_iter: int, max_iter: int): + """ + Args: + start_iter, max_iter (int): See docs above + """ + logger = logging.getLogger(__name__) + logger.info("Starting training from iteration {}".format(start_iter)) + + self.iter = self.start_iter = start_iter + self.max_iter = max_iter + + with EventStorage(start_iter) as self.storage: + try: + self.before_train() + for self.iter in range(start_iter, max_iter): + self.before_step() + self.run_step() + self.after_step() + except Exception: + logger.exception("Exception during training:") + finally: + self.after_train() + + def before_train(self): + for h in self._hooks: + h.before_train() + + def after_train(self): + for h in self._hooks: + h.after_train() + + def before_step(self): + for h in self._hooks: + h.before_step() + + def after_step(self): + for h in self._hooks: + h.after_step() + # this guarantees, that in each hook's after_step, storage.iter == trainer.iter + self.storage.step() + + def run_step(self): + raise NotImplementedError + + +class SimpleTrainer(TrainerBase): + """ + A simple trainer for the most common type of task: + single-cost single-optimizer single-data-source iterative optimization. + It assumes that every step, you: + 1. Compute the loss with a data from the data_loader. + 2. Compute the gradients with the above loss. + 3. Update the model with the optimizer. + If you want to do anything fancier than this, + either subclass TrainerBase and implement your own `run_step`, + or write your own training loop. + """ + + def __init__(self, model, data_loader, optimizer, amp_enabled): + """ + Args: + model: a torch Module. Takes a data from data_loader and returns a + dict of heads. + data_loader: an iterable. Contains data to be used to call model. + optimizer: a torch optimizer. + """ + super().__init__() + + """ + We set the model to training mode in the trainer. + However it's valid to train a model that's in eval mode. + If you want your model (or a submodule of it) to behave + like evaluation during training, you can overwrite its train() method. + """ + model.train() + + self.model = model + self.data_loader = data_loader + self._data_loader_iter = iter(data_loader) + self.optimizer = optimizer + self.amp_enabled = amp_enabled + + if amp_enabled: + # Creates a GradScaler once at the beginning of training. + self.scaler = amp.GradScaler() + + def run_step(self): + """ + Implement the standard training logic described above. + """ + assert self.model.training, "[SimpleTrainer] model was changed to eval mode!" 
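+        # One optimization step: fetch a batch (timed as data_time), forward under the
+        # optional AMP autocast, sum the loss dict to a scalar, log metrics, then
+        # backward and optimizer step (through the GradScaler when AMP is enabled).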
+        start = time.perf_counter()
+        """
+        If you want to do something with the data, you can wrap the dataloader.
+        """
+        data = next(self._data_loader_iter)
+        data_time = time.perf_counter() - start
+
+        """
+        If you want to do something with the heads, you can wrap the model.
+        """
+
+        with amp.autocast(enabled=self.amp_enabled):
+            outs = self.model(data)
+
+            # Compute loss
+            if isinstance(self.model, DistributedDataParallel):
+                loss_dict = self.model.module.losses(outs)
+            else:
+                loss_dict = self.model.losses(outs)
+
+            losses = sum(loss_dict.values())
+
+        with torch.cuda.stream(torch.cuda.Stream()):
+            metrics_dict = loss_dict
+            metrics_dict["data_time"] = data_time
+            self._write_metrics(metrics_dict)
+            self._detect_anomaly(losses, loss_dict)
+
+        """
+        If you need to accumulate gradients or something similar, you can
+        wrap the optimizer with your custom `zero_grad()` method.
+        """
+        self.optimizer.zero_grad()
+
+        if self.amp_enabled:
+            self.scaler.scale(losses).backward()
+            self.scaler.step(self.optimizer)
+            self.scaler.update()
+        else:
+            losses.backward()
+            """
+            If you need gradient clipping/scaling or other processing, you can
+            wrap the optimizer with your custom `step()` method.
+            """
+            self.optimizer.step()
+
+    def _detect_anomaly(self, losses, loss_dict):
+        if not torch.isfinite(losses).all():
+            raise FloatingPointError(
+                "Loss became infinite or NaN at iteration={}!\nloss_dict = {}".format(
+                    self.iter, loss_dict
+                )
+            )
+
+    def _write_metrics(self, metrics_dict: dict):
+        """
+        Args:
+            metrics_dict (dict): dict of scalar metrics
+        """
+        metrics_dict = {
+            k: v.detach().cpu().item() if isinstance(v, torch.Tensor) else float(v)
+            for k, v in metrics_dict.items()
+        }
+        # gather metrics among all workers for logging
+        # This assumes we do DDP-style training, which is currently the only
+        # supported method in fastreid.
+        all_metrics_dict = comm.gather(metrics_dict)
+
+        if comm.is_main_process():
+            if "data_time" in all_metrics_dict[0]:
+                # data_time among workers can have high variance. The actual latency
+                # caused by data_time is the maximum among workers.
+                data_time = np.max([x.pop("data_time") for x in all_metrics_dict])
+                self.storage.put_scalar("data_time", data_time)
+
+            # average the rest metrics
+            metrics_dict = {
+                k: np.mean([x[k] for x in all_metrics_dict]) for k in all_metrics_dict[0].keys()
+            }
+            total_losses_reduced = sum(loss for loss in metrics_dict.values())
+
+            self.storage.put_scalar("total_loss", total_losses_reduced)
+            if len(metrics_dict) > 1:
+                self.storage.put_scalars(**metrics_dict)
diff --git a/thirdparty/fast-reid/fastreid/evaluation/__init__.py b/thirdparty/fast-reid/fastreid/evaluation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..aec369c7d77f6b6e51669d471895085743ca2b83
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/evaluation/__init__.py
@@ -0,0 +1,8 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+from .evaluator import DatasetEvaluator, inference_context, inference_on_dataset
+from .rank import evaluate_rank
+from .roc import evaluate_roc
+from .reid_evaluation import ReidEvaluator
+from .testing import print_csv_format, verify_results
+
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
diff --git a/thirdparty/fast-reid/fastreid/evaluation/evaluator.py b/thirdparty/fast-reid/fastreid/evaluation/evaluator.py
new file mode 100644
index 0000000000000000000000000000000000000000..82d3a46cda22059071388d6bb0539a9b19702365
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/evaluation/evaluator.py
@@ -0,0 +1,166 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+import datetime
+import logging
+import time
+from contextlib import contextmanager
+
+import torch
+
+from fastreid.utils.logger import log_every_n_seconds
+
+
+class DatasetEvaluator:
+    """
+    Base class for a dataset evaluator.
+    The function :func:`inference_on_dataset` runs the model over
+    all samples in the dataset, and uses a DatasetEvaluator to process the inputs/outputs.
+    This class will accumulate information of the inputs/outputs (by :meth:`process`),
+    and produce evaluation results in the end (by :meth:`evaluate`).
+    """
+
+    def reset(self):
+        """
+        Preparation for a new round of evaluation.
+        Should be called before starting a round of evaluation.
+        """
+        pass
+
+    def preprocess_inputs(self, inputs):
+        pass
+
+    def process(self, inputs, outputs):
+        """
+        Process an input/output pair.
+        Args:
+            inputs: the inputs that are used to call the model.
+            outputs: the return value of `model(input)`
+        """
+        pass
+
+    def evaluate(self):
+        """
+        Evaluate/summarize the performance, after processing all input/output pairs.
+        Returns:
+            dict:
+                A new evaluator class can return a dict of arbitrary format
+                as long as the user can process the results.
+                In our train_net.py, we expect the following format:
+                * key: the name of the task (e.g., bbox)
+                * value: a dict of {metric name: score}, e.g.: {"AP50": 80}
+        """
+        pass
+
+
+# class DatasetEvaluators(DatasetEvaluator):
+#     def __init__(self, evaluators):
+#         assert len(evaluators)
+#         super().__init__()
+#         self._evaluators = evaluators
+#
+#     def reset(self):
+#         for evaluator in self._evaluators:
+#             evaluator.reset()
+#
+#     def process(self, input, output):
+#         for evaluator in self._evaluators:
+#             evaluator.process(input, output)
+#
+#     def evaluate(self):
+#         results = OrderedDict()
+#         for evaluator in self._evaluators:
+#             result = evaluator.evaluate()
+#             if is_main_process() and result is not None:
+#                 for k, v in result.items():
+#                     assert (
+#                         k not in results
+#                     ), "Different evaluators produce results with the same key {}".format(k)
+#                     results[k] = v
+#         return results
+
+
+def inference_on_dataset(model, data_loader, evaluator):
+    """
+    Run model on the data_loader and evaluate the metrics with evaluator.
+    The model will be used in eval mode.
+    Args:
+        model (nn.Module): a module which accepts an object from
+            `data_loader` and returns some outputs. It will be temporarily set to `eval` mode.
+            If you wish to evaluate a model in `training` mode instead, you can
+            wrap the given model and override its behavior of `.eval()` and `.train()`.
+        data_loader: an iterable object with a length.
+            The elements it generates will be the inputs to the model.
+        evaluator (DatasetEvaluator): the evaluator to run. Use
+            :class:`DatasetEvaluators([])` if you only want to benchmark, but
+            don't want to do any evaluation.
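+
+    A typical call (the `model` and `test_loader` names are illustrative;
+    `ReidEvaluator` is defined in this package)::
+        evaluator = ReidEvaluator(cfg, num_query)
+        results = inference_on_dataset(model, test_loader, evaluator)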
+ Returns: + The return value of `evaluator.evaluate()` + """ + logger = logging.getLogger(__name__) + logger.info("Start inference on {} images".format(len(data_loader.dataset))) + + total = len(data_loader) # inference data loader must have a fixed length + evaluator.reset() + + num_warmup = min(5, total - 1) + start_time = time.perf_counter() + total_compute_time = 0 + with inference_context(model), torch.no_grad(): + for idx, inputs in enumerate(data_loader): + if idx == num_warmup: + start_time = time.perf_counter() + total_compute_time = 0 + + start_compute_time = time.perf_counter() + outputs = model(inputs) + total_compute_time += time.perf_counter() - start_compute_time + evaluator.process(inputs, outputs) + + idx += 1 + iters_after_start = idx + 1 - num_warmup * int(idx >= num_warmup) + seconds_per_batch = total_compute_time / iters_after_start + if idx >= num_warmup * 2 or seconds_per_batch > 30: + total_seconds_per_img = (time.perf_counter() - start_time) / iters_after_start + eta = datetime.timedelta(seconds=int(total_seconds_per_img * (total - idx - 1))) + log_every_n_seconds( + logging.INFO, + "Inference done {}/{}. {:.4f} s / batch. ETA={}".format( + idx + 1, total, seconds_per_batch, str(eta) + ), + n=30, + ) + + # Measure the time only for this worker (before the synchronization barrier) + total_time = time.perf_counter() - start_time + total_time_str = str(datetime.timedelta(seconds=total_time)) + # NOTE this format is parsed by grep + logger.info( + "Total inference time: {} ({:.6f} s / batch per device)".format( + total_time_str, total_time / (total - num_warmup) + ) + ) + total_compute_time_str = str(datetime.timedelta(seconds=int(total_compute_time))) + logger.info( + "Total inference pure compute time: {} ({:.6f} s / batch per device)".format( + total_compute_time_str, total_compute_time / (total - num_warmup) + ) + ) + results = evaluator.evaluate() + # An evaluator may return None when not in main process. + # Replace it by an empty dict instead to make it easier for downstream code to handle + if results is None: + results = {} + return results + + +@contextmanager +def inference_context(model): + """ + A context where the model is temporarily changed to eval mode, + and restored to previous mode afterwards. + Args: + model: a torch Module + """ + training_mode = model.training + model.eval() + yield + model.train(training_mode) diff --git a/thirdparty/fast-reid/fastreid/evaluation/query_expansion.py b/thirdparty/fast-reid/fastreid/evaluation/query_expansion.py new file mode 100644 index 0000000000000000000000000000000000000000..637c097c04bd81bfb55ac774835dabb41e5fc720 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/query_expansion.py @@ -0,0 +1,46 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on +# https://github.com/PyRetri/PyRetri/blob/master/pyretri/index/re_ranker/re_ranker_impl/query_expansion.py + +import numpy as np +import torch +import torch.nn.functional as F + + +def aqe(query_feat: torch.tensor, gallery_feat: torch.tensor, + qe_times: int = 1, qe_k: int = 10, alpha: float = 3.0): + """ + Combining the retrieved topk nearest neighbors with the original query and doing another retrieval. + c.f. https://www.robots.ox.ac.uk/~vgg/publications/papers/chum07b.pdf + Args : + query_feat (torch.tensor): + gallery_feat (torch.tensor): + qe_times (int): number of query expansion times. + qe_k (int): number of the neighbors to be combined. 
+        alpha (float): exponent used to weight each neighbor's feature by its
+            similarity to the query (alpha-weighted query expansion).
+    """
+    num_query = query_feat.shape[0]
+    all_feat = torch.cat((query_feat, gallery_feat), dim=0)
+    norm_feat = F.normalize(all_feat, p=2, dim=1)
+
+    all_feat = all_feat.numpy()
+    for i in range(qe_times):
+        all_feat_list = []
+        sims = torch.mm(norm_feat, norm_feat.t())
+        sims = sims.data.cpu().numpy()
+        for sim in sims:
+            init_rank = np.argpartition(-sim, range(1, qe_k + 1))
+            weights = sim[init_rank[:qe_k]].reshape((-1, 1))
+            weights = np.power(weights, alpha)
+            all_feat_list.append(np.mean(all_feat[init_rank[:qe_k], :] * weights, axis=0))
+        all_feat = np.stack(all_feat_list, axis=0)
+        norm_feat = F.normalize(torch.from_numpy(all_feat), p=2, dim=1)
+
+    query_feat = torch.from_numpy(all_feat[:num_query])
+    gallery_feat = torch.from_numpy(all_feat[num_query:])
+    return query_feat, gallery_feat
diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank.py b/thirdparty/fast-reid/fastreid/evaluation/rank.py
new file mode 100644
index 0000000000000000000000000000000000000000..94b677b6518f0468bf760815ee957e78d54813fb
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/evaluation/rank.py
@@ -0,0 +1,202 @@
+# credits: https://github.com/KaiyangZhou/deep-person-reid/blob/master/torchreid/metrics/rank.py
+
+import warnings
+from collections import defaultdict
+
+import numpy as np
+
+try:
+    from .rank_cylib.rank_cy import evaluate_cy
+
+    IS_CYTHON_AVAI = True
+except ImportError:
+    IS_CYTHON_AVAI = False
+    warnings.warn(
+        'Cython rank evaluation (very fast so highly recommended) is '
+        'unavailable, now use python evaluation.'
+    )
+
+
+def eval_cuhk03(distmat, q_pids, g_pids, q_camids, g_camids, max_rank):
+    """Evaluation with cuhk03 metric
+    Key: one image for each gallery identity is randomly sampled for each query identity.
+    Random sampling is performed num_repeats times.
+    """
+    num_repeats = 10
+
+    num_q, num_g = distmat.shape
+
+    indices = np.argsort(distmat, axis=1)
+
+    if num_g < max_rank:
+        max_rank = num_g
+        print(
+            'Note: number of gallery samples is quite small, got {}'.
+            format(num_g)
+        )
+
+    matches = (g_pids[indices] == q_pids[:, np.newaxis]).astype(np.int32)
+
+    # compute cmc curve for each query
+    all_cmc = []
+    all_AP = []
+    num_valid_q = 0.  # number of valid query
+
+    for q_idx in range(num_q):
+        # get query pid and camid
+        q_pid = q_pids[q_idx]
+        q_camid = q_camids[q_idx]
+
+        # remove gallery samples that have the same pid and camid with query
+        order = indices[q_idx]
+        remove = (g_pids[order] == q_pid) & (g_camids[order] == q_camid)
+        keep = np.invert(remove)
+
+        # compute cmc curve
+        raw_cmc = matches[q_idx][keep]  # binary vector, positions with value 1 are correct matches
+        if not np.any(raw_cmc):
+            # this condition is true when query identity does not appear in gallery
+            continue
+
+        kept_g_pids = g_pids[order][keep]
+        g_pids_dict = defaultdict(list)
+        for idx, pid in enumerate(kept_g_pids):
+            g_pids_dict[pid].append(idx)
+
+        cmc = 0.
+        for repeat_idx in range(num_repeats):
+            mask = np.zeros(len(raw_cmc), dtype=bool)
+            for _, idxs in g_pids_dict.items():
+                # randomly sample one image for each gallery person
+                rnd_idx = np.random.choice(idxs)
+                mask[rnd_idx] = True
+            masked_raw_cmc = raw_cmc[mask]
+            _cmc = masked_raw_cmc.cumsum()
+            _cmc[_cmc > 1] = 1
+            cmc += _cmc[:max_rank].astype(np.float32)
+
+        cmc /= num_repeats
+        all_cmc.append(cmc)
+        # compute AP
+        num_rel = raw_cmc.sum()
+        tmp_cmc = raw_cmc.cumsum()
+        tmp_cmc = [x / (i + 1.) for i, x in enumerate(tmp_cmc)]
+        tmp_cmc = np.asarray(tmp_cmc) * raw_cmc
+        AP = tmp_cmc.sum() / num_rel
+        all_AP.append(AP)
+        num_valid_q += 1.
+
+    assert num_valid_q > 0, 'Error: all query identities do not appear in gallery'
+
+    all_cmc = np.asarray(all_cmc).astype(np.float32)
+    all_cmc = all_cmc.sum(0) / num_valid_q
+    mAP = np.mean(all_AP)
+
+    return all_cmc, mAP
+
+
+def eval_market1501(distmat, q_pids, g_pids, q_camids, g_camids, max_rank):
+    """Evaluation with market1501 metric
+    Key: for each query identity, its gallery images from the same camera view are discarded.
+    """
+    num_q, num_g = distmat.shape
+
+    if num_g < max_rank:
+        max_rank = num_g
+        print('Note: number of gallery samples is quite small, got {}'.format(num_g))
+
+    indices = np.argsort(distmat, axis=1)
+
+    matches = (g_pids[indices] == q_pids[:, np.newaxis]).astype(np.int32)
+
+    # compute cmc curve for each query
+    all_cmc = []
+    all_AP = []
+    all_INP = []
+    num_valid_q = 0.  # number of valid query
+
+    for q_idx in range(num_q):
+        # get query pid and camid
+        q_pid = q_pids[q_idx]
+        q_camid = q_camids[q_idx]
+
+        # remove gallery samples that have the same pid and camid with query
+        order = indices[q_idx]
+        remove = (g_pids[order] == q_pid) & (g_camids[order] == q_camid)
+        keep = np.invert(remove)
+
+        # compute cmc curve
+        raw_cmc = matches[q_idx][keep]  # binary vector, positions with value 1 are correct matches
+        if not np.any(raw_cmc):
+            # this condition is true when query identity does not appear in gallery
+            continue
+
+        cmc = raw_cmc.cumsum()
+
+        pos_idx = np.where(raw_cmc == 1)
+        max_pos_idx = np.max(pos_idx)
+        inp = cmc[max_pos_idx] / (max_pos_idx + 1.0)
+        all_INP.append(inp)
+
+        cmc[cmc > 1] = 1
+
+        all_cmc.append(cmc[:max_rank])
+        num_valid_q += 1.
+
+        # compute average precision
+        # reference: https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Average_precision
+        num_rel = raw_cmc.sum()
+        tmp_cmc = raw_cmc.cumsum()
+        tmp_cmc = [x / (i + 1.) for i, x in enumerate(tmp_cmc)]
+        tmp_cmc = np.asarray(tmp_cmc) * raw_cmc
+        AP = tmp_cmc.sum() / num_rel
+        all_AP.append(AP)
+
+    assert num_valid_q > 0, 'Error: all query identities do not appear in gallery'
+
+    all_cmc = np.asarray(all_cmc).astype(np.float32)
+    all_cmc = all_cmc.sum(0) / num_valid_q
+
+    return all_cmc, all_AP, all_INP
+
+
+def evaluate_py(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_metric_cuhk03):
+    if use_metric_cuhk03:
+        return eval_cuhk03(distmat, q_pids, g_pids, q_camids, g_camids, max_rank)
+    else:
+        return eval_market1501(distmat, q_pids, g_pids, q_camids, g_camids, max_rank)
+
+
+def evaluate_rank(
+        distmat,
+        q_pids,
+        g_pids,
+        q_camids,
+        g_camids,
+        max_rank=50,
+        use_metric_cuhk03=False,
+        use_cython=True
+):
+    """Evaluates CMC rank.
+    Args:
+        distmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery).
+        q_pids (numpy.ndarray): 1-D array containing person identities
+            of each query instance.
+        g_pids (numpy.ndarray): 1-D array containing person identities
+            of each gallery instance.
+        q_camids (numpy.ndarray): 1-D array containing camera views under
+            which each query instance is captured.
+        g_camids (numpy.ndarray): 1-D array containing camera views under
+            which each gallery instance is captured.
+        max_rank (int, optional): maximum CMC rank to be computed. Default is 50.
+        use_metric_cuhk03 (bool, optional): use single-gallery-shot setting for cuhk03.
+            Default is False. This should be enabled when using cuhk03 classic split.
+        use_cython (bool, optional): use cython code for evaluation.
Default is True. + This is highly recommended as the cython code can speed up the cmc computation + by more than 10x. This requires Cython to be installed. + """ + if use_cython and IS_CYTHON_AVAI: + return evaluate_cy(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_metric_cuhk03) + else: + return evaluate_py(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_metric_cuhk03) diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/Makefile b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..812a5586e09a365a07d9d9d30b5c962088f7e33f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/Makefile @@ -0,0 +1,7 @@ +all: + python3 setup.py build_ext --inplace + rm -rf build + python3 test_cython.py +clean: + rm -rf build + rm -f rank_cy.c *.so diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/__init__.py b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..980c5fe81ebc7561aad3e939442528791930f3c5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/__init__.py @@ -0,0 +1,5 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/rank_cy.pyx b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/rank_cy.pyx new file mode 100644 index 0000000000000000000000000000000000000000..be45e105db8daa82b976c0ba54bb6fd38e22a3a5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/rank_cy.pyx @@ -0,0 +1,257 @@ +# cython: boundscheck=False, wraparound=False, nonecheck=False, cdivision=True +# credits: https://github.com/KaiyangZhou/deep-person-reid/blob/master/torchreid/metrics/rank_cylib/rank_cy.pyx + +import cython +import numpy as np +cimport numpy as np +from collections import defaultdict +import faiss + + +""" +Compiler directives: +https://github.com/cython/cython/wiki/enhancements-compilerdirectives +Cython tutorial: +https://cython.readthedocs.io/en/latest/src/userguide/numpy_tutorial.html +Credit to https://github.com/luzai +""" + + +# Main interface +cpdef evaluate_cy(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_metric_cuhk03=False): + distmat = np.asarray(distmat, dtype=np.float32) + q_pids = np.asarray(q_pids, dtype=np.int64) + g_pids = np.asarray(g_pids, dtype=np.int64) + q_camids = np.asarray(q_camids, dtype=np.int64) + g_camids = np.asarray(g_camids, dtype=np.int64) + if use_metric_cuhk03: + return eval_cuhk03_cy(distmat, q_pids, g_pids, q_camids, g_camids, max_rank) + return eval_market1501_cy(distmat, q_pids, g_pids, q_camids, g_camids, max_rank) + + +cpdef eval_cuhk03_cy(float[:,:] distmat, long[:] q_pids, long[:]g_pids, + long[:]q_camids, long[:]g_camids, long max_rank): + cdef long num_q = distmat.shape[0] + cdef long num_g = distmat.shape[1] + + + if num_g < max_rank: + max_rank = num_g + print('Note: number of gallery samples is quite small, got {}'.format(num_g)) + + cdef: + long num_repeats = 10 + long[:,:] indices = np.argsort(distmat, axis=1) + long[:,:] matches = (np.asarray(g_pids)[np.asarray(indices)] == np.asarray(q_pids)[:, np.newaxis]).astype(np.int64) + + float[:,:] all_cmc = np.zeros((num_q, max_rank), dtype=np.float32) + float[:] all_AP = np.zeros(num_q, dtype=np.float32) + float num_valid_q = 0. 
# number of valid query + + long q_idx, q_pid, q_camid, g_idx + long[:] order = np.zeros(num_g, dtype=np.int64) + long keep + + float[:] raw_cmc = np.zeros(num_g, dtype=np.float32) # binary vector, positions with value 1 are correct matches + float[:] masked_raw_cmc = np.zeros(num_g, dtype=np.float32) + float[:] cmc, masked_cmc + long num_g_real, num_g_real_masked, rank_idx, rnd_idx + unsigned long meet_condition + float AP + long[:] kept_g_pids, mask + + float num_rel + float[:] tmp_cmc = np.zeros(num_g, dtype=np.float32) + float tmp_cmc_sum + + for q_idx in range(num_q): + # get query pid and camid + q_pid = q_pids[q_idx] + q_camid = q_camids[q_idx] + + # remove gallery samples that have the same pid and camid with query + for g_idx in range(num_g): + order[g_idx] = indices[q_idx, g_idx] + num_g_real = 0 + meet_condition = 0 + kept_g_pids = np.zeros(num_g, dtype=np.int64) + + for g_idx in range(num_g): + if (g_pids[order[g_idx]] != q_pid) or (g_camids[order[g_idx]] != q_camid): + raw_cmc[num_g_real] = matches[q_idx][g_idx] + kept_g_pids[num_g_real] = g_pids[order[g_idx]] + num_g_real += 1 + if matches[q_idx][g_idx] > 1e-31: + meet_condition = 1 + + if not meet_condition: + # this condition is true when query identity does not appear in gallery + continue + + # cuhk03-specific setting + g_pids_dict = defaultdict(list) # overhead! + for g_idx in range(num_g_real): + g_pids_dict[kept_g_pids[g_idx]].append(g_idx) + + cmc = np.zeros(max_rank, dtype=np.float32) + for _ in range(num_repeats): + mask = np.zeros(num_g_real, dtype=np.int64) + + for _, idxs in g_pids_dict.items(): + # randomly sample one image for each gallery person + rnd_idx = np.random.choice(idxs) + #rnd_idx = idxs[0] # use deterministic for debugging + mask[rnd_idx] = 1 + + num_g_real_masked = 0 + for g_idx in range(num_g_real): + if mask[g_idx] == 1: + masked_raw_cmc[num_g_real_masked] = raw_cmc[g_idx] + num_g_real_masked += 1 + + masked_cmc = np.zeros(num_g, dtype=np.float32) + function_cumsum(masked_raw_cmc, masked_cmc, num_g_real_masked) + for g_idx in range(num_g_real_masked): + if masked_cmc[g_idx] > 1: + masked_cmc[g_idx] = 1 + + for rank_idx in range(max_rank): + cmc[rank_idx] += masked_cmc[rank_idx] / num_repeats + + for rank_idx in range(max_rank): + all_cmc[q_idx, rank_idx] = cmc[rank_idx] + # compute average precision + # reference: https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Average_precision + function_cumsum(raw_cmc, tmp_cmc, num_g_real) + num_rel = 0 + tmp_cmc_sum = 0 + for g_idx in range(num_g_real): + tmp_cmc_sum += (tmp_cmc[g_idx] / (g_idx + 1.)) * raw_cmc[g_idx] + num_rel += raw_cmc[g_idx] + all_AP[q_idx] = tmp_cmc_sum / num_rel + num_valid_q += 1. 
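+    # At this point all_cmc holds each valid query's CMC curve averaged over
+    # the num_repeats single-gallery-shot samplings, and all_AP its average
+    # precision; queries that never appear in the gallery were skipped above.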
+ + assert num_valid_q > 0, 'Error: all query identities do not appear in gallery' + + # compute averaged cmc + cdef float[:] avg_cmc = np.zeros(max_rank, dtype=np.float32) + for rank_idx in range(max_rank): + for q_idx in range(num_q): + avg_cmc[rank_idx] += all_cmc[q_idx, rank_idx] + avg_cmc[rank_idx] /= num_valid_q + + cdef float mAP = 0 + for q_idx in range(num_q): + mAP += all_AP[q_idx] + mAP /= num_valid_q + + return np.asarray(avg_cmc).astype(np.float32), mAP + + +cpdef eval_market1501_cy(float[:,:] distmat, long[:] q_pids, long[:]g_pids, + long[:]q_camids, long[:]g_camids, long max_rank): + + cdef long num_q = distmat.shape[0] + cdef long num_g = distmat.shape[1] + + if num_g < max_rank: + max_rank = num_g + print('Note: number of gallery samples is quite small, got {}'.format(num_g)) + + cdef: + long[:,:] indices = np.argsort(distmat, axis=1) + long[:,:] matches = (np.asarray(g_pids)[np.asarray(indices)] == np.asarray(q_pids)[:, np.newaxis]).astype(np.int64) + + float[:,:] all_cmc = np.zeros((num_q, max_rank), dtype=np.float32) + float[:] all_AP = np.zeros(num_q, dtype=np.float32) + float[:] all_INP = np.zeros(num_q, dtype=np.float32) + float num_valid_q = 0. # number of valid query + long valid_index = 0 + + long q_idx, q_pid, q_camid, g_idx + long[:] order = np.zeros(num_g, dtype=np.int64) + long keep + + float[:] raw_cmc = np.zeros(num_g, dtype=np.float32) # binary vector, positions with value 1 are correct matches + float[:] cmc = np.zeros(num_g, dtype=np.float32) + long max_pos_idx = 0 + float inp + long num_g_real, rank_idx + unsigned long meet_condition + + float num_rel + float[:] tmp_cmc = np.zeros(num_g, dtype=np.float32) + float tmp_cmc_sum + + + for q_idx in range(num_q): + # get query pid and camid + q_pid = q_pids[q_idx] + q_camid = q_camids[q_idx] + + for g_idx in range(num_g): + order[g_idx] = indices[q_idx, g_idx] + num_g_real = 0 + meet_condition = 0 + + # remove gallery samples that have the same pid and camid with query + for g_idx in range(num_g): + if (g_pids[order[g_idx]] != q_pid) or (g_camids[order[g_idx]] != q_camid): + raw_cmc[num_g_real] = matches[q_idx][g_idx] + num_g_real += 1 + # this condition is true if query appear in gallery + if matches[q_idx][g_idx] > 1e-31: + meet_condition = 1 + + if not meet_condition: + # this condition is true when query identity does not appear in gallery + continue + + # compute cmc + function_cumsum(raw_cmc, cmc, num_g_real) + # compute mean inverse negative penalty + # reference : https://github.com/mangye16/ReID-Survey/blob/master/utils/reid_metric.py + max_pos_idx = 0 + for g_idx in range(num_g_real): + if (raw_cmc[g_idx] == 1) and (g_idx > max_pos_idx): + max_pos_idx = g_idx + inp = cmc[max_pos_idx] / (max_pos_idx + 1.0) + all_INP[valid_index] = inp + + for g_idx in range(num_g_real): + if cmc[g_idx] > 1: + cmc[g_idx] = 1 + + for rank_idx in range(max_rank): + all_cmc[q_idx, rank_idx] = cmc[rank_idx] + num_valid_q += 1. 
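+        # mINP bookkeeping above: cmc[max_pos_idx] counts the hits up to the
+        # deepest-ranked true match, so inp = hits / (max_pos_idx + 1), which
+        # penalizes queries whose last correct match sits far down the ranking.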
+ + # compute average precision + # reference: https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Average_precision + function_cumsum(raw_cmc, tmp_cmc, num_g_real) + num_rel = 0 + tmp_cmc_sum = 0 + for g_idx in range(num_g_real): + tmp_cmc_sum += (tmp_cmc[g_idx] / (g_idx + 1.)) * raw_cmc[g_idx] + num_rel += raw_cmc[g_idx] + all_AP[valid_index] = tmp_cmc_sum / num_rel + valid_index += 1 + + assert num_valid_q > 0, 'Error: all query identities do not appear in gallery' + + # compute averaged cmc + cdef float[:] avg_cmc = np.zeros(max_rank, dtype=np.float32) + for rank_idx in range(max_rank): + for q_idx in range(num_q): + avg_cmc[rank_idx] += all_cmc[q_idx, rank_idx] + avg_cmc[rank_idx] /= num_valid_q + + return np.asarray(avg_cmc).astype(np.float32), np.asarray(all_AP[:valid_index]), np.asarray(all_INP[:valid_index]) + + +# Compute the cumulative sum +cdef void function_cumsum(cython.numeric[:] src, cython.numeric[:] dst, long n): + cdef long i + dst[0] = src[0] + for i in range(1, n): + dst[i] = src[i] + dst[i - 1] \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/roc_cy.pyx b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/roc_cy.pyx new file mode 100644 index 0000000000000000000000000000000000000000..69bc1ad19c9e290a6b2bb76864f95993551c4f71 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/roc_cy.pyx @@ -0,0 +1,88 @@ +# cython: boundscheck=False, wraparound=False, nonecheck=False, cdivision=True +# credits: https://github.com/KaiyangZhou/deep-person-reid/blob/master/torchreid/metrics/rank_cylib/rank_cy.pyx + +import cython +import faiss +import numpy as np +cimport numpy as np + + +""" +Compiler directives: +https://github.com/cython/cython/wiki/enhancements-compilerdirectives +Cython tutorial: +https://cython.readthedocs.io/en/latest/src/userguide/numpy_tutorial.html +Credit to https://github.com/luzai +""" + + +# Main interface +cpdef evaluate_roc_cy(float[:,:] distmat, long[:] q_pids, long[:]g_pids, + long[:]q_camids, long[:]g_camids): + + distmat = np.asarray(distmat, dtype=np.float32) + q_pids = np.asarray(q_pids, dtype=np.int64) + g_pids = np.asarray(g_pids, dtype=np.int64) + q_camids = np.asarray(q_camids, dtype=np.int64) + g_camids = np.asarray(g_camids, dtype=np.int64) + + cdef long num_q = distmat.shape[0] + cdef long num_g = distmat.shape[1] + + cdef: + long[:,:] indices = np.argsort(distmat, axis=1) + long[:,:] matches = (np.asarray(g_pids)[np.asarray(indices)] == np.asarray(q_pids)[:, np.newaxis]).astype(np.int64) + + float[:] pos = np.zeros(num_q*num_g, dtype=np.float32) + float[:] neg = np.zeros(num_q*num_g, dtype=np.float32) + + long valid_pos = 0 + long valid_neg = 0 + long ind + + long q_idx, q_pid, q_camid, g_idx + long[:] order = np.zeros(num_g, dtype=np.int64) + + float[:] raw_cmc = np.zeros(num_g, dtype=np.float32) # binary vector, positions with value 1 are correct matches + long[:] sort_idx = np.zeros(num_g, dtype=np.int64) + + long idx + + for q_idx in range(num_q): + # get query pid and camid + q_pid = q_pids[q_idx] + q_camid = q_camids[q_idx] + + for g_idx in range(num_g): + order[g_idx] = indices[q_idx, g_idx] + num_g_real = 0 + + # remove gallery samples that have the same pid and camid with query + for g_idx in range(num_g): + if (g_pids[order[g_idx]] != q_pid) or (g_camids[order[g_idx]] != q_camid): + raw_cmc[num_g_real] = matches[q_idx][g_idx] + sort_idx[num_g_real] = order[g_idx] + num_g_real += 1 + + q_dist = distmat[q_idx] + + for valid_idx in range(num_g_real): 
+ if raw_cmc[valid_idx] == 1: + pos[valid_pos] = q_dist[sort_idx[valid_idx]] + valid_pos += 1 + elif raw_cmc[valid_idx] == 0: + neg[valid_neg] = q_dist[sort_idx[valid_idx]] + valid_neg += 1 + + cdef float[:] scores = np.hstack((pos[:valid_pos], neg[:valid_neg])) + cdef float[:] labels = np.hstack((np.zeros(valid_pos, dtype=np.float32), + np.ones(valid_neg, dtype=np.float32))) + return np.asarray(scores), np.asarray(labels) + + +# Compute the cumulative sum +cdef void function_cumsum(cython.numeric[:] src, cython.numeric[:] dst, long n): + cdef long i + dst[0] = src[0] + for i in range(1, n): + dst[i] = src[i] + dst[i - 1] \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/setup.py b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..2b24615567866c0bbbe78b05a2392f14d894e9e3 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/setup.py @@ -0,0 +1,32 @@ +from distutils.core import setup +from distutils.extension import Extension + +import numpy as np +from Cython.Build import cythonize + + +def numpy_include(): + try: + numpy_include = np.get_include() + except AttributeError: + numpy_include = np.get_numpy_include() + return numpy_include + + +ext_modules = [ + Extension( + 'rank_cy', + ['rank_cy.pyx'], + include_dirs=[numpy_include()], + ), + Extension( + 'roc_cy', + ['roc_cy.pyx'], + include_dirs=[numpy_include()], + ) +] + +setup( + name='Cython-based reid evaluation code', + ext_modules=cythonize(ext_modules) +) diff --git a/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/test_cython.py b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/test_cython.py new file mode 100644 index 0000000000000000000000000000000000000000..8fd554223863c920eed82a683045a3fd074a8d92 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rank_cylib/test_cython.py @@ -0,0 +1,106 @@ +import sys +import timeit +import numpy as np +import os.path as osp + +sys.path.insert(0, osp.dirname(osp.abspath(__file__)) + '/../../..') + +from fastreid.evaluation import evaluate_rank +from fastreid.evaluation import evaluate_roc + +""" +Test the speed of cython-based evaluation code. The speed improvements +can be much bigger when using the real reid data, which contains a larger +amount of query and gallery images. +Note: you might encounter the following error: + 'AssertionError: Error: all query identities do not appear in gallery'. +This is normal because the inputs are random numbers. Just try again. 
+""" + +print('*** Compare running time ***') + +setup = ''' +import sys +import os.path as osp +import numpy as np +sys.path.insert(0, osp.dirname(osp.abspath(__file__)) + '/../../..') +from fastreid.evaluation import evaluate_rank +from fastreid.evaluation import evaluate_roc +num_q = 30 +num_g = 300 +dim = 512 +max_rank = 5 +q_feats = np.random.rand(num_q, dim).astype(np.float32) * 20 +q_feats = q_feats / np.linalg.norm(q_feats, ord=2, axis=1, keepdims=True) +g_feats = np.random.rand(num_g, dim).astype(np.float32) * 20 +g_feats = g_feats / np.linalg.norm(g_feats, ord=2, axis=1, keepdims=True) +distmat = 1 - np.dot(q_feats, g_feats.transpose()) +q_pids = np.random.randint(0, num_q, size=num_q) +g_pids = np.random.randint(0, num_g, size=num_g) +q_camids = np.random.randint(0, 5, size=num_q) +g_camids = np.random.randint(0, 5, size=num_g) +''' + +print('=> Using CMC metric') +pytime = timeit.timeit( + 'evaluate_rank(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_cython=False)', + setup=setup, + number=20 +) +cytime = timeit.timeit( + 'evaluate_rank(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_cython=True)', + setup=setup, + number=20 +) +print('Python time: {} s'.format(pytime)) +print('Cython time: {} s'.format(cytime)) +print('CMC Cython is {} times faster than python\n'.format(pytime / cytime)) + +print('=> Using ROC metric') +pytime = timeit.timeit( + 'evaluate_roc(distmat, q_pids, g_pids, q_camids, g_camids, use_cython=False)', + setup=setup, + number=20 +) +cytime = timeit.timeit( + 'evaluate_roc(distmat, q_pids, g_pids, q_camids, g_camids, use_cython=True)', + setup=setup, + number=20 +) +print('Python time: {} s'.format(pytime)) +print('Cython time: {} s'.format(cytime)) +print('ROC Cython is {} times faster than python\n'.format(pytime / cytime)) + +print("=> Check precision") +num_q = 30 +num_g = 300 +dim = 512 +max_rank = 5 +q_feats = np.random.rand(num_q, dim).astype(np.float32) * 20 +q_feats = q_feats / np.linalg.norm(q_feats, ord=2, axis=1, keepdims=True) +g_feats = np.random.rand(num_g, dim).astype(np.float32) * 20 +g_feats = g_feats / np.linalg.norm(g_feats, ord=2, axis=1, keepdims=True) +distmat = 1 - np.dot(q_feats, g_feats.transpose()) +q_pids = np.random.randint(0, num_q, size=num_q) +g_pids = np.random.randint(0, num_g, size=num_g) +q_camids = np.random.randint(0, 5, size=num_q) +g_camids = np.random.randint(0, 5, size=num_g) + +cmc_py, mAP_py, mINP_py = evaluate_rank(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_cython=False) + +cmc_cy, mAP_cy, mINP_cy = evaluate_rank(distmat, q_pids, g_pids, q_camids, g_camids, max_rank, use_cython=True) + +np.testing.assert_allclose(cmc_py, cmc_cy, rtol=1e-3, atol=1e-6) +np.testing.assert_allclose(mAP_py, mAP_cy, rtol=1e-3, atol=1e-6) +np.testing.assert_allclose(mINP_py, mINP_cy, rtol=1e-3, atol=1e-6) +print('Rank results between python and cython are the same!') + +scores_cy, labels_cy = evaluate_roc(distmat, q_pids, g_pids, q_camids, g_camids, use_cython=True) +scores_py, labels_py = evaluate_roc(distmat, q_pids, g_pids, q_camids, g_camids, use_cython=False) + +np.testing.assert_allclose(scores_cy, scores_py, rtol=1e-3, atol=1e-6) +np.testing.assert_allclose(labels_cy, labels_py, rtol=1e-3, atol=1e-6) +print('ROC results between python and cython are the same!\n') + +print("=> Check exact values") +print("mAP = {} \ncmc = {}\nmINP = {}\nScores = {}".format(np.array(mAP_cy), cmc_cy, np.array(mINP_cy), scores_cy)) diff --git a/thirdparty/fast-reid/fastreid/evaluation/reid_evaluation.py 
b/thirdparty/fast-reid/fastreid/evaluation/reid_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..c67b295d0e6ad4f185ee04fc853e6c81c338abad --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/reid_evaluation.py @@ -0,0 +1,118 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import copy +import logging +from collections import OrderedDict +from sklearn import metrics + +import numpy as np +import torch +import torch.nn.functional as F + +from .evaluator import DatasetEvaluator +from .query_expansion import aqe +from .rank import evaluate_rank +from .rerank import re_ranking +from .roc import evaluate_roc +from fastreid.utils import comm +from fastreid.utils.compute_dist import build_dist + +logger = logging.getLogger(__name__) + + +class ReidEvaluator(DatasetEvaluator): + def __init__(self, cfg, num_query, output_dir=None): + self.cfg = cfg + self._num_query = num_query + self._output_dir = output_dir + + self.features = [] + self.pids = [] + self.camids = [] + + def reset(self): + self.features = [] + self.pids = [] + self.camids = [] + + def process(self, inputs, outputs): + self.pids.extend(inputs["targets"]) + self.camids.extend(inputs["camids"]) + self.features.append(outputs.cpu()) + + def evaluate(self): + if comm.get_world_size() > 1: + comm.synchronize() + features = comm.gather(self.features) + features = sum(features, []) + + pids = comm.gather(self.pids) + pids = sum(pids, []) + + camids = comm.gather(self.camids) + camids = sum(camids, []) + + # fmt: off + if not comm.is_main_process(): return {} + # fmt: on + else: + features = self.features + pids = self.pids + camids = self.camids + + features = torch.cat(features, dim=0) + # query feature, person ids and camera ids + query_features = features[:self._num_query] + query_pids = np.asarray(pids[:self._num_query]) + query_camids = np.asarray(camids[:self._num_query]) + + # gallery features, person ids and camera ids + gallery_features = features[self._num_query:] + gallery_pids = np.asarray(pids[self._num_query:]) + gallery_camids = np.asarray(camids[self._num_query:]) + + self._results = OrderedDict() + + if self.cfg.TEST.AQE.ENABLED: + logger.info("Test with AQE setting") + qe_time = self.cfg.TEST.AQE.QE_TIME + qe_k = self.cfg.TEST.AQE.QE_K + alpha = self.cfg.TEST.AQE.ALPHA + query_features, gallery_features = aqe(query_features, gallery_features, qe_time, qe_k, alpha) + + dist = build_dist(query_features, gallery_features, self.cfg.TEST.METRIC) + + if self.cfg.TEST.RERANK.ENABLED: + logger.info("Test with rerank setting") + k1 = self.cfg.TEST.RERANK.K1 + k2 = self.cfg.TEST.RERANK.K2 + lambda_value = self.cfg.TEST.RERANK.LAMBDA + + if self.cfg.TEST.METRIC == "cosine": + query_features = F.normalize(query_features, dim=1) + gallery_features = F.normalize(gallery_features, dim=1) + + rerank_dist = build_dist(query_features, gallery_features, metric="jaccard", k1=k1, k2=k2) + dist = rerank_dist * (1 - lambda_value) + dist * lambda_value + + cmc, all_AP, all_INP = evaluate_rank(dist, query_pids, gallery_pids, query_camids, gallery_camids) + + mAP = np.mean(all_AP) + mINP = np.mean(all_INP) + for r in [1, 5, 10]: + self._results['Rank-{}'.format(r)] = cmc[r - 1] + self._results['mAP'] = mAP + self._results['mINP'] = mINP + + if self.cfg.TEST.ROC_ENABLED: + scores, labels = evaluate_roc(dist, query_pids, gallery_pids, query_camids, gallery_camids) + fprs, tprs, thres = metrics.roc_curve(labels, scores) + + for fpr in [1e-4, 1e-3, 1e-2]: + ind = 
np.argmin(np.abs(fprs - fpr)) + self._results["TPR@FPR={:.0e}".format(fpr)] = tprs[ind] + + return copy.deepcopy(self._results) diff --git a/thirdparty/fast-reid/fastreid/evaluation/rerank.py b/thirdparty/fast-reid/fastreid/evaluation/rerank.py new file mode 100644 index 0000000000000000000000000000000000000000..e69ab4421eb1fd713a70c47897e978b2a8a78191 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/rerank.py @@ -0,0 +1,73 @@ +# encoding: utf-8 + +# based on: +# https://github.com/zhunzhong07/person-re-ranking + +__all__ = ['re_ranking'] + +import numpy as np + + +def re_ranking(q_g_dist, q_q_dist, g_g_dist, k1: int = 20, k2: int = 6, lambda_value: float = 0.3): + original_dist = np.concatenate( + [np.concatenate([q_q_dist, q_g_dist], axis=1), + np.concatenate([q_g_dist.T, g_g_dist], axis=1)], + axis=0) + original_dist = np.power(original_dist, 2).astype(np.float32) + original_dist = np.transpose(1. * original_dist / np.max(original_dist, axis=0)) + V = np.zeros_like(original_dist).astype(np.float32) + initial_rank = np.argsort(original_dist).astype(np.int32) + + query_num = q_g_dist.shape[0] + gallery_num = q_g_dist.shape[0] + q_g_dist.shape[1] + all_num = gallery_num + + for i in range(all_num): + # k-reciprocal neighbors + forward_k_neigh_index = initial_rank[i, :k1 + 1] + backward_k_neigh_index = initial_rank[forward_k_neigh_index, :k1 + 1] + fi = np.where(backward_k_neigh_index == i)[0] + k_reciprocal_index = forward_k_neigh_index[fi] + k_reciprocal_expansion_index = k_reciprocal_index + for j in range(len(k_reciprocal_index)): + candidate = k_reciprocal_index[j] + candidate_forward_k_neigh_index = initial_rank[candidate, + :int(np.around(k1 / 2.)) + 1] + candidate_backward_k_neigh_index = initial_rank[candidate_forward_k_neigh_index, + :int(np.around(k1 / 2.)) + 1] + fi_candidate = np.where(candidate_backward_k_neigh_index == candidate)[0] + candidate_k_reciprocal_index = candidate_forward_k_neigh_index[fi_candidate] + if len(np.intersect1d(candidate_k_reciprocal_index, k_reciprocal_index)) > 2. / 3 * len( + candidate_k_reciprocal_index): + k_reciprocal_expansion_index = np.append(k_reciprocal_expansion_index, candidate_k_reciprocal_index) + + k_reciprocal_expansion_index = np.unique(k_reciprocal_expansion_index) + weight = np.exp(-original_dist[i, k_reciprocal_expansion_index]) + V[i, k_reciprocal_expansion_index] = 1. * weight / np.sum(weight) + original_dist = original_dist[:query_num, ] + if k2 != 1: + V_qe = np.zeros_like(V, dtype=np.float32) + for i in range(all_num): + V_qe[i, :] = np.mean(V[initial_rank[i, :k2], :], axis=0) + V = V_qe + del V_qe + del initial_rank + invIndex = [] + for i in range(gallery_num): + invIndex.append(np.where(V[:, i] != 0)[0]) + + jaccard_dist = np.zeros_like(original_dist, dtype=np.float32) + + for i in range(query_num): + temp_min = np.zeros(shape=[1, gallery_num], dtype=np.float32) + indNonZero = np.where(V[i, :] != 0)[0] + indImages = [invIndex[ind] for ind in indNonZero] + for j in range(len(indNonZero)): + temp_min[0, indImages[j]] = temp_min[0, indImages[j]] + np.minimum(V[i, indNonZero[j]], + V[indImages[j], indNonZero[j]]) + jaccard_dist[i] = 1 - temp_min / (2. 
- temp_min) + + final_dist = jaccard_dist * (1 - lambda_value) + original_dist * lambda_value + del original_dist, V, jaccard_dist + final_dist = final_dist[:query_num, query_num:] + return final_dist diff --git a/thirdparty/fast-reid/fastreid/evaluation/roc.py b/thirdparty/fast-reid/fastreid/evaluation/roc.py new file mode 100644 index 0000000000000000000000000000000000000000..82a43fe80e229d997e9c6a3ee70e0e196293a56f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/roc.py @@ -0,0 +1,90 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import warnings + +import faiss +import numpy as np + +try: + from .rank_cylib.roc_cy import evaluate_roc_cy + + IS_CYTHON_AVAI = True +except ImportError: + IS_CYTHON_AVAI = False + warnings.warn( + 'Cython roc evaluation (very fast so highly recommended) is ' + 'unavailable, now use python evaluation.' + ) + + +def evaluate_roc_py(distmat, q_pids, g_pids, q_camids, g_camids): + r"""Evaluation with ROC curve. + Key: for each query identity, its gallery images from the same camera view are discarded. + + Args: + distmat (np.ndarray): cosine distance matrix + """ + num_q, num_g = distmat.shape + + indices = np.argsort(distmat, axis=1) + matches = (g_pids[indices] == q_pids[:, np.newaxis]).astype(np.int32) + + pos = [] + neg = [] + for q_idx in range(num_q): + # get query pid and camid + q_pid = q_pids[q_idx] + q_camid = q_camids[q_idx] + + # Remove gallery samples that have the same pid and camid with query + order = indices[q_idx] + remove = (g_pids[order] == q_pid) & (g_camids[order] == q_camid) + keep = np.invert(remove) + raw_cmc = matches[q_idx][keep] + + sort_idx = order[keep] + + q_dist = distmat[q_idx] + ind_pos = np.where(raw_cmc == 1)[0] + pos.extend(q_dist[sort_idx[ind_pos]]) + + ind_neg = np.where(raw_cmc == 0)[0] + neg.extend(q_dist[sort_idx[ind_neg]]) + + scores = np.hstack((pos, neg)) + + labels = np.hstack((np.zeros(len(pos)), np.ones(len(neg)))) + return scores, labels + + +def evaluate_roc( + distmat, + q_pids, + g_pids, + q_camids, + g_camids, + use_cython=True +): + """Evaluates CMC rank. + Args: + distmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery). + q_pids (numpy.ndarray): 1-D array containing person identities + of each query instance. + g_pids (numpy.ndarray): 1-D array containing person identities + of each gallery instance. + q_camids (numpy.ndarray): 1-D array containing camera views under + which each query instance is captured. + g_camids (numpy.ndarray): 1-D array containing camera views under + which each gallery instance is captured. + use_cython (bool, optional): use cython code for evaluation. Default is True. + This is highly recommended as the cython code can speed up the cmc computation + by more than 10x. This requires Cython to be installed. + """ + if use_cython and IS_CYTHON_AVAI: + return evaluate_roc_cy(distmat, q_pids, g_pids, q_camids, g_camids) + else: + return evaluate_roc_py(distmat, q_pids, g_pids, q_camids, g_camids) diff --git a/thirdparty/fast-reid/fastreid/evaluation/testing.py b/thirdparty/fast-reid/fastreid/evaluation/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..fc9019e1bf93de74b2172ef52b33a986daef5b69 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/evaluation/testing.py @@ -0,0 +1,89 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved
+import logging
+import pprint
+import sys
+from collections import OrderedDict
+from collections.abc import Mapping
+
+import numpy as np
+from tabulate import tabulate
+from termcolor import colored
+
+logger = logging.getLogger(__name__)
+
+
+def print_csv_format(results):
+    """
+    Print main metrics in a format similar to Detectron,
+    so that they are easy to copy-paste into a spreadsheet.
+    Args:
+        results (OrderedDict[dict]): task_name -> {metric -> score}
+    """
+    assert isinstance(results, OrderedDict), results  # unordered results cannot be properly printed
+    task = list(results.keys())[0]
+    metrics = ["Datasets"] + [k for k in results[task]]
+
+    csv_results = []
+    for task, res in results.items():
+        csv_results.append((task, *list(res.values())))
+
+    # tabulate it
+    table = tabulate(
+        csv_results,
+        tablefmt="pipe",
+        floatfmt=".2%",
+        headers=metrics,
+        numalign="left",
+    )
+
+    logger.info("Evaluation results in csv format: \n" + colored(table, "cyan"))
+
+
+def verify_results(cfg, results):
+    """
+    Args:
+        results (OrderedDict[dict]): task_name -> {metric -> score}
+    Returns:
+        bool: whether the verification succeeds or not
+    """
+    expected_results = cfg.TEST.EXPECTED_RESULTS
+    if not len(expected_results):
+        return True
+
+    ok = True
+    for task, metric, expected, tolerance in expected_results:
+        actual = results[task][metric]
+        if not np.isfinite(actual):
+            ok = False
+        diff = abs(actual - expected)
+        if diff > tolerance:
+            ok = False
+
+    logger = logging.getLogger(__name__)
+    if not ok:
+        logger.error("Result verification failed!")
+        logger.error("Expected Results: " + str(expected_results))
+        logger.error("Actual Results: " + pprint.pformat(results))
+
+        sys.exit(1)
+    else:
+        logger.info("Results verification passed.")
+    return ok
+
+
+def flatten_results_dict(results):
+    """
+    Expand a hierarchical dict of scalars into a flat dict of scalars.
+    If results[k1][k2][k3] = v, the returned dict will have the entry
+    {"k1/k2/k3": v}.
+ Args: + results (dict): + """ + r = {} + for k, v in results.items(): + if isinstance(v, Mapping): + v = flatten_results_dict(v) + for kk, vv in v.items(): + r[k + "/" + kk] = vv + else: + r[k] = v + return r diff --git a/thirdparty/fast-reid/fastreid/layers/__init__.py b/thirdparty/fast-reid/fastreid/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..32848a9bef3e4f486abd65eb971b8ea6659248a9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/__init__.py @@ -0,0 +1,19 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .activation import * +from .arc_softmax import ArcSoftmax +from .circle_softmax import CircleSoftmax +from .am_softmax import AMSoftmax +from .batch_drop import BatchDrop +from .batch_norm import * +from .context_block import ContextBlock +from .frn import FRN, TLU +from .non_local import Non_local +from .pooling import * +from .se_layer import SELayer +from .splat import SplAtConv2d +from .gather_layer import GatherLayer diff --git a/thirdparty/fast-reid/fastreid/layers/activation.py b/thirdparty/fast-reid/fastreid/layers/activation.py new file mode 100644 index 0000000000000000000000000000000000000000..dafbf607c1c8313b8d4e8b39dcfda31b027927bf --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/activation.py @@ -0,0 +1,59 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + +__all__ = [ + 'Mish', + 'Swish', + 'MemoryEfficientSwish', + 'GELU'] + + +class Mish(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + # inlining this saves 1 second per epoch (V100 GPU) vs having a temp x and then returning x(!) 
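+        # mish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + exp(x)))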
+        return x * (torch.tanh(F.softplus(x)))
+
+
+class Swish(nn.Module):
+    def forward(self, x):
+        return x * torch.sigmoid(x)
+
+
+class SwishImplementation(torch.autograd.Function):
+    @staticmethod
+    def forward(ctx, i):
+        result = i * torch.sigmoid(i)
+        ctx.save_for_backward(i)
+        return result
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        i = ctx.saved_tensors[0]
+        sigmoid_i = torch.sigmoid(i)
+        return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
+
+
+class MemoryEfficientSwish(nn.Module):
+    def forward(self, x):
+        return SwishImplementation.apply(x)
+
+
+class GELU(nn.Module):
+    """
+    Paper Section 3.4, last paragraph: note that BERT used GELU instead of RELU
+    """
+
+    def forward(self, x):
+        return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
diff --git a/thirdparty/fast-reid/fastreid/layers/am_softmax.py b/thirdparty/fast-reid/fastreid/layers/am_softmax.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b03e7ca0584fb1439bc4f01306199f2857eaac1
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/layers/am_softmax.py
@@ -0,0 +1,43 @@
+# encoding: utf-8
+"""
+@author: xingyu liao
+@contact: sherlockliao01@gmail.com
+"""
+
+import torch
+from torch import nn
+import torch.nn.functional as F
+from torch.nn import Parameter
+
+
+class AMSoftmax(nn.Module):
+    r"""Implementation of large margin cosine distance:
+    Args:
+        in_feat: size of each input sample
+        num_classes: size of each output sample
+    """
+
+    def __init__(self, cfg, in_feat, num_classes):
+        super().__init__()
+        self.in_feat = in_feat
+        self._num_classes = num_classes
+        self.s = cfg.MODEL.HEADS.SCALE
+        self.m = cfg.MODEL.HEADS.MARGIN
+        self.weight = Parameter(torch.Tensor(num_classes, in_feat))
+        nn.init.xavier_uniform_(self.weight)
+
+    def forward(self, features, targets):
+        # --------------------------- cos(theta) & phi(theta) ---------------------------
+        cosine = F.linear(F.normalize(features), F.normalize(self.weight))
+        phi = cosine - self.m
+        # --------------------------- convert label to one-hot ---------------------------
+        targets = F.one_hot(targets, num_classes=self._num_classes)
+        output = (targets * phi) + ((1.0 - targets) * cosine)
+        output *= self.s
+
+        return output
+
+    def extra_repr(self):
+        return 'in_features={}, num_classes={}, scale={}, margin={}'.format(
+            self.in_feat, self._num_classes, self.s, self.m
+        )
diff --git a/thirdparty/fast-reid/fastreid/layers/arc_softmax.py b/thirdparty/fast-reid/fastreid/layers/arc_softmax.py
new file mode 100644
index 0000000000000000000000000000000000000000..485444f8b45f1de2c3370b69ce64e0b9c4d86dbe
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/layers/arc_softmax.py
@@ -0,0 +1,55 @@
+# encoding: utf-8
+"""
+@author: liaoxingyu
+@contact: sherlockliao01@gmail.com
+"""
+
+import math
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.nn import Parameter
+
+
+class ArcSoftmax(nn.Module):
+    def __init__(self, cfg, in_feat, num_classes):
+        super().__init__()
+        self.in_feat = in_feat
+        self._num_classes = num_classes
+        self.s = cfg.MODEL.HEADS.SCALE
+        self.m = cfg.MODEL.HEADS.MARGIN
+
+        self.cos_m = math.cos(self.m)
+        self.sin_m = math.sin(self.m)
+        self.threshold = math.cos(math.pi - self.m)
+        self.mm = math.sin(math.pi - self.m) * self.m
+
+        self.weight = Parameter(torch.Tensor(num_classes, in_feat))
+        nn.init.xavier_uniform_(self.weight)
+        self.register_buffer('t', torch.zeros(1))
+
+    def forward(self, features, targets):
+        # get cos(theta)
+        cos_theta =
F.linear(F.normalize(features), F.normalize(self.weight)) + cos_theta = cos_theta.clamp(-1, 1) # for numerical stability + + target_logit = cos_theta[torch.arange(0, features.size(0)), targets].view(-1, 1) + + sin_theta = torch.sqrt(1.0 - torch.pow(target_logit, 2)) + cos_theta_m = target_logit * self.cos_m - sin_theta * self.sin_m # cos(target+margin) + mask = cos_theta > cos_theta_m + final_target_logit = torch.where(target_logit > self.threshold, cos_theta_m, target_logit - self.mm) + + hard_example = cos_theta[mask] + with torch.no_grad(): + self.t = target_logit.mean() * 0.01 + (1 - 0.01) * self.t + cos_theta[mask] = hard_example * (self.t + hard_example) + cos_theta.scatter_(1, targets.view(-1, 1).long(), final_target_logit) + pred_class_logits = cos_theta * self.s + return pred_class_logits + + def extra_repr(self): + return 'in_features={}, num_classes={}, scale={}, margin={}'.format( + self.in_feat, self._num_classes, self.s, self.m + ) diff --git a/thirdparty/fast-reid/fastreid/layers/batch_drop.py b/thirdparty/fast-reid/fastreid/layers/batch_drop.py new file mode 100644 index 0000000000000000000000000000000000000000..5c256975901b2c022a329867d711f810af88a35c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/batch_drop.py @@ -0,0 +1,32 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import random + +from torch import nn + + +class BatchDrop(nn.Module): + """ref: https://github.com/daizuozhuo/batch-dropblock-network/blob/master/models/networks.py + batch drop mask + """ + + def __init__(self, h_ratio, w_ratio): + super(BatchDrop, self).__init__() + self.h_ratio = h_ratio + self.w_ratio = w_ratio + + def forward(self, x): + if self.training: + h, w = x.size()[-2:] + rh = round(self.h_ratio * h) + rw = round(self.w_ratio * w) + sx = random.randint(0, h - rh) + sy = random.randint(0, w - rw) + mask = x.new_ones(x.size()) + mask[:, :, sx:sx + rh, sy:sy + rw] = 0 + x = x * mask + return x diff --git a/thirdparty/fast-reid/fastreid/layers/batch_norm.py b/thirdparty/fast-reid/fastreid/layers/batch_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..e0e88e37115ba1748d08584a694a9658b1f364f5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/batch_norm.py @@ -0,0 +1,208 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import logging + +import torch +import torch.nn.functional as F +from torch import nn + +__all__ = [ + "BatchNorm", + "IBN", + "GhostBatchNorm", + "FrozenBatchNorm", + "SyncBatchNorm", + "get_norm", +] + + +class BatchNorm(nn.BatchNorm2d): + def __init__(self, num_features, eps=1e-05, momentum=0.1, weight_freeze=False, bias_freeze=False, weight_init=1.0, + bias_init=0.0, **kwargs): + super().__init__(num_features, eps=eps, momentum=momentum) + if weight_init is not None: nn.init.constant_(self.weight, weight_init) + if bias_init is not None: nn.init.constant_(self.bias, bias_init) + self.weight.requires_grad_(not weight_freeze) + self.bias.requires_grad_(not bias_freeze) + + +class SyncBatchNorm(nn.SyncBatchNorm): + def __init__(self, num_features, eps=1e-05, momentum=0.1, weight_freeze=False, bias_freeze=False, weight_init=1.0, + bias_init=0.0): + super().__init__(num_features, eps=eps, momentum=momentum) + if weight_init is not None: nn.init.constant_(self.weight, weight_init) + if bias_init is not None: nn.init.constant_(self.bias, bias_init) + self.weight.requires_grad_(not weight_freeze) + self.bias.requires_grad_(not bias_freeze) + + +class 
IBN(nn.Module): + def __init__(self, planes, bn_norm, **kwargs): + super(IBN, self).__init__() + half1 = int(planes / 2) + self.half = half1 + half2 = planes - half1 + self.IN = nn.InstanceNorm2d(half1, affine=True) + self.BN = get_norm(bn_norm, half2, **kwargs) + + def forward(self, x): + split = torch.split(x, self.half, 1) + out1 = self.IN(split[0].contiguous()) + out2 = self.BN(split[1].contiguous()) + out = torch.cat((out1, out2), 1) + return out + + +class GhostBatchNorm(BatchNorm): + def __init__(self, num_features, num_splits=1, **kwargs): + super().__init__(num_features, **kwargs) + self.num_splits = num_splits + self.register_buffer('running_mean', torch.zeros(num_features)) + self.register_buffer('running_var', torch.ones(num_features)) + + def forward(self, input): + N, C, H, W = input.shape + if self.training or not self.track_running_stats: + self.running_mean = self.running_mean.repeat(self.num_splits) + self.running_var = self.running_var.repeat(self.num_splits) + outputs = F.batch_norm( + input.view(-1, C * self.num_splits, H, W), self.running_mean, self.running_var, + self.weight.repeat(self.num_splits), self.bias.repeat(self.num_splits), + True, self.momentum, self.eps).view(N, C, H, W) + self.running_mean = torch.mean(self.running_mean.view(self.num_splits, self.num_features), dim=0) + self.running_var = torch.mean(self.running_var.view(self.num_splits, self.num_features), dim=0) + return outputs + else: + return F.batch_norm( + input, self.running_mean, self.running_var, + self.weight, self.bias, False, self.momentum, self.eps) + + +class FrozenBatchNorm(BatchNorm): + """ + BatchNorm2d where the batch statistics and the affine parameters are fixed. + It contains non-trainable buffers called + "weight" and "bias", "running_mean", "running_var", + initialized to perform identity transformation. + The pre-trained backbone models from Caffe2 only contain "weight" and "bias", + which are computed from the original four parameters of BN. + The affine transform `x * weight + bias` will perform the equivalent + computation of `(x - running_mean) / sqrt(running_var) * weight + bias`. + When loading a backbone model from Caffe2, "running_mean" and "running_var" + will be left unchanged as identity transformation. + Other pre-trained backbone models may contain all 4 parameters. + The forward is implemented by `F.batch_norm(..., training=False)`. + """ + + _version = 3 + + def __init__(self, num_features, eps=1e-5, **kwargs): + super().__init__(num_features, weight_freeze=True, bias_freeze=True, **kwargs) + self.num_features = num_features + self.eps = eps + + def forward(self, x): + if x.requires_grad: + # When gradients are needed, F.batch_norm will use extra memory + # because its backward op computes gradients for weight/bias as well. + scale = self.weight * (self.running_var + self.eps).rsqrt() + bias = self.bias - self.running_mean * scale + scale = scale.reshape(1, -1, 1, 1) + bias = bias.reshape(1, -1, 1, 1) + return x * scale + bias + else: + # When gradients are not needed, F.batch_norm is a single fused op + # and provide more optimization opportunities. 
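+            # training=False makes F.batch_norm use the stored running
+            # statistics, so this fused path computes the same transform as
+            # the scale/bias branch above.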
+ return F.batch_norm( + x, + self.running_mean, + self.running_var, + self.weight, + self.bias, + training=False, + eps=self.eps, + ) + + def _load_from_state_dict( + self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ): + version = local_metadata.get("version", None) + + if version is None or version < 2: + # No running_mean/var in early versions + # This silences the warnings + if prefix + "running_mean" not in state_dict: + state_dict[prefix + "running_mean"] = torch.zeros_like(self.running_mean) + if prefix + "running_var" not in state_dict: + state_dict[prefix + "running_var"] = torch.ones_like(self.running_var) + + if version is not None and version < 3: + logger = logging.getLogger(__name__) + logger.info("FrozenBatchNorm {} is upgraded to version 3.".format(prefix.rstrip("."))) + # In version < 3, running_var is used without +eps. + state_dict[prefix + "running_var"] -= self.eps + + super()._load_from_state_dict( + state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ) + + def __repr__(self): + return "FrozenBatchNorm2d(num_features={}, eps={})".format(self.num_features, self.eps) + + @classmethod + def convert_frozen_batchnorm(cls, module): + """ + Convert BatchNorm/SyncBatchNorm in module into FrozenBatchNorm. + Args: + module (torch.nn.Module): + Returns: + If module is BatchNorm/SyncBatchNorm, returns a new module. + Otherwise, in-place convert module and return it. + Similar to convert_sync_batchnorm in + https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/batchnorm.py + """ + bn_module = nn.modules.batchnorm + bn_module = (bn_module.BatchNorm2d, bn_module.SyncBatchNorm) + res = module + if isinstance(module, bn_module): + res = cls(module.num_features) + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for name, child in module.named_children(): + new_child = cls.convert_frozen_batchnorm(child) + if new_child is not child: + res.add_module(name, new_child) + return res + + +def get_norm(norm, out_channels, **kwargs): + """ + Args: + norm (str or callable): either one of BN, GhostBN, FrozenBN, GN or SyncBN; + or a callable that takes a channel number and returns + the normalization layer as a nn.Module + out_channels: number of channels for normalization layer + + Returns: + nn.Module or None: the normalization layer + """ + if isinstance(norm, str): + if len(norm) == 0: + return None + norm = { + "BN": BatchNorm, + "GhostBN": GhostBatchNorm, + "FrozenBN": FrozenBatchNorm, + "GN": lambda channels, **args: nn.GroupNorm(32, channels), + "syncBN": SyncBatchNorm, + }[norm] + return norm(out_channels, **kwargs) diff --git a/thirdparty/fast-reid/fastreid/layers/circle_softmax.py b/thirdparty/fast-reid/fastreid/layers/circle_softmax.py new file mode 100644 index 0000000000000000000000000000000000000000..2224e0644f4dedf29e63985ada6e94eac151a0c1 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/circle_softmax.py @@ -0,0 +1,45 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn import Parameter + + +class CircleSoftmax(nn.Module): + def __init__(self, cfg, in_feat, num_classes): + super().__init__() + self.in_feat = in_feat + self._num_classes
= num_classes + self.s = cfg.MODEL.HEADS.SCALE + self.m = cfg.MODEL.HEADS.MARGIN + + self.weight = Parameter(torch.Tensor(num_classes, in_feat)) + nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5)) + + def forward(self, features, targets): + sim_mat = F.linear(F.normalize(features), F.normalize(self.weight)) + alpha_p = torch.clamp_min(-sim_mat.detach() + 1 + self.m, min=0.) + alpha_n = torch.clamp_min(sim_mat.detach() + self.m, min=0.) + delta_p = 1 - self.m + delta_n = self.m + + s_p = self.s * alpha_p * (sim_mat - delta_p) + s_n = self.s * alpha_n * (sim_mat - delta_n) + + targets = F.one_hot(targets, num_classes=self._num_classes) + + pred_class_logits = targets * s_p + (1.0 - targets) * s_n + + return pred_class_logits + + def extra_repr(self): + return 'in_features={}, num_classes={}, scale={}, margin={}'.format( + self.in_feat, self._num_classes, self.s, self.m + ) diff --git a/thirdparty/fast-reid/fastreid/layers/context_block.py b/thirdparty/fast-reid/fastreid/layers/context_block.py new file mode 100644 index 0000000000000000000000000000000000000000..7b1098a8663e48c4affead9bef504acf3e50e2ef --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/context_block.py @@ -0,0 +1,113 @@ +# copy from https://github.com/xvjiarui/GCNet/blob/master/mmdet/ops/gcb/context_block.py + +import torch +from torch import nn + +__all__ = ['ContextBlock'] + + +def last_zero_init(m): + if isinstance(m, nn.Sequential): + nn.init.constant_(m[-1].weight, val=0) + if hasattr(m[-1], 'bias') and m[-1].bias is not None: + nn.init.constant_(m[-1].bias, 0) + else: + nn.init.constant_(m.weight, val=0) + if hasattr(m, 'bias') and m.bias is not None: + nn.init.constant_(m.bias, 0) + + +class ContextBlock(nn.Module): + + def __init__(self, + inplanes, + ratio, + pooling_type='att', + fusion_types=('channel_add',)): + super(ContextBlock, self).__init__() + assert pooling_type in ['avg', 'att'] + assert isinstance(fusion_types, (list, tuple)) + valid_fusion_types = ['channel_add', 'channel_mul'] + assert all([f in valid_fusion_types for f in fusion_types]) + assert len(fusion_types) > 0, 'at least one fusion should be used' + self.inplanes = inplanes + self.ratio = ratio + self.planes = int(inplanes * ratio) + self.pooling_type = pooling_type + self.fusion_types = fusion_types + if pooling_type == 'att': + self.conv_mask = nn.Conv2d(inplanes, 1, kernel_size=1) + self.softmax = nn.Softmax(dim=2) + else: + self.avg_pool = nn.AdaptiveAvgPool2d(1) + if 'channel_add' in fusion_types: + self.channel_add_conv = nn.Sequential( + nn.Conv2d(self.inplanes, self.planes, kernel_size=1), + nn.LayerNorm([self.planes, 1, 1]), + nn.ReLU(inplace=True), # yapf: disable + nn.Conv2d(self.planes, self.inplanes, kernel_size=1)) + else: + self.channel_add_conv = None + if 'channel_mul' in fusion_types: + self.channel_mul_conv = nn.Sequential( + nn.Conv2d(self.inplanes, self.planes, kernel_size=1), + nn.LayerNorm([self.planes, 1, 1]), + nn.ReLU(inplace=True), # yapf: disable + nn.Conv2d(self.planes, self.inplanes, kernel_size=1)) + else: + self.channel_mul_conv = None + self.reset_parameters() + + def reset_parameters(self): + if self.pooling_type == 'att': + nn.init.kaiming_normal_(self.conv_mask.weight, a=0, mode='fan_in', nonlinearity='relu') + if hasattr(self.conv_mask, 'bias') and self.conv_mask.bias is not None: + nn.init.constant_(self.conv_mask.bias, 0) + self.conv_mask.inited = True + + if self.channel_add_conv is not None: + last_zero_init(self.channel_add_conv) + if self.channel_mul_conv is not None: + 
last_zero_init(self.channel_mul_conv) + + def spatial_pool(self, x): + batch, channel, height, width = x.size() + if self.pooling_type == 'att': + input_x = x + # [N, C, H * W] + input_x = input_x.view(batch, channel, height * width) + # [N, 1, C, H * W] + input_x = input_x.unsqueeze(1) + # [N, 1, H, W] + context_mask = self.conv_mask(x) + # [N, 1, H * W] + context_mask = context_mask.view(batch, 1, height * width) + # [N, 1, H * W] + context_mask = self.softmax(context_mask) + # [N, 1, H * W, 1] + context_mask = context_mask.unsqueeze(-1) + # [N, 1, C, 1] + context = torch.matmul(input_x, context_mask) + # [N, C, 1, 1] + context = context.view(batch, channel, 1, 1) + else: + # [N, C, 1, 1] + context = self.avg_pool(x) + + return context + + def forward(self, x): + # [N, C, 1, 1] + context = self.spatial_pool(x) + + out = x + if self.channel_mul_conv is not None: + # [N, C, 1, 1] + channel_mul_term = torch.sigmoid(self.channel_mul_conv(context)) + out = out * channel_mul_term + if self.channel_add_conv is not None: + # [N, C, 1, 1] + channel_add_term = self.channel_add_conv(context) + out = out + channel_add_term + + return out diff --git a/thirdparty/fast-reid/fastreid/layers/frn.py b/thirdparty/fast-reid/fastreid/layers/frn.py new file mode 100644 index 0000000000000000000000000000000000000000..f00a1e446a55d81a4a1ebd15c6e7e245eaa4cb21 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/frn.py @@ -0,0 +1,199 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +from torch import nn +from torch.nn.modules.batchnorm import BatchNorm2d +from torch.nn import ReLU, LeakyReLU +from torch.nn.parameter import Parameter + + +class TLU(nn.Module): + def __init__(self, num_features): + """max(y, tau) = max(y - tau, 0) + tau = ReLU(y - tau) + tau""" + super(TLU, self).__init__() + self.num_features = num_features + self.tau = Parameter(torch.Tensor(num_features)) + self.reset_parameters() + + def reset_parameters(self): + nn.init.zeros_(self.tau) + + def extra_repr(self): + return 'num_features={num_features}'.format(**self.__dict__) + + def forward(self, x): + return torch.max(x, self.tau.view(1, self.num_features, 1, 1)) + + +class FRN(nn.Module): + def __init__(self, num_features, eps=1e-6, is_eps_leanable=False): + """ + weight = gamma, bias = beta + beta, gamma: + Variables of shape [1, 1, 1, C]. if TensorFlow + Variables of shape [1, C, 1, 1]. if PyTorch + eps: A scalar constant or learnable variable. + """ + super(FRN, self).__init__() + + self.num_features = num_features + self.init_eps = eps + self.is_eps_leanable = is_eps_leanable + + self.weight = Parameter(torch.Tensor(num_features)) + self.bias = Parameter(torch.Tensor(num_features)) + if is_eps_leanable: + self.eps = Parameter(torch.Tensor(1)) + else: + self.register_buffer('eps', torch.Tensor([eps])) + self.reset_parameters() + + def reset_parameters(self): + nn.init.ones_(self.weight) + nn.init.zeros_(self.bias) + if self.is_eps_leanable: + nn.init.constant_(self.eps, self.init_eps) + + def extra_repr(self): + return 'num_features={num_features}, eps={init_eps}'.format(**self.__dict__) + + def forward(self, x): + """ + 0, 1, 2, 3 -> (B, H, W, C) in TensorFlow + 0, 1, 2, 3 -> (B, C, H, W) in PyTorch + TensorFlow code + nu2 = tf.reduce_mean(tf.square(x), axis=[1, 2], keepdims=True) + x = x * tf.rsqrt(nu2 + tf.abs(eps)) + # This Code include TLU function max(y, tau) + return tf.maximum(gamma * x + beta, tau) + """ + # Compute the mean norm of activations per channel. 
+ nu2 = x.pow(2).mean(dim=[2, 3], keepdim=True) + + # Perform FRN. + x = x * torch.rsqrt(nu2 + self.eps.abs()) + + # Scale and Bias + x = self.weight.view(1, self.num_features, 1, 1) * x + self.bias.view(1, self.num_features, 1, 1) + # x = self.weight * x + self.bias + return x + + +def bnrelu_to_frn(module): + """ + Convert 'BatchNorm2d + ReLU' to 'FRN + TLU' + """ + mod = module + before_name = None + before_child = None + is_before_bn = False + + for name, child in module.named_children(): + if is_before_bn and isinstance(child, (ReLU, LeakyReLU)): + # Convert BN to FRN + if isinstance(before_child, BatchNorm2d): + mod.add_module( + before_name, FRN(num_features=before_child.num_features)) + else: + raise NotImplementedError() + + # Convert ReLU to TLU + mod.add_module(name, TLU(num_features=before_child.num_features)) + else: + mod.add_module(name, bnrelu_to_frn(child)) + + before_name = name + before_child = child + is_before_bn = isinstance(child, BatchNorm2d) + return mod + + +def convert(module, flag_name): + mod = module + before_ch = None + for name, child in module.named_children(): + if hasattr(child, flag_name) and getattr(child, flag_name): + if isinstance(child, BatchNorm2d): + before_ch = child.num_features + mod.add_module(name, FRN(num_features=child.num_features)) + # TODO bn is no good... + if isinstance(child, (ReLU, LeakyReLU)): + mod.add_module(name, TLU(num_features=before_ch)) + else: + mod.add_module(name, convert(child, flag_name)) + return mod + + +def remove_flags(module, flag_name): + mod = module + for name, child in module.named_children(): + if hasattr(child, flag_name): + delattr(child, flag_name) + mod.add_module(name, remove_flags(child, flag_name)) + else: + mod.add_module(name, remove_flags(child, flag_name)) + return mod + + +def bnrelu_to_frn2(model, input_size=(3, 128, 128), batch_size=2, flag_name='is_convert_frn'): + forward_hooks = list() + backward_hooks = list() + + is_before_bn = [False] + + def register_forward_hook(module): + def hook(self, input, output): + if isinstance(module, (nn.Sequential, nn.ModuleList)) or (module == model): + is_before_bn.append(False) + return + + # input and output are required by the hook signature + is_converted = is_before_bn[-1] and isinstance(self, (ReLU, LeakyReLU)) + if is_converted: + setattr(self, flag_name, True) + is_before_bn.append(isinstance(self, BatchNorm2d)) + + forward_hooks.append(module.register_forward_hook(hook)) + + is_before_relu = [False] + + def register_backward_hook(module): + def hook(self, input, output): + if isinstance(module, (nn.Sequential, nn.ModuleList)) or (module == model): + is_before_relu.append(False) + return + is_converted = is_before_relu[-1] and isinstance(self, BatchNorm2d) + if is_converted: + setattr(self, flag_name, True) + is_before_relu.append(isinstance(self, (ReLU, LeakyReLU))) + + backward_hooks.append(module.register_backward_hook(hook)) + + # multiple inputs to the network + if isinstance(input_size, tuple): + input_size = [input_size] + + # batch_size of 2 for batchnorm + x = [torch.rand(batch_size, *in_size) for in_size in input_size] + + # register hook + model.apply(register_forward_hook) + model.apply(register_backward_hook) + + # make a forward pass + output = model(*x) + output.sum().backward() # reduce to a scalar; backward() cannot be called on the raw output + + # remove these hooks + for h in forward_hooks: + h.remove() + for h in backward_hooks: + h.remove() + + model = convert(model, flag_name=flag_name) + model = remove_flags(model, flag_name=flag_name) + return model diff
--git a/thirdparty/fast-reid/fastreid/layers/gather_layer.py b/thirdparty/fast-reid/fastreid/layers/gather_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..1643b483a01dd1f15f26dc7862df63da44603a45 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/gather_layer.py @@ -0,0 +1,30 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on: https://github.com/open-mmlab/OpenSelfSup/blob/master/openselfsup/models/utils/gather_layer.py + +import torch +import torch.distributed as dist + + +class GatherLayer(torch.autograd.Function): + """Gather tensors from all processes, supporting backward propagation. + """ + + @staticmethod + def forward(ctx, input): + ctx.save_for_backward(input) + output = [torch.zeros_like(input) \ + for _ in range(dist.get_world_size())] + dist.all_gather(output, input) + return tuple(output) + + @staticmethod + def backward(ctx, *grads): + input, = ctx.saved_tensors + grad_out = torch.zeros_like(input) + grad_out[:] = grads[dist.get_rank()] + return grad_out diff --git a/thirdparty/fast-reid/fastreid/layers/non_local.py b/thirdparty/fast-reid/fastreid/layers/non_local.py new file mode 100644 index 0000000000000000000000000000000000000000..a10a9d200446b8ca2691c868a4700e27716438c6 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/non_local.py @@ -0,0 +1,54 @@ +# encoding: utf-8 + + +import torch +from torch import nn +from .batch_norm import get_norm + + +class Non_local(nn.Module): + def __init__(self, in_channels, bn_norm, reduc_ratio=2): + super(Non_local, self).__init__() + + self.in_channels = in_channels + self.inter_channels = in_channels // reduc_ratio + + self.g = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, + kernel_size=1, stride=1, padding=0) + + self.W = nn.Sequential( + nn.Conv2d(in_channels=self.inter_channels, out_channels=self.in_channels, + kernel_size=1, stride=1, padding=0), + get_norm(bn_norm, self.in_channels), + ) + nn.init.constant_(self.W[1].weight, 0.0) + nn.init.constant_(self.W[1].bias, 0.0) + + self.theta = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, + kernel_size=1, stride=1, padding=0) + + self.phi = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, + kernel_size=1, stride=1, padding=0) + + def forward(self, x): + """ + :param x: (b, c, h, w) + :return x: (b, c, h, w) + """ + batch_size = x.size(0) + g_x = self.g(x).view(batch_size, self.inter_channels, -1) + g_x = g_x.permute(0, 2, 1) + + theta_x = self.theta(x).view(batch_size, self.inter_channels, -1) + theta_x = theta_x.permute(0, 2, 1) + phi_x = self.phi(x).view(batch_size, self.inter_channels, -1) + f = torch.matmul(theta_x, phi_x) + N = f.size(-1) + f_div_C = f / N + + y = torch.matmul(f_div_C, g_x) + y = y.permute(0, 2, 1).contiguous() + y = y.view(batch_size, self.inter_channels, *x.size()[2:]) + W_y = self.W(y) + z = W_y + x + return z diff --git a/thirdparty/fast-reid/fastreid/layers/pooling.py b/thirdparty/fast-reid/fastreid/layers/pooling.py new file mode 100644 index 0000000000000000000000000000000000000000..505a3323414ca7c69a64b60b2ab037a62235e4c5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/pooling.py @@ -0,0 +1,99 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F +from torch import nn + +__all__ = ["Flatten", + "GeneralizedMeanPooling", + "GeneralizedMeanPoolingP", + "FastGlobalAvgPool2d", 
"AdaptiveAvgMaxPool2d", + "ClipGlobalAvgPool2d", + ] + + +class Flatten(nn.Module): + def forward(self, input): + return input.view(input.size(0), -1) + + +class GeneralizedMeanPooling(nn.Module): + r"""Applies a 2D power-average adaptive pooling over an input signal composed of several input planes. + The function computed is: :math:`f(X) = pow(sum(pow(X, p)), 1/p)` + - At p = infinity, one gets Max Pooling + - At p = 1, one gets Average Pooling + The output is of size H x W, for any input size. + The number of output features is equal to the number of input planes. + Args: + output_size: the target output size of the image of the form H x W. + Can be a tuple (H, W) or a single H for a square image H x H + H and W can be either a ``int``, or ``None`` which means the size will + be the same as that of the input. + """ + + def __init__(self, norm=3, output_size=1, eps=1e-6): + super(GeneralizedMeanPooling, self).__init__() + assert norm > 0 + self.p = float(norm) + self.output_size = output_size + self.eps = eps + + def forward(self, x): + x = x.clamp(min=self.eps).pow(self.p) + return torch.nn.functional.adaptive_avg_pool2d(x, self.output_size).pow(1. / self.p) + + def __repr__(self): + return self.__class__.__name__ + '(' \ + + str(self.p) + ', ' \ + + 'output_size=' + str(self.output_size) + ')' + + +class GeneralizedMeanPoolingP(GeneralizedMeanPooling): + """ Same, but norm is trainable + """ + + def __init__(self, norm=3, output_size=1, eps=1e-6): + super(GeneralizedMeanPoolingP, self).__init__(norm, output_size, eps) + self.p = nn.Parameter(torch.ones(1) * norm) + + +class AdaptiveAvgMaxPool2d(nn.Module): + def __init__(self): + super(AdaptiveAvgMaxPool2d, self).__init__() + self.gap = FastGlobalAvgPool2d() + self.gmp = nn.AdaptiveMaxPool2d(1) + + def forward(self, x): + avg_feat = self.gap(x) + max_feat = self.gmp(x) + feat = avg_feat + max_feat + return feat + + +class FastGlobalAvgPool2d(nn.Module): + def __init__(self, flatten=False): + super(FastGlobalAvgPool2d, self).__init__() + self.flatten = flatten + + def forward(self, x): + if self.flatten: + in_size = x.size() + return x.view((in_size[0], in_size[1], -1)).mean(dim=2) + else: + return x.view(x.size(0), x.size(1), -1).mean(-1).view(x.size(0), x.size(1), 1, 1) + + +class ClipGlobalAvgPool2d(nn.Module): + def __init__(self): + super().__init__() + self.avgpool = FastGlobalAvgPool2d() + + def forward(self, x): + x = self.avgpool(x) + x = torch.clamp(x, min=0., max=1.) 
+ return x diff --git a/thirdparty/fast-reid/fastreid/layers/se_layer.py b/thirdparty/fast-reid/fastreid/layers/se_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..04e1dc655adb4f2e6778cbd0156cf49dac4232d7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/se_layer.py @@ -0,0 +1,25 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from torch import nn + + +class SELayer(nn.Module): + def __init__(self, channel, reduction=16): + super(SELayer, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Sequential( + nn.Linear(channel, int(channel / reduction), bias=False), + nn.ReLU(inplace=True), + nn.Linear(int(channel / reduction), channel, bias=False), + nn.Sigmoid() + ) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) + y = self.fc(y).view(b, c, 1, 1) + return x * y.expand_as(x) diff --git a/thirdparty/fast-reid/fastreid/layers/splat.py b/thirdparty/fast-reid/fastreid/layers/splat.py new file mode 100644 index 0000000000000000000000000000000000000000..2a5fac788559185cfd4cdbd10a2845916cc00754 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/layers/splat.py @@ -0,0 +1,97 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F +from torch import nn +from torch.nn import Conv2d, ReLU +from torch.nn.modules.utils import _pair +from fastreid.layers import get_norm + + +class SplAtConv2d(nn.Module): + """Split-Attention Conv2d + """ + + def __init__(self, in_channels, channels, kernel_size, stride=(1, 1), padding=(0, 0), + dilation=(1, 1), groups=1, bias=True, + radix=2, reduction_factor=4, + rectify=False, rectify_avg=False, norm_layer=None, num_splits=1, + dropblock_prob=0.0, **kwargs): + super(SplAtConv2d, self).__init__() + padding = _pair(padding) + self.rectify = rectify and (padding[0] > 0 or padding[1] > 0) + self.rectify_avg = rectify_avg + inter_channels = max(in_channels * radix // reduction_factor, 32) + self.radix = radix + self.cardinality = groups + self.channels = channels + self.dropblock_prob = dropblock_prob + if self.rectify: + from rfconv import RFConv2d + self.conv = RFConv2d(in_channels, channels * radix, kernel_size, stride, padding, dilation, + groups=groups * radix, bias=bias, average_mode=rectify_avg, **kwargs) + else: + self.conv = Conv2d(in_channels, channels * radix, kernel_size, stride, padding, dilation, + groups=groups * radix, bias=bias, **kwargs) + self.use_bn = norm_layer is not None + if self.use_bn: + self.bn0 = get_norm(norm_layer, channels * radix) + self.relu = ReLU(inplace=True) + self.fc1 = Conv2d(channels, inter_channels, 1, groups=self.cardinality) + if self.use_bn: + self.bn1 = get_norm(norm_layer, inter_channels) + self.fc2 = Conv2d(inter_channels, channels * radix, 1, groups=self.cardinality) + + self.rsoftmax = rSoftMax(radix, groups) + + def forward(self, x): + x = self.conv(x) + if self.use_bn: + x = self.bn0(x) + if self.dropblock_prob > 0.0: + # NOTE: self.dropblock is never defined in this file, so dropblock_prob must stay 0.0. + x = self.dropblock(x) + x = self.relu(x) + + batch, rchannel = x.shape[:2] + if self.radix > 1: + splited = torch.split(x, rchannel // self.radix, dim=1) + gap = sum(splited) + else: + gap = x + gap = F.adaptive_avg_pool2d(gap, 1) + gap = self.fc1(gap) + + if self.use_bn: + gap = self.bn1(gap) + gap = self.relu(gap) + + atten = self.fc2(gap) + atten = self.rsoftmax(atten).view(batch, -1, 1, 1) + + if self.radix > 1: + attens = torch.split(atten, rchannel // self.radix, dim=1) + out = sum([att * 
split for (att, split) in zip(attens, splited)]) + else: + out = atten * x + return out.contiguous() + + +class rSoftMax(nn.Module): + def __init__(self, radix, cardinality): + super().__init__() + self.radix = radix + self.cardinality = cardinality + + def forward(self, x): + batch = x.size(0) + if self.radix > 1: + x = x.view(batch, self.cardinality, self.radix, -1).transpose(1, 2) + x = F.softmax(x, dim=1) + x = x.reshape(batch, -1) + else: + x = torch.sigmoid(x) + return x diff --git a/thirdparty/fast-reid/fastreid/modeling/__init__.py b/thirdparty/fast-reid/fastreid/modeling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..71a2e7de867b9e1c28b56f2160254d578d1ca0c5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/__init__.py @@ -0,0 +1,7 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +from .meta_arch import build_model diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/__init__.py b/thirdparty/fast-reid/fastreid/modeling/backbones/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bf2a35a6edae7de6c1319a9b46bbf34bda62965b --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/__init__.py @@ -0,0 +1,13 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .build import build_backbone, BACKBONE_REGISTRY + +from .resnet import build_resnet_backbone +from .osnet import build_osnet_backbone +from .resnest import build_resnest_backbone +from .resnext import build_resnext_backbone +from .regnet import build_regnet_backbone, build_effnet_backbone diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/build.py b/thirdparty/fast-reid/fastreid/modeling/backbones/build.py new file mode 100644 index 0000000000000000000000000000000000000000..8786238fd566be1056116044af62c7373b5c438a --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/build.py @@ -0,0 +1,28 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from ...utils.registry import Registry + +BACKBONE_REGISTRY = Registry("BACKBONE") +BACKBONE_REGISTRY.__doc__ = """ +Registry for backbones, which extract feature maps from images. +The registered object must be a callable that accepts a single argument, +a :class:`detectron2.config.CfgNode`-style config object. +It must return an instance of :class:`Backbone`. +""" + + +def build_backbone(cfg): + """ + Build a backbone from `cfg.MODEL.BACKBONE.NAME`. 
+ Returns: + an instance of :class:`Backbone` + """ + + backbone_name = cfg.MODEL.BACKBONE.NAME + backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg) + return backbone diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/osnet.py b/thirdparty/fast-reid/fastreid/modeling/backbones/osnet.py new file mode 100644 index 0000000000000000000000000000000000000000..26908ecef5cee529a6ec72ba4311c4ab9a0a4ecf --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/osnet.py @@ -0,0 +1,530 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on: +# https://github.com/KaiyangZhou/deep-person-reid/blob/master/torchreid/models/osnet.py + +import logging + +import torch +from torch import nn + +from fastreid.layers import get_norm +from fastreid.utils import comm +from .build import BACKBONE_REGISTRY + +logger = logging.getLogger(__name__) +model_urls = { + 'osnet_x1_0': + 'https://drive.google.com/uc?id=1LaG1EJpHrxdAxKnSCJ_i0u-nbxSAeiFY', + 'osnet_x0_75': + 'https://drive.google.com/uc?id=1uwA9fElHOk3ZogwbeY5GkLI6QPTX70Hq', + 'osnet_x0_5': + 'https://drive.google.com/uc?id=16DGLbZukvVYgINws8u8deSaOqjybZ83i', + 'osnet_x0_25': + 'https://drive.google.com/uc?id=1rb8UN5ZzPKRc_xvtHlyDh-cSz88YX9hs', + 'osnet_ibn_x1_0': + 'https://drive.google.com/uc?id=1sr90V6irlYYDd4_4ISU2iruoRG8J__6l' +} + + +########## +# Basic layers +########## +class ConvLayer(nn.Module): + """Convolution layer (conv + bn + relu).""" + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + bn_norm, + stride=1, + padding=0, + groups=1, + IN=False + ): + super(ConvLayer, self).__init__() + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=padding, + bias=False, + groups=groups + ) + if IN: + self.bn = nn.InstanceNorm2d(out_channels, affine=True) + else: + self.bn = get_norm(bn_norm, out_channels) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + return x + + +class Conv1x1(nn.Module): + """1x1 convolution + bn + relu.""" + + def __init__(self, in_channels, out_channels, bn_norm, stride=1, groups=1): + super(Conv1x1, self).__init__() + self.conv = nn.Conv2d( + in_channels, + out_channels, + 1, + stride=stride, + padding=0, + bias=False, + groups=groups + ) + self.bn = get_norm(bn_norm, out_channels) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + return x + + +class Conv1x1Linear(nn.Module): + """1x1 convolution + bn (w/o non-linearity).""" + + def __init__(self, in_channels, out_channels, bn_norm, stride=1): + super(Conv1x1Linear, self).__init__() + self.conv = nn.Conv2d( + in_channels, out_channels, 1, stride=stride, padding=0, bias=False + ) + self.bn = get_norm(bn_norm, out_channels) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + return x + + +class Conv3x3(nn.Module): + """3x3 convolution + bn + relu.""" + + def __init__(self, in_channels, out_channels, bn_norm, stride=1, groups=1): + super(Conv3x3, self).__init__() + self.conv = nn.Conv2d( + in_channels, + out_channels, + 3, + stride=stride, + padding=1, + bias=False, + groups=groups + ) + self.bn = get_norm(bn_norm, out_channels) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + return x + + +class LightConv3x3(nn.Module): + """Lightweight 3x3 convolution. + 1x1 (linear) + dw 3x3 (nonlinear). 
+ """ + + def __init__(self, in_channels, out_channels, bn_norm): + super(LightConv3x3, self).__init__() + self.conv1 = nn.Conv2d( + in_channels, out_channels, 1, stride=1, padding=0, bias=False + ) + self.conv2 = nn.Conv2d( + out_channels, + out_channels, + 3, + stride=1, + padding=1, + bias=False, + groups=out_channels + ) + self.bn = get_norm(bn_norm, out_channels) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + x = self.bn(x) + x = self.relu(x) + return x + + +########## +# Building blocks for omni-scale feature learning +########## +class ChannelGate(nn.Module): + """A mini-network that generates channel-wise gates conditioned on input tensor.""" + + def __init__( + self, + in_channels, + num_gates=None, + return_gates=False, + gate_activation='sigmoid', + reduction=16, + layer_norm=False + ): + super(ChannelGate, self).__init__() + if num_gates is None: num_gates = in_channels + self.return_gates = return_gates + + self.global_avgpool = nn.AdaptiveAvgPool2d(1) + + self.fc1 = nn.Conv2d( + in_channels, + in_channels // reduction, + kernel_size=1, + bias=True, + padding=0 + ) + self.norm1 = None + if layer_norm: self.norm1 = nn.LayerNorm((in_channels // reduction, 1, 1)) + self.relu = nn.ReLU(inplace=True) + self.fc2 = nn.Conv2d( + in_channels // reduction, + num_gates, + kernel_size=1, + bias=True, + padding=0 + ) + if gate_activation == 'sigmoid': + self.gate_activation = nn.Sigmoid() + elif gate_activation == 'relu': + self.gate_activation = nn.ReLU(inplace=True) + elif gate_activation == 'linear': + self.gate_activation = nn.Identity() + else: + raise RuntimeError( + "Unknown gate activation: {}".format(gate_activation) + ) + + def forward(self, x): + input = x + x = self.global_avgpool(x) + x = self.fc1(x) + if self.norm1 is not None: x = self.norm1(x) + x = self.relu(x) + x = self.fc2(x) + x = self.gate_activation(x) + if self.return_gates: return x + return input * x + + +class OSBlock(nn.Module): + """Omni-scale feature learning block.""" + + def __init__( + self, + in_channels, + out_channels, + bn_norm, + IN=False, + bottleneck_reduction=4, + **kwargs + ): + super(OSBlock, self).__init__() + mid_channels = out_channels // bottleneck_reduction + self.conv1 = Conv1x1(in_channels, mid_channels, bn_norm) + self.conv2a = LightConv3x3(mid_channels, mid_channels, bn_norm) + self.conv2b = nn.Sequential( + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + ) + self.conv2c = nn.Sequential( + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + ) + self.conv2d = nn.Sequential( + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + LightConv3x3(mid_channels, mid_channels, bn_norm), + ) + self.gate = ChannelGate(mid_channels) + self.conv3 = Conv1x1Linear(mid_channels, out_channels, bn_norm) + self.downsample = None + if in_channels != out_channels: + self.downsample = Conv1x1Linear(in_channels, out_channels, bn_norm) + self.IN = None + if IN: self.IN = nn.InstanceNorm2d(out_channels, affine=True) + self.relu = nn.ReLU(True) + + def forward(self, x): + identity = x + x1 = self.conv1(x) + x2a = self.conv2a(x1) + x2b = self.conv2b(x1) + x2c = self.conv2c(x1) + x2d = self.conv2d(x1) + x2 = self.gate(x2a) + self.gate(x2b) + self.gate(x2c) + self.gate(x2d) + x3 = self.conv3(x2) 
+ if self.downsample is not None: + identity = self.downsample(identity) + out = x3 + identity + if self.IN is not None: + out = self.IN(out) + return self.relu(out) + + +########## +# Network architecture +########## +class OSNet(nn.Module): + """Omni-Scale Network. + + Reference: + - Zhou et al. Omni-Scale Feature Learning for Person Re-Identification. ICCV, 2019. + - Zhou et al. Learning Generalisable Omni-Scale Representations + for Person Re-Identification. arXiv preprint, 2019. + """ + + def __init__( + self, + blocks, + layers, + channels, + bn_norm, + IN=False, + **kwargs + ): + super(OSNet, self).__init__() + num_blocks = len(blocks) + assert num_blocks == len(layers) + assert num_blocks == len(channels) - 1 + + # convolutional backbone + self.conv1 = ConvLayer(3, channels[0], 7, bn_norm, stride=2, padding=3, IN=IN) + self.maxpool = nn.MaxPool2d(3, stride=2, padding=1) + self.conv2 = self._make_layer( + blocks[0], + layers[0], + channels[0], + channels[1], + bn_norm, + reduce_spatial_size=True, + IN=IN + ) + self.conv3 = self._make_layer( + blocks[1], + layers[1], + channels[1], + channels[2], + bn_norm, + reduce_spatial_size=True + ) + self.conv4 = self._make_layer( + blocks[2], + layers[2], + channels[2], + channels[3], + bn_norm, + reduce_spatial_size=False + ) + self.conv5 = Conv1x1(channels[3], channels[3], bn_norm) + + self._init_params() + + def _make_layer( + self, + block, + layer, + in_channels, + out_channels, + bn_norm, + reduce_spatial_size, + IN=False + ): + layers = [] + + layers.append(block(in_channels, out_channels, bn_norm, IN=IN)) + for i in range(1, layer): + layers.append(block(out_channels, out_channels, bn_norm, IN=IN)) + + if reduce_spatial_size: + layers.append( + nn.Sequential( + Conv1x1(out_channels, out_channels, bn_norm), + nn.AvgPool2d(2, stride=2), + ) + ) + + return nn.Sequential(*layers) + + def _init_params(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_( + m.weight, mode='fan_out', nonlinearity='relu' + ) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + elif isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.conv1(x) + x = self.maxpool(x) + x = self.conv2(x) + x = self.conv3(x) + x = self.conv4(x) + x = self.conv5(x) + return x + + +def init_pretrained_weights(model, key=''): + """Initializes model with pretrained weights. + + Layers that don't match with pretrained layers in name or size are kept unchanged. + """ + import os + import errno + import gdown + from collections import OrderedDict + import warnings + import logging + + logger = logging.getLogger(__name__) + + def _get_torch_home(): + ENV_TORCH_HOME = 'TORCH_HOME' + ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME' + DEFAULT_CACHE_DIR = '~/.cache' + torch_home = os.path.expanduser( + os.getenv( + ENV_TORCH_HOME, + os.path.join( + os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch' + ) + ) + ) + return torch_home + + torch_home = _get_torch_home() + model_dir = os.path.join(torch_home, 'checkpoints') + try: + os.makedirs(model_dir) + except OSError as e: + if e.errno == errno.EEXIST: + # Directory already exists, ignore. + pass + else: + # Unexpected OSError, re-raise. 
+ raise + filename = key + '_imagenet.pth' + cached_file = os.path.join(model_dir, filename) + + if not os.path.exists(cached_file): + if comm.is_main_process(): + gdown.download(model_urls[key], cached_file, quiet=False) + + comm.synchronize() + + state_dict = torch.load(cached_file, map_location=torch.device('cpu')) + model_dict = model.state_dict() + new_state_dict = OrderedDict() + matched_layers, discarded_layers = [], [] + + for k, v in state_dict.items(): + if k.startswith('module.'): + k = k[7:] # discard module. + + if k in model_dict and model_dict[k].size() == v.size(): + new_state_dict[k] = v + matched_layers.append(k) + else: + discarded_layers.append(k) + + model_dict.update(new_state_dict) + model.load_state_dict(model_dict) + + if len(matched_layers) == 0: + warnings.warn( + 'The pretrained weights from "{}" cannot be loaded, ' + 'please check the key names manually ' + '(** ignored and continue **)'.format(cached_file) + ) + else: + logger.info( + 'Successfully loaded imagenet pretrained weights from "{}"'.format(cached_file) + ) + if len(discarded_layers) > 0: + logger.info( + '** The following layers are discarded ' + 'due to unmatched keys or layer size: {}'.format(discarded_layers) + ) + + +@BACKBONE_REGISTRY.register() +def build_osnet_backbone(cfg): + """ + Create an OSNet instance from config. + Returns: + OSNet: an :class:`OSNet` instance + """ + + # fmt: off + pretrain = cfg.MODEL.BACKBONE.PRETRAIN + pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH + with_ibn = cfg.MODEL.BACKBONE.WITH_IBN + bn_norm = cfg.MODEL.BACKBONE.NORM + depth = cfg.MODEL.BACKBONE.DEPTH + # fmt: on + + num_blocks_per_stage = [2, 2, 2] + num_channels_per_stage = { + "x1_0": [64, 256, 384, 512], + "x0_75": [48, 192, 288, 384], + "x0_5": [32, 128, 192, 256], + "x0_25": [16, 64, 96, 128]}[depth] + model = OSNet([OSBlock, OSBlock, OSBlock], num_blocks_per_stage, num_channels_per_stage, + bn_norm, IN=with_ibn) + + if pretrain: + # Load pretrained weights from the given path when one is specified + if pretrain_path: + try: + state_dict = torch.load(pretrain_path, map_location=torch.device('cpu')) + logger.info(f"Loading pretrained model from {pretrain_path}") + model.load_state_dict(state_dict) + except FileNotFoundError as e: + logger.info(f'{pretrain_path} is not found! Please check this path.') + raise e + except KeyError as e: + logger.info("State dict keys error! Please check the state dict.") + raise e + else: + if with_ibn: + pretrain_key = "osnet_ibn_" + depth + else: + pretrain_key = "osnet_" + depth + + init_pretrained_weights(model, pretrain_key) + return model diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/__init__.py b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e8cf80fe8825c1716fba339d5d6f9bfff68b16d5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/__init__.py @@ -0,0 +1,4 @@ + + +from .regnet import build_regnet_backbone +from .effnet import build_effnet_backbone diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/config.py b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/config.py new file mode 100644 index 0000000000000000000000000000000000000000..96f764930f3570f645c85f0a57dd1104e61e4045 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/config.py @@ -0,0 +1,420 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Configuration file (powered by YACS).""" + +import argparse +import os +import sys + +from yacs.config import CfgNode as CfgNode + + +# Global config object +_C = CfgNode() + +# Example usage: +# from core.config import cfg +cfg = _C + + +# ------------------------------------------------------------------------------------ # +# Model options +# ------------------------------------------------------------------------------------ # +_C.MODEL = CfgNode() + +# Model type +_C.MODEL.TYPE = "" + +# Number of weight layers +_C.MODEL.DEPTH = 0 + +# Number of classes +_C.MODEL.NUM_CLASSES = 10 + +# Loss function (see pycls/models/loss.py for options) +_C.MODEL.LOSS_FUN = "cross_entropy" + + +# ------------------------------------------------------------------------------------ # +# ResNet options +# ------------------------------------------------------------------------------------ # +_C.RESNET = CfgNode() + +# Transformation function (see pycls/models/resnet.py for options) +_C.RESNET.TRANS_FUN = "basic_transform" + +# Number of groups to use (1 -> ResNet; > 1 -> ResNeXt) +_C.RESNET.NUM_GROUPS = 1 + +# Width of each group (64 -> ResNet; 4 -> ResNeXt) +_C.RESNET.WIDTH_PER_GROUP = 64 + +# Apply stride to 1x1 conv (True -> MSRA; False -> fb.torch) +_C.RESNET.STRIDE_1X1 = True + + +# ------------------------------------------------------------------------------------ # +# AnyNet options +# ------------------------------------------------------------------------------------ # +_C.ANYNET = CfgNode() + +# Stem type +_C.ANYNET.STEM_TYPE = "simple_stem_in" + +# Stem width +_C.ANYNET.STEM_W = 32 + +# Block type +_C.ANYNET.BLOCK_TYPE = "res_bottleneck_block" + +# Depth for each stage (number of blocks in the stage) +_C.ANYNET.DEPTHS = [] + +# Width for each stage (width of each block in the stage) +_C.ANYNET.WIDTHS = [] + +# Strides for each stage (applies to the first block of each stage) +_C.ANYNET.STRIDES = [] + +# Bottleneck multipliers for each stage (applies to bottleneck block) +_C.ANYNET.BOT_MULS = [] + +# Group widths for each stage (applies to bottleneck block) +_C.ANYNET.GROUP_WS = [] + +# Whether SE is enabled for res_bottleneck_block +_C.ANYNET.SE_ON = False + +# SE ratio +_C.ANYNET.SE_R = 0.25 + + +# ------------------------------------------------------------------------------------ # +# RegNet options +# ------------------------------------------------------------------------------------ # +_C.REGNET = CfgNode() + +# Stem type +_C.REGNET.STEM_TYPE = "simple_stem_in" + +# Stem width +_C.REGNET.STEM_W = 32 + +# Block type +_C.REGNET.BLOCK_TYPE = "res_bottleneck_block" + +# Stride of each stage +_C.REGNET.STRIDE = 2 + +# Squeeze-and-Excitation (RegNetY) +_C.REGNET.SE_ON = False +_C.REGNET.SE_R = 0.25 + +# Depth +_C.REGNET.DEPTH = 10 + +# Initial width +_C.REGNET.W0 = 32 + +# Slope +_C.REGNET.WA = 5.0 + +# Quantization +_C.REGNET.WM = 2.5 + +# Group width +_C.REGNET.GROUP_W = 16 + +# Bottleneck multiplier (bm = 1 / b from the paper) +_C.REGNET.BOT_MUL = 1.0 + + +# ------------------------------------------------------------------------------------ # +# EfficientNet options +# ------------------------------------------------------------------------------------ # +_C.EN = CfgNode() + +# Stem width +_C.EN.STEM_W = 32 + +# Depth for each stage (number of blocks in the stage) +_C.EN.DEPTHS = [] + +# Width for each stage (width of each block in the stage) +_C.EN.WIDTHS 
= [] + +# Expansion ratios for MBConv blocks in each stage +_C.EN.EXP_RATIOS = [] + +# Squeeze-and-Excitation (SE) ratio +_C.EN.SE_R = 0.25 + +# Strides for each stage (applies to the first block of each stage) +_C.EN.STRIDES = [] + +# Kernel sizes for each stage +_C.EN.KERNELS = [] + +# Head width +_C.EN.HEAD_W = 1280 + +# Drop connect ratio +_C.EN.DC_RATIO = 0.0 + +# Dropout ratio +_C.EN.DROPOUT_RATIO = 0.0 + + +# ------------------------------------------------------------------------------------ # +# Batch norm options +# ------------------------------------------------------------------------------------ # +_C.BN = CfgNode() + +# BN epsilon +_C.BN.EPS = 1e-5 + +# BN momentum (BN momentum in PyTorch = 1 - BN momentum in Caffe2) +_C.BN.MOM = 0.1 + +# Precise BN stats +_C.BN.USE_PRECISE_STATS = True +_C.BN.NUM_SAMPLES_PRECISE = 8192 + +# Initialize the gamma of the final BN of each block to zero +_C.BN.ZERO_INIT_FINAL_GAMMA = False + +# Use a different weight decay for BN layers +_C.BN.USE_CUSTOM_WEIGHT_DECAY = False +_C.BN.CUSTOM_WEIGHT_DECAY = 0.0 + + +# ------------------------------------------------------------------------------------ # +# Optimizer options +# ------------------------------------------------------------------------------------ # +_C.OPTIM = CfgNode() + +# Base learning rate +_C.OPTIM.BASE_LR = 0.1 + +# Learning rate policy select from {'cos', 'exp', 'steps'} +_C.OPTIM.LR_POLICY = "cos" + +# Exponential decay factor +_C.OPTIM.GAMMA = 0.1 + +# Steps for 'steps' policy (in epochs) +_C.OPTIM.STEPS = [] + +# Learning rate multiplier for 'steps' policy +_C.OPTIM.LR_MULT = 0.1 + +# Maximal number of epochs +_C.OPTIM.MAX_EPOCH = 200 + +# Momentum +_C.OPTIM.MOMENTUM = 0.9 + +# Momentum dampening +_C.OPTIM.DAMPENING = 0.0 + +# Nesterov momentum +_C.OPTIM.NESTEROV = True + +# L2 regularization +_C.OPTIM.WEIGHT_DECAY = 5e-4 + +# Start the warm up from OPTIM.BASE_LR * OPTIM.WARMUP_FACTOR +_C.OPTIM.WARMUP_FACTOR = 0.1 + +# Gradually warm up the OPTIM.BASE_LR over this number of epochs +_C.OPTIM.WARMUP_EPOCHS = 0 + + +# ------------------------------------------------------------------------------------ # +# Training options +# ------------------------------------------------------------------------------------ # +_C.TRAIN = CfgNode() + +# Dataset and split +_C.TRAIN.DATASET = "" +_C.TRAIN.SPLIT = "train" + +# Total mini-batch size +_C.TRAIN.BATCH_SIZE = 128 + +# Image size +_C.TRAIN.IM_SIZE = 224 + +# Evaluate model on test data every eval period epochs +_C.TRAIN.EVAL_PERIOD = 1 + +# Save model checkpoint every checkpoint period epochs +_C.TRAIN.CHECKPOINT_PERIOD = 1 + +# Resume training from the latest checkpoint in the output directory +_C.TRAIN.AUTO_RESUME = True + +# Weights to start training from +_C.TRAIN.WEIGHTS = "" + + +# ------------------------------------------------------------------------------------ # +# Testing options +# ------------------------------------------------------------------------------------ # +_C.TEST = CfgNode() + +# Dataset and split +_C.TEST.DATASET = "" +_C.TEST.SPLIT = "val" + +# Total mini-batch size +_C.TEST.BATCH_SIZE = 200 + +# Image size +_C.TEST.IM_SIZE = 256 + +# Weights to use for testing +_C.TEST.WEIGHTS = "" + + +# ------------------------------------------------------------------------------------ # +# Common train/test data loader options +# ------------------------------------------------------------------------------------ # +_C.DATA_LOADER = CfgNode() + +# Number of data loader workers per process +_C.DATA_LOADER.NUM_WORKERS = 8 
+ +# Load data to pinned host memory +_C.DATA_LOADER.PIN_MEMORY = True + + +# ------------------------------------------------------------------------------------ # +# Memory options +# ------------------------------------------------------------------------------------ # +_C.MEM = CfgNode() + +# Perform ReLU inplace +_C.MEM.RELU_INPLACE = True + + +# ------------------------------------------------------------------------------------ # +# CUDNN options +# ------------------------------------------------------------------------------------ # +_C.CUDNN = CfgNode() + +# Perform benchmarking to select the fastest CUDNN algorithms to use +# Note that this may increase the memory usage and will likely not result +# in overall speedups when variable size inputs are used (e.g. COCO training) +_C.CUDNN.BENCHMARK = True + + +# ------------------------------------------------------------------------------------ # +# Precise timing options +# ------------------------------------------------------------------------------------ # +_C.PREC_TIME = CfgNode() + +# Number of iterations to warm up the caches +_C.PREC_TIME.WARMUP_ITER = 3 + +# Number of iterations to compute avg time +_C.PREC_TIME.NUM_ITER = 30 + + +# ------------------------------------------------------------------------------------ # +# Misc options +# ------------------------------------------------------------------------------------ # + +# Number of GPUs to use (applies to both training and testing) +_C.NUM_GPUS = 1 + +# Output directory +_C.OUT_DIR = "/tmp" + +# Config destination (in OUT_DIR) +_C.CFG_DEST = "config.yaml" + +# Note that non-determinism may still be present due to non-deterministic +# operator implementations in GPU operator libraries +_C.RNG_SEED = 1 + +# Log destination ('stdout' or 'file') +_C.LOG_DEST = "stdout" + +# Log period in iters +_C.LOG_PERIOD = 10 + +# Distributed backend +_C.DIST_BACKEND = "nccl" + +# Hostname and port range for multi-process groups (actual port selected randomly) +_C.HOST = "localhost" +_C.PORT_RANGE = [10000, 65000] + +# Models weights referred to by URL are downloaded to this local cache +_C.DOWNLOAD_CACHE = "/tmp/pycls-download-cache" + + +# ------------------------------------------------------------------------------------ # +# Deprecated keys +# ------------------------------------------------------------------------------------ # + +_C.register_deprecated_key("PREC_TIME.BATCH_SIZE") +_C.register_deprecated_key("PREC_TIME.ENABLED") +_C.register_deprecated_key("PORT") + + +def assert_and_infer_cfg(cache_urls=True): + """Checks config values invariants.""" + err_str = "The first lr step must start at 0" + assert not _C.OPTIM.STEPS or _C.OPTIM.STEPS[0] == 0, err_str + data_splits = ["train", "val", "test"] + err_str = "Data split '{}' not supported" + assert _C.TRAIN.SPLIT in data_splits, err_str.format(_C.TRAIN.SPLIT) + assert _C.TEST.SPLIT in data_splits, err_str.format(_C.TEST.SPLIT) + err_str = "Mini-batch size should be a multiple of NUM_GPUS." 
+ assert _C.TRAIN.BATCH_SIZE % _C.NUM_GPUS == 0, err_str + assert _C.TEST.BATCH_SIZE % _C.NUM_GPUS == 0, err_str + err_str = "Log destination '{}' not supported" + assert _C.LOG_DEST in ["stdout", "file"], err_str.format(_C.LOG_DEST) + if cache_urls: + cache_cfg_urls() + + +def cache_cfg_urls(): + """Download URLs in config, cache them, and rewrite cfg to use cached file.""" + _C.TRAIN.WEIGHTS = cache_url(_C.TRAIN.WEIGHTS, _C.DOWNLOAD_CACHE) + _C.TEST.WEIGHTS = cache_url(_C.TEST.WEIGHTS, _C.DOWNLOAD_CACHE) + + +def dump_cfg(): + """Dumps the config to the output directory.""" + cfg_file = os.path.join(_C.OUT_DIR, _C.CFG_DEST) + with open(cfg_file, "w") as f: + _C.dump(stream=f) + + +def load_cfg(out_dir, cfg_dest="config.yaml"): + """Loads config from specified output directory.""" + cfg_file = os.path.join(out_dir, cfg_dest) + _C.merge_from_file(cfg_file) + + +def load_cfg_fom_args(description="Config file options."): + """Load config from command line arguments and set any specified options.""" + parser = argparse.ArgumentParser(description=description) + help_s = "Config file location" + parser.add_argument("--cfg", dest="cfg_file", help=help_s, required=True, type=str) + help_s = "See pycls/core/config.py for all options" + parser.add_argument("opts", help=help_s, default=None, nargs=argparse.REMAINDER) + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + args = parser.parse_args() + _C.merge_from_file(args.cfg_file) + _C.merge_from_list(args.opts) \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet.py b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet.py new file mode 100644 index 0000000000000000000000000000000000000000..b89aede21129b37bdf2cdc9220f52ceb367b5729 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet.py @@ -0,0 +1,281 @@ +# !/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""EfficientNet models.""" + +import logging + +import torch +import torch.nn as nn + +from fastreid.layers import * +from fastreid.modeling.backbones.build import BACKBONE_REGISTRY +from fastreid.utils import comm +from fastreid.utils.checkpoint import get_missing_parameters_message, get_unexpected_parameters_message +from .config import cfg as effnet_cfg +from .regnet import drop_connect, init_weights + +logger = logging.getLogger(__name__) +model_urls = { + 'b0': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161305613/EN-B0_dds_8gpu.pyth', + 'b1': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161304979/EN-B1_dds_8gpu.pyth', + 'b2': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161305015/EN-B2_dds_8gpu.pyth', + 'b3': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161304979/EN-B3_dds_8gpu.pyth', + 'b4': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161305098/EN-B4_dds_8gpu.pyth', + 'b5': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161304979/EN-B5_dds_8gpu.pyth', + 'b6': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161304979/EN-B6_dds_8gpu.pyth', + 'b7': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161304979/EN-B7_dds_8gpu.pyth', +} + + +class EffHead(nn.Module): + """EfficientNet head: 1x1, BN, Swish, AvgPool, Dropout, FC.""" + + def __init__(self, w_in, w_out, bn_norm): + super(EffHead, self).__init__() + self.conv = nn.Conv2d(w_in, w_out, 1, stride=1, padding=0, bias=False) + self.conv_bn = get_norm(bn_norm, w_out) + self.conv_swish = Swish() + + def forward(self, x): + x = self.conv_swish(self.conv_bn(self.conv(x))) + return x + + +class Swish(nn.Module): + """Swish activation function: x * sigmoid(x).""" + + def __init__(self): + super(Swish, self).__init__() + + def forward(self, x): + return x * torch.sigmoid(x) + + +class SE(nn.Module): + """Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" + + def __init__(self, w_in, w_se): + super(SE, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d((1, 1)) + self.f_ex = nn.Sequential( + nn.Conv2d(w_in, w_se, 1, bias=True), + Swish(), + nn.Conv2d(w_se, w_in, 1, bias=True), + nn.Sigmoid(), + ) + + def forward(self, x): + return x * self.f_ex(self.avg_pool(x)) + + +class MBConv(nn.Module): + """Mobile inverted bottleneck block w/ SE (MBConv).""" + + def __init__(self, w_in, exp_r, kernel, stride, se_r, w_out, bn_norm): + # expansion, 3x3 dwise, BN, Swish, SE, 1x1, BN, skip_connection + super(MBConv, self).__init__() + self.exp = None + w_exp = int(w_in * exp_r) + if w_exp != w_in: + self.exp = nn.Conv2d(w_in, w_exp, 1, stride=1, padding=0, bias=False) + self.exp_bn = get_norm(bn_norm, w_exp) + self.exp_swish = Swish() + dwise_args = {"groups": w_exp, "padding": (kernel - 1) // 2, "bias": False} + self.dwise = nn.Conv2d(w_exp, w_exp, kernel, stride=stride, **dwise_args) + self.dwise_bn = get_norm(bn_norm, w_exp) + self.dwise_swish = Swish() + self.se = SE(w_exp, int(w_in * se_r)) + self.lin_proj = nn.Conv2d(w_exp, w_out, 1, stride=1, padding=0, bias=False) + self.lin_proj_bn = get_norm(bn_norm, w_out) + # Skip connection if in and out shapes are the same (MN-V2 style) + self.has_skip = stride == 1 and w_in == w_out + + def forward(self, x): + f_x = x + if self.exp: + f_x = self.exp_swish(self.exp_bn(self.exp(f_x))) + f_x = self.dwise_swish(self.dwise_bn(self.dwise(f_x))) + f_x = self.se(f_x) + f_x = self.lin_proj_bn(self.lin_proj(f_x)) + if self.has_skip: + if self.training and effnet_cfg.EN.DC_RATIO > 0.0: + f_x = drop_connect(f_x, effnet_cfg.EN.DC_RATIO) + 
f_x = x + f_x + return f_x + + +class EffStage(nn.Module): + """EfficientNet stage.""" + + def __init__(self, w_in, exp_r, kernel, stride, se_r, w_out, d, bn_norm): + super(EffStage, self).__init__() + for i in range(d): + b_stride = stride if i == 0 else 1 + b_w_in = w_in if i == 0 else w_out + name = "b{}".format(i + 1) + self.add_module(name, MBConv(b_w_in, exp_r, kernel, b_stride, se_r, w_out, bn_norm)) + + def forward(self, x): + for block in self.children(): + x = block(x) + return x + + +class StemIN(nn.Module): + """EfficientNet stem for ImageNet: 3x3, BN, Swish.""" + + def __init__(self, w_in, w_out, bn_norm): + super(StemIN, self).__init__() + self.conv = nn.Conv2d(w_in, w_out, 3, stride=2, padding=1, bias=False) + self.bn = get_norm(bn_norm, w_out) + self.swish = Swish() + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class EffNet(nn.Module): + """EfficientNet model.""" + + @staticmethod + def get_args(): + return { + "stem_w": effnet_cfg.EN.STEM_W, + "ds": effnet_cfg.EN.DEPTHS, + "ws": effnet_cfg.EN.WIDTHS, + "exp_rs": effnet_cfg.EN.EXP_RATIOS, + "se_r": effnet_cfg.EN.SE_R, + "ss": effnet_cfg.EN.STRIDES, + "ks": effnet_cfg.EN.KERNELS, + "head_w": effnet_cfg.EN.HEAD_W, + } + + def __init__(self, last_stride, bn_norm, **kwargs): + super(EffNet, self).__init__() + kwargs = self.get_args() if not kwargs else kwargs + self._construct(**kwargs, last_stride=last_stride, bn_norm=bn_norm) + self.apply(init_weights) + + def _construct(self, stem_w, ds, ws, exp_rs, se_r, ss, ks, head_w, last_stride, bn_norm): + stage_params = list(zip(ds, ws, exp_rs, ss, ks)) + self.stem = StemIN(3, stem_w, bn_norm) + prev_w = stem_w + for i, (d, w, exp_r, stride, kernel) in enumerate(stage_params): + name = "s{}".format(i + 1) + if i == 5: stride = last_stride + self.add_module(name, EffStage(prev_w, exp_r, kernel, stride, se_r, w, d, bn_norm)) + prev_w = w + self.head = EffHead(prev_w, head_w, bn_norm) + + def forward(self, x): + for module in self.children(): + x = module(x) + return x + + +def init_pretrained_weights(key): + """Initializes model with pretrained weights. + + Layers that don't match with pretrained layers in name or size are kept unchanged. + """ + import os + import errno + import gdown + + def _get_torch_home(): + ENV_TORCH_HOME = 'TORCH_HOME' + ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME' + DEFAULT_CACHE_DIR = '~/.cache' + torch_home = os.path.expanduser( + os.getenv( + ENV_TORCH_HOME, + os.path.join( + os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch' + ) + ) + ) + return torch_home + + torch_home = _get_torch_home() + model_dir = os.path.join(torch_home, 'checkpoints') + try: + os.makedirs(model_dir) + except OSError as e: + if e.errno == errno.EEXIST: + # Directory already exists, ignore. + pass + else: + # Unexpected OSError, re-raise. 
+            raise
+
+    filename = model_urls[key].split('/')[-1]
+
+    cached_file = os.path.join(model_dir, filename)
+
+    if not os.path.exists(cached_file):
+        if comm.is_main_process():
+            gdown.download(model_urls[key], cached_file, quiet=False)
+
+    comm.synchronize()
+
+    logger.info(f"Loading pretrained model from {cached_file}")
+    state_dict = torch.load(cached_file, map_location=torch.device("cpu"))["model_state"]
+
+    return state_dict
+
+
+@BACKBONE_REGISTRY.register()
+def build_effnet_backbone(cfg):
+    # fmt: off
+    pretrain = cfg.MODEL.BACKBONE.PRETRAIN
+    pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH
+    last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE
+    bn_norm = cfg.MODEL.BACKBONE.NORM
+    depth = cfg.MODEL.BACKBONE.DEPTH
+    # fmt: on
+
+    cfg_files = {
+        'b0': 'fastreid/modeling/backbones/regnet/effnet/EN-B0_dds_8gpu.yaml',
+        'b1': 'fastreid/modeling/backbones/regnet/effnet/EN-B1_dds_8gpu.yaml',
+        'b2': 'fastreid/modeling/backbones/regnet/effnet/EN-B2_dds_8gpu.yaml',
+        'b3': 'fastreid/modeling/backbones/regnet/effnet/EN-B3_dds_8gpu.yaml',
+        'b4': 'fastreid/modeling/backbones/regnet/effnet/EN-B4_dds_8gpu.yaml',
+        'b5': 'fastreid/modeling/backbones/regnet/effnet/EN-B5_dds_8gpu.yaml',
+    }[depth]
+
+    effnet_cfg.merge_from_file(cfg_files)
+    model = EffNet(last_stride, bn_norm)
+
+    if pretrain:
+        # Load weights from the given pretrain path if it is specified
+        if pretrain_path:
+            try:
+                state_dict = torch.load(pretrain_path, map_location=torch.device('cpu'))["model_state"]
+                logger.info(f"Loading pretrained model from {pretrain_path}")
+            except FileNotFoundError as e:
+                logger.info(f'{pretrain_path} is not found! Please check this path.')
+                raise e
+            except KeyError as e:
+                logger.info("State dict keys error! Please check the state dict.")
+                raise e
+        else:
+            key = depth
+            state_dict = init_pretrained_weights(key)
+
+        incompatible = model.load_state_dict(state_dict, strict=False)
+        if incompatible.missing_keys:
+            logger.info(
+                get_missing_parameters_message(incompatible.missing_keys)
+            )
+        if incompatible.unexpected_keys:
+            logger.info(
+                get_unexpected_parameters_message(incompatible.unexpected_keys)
+            )
+    return model
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B0_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B0_dds_8gpu.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2db2de58d4fd08069b9bc575b0f474a7966c4612
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B0_dds_8gpu.yaml
@@ -0,0 +1,27 @@
+MODEL:
+  TYPE: effnet
+  NUM_CLASSES: 1000
+EN:
+  STEM_W: 32
+  STRIDES: [1, 2, 2, 2, 1, 2, 1]
+  DEPTHS: [1, 2, 2, 3, 3, 4, 1]
+  WIDTHS: [16, 24, 40, 80, 112, 192, 320]
+  EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6]
+  KERNELS: [3, 3, 5, 3, 5, 5, 3]
+  HEAD_W: 1280
+OPTIM:
+  LR_POLICY: cos
+  BASE_LR: 0.4
+  MAX_EPOCH: 100
+  MOMENTUM: 0.9
+  WEIGHT_DECAY: 1e-5
+TRAIN:
+  DATASET: imagenet
+  IM_SIZE: 224
+  BATCH_SIZE: 256
+TEST:
+  DATASET: imagenet
+  IM_SIZE: 256
+  BATCH_SIZE: 200
+NUM_GPUS: 8
+OUT_DIR: .
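As a quick sanity check of the pieces above, here is a minimal sketch (not part of the diff) that builds `EffNet` directly from the EN-B0 values in this yaml. The module import path and the `SE_R` value of 0.25 (the yaml does not set it; 0.25 is the usual EfficientNet default) are assumptions:

```python
import torch

# Assumed import path; effnet.py sits next to regnet.py in this diff.
from fastreid.modeling.backbones.regnet.effnet import EffNet

en_b0 = dict(
    stem_w=32,                            # EN.STEM_W
    ds=[1, 2, 2, 3, 3, 4, 1],             # EN.DEPTHS
    ws=[16, 24, 40, 80, 112, 192, 320],   # EN.WIDTHS
    exp_rs=[1, 6, 6, 6, 6, 6, 6],         # EN.EXP_RATIOS
    se_r=0.25,                            # assumed default; not set in the yaml
    ss=[1, 2, 2, 2, 1, 2, 1],             # EN.STRIDES
    ks=[3, 3, 5, 3, 5, 5, 3],             # EN.KERNELS
    head_w=1280,                          # EN.HEAD_W
)
model = EffNet(last_stride=1, bn_norm="BN", **en_b0).eval()

with torch.no_grad():
    feat = model(torch.randn(1, 3, 256, 128))  # a typical re-id crop
# Stem stride 2 times the stage strides gives overall stride 16 when
# last_stride=1, so this should print torch.Size([1, 1280, 16, 8]).
print(feat.shape)
```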
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B1_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B1_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1f8bf7307e8d5ed34381955a415589ce7ce46598 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B1_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: effnet + NUM_CLASSES: 1000 +EN: + STEM_W: 32 + STRIDES: [1, 2, 2, 2, 1, 2, 1] + DEPTHS: [2, 3, 3, 4, 4, 5, 2] + WIDTHS: [16, 24, 40, 80, 112, 192, 320] + EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6] + KERNELS: [3, 3, 5, 3, 5, 5, 3] + HEAD_W: 1280 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-5 +TRAIN: + DATASET: imagenet + IM_SIZE: 240 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 274 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B2_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B2_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3d6d5fdc47f0199795126b6495417639ddb642a5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B2_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: effnet + NUM_CLASSES: 1000 +EN: + STEM_W: 32 + STRIDES: [1, 2, 2, 2, 1, 2, 1] + DEPTHS: [2, 3, 3, 4, 4, 5, 2] + WIDTHS: [16, 24, 48, 88, 120, 208, 352] + EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6] + KERNELS: [3, 3, 5, 3, 5, 5, 3] + HEAD_W: 1408 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-5 +TRAIN: + DATASET: imagenet + IM_SIZE: 260 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 298 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B3_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B3_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3cd96cec04f989e8b08821c15a1d3c036ca4ecb7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B3_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: effnet + NUM_CLASSES: 1000 +EN: + STEM_W: 40 + STRIDES: [1, 2, 2, 2, 1, 2, 1] + DEPTHS: [2, 3, 3, 5, 5, 6, 2] + WIDTHS: [24, 32, 48, 96, 136, 232, 384] + EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6] + KERNELS: [3, 3, 5, 3, 5, 5, 3] + HEAD_W: 1536 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-5 +TRAIN: + DATASET: imagenet + IM_SIZE: 300 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 342 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . 
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B4_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B4_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..576a177e33c14cca82470164a6548bd9ef3921a7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B4_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: effnet + NUM_CLASSES: 1000 +EN: + STEM_W: 48 + STRIDES: [1, 2, 2, 2, 1, 2, 1] + DEPTHS: [2, 4, 4, 6, 6, 8, 2] + WIDTHS: [24, 32, 56, 112, 160, 272, 448] + EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6] + KERNELS: [3, 3, 5, 3, 5, 5, 3] + HEAD_W: 1792 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.2 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-5 +TRAIN: + DATASET: imagenet + IM_SIZE: 380 + BATCH_SIZE: 128 +TEST: + DATASET: imagenet + IM_SIZE: 434 + BATCH_SIZE: 104 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B5_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B5_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e0cc03178c277967b663ceb975969745c18ca074 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/effnet/EN-B5_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: effnet + NUM_CLASSES: 1000 +EN: + STEM_W: 48 + STRIDES: [1, 2, 2, 2, 1, 2, 1] + DEPTHS: [3, 5, 5, 7, 7, 9, 3] + WIDTHS: [24, 40, 64, 128, 176, 304, 512] + EXP_RATIOS: [1, 6, 6, 6, 6, 6, 6] + KERNELS: [3, 3, 5, 3, 5, 5, 3] + HEAD_W: 2048 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.1 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-5 +TRAIN: + DATASET: imagenet + IM_SIZE: 456 + BATCH_SIZE: 64 +TEST: + DATASET: imagenet + IM_SIZE: 522 + BATCH_SIZE: 48 +NUM_GPUS: 8 +OUT_DIR: . 
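`MBConv` above gates its residual branch with `drop_connect` during training (per-sample stochastic depth). The function itself is defined in regnet.py, which follows; a small sketch of its behavior, assuming the fast-reid package from this diff is importable:

```python
import torch

from fastreid.modeling.backbones.regnet.regnet import drop_connect

x = torch.ones(4, 1, 1, 1)
out = drop_connect(x.clone(), drop_ratio=0.5)  # note: mutates its input in place
# Each sample survives with probability keep_ratio = 0.5; survivors are
# rescaled by 1 / keep_ratio, so every entry here is either 0.0 or 2.0.
print(out.view(-1).tolist())
```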
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnet.py b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..a271ea06a2923be7fd1d9e7ea5ad81ab89182556
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnet.py
@@ -0,0 +1,592 @@
+import logging
+import math
+
+import numpy as np
+import torch
+import torch.nn as nn
+
+from fastreid.layers import get_norm
+from fastreid.utils import comm
+from fastreid.utils.checkpoint import get_missing_parameters_message, get_unexpected_parameters_message
+from .config import cfg as regnet_cfg
+from ..build import BACKBONE_REGISTRY
+
+logger = logging.getLogger(__name__)
+# NOTE: the '800x' entry points at the RegNetX-200MF checkpoint; verify this
+# URL before relying on '800x' pretrained weights.
+model_urls = {
+    '800x': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160905981/RegNetX-200MF_dds_8gpu.pyth',
+    '800y': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906567/RegNetY-800MF_dds_8gpu.pyth',
+    '1600x': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160990626/RegNetX-1.6GF_dds_8gpu.pyth',
+    '1600y': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906681/RegNetY-1.6GF_dds_8gpu.pyth',
+    '3200x': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906139/RegNetX-3.2GF_dds_8gpu.pyth',
+    '3200y': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906834/RegNetY-3.2GF_dds_8gpu.pyth',
+    '4000x': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906383/RegNetX-4.0GF_dds_8gpu.pyth',
+    '4000y': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906838/RegNetY-4.0GF_dds_8gpu.pyth',
+    '6400x': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/161116590/RegNetX-6.4GF_dds_8gpu.pyth',
+    '6400y': 'https://dl.fbaipublicfiles.com/pycls/dds_baselines/160907112/RegNetY-6.4GF_dds_8gpu.pyth',
+}
+
+
+def init_weights(m):
+    """Performs ResNet-style weight initialization."""
+    if isinstance(m, nn.Conv2d):
+        # Note that there is no bias due to BN
+        fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+        m.weight.data.normal_(mean=0.0, std=math.sqrt(2.0 / fan_out))
+    elif isinstance(m, nn.BatchNorm2d):
+        zero_init_gamma = (
+            hasattr(m, "final_bn") and m.final_bn and regnet_cfg.BN.ZERO_INIT_FINAL_GAMMA
+        )
+        m.weight.data.fill_(0.0 if zero_init_gamma else 1.0)
+        m.bias.data.zero_()
+    elif isinstance(m, nn.Linear):
+        m.weight.data.normal_(mean=0.0, std=0.01)
+        m.bias.data.zero_()
+
+
+def get_stem_fun(stem_type):
+    """Retrieves the stem function by name."""
+    stem_funs = {
+        "res_stem_cifar": ResStemCifar,
+        "res_stem_in": ResStemIN,
+        "simple_stem_in": SimpleStemIN,
+    }
+    assert stem_type in stem_funs.keys(), "Stem type '{}' not supported".format(
+        stem_type
+    )
+    return stem_funs[stem_type]
+
+
+def get_block_fun(block_type):
+    """Retrieves the block function by name."""
+    block_funs = {
+        "vanilla_block": VanillaBlock,
+        "res_basic_block": ResBasicBlock,
+        "res_bottleneck_block": ResBottleneckBlock,
+    }
+    assert block_type in block_funs.keys(), "Block type '{}' not supported".format(
+        block_type
+    )
+    return block_funs[block_type]
+
+
+def drop_connect(x, drop_ratio):
+    """Drop connect (adapted from DARTS)."""
+    keep_ratio = 1.0 - drop_ratio
+    mask = torch.empty([x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device)
+    mask.bernoulli_(keep_ratio)
+    x.div_(keep_ratio)
+    x.mul_(mask)
+    return x
+
+
+class AnyHead(nn.Module):
+    """AnyNet head."""
+
+    def __init__(self, w_in, nc):
+        super(AnyHead, self).__init__()
+        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
+        self.fc = nn.Linear(w_in, nc, bias=True)
+
+    def forward(self, x):
+        x = self.avg_pool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+        return x
+
+
+class VanillaBlock(nn.Module):
+    """Vanilla block: [3x3 conv, BN, ReLU] x2"""
+
+    def __init__(self, w_in, w_out, stride, bn_norm, bm=None, gw=None, se_r=None):
+        assert (
+            bm is None and gw is None and se_r is None
+        ), "Vanilla block does not support bm, gw, and se_r options"
+        super(VanillaBlock, self).__init__()
+        self.construct(w_in, w_out, stride, bn_norm)
+
+    def construct(self, w_in, w_out, stride, bn_norm):
+        # 3x3, BN, ReLU
+        self.a = nn.Conv2d(
+            w_in, w_out, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.a_bn = get_norm(bn_norm, w_out)
+        self.a_relu = nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE)
+        # 3x3, BN, ReLU
+        self.b = nn.Conv2d(w_out, w_out, kernel_size=3, stride=1, padding=1, bias=False)
+        self.b_bn = get_norm(bn_norm, w_out)
+        self.b_relu = nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE)
+
+    def forward(self, x):
+        for layer in self.children():
+            x = layer(x)
+        return x
+
+
+class BasicTransform(nn.Module):
+    """Basic transformation: 3x3 conv, BN, ReLU; 3x3 conv, BN"""
+
+    def __init__(self, w_in, w_out, stride, bn_norm):
+        super(BasicTransform, self).__init__()
+        self.construct(w_in, w_out, stride, bn_norm)
+
+    def construct(self, w_in, w_out, stride, bn_norm):
+        # 3x3, BN, ReLU
+        self.a = nn.Conv2d(
+            w_in, w_out, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.a_bn = get_norm(bn_norm, w_out)
+        self.a_relu = nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE)
+        # 3x3, BN
+        self.b = nn.Conv2d(w_out, w_out, kernel_size=3, stride=1, padding=1, bias=False)
+        self.b_bn = get_norm(bn_norm, w_out)
+        self.b_bn.final_bn = True
+
+    def forward(self, x):
+        for layer in self.children():
+            x = layer(x)
+        return x
+
+
+class ResBasicBlock(nn.Module):
+    """Residual basic block: x + F(x), F = basic transform"""
+
+    def __init__(self, w_in, w_out, stride, bn_norm, bm=None, gw=None, se_r=None):
+        assert (
+            bm is None and gw is None and se_r is None
+        ), "Basic transform does not support bm, gw, and se_r options"
+        super(ResBasicBlock, self).__init__()
+        self.construct(w_in, w_out, stride, bn_norm)
+
+    def _add_skip_proj(self, w_in, w_out, stride, bn_norm):
+        self.proj = nn.Conv2d(
+            w_in, w_out, kernel_size=1, stride=stride, padding=0, bias=False
+        )
+        self.bn = get_norm(bn_norm, w_out)
+
+    def construct(self, w_in, w_out, stride, bn_norm):
+        # Use skip connection with projection if shape changes
+        self.proj_block = (w_in != w_out) or (stride != 1)
+        if self.proj_block:
+            self._add_skip_proj(w_in, w_out, stride, bn_norm)
+        self.f = BasicTransform(w_in, w_out, stride, bn_norm)
+        self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE)
+
+    def forward(self, x):
+        if self.proj_block:
+            x = self.bn(self.proj(x)) + self.f(x)
+        else:
+            x = x + self.f(x)
+        x = self.relu(x)
+        return x
+
+
+class SE(nn.Module):
+    """Squeeze-and-Excitation (SE) block"""
+
+    def __init__(self, w_in, w_se):
+        super(SE, self).__init__()
+        self.construct(w_in, w_se)
+
+    def construct(self, w_in, w_se):
+        # AvgPool
+        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
+        # FC, Activation, FC, Sigmoid
+        self.f_ex = nn.Sequential(
+            nn.Conv2d(w_in, w_se, kernel_size=1, bias=True),
+            nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE),
+            nn.Conv2d(w_se, w_in, kernel_size=1, bias=True),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x):
+        return x * self.f_ex(self.avg_pool(x))
+
+
+class BottleneckTransform(nn.Module):
+    """Bottleneck transformation: 1x1, 3x3, 1x1"""
+
+    def __init__(self, w_in, w_out, stride, bn_norm, bm, gw, se_r):
+ super(BottleneckTransform, self).__init__() + self.construct(w_in, w_out, stride, bn_norm, bm, gw, se_r) + + def construct(self, w_in, w_out, stride, bn_norm, bm, gw, se_r): + # Compute the bottleneck width + w_b = int(round(w_out * bm)) + # Compute the number of groups + num_gs = w_b // gw + # 1x1, BN, ReLU + self.a = nn.Conv2d(w_in, w_b, kernel_size=1, stride=1, padding=0, bias=False) + self.a_bn = get_norm(bn_norm, w_b) + self.a_relu = nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE) + # 3x3, BN, ReLU + self.b = nn.Conv2d( + w_b, w_b, kernel_size=3, stride=stride, padding=1, groups=num_gs, bias=False + ) + self.b_bn = get_norm(bn_norm, w_b) + self.b_relu = nn.ReLU(inplace=regnet_cfg.MEM.RELU_INPLACE) + # Squeeze-and-Excitation (SE) + if se_r: + w_se = int(round(w_in * se_r)) + self.se = SE(w_b, w_se) + # 1x1, BN + self.c = nn.Conv2d(w_b, w_out, kernel_size=1, stride=1, padding=0, bias=False) + self.c_bn = get_norm(bn_norm, w_out) + self.c_bn.final_bn = True + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class ResBottleneckBlock(nn.Module): + """Residual bottleneck block: x + F(x), F = bottleneck transform""" + + def __init__(self, w_in, w_out, stride, bn_norm, bm=1.0, gw=1, se_r=None): + super(ResBottleneckBlock, self).__init__() + self.construct(w_in, w_out, stride, bn_norm, bm, gw, se_r) + + def _add_skip_proj(self, w_in, w_out, stride, bn_norm): + self.proj = nn.Conv2d( + w_in, w_out, kernel_size=1, stride=stride, padding=0, bias=False + ) + self.bn = get_norm(bn_norm, w_out) + + def construct(self, w_in, w_out, stride, bn_norm, bm, gw, se_r): + # Use skip connection with projection if shape changes + self.proj_block = (w_in != w_out) or (stride != 1) + if self.proj_block: + self._add_skip_proj(w_in, w_out, stride, bn_norm) + self.f = BottleneckTransform(w_in, w_out, stride, bn_norm, bm, gw, se_r) + self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE) + + def forward(self, x): + if self.proj_block: + x = self.bn(self.proj(x)) + self.f(x) + else: + x = x + self.f(x) + x = self.relu(x) + return x + + +class ResStemCifar(nn.Module): + """ResNet stem for CIFAR.""" + + def __init__(self, w_in, w_out, bn_norm): + super(ResStemCifar, self).__init__() + self.construct(w_in, w_out, bn_norm) + + def construct(self, w_in, w_out, bn_norm): + # 3x3, BN, ReLU + self.conv = nn.Conv2d( + w_in, w_out, kernel_size=3, stride=1, padding=1, bias=False + ) + self.bn = get_norm(bn_norm, w_out) + self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE) + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class ResStemIN(nn.Module): + """ResNet stem for ImageNet.""" + + def __init__(self, w_in, w_out, bn_norm): + super(ResStemIN, self).__init__() + self.construct(w_in, w_out, bn_norm) + + def construct(self, w_in, w_out, bn_norm): + # 7x7, BN, ReLU, maxpool + self.conv = nn.Conv2d( + w_in, w_out, kernel_size=7, stride=2, padding=3, bias=False + ) + self.bn = get_norm(bn_norm, w_out) + self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE) + self.pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class SimpleStemIN(nn.Module): + """Simple stem for ImageNet.""" + + def __init__(self, in_w, out_w, bn_norm): + super(SimpleStemIN, self).__init__() + self.construct(in_w, out_w, bn_norm) + + def construct(self, in_w, out_w, bn_norm): + # 3x3, BN, ReLU + self.conv = nn.Conv2d( + in_w, out_w, kernel_size=3, stride=2, padding=1, bias=False + ) + self.bn = 
get_norm(bn_norm, out_w) + self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE) + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class AnyStage(nn.Module): + """AnyNet stage (sequence of blocks w/ the same output shape).""" + + def __init__(self, w_in, w_out, stride, bn_norm, d, block_fun, bm, gw, se_r): + super(AnyStage, self).__init__() + self.construct(w_in, w_out, stride, bn_norm, d, block_fun, bm, gw, se_r) + + def construct(self, w_in, w_out, stride, bn_norm, d, block_fun, bm, gw, se_r): + # Construct the blocks + for i in range(d): + # Stride and w_in apply to the first block of the stage + b_stride = stride if i == 0 else 1 + b_w_in = w_in if i == 0 else w_out + # Construct the block + self.add_module( + "b{}".format(i + 1), block_fun(b_w_in, w_out, b_stride, bn_norm, bm, gw, se_r) + ) + + def forward(self, x): + for block in self.children(): + x = block(x) + return x + + +class AnyNet(nn.Module): + """AnyNet model.""" + + def __init__(self, **kwargs): + super(AnyNet, self).__init__() + if kwargs: + self.construct( + stem_type=kwargs["stem_type"], + stem_w=kwargs["stem_w"], + block_type=kwargs["block_type"], + ds=kwargs["ds"], + ws=kwargs["ws"], + ss=kwargs["ss"], + bn_norm=kwargs["bn_norm"], + bms=kwargs["bms"], + gws=kwargs["gws"], + se_r=kwargs["se_r"], + ) + else: + self.construct( + stem_type=regnet_cfg.ANYNET.STEM_TYPE, + stem_w=regnet_cfg.ANYNET.STEM_W, + block_type=regnet_cfg.ANYNET.BLOCK_TYPE, + ds=regnet_cfg.ANYNET.DEPTHS, + ws=regnet_cfg.ANYNET.WIDTHS, + ss=regnet_cfg.ANYNET.STRIDES, + bn_norm=regnet_cfg.ANYNET.BN_NORM, + bms=regnet_cfg.ANYNET.BOT_MULS, + gws=regnet_cfg.ANYNET.GROUP_WS, + se_r=regnet_cfg.ANYNET.SE_R if regnet_cfg.ANYNET.SE_ON else None, + ) + self.apply(init_weights) + + def construct(self, stem_type, stem_w, block_type, ds, ws, ss, bn_norm, bms, gws, se_r): + # Generate dummy bot muls and gs for models that do not use them + bms = bms if bms else [1.0 for _d in ds] + gws = gws if gws else [1 for _d in ds] + # Group params by stage + stage_params = list(zip(ds, ws, ss, bms, gws)) + # Construct the stem + stem_fun = get_stem_fun(stem_type) + self.stem = stem_fun(3, stem_w, bn_norm) + # Construct the stages + block_fun = get_block_fun(block_type) + prev_w = stem_w + for i, (d, w, s, bm, gw) in enumerate(stage_params): + self.add_module( + "s{}".format(i + 1), AnyStage(prev_w, w, s, bn_norm, d, block_fun, bm, gw, se_r) + ) + prev_w = w + # Construct the head + self.in_planes = prev_w + # self.head = AnyHead(w_in=prev_w, nc=nc) + + def forward(self, x): + for module in self.children(): + x = module(x) + return x + + +def quantize_float(f, q): + """Converts a float to closest non-zero int divisible by q.""" + return int(round(f / q) * q) + + +def adjust_ws_gs_comp(ws, bms, gs): + """Adjusts the compatibility of widths and groups.""" + ws_bot = [int(w * b) for w, b in zip(ws, bms)] + gs = [min(g, w_bot) for g, w_bot in zip(gs, ws_bot)] + ws_bot = [quantize_float(w_bot, g) for w_bot, g in zip(ws_bot, gs)] + ws = [int(w_bot / b) for w_bot, b in zip(ws_bot, bms)] + return ws, gs + + +def get_stages_from_blocks(ws, rs): + """Gets ws/ds of network at each stage from per block values.""" + ts_temp = zip(ws + [0], [0] + ws, rs + [0], [0] + rs) + ts = [w != wp or r != rp for w, wp, r, rp in ts_temp] + s_ws = [w for w, t in zip(ws, ts[:-1]) if t] + s_ds = np.diff([d for d, t in zip(range(len(ts)), ts) if t]).tolist() + return s_ws, s_ds + + +def generate_regnet(w_a, w_0, w_m, d, q=8): + """Generates per block ws from RegNet 
parameters.""" + assert w_a >= 0 and w_0 > 0 and w_m > 1 and w_0 % q == 0 + ws_cont = np.arange(d) * w_a + w_0 + ks = np.round(np.log(ws_cont / w_0) / np.log(w_m)) + ws = w_0 * np.power(w_m, ks) + ws = np.round(np.divide(ws, q)) * q + num_stages, max_stage = len(np.unique(ws)), ks.max() + 1 + ws, ws_cont = ws.astype(int).tolist(), ws_cont.tolist() + return ws, num_stages, max_stage, ws_cont + + +class RegNet(AnyNet): + """RegNet model.""" + + def __init__(self, last_stride, bn_norm): + # Generate RegNet ws per block + b_ws, num_s, _, _ = generate_regnet( + regnet_cfg.REGNET.WA, regnet_cfg.REGNET.W0, regnet_cfg.REGNET.WM, regnet_cfg.REGNET.DEPTH + ) + # Convert to per stage format + ws, ds = get_stages_from_blocks(b_ws, b_ws) + # Generate group widths and bot muls + gws = [regnet_cfg.REGNET.GROUP_W for _ in range(num_s)] + bms = [regnet_cfg.REGNET.BOT_MUL for _ in range(num_s)] + # Adjust the compatibility of ws and gws + ws, gws = adjust_ws_gs_comp(ws, bms, gws) + # Use the same stride for each stage + ss = [regnet_cfg.REGNET.STRIDE for _ in range(num_s)] + ss[-1] = last_stride + # Use SE for RegNetY + se_r = regnet_cfg.REGNET.SE_R if regnet_cfg.REGNET.SE_ON else None + # Construct the model + kwargs = { + "stem_type": regnet_cfg.REGNET.STEM_TYPE, + "stem_w": regnet_cfg.REGNET.STEM_W, + "block_type": regnet_cfg.REGNET.BLOCK_TYPE, + "ss": ss, + "ds": ds, + "ws": ws, + "bn_norm": bn_norm, + "bms": bms, + "gws": gws, + "se_r": se_r, + } + super(RegNet, self).__init__(**kwargs) + + +def init_pretrained_weights(key): + """Initializes model with pretrained weights. + + Layers that don't match with pretrained layers in name or size are kept unchanged. + """ + import os + import errno + import gdown + + def _get_torch_home(): + ENV_TORCH_HOME = 'TORCH_HOME' + ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME' + DEFAULT_CACHE_DIR = '~/.cache' + torch_home = os.path.expanduser( + os.getenv( + ENV_TORCH_HOME, + os.path.join( + os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch' + ) + ) + ) + return torch_home + + torch_home = _get_torch_home() + model_dir = os.path.join(torch_home, 'checkpoints') + try: + os.makedirs(model_dir) + except OSError as e: + if e.errno == errno.EEXIST: + # Directory already exists, ignore. + pass + else: + # Unexpected OSError, re-raise. 
+            raise
+
+    filename = model_urls[key].split('/')[-1]
+
+    cached_file = os.path.join(model_dir, filename)
+
+    if not os.path.exists(cached_file):
+        if comm.is_main_process():
+            gdown.download(model_urls[key], cached_file, quiet=False)
+
+    comm.synchronize()
+
+    logger.info(f"Loading pretrained model from {cached_file}")
+    state_dict = torch.load(cached_file, map_location=torch.device('cpu'))['model_state']
+
+    return state_dict
+
+
+@BACKBONE_REGISTRY.register()
+def build_regnet_backbone(cfg):
+    # fmt: off
+    pretrain = cfg.MODEL.BACKBONE.PRETRAIN
+    pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH
+    last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE
+    bn_norm = cfg.MODEL.BACKBONE.NORM
+    depth = cfg.MODEL.BACKBONE.DEPTH
+    # fmt: on
+
+    cfg_files = {
+        '800x': 'fastreid/modeling/backbones/regnet/regnetx/RegNetX-800MF_dds_8gpu.yaml',
+        '800y': 'fastreid/modeling/backbones/regnet/regnety/RegNetY-800MF_dds_8gpu.yaml',
+        '1600x': 'fastreid/modeling/backbones/regnet/regnetx/RegNetX-1.6GF_dds_8gpu.yaml',
+        '1600y': 'fastreid/modeling/backbones/regnet/regnety/RegNetY-1.6GF_dds_8gpu.yaml',
+        '3200x': 'fastreid/modeling/backbones/regnet/regnetx/RegNetX-3.2GF_dds_8gpu.yaml',
+        '3200y': 'fastreid/modeling/backbones/regnet/regnety/RegNetY-3.2GF_dds_8gpu.yaml',
+        '4000x': 'fastreid/modeling/backbones/regnet/regnetx/RegNetX-4.0GF_dds_8gpu.yaml',
+        '4000y': 'fastreid/modeling/backbones/regnet/regnety/RegNetY-4.0GF_dds_8gpu.yaml',
+        '6400x': 'fastreid/modeling/backbones/regnet/regnetx/RegNetX-6.4GF_dds_8gpu.yaml',
+        '6400y': 'fastreid/modeling/backbones/regnet/regnety/RegNetY-6.4GF_dds_8gpu.yaml',
+    }[depth]
+
+    regnet_cfg.merge_from_file(cfg_files)
+    model = RegNet(last_stride, bn_norm)
+
+    if pretrain:
+        # Load weights from the given pretrain path if it is specified
+        if pretrain_path:
+            try:
+                state_dict = torch.load(pretrain_path, map_location=torch.device('cpu'))
+                logger.info(f"Loading pretrained model from {pretrain_path}")
+            except FileNotFoundError as e:
+                logger.info(f'{pretrain_path} is not found! Please check this path.')
+                raise e
+            except KeyError as e:
+                logger.info("State dict keys error! Please check the state dict.")
+                raise e
+        else:
+            key = depth
+            state_dict = init_pretrained_weights(key)
+
+        incompatible = model.load_state_dict(state_dict, strict=False)
+        if incompatible.missing_keys:
+            logger.info(
+                get_missing_parameters_message(incompatible.missing_keys)
+            )
+        if incompatible.unexpected_keys:
+            logger.info(
+                get_unexpected_parameters_message(incompatible.unexpected_keys)
+            )
+    return model
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-1.6GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-1.6GF_dds_8gpu.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c8133d7e43e7e081a5a107143cc3900c87c43c0a
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-1.6GF_dds_8gpu.yaml
@@ -0,0 +1,26 @@
+MODEL:
+  TYPE: regnet
+  NUM_CLASSES: 1000
+REGNET:
+  DEPTH: 18
+  W0: 80
+  WA: 34.01
+  WM: 2.25
+  GROUP_W: 24
+OPTIM:
+  LR_POLICY: cos
+  BASE_LR: 0.8
+  MAX_EPOCH: 100
+  MOMENTUM: 0.9
+  WEIGHT_DECAY: 5e-5
+  WARMUP_EPOCHS: 5
+TRAIN:
+  DATASET: imagenet
+  IM_SIZE: 224
+  BATCH_SIZE: 1024
+TEST:
+  DATASET: imagenet
+  IM_SIZE: 256
+  BATCH_SIZE: 800
+NUM_GPUS: 8
+OUT_DIR: .
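The stage layout is not written down in these yamls; it is derived from (W0, WA, WM, DEPTH, GROUP_W) by the helpers in regnet.py above. A worked sketch (not part of the diff, import path assumed) using this RegNetX-1.6GF config:

```python
from fastreid.modeling.backbones.regnet.regnet import (
    adjust_ws_gs_comp,
    generate_regnet,
    get_stages_from_blocks,
)

# Per-block widths from the linear/log-quantized RegNet recipe.
b_ws, num_s, _, _ = generate_regnet(w_a=34.01, w_0=80, w_m=2.25, d=18)
# Collapse equal-width runs of blocks into stages.
ws, ds = get_stages_from_blocks(b_ws, b_ws)
# Snap stage widths to multiples of the group width (GROUP_W=24, BOT_MUL=1.0).
ws, gws = adjust_ws_gs_comp(ws, [1.0] * num_s, [24] * num_s)
# This should yield ds == [2, 4, 10, 2] and ws == [72, 168, 408, 912],
# i.e. every stage width becomes divisible by the group width 24.
print(ds, ws, gws)
```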
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-12GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-12GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a3edf4224c422780423cbd5093155b6fbb2688d3 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-12GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 19 + W0: 168 + WA: 73.36 + WM: 2.37 + GROUP_W: 112 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-16GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-16GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0f94f9bd20d3b9999050683f8b31bd6c4a829dcd --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-16GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 22 + W0: 216 + WA: 55.59 + WM: 2.1 + GROUP_W: 128 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-200MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-200MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c0b13b6fab581e450c9136a6012465a7ce33c773 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-200MF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 13 + W0: 24 + WA: 36.44 + WM: 2.49 + GROUP_W: 8 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-3.2GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-3.2GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..594d533ac5df1775d75317271273958250fd4765 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-3.2GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 25 + W0: 88 + WA: 26.31 + WM: 2.25 + GROUP_W: 48 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . 
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-32GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-32GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c1d34b88514c2243d0d50698e55a2ef3c9ec3c6a --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-32GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 23 + W0: 320 + WA: 69.86 + WM: 2.0 + GROUP_W: 168 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.2 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-4.0GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-4.0GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bd954531a279c280f2f6b1e51d42755b697d8a82 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-4.0GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 23 + W0: 96 + WA: 38.65 + WM: 2.43 + GROUP_W: 40 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-400MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-400MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7b887ad4b54c3e6ff410350659bf592019bed20f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-400MF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 22 + W0: 24 + WA: 24.48 + WM: 2.54 + GROUP_W: 16 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-6.4GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-6.4GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f256e64e1a0b65ee0c76a3145c7a94e4c0546a2e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-6.4GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 17 + W0: 184 + WA: 60.83 + WM: 2.07 + GROUP_W: 56 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . 
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-600MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-600MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aca28aa47008379780eefb61ed0a4f61c753d3c7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-600MF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 16 + W0: 48 + WA: 36.97 + WM: 2.24 + GROUP_W: 24 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-8.0GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-8.0GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a4141d663d8d58657bf367ce21fbe97daaaefc83 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-8.0GF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 23 + W0: 80 + WA: 49.56 + WM: 2.88 + GROUP_W: 120 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-800MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-800MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8d2f6ae8ede67f3233b86c3f0dbf5cb2ded2bf05 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnetx/RegNetX-800MF_dds_8gpu.yaml @@ -0,0 +1,26 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + DEPTH: 16 + W0: 56 + WA: 35.73 + WM: 2.28 + GROUP_W: 16 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-1.6GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-1.6GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2dc9f3780f7ee68c00d4a388b816eaea48f2585b --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-1.6GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 27 + W0: 48 + WA: 20.71 + WM: 2.65 + GROUP_W: 24 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . 
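Starting with this RegNetY-1.6GF file, the Y configs differ from the X variants chiefly through `SE_ON: True`, which routes every BottleneckTransform through the `SE` module defined in regnet.py. A tiny shape-level sketch (assumptions: fast-reid is importable, and `SE_R` keeps its pycls-style default of 0.25):

```python
import torch

from fastreid.modeling.backbones.regnet.regnet import SE

# In BottleneckTransform, w_se is derived from the block's *input* width:
# w_se = round(w_in * SE_R), e.g. round(168 * 0.25) = 42.
se = SE(w_in=168, w_se=42)
x = torch.randn(2, 168, 16, 8)
out = se(x)
assert out.shape == x.shape  # SE only reweights channels; shapes are unchanged
```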
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-12GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-12GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6d27d5d1cd5518596cf5a70e8a5ff6a7c040f9b5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-12GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 19 + W0: 168 + WA: 73.36 + WM: 2.37 + GROUP_W: 112 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-16GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-16GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..605d215594dd36af1c3537c10884d27f22c5bea2 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-16GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 18 + W0: 200 + WA: 106.23 + WM: 2.48 + GROUP_W: 112 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.2 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-200MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-200MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..300cc438ad5837a9303bc5c237a886daec52a109 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-200MF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 13 + W0: 24 + WA: 36.44 + WM: 2.49 + GROUP_W: 8 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-3.2GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-3.2GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..95f05bad95b27bf3a57500392792617510726cf7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-3.2GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 21 + W0: 80 + WA: 42.63 + WM: 2.66 + GROUP_W: 24 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . 
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-32GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-32GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..753d7a595a42e82a846e97cbd0a1b46de3a63461 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-32GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 20 + W0: 232 + WA: 115.89 + WM: 2.53 + GROUP_W: 232 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.2 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 256 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 200 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-4.0GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-4.0GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..27895a9bb6312d38bf7516cfdd62c0deefcb075e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-4.0GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 22 + W0: 96 + WA: 31.41 + WM: 2.24 + GROUP_W: 64 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-400MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-400MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1b1c31b5e3e9894aeceb12197dad0a92081cbeac --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-400MF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 16 + W0: 48 + WA: 27.89 + WM: 2.09 + GROUP_W: 8 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-6.4GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-6.4GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74535c2e9a81f1564dae313a4f5e4817e18d85d1 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-6.4GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 25 + W0: 112 + WA: 33.22 + WM: 2.27 + GROUP_W: 72 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . 
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-600MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-600MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..661e1a9324d5dd853a5e5aed75912447765d05fa --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-600MF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 15 + W0: 48 + WA: 32.54 + WM: 2.32 + GROUP_W: 16 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-8.0GF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-8.0GF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..792147a16b0c4d955c101c66c77259a967bee598 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-8.0GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: true + DEPTH: 17 + W0: 192 + WA: 76.82 + WM: 2.19 + GROUP_W: 56 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 400 +NUM_GPUS: 8 +OUT_DIR: . diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-800MF_dds_8gpu.yaml b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-800MF_dds_8gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6e528235689e53cddf4f75c00ac447bc2d3b7f92 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/regnet/regnety/RegNetY-800MF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: True + DEPTH: 14 + W0: 56 + WA: 38.84 + WM: 2.4 + GROUP_W: 16 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.8 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 224 + BATCH_SIZE: 1024 +TEST: + DATASET: imagenet + IM_SIZE: 256 + BATCH_SIZE: 800 +NUM_GPUS: 8 +OUT_DIR: . 
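With all RegNet yamls in place, the registered `build_regnet_backbone` above can be driven through a fast-reid config. A hedged usage sketch, not part of the diff: `get_cfg` and `build_backbone` are assumed to sit at their usual fast-reid locations, and the process must run from the fast-reid root so the relative paths in `cfg_files` resolve:

```python
from fastreid.config import get_cfg
from fastreid.modeling.backbones import build_backbone

cfg = get_cfg()
cfg.MODEL.BACKBONE.NAME = "build_regnet_backbone"
cfg.MODEL.BACKBONE.DEPTH = "800y"     # -> regnety/RegNetY-800MF_dds_8gpu.yaml
cfg.MODEL.BACKBONE.NORM = "BN"
cfg.MODEL.BACKBONE.LAST_STRIDE = 1
cfg.MODEL.BACKBONE.PRETRAIN = False   # skip the checkpoint download

backbone = build_backbone(cfg)
```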
diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/resnest.py b/thirdparty/fast-reid/fastreid/modeling/backbones/resnest.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ddc39c5b3d217ba2ee5eeed536b07f5a2584fcb
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/modeling/backbones/resnest.py
@@ -0,0 +1,427 @@
+# encoding: utf-8
+# based on:
+# https://github.com/zhanghang1989/ResNeSt/blob/master/resnest/torch/resnest.py
+"""ResNeSt models"""
+
+import logging
+import math
+
+import torch
+from torch import nn
+
+from fastreid.layers import (
+    IBN,
+    Non_local,
+    SplAtConv2d,
+    get_norm,
+)
+from fastreid.utils.checkpoint import get_unexpected_parameters_message, get_missing_parameters_message
+from .build import BACKBONE_REGISTRY
+
+logger = logging.getLogger(__name__)
+_url_format = 'https://s3.us-west-1.wasabisys.com/resnest/torch/{}-{}.pth'
+
+_model_sha256 = {name: checksum for checksum, name in [
+    ('528c19ca', 'resnest50'),
+    ('22405ba7', 'resnest101'),
+    ('75117900', 'resnest200'),
+    ('0cc87c48', 'resnest269'),
+]}
+
+
+def short_hash(name):
+    if name not in _model_sha256:
+        raise ValueError('Pretrained model for {name} is not available.'.format(name=name))
+    return _model_sha256[name][:8]
+
+
+model_urls = {name: _url_format.format(name, short_hash(name)) for
+              name in _model_sha256.keys()
+              }
+
+
+class Bottleneck(nn.Module):
+    """ResNet Bottleneck
+    """
+    # pylint: disable=unused-argument
+    expansion = 4
+
+    def __init__(self, inplanes, planes, bn_norm, with_ibn=False, stride=1, downsample=None,
+                 radix=1, cardinality=1, bottleneck_width=64,
+                 avd=False, avd_first=False, dilation=1, is_first=False,
+                 rectified_conv=False, rectify_avg=False,
+                 dropblock_prob=0.0, last_gamma=False):
+        super(Bottleneck, self).__init__()
+        group_width = int(planes * (bottleneck_width / 64.)) * cardinality
+        self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False)
+        if with_ibn:
+            self.bn1 = IBN(group_width, bn_norm)
+        else:
+            self.bn1 = get_norm(bn_norm, group_width)
+        # NOTE: forward() references self.dropblock1/2/3 when dropblock_prob > 0,
+        # but no dropblock layers are constructed in this port, so keep
+        # dropblock_prob at 0.0.
+        self.dropblock_prob = dropblock_prob
+        self.radix = radix
+        self.avd = avd and (stride > 1 or is_first)
+        self.avd_first = avd_first
+
+        if self.avd:
+            self.avd_layer = nn.AvgPool2d(3, stride, padding=1)
+            stride = 1
+
+        if radix > 1:
+            self.conv2 = SplAtConv2d(
+                group_width, group_width, kernel_size=3,
+                stride=stride, padding=dilation,
+                dilation=dilation, groups=cardinality, bias=False,
+                radix=radix, rectify=rectified_conv,
+                rectify_avg=rectify_avg,
+                norm_layer=bn_norm,
+                dropblock_prob=dropblock_prob)
+        elif rectified_conv:
+            from rfconv import RFConv2d
+            self.conv2 = RFConv2d(
+                group_width, group_width, kernel_size=3, stride=stride,
+                padding=dilation, dilation=dilation,
+                groups=cardinality, bias=False,
+                average_mode=rectify_avg)
+            self.bn2 = get_norm(bn_norm, group_width)
+        else:
+            self.conv2 = nn.Conv2d(
+                group_width, group_width, kernel_size=3, stride=stride,
+                padding=dilation, dilation=dilation,
+                groups=cardinality, bias=False)
+            self.bn2 = get_norm(bn_norm, group_width)
+
+        self.conv3 = nn.Conv2d(
+            group_width, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = get_norm(bn_norm, planes * 4)
+
+        if last_gamma:
+            from torch.nn.init import zeros_
+            zeros_(self.bn3.weight)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.dilation = dilation
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        if self.dropblock_prob > 0.0:
+            out = self.dropblock1(out)
+        out = self.relu(out)
+
+        if self.avd and self.avd_first:
+            out = self.avd_layer(out)
+
+        out = self.conv2(out)
+        if self.radix == 1:
+            out = self.bn2(out)
+            if self.dropblock_prob > 0.0:
+                out = self.dropblock2(out)
+            out = self.relu(out)
+
+        if self.avd and not self.avd_first:
+            out = self.avd_layer(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+        if self.dropblock_prob > 0.0:
+            out = self.dropblock3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class ResNest(nn.Module):
+    """ResNeSt variant of ResNet.
+    Parameters
+    ----------
+    block : Block
+        Class for the residual block (Bottleneck in this port).
+    layers : list of int
+        Number of blocks in each stage.
+    dilated : bool, default False
+        Applying dilation strategy to pretrained ResNet yielding a stride-8 model,
+        typically used in Semantic Segmentation.
+    norm_layer : object
+        Normalization layer used in the backbone network (default: BatchNorm;
+        use a synchronized variant for cross-GPU BatchNormalization).
+    Reference:
+        - He, Kaiming, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016.
+        - Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions."
+    """
+
+    # pylint: disable=unused-variable
+    def __init__(self, last_stride, bn_norm, with_ibn, with_nl, block, layers, non_layers, radix=1,
+                 groups=1,
+                 bottleneck_width=64,
+                 dilated=False, dilation=1,
+                 deep_stem=False, stem_width=64, avg_down=False,
+                 rectified_conv=False, rectify_avg=False,
+                 avd=False, avd_first=False,
+                 final_drop=0.0, dropblock_prob=0,
+                 last_gamma=False):
+        self.cardinality = groups
+        self.bottleneck_width = bottleneck_width
+        # ResNet-D params
+        self.inplanes = stem_width * 2 if deep_stem else 64
+        self.avg_down = avg_down
+        self.last_gamma = last_gamma
+        # ResNeSt params
+        self.radix = radix
+        self.avd = avd
+        self.avd_first = avd_first
+
+        super().__init__()
+        self.rectified_conv = rectified_conv
+        self.rectify_avg = rectify_avg
+        if rectified_conv:
+            from rfconv import RFConv2d
+            conv_layer = RFConv2d
+        else:
+            conv_layer = nn.Conv2d
+        conv_kwargs = {'average_mode': rectify_avg} if rectified_conv else {}
+        if deep_stem:
+            self.conv1 = nn.Sequential(
+                conv_layer(3, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **conv_kwargs),
+                get_norm(bn_norm, stem_width),
+                nn.ReLU(inplace=True),
+                conv_layer(stem_width, stem_width, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
+                get_norm(bn_norm, stem_width),
+                nn.ReLU(inplace=True),
+                conv_layer(stem_width, stem_width * 2, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
+            )
+        else:
+            self.conv1 = conv_layer(3, 64, kernel_size=7, stride=2, padding=3,
+                                    bias=False, **conv_kwargs)
+        self.bn1 = get_norm(bn_norm, self.inplanes)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.layer1 = self._make_layer(block, 64, layers[0], 1, bn_norm, with_ibn=with_ibn, is_first=False)
+        self.layer2 = self._make_layer(block, 128, layers[1], 2, bn_norm, with_ibn=with_ibn)
+        if dilated or dilation == 4:
+            self.layer3 = self._make_layer(block, 256, layers[2], 1, bn_norm, with_ibn=with_ibn,
+                                           dilation=2, dropblock_prob=dropblock_prob)
+            self.layer4 = self._make_layer(block, 512, layers[3], 1, bn_norm, with_ibn=with_ibn,
+                                           dilation=4, dropblock_prob=dropblock_prob)
+        elif
dilation == 2: + self.layer3 = self._make_layer(block, 256, layers[2], 2, bn_norm, with_ibn=with_ibn, + dilation=1, dropblock_prob=dropblock_prob) + self.layer4 = self._make_layer(block, 512, layers[3], 1, bn_norm, with_ibn=with_ibn, + dilation=2, dropblock_prob=dropblock_prob) + else: + self.layer3 = self._make_layer(block, 256, layers[2], 2, bn_norm, with_ibn=with_ibn, + dropblock_prob=dropblock_prob) + self.layer4 = self._make_layer(block, 512, layers[3], last_stride, bn_norm, with_ibn=with_ibn, + dropblock_prob=dropblock_prob) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + # fmt: off + if with_nl: self._build_nonlocal(layers, non_layers, bn_norm) + else: self.NL_1_idx = self.NL_2_idx = self.NL_3_idx = self.NL_4_idx = [] + # fmt: on + + def _make_layer(self, block, planes, blocks, stride=1, bn_norm="BN", with_ibn=False, + dilation=1, dropblock_prob=0.0, is_first=True): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + down_layers = [] + if self.avg_down: + if dilation == 1: + down_layers.append(nn.AvgPool2d(kernel_size=stride, stride=stride, + ceil_mode=True, count_include_pad=False)) + else: + down_layers.append(nn.AvgPool2d(kernel_size=1, stride=1, + ceil_mode=True, count_include_pad=False)) + down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=1, bias=False)) + else: + down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False)) + down_layers.append(get_norm(bn_norm, planes * block.expansion)) + downsample = nn.Sequential(*down_layers) + + layers = [] + if dilation == 1 or dilation == 2: + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, stride, downsample=downsample, + radix=self.radix, cardinality=self.cardinality, + bottleneck_width=self.bottleneck_width, + avd=self.avd, avd_first=self.avd_first, + dilation=1, is_first=is_first, rectified_conv=self.rectified_conv, + rectify_avg=self.rectify_avg, + dropblock_prob=dropblock_prob, + last_gamma=self.last_gamma)) + elif dilation == 4: + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, stride, downsample=downsample, + radix=self.radix, cardinality=self.cardinality, + bottleneck_width=self.bottleneck_width, + avd=self.avd, avd_first=self.avd_first, + dilation=2, is_first=is_first, rectified_conv=self.rectified_conv, + rectify_avg=self.rectify_avg, + dropblock_prob=dropblock_prob, + last_gamma=self.last_gamma)) + else: + raise RuntimeError("=> unknown dilation size: {}".format(dilation)) + + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, + radix=self.radix, cardinality=self.cardinality, + bottleneck_width=self.bottleneck_width, + avd=self.avd, avd_first=self.avd_first, + dilation=dilation, rectified_conv=self.rectified_conv, + rectify_avg=self.rectify_avg, + dropblock_prob=dropblock_prob, + last_gamma=self.last_gamma)) + + return nn.Sequential(*layers) + + def _build_nonlocal(self, layers, non_layers, bn_norm): + self.NL_1 = nn.ModuleList( + [Non_local(256, bn_norm) for _ in range(non_layers[0])]) + self.NL_1_idx = sorted([layers[0] - (i + 1) for i in range(non_layers[0])]) + self.NL_2 = nn.ModuleList( + [Non_local(512, bn_norm) for _ in range(non_layers[1])]) + self.NL_2_idx = sorted([layers[1] 
- (i + 1) for i in range(non_layers[1])])
+        self.NL_3 = nn.ModuleList(
+            [Non_local(1024, bn_norm) for _ in range(non_layers[2])])
+        self.NL_3_idx = sorted([layers[2] - (i + 1) for i in range(non_layers[2])])
+        self.NL_4 = nn.ModuleList(
+            [Non_local(2048, bn_norm) for _ in range(non_layers[3])])
+        self.NL_4_idx = sorted([layers[3] - (i + 1) for i in range(non_layers[3])])
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.bn1(x)
+        x = self.relu(x)
+        x = self.maxpool(x)
+
+        NL1_counter = 0
+        if len(self.NL_1_idx) == 0:
+            self.NL_1_idx = [-1]
+        for i in range(len(self.layer1)):
+            x = self.layer1[i](x)
+            if i == self.NL_1_idx[NL1_counter]:
+                _, C, H, W = x.shape
+                x = self.NL_1[NL1_counter](x)
+                NL1_counter += 1
+        # Layer 2
+        NL2_counter = 0
+        if len(self.NL_2_idx) == 0:
+            self.NL_2_idx = [-1]
+        for i in range(len(self.layer2)):
+            x = self.layer2[i](x)
+            if i == self.NL_2_idx[NL2_counter]:
+                _, C, H, W = x.shape
+                x = self.NL_2[NL2_counter](x)
+                NL2_counter += 1
+        # Layer 3
+        NL3_counter = 0
+        if len(self.NL_3_idx) == 0:
+            self.NL_3_idx = [-1]
+        for i in range(len(self.layer3)):
+            x = self.layer3[i](x)
+            if i == self.NL_3_idx[NL3_counter]:
+                _, C, H, W = x.shape
+                x = self.NL_3[NL3_counter](x)
+                NL3_counter += 1
+        # Layer 4
+        NL4_counter = 0
+        if len(self.NL_4_idx) == 0:
+            self.NL_4_idx = [-1]
+        for i in range(len(self.layer4)):
+            x = self.layer4[i](x)
+            if i == self.NL_4_idx[NL4_counter]:
+                _, C, H, W = x.shape
+                x = self.NL_4[NL4_counter](x)
+                NL4_counter += 1
+
+        return x
+
+
+@BACKBONE_REGISTRY.register()
+def build_resnest_backbone(cfg):
+    """
+    Create a ResNest instance from config.
+    Returns:
+        ResNest: a :class:`ResNest` instance.
+    """
+
+    # fmt: off
+    pretrain = cfg.MODEL.BACKBONE.PRETRAIN
+    pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH
+    last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE
+    bn_norm = cfg.MODEL.BACKBONE.NORM
+    with_ibn = cfg.MODEL.BACKBONE.WITH_IBN
+    with_se = cfg.MODEL.BACKBONE.WITH_SE  # NOTE: read here but not passed to ResNest below
+    with_nl = cfg.MODEL.BACKBONE.WITH_NL
+    depth = cfg.MODEL.BACKBONE.DEPTH
+    # fmt: on
+
+    num_blocks_per_stage = {
+        "50x": [3, 4, 6, 3],
+        "101x": [3, 4, 23, 3],
+        "200x": [3, 24, 36, 3],
+        "269x": [3, 30, 48, 8],
+    }[depth]
+
+    nl_layers_per_stage = {
+        "50x": [0, 2, 3, 0],
+        "101x": [0, 2, 3, 0],
+        "200x": [0, 2, 3, 0],
+        "269x": [0, 2, 3, 0],
+    }[depth]
+
+    stem_width = {
+        "50x": 32,
+        "101x": 64,
+        "200x": 64,
+        "269x": 64,
+    }[depth]
+
+    model = ResNest(last_stride, bn_norm, with_ibn, with_nl, Bottleneck, num_blocks_per_stage,
+                    nl_layers_per_stage, radix=2, groups=1, bottleneck_width=64,
+                    deep_stem=True, stem_width=stem_width, avg_down=True,
+                    avd=True, avd_first=False)
+    if pretrain:
+        # Load weights from the given pretrain path if it is specified
+        if pretrain_path:
+            try:
+                state_dict = torch.load(pretrain_path, map_location=torch.device('cpu'))
+                logger.info(f"Loading pretrained model from {pretrain_path}")
+            except FileNotFoundError as e:
+                logger.info(f'{pretrain_path} is not found! Please check this path.')
+                raise e
+            except KeyError as e:
+                logger.info("State dict keys error! Please check the state dict.")
Please check the state dict.") + raise e + else: + state_dict = torch.hub.load_state_dict_from_url( + model_urls['resnest' + depth[:-1]], progress=True, check_hash=True, map_location=torch.device('cpu')) + + incompatible = model.load_state_dict(state_dict, strict=False) + if incompatible.missing_keys: + logger.info( + get_missing_parameters_message(incompatible.missing_keys) + ) + if incompatible.unexpected_keys: + logger.info( + get_unexpected_parameters_message(incompatible.unexpected_keys) + ) + return model diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/resnet.py b/thirdparty/fast-reid/fastreid/modeling/backbones/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..ac24ad45335b723ad0dd1e1fd8144a79fb2b7860 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/resnet.py @@ -0,0 +1,359 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import logging +import math + +import torch +from torch import nn + +from fastreid.layers import ( + IBN, + SELayer, + Non_local, + get_norm, +) +from fastreid.utils.checkpoint import get_missing_parameters_message, get_unexpected_parameters_message +from .build import BACKBONE_REGISTRY +from fastreid.utils import comm + + +logger = logging.getLogger(__name__) +model_urls = { + '18x': 'https://download.pytorch.org/models/resnet18-5c106cde.pth', + '34x': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth', + '50x': 'https://download.pytorch.org/models/resnet50-19c8e357.pth', + '101x': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth', + 'ibn_18x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/resnet18_ibn_a-2f571257.pth', + 'ibn_34x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/resnet34_ibn_a-94bc1577.pth', + 'ibn_50x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/resnet50_ibn_a-d9d0bb7b.pth', + 'ibn_101x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/resnet101_ibn_a-59ea0ac6.pth', + 'se_ibn_101x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/se_resnet101_ibn_a-fabed4e2.pth', +} + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, bn_norm, with_ibn=False, with_se=False, + stride=1, downsample=None, reduction=16): + super(BasicBlock, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False) + if with_ibn: + self.bn1 = IBN(planes, bn_norm) + else: + self.bn1 = get_norm(bn_norm, planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) + self.bn2 = get_norm(bn_norm, planes) + self.relu = nn.ReLU(inplace=True) + if with_se: + self.se = SELayer(planes, reduction) + else: + self.se = nn.Identity() + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, bn_norm, with_ibn=False, with_se=False, + stride=1, downsample=None, reduction=16): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + if with_ibn: + self.bn1 = IBN(planes, bn_norm) + else: + self.bn1 = get_norm(bn_norm, planes) + self.conv2 = nn.Conv2d(planes, 
planes, kernel_size=3, stride=stride, + padding=1, bias=False) + self.bn2 = get_norm(bn_norm, planes) + self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False) + self.bn3 = get_norm(bn_norm, planes * self.expansion) + self.relu = nn.ReLU(inplace=True) + if with_se: + self.se = SELayer(planes * self.expansion, reduction) + else: + self.se = nn.Identity() + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + out = self.se(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + def __init__(self, last_stride, bn_norm, with_ibn, with_se, with_nl, block, layers, non_layers): + self.inplanes = 64 + super().__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = get_norm(bn_norm, 64) + self.relu = nn.ReLU(inplace=True) + # self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True) + self.layer1 = self._make_layer(block, 64, layers[0], 1, bn_norm, with_ibn, with_se) + self.layer2 = self._make_layer(block, 128, layers[1], 2, bn_norm, with_ibn, with_se) + self.layer3 = self._make_layer(block, 256, layers[2], 2, bn_norm, with_ibn, with_se) + self.layer4 = self._make_layer(block, 512, layers[3], last_stride, bn_norm, with_se=with_se) + + self.random_init() + + # fmt: off + if with_nl: self._build_nonlocal(layers, non_layers, bn_norm) + else: self.NL_1_idx = self.NL_2_idx = self.NL_3_idx = self.NL_4_idx = [] + # fmt: on + + def _make_layer(self, block, planes, blocks, stride=1, bn_norm="BN", with_ibn=False, with_se=False): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + get_norm(bn_norm, planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, with_se, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, with_se)) + + return nn.Sequential(*layers) + + def _build_nonlocal(self, layers, non_layers, bn_norm): + self.NL_1 = nn.ModuleList( + [Non_local(256, bn_norm) for _ in range(non_layers[0])]) + self.NL_1_idx = sorted([layers[0] - (i + 1) for i in range(non_layers[0])]) + self.NL_2 = nn.ModuleList( + [Non_local(512, bn_norm) for _ in range(non_layers[1])]) + self.NL_2_idx = sorted([layers[1] - (i + 1) for i in range(non_layers[1])]) + self.NL_3 = nn.ModuleList( + [Non_local(1024, bn_norm) for _ in range(non_layers[2])]) + self.NL_3_idx = sorted([layers[2] - (i + 1) for i in range(non_layers[2])]) + self.NL_4 = nn.ModuleList( + [Non_local(2048, bn_norm) for _ in range(non_layers[3])]) + self.NL_4_idx = sorted([layers[3] - (i + 1) for i in range(non_layers[3])]) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + NL1_counter = 0 + if len(self.NL_1_idx) == 0: + self.NL_1_idx = [-1] + for i in range(len(self.layer1)): + x = self.layer1[i](x) + if i == self.NL_1_idx[NL1_counter]: + _, C, H, W = x.shape + x = self.NL_1[NL1_counter](x) + NL1_counter += 1 + 
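The per-stage loops here consume the indices prepared in `_build_nonlocal`: for stage `k`, the `non_layers[k]` Non-local blocks are slotted in after the *last* residual blocks of that stage, and a counter walks through the sorted index list as the stage executes. A minimal standalone sketch of how those indices come out, using the `50x` defaults from `build_resnet_backbone` (`layers=[3, 4, 6, 3]`, `non_layers=[0, 2, 3, 0]`):

```python
# Standalone sketch: where _build_nonlocal places Non-local blocks.
layers = [3, 4, 6, 3]      # residual blocks per stage ('50x')
non_layers = [0, 2, 3, 0]  # Non-local blocks per stage ('50x')

for stage, (n_blocks, n_nl) in enumerate(zip(layers, non_layers), start=1):
    # Same formula as _build_nonlocal: indices of the last n_nl blocks.
    idx = sorted(n_blocks - (i + 1) for i in range(n_nl))
    print(f"layer{stage}: Non-local after block indices {idx}")

# layer1: Non-local after block indices []
# layer2: Non-local after block indices [2, 3]
# layer3: Non-local after block indices [3, 4, 5]
# layer4: Non-local after block indices []
```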
# Layer 2 + NL2_counter = 0 + if len(self.NL_2_idx) == 0: + self.NL_2_idx = [-1] + for i in range(len(self.layer2)): + x = self.layer2[i](x) + if i == self.NL_2_idx[NL2_counter]: + _, C, H, W = x.shape + x = self.NL_2[NL2_counter](x) + NL2_counter += 1 + # Layer 3 + NL3_counter = 0 + if len(self.NL_3_idx) == 0: + self.NL_3_idx = [-1] + for i in range(len(self.layer3)): + x = self.layer3[i](x) + if i == self.NL_3_idx[NL3_counter]: + _, C, H, W = x.shape + x = self.NL_3[NL3_counter](x) + NL3_counter += 1 + # Layer 4 + NL4_counter = 0 + if len(self.NL_4_idx) == 0: + self.NL_4_idx = [-1] + for i in range(len(self.layer4)): + x = self.layer4[i](x) + if i == self.NL_4_idx[NL4_counter]: + _, C, H, W = x.shape + x = self.NL_4[NL4_counter](x) + NL4_counter += 1 + + return x + + def random_init(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + nn.init.normal_(m.weight, 0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + +def init_pretrained_weights(key): + """Initializes model with pretrained weights. + + Layers that don't match with pretrained layers in name or size are kept unchanged. + """ + import os + import errno + import gdown + + def _get_torch_home(): + ENV_TORCH_HOME = 'TORCH_HOME' + ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME' + DEFAULT_CACHE_DIR = '~/.cache' + torch_home = os.path.expanduser( + os.getenv( + ENV_TORCH_HOME, + os.path.join( + os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch' + ) + ) + ) + return torch_home + + torch_home = _get_torch_home() + model_dir = os.path.join(torch_home, 'checkpoints') + try: + os.makedirs(model_dir) + except OSError as e: + if e.errno == errno.EEXIST: + # Directory already exists, ignore. + pass + else: + # Unexpected OSError, re-raise. + raise + + filename = model_urls[key].split('/')[-1] + + cached_file = os.path.join(model_dir, filename) + + if not os.path.exists(cached_file): + if comm.is_main_process(): + gdown.download(model_urls[key], cached_file, quiet=False) + + comm.synchronize() + + logger.info(f"Loading pretrained model from {cached_file}") + state_dict = torch.load(cached_file, map_location=torch.device('cpu')) + + return state_dict + + +@BACKBONE_REGISTRY.register() +def build_resnet_backbone(cfg): + """ + Create a ResNet instance from config. + Returns: + ResNet: a :class:`ResNet` instance. 
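`init_pretrained_weights` above resolves the cache directory the same way `torch.hub` does, and downloads on the main process only, so that under distributed training the other ranks wait at `comm.synchronize()` instead of racing on the same file. A minimal standalone sketch of just the path resolution (no download):

```python
import os

def torch_cache_dir() -> str:
    # Mirrors _get_torch_home above: $TORCH_HOME, else $XDG_CACHE_HOME/torch,
    # else ~/.cache/torch; checkpoint files live one level below.
    torch_home = os.path.expanduser(
        os.getenv("TORCH_HOME",
                  os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "torch"))
    )
    return os.path.join(torch_home, "checkpoints")

# With no env vars set this prints something like /home/<user>/.cache/torch/checkpoints
print(torch_cache_dir())
```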
+ """ + + # fmt: off + pretrain = cfg.MODEL.BACKBONE.PRETRAIN + pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH + last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE + bn_norm = cfg.MODEL.BACKBONE.NORM + with_ibn = cfg.MODEL.BACKBONE.WITH_IBN + with_se = cfg.MODEL.BACKBONE.WITH_SE + with_nl = cfg.MODEL.BACKBONE.WITH_NL + depth = cfg.MODEL.BACKBONE.DEPTH + # fmt: on + + num_blocks_per_stage = { + '18x': [2, 2, 2, 2], + '34x': [3, 4, 6, 3], + '50x': [3, 4, 6, 3], + '101x': [3, 4, 23, 3], + }[depth] + + nl_layers_per_stage = { + '18x': [0, 0, 0, 0], + '34x': [0, 0, 0, 0], + '50x': [0, 2, 3, 0], + '101x': [0, 2, 9, 0] + }[depth] + + block = { + '18x': BasicBlock, + '34x': BasicBlock, + '50x': Bottleneck, + '101x': Bottleneck + }[depth] + + model = ResNet(last_stride, bn_norm, with_ibn, with_se, with_nl, block, + num_blocks_per_stage, nl_layers_per_stage) + if pretrain: + # Load pretrain path if specifically + if pretrain_path: + try: + state_dict = torch.load(pretrain_path, map_location=torch.device('cpu')) + logger.info(f"Loading pretrained model from {pretrain_path}") + except FileNotFoundError as e: + logger.info(f'{pretrain_path} is not found! Please check this path.') + raise e + except KeyError as e: + logger.info("State dict keys error! Please check the state dict.") + raise e + else: + key = depth + if with_ibn: key = 'ibn_' + key + if with_se: key = 'se_' + key + + state_dict = init_pretrained_weights(key) + + incompatible = model.load_state_dict(state_dict, strict=False) + if incompatible.missing_keys: + logger.info( + get_missing_parameters_message(incompatible.missing_keys) + ) + if incompatible.unexpected_keys: + logger.info( + get_unexpected_parameters_message(incompatible.unexpected_keys) + ) + + return model diff --git a/thirdparty/fast-reid/fastreid/modeling/backbones/resnext.py b/thirdparty/fast-reid/fastreid/modeling/backbones/resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..6d6112532f8099c97cb1fd73b9ca3f3cfdee95c9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/backbones/resnext.py @@ -0,0 +1,334 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on: +# https://github.com/XingangPan/IBN-Net/blob/master/models/imagenet/resnext_ibn_a.py + +import logging +import math + +import torch +import torch.nn as nn + +from fastreid.layers import * +from fastreid.utils import comm +from fastreid.utils.checkpoint import get_missing_parameters_message, get_unexpected_parameters_message +from .build import BACKBONE_REGISTRY + +logger = logging.getLogger(__name__) +model_urls = { + 'ibn_101x': 'https://github.com/XingangPan/IBN-Net/releases/download/v1.0/resnext101_ibn_a-6ace051d.pth', +} + + +class Bottleneck(nn.Module): + """ + RexNeXt bottleneck type C + """ + expansion = 4 + + def __init__(self, inplanes, planes, bn_norm, with_ibn, baseWidth, cardinality, stride=1, + downsample=None): + """ Constructor + Args: + inplanes: input channel dimensionality + planes: output channel dimensionality + baseWidth: base width. + cardinality: num of convolution groups. + stride: conv stride. Replaces pooling layer. 
+ """ + super(Bottleneck, self).__init__() + + D = int(math.floor(planes * (baseWidth / 64))) + C = cardinality + self.conv1 = nn.Conv2d(inplanes, D * C, kernel_size=1, stride=1, padding=0, bias=False) + if with_ibn: + self.bn1 = IBN(D * C, bn_norm) + else: + self.bn1 = get_norm(bn_norm, D * C) + self.conv2 = nn.Conv2d(D * C, D * C, kernel_size=3, stride=stride, padding=1, groups=C, bias=False) + self.bn2 = get_norm(bn_norm, D * C) + self.conv3 = nn.Conv2d(D * C, planes * 4, kernel_size=1, stride=1, padding=0, bias=False) + self.bn3 = get_norm(bn_norm, planes * 4) + self.relu = nn.ReLU(inplace=True) + + self.downsample = downsample + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class ResNeXt(nn.Module): + """ + ResNext optimized for the ImageNet dataset, as specified in + https://arxiv.org/pdf/1611.05431.pdf + """ + + def __init__(self, last_stride, bn_norm, with_ibn, with_nl, block, layers, non_layers, + baseWidth=4, cardinality=32): + """ Constructor + Args: + baseWidth: baseWidth for ResNeXt. + cardinality: number of convolution groups. + layers: config of layers, e.g., [3, 4, 6, 3] + """ + super(ResNeXt, self).__init__() + + self.cardinality = cardinality + self.baseWidth = baseWidth + self.inplanes = 64 + self.output_size = 64 + + self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False) + self.bn1 = get_norm(bn_norm, 64) + self.relu = nn.ReLU(inplace=True) + self.maxpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0], 1, bn_norm, with_ibn=with_ibn) + self.layer2 = self._make_layer(block, 128, layers[1], 2, bn_norm, with_ibn=with_ibn) + self.layer3 = self._make_layer(block, 256, layers[2], 2, bn_norm, with_ibn=with_ibn) + self.layer4 = self._make_layer(block, 512, layers[3], last_stride, bn_norm, with_ibn=with_ibn) + + self.random_init() + + # fmt: off + if with_nl: self._build_nonlocal(layers, non_layers, bn_norm) + else: self.NL_1_idx = self.NL_2_idx = self.NL_3_idx = self.NL_4_idx = [] + # fmt: on + + def _make_layer(self, block, planes, blocks, stride=1, bn_norm='BN', with_ibn=False): + """ Stack n bottleneck modules where n is inferred from the depth of the network. + Args: + block: block type used to construct ResNext + planes: number of output channels (need to multiply by block.expansion) + blocks: number of blocks to be built + stride: factor to reduce the spatial dimensionality in the first bottleneck of the block. + Returns: a Module consisting of n sequential bottlenecks. 
+ """ + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + get_norm(bn_norm, planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, bn_norm, with_ibn, + self.baseWidth, self.cardinality, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append( + block(self.inplanes, planes, bn_norm, with_ibn, self.baseWidth, self.cardinality, 1, None)) + + return nn.Sequential(*layers) + + def _build_nonlocal(self, layers, non_layers, bn_norm): + self.NL_1 = nn.ModuleList( + [Non_local(256, bn_norm) for _ in range(non_layers[0])]) + self.NL_1_idx = sorted([layers[0] - (i + 1) for i in range(non_layers[0])]) + self.NL_2 = nn.ModuleList( + [Non_local(512, bn_norm) for _ in range(non_layers[1])]) + self.NL_2_idx = sorted([layers[1] - (i + 1) for i in range(non_layers[1])]) + self.NL_3 = nn.ModuleList( + [Non_local(1024, bn_norm) for _ in range(non_layers[2])]) + self.NL_3_idx = sorted([layers[2] - (i + 1) for i in range(non_layers[2])]) + self.NL_4 = nn.ModuleList( + [Non_local(2048, bn_norm) for _ in range(non_layers[3])]) + self.NL_4_idx = sorted([layers[3] - (i + 1) for i in range(non_layers[3])]) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool1(x) + + NL1_counter = 0 + if len(self.NL_1_idx) == 0: + self.NL_1_idx = [-1] + for i in range(len(self.layer1)): + x = self.layer1[i](x) + if i == self.NL_1_idx[NL1_counter]: + _, C, H, W = x.shape + x = self.NL_1[NL1_counter](x) + NL1_counter += 1 + # Layer 2 + NL2_counter = 0 + if len(self.NL_2_idx) == 0: + self.NL_2_idx = [-1] + for i in range(len(self.layer2)): + x = self.layer2[i](x) + if i == self.NL_2_idx[NL2_counter]: + _, C, H, W = x.shape + x = self.NL_2[NL2_counter](x) + NL2_counter += 1 + # Layer 3 + NL3_counter = 0 + if len(self.NL_3_idx) == 0: + self.NL_3_idx = [-1] + for i in range(len(self.layer3)): + x = self.layer3[i](x) + if i == self.NL_3_idx[NL3_counter]: + _, C, H, W = x.shape + x = self.NL_3[NL3_counter](x) + NL3_counter += 1 + # Layer 4 + NL4_counter = 0 + if len(self.NL_4_idx) == 0: + self.NL_4_idx = [-1] + for i in range(len(self.layer4)): + x = self.layer4[i](x) + if i == self.NL_4_idx[NL4_counter]: + _, C, H, W = x.shape + x = self.NL_4[NL4_counter](x) + NL4_counter += 1 + return x + + def random_init(self): + self.conv1.weight.data.normal_(0, math.sqrt(2. / (7 * 7 * 64))) + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + elif isinstance(m, nn.InstanceNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + +def init_pretrained_weights(key): + """Initializes model with pretrained weights. + + Layers that don't match with pretrained layers in name or size are kept unchanged. 
+ """ + import os + import errno + import gdown + + def _get_torch_home(): + ENV_TORCH_HOME = 'TORCH_HOME' + ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME' + DEFAULT_CACHE_DIR = '~/.cache' + torch_home = os.path.expanduser( + os.getenv( + ENV_TORCH_HOME, + os.path.join( + os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch' + ) + ) + ) + return torch_home + + torch_home = _get_torch_home() + model_dir = os.path.join(torch_home, 'checkpoints') + try: + os.makedirs(model_dir) + except OSError as e: + if e.errno == errno.EEXIST: + # Directory already exists, ignore. + pass + else: + # Unexpected OSError, re-raise. + raise + + filename = model_urls[key].split('/')[-1] + + cached_file = os.path.join(model_dir, filename) + + if not os.path.exists(cached_file): + if comm.is_main_process(): + gdown.download(model_urls[key], cached_file, quiet=False) + + comm.synchronize() + + logger.info(f"Loading pretrained model from {cached_file}") + state_dict = torch.load(cached_file, map_location=torch.device('cpu')) + + return state_dict + + +@BACKBONE_REGISTRY.register() +def build_resnext_backbone(cfg): + """ + Create a ResNeXt instance from config. + Returns: + ResNeXt: a :class:`ResNeXt` instance. + """ + + # fmt: off + pretrain = cfg.MODEL.BACKBONE.PRETRAIN + pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH + last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE + bn_norm = cfg.MODEL.BACKBONE.NORM + with_ibn = cfg.MODEL.BACKBONE.WITH_IBN + with_nl = cfg.MODEL.BACKBONE.WITH_NL + depth = cfg.MODEL.BACKBONE.DEPTH + # fmt: on + + num_blocks_per_stage = { + '50x': [3, 4, 6, 3], + '101x': [3, 4, 23, 3], + '152x': [3, 8, 36, 3], }[depth] + nl_layers_per_stage = { + '50x': [0, 2, 3, 0], + '101x': [0, 2, 3, 0]}[depth] + model = ResNeXt(last_stride, bn_norm, with_ibn, with_nl, Bottleneck, + num_blocks_per_stage, nl_layers_per_stage) + if pretrain: + if pretrain_path: + try: + state_dict = torch.load(pretrain_path, map_location=torch.device('cpu'))['model'] + # Remove module.encoder in name + new_state_dict = {} + for k in state_dict: + new_k = '.'.join(k.split('.')[2:]) + if new_k in model.state_dict() and (model.state_dict()[new_k].shape == state_dict[k].shape): + new_state_dict[new_k] = state_dict[k] + state_dict = new_state_dict + logger.info(f"Loading pretrained model from {pretrain_path}") + except FileNotFoundError as e: + logger.info(f'{pretrain_path} is not found! Please check this path.') + raise e + except KeyError as e: + logger.info("State dict keys error! 
Please check the state dict.") + raise e + else: + key = depth + if with_ibn: key = 'ibn_' + key + + state_dict = init_pretrained_weights(key) + + incompatible = model.load_state_dict(state_dict, strict=False) + if incompatible.missing_keys: + logger.info( + get_missing_parameters_message(incompatible.missing_keys) + ) + if incompatible.unexpected_keys: + logger.info( + get_unexpected_parameters_message(incompatible.unexpected_keys) + ) + + return model diff --git a/thirdparty/fast-reid/fastreid/modeling/heads/__init__.py b/thirdparty/fast-reid/fastreid/modeling/heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7d6233083ca9437e1b06453db92d67dcb565ed30 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/heads/__init__.py @@ -0,0 +1,11 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .build import REID_HEADS_REGISTRY, build_heads + +# import all the meta_arch, so they will be registered +from .embedding_head import EmbeddingHead +from .attr_head import AttrHead diff --git a/thirdparty/fast-reid/fastreid/modeling/heads/attr_head.py b/thirdparty/fast-reid/fastreid/modeling/heads/attr_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a0618db1ebee50435673ef48d39a7a23c60101cf --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/heads/attr_head.py @@ -0,0 +1,77 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +from torch import nn + +from fastreid.layers import * +from fastreid.utils.weight_init import weights_init_kaiming, weights_init_classifier +from .build import REID_HEADS_REGISTRY + + +@REID_HEADS_REGISTRY.register() +class AttrHead(nn.Module): + def __init__(self, cfg): + super().__init__() + # fmt: off + feat_dim = cfg.MODEL.BACKBONE.FEAT_DIM + num_classes = cfg.MODEL.HEADS.NUM_CLASSES + pool_type = cfg.MODEL.HEADS.POOL_LAYER + cls_type = cfg.MODEL.HEADS.CLS_LAYER + with_bnneck = cfg.MODEL.HEADS.WITH_BNNECK + norm_type = cfg.MODEL.HEADS.NORM + + if pool_type == 'fastavgpool': self.pool_layer = FastGlobalAvgPool2d() + elif pool_type == 'avgpool': self.pool_layer = nn.AdaptiveAvgPool2d(1) + elif pool_type == 'maxpool': self.pool_layer = nn.AdaptiveMaxPool2d(1) + elif pool_type == 'gempoolP': self.pool_layer = GeneralizedMeanPoolingP() + elif pool_type == 'gempool': self.pool_layer = GeneralizedMeanPooling() + elif pool_type == "avgmaxpool": self.pool_layer = AdaptiveAvgMaxPool2d() + elif pool_type == 'clipavgpool': self.pool_layer = ClipGlobalAvgPool2d() + elif pool_type == "identity": self.pool_layer = nn.Identity() + elif pool_type == "flatten": self.pool_layer = Flatten() + else: raise KeyError(f"{pool_type} is not supported!") + + # Classification layer + if cls_type == 'linear': self.classifier = nn.Linear(feat_dim, num_classes, bias=False) + elif cls_type == 'arcSoftmax': self.classifier = ArcSoftmax(cfg, feat_dim, num_classes) + elif cls_type == 'circleSoftmax': self.classifier = CircleSoftmax(cfg, feat_dim, num_classes) + elif cls_type == 'amSoftmax': self.classifier = AMSoftmax(cfg, feat_dim, num_classes) + else: raise KeyError(f"{cls_type} is not supported!") + # fmt: on + + # bottleneck = [] + # if with_bnneck: + # bottleneck.append(get_norm(norm_type, feat_dim, bias_freeze=True)) + bottleneck = [nn.BatchNorm1d(num_classes)] + + self.bottleneck = nn.Sequential(*bottleneck) + + self.bottleneck.apply(weights_init_kaiming) + self.classifier.apply(weights_init_classifier) + + def forward(self, features, 
targets=None): + """ + See :class:`ReIDHeads.forward`. + """ + global_feat = self.pool_layer(features) + global_feat = global_feat[..., 0, 0] + + classifier_name = self.classifier.__class__.__name__ + # fmt: off + if classifier_name == 'Linear': cls_outputs = self.classifier(global_feat) + else: cls_outputs = self.classifier(global_feat, targets) + # fmt: on + + cls_outputs = self.bottleneck(cls_outputs) + + if self.training: + return { + "cls_outputs": cls_outputs, + } + else: + cls_outputs = torch.sigmoid(cls_outputs) + return cls_outputs diff --git a/thirdparty/fast-reid/fastreid/modeling/heads/build.py b/thirdparty/fast-reid/fastreid/modeling/heads/build.py new file mode 100644 index 0000000000000000000000000000000000000000..d57c9a8ceebcf52c2226775904a27e6e14a7bbfa --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/heads/build.py @@ -0,0 +1,24 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from ...utils.registry import Registry + +REID_HEADS_REGISTRY = Registry("HEADS") +REID_HEADS_REGISTRY.__doc__ = """ +Registry for ROI heads in a generalized R-CNN model. +ROIHeads take feature maps and region proposals, and +perform per-region computation. +The registered object will be called with `obj(cfg, input_shape)`. +The call is expected to return an :class:`ROIHeads`. +""" + + +def build_heads(cfg): + """ + Build REIDHeads defined by `cfg.MODEL.REID_HEADS.NAME`. + """ + head = cfg.MODEL.HEADS.NAME + return REID_HEADS_REGISTRY.get(head)(cfg) diff --git a/thirdparty/fast-reid/fastreid/modeling/heads/embedding_head.py b/thirdparty/fast-reid/fastreid/modeling/heads/embedding_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d3e40091100d652421ad3bb2ec24a4cd04b023e9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/heads/embedding_head.py @@ -0,0 +1,97 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch.nn.functional as F +from torch import nn + +from fastreid.layers import * +from fastreid.utils.weight_init import weights_init_kaiming, weights_init_classifier +from .build import REID_HEADS_REGISTRY + + +@REID_HEADS_REGISTRY.register() +class EmbeddingHead(nn.Module): + def __init__(self, cfg): + super().__init__() + # fmt: off + feat_dim = cfg.MODEL.BACKBONE.FEAT_DIM + embedding_dim = cfg.MODEL.HEADS.EMBEDDING_DIM + num_classes = cfg.MODEL.HEADS.NUM_CLASSES + neck_feat = cfg.MODEL.HEADS.NECK_FEAT + pool_type = cfg.MODEL.HEADS.POOL_LAYER + cls_type = cfg.MODEL.HEADS.CLS_LAYER + with_bnneck = cfg.MODEL.HEADS.WITH_BNNECK + norm_type = cfg.MODEL.HEADS.NORM + + if pool_type == 'fastavgpool': self.pool_layer = FastGlobalAvgPool2d() + elif pool_type == 'avgpool': self.pool_layer = nn.AdaptiveAvgPool2d(1) + elif pool_type == 'maxpool': self.pool_layer = nn.AdaptiveMaxPool2d(1) + elif pool_type == 'gempoolP': self.pool_layer = GeneralizedMeanPoolingP() + elif pool_type == 'gempool': self.pool_layer = GeneralizedMeanPooling() + elif pool_type == "avgmaxpool": self.pool_layer = AdaptiveAvgMaxPool2d() + elif pool_type == 'clipavgpool': self.pool_layer = ClipGlobalAvgPool2d() + elif pool_type == "identity": self.pool_layer = nn.Identity() + elif pool_type == "flatten": self.pool_layer = Flatten() + else: raise KeyError(f"{pool_type} is not supported!") + # fmt: on + + self.neck_feat = neck_feat + + bottleneck = [] + if embedding_dim > 0: + bottleneck.append(nn.Conv2d(feat_dim, embedding_dim, 1, 1, bias=False)) + feat_dim = embedding_dim + + if with_bnneck: + 
bottleneck.append(get_norm(norm_type, feat_dim, bias_freeze=True)) + + self.bottleneck = nn.Sequential(*bottleneck) + + # identity classification layer + # fmt: off + if cls_type == 'linear': self.classifier = nn.Linear(feat_dim, num_classes, bias=False) + elif cls_type == 'arcSoftmax': self.classifier = ArcSoftmax(cfg, feat_dim, num_classes) + elif cls_type == 'circleSoftmax': self.classifier = CircleSoftmax(cfg, feat_dim, num_classes) + elif cls_type == 'amSoftmax': self.classifier = AMSoftmax(cfg, feat_dim, num_classes) + else: raise KeyError(f"{cls_type} is not supported!") + # fmt: on + + self.bottleneck.apply(weights_init_kaiming) + self.classifier.apply(weights_init_classifier) + + def forward(self, features, targets=None): + """ + See :class:`ReIDHeads.forward`. + """ + global_feat = self.pool_layer(features) + bn_feat = self.bottleneck(global_feat) + bn_feat = bn_feat[..., 0, 0] + + # Evaluation + # fmt: off + if not self.training: return bn_feat + # fmt: on + + # Training + if self.classifier.__class__.__name__ == 'Linear': + cls_outputs = self.classifier(bn_feat) + pred_class_logits = F.linear(bn_feat, self.classifier.weight) + else: + cls_outputs = self.classifier(bn_feat, targets) + pred_class_logits = self.classifier.s * F.linear(F.normalize(bn_feat), + F.normalize(self.classifier.weight)) + + # fmt: off + if self.neck_feat == "before": feat = global_feat[..., 0, 0] + elif self.neck_feat == "after": feat = bn_feat + else: raise KeyError(f"{self.neck_feat} is invalid for MODEL.HEADS.NECK_FEAT") + # fmt: on + + return { + "cls_outputs": cls_outputs, + "pred_class_logits": pred_class_logits, + "features": feat, + } diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/__init__.py b/thirdparty/fast-reid/fastreid/modeling/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..351625896d413bc06c020bca96d5e0b0bca7987f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/__init__.py @@ -0,0 +1,10 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .cross_entroy_loss import cross_entropy_loss, log_accuracy +from .focal_loss import focal_loss +from .triplet_loss import triplet_loss +from .circle_loss import circle_loss diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/circle_loss.py b/thirdparty/fast-reid/fastreid/modeling/losses/circle_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..c95aac27615e2a227400f0f1ede8d9cb6eec8450 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/circle_loss.py @@ -0,0 +1,52 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F +from torch import nn + +from fastreid.utils import comm +from .utils import concat_all_gather + + +def circle_loss( + embedding: torch.Tensor, + targets: torch.Tensor, + margin: float, + alpha: float,) -> torch.Tensor: + embedding = nn.functional.normalize(embedding, dim=1) + + if comm.get_world_size() > 1: + all_embedding = concat_all_gather(embedding) + all_targets = concat_all_gather(targets) + else: + all_embedding = embedding + all_targets = targets + + dist_mat = torch.matmul(all_embedding, all_embedding.t()) + + N = dist_mat.size(0) + is_pos = all_targets.view(N, 1).expand(N, N).eq(all_targets.view(N, 1).expand(N, N).t()).float() + + # Compute the mask which ignores the relevance score of the query to itself + is_pos = is_pos - torch.eye(N, N, device=is_pos.device) + + is_neg = all_targets.view(N, 
1).expand(N, N).ne(all_targets.view(N, 1).expand(N, N).t()) + + s_p = dist_mat * is_pos + s_n = dist_mat * is_neg + + alpha_p = torch.clamp_min(-s_p.detach() + 1 + margin, min=0.) + alpha_n = torch.clamp_min(s_n.detach() + margin, min=0.) + delta_p = 1 - margin + delta_n = margin + + logit_p = - alpha * alpha_p * (s_p - delta_p) + logit_n = alpha * alpha_n * (s_n - delta_n) + + loss = nn.functional.softplus(torch.logsumexp(logit_p, dim=1) + torch.logsumexp(logit_n, dim=1)).mean() + + return loss diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/cross_entroy_loss.py b/thirdparty/fast-reid/fastreid/modeling/losses/cross_entroy_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..c75becc13b7ccd7a2e7937d2bc006dee15f4a8d5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/cross_entroy_loss.py @@ -0,0 +1,62 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" +import torch +import torch.nn.functional as F + +from fastreid.utils.events import get_event_storage + + +def log_accuracy(pred_class_logits, gt_classes, topk=(1,)): + """ + Log the accuracy metrics to EventStorage. + """ + bsz = pred_class_logits.size(0) + maxk = max(topk) + _, pred_class = pred_class_logits.topk(maxk, 1, True, True) + pred_class = pred_class.t() + correct = pred_class.eq(gt_classes.view(1, -1).expand_as(pred_class)) + + ret = [] + for k in topk: + correct_k = correct[:k].view(-1).float().sum(dim=0, keepdim=True) + ret.append(correct_k.mul_(1. / bsz)) + + storage = get_event_storage() + storage.put_scalar("cls_accuracy", ret[0]) + + +def cross_entropy_loss(pred_class_outputs, gt_classes, eps, alpha=0.2): + num_classes = pred_class_outputs.size(1) + + if eps >= 0: + smooth_param = eps + else: + # Adaptive label smooth regularization + soft_label = F.softmax(pred_class_outputs, dim=1) + smooth_param = alpha * soft_label[torch.arange(soft_label.size(0)), gt_classes].unsqueeze(1) + + log_probs = F.log_softmax(pred_class_outputs, dim=1) + with torch.no_grad(): + targets = torch.ones_like(log_probs) + targets *= smooth_param / (num_classes - 1) + targets.scatter_(1, gt_classes.data.unsqueeze(1), (1 - smooth_param)) + + loss = (-targets * log_probs).sum(dim=1) + + """ + # confidence penalty + conf_penalty = 0.3 + probs = F.softmax(pred_class_logits, dim=1) + entropy = torch.sum(-probs * log_probs, dim=1) + loss = torch.clamp_min(loss - conf_penalty * entropy, min=0.) + """ + + with torch.no_grad(): + non_zero_cnt = max(loss.nonzero(as_tuple=False).size(0), 1) + + loss = loss.sum() / non_zero_cnt + + return loss diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/focal_loss.py b/thirdparty/fast-reid/fastreid/modeling/losses/focal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..8447b0890706ee5f05fbaaac3dd2d9f67eb0b020 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/focal_loss.py @@ -0,0 +1,92 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F + + +# based on: +# https://github.com/kornia/kornia/blob/master/kornia/losses/focal.py + +def focal_loss( + input: torch.Tensor, + target: torch.Tensor, + alpha: float, + gamma: float = 2.0, + reduction: str = 'mean') -> torch.Tensor: + r"""Criterion that computes Focal loss. + See :class:`fastreid.modeling.losses.FocalLoss` for details. + According to [1], the Focal loss is computed as follows: + .. 
math:: + \text{FL}(p_t) = -\alpha_t (1 - p_t)^{\gamma} \, \text{log}(p_t) + where: + - :math:`p_t` is the model's estimated probability for each class. + Arguments: + alpha (float): Weighting factor :math:`\alpha \in [0, 1]`. + gamma (float): Focusing parameter :math:`\gamma >= 0`. + reduction (str, optional): Specifies the reduction to apply to the + output: ‘none’ | ‘mean’ | ‘sum’. ‘none’: no reduction will be applied, + ‘mean’: the sum of the output will be divided by the number of elements + in the output, ‘sum’: the output will be summed. Default: ‘none’. + Shape: + - Input: :math:`(N, C, *)` where C = number of classes. + - Target: :math:`(N, *)` where each value is + :math:`0 ≤ targets[i] ≤ C−1`. + Examples: + >>> N = 5 # num_classes + >>> loss = FocalLoss(cfg) + >>> input = torch.randn(1, N, 3, 5, requires_grad=True) + >>> target = torch.empty(1, 3, 5, dtype=torch.long).random_(N) + >>> output = loss(input, target) + >>> output.backward() + References: + [1] https://arxiv.org/abs/1708.02002 + """ + if not torch.is_tensor(input): + raise TypeError("Input type is not a torch.Tensor. Got {}" + .format(type(input))) + + if not len(input.shape) >= 2: + raise ValueError("Invalid input shape, we expect BxCx*. Got: {}" + .format(input.shape)) + + if input.size(0) != target.size(0): + raise ValueError('Expected input batch_size ({}) to match target batch_size ({}).' + .format(input.size(0), target.size(0))) + + n = input.size(0) + out_size = (n,) + input.size()[2:] + if target.size()[1:] != input.size()[2:]: + raise ValueError('Expected target size {}, got {}'.format( + out_size, target.size())) + + if not input.device == target.device: + raise ValueError( + "input and target must be in the same device. Got: {}".format( + input.device, target.device)) + + # compute softmax over the classes axis + input_soft = F.softmax(input, dim=1) + + # create the labels one hot tensor + target_one_hot = F.one_hot(target, num_classes=input.shape[1]) + + # compute the actual focal loss + weight = torch.pow(-input_soft + 1., gamma) + + focal = -alpha * weight * torch.log(input_soft) + loss_tmp = torch.sum(target_one_hot * focal, dim=1) + + if reduction == 'none': + loss = loss_tmp + elif reduction == 'mean': + loss = torch.mean(loss_tmp) + elif reduction == 'sum': + loss = torch.sum(loss_tmp) + else: + raise NotImplementedError("Invalid reduction mode: {}" + .format(reduction)) + return loss diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/smooth_ap.py b/thirdparty/fast-reid/fastreid/modeling/losses/smooth_ap.py new file mode 100644 index 0000000000000000000000000000000000000000..6305ca7b9a9e3c3d0642add98ad641716c2ec07e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/smooth_ap.py @@ -0,0 +1,241 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on: +# https://github.com/Andrew-Brown1/Smooth_AP/blob/master/src/Smooth_AP_loss.py + +import torch +import torch.nn.functional as F + +from fastreid.utils import comm +from fastreid.modeling.losses.utils import concat_all_gather + + +def sigmoid(tensor, temp=1.0): + """ temperature controlled sigmoid + takes as input a torch tensor (tensor) and passes it through a sigmoid, controlled by temperature: temp + """ + exponent = -tensor / temp + # clamp the input tensor for stability + exponent = torch.clamp(exponent, min=-50, max=50) + y = 1.0 / (1.0 + torch.exp(exponent)) + return y + + +class SmoothAP(object): + r"""PyTorch implementation of the Smooth-AP loss. 
+ implementation of the Smooth-AP loss. Takes as input the mini-batch of CNN-produced feature embeddings and returns + the value of the Smooth-AP loss. The mini-batch must be formed of a defined number of classes. Each class must + have the same number of instances represented in the mini-batch and must be ordered sequentially by class. + e.g. the labels for a mini-batch with batch size 9, and 3 represented classes (A,B,C) must look like: + labels = ( A, A, A, B, B, B, C, C, C) + (the order of the classes however does not matter) + For each instance in the mini-batch, the loss computes the Smooth-AP when it is used as the query and the rest of the + mini-batch is used as the retrieval set. The positive set is formed of the other instances in the batch from the + same class. The loss returns the average Smooth-AP across all instances in the mini-batch. + Args: + anneal : float + the temperature of the sigmoid that is used to smooth the ranking function. A low value of the temperature + results in a steep sigmoid, that tightly approximates the heaviside step function in the ranking function. + batch_size : int + the batch size being used during training. + num_id : int + the number of different classes that are represented in the batch. + feat_dims : int + the dimension of the input feature embeddings + Shape: + - Input (preds): (batch_size, feat_dims) (must be a cuda torch float tensor) + - Output: scalar + Examples:: + >>> loss = SmoothAP(0.01, 60, 6, 256) + >>> input = torch.randn(60, 256, requires_grad=True).cuda() + >>> output = loss(input) + >>> output.backward() + """ + + def __init__(self, cfg): + r""" + Parameters + ---------- + cfg: (cfgNode) + + anneal : float + the temperature of the sigmoid that is used to smooth the ranking function + batch_size : int + the batch size being used + num_id : int + the number of different classes that are represented in the batch + feat_dims : int + the dimension of the input feature embeddings + """ + + self.anneal = 0.01 + self.num_id = cfg.SOLVER.IMS_PER_BATCH // cfg.DATALOADER.NUM_INSTANCE + # self.num_id = 6 + + def __call__(self, embedding, targets): + """Forward pass for all input predictions: preds - (batch_size x feat_dims) """ + + # ------ differentiable ranking of all retrieval set ------ + embedding = F.normalize(embedding, dim=1) + + feat_dim = embedding.size(1) + + # For distributed training, gather all features from different process. 
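The `sigmoid(tensor, temp)` helper above is what makes the ranking differentiable: with a small temperature it closely approximates the Heaviside step used in ordinary rank counting. A quick standalone illustration (values rounded):

```python
import torch

def soft_step(t: torch.Tensor, temp: float) -> torch.Tensor:
    # Same temperature-controlled sigmoid as in smooth_ap.py,
    # with the input clamped for numerical stability.
    return torch.sigmoid(torch.clamp(t / temp, min=-50, max=50))

diffs = torch.tensor([-0.30, -0.01, 0.0, 0.01, 0.30])
print(soft_step(diffs, temp=1.0))   # gentle:    ~[0.43, 0.50, 0.50, 0.50, 0.57]
print(soft_step(diffs, temp=0.01))  # near-step: ~[0.00, 0.27, 0.50, 0.73, 1.00]
```

Summing these soft comparisons over the batch (plus one) is exactly how the code that follows builds the smoothed rank `sim_all_rk`.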
+ if comm.get_world_size() > 1: + all_embedding = concat_all_gather(embedding) + all_targets = concat_all_gather(targets) + else: + all_embedding = embedding + all_targets = targets + + sim_dist = torch.matmul(embedding, all_embedding.t()) + N, M = sim_dist.size() + + # Compute the mask which ignores the relevance score of the query to itself + mask_indx = 1.0 - torch.eye(M, device=sim_dist.device) + mask_indx = mask_indx.unsqueeze(dim=0).repeat(N, 1, 1) # (N, M, M) + + # sim_dist -> N, 1, M -> N, M, N + sim_dist_repeat = sim_dist.unsqueeze(dim=1).repeat(1, M, 1) # (N, M, M) + # sim_dist_repeat_t = sim_dist.t().unsqueeze(dim=1).repeat(1, N, 1) # (N, N, M) + + # Compute the difference matrix + sim_diff = sim_dist_repeat - sim_dist_repeat.permute(0, 2, 1) # (N, M, M) + + # Pass through the sigmoid + sim_sg = sigmoid(sim_diff, temp=self.anneal) * mask_indx + + # Compute all the rankings + sim_all_rk = torch.sum(sim_sg, dim=-1) + 1 # (N, N) + + pos_mask = targets.view(N, 1).expand(N, M).eq(all_targets.view(M, 1).expand(M, N).t()).float() # (N, M) + + pos_mask_repeat = pos_mask.unsqueeze(1).repeat(1, M, 1) # (N, M, M) + + # Compute positive rankings + pos_sim_sg = sim_sg * pos_mask_repeat + sim_pos_rk = torch.sum(pos_sim_sg, dim=-1) + 1 # (N, N) + + # sum the values of the Smooth-AP for all instances in the mini-batch + ap = 0 + group = N // self.num_id + for ind in range(self.num_id): + pos_divide = torch.sum( + sim_pos_rk[(ind * group):((ind + 1) * group), (ind * group):((ind + 1) * group)] / (sim_all_rk[(ind * group):((ind + 1) * group), (ind * group):((ind + 1) * group)])) + ap += pos_divide / torch.sum(pos_mask[ind*group]) / N + return 1 - ap + + +class SmoothAP_old(torch.nn.Module): + """PyTorch implementation of the Smooth-AP loss. + implementation of the Smooth-AP loss. Takes as input the mini-batch of CNN-produced feature embeddings and returns + the value of the Smooth-AP loss. The mini-batch must be formed of a defined number of classes. Each class must + have the same number of instances represented in the mini-batch and must be ordered sequentially by class. + e.g. the labels for a mini-batch with batch size 9, and 3 represented classes (A,B,C) must look like: + labels = ( A, A, A, B, B, B, C, C, C) + (the order of the classes however does not matter) + For each instance in the mini-batch, the loss computes the Smooth-AP when it is used as the query and the rest of the + mini-batch is used as the retrieval set. The positive set is formed of the other instances in the batch from the + same class. The loss returns the average Smooth-AP across all instances in the mini-batch. + Args: + anneal : float + the temperature of the sigmoid that is used to smooth the ranking function. A low value of the temperature + results in a steep sigmoid, that tightly approximates the heaviside step function in the ranking function. + batch_size : int + the batch size being used during training. + num_id : int + the number of different classes that are represented in the batch. 
+ feat_dims : int + the dimension of the input feature embeddings + Shape: + - Input (preds): (batch_size, feat_dims) (must be a cuda torch float tensor) + - Output: scalar + Examples:: + >>> loss = SmoothAP(0.01, 60, 6, 256) + >>> input = torch.randn(60, 256, requires_grad=True).cuda() + >>> output = loss(input) + >>> output.backward() + """ + + def __init__(self, anneal, batch_size, num_id, feat_dims): + """ + Parameters + ---------- + anneal : float + the temperature of the sigmoid that is used to smooth the ranking function + batch_size : int + the batch size being used + num_id : int + the number of different classes that are represented in the batch + feat_dims : int + the dimension of the input feature embeddings + """ + super().__init__() + + assert(batch_size%num_id==0) + + self.anneal = anneal + self.batch_size = batch_size + self.num_id = num_id + self.feat_dims = feat_dims + + def forward(self, preds): + """Forward pass for all input predictions: preds - (batch_size x feat_dims) """ + + preds = F.normalize(preds, dim=1) + # ------ differentiable ranking of all retrieval set ------ + # compute the mask which ignores the relevance score of the query to itself + mask = 1.0 - torch.eye(self.batch_size) + mask = mask.unsqueeze(dim=0).repeat(self.batch_size, 1, 1) + # compute the relevance scores via cosine similarity of the CNN-produced embedding vectors + sim_all = torch.mm(preds, preds.t()) + sim_all_repeat = sim_all.unsqueeze(dim=1).repeat(1, self.batch_size, 1) + # compute the difference matrix + sim_diff = sim_all_repeat - sim_all_repeat.permute(0, 2, 1) + # pass through the sigmoid + sim_sg = sigmoid(sim_diff, temp=self.anneal) * mask + # compute the rankings + sim_all_rk = torch.sum(sim_sg, dim=-1) + 1 + + # ------ differentiable ranking of only positive set in retrieval set ------ + # compute the mask which only gives non-zero weights to the positive set + xs = preds.view(self.num_id, int(self.batch_size / self.num_id), self.feat_dims) + pos_mask = 1.0 - torch.eye(int(self.batch_size / self.num_id)) + pos_mask = pos_mask.unsqueeze(dim=0).unsqueeze(dim=0).repeat(self.num_id, int(self.batch_size / self.num_id), 1, 1) + # compute the relevance scores + sim_pos = torch.bmm(xs, xs.permute(0, 2, 1)) + sim_pos_repeat = sim_pos.unsqueeze(dim=2).repeat(1, 1, int(self.batch_size / self.num_id), 1) + # compute the difference matrix + sim_pos_diff = sim_pos_repeat - sim_pos_repeat.permute(0, 1, 3, 2) + # pass through the sigmoid + sim_pos_sg = sigmoid(sim_pos_diff, temp=self.anneal) * pos_mask + # compute the rankings of the positive set + sim_pos_rk = torch.sum(sim_pos_sg, dim=-1) + 1 + + # sum the values of the Smooth-AP for all instances in the mini-batch + ap = torch.zeros(1) + group = int(self.batch_size / self.num_id) + for ind in range(self.num_id): + pos_divide = torch.sum(sim_pos_rk[ind] / (sim_all_rk[(ind * group):((ind + 1) * group), (ind * group):((ind + 1) * group)])) + ap = ap + ((pos_divide / group) / self.batch_size) + + return 1-ap + +if __name__ == '__main__': + loss1 = SmoothAP(0.01) + loss2 = SmoothAP_old(0.01, 60, 6, 256) + + inputs = torch.randn(60, 256, requires_grad=True) + targets = [] + for i in range(6): + targets.extend([i]*10) + targets = torch.LongTensor(targets) + + output1 = loss1(inputs, targets) + output2 = loss2(inputs) + + print(torch.sum(output1 - output2)) diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/triplet_loss.py b/thirdparty/fast-reid/fastreid/modeling/losses/triplet_loss.py new file mode 100644 index 
0000000000000000000000000000000000000000..cc3f624305f982ac0f18581954967c2d1c86b89a --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/triplet_loss.py @@ -0,0 +1,121 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F + +from fastreid.utils import comm +from fastreid.layers import GatherLayer +from .utils import concat_all_gather, euclidean_dist, normalize + + +def softmax_weights(dist, mask): + max_v = torch.max(dist * mask, dim=1, keepdim=True)[0] + diff = dist - max_v + Z = torch.sum(torch.exp(diff) * mask, dim=1, keepdim=True) + 1e-6 # avoid division by zero + W = torch.exp(diff) * mask / Z + return W + + +def hard_example_mining(dist_mat, is_pos, is_neg): + """For each anchor, find the hardest positive and negative sample. + Args: + dist_mat: pair wise distance between samples, shape [N, M] + is_pos: positive index with shape [N, M] + is_neg: negative index with shape [N, M] + Returns: + dist_ap: pytorch Variable, distance(anchor, positive); shape [N] + dist_an: pytorch Variable, distance(anchor, negative); shape [N] + p_inds: pytorch LongTensor, with shape [N]; + indices of selected hard positive samples; 0 <= p_inds[i] <= N - 1 + n_inds: pytorch LongTensor, with shape [N]; + indices of selected hard negative samples; 0 <= n_inds[i] <= N - 1 + NOTE: Only consider the case in which all labels have same num of samples, + thus we can cope with all anchors in parallel. + """ + + assert len(dist_mat.size()) == 2 + N = dist_mat.size(0) + + # `dist_ap` means distance(anchor, positive) + # both `dist_ap` and `relative_p_inds` with shape [N, 1] + dist_ap, relative_p_inds = torch.max( + dist_mat[is_pos].contiguous().view(N, -1), 1, keepdim=True) + # `dist_an` means distance(anchor, negative) + # both `dist_an` and `relative_n_inds` with shape [N, 1] + dist_an, relative_n_inds = torch.min( + dist_mat[is_neg].contiguous().view(N, -1), 1, keepdim=True) + + # shape [N] + dist_ap = dist_ap.squeeze(1) + dist_an = dist_an.squeeze(1) + + return dist_ap, dist_an + + +def weighted_example_mining(dist_mat, is_pos, is_neg): + """For each anchor, find the weighted positive and negative sample. + Args: + dist_mat: pytorch Variable, pair wise distance between samples, shape [N, N] + is_pos: + is_neg: + Returns: + dist_ap: pytorch Variable, distance(anchor, positive); shape [N] + dist_an: pytorch Variable, distance(anchor, negative); shape [N] + """ + assert len(dist_mat.size()) == 2 + + is_pos = is_pos.float() + is_neg = is_neg.float() + dist_ap = dist_mat * is_pos + dist_an = dist_mat * is_neg + + weights_ap = softmax_weights(dist_ap, is_pos) + weights_an = softmax_weights(-dist_an, is_neg) + + dist_ap = torch.sum(dist_ap * weights_ap, dim=1) + dist_an = torch.sum(dist_an * weights_an, dim=1) + + return dist_ap, dist_an + + +def triplet_loss(embedding, targets, margin, norm_feat, hard_mining): + r"""Modified from Tong Xiao's open-reid (https://github.com/Cysu/open-reid). + Related Triplet Loss theory can be found in paper 'In Defense of the Triplet + Loss for Person Re-Identification'.""" + + if norm_feat: embedding = normalize(embedding, axis=-1) + + # For distributed training, gather all features from different process. 
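`hard_example_mining` above picks, per anchor, the farthest positive and the closest negative; as its note says, this assumes every label has the same number of samples in the batch. A tiny standalone sketch with a hypothetical balanced batch of 4 samples and labels `[0, 0, 1, 1]` (the positive mask includes the anchor itself, as in `triplet_loss` above):

```python
import torch

# Hypothetical symmetric pairwise distances for labels [0, 0, 1, 1].
dist_mat = torch.tensor([[0.0, 0.8, 0.3, 0.9],
                         [0.8, 0.0, 1.2, 0.4],
                         [0.3, 1.2, 0.0, 0.7],
                         [0.9, 0.4, 0.7, 0.0]])
targets = torch.tensor([0, 0, 1, 1])
N = dist_mat.size(0)
is_pos = targets.view(N, 1).eq(targets.view(1, N))  # diagonal included
is_neg = targets.view(N, 1).ne(targets.view(1, N))

# Hardest positive: max distance among same-label pairs per row.
dist_ap = dist_mat[is_pos].view(N, -1).max(dim=1).values  # [0.8, 0.8, 0.7, 0.7]
# Hardest negative: min distance among different-label pairs per row.
dist_an = dist_mat[is_neg].view(N, -1).min(dim=1).values  # [0.3, 0.4, 0.3, 0.4]
print(dist_ap, dist_an)
```

With the margin ranking loss below, each anchor is then pushed until `dist_an >= dist_ap + margin`; anchor 0 here (0.8 vs 0.3) clearly violates that and contributes loss.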
+ if comm.get_world_size() > 1: + all_embedding = torch.cat(GatherLayer.apply(embedding), dim=0) + all_targets = concat_all_gather(targets) + else: + all_embedding = embedding + all_targets = targets + + dist_mat = euclidean_dist(all_embedding, all_embedding) + + N, N = dist_mat.size() + is_pos = all_targets.view(N, 1).expand(N, N).eq(all_targets.view(N, 1).expand(N, N).t()) + is_neg = all_targets.view(N, 1).expand(N, N).ne(all_targets.view(N, 1).expand(N, N).t()) + + if hard_mining: + dist_ap, dist_an = hard_example_mining(dist_mat, is_pos, is_neg) + else: + dist_ap, dist_an = weighted_example_mining(dist_mat, is_pos, is_neg) + + y = dist_an.new().resize_as_(dist_an).fill_(1) + + if margin > 0: + loss = F.margin_ranking_loss(dist_an, dist_ap, y, margin=margin) + else: + loss = F.soft_margin_loss(dist_an - dist_ap, y) + # fmt: off + if loss == float('Inf'): loss = F.margin_ranking_loss(dist_an, dist_ap, y, margin=0.3) + # fmt: on + + return loss diff --git a/thirdparty/fast-reid/fastreid/modeling/losses/utils.py b/thirdparty/fast-reid/fastreid/modeling/losses/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..664f0e8b2f589e6b6e15537da455af6e41088bc4 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/losses/utils.py @@ -0,0 +1,49 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch + + +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. + """ + tensors_gather = [torch.ones_like(tensor) + for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +def normalize(x, axis=-1): + """Normalizing to unit length along the specified dimension. + Args: + x: pytorch Variable + Returns: + x: pytorch Variable, same shape as input + """ + x = 1. 
* x / (torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12) + return x + + +def euclidean_dist(x, y): + m, n = x.size(0), y.size(0) + xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) + yy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t() + dist = xx + yy - 2 * torch.matmul(x, y.t()) + dist = dist.clamp(min=1e-12).sqrt() # for numerical stability + return dist + + +def cosine_dist(x, y): + bs1, bs2 = x.size(0), y.size(0) + frac_up = torch.matmul(x, y.transpose(0, 1)) + frac_down = (torch.sqrt(torch.sum(torch.pow(x, 2), 1))).view(bs1, 1).repeat(1, bs2) * \ + (torch.sqrt(torch.sum(torch.pow(y, 2), 1))).view(1, bs2).repeat(bs1, 1) + cosine = frac_up / frac_down + return 1 - cosine diff --git a/thirdparty/fast-reid/fastreid/modeling/meta_arch/__init__.py b/thirdparty/fast-reid/fastreid/modeling/meta_arch/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3b6b2651c0329f2931864eb7c2d5d31cd966825a --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/meta_arch/__init__.py @@ -0,0 +1,12 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .build import META_ARCH_REGISTRY, build_model + + +# import all the meta_arch, so they will be registered +from .baseline import Baseline +from .mgn import MGN diff --git a/thirdparty/fast-reid/fastreid/modeling/meta_arch/baseline.py b/thirdparty/fast-reid/fastreid/modeling/meta_arch/baseline.py new file mode 100644 index 0000000000000000000000000000000000000000..9db61cbead0e5c0f314246999e1b75a655a0e84b --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/meta_arch/baseline.py @@ -0,0 +1,116 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +from torch import nn + +from fastreid.modeling.backbones import build_backbone +from fastreid.modeling.heads import build_heads +from fastreid.modeling.losses import * +from .build import META_ARCH_REGISTRY + + +@META_ARCH_REGISTRY.register() +class Baseline(nn.Module): + def __init__(self, cfg): + super().__init__() + self._cfg = cfg + assert len(cfg.MODEL.PIXEL_MEAN) == len(cfg.MODEL.PIXEL_STD) + self.register_buffer("pixel_mean", torch.tensor(cfg.MODEL.PIXEL_MEAN).view(1, -1, 1, 1)) + self.register_buffer("pixel_std", torch.tensor(cfg.MODEL.PIXEL_STD).view(1, -1, 1, 1)) + + # backbone + self.backbone = build_backbone(cfg) + + # head + self.heads = build_heads(cfg) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + images = self.preprocess_image(batched_inputs) + features = self.backbone(images) + + if self.training: + assert "targets" in batched_inputs, "Person ID annotation are missing in training!" + targets = batched_inputs["targets"].to(self.device) + + # PreciseBN flag, When do preciseBN on different dataset, the number of classes in new dataset + # may be larger than that in the original dataset, so the circle/arcface will + # throw an error. We just set all the targets to 0 to avoid this problem. + if targets.sum() < 0: targets.zero_() + + outputs = self.heads(features, targets) + return { + "outputs": outputs, + "targets": targets, + } + else: + outputs = self.heads(features) + return outputs + + def preprocess_image(self, batched_inputs): + r""" + Normalize and batch the input images. 
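`euclidean_dist` in `losses/utils.py` above avoids materializing per-pair differences by using the expansion `||x - y||^2 = ||x||^2 + ||y||^2 - 2 x·y`, clamped before the square root for numerical stability. A quick standalone sketch verifying the identity against `torch.cdist` (used here only as a reference, not in the diff):

```python
import torch

x, y = torch.randn(5, 8), torch.randn(7, 8)

# Expansion trick, as in losses/utils.py:
xx = x.pow(2).sum(1, keepdim=True)        # (5, 1), broadcasts over columns
yy = y.pow(2).sum(1, keepdim=True).t()    # (1, 7), broadcasts over rows
dist = (xx + yy - 2 * x @ y.t()).clamp(min=1e-12).sqrt()

# Direct reference computation of pairwise L2 distances.
ref = torch.cdist(x, y, p=2)
print(torch.allclose(dist, ref, atol=1e-5))  # True
```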
+ """ + if isinstance(batched_inputs, dict): + images = batched_inputs["images"].to(self.device) + elif isinstance(batched_inputs, torch.Tensor): + images = batched_inputs.to(self.device) + else: + raise TypeError("batched_inputs must be dict or torch.Tensor, but get {}".format(type(batched_inputs))) + + images.sub_(self.pixel_mean).div_(self.pixel_std) + return images + + def losses(self, outs): + r""" + Compute loss from modeling's outputs, the loss function input arguments + must be the same as the outputs of the model forwarding. + """ + # fmt: off + outputs = outs["outputs"] + gt_labels = outs["targets"] + # model predictions + pred_class_logits = outputs['pred_class_logits'].detach() + cls_outputs = outputs['cls_outputs'] + pred_features = outputs['features'] + # fmt: on + + # Log prediction accuracy + log_accuracy(pred_class_logits, gt_labels) + + loss_dict = {} + loss_names = self._cfg.MODEL.LOSSES.NAME + + if "CrossEntropyLoss" in loss_names: + loss_dict['loss_cls'] = cross_entropy_loss( + cls_outputs, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE + + if "TripletLoss" in loss_names: + loss_dict['loss_triplet'] = triplet_loss( + pred_features, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE + + if "CircleLoss" in loss_names: + loss_dict['loss_circle'] = circle_loss( + pred_features, + gt_labels, + self._cfg.MODEL.LOSSES.CIRCLE.MARGIN, + self._cfg.MODEL.LOSSES.CIRCLE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CIRCLE.SCALE + + return loss_dict diff --git a/thirdparty/fast-reid/fastreid/modeling/meta_arch/build.py b/thirdparty/fast-reid/fastreid/modeling/meta_arch/build.py new file mode 100644 index 0000000000000000000000000000000000000000..7a4caa19284649cb1b321cfeef4bc04aa1b4f7f4 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/meta_arch/build.py @@ -0,0 +1,26 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import torch + +from fastreid.utils.registry import Registry + +META_ARCH_REGISTRY = Registry("META_ARCH") # noqa F401 isort:skip +META_ARCH_REGISTRY.__doc__ = """ +Registry for meta-architectures, i.e. the whole model. +The registered object will be called with `obj(cfg)` +and expected to return a `nn.Module` object. +""" + + +def build_model(cfg): + """ + Build the whole model architecture, defined by ``cfg.MODEL.META_ARCHITECTURE``. + Note that it does not load any weights from ``cfg``. 
+ """ + meta_arch = cfg.MODEL.META_ARCHITECTURE + model = META_ARCH_REGISTRY.get(meta_arch)(cfg) + model.to(torch.device(cfg.MODEL.DEVICE)) + return model diff --git a/thirdparty/fast-reid/fastreid/modeling/meta_arch/mgn.py b/thirdparty/fast-reid/fastreid/modeling/meta_arch/mgn.py new file mode 100644 index 0000000000000000000000000000000000000000..10f51da55667574d0bdd87e6b855d9a7d1823f7c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/modeling/meta_arch/mgn.py @@ -0,0 +1,280 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import copy + +import torch +from torch import nn + +from fastreid.layers import get_norm +from fastreid.modeling.backbones import build_backbone +from fastreid.modeling.backbones.resnet import Bottleneck +from fastreid.modeling.heads import build_heads +from fastreid.modeling.losses import * +from .build import META_ARCH_REGISTRY + + +@META_ARCH_REGISTRY.register() +class MGN(nn.Module): + def __init__(self, cfg): + super().__init__() + self._cfg = cfg + assert len(cfg.MODEL.PIXEL_MEAN) == len(cfg.MODEL.PIXEL_STD) + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(1, -1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(1, -1, 1, 1)) + + # fmt: off + # backbone + bn_norm = cfg.MODEL.BACKBONE.NORM + with_se = cfg.MODEL.BACKBONE.WITH_SE + # fmt :on + + backbone = build_backbone(cfg) + self.backbone = nn.Sequential( + backbone.conv1, + backbone.bn1, + backbone.relu, + backbone.maxpool, + backbone.layer1, + backbone.layer2, + backbone.layer3[0] + ) + res_conv4 = nn.Sequential(*backbone.layer3[1:]) + res_g_conv5 = backbone.layer4 + + res_p_conv5 = nn.Sequential( + Bottleneck(1024, 512, bn_norm, False, with_se, downsample=nn.Sequential( + nn.Conv2d(1024, 2048, 1, bias=False), get_norm(bn_norm, 2048))), + Bottleneck(2048, 512, bn_norm, False, with_se), + Bottleneck(2048, 512, bn_norm, False, with_se)) + res_p_conv5.load_state_dict(backbone.layer4.state_dict()) + + # branch1 + self.b1 = nn.Sequential( + copy.deepcopy(res_conv4), + copy.deepcopy(res_g_conv5) + ) + self.b1_head = build_heads(cfg) + + # branch2 + self.b2 = nn.Sequential( + copy.deepcopy(res_conv4), + copy.deepcopy(res_p_conv5) + ) + self.b2_head = build_heads(cfg) + self.b21_head = build_heads(cfg) + self.b22_head = build_heads(cfg) + + # branch3 + self.b3 = nn.Sequential( + copy.deepcopy(res_conv4), + copy.deepcopy(res_p_conv5) + ) + self.b3_head = build_heads(cfg) + self.b31_head = build_heads(cfg) + self.b32_head = build_heads(cfg) + self.b33_head = build_heads(cfg) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + images = self.preprocess_image(batched_inputs) + features = self.backbone(images) # (bs, 2048, 16, 8) + + # branch1 + b1_feat = self.b1(features) + + # branch2 + b2_feat = self.b2(features) + b21_feat, b22_feat = torch.chunk(b2_feat, 2, dim=2) + + # branch3 + b3_feat = self.b3(features) + b31_feat, b32_feat, b33_feat = torch.chunk(b3_feat, 3, dim=2) + + if self.training: + assert "targets" in batched_inputs, "Person ID annotation are missing in training!" 
+ targets = batched_inputs["targets"].long().to(self.device) + + if targets.sum() < 0: targets.zero_() + + b1_outputs = self.b1_head(b1_feat, targets) + b2_outputs = self.b2_head(b2_feat, targets) + b21_outputs = self.b21_head(b21_feat, targets) + b22_outputs = self.b22_head(b22_feat, targets) + b3_outputs = self.b3_head(b3_feat, targets) + b31_outputs = self.b31_head(b31_feat, targets) + b32_outputs = self.b32_head(b32_feat, targets) + b33_outputs = self.b33_head(b33_feat, targets) + + return { + "b1_outputs": b1_outputs, + "b2_outputs": b2_outputs, + "b21_outputs": b21_outputs, + "b22_outputs": b22_outputs, + "b3_outputs": b3_outputs, + "b31_outputs": b31_outputs, + "b32_outputs": b32_outputs, + "b33_outputs": b33_outputs, + "targets": targets, + } + else: + b1_pool_feat = self.b1_head(b1_feat) + b2_pool_feat = self.b2_head(b2_feat) + b21_pool_feat = self.b21_head(b21_feat) + b22_pool_feat = self.b22_head(b22_feat) + b3_pool_feat = self.b3_head(b3_feat) + b31_pool_feat = self.b31_head(b31_feat) + b32_pool_feat = self.b32_head(b32_feat) + b33_pool_feat = self.b33_head(b33_feat) + + pred_feat = torch.cat([b1_pool_feat, b2_pool_feat, b3_pool_feat, b21_pool_feat, + b22_pool_feat, b31_pool_feat, b32_pool_feat, b33_pool_feat], dim=1) + return pred_feat + + def preprocess_image(self, batched_inputs): + r""" + Normalize and batch the input images. + """ + if isinstance(batched_inputs, dict): + images = batched_inputs["images"].to(self.device) + elif isinstance(batched_inputs, torch.Tensor): + images = batched_inputs.to(self.device) + else: + raise TypeError("batched_inputs must be dict or torch.Tensor, but get {}".format(type(batched_inputs))) + + images.sub_(self.pixel_mean).div_(self.pixel_std) + return images + + def losses(self, outs): + # fmt: off + b1_outputs = outs["b1_outputs"] + b2_outputs = outs["b2_outputs"] + b21_outputs = outs["b21_outputs"] + b22_outputs = outs["b22_outputs"] + b3_outputs = outs["b3_outputs"] + b31_outputs = outs["b31_outputs"] + b32_outputs = outs["b32_outputs"] + b33_outputs = outs["b33_outputs"] + gt_labels = outs["targets"] + # model predictions + pred_class_logits = b1_outputs['pred_class_logits'].detach() + b1_logits = b1_outputs['cls_outputs'] + b2_logits = b2_outputs['cls_outputs'] + b21_logits = b21_outputs['cls_outputs'] + b22_logits = b22_outputs['cls_outputs'] + b3_logits = b3_outputs['cls_outputs'] + b31_logits = b31_outputs['cls_outputs'] + b32_logits = b32_outputs['cls_outputs'] + b33_logits = b33_outputs['cls_outputs'] + b1_pool_feat = b1_outputs['features'] + b2_pool_feat = b2_outputs['features'] + b3_pool_feat = b3_outputs['features'] + b21_pool_feat = b21_outputs['features'] + b22_pool_feat = b22_outputs['features'] + b31_pool_feat = b31_outputs['features'] + b32_pool_feat = b32_outputs['features'] + b33_pool_feat = b33_outputs['features'] + # fmt: on + + # Log prediction accuracy + log_accuracy(pred_class_logits, gt_labels) + + b22_pool_feat = torch.cat((b21_pool_feat, b22_pool_feat), dim=1) + b33_pool_feat = torch.cat((b31_pool_feat, b32_pool_feat, b33_pool_feat), dim=1) + + loss_dict = {} + loss_names = self._cfg.MODEL.LOSSES.NAME + + if "CrossEntropyLoss" in loss_names: + loss_dict['loss_cls_b1'] = cross_entropy_loss( + b1_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b2'] = cross_entropy_loss( + b2_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 
0.125 + loss_dict['loss_cls_b21'] = cross_entropy_loss( + b21_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b22'] = cross_entropy_loss( + b22_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b3'] = cross_entropy_loss( + b3_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b31'] = cross_entropy_loss( + b31_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b32'] = cross_entropy_loss( + b32_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + loss_dict['loss_cls_b33'] = cross_entropy_loss( + b33_logits, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE * 0.125 + + if "TripletLoss" in loss_names: + loss_dict['loss_triplet_b1'] = triplet_loss( + b1_pool_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE * 0.2 + loss_dict['loss_triplet_b2'] = triplet_loss( + b2_pool_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE * 0.2 + loss_dict['loss_triplet_b3'] = triplet_loss( + b3_pool_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE * 0.2 + loss_dict['loss_triplet_b22'] = triplet_loss( + b22_pool_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE * 0.2 + loss_dict['loss_triplet_b33'] = triplet_loss( + b33_pool_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE * 0.2 + + return loss_dict diff --git a/thirdparty/fast-reid/fastreid/solver/__init__.py b/thirdparty/fast-reid/fastreid/solver/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..385eb921ce40469211f9e4ae909e4746273e9d3c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/__init__.py @@ -0,0 +1,8 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + + +from .build import build_lr_scheduler, build_optimizer \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/solver/build.py b/thirdparty/fast-reid/fastreid/solver/build.py new file mode 100644 index 0000000000000000000000000000000000000000..0b04d825dbd6295e503423c9e9498b7c57b54254 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/build.py @@ -0,0 +1,52 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from . import lr_scheduler +from . 
import optim + + +def build_optimizer(cfg, model): + params = [] + for key, value in model.named_parameters(): + if not value.requires_grad: continue + + lr = cfg.SOLVER.BASE_LR + weight_decay = cfg.SOLVER.WEIGHT_DECAY + if "heads" in key: + lr *= cfg.SOLVER.HEADS_LR_FACTOR + if "bias" in key: + lr *= cfg.SOLVER.BIAS_LR_FACTOR + weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS + params += [{"name": key, "params": [value], "lr": lr, "weight_decay": weight_decay, "freeze": False}] + + solver_opt = cfg.SOLVER.OPT + # fmt: off + if solver_opt == "SGD": opt_fns = getattr(optim, solver_opt)(params, momentum=cfg.SOLVER.MOMENTUM) + else: opt_fns = getattr(optim, solver_opt)(params) + # fmt: on + return opt_fns + + +def build_lr_scheduler(cfg, optimizer): + scheduler_args = { + "optimizer": optimizer, + + # warmup options + "warmup_factor": cfg.SOLVER.WARMUP_FACTOR, + "warmup_iters": cfg.SOLVER.WARMUP_ITERS, + "warmup_method": cfg.SOLVER.WARMUP_METHOD, + + # multi-step lr scheduler options + "milestones": cfg.SOLVER.STEPS, + "gamma": cfg.SOLVER.GAMMA, + + # cosine annealing lr scheduler options + "max_iters": cfg.SOLVER.MAX_ITER, + "delay_iters": cfg.SOLVER.DELAY_ITERS, + "eta_min_lr": cfg.SOLVER.ETA_MIN_LR, + + } + return getattr(lr_scheduler, cfg.SOLVER.SCHED)(**scheduler_args) diff --git a/thirdparty/fast-reid/fastreid/solver/lr_scheduler.py b/thirdparty/fast-reid/fastreid/solver/lr_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..3563230c97a7d1cadb3a6632aa8a1d78e49c06e8 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/lr_scheduler.py @@ -0,0 +1,143 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import math +from bisect import bisect_right +from typing import List + +import torch +from torch.optim.lr_scheduler import _LRScheduler + +__all__ = ["WarmupMultiStepLR", "WarmupCosineAnnealingLR"] + + +class WarmupMultiStepLR(_LRScheduler): + def __init__( + self, + optimizer: torch.optim.Optimizer, + milestones: List[int], + gamma: float = 0.1, + warmup_factor: float = 0.001, + warmup_iters: int = 1000, + warmup_method: str = "linear", + last_epoch: int = -1, + **kwargs, + ): + if not list(milestones) == sorted(milestones): + raise ValueError( + "Milestones should be a list of increasing integers. Got {}".format(milestones) + ) + self.milestones = milestones + self.gamma = gamma + self.warmup_factor = warmup_factor + self.warmup_iters = warmup_iters + self.warmup_method = warmup_method + super().__init__(optimizer, last_epoch) + + def get_lr(self) -> List[float]: + warmup_factor = _get_warmup_factor_at_iter( + self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor + ) + return [ + base_lr * warmup_factor * self.gamma ** bisect_right(self.milestones, self.last_epoch) + for base_lr in self.base_lrs + ] + + def _compute_values(self) -> List[float]: + # The new interface + return self.get_lr() + + +class WarmupCosineAnnealingLR(_LRScheduler): + r"""Set the learning rate of each parameter group using a cosine annealing + schedule, where :math:`\eta_{max}` is set to the initial lr and + :math:`T_{cur}` is the number of epochs since the last restart in SGDR: + + .. math:: + \eta_t = \eta_{min} + \frac{1}{2}(\eta_{max} - \eta_{min})(1 + + \cos(\frac{T_{cur}}{T_{max}}\pi)) + + When last_epoch=-1, sets initial lr as lr. + + It has been proposed in + `SGDR: Stochastic Gradient Descent with Warm Restarts`_. Note that this only + implements the cosine annealing part of SGDR, and not the restarts.
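The warmup → hold → cosine-decay schedule that `WarmupCosineAnnealingLR` implements can be read as a pure function of the iteration index. A standalone sketch, with made-up hyperparameters (`base_lr`, `warmup_iters`, `delay_iters`, `max_iters` are illustrative only):

```python
import math

def lr_at(it, base_lr=0.1, warmup_factor=0.001, warmup_iters=10,
          delay_iters=20, max_iters=100, eta_min=1e-7):
    if it <= warmup_iters:  # linear warmup from base_lr * warmup_factor
        alpha = it / warmup_iters
        return base_lr * (warmup_factor * (1 - alpha) + alpha)
    if it <= delay_iters:   # hold base_lr until the delay ends
        return base_lr
    # cosine decay from base_lr down to eta_min
    t = (it - delay_iters) / (max_iters - delay_iters)
    return eta_min + (base_lr - eta_min) * (1 + math.cos(math.pi * t)) / 2

for it in (0, 5, 10, 20, 60, 100):
    print(it, round(lr_at(it), 6))  # 0.0001 -> 0.05005 -> 0.1 -> 0.1 -> ~0.05 -> ~0
```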
+ + Args: + optimizer (Optimizer): Wrapped optimizer. + T_max (int): Maximum number of iterations. + eta_min (float): Minimum learning rate. Default: 0. + last_epoch (int): The index of last epoch. Default: -1. + + .. _SGDR\: Stochastic Gradient Descent with Warm Restarts: + https://arxiv.org/abs/1608.03983 + """ + + def __init__( + self, + optimizer: torch.optim.Optimizer, + max_iters: int, + delay_iters: int = 0, + eta_min_lr: int = 0, + warmup_factor: float = 0.001, + warmup_iters: int = 1000, + warmup_method: str = "linear", + last_epoch=-1, + **kwargs + ): + self.max_iters = max_iters + self.delay_iters = delay_iters + self.eta_min_lr = eta_min_lr + self.warmup_factor = warmup_factor + self.warmup_iters = warmup_iters + self.warmup_method = warmup_method + assert self.delay_iters >= self.warmup_iters, "Scheduler delay iters must be larger than warmup iters" + super(WarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch) + + def get_lr(self) -> List[float]: + if self.last_epoch <= self.warmup_iters: + warmup_factor = _get_warmup_factor_at_iter( + self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor, + ) + return [ + base_lr * warmup_factor for base_lr in self.base_lrs + ] + elif self.last_epoch <= self.delay_iters: + return self.base_lrs + + else: + return [ + self.eta_min_lr + (base_lr - self.eta_min_lr) * + (1 + math.cos( + math.pi * (self.last_epoch - self.delay_iters) / (self.max_iters - self.delay_iters))) / 2 + for base_lr in self.base_lrs] + + +def _get_warmup_factor_at_iter( + method: str, iter: int, warmup_iters: int, warmup_factor: float +) -> float: + """ + Return the learning rate warmup factor at a specific iteration. + See https://arxiv.org/abs/1706.02677 for more details. + Args: + method (str): warmup method; either "constant" or "linear". + iter (int): iteration at which to calculate the warmup factor. + warmup_iters (int): the number of warmup iterations. + warmup_factor (float): the base warmup factor (the meaning changes according + to the method used). + Returns: + float: the effective warmup factor at the given iteration. + """ + if iter >= warmup_iters: + return 1.0 + + if method == "constant": + return warmup_factor + elif method == "linear": + alpha = iter / warmup_iters + return warmup_factor * (1 - alpha) + alpha + else: + raise ValueError("Unknown warmup method: {}".format(method)) diff --git a/thirdparty/fast-reid/fastreid/solver/optim/__init__.py b/thirdparty/fast-reid/fastreid/solver/optim/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b47f158956ec8aa0d30b358ec8c14a836fb9b612 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/optim/__init__.py @@ -0,0 +1,5 @@ +from .lamb import Lamb +from .swa import SWA +from .adam import Adam +from .sgd import SGD + diff --git a/thirdparty/fast-reid/fastreid/solver/optim/adam.py b/thirdparty/fast-reid/fastreid/solver/optim/adam.py new file mode 100644 index 0000000000000000000000000000000000000000..3b515d9b24dfd9d215111f8b868003f495d54768 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/optim/adam.py @@ -0,0 +1,116 @@ +import math + +import torch +from torch.optim.optimizer import Optimizer + + +class Adam(Optimizer): + r"""Implements Adam algorithm. + It has been proposed in `Adam: A Method for Stochastic Optimization`_. + The implementation of the L2 penalty follows changes proposed in + `Decoupled Weight Decay Regularization`_. 
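For a single scalar parameter, the bias-corrected update in the `step` method below reduces to a few lines of arithmetic. This hand-rolled sketch mirrors only the math, not the class's bookkeeping; all numbers are made up.

```python
import math

# One Adam step on a scalar, mirroring the update in step() below.
lr, beta1, beta2, eps = 1e-3, 0.9, 0.999, 1e-8
grad, exp_avg, exp_avg_sq, step = 0.5, 0.0, 0.0, 1

exp_avg = beta1 * exp_avg + (1 - beta1) * grad             # first moment
exp_avg_sq = beta2 * exp_avg_sq + (1 - beta2) * grad ** 2  # second moment
bias_c1 = 1 - beta1 ** step
bias_c2 = 1 - beta2 ** step
denom = math.sqrt(exp_avg_sq) / math.sqrt(bias_c2) + eps
update = (lr / bias_c1) * exp_avg / denom
print(update)  # ~lr on the first step, since m_hat / sqrt(v_hat) ~ sign(grad)
```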
+ Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its square (default: (0.9, 0.999)) + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-8) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + amsgrad (boolean, optional): whether to use the AMSGrad variant of this + algorithm from the paper `On the Convergence of Adam and Beyond`_ + (default: False) + .. _Adam\: A Method for Stochastic Optimization: + https://arxiv.org/abs/1412.6980 + .. _Decoupled Weight Decay Regularization: + https://arxiv.org/abs/1711.05101 + .. _On the Convergence of Adam and Beyond: + https://openreview.net/forum?id=ryQu7f-RZ + """ + + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, + weight_decay=0, amsgrad=False): + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) + if not 0.0 <= weight_decay: + raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay, amsgrad=amsgrad) + super(Adam, self).__init__(params, defaults) + + def __setstate__(self, state): + super(Adam, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('amsgrad', False) + + @torch.no_grad() + def step(self, closure=None): + """Performs a single optimization step. + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + for group in self.param_groups: + if group['freeze']: continue + + for p in group['params']: + if p.grad is None: + continue + grad = p.grad + if grad.is_sparse: + raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead') + amsgrad = group['amsgrad'] + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + if amsgrad: + # Maintains max of all exp. moving avg. of sq. grad. values + state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + if amsgrad: + max_exp_avg_sq = state['max_exp_avg_sq'] + beta1, beta2 = group['betas'] + + state['step'] += 1 + bias_correction1 = 1 - beta1 ** state['step'] + bias_correction2 = 1 - beta2 ** state['step'] + + if group['weight_decay'] != 0: + grad = grad.add(p, alpha=group['weight_decay']) + + # Decay the first and second moment running average coefficient + exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1) + exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2) + if amsgrad: + # Maintains the maximum of all 2nd moment running avg. 
till now + torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq) + # Use the max. for normalizing running avg. of gradient + denom = (max_exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps']) + else: + denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps']) + + step_size = group['lr'] / bias_correction1 + + p.addcdiv_(exp_avg, denom, value=-step_size) + + return loss diff --git a/thirdparty/fast-reid/fastreid/solver/optim/lamb.py b/thirdparty/fast-reid/fastreid/solver/optim/lamb.py new file mode 100644 index 0000000000000000000000000000000000000000..1650b74ed0bdd46860470147d1421ccd0b59b9f0 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/optim/lamb.py @@ -0,0 +1,123 @@ +#### +# CODE TAKEN FROM https://github.com/mgrankin/over9000 +#### + +import collections + +import torch +from torch.optim.optimizer import Optimizer +from torch.utils.tensorboard import SummaryWriter + + +def log_lamb_rs(optimizer: Optimizer, event_writer: SummaryWriter, token_count: int): + """Log a histogram of trust ratio scalars in across layers.""" + results = collections.defaultdict(list) + for group in optimizer.param_groups: + for p in group['params']: + state = optimizer.state[p] + for i in ('weight_norm', 'adam_norm', 'trust_ratio'): + if i in state: + results[i].append(state[i]) + + for k, v in results.items(): + event_writer.add_histogram(f'lamb/{k}', torch.tensor(v), token_count) + + +class Lamb(Optimizer): + r"""Implements Lamb algorithm. + It has been proposed in `Large Batch Optimization for Deep Learning: Training BERT in 76 minutes`_. + Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its square (default: (0.9, 0.999)) + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-8) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + adam (bool, optional): always use trust ratio = 1, which turns this into + Adam. Useful for comparison purposes. + .. _Large Batch Optimization for Deep Learning: Training BERT in 76 minutes: + https://arxiv.org/abs/1904.00962 + """ + + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6, + weight_decay=0, adam=False): + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay) + self.adam = adam + super(Lamb, self).__init__(params, defaults) + + def step(self, closure=None): + """Performs a single optimization step. + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. 
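What distinguishes LAMB from Adam is the layer-wise trust ratio computed in the body below: ||w|| / ||adam_step||, with the weight norm clamped to [0, 10]. A minimal sketch on made-up tensors:

```python
import torch

w = torch.randn(256, 128)                  # a layer's weights
adam_step = torch.randn(256, 128) * 1e-3   # the Adam-style update for it

weight_norm = w.pow(2).sum().sqrt().clamp(0, 10)
adam_norm = adam_step.pow(2).sum().sqrt()
trust_ratio = 1.0 if weight_norm == 0 or adam_norm == 0 else weight_norm / adam_norm
# The per-layer update applied below is then -lr * trust_ratio * adam_step.
print(float(trust_ratio))
```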
+ """ + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None or group['freeze']: + continue + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('Lamb does not support sparse gradients, consider SparseAdam instad.') + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p.data) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p.data) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + beta1, beta2 = group['betas'] + + state['step'] += 1 + + # Decay the first and second moment running average coefficient + # m_t + exp_avg.mul_(beta1).add_(1 - beta1, grad) + # v_t + exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) + + # Paper v3 does not use debiasing. + # bias_correction1 = 1 - beta1 ** state['step'] + # bias_correction2 = 1 - beta2 ** state['step'] + # Apply bias to lr to avoid broadcast. + step_size = group['lr'] # * math.sqrt(bias_correction2) / bias_correction1 + + weight_norm = p.data.pow(2).sum().sqrt().clamp(0, 10) + + adam_step = exp_avg / exp_avg_sq.sqrt().add(group['eps']) + if group['weight_decay'] != 0: + adam_step.add_(group['weight_decay'], p.data) + + adam_norm = adam_step.pow(2).sum().sqrt() + if weight_norm == 0 or adam_norm == 0: + trust_ratio = 1 + else: + trust_ratio = weight_norm / adam_norm + state['weight_norm'] = weight_norm + state['adam_norm'] = adam_norm + state['trust_ratio'] = trust_ratio + if self.adam: + trust_ratio = 1 + + p.data.add_(-step_size * trust_ratio, adam_step) + + return loss diff --git a/thirdparty/fast-reid/fastreid/solver/optim/sgd.py b/thirdparty/fast-reid/fastreid/solver/optim/sgd.py new file mode 100644 index 0000000000000000000000000000000000000000..2114856d2d17a30da64215d7871071b7885eb616 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/optim/sgd.py @@ -0,0 +1,104 @@ +import torch +from torch.optim.optimizer import Optimizer, required + + +class SGD(Optimizer): + r"""Implements stochastic gradient descent (optionally with momentum). + Nesterov momentum is based on the formula from + `On the importance of initialization and momentum in deep learning`__. + Args: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float): learning rate + momentum (float, optional): momentum factor (default: 0) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + dampening (float, optional): dampening for momentum (default: 0) + nesterov (bool, optional): enables Nesterov momentum (default: False) + Example: + >>> optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9) + >>> optimizer.zero_grad() + >>> loss_fn(model(input), target).backward() + >>> optimizer.step() + __ http://www.cs.toronto.edu/%7Ehinton/absps/momentum.pdf + .. note:: + The implementation of SGD with Momentum/Nesterov subtly differs from + Sutskever et. al. and implementations in some other frameworks. + Considering the specific case of Momentum, the update can be written as + .. math:: + \begin{aligned} + v_{t+1} & = \mu * v_{t} + g_{t+1}, \\ + p_{t+1} & = p_{t} - \text{lr} * v_{t+1}, + \end{aligned} + where :math:`p`, :math:`g`, :math:`v` and :math:`\mu` denote the + parameters, gradient, velocity, and momentum respectively. + This is in contrast to Sutskever et. al. 
and + other frameworks which employ an update of the form + .. math:: + \begin{aligned} + v_{t+1} & = \mu * v_{t} + \text{lr} * g_{t+1}, \\ + p_{t+1} & = p_{t} - v_{t+1}. + \end{aligned} + The Nesterov version is analogously modified. + """ + + def __init__(self, params, lr=required, momentum=0, dampening=0, + weight_decay=0, nesterov=False): + if lr is not required and lr < 0.0: + raise ValueError("Invalid learning rate: {}".format(lr)) + if momentum < 0.0: + raise ValueError("Invalid momentum value: {}".format(momentum)) + if weight_decay < 0.0: + raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + + defaults = dict(lr=lr, momentum=momentum, dampening=dampening, + weight_decay=weight_decay, nesterov=nesterov) + if nesterov and (momentum <= 0 or dampening != 0): + raise ValueError("Nesterov momentum requires a momentum and zero dampening") + super(SGD, self).__init__(params, defaults) + + def __setstate__(self, state): + super(SGD, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('nesterov', False) + + @torch.no_grad() + def step(self, closure=None): + """Performs a single optimization step. + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + for group in self.param_groups: + if group['freeze']: continue + + weight_decay = group['weight_decay'] + momentum = group['momentum'] + dampening = group['dampening'] + nesterov = group['nesterov'] + + for p in group['params']: + if p.grad is None: + continue + d_p = p.grad + if weight_decay != 0: + d_p = d_p.add(p, alpha=weight_decay) + if momentum != 0: + param_state = self.state[p] + if 'momentum_buffer' not in param_state: + buf = param_state['momentum_buffer'] = torch.clone(d_p).detach() + else: + buf = param_state['momentum_buffer'] + buf.mul_(momentum).add_(d_p, alpha=1 - dampening) + if nesterov: + d_p = d_p.add(buf, alpha=momentum) + else: + d_p = buf + + p.add_(d_p, alpha=-group['lr']) + + return loss diff --git a/thirdparty/fast-reid/fastreid/solver/optim/swa.py b/thirdparty/fast-reid/fastreid/solver/optim/swa.py new file mode 100644 index 0000000000000000000000000000000000000000..1d45e02d193e26487500d6dc40b503b2f3a4f79a --- /dev/null +++ b/thirdparty/fast-reid/fastreid/solver/optim/swa.py @@ -0,0 +1,246 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" +# based on: +# https://github.com/pytorch/contrib/blob/master/torchcontrib/optim/swa.py + +import warnings +from collections import defaultdict + +import torch +from torch.optim.optimizer import Optimizer + + +class SWA(Optimizer): + def __init__(self, optimizer, swa_freq=None, swa_lr_factor=None): + r"""Implements Stochastic Weight Averaging (SWA). + Stochastic Weight Averaging was proposed in `Averaging Weights Leads to + Wider Optima and Better Generalization`_ by Pavel Izmailov, Dmitrii + Podoprikhin, Timur Garipov, Dmitry Vetrov and Andrew Gordon Wilson + (UAI 2018). + SWA is implemented as a wrapper class taking an optimizer instance as input + and applying SWA on top of that optimizer. + SWA can be used in two modes: automatic and manual. In the automatic + mode SWA running averages are automatically updated every + :attr:`swa_freq` steps of optimization. If + :attr:`swa_lr_factor` is provided, the learning rate of the optimizer is scaled + by that factor at every step in automatic mode.
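(An aside on the averaging arithmetic used by `update_swa_group` further below: the buffer moves toward the current weights by a virtual decay of 1/(n_avg + 1), which is exactly an incremental mean over the collected snapshots. A toy check with made-up values:)

```python
import torch

buf, n_avg = torch.zeros(3), 0
for p in [torch.tensor([1., 1., 1.]),
          torch.tensor([3., 3., 3.]),
          torch.tensor([5., 5., 5.])]:
    buf += (p - buf) / (n_avg + 1)  # incremental mean of all snapshots so far
    n_avg += 1
print(buf)  # tensor([3., 3., 3.]) == mean of the three snapshots
```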
To use + SWA in automatic mode provide a value for the + :attr:`swa_freq` argument. + Alternatively, in the manual mode, use :meth:`update_swa` or + :meth:`update_swa_group` methods to update the SWA running averages. + At the end of training use the `swap_swa_param` method to set the optimized + variables to the computed averages. + Args: + swa_freq (int): number of steps between subsequent updates of + SWA running averages in automatic mode; if None, manual mode is + selected (default: None) + swa_lr_factor (float): factor by which the base learning rate is scaled + in automatic mode; if None, the learning rate is not changed + (default: None) + Examples: + >>> # automatic mode + >>> base_opt = torch.optim.SGD(model.parameters(), lr=0.1) + >>> opt = SWA(base_opt, swa_freq=5, swa_lr_factor=0.5) + >>> for _ in range(100): + >>> opt.zero_grad() + >>> loss_fn(model(input), target).backward() + >>> opt.step() + >>> opt.swap_swa_param() + >>> # manual mode + >>> opt = SWA(base_opt) + >>> for i in range(100): + >>> opt.zero_grad() + >>> loss_fn(model(input), target).backward() + >>> opt.step() + >>> if i > 10 and i % 5 == 0: + >>> opt.update_swa() + >>> opt.swap_swa_param() + .. note:: + SWA does not support parameter-specific values of + :attr:`swa_freq` or :attr:`swa_lr_factor`. In automatic mode SWA uses the + same :attr:`swa_freq` and :attr:`swa_lr_factor` for all + parameter groups. If needed, use manual mode with + :meth:`update_swa_group` to use different update schedules for + different parameter groups. + .. note:: + Call :meth:`swap_swa_param` at the end of training to use the computed + running averages. + .. note:: + If you are using SWA to optimize the parameters of a Neural Network + containing Batch Normalization layers, you need to update the + :attr:`running_mean` and :attr:`running_var` statistics of the + Batch Normalization module. You can do so by using + `torchcontrib.optim.swa.bn_update` utility. + .. note:: + See the blogpost + https://pytorch.org/blog/stochastic-weight-averaging-in-pytorch/ + for an extended description of this SWA implementation. + .. note:: + The repo https://github.com/izmailovpavel/contrib_swa_examples + contains examples of using this SWA implementation. + .. _Averaging Weights Leads to Wider Optima and Better Generalization: + https://arxiv.org/abs/1803.05407 + .. _Improving Consistency-Based Semi-Supervised Learning with Weight + Averaging: + https://arxiv.org/abs/1806.05594 + """ + self._auto_mode, (self.swa_freq,) = self._check_params(swa_freq) + self.swa_lr_factor = swa_lr_factor + + if self._auto_mode: + if swa_freq < 1: + raise ValueError("Invalid swa_freq: {}".format(swa_freq)) + else: + if self.swa_lr_factor is not None: + warnings.warn( + "swa_freq is None, ignoring swa_lr_factor") + # If not in auto mode make all swa parameters None + self.swa_lr_factor = None + self.swa_freq = None + + if self.swa_lr_factor is not None and self.swa_lr_factor < 0: + raise ValueError("Invalid SWA learning rate factor: {}".format(swa_lr_factor)) + + self.optimizer = optimizer + + self.defaults = self.optimizer.defaults + self.param_groups = self.optimizer.param_groups + self.state = defaultdict(dict) + self.opt_state = self.optimizer.state + for group in self.param_groups: + group['n_avg'] = 0 + group['step_counter'] = 0 + + @staticmethod + def _check_params(swa_freq): + params = [swa_freq] + params_none = [param is None for param in params] + if not all(params_none) and any(params_none): + warnings.warn( + "Some of the SWA parameters are None, ignoring the others") + for i, param in enumerate(params): + if param is not None and not isinstance(param, int): + params[i] = int(param) + warnings.warn("Casting swa_freq to int") + return not any(params_none), params + + def reset_lr_to_swa(self): + for param_group in self.param_groups: + param_group['initial_lr'] = self.swa_lr_factor * param_group['lr'] + + def update_swa_group(self, group): + r"""Updates the SWA running averages for the given parameter group. + Arguments: + group (dict): Specifies for what parameter group SWA running + averages should be updated + Examples: + >>> # manual mode + >>> base_opt = torch.optim.SGD([{'params': [x]}, + >>> {'params': [y], 'lr': 1e-3}], lr=1e-2, momentum=0.9) + >>> opt = SWA(base_opt) + >>> for i in range(100): + >>> opt.zero_grad() + >>> loss_fn(model(input), target).backward() + >>> opt.step() + >>> if i > 10 and i % 5 == 0: + >>> # Update SWA for the second parameter group + >>> opt.update_swa_group(opt.param_groups[1]) + >>> opt.swap_swa_param() + """ + for p in group['params']: + param_state = self.state[p] + if 'swa_buffer' not in param_state: + param_state['swa_buffer'] = torch.zeros_like(p.data) + buf = param_state['swa_buffer'] + virtual_decay = 1 / float(group["n_avg"] + 1) + diff = (p.data - buf) * virtual_decay + buf.add_(diff) + group["n_avg"] += 1 + + def update_swa(self): + r"""Updates the SWA running averages of all optimized parameters. + """ + for group in self.param_groups: + self.update_swa_group(group) + + def swap_swa_param(self): + r"""Swaps the values of the optimized variables and swa buffers. + It's meant to be called at the end of training to use the collected + swa running averages. It can also be used to evaluate the running + averages during training; to continue training `swap_swa_param` + should be called again. + """ + for group in self.param_groups: + for p in group['params']: + param_state = self.state[p] + if 'swa_buffer' not in param_state: + # If swa wasn't applied we don't swap params + warnings.warn( + "SWA wasn't applied to param {}; skipping it".format(p)) + continue + buf = param_state['swa_buffer'] + tmp = torch.empty_like(p.data) + tmp.copy_(p.data) + p.data.copy_(buf) + buf.copy_(tmp) + + def step(self, closure=None): + r"""Performs a single optimization step.
+ In automatic mode also updates SWA running averages. + """ + loss = self.optimizer.step(closure) + for group in self.param_groups: + group["step_counter"] += 1 + steps = group["step_counter"] + if self._auto_mode: + if steps % self.swa_freq == 0: + self.update_swa_group(group) + return loss + + def state_dict(self): + r"""Returns the state of SWA as a :class:`dict`. + It contains three entries: + * opt_state - a dict holding current optimization state of the base + optimizer. Its content differs between optimizer classes. + * swa_state - a dict containing current state of SWA. For each + optimized variable it contains swa_buffer keeping the running + average of the variable + * param_groups - a dict containing all parameter groups + """ + opt_state_dict = self.optimizer.state_dict() + swa_state = {(id(k) if isinstance(k, torch.Tensor) else k): v + for k, v in self.state.items()} + opt_state = opt_state_dict["state"] + param_groups = opt_state_dict["param_groups"] + return {"opt_state": opt_state, "swa_state": swa_state, + "param_groups": param_groups} + + def load_state_dict(self, state_dict): + r"""Loads the optimizer state. + Args: + state_dict (dict): SWA optimizer state. Should be an object returned + from a call to `state_dict`. + """ + swa_state_dict = {"state": state_dict["swa_state"], + "param_groups": state_dict["param_groups"]} + opt_state_dict = {"state": state_dict["opt_state"], + "param_groups": state_dict["param_groups"]} + super(SWA, self).load_state_dict(swa_state_dict) + self.optimizer.load_state_dict(opt_state_dict) + self.opt_state = self.optimizer.state + + def add_param_group(self, param_group): + r"""Add a param group to the :class:`Optimizer` s `param_groups`. + This can be useful when fine tuning a pre-trained network as frozen + layers can be made trainable and added to the :class:`Optimizer` as + training progresses. + Args: + param_group (dict): Specifies what Tensors should be optimized along + with group specific optimization options. + """ + param_group['n_avg'] = 0 + param_group['step_counter'] = 0 + self.optimizer.add_param_group(param_group) diff --git a/thirdparty/fast-reid/fastreid/utils/__init__.py b/thirdparty/fast-reid/fastreid/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..42be7d8331fe0e1383877ae6ffc22b3e277e4e9f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/__init__.py @@ -0,0 +1,6 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + diff --git a/thirdparty/fast-reid/fastreid/utils/checkpoint.py b/thirdparty/fast-reid/fastreid/utils/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..9b28be2b1f4235382388ce04ec02068bcb4ef06e --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/checkpoint.py @@ -0,0 +1,478 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import copy +import logging +import os +from collections import defaultdict +from typing import Any +from typing import Optional, List, Dict, NamedTuple, Tuple, Iterable + +import numpy as np +import torch +import torch.nn as nn +from termcolor import colored +from torch.nn.parallel import DataParallel, DistributedDataParallel + +from fastreid.utils.file_io import PathManager + + +class _IncompatibleKeys( + NamedTuple( + # pyre-fixme[10]: Name `IncompatibleKeys` is used but not defined. 
+ "IncompatibleKeys", + [ + ("missing_keys", List[str]), + ("unexpected_keys", List[str]), + # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter. + # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter. + # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter. + ("incorrect_shapes", List[Tuple]), + ], + ) +): + pass + + +class Checkpointer(object): + """ + A checkpointer that can save/load model as well as extra checkpointable + objects. + """ + + def __init__( + self, + model: nn.Module, + save_dir: str = "", + *, + save_to_disk: bool = True, + **checkpointables: object, + ): + """ + Args: + model (nn.Module): model. + save_dir (str): a directory to save and find checkpoints. + save_to_disk (bool): if True, save checkpoint to disk, otherwise + disable saving for this checkpointer. + checkpointables (object): any checkpointable objects, i.e., objects + that have the `state_dict()` and `load_state_dict()` method. For + example, it can be used like + `Checkpointer(model, "dir", optimizer=optimizer)`. + """ + if isinstance(model, (DistributedDataParallel, DataParallel)): + model = model.module + self.model = model + self.checkpointables = copy.copy(checkpointables) + self.logger = logging.getLogger(__name__) + self.save_dir = save_dir + self.save_to_disk = save_to_disk + + self.path_manager = PathManager + + def save(self, name: str, **kwargs: Dict[str, str]): + """ + Dump model and checkpointables to a file. + Args: + name (str): name of the file. + kwargs (dict): extra arbitrary data to save. + """ + if not self.save_dir or not self.save_to_disk: + return + + data = {} + data["model"] = self.model.state_dict() + for key, obj in self.checkpointables.items(): + data[key] = obj.state_dict() + data.update(kwargs) + + basename = "{}.pth".format(name) + save_file = os.path.join(self.save_dir, basename) + assert os.path.basename(save_file) == basename, basename + self.logger.info("Saving checkpoint to {}".format(save_file)) + with PathManager.open(save_file, "wb") as f: + torch.save(data, f) + self.tag_last_checkpoint(basename) + + def load(self, path: str, checkpointables: Optional[List[str]] = None) -> object: + """ + Load from the given checkpoint. When path points to network file, this + function has to be called on all ranks. + Args: + path (str): path or url to the checkpoint. If empty, will not load + anything. + checkpointables (list): List of checkpointable names to load. If not + specified (None), will load all the possible checkpointables. + Returns: + dict: + extra data loaded from the checkpoint that has not been + processed. For example, those saved with + :meth:`.save(**extra_data)`. + """ + if not path: + # no checkpoint provided + self.logger.info("No checkpoint found. 
Training model from scratch") + return {} + self.logger.info("Loading checkpoint from {}".format(path)) + if not os.path.isfile(path): + path = self.path_manager.get_local_path(path) + assert os.path.isfile(path), "Checkpoint {} not found!".format(path) + + checkpoint = self._load_file(path) + incompatible = self._load_model(checkpoint) + if ( + incompatible is not None + ): # handle some existing subclasses that returns None + self._log_incompatible_keys(incompatible) + + for key in self.checkpointables if checkpointables is None else checkpointables: + if key in checkpoint: # pyre-ignore + self.logger.info("Loading {} from {}".format(key, path)) + obj = self.checkpointables[key] + obj.load_state_dict(checkpoint.pop(key)) # pyre-ignore + + # return any further checkpoint data + return checkpoint + + def has_checkpoint(self): + """ + Returns: + bool: whether a checkpoint exists in the target directory. + """ + save_file = os.path.join(self.save_dir, "last_checkpoint") + return PathManager.exists(save_file) + + def get_checkpoint_file(self): + """ + Returns: + str: The latest checkpoint file in target directory. + """ + save_file = os.path.join(self.save_dir, "last_checkpoint") + try: + with PathManager.open(save_file, "r") as f: + last_saved = f.read().strip() + except IOError: + # if file doesn't exist, maybe because it has just been + # deleted by a separate process + return "" + return os.path.join(self.save_dir, last_saved) + + def get_all_checkpoint_files(self): + """ + Returns: + list: All available checkpoint files (.pth files) in target + directory. + """ + all_model_checkpoints = [ + os.path.join(self.save_dir, file) + for file in PathManager.ls(self.save_dir) + if PathManager.isfile(os.path.join(self.save_dir, file)) + and file.endswith(".pth") + ] + return all_model_checkpoints + + def resume_or_load(self, path: str, *, resume: bool = True): + """ + If `resume` is True, this method attempts to resume from the last + checkpoint, if exists. Otherwise, load checkpoint from the given path. + This is useful when restarting an interrupted training job. + Args: + path (str): path to the checkpoint. + resume (bool): if True, resume from the last checkpoint if it exists. + Returns: + same as :meth:`load`. + """ + if resume and self.has_checkpoint(): + path = self.get_checkpoint_file() + return self.load(path) + else: + return self.load(path, checkpointables=[]) + + def tag_last_checkpoint(self, last_filename_basename: str): + """ + Tag the last checkpoint. + Args: + last_filename_basename (str): the basename of the last filename. + """ + save_file = os.path.join(self.save_dir, "last_checkpoint") + with PathManager.open(save_file, "w") as f: + f.write(last_filename_basename) + + def _load_file(self, f: str): + """ + Load a checkpoint file. Can be overwritten by subclasses to support + different formats. + Args: + f (str): a locally mounted file path. + Returns: + dict: with keys "model" and optionally others that are saved by + the checkpointer dict["model"] must be a dict which maps strings + to torch.Tensor or numpy arrays. + """ + return torch.load(f, map_location=torch.device("cpu")) + + def _load_model(self, checkpoint: Any): + """ + Load weights from a checkpoint. + Args: + checkpoint (Any): checkpoint contains the weights. 
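The shape-mismatch guard implemented in `_load_model` below (drop checkpoint tensors whose shapes disagree with the model, then load with `strict=False`) can be exercised independently; the `nn.Linear` model and checkpoint dict here are made-up examples.

```python
import torch
import torch.nn as nn

model = nn.Linear(4, 2)
ckpt = {"weight": torch.zeros(8, 8),   # wrong shape: will be skipped
        "bias": torch.zeros(2)}        # matching shape: will be loaded

model_sd = model.state_dict()
for k in list(ckpt.keys()):
    if k in model_sd and tuple(model_sd[k].shape) != tuple(ckpt[k].shape):
        ckpt.pop(k)  # mirrors the incorrect_shapes filtering below

result = model.load_state_dict(ckpt, strict=False)
print(result.missing_keys)  # ['weight'] survives only as a missing key
```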
+ """ + checkpoint_state_dict = checkpoint.pop("model") + self._convert_ndarray_to_tensor(checkpoint_state_dict) + + # if the state_dict comes from a model that was wrapped in a + # DataParallel or DistributedDataParallel during serialization, + # remove the "module" prefix before performing the matching. + _strip_prefix_if_present(checkpoint_state_dict, "module.") + + # work around https://github.com/pytorch/pytorch/issues/24139 + model_state_dict = self.model.state_dict() + incorrect_shapes = [] + for k in list(checkpoint_state_dict.keys()): + if k in model_state_dict: + shape_model = tuple(model_state_dict[k].shape) + shape_checkpoint = tuple(checkpoint_state_dict[k].shape) + if shape_model != shape_checkpoint: + incorrect_shapes.append((k, shape_checkpoint, shape_model)) + checkpoint_state_dict.pop(k) + + incompatible = self.model.load_state_dict(checkpoint_state_dict, strict=False) + return _IncompatibleKeys( + missing_keys=incompatible.missing_keys, + unexpected_keys=incompatible.unexpected_keys, + incorrect_shapes=incorrect_shapes, + ) + + def _log_incompatible_keys(self, incompatible: _IncompatibleKeys) -> None: + """ + Log information about the incompatible keys returned by ``_load_model``. + """ + for k, shape_checkpoint, shape_model in incompatible.incorrect_shapes: + self.logger.warning( + "Skip loading parameter '{}' to the model due to incompatible " + "shapes: {} in the checkpoint but {} in the " + "model! You might want to double check if this is expected.".format( + k, shape_checkpoint, shape_model + ) + ) + if incompatible.missing_keys: + missing_keys = _filter_reused_missing_keys( + self.model, incompatible.missing_keys + ) + if missing_keys: + self.logger.info(get_missing_parameters_message(missing_keys)) + if incompatible.unexpected_keys: + self.logger.info( + get_unexpected_parameters_message(incompatible.unexpected_keys) + ) + + def _convert_ndarray_to_tensor(self, state_dict: dict): + """ + In-place convert all numpy arrays in the state_dict to torch tensor. + Args: + state_dict (dict): a state-dict to be loaded to the model. + """ + # model could be an OrderedDict with _metadata attribute + # (as returned by Pytorch's state_dict()). We should preserve these + # properties. + for k in list(state_dict.keys()): + v = state_dict[k] + if not isinstance(v, np.ndarray) and not isinstance( + v, torch.Tensor + ): + raise ValueError( + "Unsupported type found in checkpoint! {}: {}".format( + k, type(v) + ) + ) + if not isinstance(v, torch.Tensor): + state_dict[k] = torch.from_numpy(v) + + +class PeriodicCheckpointer: + """ + Save checkpoints periodically. When `.step(iteration)` is called, it will + execute `checkpointer.save` on the given checkpointer, if iteration is a + multiple of period or if `max_iter` is reached. + """ + + def __init__(self, checkpointer: Any, period: int, max_iter: int = None): + """ + Args: + checkpointer (Any): the checkpointer object used to save + checkpoints. + period (int): the period to save checkpoint. + max_iter (int): maximum number of iterations. When it is reached, + a checkpoint named "model_final" will be saved. + """ + self.checkpointer = checkpointer + self.period = int(period) + self.max_iter = max_iter + + def step(self, iteration: int, **kwargs: Any): + """ + Perform the appropriate action at the given iteration. + Args: + iteration (int): the current iteration, ranged in [0, max_iter-1]. + kwargs (Any): extra data to save, same as in + :meth:`Checkpointer.save`. 
+ """ + iteration = int(iteration) + additional_state = {"iteration": iteration} + additional_state.update(kwargs) + if (iteration + 1) % self.period == 0: + self.checkpointer.save( + "model_{:07d}".format(iteration), **additional_state + ) + if iteration >= self.max_iter - 1: + self.checkpointer.save("model_final", **additional_state) + + def save(self, name: str, **kwargs: Any): + """ + Same argument as :meth:`Checkpointer.save`. + Use this method to manually save checkpoints outside the schedule. + Args: + name (str): file name. + kwargs (Any): extra data to save, same as in + :meth:`Checkpointer.save`. + """ + self.checkpointer.save(name, **kwargs) + + +def _filter_reused_missing_keys(model: nn.Module, keys: List[str]) -> List[str]: + """ + Filter "missing keys" to not include keys that have been loaded with another name. + """ + keyset = set(keys) + param_to_names = defaultdict(set) # param -> names that points to it + for module_prefix, module in _named_modules_with_dup(model): + for name, param in list(module.named_parameters(recurse=False)) + list( + module.named_buffers(recurse=False) # pyre-ignore + ): + full_name = (module_prefix + "." if module_prefix else "") + name + param_to_names[param].add(full_name) + for names in param_to_names.values(): + # if one name appears missing but its alias exists, then this + # name is not considered missing + if any(n in keyset for n in names) and not all(n in keyset for n in names): + [keyset.remove(n) for n in names if n in keyset] + return list(keyset) + + +def get_missing_parameters_message(keys: List[str]) -> str: + """ + Get a logging-friendly message to report parameter names (keys) that are in + the model but not found in a checkpoint. + Args: + keys (list[str]): List of keys that were not found in the checkpoint. + Returns: + str: message. + """ + groups = _group_checkpoint_keys(keys) + msg = "Some model parameters or buffers are not found in the checkpoint:\n" + msg += "\n".join( + " " + colored(k + _group_to_str(v), "blue") for k, v in groups.items() + ) + return msg + + +def get_unexpected_parameters_message(keys: List[str]) -> str: + """ + Get a logging-friendly message to report parameter names (keys) that are in + the checkpoint but not found in the model. + Args: + keys (list[str]): List of keys that were not found in the model. + Returns: + str: message. + """ + groups = _group_checkpoint_keys(keys) + msg = "The checkpoint state_dict contains keys that are not used by the model:\n" + msg += "\n".join( + " " + colored(k + _group_to_str(v), "magenta") for k, v in groups.items() + ) + return msg + + +def _strip_prefix_if_present(state_dict: Dict[str, Any], prefix: str) -> None: + """ + Strip the prefix in metadata, if any. + Args: + state_dict (OrderedDict): a state-dict to be loaded to the model. + prefix (str): prefix. + """ + keys = sorted(state_dict.keys()) + if not all(len(key) == 0 or key.startswith(prefix) for key in keys): + return + + for key in keys: + newkey = key[len(prefix):] + state_dict[newkey] = state_dict.pop(key) + + # also strip the prefix in metadata, if any.. + try: + metadata = state_dict._metadata # pyre-ignore + except AttributeError: + pass + else: + for key in list(metadata.keys()): + # for the metadata dict, the key can be: + # '': for the DDP module, which we want to remove. + # 'module': for the actual model. + # 'module.xx.xx': for the rest. 
+ + if len(key) == 0: + continue + newkey = key[len(prefix):] + metadata[newkey] = metadata.pop(key) + + +def _group_checkpoint_keys(keys: List[str]) -> Dict[str, List[str]]: + """ + Group keys based on common prefixes. A prefix is the string up to the final + "." in each key. + Args: + keys (list[str]): list of parameter names, i.e. keys in the model + checkpoint dict. + Returns: + dict[list]: keys with common prefixes are grouped into lists. + """ + groups = defaultdict(list) + for key in keys: + pos = key.rfind(".") + if pos >= 0: + head, tail = key[:pos], [key[pos + 1:]] + else: + head, tail = key, [] + groups[head].extend(tail) + return groups + + +def _group_to_str(group: List[str]) -> str: + """ + Format a group of parameter name suffixes into a loggable string. + Args: + group (list[str]): list of parameter name suffixes. + Returns: + str: formatted string. + """ + if len(group) == 0: + return "" + + if len(group) == 1: + return "." + group[0] + + return ".{" + ", ".join(group) + "}" + + +def _named_modules_with_dup( + model: nn.Module, prefix: str = "" +) -> Iterable[Tuple[str, nn.Module]]: + """ + The same as `model.named_modules()`, except that it includes + duplicated modules that have more than one name. + """ + yield prefix, model + for name, module in model._modules.items(): # pyre-ignore + if module is None: + continue + submodule_prefix = prefix + ("." if prefix else "") + name + yield from _named_modules_with_dup(module, submodule_prefix) diff --git a/thirdparty/fast-reid/fastreid/utils/collect_env.py b/thirdparty/fast-reid/fastreid/utils/collect_env.py new file mode 100644 index 0000000000000000000000000000000000000000..5affc3385880c743e6a7cabc986519604cdae0cf --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/collect_env.py @@ -0,0 +1,158 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# based on +# https://github.com/facebookresearch/detectron2/blob/master/detectron2/utils/collect_env.py +import importlib +import os +import re +import subprocess +import sys +from collections import defaultdict + +import PIL +import numpy as np +import torch +import torchvision +from tabulate import tabulate + +__all__ = ["collect_env_info"] + + +def collect_torch_env(): + try: + import torch.__config__ + + return torch.__config__.show() + except ImportError: + # compatible with older versions of pytorch + from torch.utils.collect_env import get_pretty_env_info + + return get_pretty_env_info() + + +def get_env_module(): + var_name = "FASTREID_ENV_MODULE" + return var_name, os.environ.get(var_name, "") + + +def detect_compute_compatibility(CUDA_HOME, so_file): + try: + cuobjdump = os.path.join(CUDA_HOME, "bin", "cuobjdump") + if os.path.isfile(cuobjdump): + output = subprocess.check_output( + "'{}' --list-elf '{}'".format(cuobjdump, so_file), shell=True + ) + output = output.decode("utf-8").strip().split("\n") + sm = [] + for line in output: + line = re.findall(r"\.sm_[0-9]*\.", line)[0] + sm.append(line.strip(".")) + sm = sorted(set(sm)) + return ", ".join(sm) + else: + return so_file + "; cannot find cuobjdump" + except Exception: + # unhandled failure + return so_file + + +def collect_env_info(): + has_gpu = torch.cuda.is_available() # true for both CUDA & ROCM + torch_version = torch.__version__ + + # NOTE: the use of CUDA_HOME and ROCM_HOME requires the CUDA/ROCM build deps, though in + # theory detectron2 should be made runnable with only the corresponding runtimes + from torch.utils.cpp_extension import CUDA_HOME + + has_rocm =
False + if tuple(map(int, torch_version.split(".")[:2])) >= (1, 5): + from torch.utils.cpp_extension import ROCM_HOME + + if (getattr(torch.version, "hip", None) is not None) and (ROCM_HOME is not None): + has_rocm = True + has_cuda = has_gpu and (not has_rocm) + + data = [] + data.append(("sys.platform", sys.platform)) + data.append(("Python", sys.version.replace("\n", ""))) + data.append(("numpy", np.__version__)) + + try: + import fastreid # noqa + + data.append( + ("fastreid", fastreid.__version__ + " @" + os.path.dirname(fastreid.__file__)) + ) + except ImportError: + data.append(("fastreid", "failed to import")) + + data.append(get_env_module()) + data.append(("PyTorch", torch_version + " @" + os.path.dirname(torch.__file__))) + data.append(("PyTorch debug build", torch.version.debug)) + + data.append(("GPU available", has_gpu)) + if has_gpu: + devices = defaultdict(list) + for k in range(torch.cuda.device_count()): + devices[torch.cuda.get_device_name(k)].append(str(k)) + for name, devids in devices.items(): + data.append(("GPU " + ",".join(devids), name)) + + if has_rocm: + data.append(("ROCM_HOME", str(ROCM_HOME))) + else: + data.append(("CUDA_HOME", str(CUDA_HOME))) + + cuda_arch_list = os.environ.get("TORCH_CUDA_ARCH_LIST", None) + if cuda_arch_list: + data.append(("TORCH_CUDA_ARCH_LIST", cuda_arch_list)) + data.append(("Pillow", PIL.__version__)) + + try: + data.append( + ( + "torchvision", + str(torchvision.__version__) + " @" + os.path.dirname(torchvision.__file__), + ) + ) + if has_cuda: + try: + torchvision_C = importlib.util.find_spec("torchvision._C").origin + msg = detect_compute_compatibility(CUDA_HOME, torchvision_C) + data.append(("torchvision arch flags", msg)) + except ImportError: + data.append(("torchvision._C", "failed to find")) + except AttributeError: + data.append(("torchvision", "unknown")) + + try: + import fvcore + + data.append(("fvcore", fvcore.__version__)) + except ImportError: + pass + + try: + import cv2 + + data.append(("cv2", cv2.__version__)) + except ImportError: + pass + env_str = tabulate(data) + "\n" + env_str += collect_torch_env() + return env_str + + +if __name__ == "__main__": + try: + import detectron2 # noqa + except ImportError: + print(collect_env_info()) + else: + from fastreid.utils.collect_env import collect_env_info + + print(collect_env_info()) diff --git a/thirdparty/fast-reid/fastreid/utils/comm.py b/thirdparty/fast-reid/fastreid/utils/comm.py new file mode 100644 index 0000000000000000000000000000000000000000..3306d7c8d070d72b46046d9aa20f1dc908142dc9 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/comm.py @@ -0,0 +1,255 @@ +""" +This file contains primitives for multi-gpu communication. +This is useful when doing distributed training. +""" + +import functools +import logging +import numpy as np +import pickle +import torch +import torch.distributed as dist + +_LOCAL_PROCESS_GROUP = None +""" +A torch process group which only includes processes that are on the same machine as the current process. +This variable is set when processes are spawned by `launch()` in "engine/launch.py". +""" + + +def get_world_size() -> int: + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank() -> int: + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group.
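These helpers are written to degrade gracefully when `torch.distributed` is unavailable or uninitialized, which is what lets single-process runs share the same code path as multi-GPU runs. A self-contained check (no process group required):

```python
import torch.distributed as dist

def get_world_size() -> int:
    # Mirrors the helper above: fall back to 1 outside distributed runs.
    if not dist.is_available() or not dist.is_initialized():
        return 1
    return dist.get_world_size()

print(get_world_size())  # 1 in a plain single-process interpreter
```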
+ """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert _LOCAL_PROCESS_GROUP is not None + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. + """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def _serialize_to_tensor(data, group): + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. 
+ Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def gather(data, dst=0, group=None): + """ + Run gather on arbitrary picklable data (not necessarily tensors). + Args: + data: any picklable object + dst (int): destination rank + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + Returns: + list[data]: on dst, a list of data gathered from each rank. Otherwise, + an empty list. + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group=group) == 1: + return [data] + rank = dist.get_rank(group=group) + + tensor = _serialize_to_tensor(data, group) + size_list, tensor = _pad_to_largest_tensor(tensor, group) + + # receiving Tensor from all ranks + if rank == dst: + max_size = max(size_list) + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.gather(tensor, tensor_list, dst=dst, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + return data_list + else: + dist.gather(tensor, [], dst=dst, group=group) + return [] + + +def shared_random_seed(): + """ + Returns: + int: a random number that is the same across all workers. + If workers need a shared RNG, they can use this shared seed to + create one. + All workers must call this function, otherwise it will deadlock. + """ + ints = np.random.randint(2 ** 31) + all_ints = all_gather(ints) + return all_ints[0] + + +def reduce_dict(input_dict, average=True): + """ + Reduce the values in the dictionary from all processes so that process with rank + 0 has the reduced results. + Args: + input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor. + average (bool): whether to do average or sum + Returns: + a dict with the same keys as input_dict, after reduction. 
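+    Example::
+        # a sketch: every rank calls this with the same keys; the values are
+        # assumed to be scalar CUDA tensors, e.g. {"loss_cls": ..., "loss_box": ...}
+        reduced = reduce_dict(loss_dict)  # on rank 0: values averaged over ranks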
+ """ + world_size = get_world_size() + if world_size < 2: + return input_dict + with torch.no_grad(): + names = [] + values = [] + # sort the keys so that they are consistent across processes + for k in sorted(input_dict.keys()): + names.append(k) + values.append(input_dict[k]) + values = torch.stack(values, dim=0) + dist.reduce(values, dst=0) + if dist.get_rank() == 0 and average: + # only main process gets accumulated, so only divide by + # world_size in this case + values /= world_size + reduced_dict = {k: v for k, v in zip(names, values)} + return reduced_dict diff --git a/thirdparty/fast-reid/fastreid/utils/compute_dist.py b/thirdparty/fast-reid/fastreid/utils/compute_dist.py new file mode 100644 index 0000000000000000000000000000000000000000..985096bd8e7625f984367991bcef5c87c9410075 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/compute_dist.py @@ -0,0 +1,200 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# Modified from: https://github.com/open-mmlab/OpenUnReID/blob/66bb2ae0b00575b80fbe8915f4d4f4739cc21206/openunreid/core/utils/compute_dist.py + + +import faiss +import numpy as np +import torch +import torch.nn.functional as F + +from .faiss_utils import ( + index_init_cpu, + index_init_gpu, + search_index_pytorch, + search_raw_array_pytorch, +) + +__all__ = [ + "build_dist", + "compute_jaccard_distance", + "compute_euclidean_distance", + "compute_cosine_distance", +] + + +@torch.no_grad() +def build_dist(feat_1: torch.Tensor, feat_2: torch.Tensor, metric: str = "euclidean", **kwargs) -> np.ndarray: + r"""Compute distance between two feature embeddings. + + Args: + feat_1 (torch.Tensor): 2-D feature with batch dimension. + feat_2 (torch.Tensor): 2-D feature with batch dimension. + metric: + + Returns: + numpy.ndarray: distance matrix. 
+ """ + assert metric in ["cosine", "euclidean", "jaccard"], "Expected metrics are cosine, euclidean and jaccard, " \ + "but got {}".format(metric) + + if metric == "euclidean": + return compute_euclidean_distance(feat_1, feat_2) + + elif metric == "cosine": + return compute_cosine_distance(feat_1, feat_2) + + elif metric == "jaccard": + feat = torch.cat((feat_1, feat_2), dim=0) + dist = compute_jaccard_distance(feat, k1=kwargs["k1"], k2=kwargs["k2"], search_option=0) + return dist[: feat_1.size(0), feat_1.size(0):] + + +def k_reciprocal_neigh(initial_rank, i, k1): + forward_k_neigh_index = initial_rank[i, : k1 + 1] + backward_k_neigh_index = initial_rank[forward_k_neigh_index, : k1 + 1] + fi = np.where(backward_k_neigh_index == i)[0] + return forward_k_neigh_index[fi] + + +@torch.no_grad() +def compute_jaccard_distance(features, k1=20, k2=6, search_option=0, fp16=False): + if search_option < 3: + # torch.cuda.empty_cache() + features = features.cuda() + + ngpus = faiss.get_num_gpus() + N = features.size(0) + mat_type = np.float16 if fp16 else np.float32 + + if search_option == 0: + # GPU + PyTorch CUDA Tensors (1) + res = faiss.StandardGpuResources() + res.setDefaultNullStreamAllDevices() + _, initial_rank = search_raw_array_pytorch(res, features, features, k1) + initial_rank = initial_rank.cpu().numpy() + elif search_option == 1: + # GPU + PyTorch CUDA Tensors (2) + res = faiss.StandardGpuResources() + index = faiss.GpuIndexFlatL2(res, features.size(-1)) + index.add(features.cpu().numpy()) + _, initial_rank = search_index_pytorch(index, features, k1) + res.syncDefaultStreamCurrentDevice() + initial_rank = initial_rank.cpu().numpy() + elif search_option == 2: + # GPU + index = index_init_gpu(ngpus, features.size(-1)) + index.add(features.cpu().numpy()) + _, initial_rank = index.search(features.cpu().numpy(), k1) + else: + # CPU + index = index_init_cpu(features.size(-1)) + index.add(features.cpu().numpy()) + _, initial_rank = index.search(features.cpu().numpy(), k1) + + nn_k1 = [] + nn_k1_half = [] + for i in range(N): + nn_k1.append(k_reciprocal_neigh(initial_rank, i, k1)) + nn_k1_half.append(k_reciprocal_neigh(initial_rank, i, int(np.around(k1 / 2)))) + + V = np.zeros((N, N), dtype=mat_type) + for i in range(N): + k_reciprocal_index = nn_k1[i] + k_reciprocal_expansion_index = k_reciprocal_index + for candidate in k_reciprocal_index: + candidate_k_reciprocal_index = nn_k1_half[candidate] + if len( + np.intersect1d(candidate_k_reciprocal_index, k_reciprocal_index) + ) > 2 / 3 * len(candidate_k_reciprocal_index): + k_reciprocal_expansion_index = np.append( + k_reciprocal_expansion_index, candidate_k_reciprocal_index + ) + + k_reciprocal_expansion_index = np.unique( + k_reciprocal_expansion_index + ) # element-wise unique + + x = features[i].unsqueeze(0).contiguous() + y = features[k_reciprocal_expansion_index] + m, n = x.size(0), y.size(0) + dist = ( + torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + + torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n, m).t() + ) + dist.addmm_(x, y.t(), beta=1, alpha=-2) + + if fp16: + V[i, k_reciprocal_expansion_index] = ( + F.softmax(-dist, dim=1).view(-1).cpu().numpy().astype(mat_type) + ) + else: + V[i, k_reciprocal_expansion_index] = ( + F.softmax(-dist, dim=1).view(-1).cpu().numpy() + ) + + del nn_k1, nn_k1_half, x, y + features = features.cpu() + + if k2 != 1: + V_qe = np.zeros_like(V, dtype=mat_type) + for i in range(N): + V_qe[i, :] = np.mean(V[initial_rank[i, :k2], :], axis=0) + V = V_qe + del V_qe + + del initial_rank + + invIndex = [] + 
+    for i in range(N):
+        invIndex.append(np.where(V[:, i] != 0)[0])  # len(invIndex)=all_num
+
+    jaccard_dist = np.zeros((N, N), dtype=mat_type)
+    for i in range(N):
+        temp_min = np.zeros((1, N), dtype=mat_type)
+        indNonZero = np.where(V[i, :] != 0)[0]
+        indImages = [invIndex[ind] for ind in indNonZero]
+        for j in range(len(indNonZero)):
+            temp_min[0, indImages[j]] = temp_min[0, indImages[j]] + np.minimum(
+                V[i, indNonZero[j]], V[indImages[j], indNonZero[j]]
+            )
+
+        jaccard_dist[i] = 1 - temp_min / (2 - temp_min)
+
+    del invIndex, V
+
+    pos_bool = jaccard_dist < 0
+    jaccard_dist[pos_bool] = 0.0
+
+    return jaccard_dist
+
+
+@torch.no_grad()
+def compute_euclidean_distance(features, others):
+    m, n = features.size(0), others.size(0)
+    dist_m = (
+        torch.pow(features, 2).sum(dim=1, keepdim=True).expand(m, n)
+        + torch.pow(others, 2).sum(dim=1, keepdim=True).expand(n, m).t()
+    )
+    # use the keyword form of addmm_; the positional (beta, alpha) form is
+    # deprecated and removed in recent PyTorch releases
+    dist_m.addmm_(features, others.t(), beta=1, alpha=-2)
+
+    return dist_m.cpu().numpy()
+
+
+@torch.no_grad()
+def compute_cosine_distance(features, others):
+    """Computes cosine distance.
+    Args:
+        features (torch.Tensor): 2-D feature matrix.
+        others (torch.Tensor): 2-D feature matrix.
+    Returns:
+        numpy.ndarray: distance matrix.
+    """
+    features = F.normalize(features, p=2, dim=1)
+    others = F.normalize(others, p=2, dim=1)
+    dist_m = 1 - torch.mm(features, others.t())
+    return dist_m.cpu().numpy()
diff --git a/thirdparty/fast-reid/fastreid/utils/env.py b/thirdparty/fast-reid/fastreid/utils/env.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a738fedcd5615a88452e0845d51759559146f53
--- /dev/null
+++ b/thirdparty/fast-reid/fastreid/utils/env.py
@@ -0,0 +1,119 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+import importlib
+import importlib.util
+import logging
+import numpy as np
+import os
+import random
+import sys
+from datetime import datetime
+import torch
+
+__all__ = ["seed_all_rng"]
+
+
+TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2])
+"""
+PyTorch version as a tuple of 2 ints. Useful for comparison.
+"""
+
+
+def seed_all_rng(seed=None):
+    """
+    Set the random seed for the RNG in torch, numpy and python.
+    Args:
+        seed (int): if None, will use a strong random seed.
+    """
+    if seed is None:
+        seed = (
+            os.getpid()
+            + int(datetime.now().strftime("%S%f"))
+            + int.from_bytes(os.urandom(2), "big")
+        )
+        logger = logging.getLogger(__name__)
+        logger.info("Using a generated random seed {}".format(seed))
+    np.random.seed(seed)
+    torch.set_rng_state(torch.manual_seed(seed).get_state())
+    random.seed(seed)
+
+
+# from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
+def _import_file(module_name, file_path, make_importable=False):
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+    if make_importable:
+        sys.modules[module_name] = module
+    return module
+
+
+def _configure_libraries():
+    """
+    Configurations for some libraries.
+ """ + # An environment option to disable `import cv2` globally, + # in case it leads to negative performance impact + disable_cv2 = int(os.environ.get("DETECTRON2_DISABLE_CV2", False)) + if disable_cv2: + sys.modules["cv2"] = None + else: + # Disable opencl in opencv since its interaction with cuda often has negative effects + # This envvar is supported after OpenCV 3.4.0 + os.environ["OPENCV_OPENCL_RUNTIME"] = "disabled" + try: + import cv2 + + if int(cv2.__version__.split(".")[0]) >= 3: + cv2.ocl.setUseOpenCL(False) + except ImportError: + pass + + def get_version(module, digit=2): + return tuple(map(int, module.__version__.split(".")[:digit])) + + # fmt: off + assert get_version(torch) >= (1, 4), "Requires torch>=1.4" + import yaml + assert get_version(yaml) >= (5, 1), "Requires pyyaml>=5.1" + # fmt: on + + +_ENV_SETUP_DONE = False + + +def setup_environment(): + """Perform environment setup work. The default setup is a no-op, but this + function allows the user to specify a Python source file or a module in + the $FASTREID_ENV_MODULE environment variable, that performs + custom setup work that may be necessary to their computing environment. + """ + global _ENV_SETUP_DONE + if _ENV_SETUP_DONE: + return + _ENV_SETUP_DONE = True + + _configure_libraries() + + custom_module_path = os.environ.get("FASTREID_ENV_MODULE") + + if custom_module_path: + setup_custom_environment(custom_module_path) + else: + # The default setup is a no-op + pass + + +def setup_custom_environment(custom_module): + """ + Load custom environment setup by importing a Python source file or a + module, and run the setup function. + """ + if custom_module.endswith(".py"): + module = _import_file("fastreid.utils.env.custom_module", custom_module) + else: + module = importlib.import_module(custom_module) + assert hasattr(module, "setup_environment") and callable(module.setup_environment), ( + "Custom environment module defined in {} does not have the " + "required callable attribute 'setup_environment'." + ).format(custom_module) + module.setup_environment() \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/utils/events.py b/thirdparty/fast-reid/fastreid/utils/events.py new file mode 100644 index 0000000000000000000000000000000000000000..e5fda2d45d7089fbadcf41a94b95dfb9a023c0b7 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/events.py @@ -0,0 +1,445 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import datetime +import json +import logging +import os +import time +from collections import defaultdict +from contextlib import contextmanager +import torch +from .file_io import PathManager +from .history_buffer import HistoryBuffer + +__all__ = [ + "get_event_storage", + "JSONWriter", + "TensorboardXWriter", + "CommonMetricPrinter", + "EventStorage", +] + +_CURRENT_STORAGE_STACK = [] + + +def get_event_storage(): + """ + Returns: + The :class:`EventStorage` object that's currently being used. + Throws an error if no :class:`EventStorage` is currently enabled. + """ + assert len( + _CURRENT_STORAGE_STACK + ), "get_event_storage() has to be called inside a 'with EventStorage(...)' context!" + return _CURRENT_STORAGE_STACK[-1] + + +class EventWriter: + """ + Base class for writers that obtain events from :class:`EventStorage` and process them. + """ + + def write(self): + raise NotImplementedError + + def close(self): + pass + + +class JSONWriter(EventWriter): + """ + Write scalars to a json file. 
+ It saves scalars as one json per line (instead of a big json) for easy parsing. + Examples parsing such a json file: + :: + $ cat metrics.json | jq -s '.[0:2]' + [ + { + "data_time": 0.008433341979980469, + "iteration": 20, + "loss": 1.9228371381759644, + "loss_box_reg": 0.050025828182697296, + "loss_classifier": 0.5316952466964722, + "loss_mask": 0.7236229181289673, + "loss_rpn_box": 0.0856662318110466, + "loss_rpn_cls": 0.48198649287223816, + "lr": 0.007173333333333333, + "time": 0.25401854515075684 + }, + { + "data_time": 0.007216215133666992, + "iteration": 40, + "loss": 1.282649278640747, + "loss_box_reg": 0.06222952902317047, + "loss_classifier": 0.30682939291000366, + "loss_mask": 0.6970193982124329, + "loss_rpn_box": 0.038663312792778015, + "loss_rpn_cls": 0.1471673548221588, + "lr": 0.007706666666666667, + "time": 0.2490077018737793 + } + ] + $ cat metrics.json | jq '.loss_mask' + 0.7126231789588928 + 0.689423680305481 + 0.6776131987571716 + ... + """ + + def __init__(self, json_file, window_size=20): + """ + Args: + json_file (str): path to the json file. New data will be appended if the file exists. + window_size (int): the window size of median smoothing for the scalars whose + `smoothing_hint` are True. + """ + self._file_handle = PathManager.open(json_file, "a") + self._window_size = window_size + self._last_write = -1 + + def write(self): + storage = get_event_storage() + to_save = defaultdict(dict) + + for k, (v, iter) in storage.latest_with_smoothing_hint(self._window_size).items(): + # keep scalars that have not been written + if iter <= self._last_write: + continue + to_save[iter][k] = v + all_iters = sorted(to_save.keys()) + self._last_write = max(all_iters) + + for itr, scalars_per_iter in to_save.items(): + scalars_per_iter["iteration"] = itr + self._file_handle.write(json.dumps(scalars_per_iter, sort_keys=True) + "\n") + self._file_handle.flush() + try: + os.fsync(self._file_handle.fileno()) + except AttributeError: + pass + + def close(self): + self._file_handle.close() + + +class TensorboardXWriter(EventWriter): + """ + Write all scalars to a tensorboard file. + """ + + def __init__(self, log_dir: str, window_size: int = 20, **kwargs): + """ + Args: + log_dir (str): the directory to save the output events + window_size (int): the scalars will be median-smoothed by this window size + kwargs: other arguments passed to `torch.utils.tensorboard.SummaryWriter(...)` + """ + self._window_size = window_size + from torch.utils.tensorboard import SummaryWriter + + self._writer = SummaryWriter(log_dir, **kwargs) + self._last_write = -1 + + def write(self): + storage = get_event_storage() + new_last_write = self._last_write + for k, (v, iter) in storage.latest_with_smoothing_hint(self._window_size).items(): + if iter > self._last_write: + self._writer.add_scalar(k, v, iter) + new_last_write = max(new_last_write, iter) + self._last_write = new_last_write + + # storage.put_{image,histogram} is only meant to be used by + # tensorboard writer. So we access its internal fields directly from here. + if len(storage._vis_data) >= 1: + for img_name, img, step_num in storage._vis_data: + self._writer.add_image(img_name, img, step_num) + # Storage stores all image data and rely on this writer to clear them. + # As a result it assumes only one writer will use its image data. + # An alternative design is to let storage store limited recent + # data (e.g. only the most recent image) that all writers can access. + # In that case a writer may not see all image data if its period is long. 
+ storage.clear_images() + + if len(storage._histograms) >= 1: + for params in storage._histograms: + self._writer.add_histogram_raw(**params) + storage.clear_histograms() + + def close(self): + if hasattr(self, "_writer"): # doesn't exist when the code fails at import + self._writer.close() + + +class CommonMetricPrinter(EventWriter): + """ + Print **common** metrics to the terminal, including + iteration time, ETA, memory, all losses, and the learning rate. + It also applies smoothing using a window of 20 elements. + It's meant to print common metrics in common ways. + To print something in more customized ways, please implement a similar printer by yourself. + """ + + def __init__(self, max_iter): + """ + Args: + max_iter (int): the maximum number of iterations to train. + Used to compute ETA. + """ + self.logger = logging.getLogger(__name__) + self._max_iter = max_iter + self._last_write = None + + def write(self): + storage = get_event_storage() + iteration = storage.iter + + try: + data_time = storage.history("data_time").avg(20) + except KeyError: + # they may not exist in the first few iterations (due to warmup) + # or when SimpleTrainer is not used + data_time = None + + eta_string = None + try: + iter_time = storage.history("time").global_avg() + eta_seconds = storage.history("time").median(1000) * (self._max_iter - iteration) + storage.put_scalar("eta_seconds", eta_seconds, smoothing_hint=False) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + except KeyError: + iter_time = None + # estimate eta on our own - more noisy + if self._last_write is not None: + estimate_iter_time = (time.perf_counter() - self._last_write[1]) / ( + iteration - self._last_write[0] + ) + eta_seconds = estimate_iter_time * (self._max_iter - iteration) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + self._last_write = (iteration, time.perf_counter()) + + try: + lr = "{:.2e}".format(storage.history("lr").latest()) + except KeyError: + lr = "N/A" + + if torch.cuda.is_available(): + max_mem_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0 + else: + max_mem_mb = None + + # NOTE: max_mem is parsed by grep in "dev/parse_results.sh" + self.logger.info( + " {eta}iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format( + eta=f"eta: {eta_string} " if eta_string else "", + iter=iteration, + losses=" ".join( + [ + "{}: {:.4g}".format(k, v.median(20)) + for k, v in storage.histories().items() + if "loss" in k + ] + ), + time="time: {:.4f} ".format(iter_time) if iter_time is not None else "", + data_time="data_time: {:.4f} ".format(data_time) if data_time is not None else "", + lr=lr, + memory="max_mem: {:.0f}M".format(max_mem_mb) if max_mem_mb is not None else "", + ) + ) + + +class EventStorage: + """ + The user-facing class that provides metric storage functionalities. + In the future we may add support for storing / logging other types of data if needed. + """ + + def __init__(self, start_iter=0): + """ + Args: + start_iter (int): the iteration number to start with + """ + self._history = defaultdict(HistoryBuffer) + self._smoothing_hints = {} + self._latest_scalars = {} + self._iter = start_iter + self._current_prefix = "" + self._vis_data = [] + self._histograms = [] + + def put_image(self, img_name, img_tensor): + """ + Add an `img_tensor` associated with `img_name`, to be shown on + tensorboard. + Args: + img_name (str): The name of the image to put into tensorboard. 
+            img_tensor (torch.Tensor or numpy.array): A `uint8` or `float`
+                Tensor of shape `[channel, height, width]` where `channel` is
+                3. The image format should be RGB. The elements in img_tensor
+                can either have values in [0, 1] (float32) or [0, 255] (uint8).
+                The `img_tensor` will be visualized in tensorboard.
+        """
+        self._vis_data.append((img_name, img_tensor, self._iter))
+
+    def put_scalar(self, name, value, smoothing_hint=True):
+        """
+        Add a scalar `value` to the `HistoryBuffer` associated with `name`.
+        Args:
+            smoothing_hint (bool): a 'hint' on whether this scalar is noisy and should be
+                smoothed when logged. The hint will be accessible through
+                :meth:`EventStorage.smoothing_hints`. A writer may ignore the hint
+                and apply a custom smoothing rule.
+                It defaults to True because most scalars we save need to be smoothed to
+                provide any useful signal.
+        """
+        name = self._current_prefix + name
+        history = self._history[name]
+        value = float(value)
+        history.update(value, self._iter)
+        self._latest_scalars[name] = (value, self._iter)
+
+        existing_hint = self._smoothing_hints.get(name)
+        if existing_hint is not None:
+            assert (
+                existing_hint == smoothing_hint
+            ), "Scalar {} was put with a different smoothing_hint!".format(name)
+        else:
+            self._smoothing_hints[name] = smoothing_hint
+
+    def put_scalars(self, *, smoothing_hint=True, **kwargs):
+        """
+        Put multiple scalars from keyword arguments.
+        Examples:
+            storage.put_scalars(loss=my_loss, accuracy=my_accuracy, smoothing_hint=True)
+        """
+        for k, v in kwargs.items():
+            self.put_scalar(k, v, smoothing_hint=smoothing_hint)
+
+    def put_histogram(self, hist_name, hist_tensor, bins=1000):
+        """
+        Create a histogram from a tensor.
+        Args:
+            hist_name (str): The name of the histogram to put into tensorboard.
+            hist_tensor (torch.Tensor): A Tensor of arbitrary shape to be converted
+                into a histogram.
+            bins (int): Number of histogram bins.
+        """
+        ht_min, ht_max = hist_tensor.min().item(), hist_tensor.max().item()
+
+        # Create a histogram with PyTorch
+        hist_counts = torch.histc(hist_tensor, bins=bins)
+        hist_edges = torch.linspace(start=ht_min, end=ht_max, steps=bins + 1, dtype=torch.float32)
+
+        # Parameter for the add_histogram_raw function of SummaryWriter
+        hist_params = dict(
+            tag=hist_name,
+            min=ht_min,
+            max=ht_max,
+            num=len(hist_tensor),
+            sum=float(hist_tensor.sum()),
+            sum_squares=float(torch.sum(hist_tensor ** 2)),
+            bucket_limits=hist_edges[1:].tolist(),
+            bucket_counts=hist_counts.tolist(),
+            global_step=self._iter,
+        )
+        self._histograms.append(hist_params)
+
+    def history(self, name):
+        """
+        Returns:
+            HistoryBuffer: the scalar history for name
+        """
+        ret = self._history.get(name, None)
+        if ret is None:
+            raise KeyError("No history metric available for {}!".format(name))
+        return ret
+
+    def histories(self):
+        """
+        Returns:
+            dict[name -> HistoryBuffer]: the HistoryBuffer for all scalars
+        """
+        return self._history
+
+    def latest(self):
+        """
+        Returns:
+            dict[str -> (float, int)]: mapping from the name of each scalar to the most
+                recent value and the iteration number it's added.
+        """
+        return self._latest_scalars
+
+    def latest_with_smoothing_hint(self, window_size=20):
+        """
+        Similar to :meth:`latest`, but the returned values
+        are either the un-smoothed original latest value,
+        or a median of the given window_size,
+        depending on whether the smoothing_hint is True.
+        This provides a default behavior that other writers can use.
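+        Example::
+            # a sketch: suppose "lr" was put with smoothing_hint=False and
+            # "loss" with smoothing_hint=True
+            storage.latest_with_smoothing_hint(20)
+            # -> {"lr": (latest_lr, iter), "loss": (median_of_last_20, iter)}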
+ """ + result = {} + for k, (v, itr) in self._latest_scalars.items(): + result[k] = ( + self._history[k].median(window_size) if self._smoothing_hints[k] else v, + itr, + ) + return result + + def smoothing_hints(self): + """ + Returns: + dict[name -> bool]: the user-provided hint on whether the scalar + is noisy and needs smoothing. + """ + return self._smoothing_hints + + def step(self): + """ + User should call this function at the beginning of each iteration, to + notify the storage of the start of a new iteration. + The storage will then be able to associate the new data with the + correct iteration number. + """ + self._iter += 1 + + @property + def iter(self): + return self._iter + + @property + def iteration(self): + # for backward compatibility + return self._iter + + def __enter__(self): + _CURRENT_STORAGE_STACK.append(self) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + assert _CURRENT_STORAGE_STACK[-1] == self + _CURRENT_STORAGE_STACK.pop() + + @contextmanager + def name_scope(self, name): + """ + Yields: + A context within which all the events added to this storage + will be prefixed by the name scope. + """ + old_prefix = self._current_prefix + self._current_prefix = name.rstrip("/") + "/" + yield + self._current_prefix = old_prefix + + def clear_images(self): + """ + Delete all the stored images for visualization. This should be called + after images are written to tensorboard. + """ + self._vis_data = [] + + def clear_histograms(self): + """ + Delete all the stored histograms for visualization. + This should be called after histograms are written to tensorboard. + """ + self._histograms = [] \ No newline at end of file diff --git a/thirdparty/fast-reid/fastreid/utils/faiss_utils.py b/thirdparty/fast-reid/fastreid/utils/faiss_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4d418f02b8d92f83d0991b6d3019df9d4d70c78c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/faiss_utils.py @@ -0,0 +1,127 @@ +# encoding: utf-8 +# copy from: https://github.com/open-mmlab/OpenUnReID/blob/66bb2ae0b00575b80fbe8915f4d4f4739cc21206/openunreid/core/utils/faiss_utils.py + +import faiss +import torch + + +def swig_ptr_from_FloatTensor(x): + assert x.is_contiguous() + assert x.dtype == torch.float32 + return faiss.cast_integer_to_float_ptr( + x.storage().data_ptr() + x.storage_offset() * 4 + ) + + +def swig_ptr_from_LongTensor(x): + assert x.is_contiguous() + assert x.dtype == torch.int64, "dtype=%s" % x.dtype + return faiss.cast_integer_to_long_ptr( + x.storage().data_ptr() + x.storage_offset() * 8 + ) + + +def search_index_pytorch(index, x, k, D=None, I=None): + """call the search function of an index with pytorch tensor I/O (CPU + and GPU supported)""" + assert x.is_contiguous() + n, d = x.size() + assert d == index.d + + if D is None: + D = torch.empty((n, k), dtype=torch.float32, device=x.device) + else: + assert D.size() == (n, k) + + if I is None: + I = torch.empty((n, k), dtype=torch.int64, device=x.device) + else: + assert I.size() == (n, k) + torch.cuda.synchronize() + xptr = swig_ptr_from_FloatTensor(x) + Iptr = swig_ptr_from_LongTensor(I) + Dptr = swig_ptr_from_FloatTensor(D) + index.search_c(n, xptr, k, Dptr, Iptr) + torch.cuda.synchronize() + return D, I + + +def search_raw_array_pytorch(res, xb, xq, k, D=None, I=None, metric=faiss.METRIC_L2): + assert xb.device == xq.device + + nq, d = xq.size() + if xq.is_contiguous(): + xq_row_major = True + elif xq.t().is_contiguous(): + xq = xq.t() # I initially wrote xq:t(), Lua is still 
haunting me :-) + xq_row_major = False + else: + raise TypeError("matrix should be row or column-major") + + xq_ptr = swig_ptr_from_FloatTensor(xq) + + nb, d2 = xb.size() + assert d2 == d + if xb.is_contiguous(): + xb_row_major = True + elif xb.t().is_contiguous(): + xb = xb.t() + xb_row_major = False + else: + raise TypeError("matrix should be row or column-major") + xb_ptr = swig_ptr_from_FloatTensor(xb) + + if D is None: + D = torch.empty(nq, k, device=xb.device, dtype=torch.float32) + else: + assert D.shape == (nq, k) + assert D.device == xb.device + + if I is None: + I = torch.empty(nq, k, device=xb.device, dtype=torch.int64) + else: + assert I.shape == (nq, k) + assert I.device == xb.device + + D_ptr = swig_ptr_from_FloatTensor(D) + I_ptr = swig_ptr_from_LongTensor(I) + + faiss.bruteForceKnn( + res, + metric, + xb_ptr, + xb_row_major, + nb, + xq_ptr, + xq_row_major, + nq, + d, + k, + D_ptr, + I_ptr, + ) + + return D, I + + +def index_init_gpu(ngpus, feat_dim): + flat_config = [] + for i in range(ngpus): + cfg = faiss.GpuIndexFlatConfig() + cfg.useFloat16 = False + cfg.device = i + flat_config.append(cfg) + + res = [faiss.StandardGpuResources() for i in range(ngpus)] + indexes = [ + faiss.GpuIndexFlatL2(res[i], feat_dim, flat_config[i]) for i in range(ngpus) + ] + index = faiss.IndexShards(feat_dim) + for sub_index in indexes: + index.add_shard(sub_index) + index.reset() + return index + + +def index_init_cpu(feat_dim): + return faiss.IndexFlatL2(feat_dim) diff --git a/thirdparty/fast-reid/fastreid/utils/file_io.py b/thirdparty/fast-reid/fastreid/utils/file_io.py new file mode 100644 index 0000000000000000000000000000000000000000..8533fe8709ec6ae2be7f7cda7dd0dc4235c1e12c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/file_io.py @@ -0,0 +1,520 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import errno +import logging +import os +import shutil +from collections import OrderedDict +from typing import ( + IO, + Any, + Callable, + Dict, + List, + MutableMapping, + Optional, + Union, +) + +__all__ = ["PathManager", "get_cache_dir"] + + +def get_cache_dir(cache_dir: Optional[str] = None) -> str: + """ + Returns a default directory to cache static files + (usually downloaded from Internet), if None is provided. + Args: + cache_dir (None or str): if not None, will be returned as is. + If None, returns the default cache directory as: + 1) $FVCORE_CACHE, if set + 2) otherwise ~/.torch/fvcore_cache + """ + if cache_dir is None: + cache_dir = os.path.expanduser( + os.getenv("FVCORE_CACHE", "~/.torch/fvcore_cache") + ) + return cache_dir + + +class PathHandler: + """ + PathHandler is a base class that defines common I/O functionality for a URI + protocol. It routes I/O for a generic URI which may look like "protocol://*" + or a canonical filepath "/foo/bar/baz". + """ + + _strict_kwargs_check = True + + def _check_kwargs(self, kwargs: Dict[str, Any]) -> None: + """ + Checks if the given arguments are empty. Throws a ValueError if strict + kwargs checking is enabled and args are non-empty. If strict kwargs + checking is disabled, only a warning is logged. 
+ Args: + kwargs (Dict[str, Any]) + """ + if self._strict_kwargs_check: + if len(kwargs) > 0: + raise ValueError("Unused arguments: {}".format(kwargs)) + else: + logger = logging.getLogger(__name__) + for k, v in kwargs.items(): + logger.warning( + "[PathManager] {}={} argument ignored".format(k, v) + ) + + def _get_supported_prefixes(self) -> List[str]: + """ + Returns: + List[str]: the list of URI prefixes this PathHandler can support + """ + raise NotImplementedError() + + def _get_local_path(self, path: str, **kwargs: Any) -> str: + """ + Get a filepath which is compatible with native Python I/O such as `open` + and `os.path`. + If URI points to a remote resource, this function may download and cache + the resource to local disk. In this case, this function is meant to be + used with read-only resources. + Args: + path (str): A URI supported by this PathHandler + Returns: + local_path (str): a file path which exists on the local file system + """ + raise NotImplementedError() + + def _open( + self, path: str, mode: str = "r", buffering: int = -1, **kwargs: Any + ) -> Union[IO[str], IO[bytes]]: + """ + Open a stream to a URI, similar to the built-in `open`. + Args: + path (str): A URI supported by this PathHandler + mode (str): Specifies the mode in which the file is opened. It defaults + to 'r'. + buffering (int): An optional integer used to set the buffering policy. + Pass 0 to switch buffering off and an integer >= 1 to indicate the + size in bytes of a fixed-size chunk buffer. When no buffering + argument is given, the default buffering policy depends on the + underlying I/O implementation. + Returns: + file: a file-like object. + """ + raise NotImplementedError() + + def _copy( + self, + src_path: str, + dst_path: str, + overwrite: bool = False, + **kwargs: Any, + ) -> bool: + """ + Copies a source path to a destination path. + Args: + src_path (str): A URI supported by this PathHandler + dst_path (str): A URI supported by this PathHandler + overwrite (bool): Bool flag for forcing overwrite of existing file + Returns: + status (bool): True on success + """ + raise NotImplementedError() + + def _exists(self, path: str, **kwargs: Any) -> bool: + """ + Checks if there is a resource at the given URI. + Args: + path (str): A URI supported by this PathHandler + Returns: + bool: true if the path exists + """ + raise NotImplementedError() + + def _isfile(self, path: str, **kwargs: Any) -> bool: + """ + Checks if the resource at the given URI is a file. + Args: + path (str): A URI supported by this PathHandler + Returns: + bool: true if the path is a file + """ + raise NotImplementedError() + + def _isdir(self, path: str, **kwargs: Any) -> bool: + """ + Checks if the resource at the given URI is a directory. + Args: + path (str): A URI supported by this PathHandler + Returns: + bool: true if the path is a directory + """ + raise NotImplementedError() + + def _ls(self, path: str, **kwargs: Any) -> List[str]: + """ + List the contents of the directory at the provided URI. + Args: + path (str): A URI supported by this PathHandler + Returns: + List[str]: list of contents in given path + """ + raise NotImplementedError() + + def _mkdirs(self, path: str, **kwargs: Any) -> None: + """ + Recursive directory creation function. Like mkdir(), but makes all + intermediate-level directories needed to contain the leaf directory. + Similar to the native `os.makedirs`. 
+ Args: + path (str): A URI supported by this PathHandler + """ + raise NotImplementedError() + + def _rm(self, path: str, **kwargs: Any) -> None: + """ + Remove the file (not directory) at the provided URI. + Args: + path (str): A URI supported by this PathHandler + """ + raise NotImplementedError() + + +class NativePathHandler(PathHandler): + """ + Handles paths that can be accessed using Python native system calls. This + handler uses `open()` and `os.*` calls on the given path. + """ + + def _get_local_path(self, path: str, **kwargs: Any) -> str: + self._check_kwargs(kwargs) + return path + + def _open( + self, + path: str, + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + closefd: bool = True, + opener: Optional[Callable] = None, + **kwargs: Any, + ) -> Union[IO[str], IO[bytes]]: + """ + Open a path. + Args: + path (str): A URI supported by this PathHandler + mode (str): Specifies the mode in which the file is opened. It defaults + to 'r'. + buffering (int): An optional integer used to set the buffering policy. + Pass 0 to switch buffering off and an integer >= 1 to indicate the + size in bytes of a fixed-size chunk buffer. When no buffering + argument is given, the default buffering policy works as follows: + * Binary files are buffered in fixed-size chunks; the size of + the buffer is chosen using a heuristic trying to determine the + underlying device’s “block size” and falling back on + io.DEFAULT_BUFFER_SIZE. On many systems, the buffer will + typically be 4096 or 8192 bytes long. + encoding (Optional[str]): the name of the encoding used to decode or + encode the file. This should only be used in text mode. + errors (Optional[str]): an optional string that specifies how encoding + and decoding errors are to be handled. This cannot be used in binary + mode. + newline (Optional[str]): controls how universal newlines mode works + (it only applies to text mode). It can be None, '', '\n', '\r', + and '\r\n'. + closefd (bool): If closefd is False and a file descriptor rather than + a filename was given, the underlying file descriptor will be kept + open when the file is closed. If a filename is given closefd must + be True (the default) otherwise an error will be raised. + opener (Optional[Callable]): A custom opener can be used by passing + a callable as opener. The underlying file descriptor for the file + object is then obtained by calling opener with (file, flags). + opener must return an open file descriptor (passing os.open as opener + results in functionality similar to passing None). + See https://docs.python.org/3/library/functions.html#open for details. + Returns: + file: a file-like object. + """ + self._check_kwargs(kwargs) + return open( # type: ignore + path, + mode, + buffering=buffering, + encoding=encoding, + errors=errors, + newline=newline, + closefd=closefd, + opener=opener, + ) + + def _copy( + self, + src_path: str, + dst_path: str, + overwrite: bool = False, + **kwargs: Any, + ) -> bool: + """ + Copies a source path to a destination path. 
+ Args: + src_path (str): A URI supported by this PathHandler + dst_path (str): A URI supported by this PathHandler + overwrite (bool): Bool flag for forcing overwrite of existing file + Returns: + status (bool): True on success + """ + self._check_kwargs(kwargs) + + if os.path.exists(dst_path) and not overwrite: + logger = logging.getLogger(__name__) + logger.error("Destination file {} already exists.".format(dst_path)) + return False + + try: + shutil.copyfile(src_path, dst_path) + return True + except Exception as e: + logger = logging.getLogger(__name__) + logger.error("Error in file copy - {}".format(str(e))) + return False + + def _exists(self, path: str, **kwargs: Any) -> bool: + self._check_kwargs(kwargs) + return os.path.exists(path) + + def _isfile(self, path: str, **kwargs: Any) -> bool: + self._check_kwargs(kwargs) + return os.path.isfile(path) + + def _isdir(self, path: str, **kwargs: Any) -> bool: + self._check_kwargs(kwargs) + return os.path.isdir(path) + + def _ls(self, path: str, **kwargs: Any) -> List[str]: + self._check_kwargs(kwargs) + return os.listdir(path) + + def _mkdirs(self, path: str, **kwargs: Any) -> None: + self._check_kwargs(kwargs) + try: + os.makedirs(path, exist_ok=True) + except OSError as e: + # EEXIST it can still happen if multiple processes are creating the dir + if e.errno != errno.EEXIST: + raise + + def _rm(self, path: str, **kwargs: Any) -> None: + self._check_kwargs(kwargs) + os.remove(path) + + +class PathManager: + """ + A class for users to open generic paths or translate generic paths to file names. + """ + + _PATH_HANDLERS: MutableMapping[str, PathHandler] = OrderedDict() + _NATIVE_PATH_HANDLER = NativePathHandler() + + @staticmethod + def __get_path_handler(path: str) -> PathHandler: + """ + Finds a PathHandler that supports the given path. Falls back to the native + PathHandler if no other handler is found. + Args: + path (str): URI path to resource + Returns: + handler (PathHandler) + """ + for p in PathManager._PATH_HANDLERS.keys(): + if path.startswith(p): + return PathManager._PATH_HANDLERS[p] + return PathManager._NATIVE_PATH_HANDLER + + @staticmethod + def open( + path: str, mode: str = "r", buffering: int = -1, **kwargs: Any + ) -> Union[IO[str], IO[bytes]]: + """ + Open a stream to a URI, similar to the built-in `open`. + Args: + path (str): A URI supported by this PathHandler + mode (str): Specifies the mode in which the file is opened. It defaults + to 'r'. + buffering (int): An optional integer used to set the buffering policy. + Pass 0 to switch buffering off and an integer >= 1 to indicate the + size in bytes of a fixed-size chunk buffer. When no buffering + argument is given, the default buffering policy depends on the + underlying I/O implementation. + Returns: + file: a file-like object. + """ + return PathManager.__get_path_handler(path)._open( # type: ignore + path, mode, buffering=buffering, **kwargs + ) + + @staticmethod + def copy( + src_path: str, dst_path: str, overwrite: bool = False, **kwargs: Any + ) -> bool: + """ + Copies a source path to a destination path. + Args: + src_path (str): A URI supported by this PathHandler + dst_path (str): A URI supported by this PathHandler + overwrite (bool): Bool flag for forcing overwrite of existing file + Returns: + status (bool): True on success + """ + + # Copying across handlers is not supported. 
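+        # e.g. copying from a hypothetical "myproto://bucket/a" to a local
+        # "/tmp/a" would involve two different handlers, so both paths must
+        # resolve to the same handler before the copy is delegated.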
+        assert PathManager.__get_path_handler(  # type: ignore
+            src_path
+        ) == PathManager.__get_path_handler(dst_path)
+        return PathManager.__get_path_handler(src_path)._copy(
+            src_path, dst_path, overwrite, **kwargs
+        )
+
+    @staticmethod
+    def get_local_path(path: str, **kwargs: Any) -> str:
+        """
+        Get a filepath which is compatible with native Python I/O such as `open`
+        and `os.path`.
+        If URI points to a remote resource, this function may download and cache
+        the resource to local disk.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            local_path (str): a file path which exists on the local file system
+        """
+        return PathManager.__get_path_handler(  # type: ignore
+            path
+        )._get_local_path(path, **kwargs)
+
+    @staticmethod
+    def exists(path: str, **kwargs: Any) -> bool:
+        """
+        Checks if there is a resource at the given URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path exists
+        """
+        return PathManager.__get_path_handler(path)._exists(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def isfile(path: str, **kwargs: Any) -> bool:
+        """
+        Checks if the resource at the given URI is a file.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path is a file
+        """
+        return PathManager.__get_path_handler(path)._isfile(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def isdir(path: str, **kwargs: Any) -> bool:
+        """
+        Checks if the resource at the given URI is a directory.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path is a directory
+        """
+        return PathManager.__get_path_handler(path)._isdir(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def ls(path: str, **kwargs: Any) -> List[str]:
+        """
+        List the contents of the directory at the provided URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            List[str]: list of contents in given path
+        """
+        return PathManager.__get_path_handler(path)._ls(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def mkdirs(path: str, **kwargs: Any) -> None:
+        """
+        Recursive directory creation function. Like mkdir(), but makes all
+        intermediate-level directories needed to contain the leaf directory.
+        Similar to the native `os.makedirs`.
+        Args:
+            path (str): A URI supported by this PathHandler
+        """
+        return PathManager.__get_path_handler(path)._mkdirs(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def rm(path: str, **kwargs: Any) -> None:
+        """
+        Remove the file (not directory) at the provided URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        """
+        return PathManager.__get_path_handler(path)._rm(  # type: ignore
+            path, **kwargs
+        )
+
+    @staticmethod
+    def register_handler(handler: PathHandler) -> None:
+        """
+        Register a path handler associated with `handler._get_supported_prefixes`
+        URI prefixes.
+        Args:
+            handler (PathHandler)
+        """
+        assert isinstance(handler, PathHandler), handler
+        for prefix in handler._get_supported_prefixes():
+            assert prefix not in PathManager._PATH_HANDLERS
+            PathManager._PATH_HANDLERS[prefix] = handler
+
+        # Sort path handlers in reverse order so longer prefixes take priority,
+        # eg: http://foo/bar before http://foo
+        PathManager._PATH_HANDLERS = OrderedDict(
+            sorted(
+                PathManager._PATH_HANDLERS.items(),
+                key=lambda t: t[0],
+                reverse=True,
+            )
+        )
+
+    @staticmethod
+    def set_strict_kwargs_checking(enable: bool) -> None:
+        """
+        Toggles strict kwargs checking.
If enabled, a ValueError is thrown if any + unused parameters are passed to a PathHandler function. If disabled, only + a warning is given. + With a centralized file API, there's a tradeoff of convenience and + correctness delegating arguments to the proper I/O layers. An underlying + `PathHandler` may support custom arguments which should not be statically + exposed on the `PathManager` function. For example, a custom `HTTPURLHandler` + may want to expose a `cache_timeout` argument for `open()` which specifies + how old a locally cached resource can be before it's refetched from the + remote server. This argument would not make sense for a `NativePathHandler`. + If strict kwargs checking is disabled, `cache_timeout` can be passed to + `PathManager.open` which will forward the arguments to the underlying + handler. By default, checking is enabled since it is innately unsafe: + multiple `PathHandler`s could reuse arguments with different semantic + meanings or types. + Args: + enable (bool) + """ + PathManager._NATIVE_PATH_HANDLER._strict_kwargs_check = enable + for handler in PathManager._PATH_HANDLERS.values(): + handler._strict_kwargs_check = enable diff --git a/thirdparty/fast-reid/fastreid/utils/history_buffer.py b/thirdparty/fast-reid/fastreid/utils/history_buffer.py new file mode 100644 index 0000000000000000000000000000000000000000..b185c34a730ad4028f41b3ff26c0acf5bc85a511 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/history_buffer.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import numpy as np +from typing import List, Tuple + + +class HistoryBuffer: + """ + Track a series of scalar values and provide access to smoothed values over a + window or the global average of the series. + """ + + def __init__(self, max_length: int = 1000000): + """ + Args: + max_length: maximal number of values that can be stored in the + buffer. When the capacity of the buffer is exhausted, old + values will be removed. + """ + self._max_length: int = max_length + self._data: List[Tuple[float, float]] = [] # (value, iteration) pairs + self._count: int = 0 + self._global_avg: float = 0 + + def update(self, value: float, iteration: float = None): + """ + Add a new scalar value produced at certain iteration. If the length + of the buffer exceeds self._max_length, the oldest element will be + removed from the buffer. + """ + if iteration is None: + iteration = self._count + if len(self._data) == self._max_length: + self._data.pop(0) + self._data.append((value, iteration)) + + self._count += 1 + self._global_avg += (value - self._global_avg) / self._count + + def latest(self): + """ + Return the latest scalar value added to the buffer. + """ + return self._data[-1][0] + + def median(self, window_size: int): + """ + Return the median of the latest `window_size` values in the buffer. + """ + return np.median([x[0] for x in self._data[-window_size:]]) + + def avg(self, window_size: int): + """ + Return the mean of the latest `window_size` values in the buffer. + """ + return np.mean([x[0] for x in self._data[-window_size:]]) + + def global_avg(self): + """ + Return the mean of all the elements in the buffer. Note that this + includes those getting removed due to limited buffer storage. + """ + return self._global_avg + + def values(self): + """ + Returns: + list[(number, iteration)]: content of the current buffer. 
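+        Example::
+            # a sketch: iterations default to 0, 1, 2, ... when not supplied
+            buf = HistoryBuffer()
+            for v in (0.5, 0.7, 0.6):
+                buf.update(v)
+            buf.values()  # -> [(0.5, 0), (0.7, 1), (0.6, 2)]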
+ """ + return self._data diff --git a/thirdparty/fast-reid/fastreid/utils/logger.py b/thirdparty/fast-reid/fastreid/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..d51f127b9071809c4975f3fc345fd976908c92a8 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/logger.py @@ -0,0 +1,209 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import functools +import logging +import os +import sys +import time +from collections import Counter +from .file_io import PathManager +from termcolor import colored + + +class _ColorfulFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): + self._root_name = kwargs.pop("root_name") + "." + self._abbrev_name = kwargs.pop("abbrev_name", "") + if len(self._abbrev_name): + self._abbrev_name = self._abbrev_name + "." + super(_ColorfulFormatter, self).__init__(*args, **kwargs) + + def formatMessage(self, record): + record.name = record.name.replace(self._root_name, self._abbrev_name) + log = super(_ColorfulFormatter, self).formatMessage(record) + if record.levelno == logging.WARNING: + prefix = colored("WARNING", "red", attrs=["blink"]) + elif record.levelno == logging.ERROR or record.levelno == logging.CRITICAL: + prefix = colored("ERROR", "red", attrs=["blink", "underline"]) + else: + return log + return prefix + " " + log + + +@functools.lru_cache() # so that calling setup_logger multiple times won't add many handlers +def setup_logger( + output=None, distributed_rank=0, *, color=True, name="fastreid", abbrev_name=None +): + """ + Args: + output (str): a file name or a directory to save log. If None, will not save log file. + If ends with ".txt" or ".log", assumed to be a file name. + Otherwise, logs will be saved to `output/log.txt`. + name (str): the root module name of this logger + abbrev_name (str): an abbreviation of the module, to avoid long names in logs. + Set to "" to not log the root module in logs. + By default, will abbreviate "detectron2" to "d2" and leave other + modules unchanged. + """ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + logger.propagate = False + + if abbrev_name is None: + abbrev_name = "d2" if name == "detectron2" else name + + plain_formatter = logging.Formatter( + "[%(asctime)s] %(name)s %(levelname)s: %(message)s", datefmt="%m/%d %H:%M:%S" + ) + # stdout logging: master only + if distributed_rank == 0: + ch = logging.StreamHandler(stream=sys.stdout) + ch.setLevel(logging.DEBUG) + if color: + formatter = _ColorfulFormatter( + colored("[%(asctime)s %(name)s]: ", "green") + "%(message)s", + datefmt="%m/%d %H:%M:%S", + root_name=name, + abbrev_name=str(abbrev_name), + ) + else: + formatter = plain_formatter + ch.setFormatter(formatter) + logger.addHandler(ch) + + # file logging: all workers + if output is not None: + if output.endswith(".txt") or output.endswith(".log"): + filename = output + else: + filename = os.path.join(output, "log.txt") + if distributed_rank > 0: + filename = filename + ".rank{}".format(distributed_rank) + PathManager.mkdirs(os.path.dirname(filename)) + + fh = logging.StreamHandler(_cached_log_stream(filename)) + fh.setLevel(logging.DEBUG) + fh.setFormatter(plain_formatter) + logger.addHandler(fh) + + return logger + + +# cache the opened file object, so that different calls to `setup_logger` +# with the same file name can safely write to the same file. 
+@functools.lru_cache(maxsize=None) +def _cached_log_stream(filename): + return PathManager.open(filename, "a") + + +""" +Below are some other convenient logging methods. +They are mainly adopted from +https://github.com/abseil/abseil-py/blob/master/absl/logging/__init__.py +""" + + +def _find_caller(): + """ + Returns: + str: module name of the caller + tuple: a hashable key to be used to identify different callers + """ + frame = sys._getframe(2) + while frame: + code = frame.f_code + if os.path.join("utils", "logger.") not in code.co_filename: + mod_name = frame.f_globals["__name__"] + if mod_name == "__main__": + mod_name = "detectron2" + return mod_name, (code.co_filename, frame.f_lineno, code.co_name) + frame = frame.f_back + + +_LOG_COUNTER = Counter() +_LOG_TIMER = {} + + +def log_first_n(lvl, msg, n=1, *, name=None, key="caller"): + """ + Log only for the first n times. + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. Will use the caller's module by default. + key (str or tuple[str]): the string(s) can be one of "caller" or + "message", which defines how to identify duplicated logs. + For example, if called with `n=1, key="caller"`, this function + will only log the first call from the same caller, regardless of + the message content. + If called with `n=1, key="message"`, this function will log the + same content only once, even if they are called from different places. + If called with `n=1, key=("caller", "message")`, this function + will not log only if the same caller has logged the same message before. + """ + if isinstance(key, str): + key = (key,) + assert len(key) > 0 + + caller_module, caller_key = _find_caller() + hash_key = () + if "caller" in key: + hash_key = hash_key + caller_key + if "message" in key: + hash_key = hash_key + (msg,) + + _LOG_COUNTER[hash_key] += 1 + if _LOG_COUNTER[hash_key] <= n: + logging.getLogger(name or caller_module).log(lvl, msg) + + +def log_every_n(lvl, msg, n=1, *, name=None): + """ + Log once per n times. + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. Will use the caller's module by default. + """ + caller_module, key = _find_caller() + _LOG_COUNTER[key] += 1 + if n == 1 or _LOG_COUNTER[key] % n == 1: + logging.getLogger(name or caller_module).log(lvl, msg) + + +def log_every_n_seconds(lvl, msg, n=1, *, name=None): + """ + Log no more than once per n seconds. + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. Will use the caller's module by default. + """ + caller_module, key = _find_caller() + last_logged = _LOG_TIMER.get(key, None) + current_time = time.time() + if last_logged is None or current_time - last_logged >= n: + logging.getLogger(name or caller_module).log(lvl, msg) + _LOG_TIMER[key] = current_time + +# def create_small_table(small_dict): +# """ +# Create a small table using the keys of small_dict as headers. This is only +# suitable for small dictionaries. +# Args: +# small_dict (dict): a result dictionary of only a few items. +# Returns: +# str: the table as a string. 
+# """ +# keys, values = tuple(zip(*small_dict.items())) +# table = tabulate( +# [values], +# headers=keys, +# tablefmt="pipe", +# floatfmt=".3f", +# stralign="center", +# numalign="center", +# ) +# return table diff --git a/thirdparty/fast-reid/fastreid/utils/precision_bn.py b/thirdparty/fast-reid/fastreid/utils/precision_bn.py new file mode 100644 index 0000000000000000000000000000000000000000..94bae03b6e519eb0d13d61d59fac3dc09ef117b6 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/precision_bn.py @@ -0,0 +1,94 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import itertools + +import torch + +BN_MODULE_TYPES = ( + torch.nn.BatchNorm1d, + torch.nn.BatchNorm2d, + torch.nn.BatchNorm3d, + torch.nn.SyncBatchNorm, +) + + +@torch.no_grad() +def update_bn_stats(model, data_loader, num_iters: int = 200): + """ + Recompute and update the batch norm stats to make them more precise. During + training both BN stats and the weight are changing after every iteration, so + the running average can not precisely reflect the actual stats of the + current model. + In this function, the BN stats are recomputed with fixed weights, to make + the running average more precise. Specifically, it computes the true average + of per-batch mean/variance instead of the running average. + Args: + model (nn.Module): the model whose bn stats will be recomputed. + Note that: + 1. This function will not alter the training mode of the given model. + Users are responsible for setting the layers that needs + precise-BN to training mode, prior to calling this function. + 2. Be careful if your models contain other stateful layers in + addition to BN, i.e. layers whose state can change in forward + iterations. This function will alter their state. If you wish + them unchanged, you need to either pass in a submodule without + those layers, or backup the states. + data_loader (iterator): an iterator. Produce data as inputs to the model. + num_iters (int): number of iterations to compute the stats. + """ + bn_layers = get_bn_modules(model) + if len(bn_layers) == 0: + return + + # In order to make the running stats only reflect the current batch, the + # momentum is disabled. + # bn.running_mean = (1 - momentum) * bn.running_mean + momentum * batch_mean + # Setting the momentum to 1.0 to compute the stats without momentum. + momentum_actual = [bn.momentum for bn in bn_layers] + for bn in bn_layers: + bn.momentum = 1.0 + + # Note that running_var actually means "running average of variance" + running_mean = [torch.zeros_like(bn.running_mean) for bn in bn_layers] + running_var = [torch.zeros_like(bn.running_var) for bn in bn_layers] + + for ind, inputs in enumerate(itertools.islice(data_loader, num_iters)): + inputs['targets'].fill_(-1) + with torch.no_grad(): # No need to backward + model(inputs) + for i, bn in enumerate(bn_layers): + # Accumulates the bn stats. + running_mean[i] += (bn.running_mean - running_mean[i]) / (ind + 1) + running_var[i] += (bn.running_var - running_var[i]) / (ind + 1) + # We compute the "average of variance" across iterations. + assert ind == num_iters - 1, ( + "update_bn_stats is meant to run for {} iterations, " + "but the dataloader stops at {} iterations.".format(num_iters, ind) + ) + + for i, bn in enumerate(bn_layers): + # Sets the precise bn stats. + bn.running_mean = running_mean[i] + bn.running_var = running_var[i] + bn.momentum = momentum_actual[i] + + +def get_bn_modules(model): + """ + Find all BatchNorm (BN) modules that are in training mode. 
See + fvcore.precise_bn.BN_MODULE_TYPES for a list of all modules that are + included in this search. + Args: + model (nn.Module): a model possibly containing BN modules. + Returns: + list[nn.Module]: all BN modules in the model. + """ + # Finds all the bn layers. + bn_layers = [ + m for m in model.modules() if m.training and isinstance(m, BN_MODULE_TYPES) + ] + return bn_layers diff --git a/thirdparty/fast-reid/fastreid/utils/registry.py b/thirdparty/fast-reid/fastreid/utils/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..eb9d4f2f933798ed5ac6c51ed2aab70846d08bca --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/registry.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Dict, Optional + + +class Registry(object): + """ + The registry that provides name -> object mapping, to support third-party + users' custom modules. + To create a registry (e.g. a backbone registry): + .. code-block:: python + BACKBONE_REGISTRY = Registry('BACKBONE') + To register an object: + .. code-block:: python + @BACKBONE_REGISTRY.register() + class MyBackbone(): + ... + Or: + .. code-block:: python + BACKBONE_REGISTRY.register(MyBackbone) + """ + + def __init__(self, name: str) -> None: + """ + Args: + name (str): the name of this registry + """ + self._name: str = name + self._obj_map: Dict[str, object] = {} + + def _do_register(self, name: str, obj: object) -> None: + assert ( + name not in self._obj_map + ), "An object named '{}' was already registered in '{}' registry!".format( + name, self._name + ) + self._obj_map[name] = obj + + def register(self, obj: object = None) -> Optional[object]: + """ + Register the given object under the the name `obj.__name__`. + Can be used as either a decorator or not. See docstring of this class for usage. 
+ """ + if obj is None: + # used as a decorator + def deco(func_or_class: object) -> object: + name = func_or_class.__name__ # pyre-ignore + self._do_register(name, func_or_class) + return func_or_class + + return deco + + # used as a function call + name = obj.__name__ # pyre-ignore + self._do_register(name, obj) + + def get(self, name: str) -> object: + ret = self._obj_map.get(name) + if ret is None: + raise KeyError( + "No object named '{}' found in '{}' registry!".format( + name, self._name + ) + ) + return ret diff --git a/thirdparty/fast-reid/fastreid/utils/summary.py b/thirdparty/fast-reid/fastreid/utils/summary.py new file mode 100644 index 0000000000000000000000000000000000000000..28e9085081b1f9de731b179ce19081f81dfb35f5 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/summary.py @@ -0,0 +1,120 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn as nn +from torch.autograd import Variable + +from collections import OrderedDict +import numpy as np + + +def summary(model, input_size, batch_size=-1, device="cuda"): + def register_hook(module): + + def hook(module, input, output): + class_name = str(module.__class__).split(".")[-1].split("'")[0] + module_idx = len(summary) + + m_key = "%s-%i" % (class_name, module_idx + 1) + summary[m_key] = OrderedDict() + summary[m_key]["input_shape"] = list(input[0].size()) + summary[m_key]["input_shape"][0] = batch_size + if isinstance(output, (list, tuple)): + summary[m_key]["output_shape"] = [ + [-1] + list(o.size())[1:] for o in output + ] + else: + summary[m_key]["output_shape"] = list(output.size()) + summary[m_key]["output_shape"][0] = batch_size + + params = 0 + if hasattr(module, "weight") and hasattr(module.weight, "size"): + params += torch.prod(torch.LongTensor(list(module.weight.size()))) + summary[m_key]["trainable"] = module.weight.requires_grad + if hasattr(module, "bias") and hasattr(module.bias, "size"): + params += torch.prod(torch.LongTensor(list(module.bias.size()))) + summary[m_key]["nb_params"] = params + + if ( + not isinstance(module, nn.Sequential) + and not isinstance(module, nn.ModuleList) + and not (module == model) + ): + hooks.append(module.register_forward_hook(hook)) + + device = device.lower() + assert device in [ + "cuda", + "cpu", + ], "Input device is not valid, please specify 'cuda' or 'cpu'" + + if device == "cuda" and torch.cuda.is_available(): + dtype = torch.cuda.FloatTensor + else: + dtype = torch.FloatTensor + + # multiple inputs to the network + if isinstance(input_size, tuple): + input_size = [input_size] + + # batch_size of 2 for batchnorm + x = [torch.rand(2, *in_size).type(dtype) for in_size in input_size] + # print(type(x[0])) + + # create properties + summary = OrderedDict() + hooks = [] + + # register hook + model.apply(register_hook) + + # make a forward pass + # print(x.shape) + model(*x) + + # remove these hooks + for h in hooks: + h.remove() + + print("----------------------------------------------------------------") + line_new = "{:>20} {:>25} {:>15}".format("Layer (type)", "Output Shape", "Param #") + print(line_new) + print("================================================================") + total_params = 0 + total_output = 0 + trainable_params = 0 + for layer in summary: + # input_shape, output_shape, trainable, nb_params + line_new = "{:>20} {:>25} {:>15}".format( + layer, + str(summary[layer]["output_shape"]), + "{0:,}".format(summary[layer]["nb_params"]), + ) + total_params += summary[layer]["nb_params"] + 
total_output += np.prod(summary[layer]["output_shape"]) + if "trainable" in summary[layer]: + if summary[layer]["trainable"] == True: + trainable_params += summary[layer]["nb_params"] + print(line_new) + + # assume 4 bytes/number (float on cuda). + total_input_size = abs(np.prod(input_size) * batch_size * 4. / (1024 ** 2.)) + total_output_size = abs(2. * total_output * 4. / (1024 ** 2.)) # x2 for gradients + total_params_size = abs(total_params.numpy() * 4. / (1024 ** 2.)) + total_size = total_params_size + total_output_size + total_input_size + + print("================================================================") + print("Total params: {0:,}".format(total_params)) + print("Trainable params: {0:,}".format(trainable_params)) + print("Non-trainable params: {0:,}".format(total_params - trainable_params)) + print("----------------------------------------------------------------") + print("Input size (MB): %0.2f" % total_input_size) + print("Forward/backward pass size (MB): %0.2f" % total_output_size) + print("Params size (MB): %0.2f" % total_params_size) + print("Estimated Total Size (MB): %0.2f" % total_size) + print("----------------------------------------------------------------") + # return summary diff --git a/thirdparty/fast-reid/fastreid/utils/timer.py b/thirdparty/fast-reid/fastreid/utils/timer.py new file mode 100644 index 0000000000000000000000000000000000000000..7aa7916f2303d83b1686797803d1ce2b65c3d96c --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/timer.py @@ -0,0 +1,68 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +# -*- coding: utf-8 -*- + +from time import perf_counter +from typing import Optional + + +class Timer: + """ + A timer which computes the time elapsed since the start/reset of the timer. + """ + + def __init__(self): + self.reset() + + def reset(self): + """ + Reset the timer. + """ + self._start = perf_counter() + self._paused: Optional[float] = None + self._total_paused = 0 + self._count_start = 1 + + def pause(self): + """ + Pause the timer. + """ + if self._paused is not None: + raise ValueError("Trying to pause a Timer that is already paused!") + self._paused = perf_counter() + + def is_paused(self) -> bool: + """ + Returns: + bool: whether the timer is currently paused + """ + return self._paused is not None + + def resume(self): + """ + Resume the timer. + """ + if self._paused is None: + raise ValueError("Trying to resume a Timer that is not paused!") + self._total_paused += perf_counter() - self._paused + self._paused = None + self._count_start += 1 + + def seconds(self) -> float: + """ + Returns: + (float): the total number of seconds since the start/reset of the + timer, excluding the time when the timer is paused. + """ + if self._paused is not None: + end_time: float = self._paused # type: ignore + else: + end_time = perf_counter() + return end_time - self._start - self._total_paused + + def avg_seconds(self) -> float: + """ + Returns: + (float): the average number of seconds between every start/reset and + pause. 
+ """ + return self.seconds() / self._count_start diff --git a/thirdparty/fast-reid/fastreid/utils/visualizer.py b/thirdparty/fast-reid/fastreid/utils/visualizer.py new file mode 100644 index 0000000000000000000000000000000000000000..5a06abd8595f1f128f5e60519fce10c5727c5f80 --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/visualizer.py @@ -0,0 +1,278 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import os +import pickle +import random + +import matplotlib.pyplot as plt +import numpy as np +import tqdm +from scipy.stats import norm +from sklearn import metrics + +from .file_io import PathManager + + +class Visualizer: + r"""Visualize images(activation map) ranking list of features generated by reid models.""" + + def __init__(self, dataset): + self.dataset = dataset + + def get_model_output(self, all_ap, dist, q_pids, g_pids, q_camids, g_camids): + self.all_ap = all_ap + self.dist = dist + self.sim = 1 - dist + self.q_pids = q_pids + self.g_pids = g_pids + self.q_camids = q_camids + self.g_camids = g_camids + + self.indices = np.argsort(dist, axis=1) + self.matches = (g_pids[self.indices] == q_pids[:, np.newaxis]).astype(np.int32) + + self.num_query = len(q_pids) + + def get_matched_result(self, q_index): + q_pid = self.q_pids[q_index] + q_camid = self.q_camids[q_index] + + order = self.indices[q_index] + remove = (self.g_pids[order] == q_pid) & (self.g_camids[order] == q_camid) + keep = np.invert(remove) + cmc = self.matches[q_index][keep] + sort_idx = order[keep] + return cmc, sort_idx + + def save_rank_result(self, query_indices, output, max_rank=5, vis_label=False, label_sort='ascending', + actmap=False): + if vis_label: + fig, axes = plt.subplots(2, max_rank + 1, figsize=(3 * max_rank, 12)) + else: + fig, axes = plt.subplots(1, max_rank + 1, figsize=(3 * max_rank, 6)) + for cnt, q_idx in enumerate(tqdm.tqdm(query_indices)): + all_imgs = [] + cmc, sort_idx = self.get_matched_result(q_idx) + query_info = self.dataset[q_idx] + query_img = query_info['images'] + cam_id = query_info['camids'] + query_name = query_info['img_paths'].split('/')[-1] + all_imgs.append(query_img) + query_img = np.rollaxis(np.asarray(query_img.numpy(), dtype=np.uint8), 0, 3) + plt.clf() + ax = fig.add_subplot(1, max_rank + 1, 1) + ax.imshow(query_img) + ax.set_title('{:.4f}/cam{}'.format(self.all_ap[q_idx], cam_id)) + ax.axis("off") + for i in range(max_rank): + if vis_label: + ax = fig.add_subplot(2, max_rank + 1, i + 2) + else: + ax = fig.add_subplot(1, max_rank + 1, i + 2) + g_idx = self.num_query + sort_idx[i] + gallery_info = self.dataset[g_idx] + gallery_img = gallery_info['images'] + cam_id = gallery_info['camids'] + all_imgs.append(gallery_img) + gallery_img = np.rollaxis(np.asarray(gallery_img, dtype=np.uint8), 0, 3) + if cmc[i] == 1: + label = 'true' + ax.add_patch(plt.Rectangle(xy=(0, 0), width=gallery_img.shape[1] - 1, + height=gallery_img.shape[0] - 1, edgecolor=(1, 0, 0), + fill=False, linewidth=5)) + else: + label = 'false' + ax.add_patch(plt.Rectangle(xy=(0, 0), width=gallery_img.shape[1] - 1, + height=gallery_img.shape[0] - 1, + edgecolor=(0, 0, 1), fill=False, linewidth=5)) + ax.imshow(gallery_img) + ax.set_title(f'{self.sim[q_idx, sort_idx[i]]:.3f}/{label}/cam{cam_id}') + ax.axis("off") + # if actmap: + # act_outputs = [] + # + # def hook_fns_forward(module, input, output): + # act_outputs.append(output.cpu()) + # + # all_imgs = np.stack(all_imgs, axis=0) # (b, 3, h, w) + # all_imgs = torch.from_numpy(all_imgs).float() + # # normalize + # 
all_imgs = all_imgs.sub_(self.mean).div_(self.std) + # sz = list(all_imgs.shape[-2:]) + # handle = m.base.register_forward_hook(hook_fns_forward) + # with torch.no_grad(): + # _ = m(all_imgs.cuda()) + # handle.remove() + # acts = self.get_actmap(act_outputs[0], sz) + # for i in range(top + 1): + # axes.flat[i].imshow(acts[i], alpha=0.3, cmap='jet') + if vis_label: + label_indice = np.where(cmc == 1)[0] + if label_sort == "ascending": label_indice = label_indice[::-1] + label_indice = label_indice[:max_rank] + for i in range(max_rank): + if i >= len(label_indice): break + j = label_indice[i] + g_idx = self.num_query + sort_idx[j] + gallery_info = self.dataset[g_idx] + gallery_img = gallery_info['images'] + cam_id = gallery_info['camids'] + gallery_img = np.rollaxis(np.asarray(gallery_img, dtype=np.uint8), 0, 3) + ax = fig.add_subplot(2, max_rank + 1, max_rank + 3 + i) + ax.add_patch(plt.Rectangle(xy=(0, 0), width=gallery_img.shape[1] - 1, + height=gallery_img.shape[0] - 1, + edgecolor=(1, 0, 0), + fill=False, linewidth=5)) + ax.imshow(gallery_img) + ax.set_title(f'{self.sim[q_idx, sort_idx[j]]:.3f}/cam{cam_id}') + ax.axis("off") + + plt.tight_layout() + filepath = os.path.join(output, "{}.jpg".format(cnt)) + fig.savefig(filepath) + + def vis_rank_list(self, output, vis_label, num_vis=100, rank_sort="ascending", label_sort="ascending", max_rank=5, + actmap=False): + r"""Visualize rank list of query instance + Args: + output (str): a directory to save rank list result. + vis_label (bool): if visualize label of query + num_vis (int): + rank_sort (str): save visualization results by which order, + if rank_sort is ascending, AP from low to high, vice versa. + label_sort (bool): + max_rank (int): maximum number of rank result to visualize + actmap (bool): + """ + assert rank_sort in ['ascending', 'descending'], "{} not match [ascending, descending]".format(rank_sort) + + query_indices = np.argsort(self.all_ap) + if rank_sort == 'descending': query_indices = query_indices[::-1] + + query_indices = query_indices[:num_vis] + self.save_rank_result(query_indices, output, max_rank, vis_label, label_sort, actmap) + + def vis_roc_curve(self, output): + PathManager.mkdirs(output) + pos, neg = [], [] + for i, q in enumerate(self.q_pids): + cmc, sort_idx = self.get_matched_result(i) # remove same id in same camera + ind_pos = np.where(cmc == 1)[0] + q_dist = self.dist[i] + pos.extend(q_dist[sort_idx[ind_pos]]) + + ind_neg = np.where(cmc == 0)[0] + neg.extend(q_dist[sort_idx[ind_neg]]) + + scores = np.hstack((pos, neg)) + labels = np.hstack((np.zeros(len(pos)), np.ones(len(neg)))) + + fpr, tpr, thresholds = metrics.roc_curve(labels, scores) + + self.plot_roc_curve(fpr, tpr) + filepath = os.path.join(output, "roc.jpg") + plt.savefig(filepath) + # self.plot_distribution(pos, neg) + # filepath = os.path.join(output, "pos_neg_dist.jpg") + # plt.savefig(filepath) + return fpr, tpr, pos, neg + + @staticmethod + def plot_roc_curve(fpr, tpr, name='model', fig=None): + if fig is None: + fig = plt.figure() + plt.semilogx(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01), 'r', linestyle='--', label='Random guess') + plt.semilogx(fpr, tpr, color=(random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)), + label='ROC curve with {}'.format(name)) + plt.title('Receiver Operating Characteristic') + plt.xlabel('False Positive Rate') + plt.ylabel('True Positive Rate') + plt.legend(loc='best') + return fig + + @staticmethod + def plot_distribution(pos, neg, name='model', fig=None): + if fig is None: + fig = plt.figure() + 
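# histogram the positive-pair scores (density-normalized), then overlay a
+        # Gaussian fitted to their mean/std; the same is done for negatives below
+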
pos_color = (random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)) + n, bins, _ = plt.hist(pos, bins=80, alpha=0.7, density=True, + color=pos_color, + label='positive with {}'.format(name)) + mu = np.mean(pos) + sigma = np.std(pos) + y = norm.pdf(bins, mu, sigma) # fitting curve + plt.plot(bins, y, color=pos_color) # plot y curve + + neg_color = (random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)) + n, bins, _ = plt.hist(neg, bins=80, alpha=0.5, density=True, + color=neg_color, + label='negative with {}'.format(name)) + mu = np.mean(neg) + sigma = np.std(neg) + y = norm.pdf(bins, mu, sigma) # fitting curve + plt.plot(bins, y, color=neg_color) # plot y curve + + plt.xticks(np.arange(0, 1.5, 0.1)) + plt.title('positive and negative pairs distribution') + plt.legend(loc='best') + return fig + + @staticmethod + def save_roc_info(output, fpr, tpr, pos, neg): + results = { + "fpr": np.asarray(fpr), + "tpr": np.asarray(tpr), + "pos": np.asarray(pos), + "neg": np.asarray(neg), + } + with open(os.path.join(output, "roc_info.pickle"), "wb") as handle: + pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL) + + @staticmethod + def load_roc_info(path): + with open(path, 'rb') as handle: res = pickle.load(handle) + return res + + # def plot_camera_dist(self): + # same_cam, diff_cam = [], [] + # for i, q in enumerate(self.q_pids): + # q_camid = self.q_camids[i] + # + # order = self.indices[i] + # same = (self.g_pids[order] == q) & (self.g_camids[order] == q_camid) + # diff = (self.g_pids[order] == q) & (self.g_camids[order] != q_camid) + # sameCam_idx = order[same] + # diffCam_idx = order[diff] + # + # same_cam.extend(self.sim[i, sameCam_idx]) + # diff_cam.extend(self.sim[i, diffCam_idx]) + # + # fig = plt.figure(figsize=(10, 5)) + # plt.hist(same_cam, bins=80, alpha=0.7, density=True, color='red', label='same camera') + # plt.hist(diff_cam, bins=80, alpha=0.5, density=True, color='blue', label='diff camera') + # plt.xticks(np.arange(0.1, 1.0, 0.1)) + # plt.title('positive and negative pair distribution') + # return fig + + # def get_actmap(self, features, sz): + # """ + # :param features: (1, 2048, 16, 8) activation map + # :return: + # """ + # features = (features ** 2).sum(1) # (1, 16, 8) + # b, h, w = features.size() + # features = features.view(b, h * w) + # features = nn.functional.normalize(features, p=2, dim=1) + # acts = features.view(b, h, w) + # all_acts = [] + # for i in range(b): + # act = acts[i].numpy() + # act = cv2.resize(act, (sz[1], sz[0])) + # act = 255 * (act - act.max()) / (act.max() - act.min() + 1e-12) + # act = np.uint8(np.floor(act)) + # all_acts.append(act) + # return all_acts diff --git a/thirdparty/fast-reid/fastreid/utils/weight_init.py b/thirdparty/fast-reid/fastreid/utils/weight_init.py new file mode 100644 index 0000000000000000000000000000000000000000..0bd22e3c4fef99e114e9cad08ac2a7edc015fc7f --- /dev/null +++ b/thirdparty/fast-reid/fastreid/utils/weight_init.py @@ -0,0 +1,37 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import math +from torch import nn + +__all__ = [ + 'weights_init_classifier', + 'weights_init_kaiming', +] + + +def weights_init_kaiming(m): + classname = m.__class__.__name__ + if classname.find('Linear') != -1: + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0) + elif classname.find('Conv') != -1: + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0.0) + 
elif classname.find('BatchNorm') != -1:
+        if m.affine:
+            nn.init.normal_(m.weight, 1.0, 0.02)
+            nn.init.constant_(m.bias, 0.0)
+
+
+def weights_init_classifier(m):
+    classname = m.__class__.__name__
+    if classname.find('Linear') != -1:
+        nn.init.normal_(m.weight, std=0.001)
+        if m.bias is not None:
+            nn.init.constant_(m.bias, 0.0)
diff --git a/thirdparty/fast-reid/projects/Cross-domain-reid/README.md b/thirdparty/fast-reid/projects/Cross-domain-reid/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..0e99ce78483d9639488a2dee492dd9b4113ccac4
--- /dev/null
+++ b/thirdparty/fast-reid/projects/Cross-domain-reid/README.md
@@ -0,0 +1,54 @@
+# Cross-domain Person Re-Identification
+
+## Introduction
+
+[UDAStrongBaseline](https://github.com/zkcys001/UDAStrongBaseline) is a transitional PyTorch codebase for both unsupervised learning (USL)
+and unsupervised domain adaptation (UDA) in object re-ID tasks. It provides stronger
+baselines on these tasks and requires Python >= 3.6 and PyTorch >= 1.1. We will gradually transfer all the code from [UDAStrongBaseline](https://github.com/zkcys001/UDAStrongBaseline) to [fastreid](https://github.com/JDAI-CV/fast-reid) (ongoing).
+
+
+### Unsupervised domain adaptation (UDA) on Person re-ID
+
+- `Direct Transfer` models are trained on the source-domain datasets
+([source_pretrain]()) and directly tested on the target-domain datasets.
+- UDA methods (`MMT`, `SpCL`, etc.) starting from ImageNet are trained end-to-end
+in a single stage, without source-domain pre-training. `MLT` denotes the implementation of our NeurIPS 2020 submission.
+Please note that this is a pre-released repository for the anonymous review process; the official
+repository will be released once the paper is published.
+
+#### DukeMTMC-reID -> Market-1501
+
+| Method | Backbone | Pre-trained | mAP(%) | top-1(%) | top-5(%) | top-10(%) | Train time |
+| ----- | :------: | :---------: | :----: | :------: | :------: | :-------: | :------: |
+| Direct Transfer | ResNet50 | DukeMTMC | 32.2 | 64.9 | 78.7 | 83.4 | ~1h |
+| [UDA_TP](https://github.com/open-mmlab/OpenUnReID/) PR'2020 | ResNet50 | DukeMTMC | 52.3 | 76.0 | 87.8 | 91.9 | ~2h |
+| [MMT](https://github.com/open-mmlab/OpenUnReID/) ICLR'2020 | ResNet50 | DukeMTMC | 80.9 | 92.2 | 97.6 | 98.4 | ~6h |
+| [SpCL](https://github.com/open-mmlab/OpenUnReID/) NIPS'2020 submission | ResNet50 | DukeMTMC | 78.2 | 90.5 | 96.6 | 97.8 | ~3h |
+| [strong_baseline](https://github.com/open-mmlab/OpenUnReID/) | ResNet50 | DukeMTMC | 75.6 | 90.9 | 96.6 | 97.8 | ~3h |
+| [Our stronger_baseline](https://github.com/JDAI-CV/fast-reid) | ResNet50 | DukeMTMC | 78.0 | 91.0 | 96.4 | 97.7 | ~3h |
+| [MLT] NeurIPS'2020 submission | ResNet50 | DukeMTMC | 81.5 | 92.8 | 96.8 | 97.9 | ~ |
+
+#### Market-1501 -> DukeMTMC-reID
+
+| Method | Backbone | Pre-trained | mAP(%) | top-1(%) | top-5(%) | top-10(%) | Train time |
+| ----- | :------: | :---------: | :----: | :------: | :------: | :-------: | :------: |
+| Direct Transfer | ResNet50 | Market | 34.1 | 51.3 | 65.3 | 71.7 | ~1h |
+| [UDA_TP](https://github.com/open-mmlab/OpenUnReID/) PR'2020 | ResNet50 | Market | 45.7 | 65.5 | 78.0 | 81.7 | ~2h |
+| [MMT](https://github.com/open-mmlab/OpenUnReID/) ICLR'2020 | ResNet50 | Market | 67.7 | 80.3 | 89.9 | 92.9 | ~6h |
+| [SpCL](https://github.com/open-mmlab/OpenUnReID/) NIPS'2020 submission | ResNet50 | Market | 70.4 | 83.8 | 91.2 | 93.4 | ~3h |
+| [strong_baseline](https://github.com/open-mmlab/OpenUnReID/) | ResNet50 | Market | 60.4 | 75.9 | 86.2 | 89.8 | ~3h |
+| [Our stronger_baseline](https://github.com/JDAI-CV/fast-reid) | ResNet50 | Market | 66.7 | 80.0 | 89.2 | 92.2 | ~3h |
+| [MLT] NeurIPS'2020 submission | ResNet50 | Market | 71.2 | 83.9 | 91.5 | 93.2 | ~ |
+
+#### Market1501 -> MSMT17
+
+| Method | Source | Rank@1 | mAP | mINP |
+| :---: | :---: | :---: | :---: | :---: |
+| DirectTransfer(R50) | Market1501 | 29.8% | 10.3% | 9.3% |
+| Our method | DukeMTMC | 56.6% | 26.5% | - |
+
+#### DukeMTMC -> MSMT17
+
+| Method | Source | Rank@1 | mAP | mINP |
+| :---: | :---: | :---: | :---: | :---: |
+| DirectTransfer(R50) | DukeMTMC | 34.8% | 12.5% | 0.3% |
+| Our method | DukeMTMC | 59.5% | 27.7% | - |
diff --git a/thirdparty/fast-reid/projects/DistillReID/README.md b/thirdparty/fast-reid/projects/DistillReID/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..77ffb1054637fac49b0671f42678b95a990a8a58
--- /dev/null
+++ b/thirdparty/fast-reid/projects/DistillReID/README.md
@@ -0,0 +1,47 @@
+# Model Distillation in FastReID
+
+This project provides a training script for small models, targeting both fast inference and high accuracy.
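+
+As a concrete sketch of the objective (mirroring `KDTrainer.distill_loss` in
+`kdreid/kd_trainer.py` later in this diff), the student is trained with a
+temperature-scaled KL divergence against the teacher's logits; the tensors
+below are random placeholders:
+
+```python
+import torch
+import torch.nn.functional as F
+
+def distill_loss(y_s, y_t, t=4):
+    # soften both distributions with temperature t, then take the
+    # batch-averaged KL divergence, rescaled by t**2
+    p_s = F.log_softmax(y_s / t, dim=1)
+    p_t = F.softmax(y_t / t, dim=1)
+    return F.kl_div(p_s, p_t, reduction='sum') * (t ** 2) / y_s.shape[0]
+
+student_logits = torch.randn(16, 702)  # placeholder shapes: batch of 16, 702 ids
+teacher_logits = torch.randn(16, 702)
+loss = distill_loss(student_logits, teacher_logits, t=16)  # KDTrainer uses t=16
+```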
+
+
+## Datasets Preparation
+- Market1501
+- DukeMTMC-reID
+- MSMT17
+
+
+## Train and Evaluation
+```shell script
+# a demo on the DukeMTMC-reID dataset
+# see ./configs for more configurations
+# train BagTricksIBN50 as the teacher model
+python3 projects/DistillReID/train_net.py --config-file projects/DistillReID/configs/DukeMTMC/bot50ibn.yml
+# train BagTricksIBN18 as the student model
+python3 projects/DistillReID/train_net.py --config-file projects/DistillReID/configs/DukeMTMC/KD-bot50ibn-bot18ibn.yml --kd
+```
+
+## Experimental Results and Trained Models
+
+### Settings
+
+All the experiments are conducted on a machine with:
+- CPU: Intel(R) Xeon(R) CPU E5-2683 v4 @ 2.10GHz
+- GPU: Tesla P40 (22919 MB memory)
+
+### DukeMTMC-reID
+
+Each cell reports Rank-1 (mAP) / Q.Time per batch of 128 images; rows are the teacher and columns the student (all BagTricks models).
+
+| Teacher \ Student | IBN-101 | IBN-50 | IBN-34 | IBN-18 |
+| :---: | :---: | :---: | :---: | :---: |
+| IBN-101 | 90.8 (80.8) / 0.3395s | 90.8 (81.1) / 0.1984s | 89.63 (78.9) / 0.1760s | 86.96 (75.75) / 0.0854s |
+| IBN-50 | - | 89.8 (79.8) / 0.2264s | 88.82 (78.9) / 0.1761s | 87.75 (76.18) / 0.0838s |
+| IBN-34 | - | - | 88.64 (76.4) / 0.1766s | 87.43 (75.66) / 0.0845s |
+| IBN-18 | - | - | - | 85.50 (71.60) / 0.9178s |
+
+### Market-1501
+
+| Teacher \ Student | IBN-101 | IBN-50 | IBN-34 | IBN-18 |
+| :---: | :---: | :---: | :---: | :---: |
+| IBN-101 | 95.43 (88.95) / 0.2698s | 95.19 (89.52) / 0.1791s | 94.51 (87.82) / 0.0869s | 93.85 (85.77) / 0.0612s |
+| IBN-50 | - | 95.25 (88.16) / 0.1823s | 95.13 (87.28) / 0.0863s | 94.18 (85.81) / 0.0614s |
+| IBN-34 | - | - | 94.63 (84.91) / 0.0860s | 93.71 (85.20) / 0.0620s |
+| IBN-18 | - | - | - | 92.87 (81.22) / 0.0615s |
+| Average Q.Time | 0.2698s | 0.1807s | 0.0864s | 0.0616s |
+
+
+### MSMT17
+
+| Teacher \ Student | IBN-101 | IBN-50 | IBN-34 | IBN-18 |
+| :---: | :---: | :---: | :---: | :---: |
+| IBN-101 | 81.95 (60.51) / 0.2693s | 82.37 (62.08) / 0.1792s | 81.07 (58.56) / 0.0872s | 77.77 (52.77) / 0.0610s |
+| IBN-50 | - | 80.18 (57.80) / 0.1789s | 81.28 (58.27) / 0.0863s | 78.11 (53.10) / 0.0623s |
+| IBN-34 | - | - | 78.27 (53.41) / 0.0873s | 77.65 (52.82) / 0.0615s |
+| IBN-18 | - | - | - | 74.11 (47.26) / 0.0621s |
+| Average Q.Time | 0.2693s | 0.1801s | 0.0868s | 0.0617s |
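+
+Q.Time above is the forward time for one batch of 128 query images. As a rough
+sketch (not the exact script used for these tables), such a number can be
+measured with the `Timer` utility from `fastreid/utils/timer.py` added earlier
+in this diff; the model and input resolution below are placeholders:
+
+```python
+import torch
+from fastreid.utils.timer import Timer
+
+@torch.no_grad()
+def query_time_per_batch(model, iters=10, batch_shape=(128, 3, 256, 128)):
+    model = model.cuda().eval()
+    batch = torch.randn(*batch_shape).cuda()
+    model(batch)              # warm-up pass so CUDA initialization is not timed
+    torch.cuda.synchronize()
+    timer = Timer()           # starts counting on construction
+    for _ in range(iters):
+        model(batch)
+    torch.cuda.synchronize()  # flush pending kernels before reading the clock
+    return timer.seconds() / iters
+```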
+ + +## Contact +This project is conducted by [Guan'an Wang](https://wangguanan.github.io/) (guan.wang0706@gmail) and [Xingyu Liao](https://github.com/L1aoXingyu). + + diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/Base-bot-kd.yml b/thirdparty/fast-reid/projects/DistillReID/configs/Base-bot-kd.yml new file mode 100644 index 0000000000000000000000000000000000000000..b02dd2ec7d455833f6e85fdfd53fded983399269 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/Base-bot-kd.yml @@ -0,0 +1,30 @@ +_BASE_: "../../../configs/Base-bagtricks.yml" + +MODEL_TEACHER: + META_ARCHITECTURE: "Baseline" + + BACKBONE: + NAME: "build_resnet_backbone" + NORM: "BN" + DEPTH: "101x" + FEAT_DIM: 2048 + LAST_STRIDE: 1 + WITH_IBN: True + PRETRAIN: True + + HEADS: + NAME: "EmbeddingHead" + NORM: "BN" + POOL_LAYER: "avgpool" + NECK_FEAT: "before" + CLS_LAYER: "linear" + +MODEL: + BACKBONE: + NAME: "build_resnet_backbone" + DEPTH: "50x" + FEAT_DIM: 2048 + WITH_IBN: True + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "logs/dukemtmc/bagtricks_R34-ibn/model_final.pth" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/Base-sbs-kd.yml b/thirdparty/fast-reid/projects/DistillReID/configs/Base-sbs-kd.yml new file mode 100644 index 0000000000000000000000000000000000000000..ba4cd4819e3f053e14d543a34a90f980d1da02fa --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/Base-sbs-kd.yml @@ -0,0 +1,37 @@ +_BASE_: "../../../configs/Base-Strongerbaseline.yml" + +MODEL_TEACHER: + META_ARCHITECTURE: "Baseline" + + BACKBONE: + NAME: "build_resnet_backbone" + NORM: "BN" + DEPTH: "101x" + FEAT_DIM: 2048 + LAST_STRIDE: 1 + WITH_NL: False + WITH_IBN: True + PRETRAIN: True + + HEADS: + NAME: "EmbeddingHead" + NORM: "BN" + NECK_FEAT: "after" + POOL_LAYER: "gempoolP" + CLS_LAYER: "circleSoftmax" + SCALE: 64 + MARGIN: 0.35 + +MODEL: + BACKBONE: + NAME: "build_resnet_backbone" + DEPTH: "50x" + FEAT_DIM: 2048 + WITH_IBN: True + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "logs/dukemtmc/bagtricks_R34-ibn/model_final.pth" + +INPUT: + SIZE_TRAIN: [ 256, 128 ] + SIZE_TEST: [ 256, 128 ] diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..09c471d8f5ca9ef15bffb8f9974b79cf590b8e48 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot18ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-bot-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "101x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "18x" + FEAT_DIM: 512 + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/bagtricks_R101-ibn" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/bot101ibn-kd-bot18ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot50ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot50ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..8ccbc4ae5fb5406702b561b77e623b7ca3389002 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot101ibn-bot50ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-bot-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "101x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "50x" + FEAT_DIM: 2048 + + STUDENT_WEIGHTS: "" + 
TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/bagtricks_R101-ibn" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/dukemtmc/bot101ibn-kd-bot50ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot50ibn-bot18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot50ibn-bot18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..b7f9e2563ed60f0dfa636f9d7dbfe3455137ce36 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-bot50ibn-bot18ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-bot-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "50x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "18x" + FEAT_DIM: 512 + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/bagtricks_R50-ibn/model_final.pth" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/bot50ibn-kd-bot18ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..849b861c2a28fca642082dc982dd7afbdbd4d736 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs18ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-sbs-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "101x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "34x" + FEAT_DIM: 512 + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/sbs_R101-ibn/model_final.pth" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs101ibn-kd-sbs18ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs50ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs50ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..01938aa5594336838dca5821ba32c344dc9e629e --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs101ibn-sbs50ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-sbs-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "101x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "50x" + FEAT_DIM: 2048 + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/sbs_R101-ibn/model_final.pth" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs101ibn-kd-sbs50ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs50ibn-sbs18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs50ibn-sbs18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..2c73527ff2c1d38d415aac42cd02f043e72bdb99 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/KD-sbs50ibn-sbs18ibn.yml @@ -0,0 +1,20 @@ +_BASE_: "../Base-sbs-kd.yml" + +MODEL_TEACHER: + BACKBONE: + DEPTH: "50x" + FEAT_DIM: 2048 + +MODEL: + BACKBONE: + DEPTH: "18x" + FEAT_DIM: 512 + + STUDENT_WEIGHTS: "" + TEACHER_WEIGHTS: "projects/DistillReID/logs/dukemtmc/sbs_R50-ibn/model_final.pth" + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs50ibn-kd-sbs18ibn" \ No newline 
at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot101ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot101ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..4fccde20daf9a28bcd7b731835d2aeaec4759bc6 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot101ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../../../../configs/Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/bagtricks_R101-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..7976304551b71d40652687955a97c22d8038adcc --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot18ibn.yml @@ -0,0 +1,13 @@ +_BASE_: "../../../../configs/Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "18x" + WITH_IBN: True + FEAT_DIM: 512 + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/bagtricks_R18-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot50ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot50ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..dbac55512309946b1a6563521555a27192a8b35e --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/bot50ibn.yml @@ -0,0 +1,12 @@ +_BASE_: "../../../../configs/Base-bagtricks.yml" + +MODEL: + BACKBONE: + DEPTH: "50x" + WITH_IBN: True + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/bagtricks_R50-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs101ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs101ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..c06d62c6f8d34e22c026e55706618379851db783 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs101ibn.yml @@ -0,0 +1,13 @@ +_BASE_: "../../../configs/Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "101x" + WITH_IBN: True + FEAT_DIM: 2048 + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs_R101-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs18ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs18ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..27e8117c4af30de5879a1eee248c0eeaf32bdcb7 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs18ibn.yml @@ -0,0 +1,13 @@ +_BASE_: "../../../configs/Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "18x" + WITH_IBN: True + FEAT_DIM: 512 + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs_R18-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs50ibn.yml b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs50ibn.yml new file mode 100644 index 0000000000000000000000000000000000000000..da4d8d5a8c5292936f8916a7d394fdfc47132fb6 --- /dev/null +++ 
b/thirdparty/fast-reid/projects/DistillReID/configs/DukeMTMC/sbs50ibn.yml @@ -0,0 +1,13 @@ +_BASE_: "../../../configs/Base-Strongerbaseline.yml" + +MODEL: + BACKBONE: + DEPTH: "50x" + WITH_IBN: True + FEAT_DIM: 2048 + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + +OUTPUT_DIR: "projects/DistillReID/logs/dukemtmc/sbs_R50-ibn" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/__init__.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..53e68678f18aef030db048f6fff3384a9fd4e707 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/__init__.py @@ -0,0 +1,9 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +from .config import add_kdreid_config, add_shufflenet_config +from .kd_trainer import KDTrainer +from .modeling import build_shufflenetv2_backbone \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/config.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/config.py new file mode 100644 index 0000000000000000000000000000000000000000..70abc3b414430d52493d0f8f5befb31614dba11b --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/config.py @@ -0,0 +1,105 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu, guan'an wang +@contact: sherlockliao01@gmail.com, guan.wang0706@gmail.com +""" + +from fastreid.config import CfgNode as CN + + +def add_shufflenet_config(cfg): + _C = cfg + _C.MODEL.BACKBONE.MODEL_SIZE = '1.0x' + + +def add_kdreid_config(cfg): + _C = cfg + + _C.MODEL_TEACHER = CN() + _C.MODEL_TEACHER.META_ARCHITECTURE = 'Baseline' + + # ---------------------------------------------------------------------------- # + # teacher model Backbone options + # ---------------------------------------------------------------------------- # + _C.MODEL_TEACHER.BACKBONE = CN() + + _C.MODEL_TEACHER.BACKBONE.NAME = "build_resnet_backbone" + _C.MODEL_TEACHER.BACKBONE.DEPTH = "50x" + _C.MODEL_TEACHER.BACKBONE.LAST_STRIDE = 1 + # If use IBN block in backbone + _C.MODEL_TEACHER.BACKBONE.WITH_IBN = False + # If use SE block in backbone + _C.MODEL_TEACHER.BACKBONE.WITH_SE = False + # If use Non-local block in backbone + _C.MODEL_TEACHER.BACKBONE.WITH_NL = False + # Input feature dimension + _C.MODEL_TEACHER.BACKBONE.FEAT_DIM = 2048 + + # for shufflenet + _C.MODEL_TEACHER.BACKBONE.MODEL_SIZE = '1.0x' + + # + _C.MODEL_TEACHER.BACKBONE.NORM = 'BN' + _C.MODEL_TEACHER.BACKBONE.PRETRAIN = False + + # ---------------------------------------------------------------------------- # + # teacher model HEADS options + # ---------------------------------------------------------------------------- # + _C.MODEL_TEACHER.HEADS = CN() + _C.MODEL_TEACHER.HEADS.NAME = "EmbeddingHead" + + # Pooling layer type + _C.MODEL_TEACHER.HEADS.POOL_LAYER = "avgpool" + _C.MODEL_TEACHER.HEADS.NECK_FEAT = "before" + _C.MODEL_TEACHER.HEADS.CLS_LAYER = "linear" + + # Pretrained teacher and student model weights + _C.MODEL.TEACHER_WEIGHTS = "" + _C.MODEL.STUDENT_WEIGHTS = "" + + # + _C.MODEL_TEACHER.HEADS.NORM = 'BN' + _C.MODEL_TEACHER.HEADS.SCALE = 64 + _C.MODEL_TEACHER.HEADS.MARGIN = 0.35 + + +def update_model_teacher_config(cfg): + cfg = cfg.clone() + + frozen = cfg.is_frozen() + + cfg.defrost() + cfg.MODEL.META_ARCHITECTURE = cfg.MODEL_TEACHER.META_ARCHITECTURE + # ---------------------------------------------------------------------------- # + # teacher model Backbone options + # 
---------------------------------------------------------------------------- #
+    cfg.MODEL.BACKBONE.NAME = cfg.MODEL_TEACHER.BACKBONE.NAME
+    cfg.MODEL.BACKBONE.DEPTH = cfg.MODEL_TEACHER.BACKBONE.DEPTH
+    cfg.MODEL.BACKBONE.LAST_STRIDE = cfg.MODEL_TEACHER.BACKBONE.LAST_STRIDE
+    # If use IBN block in backbone
+    cfg.MODEL.BACKBONE.WITH_IBN = cfg.MODEL_TEACHER.BACKBONE.WITH_IBN
+    # If use SE block in backbone
+    cfg.MODEL.BACKBONE.WITH_SE = cfg.MODEL_TEACHER.BACKBONE.WITH_SE
+    # If use Non-local block in backbone
+    cfg.MODEL.BACKBONE.WITH_NL = cfg.MODEL_TEACHER.BACKBONE.WITH_NL
+    # Input feature dimension
+    cfg.MODEL.BACKBONE.FEAT_DIM = cfg.MODEL_TEACHER.BACKBONE.FEAT_DIM
+    cfg.MODEL.BACKBONE.PRETRAIN = False
+
+    # for shufflenet
+    cfg.MODEL.BACKBONE.MODEL_SIZE = cfg.MODEL_TEACHER.BACKBONE.MODEL_SIZE
+
+    # ---------------------------------------------------------------------------- #
+    # teacher model HEADS options
+    # ---------------------------------------------------------------------------- #
+    cfg.MODEL.HEADS.NAME = cfg.MODEL_TEACHER.HEADS.NAME
+
+    # Pooling layer type
+    cfg.MODEL.HEADS.POOL_LAYER = cfg.MODEL_TEACHER.HEADS.POOL_LAYER
+
+    cfg.MODEL.HEADS.SCALE = cfg.MODEL_TEACHER.HEADS.SCALE
+    cfg.MODEL.HEADS.MARGIN = cfg.MODEL_TEACHER.HEADS.MARGIN
+
+    if frozen: cfg.freeze()
+
+    return cfg
\ No newline at end of file
diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/kd_trainer.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/kd_trainer.py
new file mode 100644
index 0000000000000000000000000000000000000000..125f5418ebc7d613909c2c5f5c050b110998e35c
--- /dev/null
+++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/kd_trainer.py
@@ -0,0 +1,139 @@
+# encoding: utf-8
+"""
+@author: l1aoxingyu
+@contact: sherlockliao01@gmail.com
+"""
+
+import logging
+import time
+
+import torch
+import torch.nn.functional as F
+from torch import nn
+from torch.nn.parallel import DistributedDataParallel
+
+from fastreid.engine import DefaultTrainer
+from fastreid.utils.file_io import PathManager
+from fastreid.modeling.meta_arch import build_model
+from fastreid.utils.checkpoint import Checkpointer
+from .config import update_model_teacher_config
+
+
+class KDTrainer(DefaultTrainer):
+    """
+    A knowledge distillation trainer for the person re-ID task.
+    """
+
+    def __init__(self, cfg):
+        """
+        Args:
+            cfg (CfgNode):
+        """
+        super().__init__(cfg)
+
+        model_t = self.build_model_teacher(self.cfg)
+        for param in model_t.parameters():
+            param.requires_grad = False
+
+        logger = logging.getLogger('fastreid.' + __name__)
+
+        # Load pre-trained teacher model
+        logger.info("Loading teacher model ...")
+        Checkpointer(model_t).load(cfg.MODEL.TEACHER_WEIGHTS)
+
+        if PathManager.exists(cfg.MODEL.STUDENT_WEIGHTS):
+            logger.info("Loading student model ...")
+            Checkpointer(self.model).load(cfg.MODEL.STUDENT_WEIGHTS)
+        else:
+            logger.info("No student model checkpoints")
+
+        self.model_t = model_t
+
+    def run_step(self):
+        """
+        Implement the standard training logic with an extra distillation loss
+        from the frozen teacher model.
+        """
+        assert self.model.training, "[KDTrainer] base model was changed to eval mode!"
+        start = time.perf_counter()
+        """
+        If you want to do something with the data, you can wrap the dataloader.
+ """ + data = next(self._data_loader_iter) + + data_time = time.perf_counter() - start + + outs = self.model(data) + + # Compute reid loss + if isinstance(self.model, DistributedDataParallel): + loss_dict = self.model.module.losses(outs) + else: + loss_dict = self.model.losses(outs) + + with torch.no_grad(): + outs_t = self.model_t(data) + + q_logits = outs["outputs"]["pred_class_logits"] + t_logits = outs_t["outputs"]["pred_class_logits"].detach() + loss_dict['loss_kl'] = self.distill_loss(q_logits, t_logits, t=16) + + losses = sum(loss_dict.values()) + + with torch.cuda.stream(torch.cuda.Stream()): + metrics_dict = loss_dict + metrics_dict["data_time"] = data_time + self._write_metrics(metrics_dict) + self._detect_anomaly(losses, loss_dict) + + """ + If you need accumulate gradients or something similar, you can + wrap the optimizer with your custom `zero_grad()` method. + """ + self.optimizer.zero_grad() + losses.backward() + + """ + If you need gradient clipping/scaling or other processing, you can + wrap the optimizer with your custom `step()` method. + """ + self.optimizer.step() + + @classmethod + def build_model_teacher(cls, cfg) -> nn.Module: + cfg_t = update_model_teacher_config(cfg) + model_t = build_model(cfg_t) + return model_t + + @staticmethod + def pkt_loss(output_net, target_net, eps=0.0000001): + # Normalize each vector by its norm + output_net_norm = torch.sqrt(torch.sum(output_net ** 2, dim=1, keepdim=True)) + output_net = output_net / (output_net_norm + eps) + output_net[output_net != output_net] = 0 + + target_net_norm = torch.sqrt(torch.sum(target_net ** 2, dim=1, keepdim=True)) + target_net = target_net / (target_net_norm + eps) + target_net[target_net != target_net] = 0 + + # Calculate the cosine similarity + model_similarity = torch.mm(output_net, output_net.transpose(0, 1)) + target_similarity = torch.mm(target_net, target_net.transpose(0, 1)) + + # Scale cosine similarity to 0..1 + model_similarity = (model_similarity + 1.0) / 2.0 + target_similarity = (target_similarity + 1.0) / 2.0 + + # Transform them into probabilities + model_similarity = model_similarity / torch.sum(model_similarity, dim=1, keepdim=True) + target_similarity = target_similarity / torch.sum(target_similarity, dim=1, keepdim=True) + + # Calculate the KL-divergence + loss = torch.mean(target_similarity * torch.log((target_similarity + eps) / (model_similarity + eps))) + return loss + + @staticmethod + def distill_loss(y_s, y_t, t=4): + p_s = F.log_softmax(y_s / t, dim=1) + p_t = F.softmax(y_t / t, dim=1) + loss = F.kl_div(p_s, p_t, reduction='sum') * (t ** 2) / y_s.shape[0] + return loss diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/__init__.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..417ac447cea1375c78db7c41bdda9318c895f13c --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/__init__.py @@ -0,0 +1 @@ +from .backbones import build_shufflenetv2_backbone \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/__init__.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..42828a282c2a78e3a537310700a0691fab76d571 --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/__init__.py @@ -0,0 +1 @@ +from .shufflenetv2 import build_shufflenetv2_backbone \ No newline at end 
of file diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/__init__.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4d302a04715c9bbe6db8cc176e649ba5b39b5ebe --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/__init__.py @@ -0,0 +1,43 @@ +import torch +import torch.nn as nn +from collections import OrderedDict + +from fastreid.modeling.backbones.build import BACKBONE_REGISTRY +from .network import ShuffleNetV2 + + +__all__ = ['build_shufflenetv2_backbone'] + + +@BACKBONE_REGISTRY.register() +def build_shufflenetv2_backbone(cfg): + + pretrain = cfg.MODEL.BACKBONE.PRETRAIN + pretrain_path = cfg.MODEL.BACKBONE.PRETRAIN_PATH + model_size = cfg.MODEL.BACKBONE.MODEL_SIZE + + return ShuffleNetV2Backbone(model_size=model_size, pretrained=pretrain, pretrain_path=pretrain_path) + + +class ShuffleNetV2Backbone(nn.Module): + + def __init__(self, model_size, pretrained=False, pretrain_path=''): + super(ShuffleNetV2Backbone, self).__init__() + + model = ShuffleNetV2(model_size=model_size) + if pretrained: + new_state_dict = OrderedDict() + state_dict = torch.load(pretrain_path)['state_dict'] + for k, v in state_dict.items(): + if k[:7] == 'module.': + k = k[7:] + new_state_dict[k] = v + model.load_state_dict(new_state_dict, strict=True) + + self.backbone = nn.Sequential( + model.first_conv, model.maxpool, model.features, model.conv_last) + + def forward(self, x): + return self.backbone(x) + + diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/blocks.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..c114428a511f3c0687dae3b24a90406ed4aaf1ac --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/blocks.py @@ -0,0 +1,71 @@ +""" +Author: Guan'an Wang +Contact: guan.wang0706@gmail.com +""" + +import torch +import torch.nn as nn + +class ShuffleV2Block(nn.Module): + """ + Reference: + https://github.com/megvii-model/ShuffleNet-Series/tree/master/ShuffleNetV2 + """ + def __init__(self, inp, oup, mid_channels, *, ksize, stride): + super(ShuffleV2Block, self).__init__() + self.stride = stride + assert stride in [1, 2] + + self.mid_channels = mid_channels + self.ksize = ksize + pad = ksize // 2 + self.pad = pad + self.inp = inp + + outputs = oup - inp + + branch_main = [ + # pw + nn.Conv2d(inp, mid_channels, 1, 1, 0, bias=False), + nn.BatchNorm2d(mid_channels), + nn.ReLU(inplace=True), + # dw + nn.Conv2d(mid_channels, mid_channels, ksize, stride, pad, groups=mid_channels, bias=False), + nn.BatchNorm2d(mid_channels), + # pw-linear + nn.Conv2d(mid_channels, outputs, 1, 1, 0, bias=False), + nn.BatchNorm2d(outputs), + nn.ReLU(inplace=True), + ] + self.branch_main = nn.Sequential(*branch_main) + + if stride == 2: + branch_proj = [ + # dw + nn.Conv2d(inp, inp, ksize, stride, pad, groups=inp, bias=False), + nn.BatchNorm2d(inp), + # pw-linear + nn.Conv2d(inp, inp, 1, 1, 0, bias=False), + nn.BatchNorm2d(inp), + nn.ReLU(inplace=True), + ] + self.branch_proj = nn.Sequential(*branch_proj) + else: + self.branch_proj = None + + def forward(self, old_x): + if self.stride==1: + x_proj, x = self.channel_shuffle(old_x) + return torch.cat((x_proj, self.branch_main(x)), 1) + elif self.stride==2: + x_proj = old_x + x = old_x + 
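# stride == 2: no channel split/shuffle here; branch_proj downsamples the
+            # identity input so both branch outputs can be concatenated
+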
return torch.cat((self.branch_proj(x_proj), self.branch_main(x)), 1) + + def channel_shuffle(self, x): + batchsize, num_channels, height, width = x.data.size() + assert (num_channels % 4 == 0) + x = x.reshape(batchsize * num_channels // 2, 2, height * width) + x = x.permute(1, 0, 2) + x = x.reshape(2, -1, num_channels // 2, height, width) + return x[0], x[1] \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/network.py b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/network.py new file mode 100644 index 0000000000000000000000000000000000000000..54fe0eaafcd1838747e616dd9101a1599892d64f --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/kdreid/modeling/backbones/shufflenetv2/network.py @@ -0,0 +1,116 @@ +""" +Author: Guan'an Wang +Contact: guan.wang0706@gmail.com +""" + +import torch +import torch.nn as nn +from .blocks import ShuffleV2Block + + +class ShuffleNetV2(nn.Module): + """ + Reference: + https://github.com/megvii-model/ShuffleNet-Series/tree/master/ShuffleNetV2 + """ + + def __init__(self, input_size=224, n_class=1000, model_size='1.5x'): + super(ShuffleNetV2, self).__init__() + print('model size is ', model_size) + + self.stage_repeats = [4, 8, 4] + self.model_size = model_size + if model_size == '0.5x': + self.stage_out_channels = [-1, 24, 48, 96, 192, 1024] + elif model_size == '1.0x': + self.stage_out_channels = [-1, 24, 116, 232, 464, 1024] + elif model_size == '1.5x': + self.stage_out_channels = [-1, 24, 176, 352, 704, 1024] + elif model_size == '2.0x': + self.stage_out_channels = [-1, 24, 244, 488, 976, 2048] + else: + raise NotImplementedError + + # building first layer + input_channel = self.stage_out_channels[1] + self.first_conv = nn.Sequential( + nn.Conv2d(3, input_channel, 3, 2, 1, bias=False), + nn.BatchNorm2d(input_channel), + nn.ReLU(inplace=True), + ) + + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.features = [] + for idxstage in range(len(self.stage_repeats)): + numrepeat = self.stage_repeats[idxstage] + output_channel = self.stage_out_channels[idxstage + 2] + + for i in range(numrepeat): + if i == 0: + self.features.append(ShuffleV2Block(input_channel, output_channel, + mid_channels=output_channel // 2, ksize=3, stride=2)) + else: + self.features.append(ShuffleV2Block(input_channel // 2, output_channel, + mid_channels=output_channel // 2, ksize=3, stride=1)) + + input_channel = output_channel + + self.features = nn.Sequential(*self.features) + + self.conv_last = nn.Sequential( + nn.Conv2d(input_channel, self.stage_out_channels[-1], 1, 1, 0, bias=False), + nn.BatchNorm2d(self.stage_out_channels[-1]), + nn.ReLU(inplace=True) + ) + self.globalpool = nn.AvgPool2d(7) + if self.model_size == '2.0x': + self.dropout = nn.Dropout(0.2) + self.classifier = nn.Sequential(nn.Linear(self.stage_out_channels[-1], n_class, bias=False)) + self._initialize_weights() + + def forward(self, x): + x = self.first_conv(x) + x = self.maxpool(x) + x = self.features(x) + x = self.conv_last(x) + + x = self.globalpool(x) + if self.model_size == '2.0x': + x = self.dropout(x) + x = x.contiguous().view(-1, self.stage_out_channels[-1]) + x = self.classifier(x) + return x + + def _initialize_weights(self): + for name, m in self.named_modules(): + if isinstance(m, nn.Conv2d): + if 'first' in name: + nn.init.normal_(m.weight, 0, 0.01) + else: + nn.init.normal_(m.weight, 0, 1.0 / m.weight.shape[1]) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + 
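# BatchNorm layers start near identity: affine weight 1, a tiny bias,
+            # and a zeroed running mean
+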
elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0001) + nn.init.constant_(m.running_mean, 0) + elif isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0001) + nn.init.constant_(m.running_mean, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + +if __name__ == "__main__": + model = ShuffleNetV2() + # print(model) + + test_data = torch.rand(5, 3, 224, 224) + test_outputs = model(test_data) + print(test_outputs.size()) diff --git a/thirdparty/fast-reid/projects/DistillReID/train_net.py b/thirdparty/fast-reid/projects/DistillReID/train_net.py new file mode 100644 index 0000000000000000000000000000000000000000..eaf0f0ed7206a090c67c1e1aba1c3800d3234c5d --- /dev/null +++ b/thirdparty/fast-reid/projects/DistillReID/train_net.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# encoding: utf-8 +""" +@author: sherlock, guan'an wang +@contact: sherlockliao01@gmail.com, guan.wang0706@gmail.com +""" + +import sys +import torch +from torch import nn + +sys.path.append('.') +from fastreid.config import get_cfg +from fastreid.engine import default_argument_parser, default_setup, DefaultTrainer, launch +from fastreid.utils.checkpoint import Checkpointer + +from kdreid import * + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_shufflenet_config(cfg) + add_kdreid_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = DefaultTrainer.build_model(cfg) + Checkpointer(model, save_dir=cfg.OUTPUT_DIR).load(cfg.MODEL.WEIGHTS) + res = DefaultTrainer.test(cfg, model) + return res + + if args.kd: trainer = KDTrainer(cfg) + else: trainer = DefaultTrainer(cfg) + + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + parser = default_argument_parser() + parser.add_argument("--kd", action="store_true", help="kd training with teacher model guided") + args = parser.parse_args() + + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/HAA/Readme.md b/thirdparty/fast-reid/projects/HAA/Readme.md new file mode 100644 index 0000000000000000000000000000000000000000..7799c5d63331fe067c4704f33e84921846b09175 --- /dev/null +++ b/thirdparty/fast-reid/projects/HAA/Readme.md @@ -0,0 +1,56 @@ +# Black Re-ID: A Head-shoulder Descriptor for the Challenging Problem of Person Re-Identification + +## Training + +To train a model, run + +```bash +CUDA_VISIBLE_DEVICES=gpus python train_net.py --config-file +``` + +## Evaluation + +To evaluate the model in test set, run similarly: + +```bash +CUDA_VISIBLE_DEVICES=gpus python train_net.py --config-file --eval-only MODEL.WEIGHTS model.pth +``` + +## Experimental Results + +### Market1501 dataset + +| Method | Pretrained | Rank@1 | mAP | +| :---: | :---: | :---: |:---: | +| ResNet50 | ImageNet | 93.3% | 84.6% | +| MGN | ImageNet | 95.7% | 86.9% | +| HAA (ResNet50) | ImageNet | 95% | 87.1% | +| HAA (MGN) | ImageNet | 95.8% | 89.5% | + +### DukeMTMC dataset + +| Method | Pretrained | Rank@1 | mAP | +| :---: | :---: | :---: |:---: | +| 
ResNet50 | ImageNet | 86.2% | 75.3% | +| MGN | ImageNet | 88.7% | 78.4% | +| HAA (ResNet50) | ImageNet | 87.7% | 75.7% | +| HAA (MGN) | ImageNet | 89% | 80.4% | + +### Black-reid black group + +| Method | Pretrained | Rank@1 | mAP | +| :---: | :---: | :---: |:---: | +| ResNet50 | ImageNet | 80.9% | 70.8% | +| MGN | ImageNet | 86.7% | 79.1% | +| HAA (ResNet50) | ImageNet | 86.7% | 79% | +| HAA (MGN) | ImageNet | 91.0% | 83.8% | + +### White-reid white group + +| Method | Pretrained | Rank@1 | mAP | +| :---: | :---: | :---: |:---: | +| ResNet50 | ImageNet | 89.5% | 75.8% | +| MGN | ImageNet | 94.3% | 85.8% | +| HAA (ResNet50) | ImageNet | 93.5% | 84.4% | +| HSE (MGN) | ImageNet | 95.3% | 88.1% | + diff --git a/thirdparty/fast-reid/projects/HPOReID/README.md b/thirdparty/fast-reid/projects/HPOReID/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3cb5c9c9289d83909a7aa9774c9c7f345d811445 --- /dev/null +++ b/thirdparty/fast-reid/projects/HPOReID/README.md @@ -0,0 +1,23 @@ +# Hyper-Parameter Optimization in FastReID + +This project includes training reid models with hyper-parameter optimization. + +Install the following + +```bash +pip install 'ray[tune]' +pip install hpbandster ConfigSpace hyperopt +``` + +## Example + +This is an example for tuning `batch_size` and `num_instance` automatically. + +To train hyperparameter optimization with BOHB(Bayesian Optimization with HyperBand) search algorithm, run + +```bash +python3 projects/HPOReID/train_hpo.py --config-file projects/HPOReID/configs/baseline.yml --srch-algo "bohb" +``` + +## Known issues +todo \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/HPOReID/configs/baseline.yml b/thirdparty/fast-reid/projects/HPOReID/configs/baseline.yml new file mode 100644 index 0000000000000000000000000000000000000000..8e853aca7c479ef0207c264e777390d2333024b7 --- /dev/null +++ b/thirdparty/fast-reid/projects/HPOReID/configs/baseline.yml @@ -0,0 +1,93 @@ +MODEL: + META_ARCHITECTURE: "Baseline" + + FREEZE_LAYERS: ["backbone"] + + BACKBONE: + NAME: "build_resnet_backbone" + DEPTH: "34x" + LAST_STRIDE: 1 + FEAT_DIM: 512 + NORM: "BN" + WITH_NL: False + WITH_IBN: True + PRETRAIN: True + PRETRAIN_PATH: "/export/home/lxy/.cache/torch/checkpoints/resnet34_ibn_a-94bc1577.pth" + + HEADS: + NAME: "EmbeddingHead" + NORM: "BN" + NECK_FEAT: "after" + EMBEDDING_DIM: 0 + POOL_LAYER: "gempool" + CLS_LAYER: "circleSoftmax" + SCALE: 64 + MARGIN: 0.35 + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss",) + + CE: + EPSILON: 0.1 + SCALE: 1. + + TRI: + MARGIN: 0.0 + HARD_MINING: True + NORM_FEAT: False + SCALE: 1. + + CIRCLE: + MARGIN: 0.25 + ALPHA: 96 + SCALE: 1.0 + +INPUT: + SIZE_TRAIN: [256, 128] + SIZE_TEST: [256, 128] + DO_AUTOAUG: True + REA: + ENABLED: True + CJ: + ENABLED: True + DO_PAD: True + +DATALOADER: + PK_SAMPLER: True + NAIVE_WAY: False + NUM_INSTANCE: 16 + NUM_WORKERS: 8 + +SOLVER: + AMP_ENABLED: False + OPT: "Adam" + SCHED: "WarmupCosineAnnealingLR" + MAX_ITER: 60 + BASE_LR: 0.00035 + BIAS_LR_FACTOR: 1. 
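  # Note: SOLVER.IMS_PER_BATCH and DATALOADER.NUM_INSTANCE in this file are
  # only defaults; update_config() in train_hpo.py overwrites them with the
  # sampled "bsz" / "num_inst" values for each trial.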
+ WEIGHT_DECAY: 0.0005 + WEIGHT_DECAY_BIAS: 0.0 + IMS_PER_BATCH: 64 + + DELAY_ITERS: 30 + ETA_MIN_LR: 0.00000077 + + FREEZE_ITERS: 5 + + WARMUP_FACTOR: 0.01 + WARMUP_ITERS: 5 + + CHECKPOINT_PERIOD: 100 + +TEST: + EVAL_PERIOD: 10 + IMS_PER_BATCH: 256 + +DATASETS: + NAMES: ("DukeMTMC",) + TESTS: ("DukeMTMC",) + COMBINEALL: False + +CUDNN_BENCHMARK: True + +OUTPUT_DIR: "projects/HPOReID/logs/dukemtmc/r34-ibn_bohb_bsz_num-inst" diff --git a/thirdparty/fast-reid/projects/HPOReID/hporeid/__init__.py b/thirdparty/fast-reid/projects/HPOReID/hporeid/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f661f2601e7c8d58eca97379873cabb68053cab9 --- /dev/null +++ b/thirdparty/fast-reid/projects/HPOReID/hporeid/__init__.py @@ -0,0 +1,7 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +from .tune_hooks import TuneReportHook diff --git a/thirdparty/fast-reid/projects/HPOReID/hporeid/tune_hooks.py b/thirdparty/fast-reid/projects/HPOReID/hporeid/tune_hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..a6fc503a9efd4277dae7e03541c33edec52c3a7d --- /dev/null +++ b/thirdparty/fast-reid/projects/HPOReID/hporeid/tune_hooks.py @@ -0,0 +1,57 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch +from ray import tune + +from fastreid.engine.hooks import EvalHook, flatten_results_dict +from fastreid.utils.checkpoint import Checkpointer + + +class TuneReportHook(EvalHook): + def __init__(self, eval_period, eval_function): + super().__init__(eval_period, eval_function) + self.step = 0 + + def _do_eval(self): + results = self._func() + + if results: + assert isinstance( + results, dict + ), "Eval function must return a dict. Got {} instead.".format(results) + + flattened_results = flatten_results_dict(results) + for k, v in flattened_results.items(): + try: + v = float(v) + except Exception: + raise ValueError( + "[EvalHook] eval_function should return a nested dict of float. " + "Got '{}: {}' instead.".format(k, v) + ) + + # Remove extra memory cache of main process due to evaluation + torch.cuda.empty_cache() + + self.step += 1 + + # Here we save a checkpoint. It is automatically registered with + # Ray Tune and will potentially be passed as the `checkpoint_dir` + # parameter in future iterations. 
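# `tune.checkpoint_dir(step=...)` is the Ray Tune 1.x function-API context
# manager: it yields a directory that Tune tracks, so trials paused or killed
# by the scheduler (e.g. HyperBandForBOHB, PBT) can be restored from it.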
+ with tune.checkpoint_dir(step=self.step) as checkpoint_dir: + additional_state = {"iteration": int(self.trainer.iter)} + Checkpointer( + # Assume you want to save checkpoints together with logs/statistics + self.trainer.model, + checkpoint_dir, + save_to_disk=True, + optimizer=self.trainer.optimizer, + scheduler=self.trainer.scheduler, + ).save(name="checkpoint", **additional_state) + + metrics = dict(r1=results['Rank-1'], map=results['mAP'], score=(results['Rank-1'] + results['mAP']) / 2) + tune.report(**metrics) diff --git a/thirdparty/fast-reid/projects/HPOReID/train_hpo.py b/thirdparty/fast-reid/projects/HPOReID/train_hpo.py new file mode 100644 index 0000000000000000000000000000000000000000..9ab56bf65bb7cf8e97b021b1a0def2dea75aa2f2 --- /dev/null +++ b/thirdparty/fast-reid/projects/HPOReID/train_hpo.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +import logging +import os +import sys +from functools import partial + +import ConfigSpace as CS +import ray +from hyperopt import hp +from ray import tune +from ray.tune import CLIReporter +from ray.tune.schedulers import ASHAScheduler, PopulationBasedTraining +from ray.tune.schedulers.hb_bohb import HyperBandForBOHB +from ray.tune.suggest.bohb import TuneBOHB +from ray.tune.suggest.hyperopt import HyperOptSearch + +sys.path.append('.') + +from fastreid.config import get_cfg, CfgNode +from fastreid.engine import hooks +from fastreid.modeling import build_model +from fastreid.engine import DefaultTrainer, default_argument_parser, default_setup +from fastreid.utils.events import CommonMetricPrinter +from fastreid.utils.file_io import PathManager + +from hporeid import * + +logger = logging.getLogger("fastreid.project.tune") + +ray.init(dashboard_host='127.0.0.1') + + +class HyperTuneTrainer(DefaultTrainer): + def build_hooks(self): + r""" + Build a list of default hooks, including timing, evaluation, + checkpointing, lr scheduling, precise BN, writing events. + Returns: + list[HookBase]: + """ + cfg = self.cfg.clone() + cfg.defrost() + + ret = [ + hooks.IterationTimer(), + hooks.LRScheduler(self.optimizer, self.scheduler), + ] + + if cfg.MODEL.FREEZE_LAYERS != [''] and cfg.SOLVER.FREEZE_ITERS > 0: + freeze_layers = ",".join(cfg.MODEL.FREEZE_LAYERS) + logger.info(f'Freeze layer group "{freeze_layers}" training for {cfg.SOLVER.FREEZE_ITERS:d} iterations') + ret.append(hooks.FreezeLayer( + self.model, + self.optimizer, + cfg.MODEL.FREEZE_LAYERS, + cfg.SOLVER.FREEZE_ITERS, + )) + + def test_and_save_results(): + self._last_eval_results = self.test(self.cfg, self.model) + return self._last_eval_results + + # Do evaluation after checkpointer, because then if it fails, + # we can use the saved checkpoint to debug. + ret.append(TuneReportHook(cfg.TEST.EVAL_PERIOD, test_and_save_results)) + + # run writers in the end, so that evaluation metrics are written + ret.append(hooks.PeriodicWriter([CommonMetricPrinter(self.max_iter)], 200)) + + return ret + + @classmethod + def build_model(cls, cfg): + model = build_model(cfg) + return model + + +def setup(args): + """ + Create configs and perform basic setups. 
+ """ + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def update_config(cfg, config): + cfg.defrost() + + # lr, weight decay + # cfg.SOLVER.BASE_LR = config["lr"] + # cfg.SOLVER.ETA_MIN_LR = config["lr"] * 0.0022 + # cfg.SOLVER.DELAY_ITERS = config["delay_iters"] + # cfg.SOLVER.WEIGHT_DECAY = config["wd"] + # cfg.SOLVER.WEIGHT_DECAY_BIAS = config["wd_bias"] + + # batch size, number of instance + cfg.SOLVER.IMS_PER_BATCH = config["bsz"] + cfg.DATALOADER.NUM_INSTANCE = config["num_inst"] + + # loss related + # cfg.MODEL.LOSSES.CE.SCALE = config["ce_scale"] + # cfg.MODEL.HEADS.SCALE = config["circle_scale"] + # cfg.MODEL.HEADS.MARGIN = config["circle_margin"] + + # data augmentation + # cfg.INPUT.DO_AUTOAUG = config["autoaug_enabled"] + # cfg.INPUT.CJ.ENABLED = config["cj_enabled"] + return cfg + + +def train_reid_tune(config, checkpoint_dir=None, cfg=None): + update_config(cfg, config) + + trainer = HyperTuneTrainer(cfg) + # Load checkpoint if specific + if checkpoint_dir: + path = os.path.join(checkpoint_dir, "checkpoint.pth") + checkpoint = trainer.checkpointer.resume_or_load(path, resume=False) + trainer.start_iter = checkpoint.get("iteration", -1) + 1 + + # Regular model training + trainer.train() + + +def main(args): + cfg = setup(args) + + exp_metrics = dict(metric="score", mode="max") + + if args.srch_algo == "hyperopt": + # Create a HyperOpt search space + search_space = { + # "lr": hp.loguniform("lr", 1e-6, 1e-3), + # "delay_iters": hp.randint("delay_iters", 40) + 10, + # "wd": hp.uniform("wd", 0, 1e-3), + # "wd_bias": hp.uniform("wd_bias", 0, 1e-3), + "bsz": hp.choice("bsz", [64, 96, 128, 160, 224, 256]), + "num_inst": hp.choice("num_inst", [2, 4, 8, 16, 32]), + # "ce_scale": hp.uniform("ce_scale", 0.1, 1.0), + # "circle_scale": hp.choice("circle_scale", [16, 32, 64, 128, 256]), + # "circle_margin": hp.uniform("circle_margin", 0, 1) * 0.4 + 0.1, + # "autoaug_enabled": hp.choice("autoaug_enabled", [True, False]), + # "cj_enabled": hp.choice("cj_enabled", [True, False]), + } + + search_algo = HyperOptSearch(search_space, **exp_metrics) + + if args.pbt: + scheduler = PopulationBasedTraining( + time_attr="training_iteration", + **exp_metrics, + perturbation_interval=2, + hyperparam_mutations={ + "bsz": [64, 96, 128, 160, 224, 256], + "num_inst": [2, 4, 8, 16, 32], + } + ) + else: + scheduler = ASHAScheduler( + metric="score", + mode="max", + max_t=10, + grace_period=1, + reduction_factor=2) + + elif args.srch_algo == "bohb": + search_space = CS.ConfigurationSpace() + search_space.add_hyperparameters([ + # CS.UniformFloatHyperparameter(name="lr", lower=1e-6, upper=1e-2, log=True), + # CS.UniformIntegerHyperparameter(name="delay_iters", lower=20, upper=60), + # CS.UniformFloatHyperparameter(name="ce_scale", lower=0.1, upper=1.0), + # CS.UniformIntegerHyperparameter(name="circle_scale", lower=8, upper=256), + # CS.UniformFloatHyperparameter(name="circle_margin", lower=0.1, upper=0.5), + # CS.UniformFloatHyperparameter(name="wd", lower=0, upper=1e-3), + # CS.UniformFloatHyperparameter(name="wd_bias", lower=0, upper=1e-3), + CS.CategoricalHyperparameter(name="bsz", choices=[64, 96, 128, 160, 224, 256]), + CS.CategoricalHyperparameter(name="num_inst", choices=[2, 4, 8, 16, 32]), + # CS.CategoricalHyperparameter(name="autoaug_enabled", choices=[True, False]), + # CS.CategoricalHyperparameter(name="cj_enabled", choices=[True, False]), + ]) + + search_algo = TuneBOHB( + search_space, 
max_concurrent=4, **exp_metrics) + + scheduler = HyperBandForBOHB( + time_attr="training_iteration", + reduction_factor=3, + max_t=9, + **exp_metrics, + ) + + else: + raise ValueError("Search algorithm must be chosen from [hyperopt, bohb], but got {}".format(args.srch_algo)) + + reporter = CLIReporter( + parameter_columns=["bsz", "num_inst"], + metric_columns=["r1", "map", "training_iteration"]) + + analysis = tune.run( + partial( + train_reid_tune, + cfg=cfg), + resources_per_trial={"cpu": 12, "gpu": 1}, + search_alg=search_algo, + num_samples=args.num_trials, + scheduler=scheduler, + progress_reporter=reporter, + local_dir=cfg.OUTPUT_DIR, + keep_checkpoints_num=10, + name=args.srch_algo) + + best_trial = analysis.get_best_trial("score", "max", "last") + logger.info("Best trial config: {}".format(best_trial.config)) + logger.info("Best trial final validation mAP: {}, Rank-1: {}".format( + best_trial.last_result["map"], best_trial.last_result["r1"])) + + save_dict = dict(R1=best_trial.last_result["r1"].item(), mAP=best_trial.last_result["map"].item()) + save_dict.update(best_trial.config) + path = os.path.join(cfg.OUTPUT_DIR, "best_config.yaml") + with PathManager.open(path, "w") as f: + f.write(CfgNode(save_dict).dump()) + logger.info("Best config saved to {}".format(os.path.abspath(path))) + + +if __name__ == "__main__": + parser = default_argument_parser() + parser.add_argument("--num-trials", type=int, default=12, help="number of tune trials") + parser.add_argument("--srch-algo", type=str, default="bohb", + help="search algorithms for hyperparameters search space") + parser.add_argument("--pbt", action="store_true", help="use population based training") + args = parser.parse_args() + print("Command Line Args:", args) + main(args) diff --git a/thirdparty/fast-reid/projects/PartialReID/README.md b/thirdparty/fast-reid/projects/PartialReID/README.md new file mode 100644 index 0000000000000000000000000000000000000000..35191df5f5b9fe69fb1056d1d6af667afcde4378 --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/README.md @@ -0,0 +1,75 @@ +# DSR in FastReID +**Deep Spatial Feature Reconstruction for Partial Person Re-identification** + +Lingxiao He, Xingyu Liao + +[[`CVPR2018`](http://openaccess.thecvf.com/content_cvpr_2018/papers/He_Deep_Spatial_Feature_CVPR_2018_paper.pdf)] [[`BibTeX`](#CitingDSR)] + +**Foreground-aware Pyramid Reconstruction for Alignment-free Occluded Person Re-identification** + +Lingxiao He, Xingyu Liao + +[[`ICCV2019`](http://openaccess.thecvf.com/content_ICCV_2019/papers/He_Foreground-Aware_Pyramid_Reconstruction_for_Alignment-Free_Occluded_Person_Re-Identification_ICCV_2019_paper.pdf)] [[`BibTeX`](#CitingFPR)] + +## News! + +[1] The old_version code can be check in [old_version](https://github.com/JDAI-CV/Partial-Person-ReID), you can obtain the same result published in paper, and the new version code is updating, please waiting! + +## Installation + +First install FastReID, and then put Partial Datasets in directory datasets. The whole framework of FastReID-DSR is +
+ *(framework figure omitted here)*
+ +and the detail you can refer to +## Datasets + +The datasets can find in [Google Drive](https://drive.google.com/file/d/1p7Jvo-RJhU_B6hf9eAhIEFNhvrzM5cdh/view?usp=sharing) + +PartialREID---gallery: 300 images of 60 ids, query: 300 images of 60 ids + +PartialiLIDS---gallery: 119 images of 119 ids, query: 119 images of 119 ids + +OccludedREID---gallery: 1,000 images of 200 ids, query: 1,000 images of 200 ids + +## Training and Evaluation + +To train a model, run: +```bash +python3 projects/PartialReID/train_net.py --config-file +``` + +For example, to train the re-id network with IBN-ResNet-50 Backbone +one should execute: +```bash +CUDA_VISIBLE_DEVICES='0,1,2,3' python3 projects/PartialReID/train_net.py --config-file 'projects/PartialReID/configs/partial_market.yml' +``` + +## Results + +| Method | PartialREID | OccludedREID | PartialiLIDS | +|:--:|:--:|:--:|:--:| +| | Rank@1 (mAP)| Rank@1 (mAP)| Rank@1 (mAP)| +| DSR (CVPR’18) |73.7(68.1) |72.8(62.8)|64.3(58.1)| +| FPR (ICCV'19) | 81.0(76.6)|78.3(68.0)|68.1(61.8)| +| FastReID-DSR | 82.7(76.8)|81.6(70.9)|73.1(79.8) | + +## Citing DSR and Citing FPR + +If you use DSR or FPR, please use the following BibTeX entry. + +``` +@inproceedings{he2018deep, + title={Deep spatial feature reconstruction for partial person re-identification: Alignment-free approach}, + author={He, Lingxiao and Liang, Jian and Li, Haiqing and Sun, Zhenan}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2018} +} +@inproceedings{he2019foreground, + title={Foreground-aware Pyramid Reconstruction for Alignment-free Occluded Person Re-identification}, + author={He, Lingxiao and Wang, Yinggang and Liu, Wu and Zhao, He and Sun, Zhenan and Feng, Jiashi}, + booktitle={IEEE International Conference on Computer Vision (ICCV)}, + year={2019} +} +``` diff --git a/thirdparty/fast-reid/projects/PartialReID/configs/partial_market.yml b/thirdparty/fast-reid/projects/PartialReID/configs/partial_market.yml new file mode 100644 index 0000000000000000000000000000000000000000..b78706e4f619ba00ed545df8e28ed02d3a761fc5 --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/configs/partial_market.yml @@ -0,0 +1,70 @@ +MODEL: + META_ARCHITECTURE: 'PartialBaseline' + + BACKBONE: + NAME: "build_resnet_backbone" + DEPTH: "50x" + NORM: "BN" + LAST_STRIDE: 1 + WITH_IBN: True + PRETRAIN_PATH: "/export/home/lxy/.cache/torch/checkpoints/resnet50_ibn_a-d9d0bb7b.pth" + + HEADS: + NAME: "DSRHead" + NORM: "BN" + POOL_LAYER: "avgpool" + NECK_FEAT: "before" + CLS_LAYER: "linear" + + LOSSES: + NAME: ("CrossEntropyLoss", "TripletLoss") + CE: + EPSILON: 0.1 + SCALE: 1. + TRI: + MARGIN: 0.3 + HARD_MINING: False + SCALE: 1. + +DATASETS: + NAMES: ("Market1501",) + TESTS: ("PartialREID", "PartialiLIDS","OccludedREID",) + +INPUT: + SIZE_TRAIN: [384, 128] + SIZE_TEST: [384, 128] + REA: + ENABLED: False + DO_PAD: False + +DATALOADER: + PK_SAMPLER: True + NAIVE_WAY: False + NUM_INSTANCE: 4 + NUM_WORKERS: 8 + +SOLVER: + OPT: "Adam" + MAX_ITER: 30 + BASE_LR: 0.00035 + BIAS_LR_FACTOR: 2. 
+ WEIGHT_DECAY: 0.0005 + WEIGHT_DECAY_BIAS: 0.0 + IMS_PER_BATCH: 64 + + SCHED: "WarmupMultiStepLR" + STEPS: [15, 25] + GAMMA: 0.1 + + WARMUP_FACTOR: 0.01 + WARMUP_ITERS: 5 + + CHECKPOINT_PERIOD: 10 + +TEST: + EVAL_PERIOD: 5 + IMS_PER_BATCH: 128 + +CUDNN_BENCHMARK: True + +OUTPUT_DIR: "projects/PartialReID/logs/test_partial" diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/__init__.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..82240545af8ee7fe5cbd3ff707a39884148cdddc --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/__init__.py @@ -0,0 +1,11 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +from .partial_dataset import * +from .partialbaseline import PartialBaseline +from .dsr_head import DSRHead +from .config import add_partialreid_config +from .dsr_evaluation import DsrEvaluator diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/config.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/config.py new file mode 100644 index 0000000000000000000000000000000000000000..882be1300f0f87cfdf23f985b7862c56ba152fd3 --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/config.py @@ -0,0 +1,15 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +from fastreid.config import CfgNode as CN + + +def add_partialreid_config(cfg): + _C = cfg + + _C.TEST.DSR = CN() + _C.TEST.DSR.ENABLED = True + diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_distance.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_distance.py new file mode 100644 index 0000000000000000000000000000000000000000..4b05d10cf99f2fc4e5ceb1ea38fdafeba8124ede --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_distance.py @@ -0,0 +1,53 @@ +"""Numpy version of euclidean distance, etc. 
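(Background for compute_dsr_dist below: given the spatial feature sets of a
query q and a gallery image g, a ridge-regression projection
P = (g^T g + kappa*I)^{-1} g^T reconstructs q from g, and the score-weighted
residual ||g P q - q||_2 is the DSR distance; with kappa = 0.001, only the
100 gallery candidates closest under the input distmat are re-scored.)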
+Notice the input/output shape of methods, so that you can better understand +the meaning of these methods.""" + +import torch +import numpy as np + + +def normalize(nparray, order=2, axis=0): + """Normalize a N-D numpy array along the specified axis.""" + norm = np.linalg.norm(nparray, ord=order, axis=axis, keepdims=True) + return nparray / (norm + np.finfo(np.float32).eps) + + +def compute_dsr_dist(array1, array2, distmat, scores): + """ Compute the sptial feature reconstruction of all pairs + array: [M, N, C] M: the number of query, N: the number of spatial feature, C: the dimension of each spatial feature + array2: [M, N, C] M: the number of gallery + :return: + numpy array with shape [m1, m2] + """ + dist = 100 * torch.ones(len(array1), len(array2)) + dist = dist.cuda() + kappa = 0.001 + index = np.argsort(distmat, axis=1) + T = kappa * torch.eye(110) + T = T.cuda() + M = [] + for i in range(0, len(array2)): + g = array2[i] + g = torch.FloatTensor(g) + g = g.view(g.size(0), g.size(1)) + g = g.cuda() + Proj_M1 = torch.matmul(torch.inverse(torch.matmul(g.t(), g) + T), g.t()) + Proj_M1 = Proj_M1.cpu().numpy() + M.append(Proj_M1) + for i in range(0, len(array1)): + q = torch.FloatTensor(array1[i]) + q = q.view(q.size(0), q.size(1)) + q = q.cuda() + for j in range(0, 100): + g = array2[index[i, j]] + g = torch.FloatTensor(g) + g = g.view(g.size(0), g.size(1)) + g = g.cuda() + Proj_M = torch.FloatTensor(M[index[i, j]]) + Proj_M = Proj_M.cuda() + a = torch.matmul(g, torch.matmul(Proj_M, q)) - q + dist[i, index[i, j]] = ((torch.pow(a, 2).sum(0).sqrt()) * scores[i]).sum() + dist = dist.cpu() + dist = dist.numpy() + + return dist diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_evaluation.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6c26630c98256a4eca267c21b1b7eff756005c --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_evaluation.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import copy +import logging +from collections import OrderedDict + +import numpy as np +import torch +import torch.nn.functional as F +from sklearn import metrics + +from fastreid.evaluation.evaluator import DatasetEvaluator +from fastreid.evaluation.rank import evaluate_rank +from fastreid.evaluation.roc import evaluate_roc +from fastreid.utils import comm +from .dsr_distance import compute_dsr_dist + +logger = logging.getLogger('fastreid.partialreid.dsr_evaluation') + + +class DsrEvaluator(DatasetEvaluator): + def __init__(self, cfg, num_query, output_dir=None): + self.cfg = cfg + self._num_query = num_query + self._output_dir = output_dir + + self.features = [] + self.spatial_features = [] + self.scores = [] + self.pids = [] + self.camids = [] + + def reset(self): + self.features = [] + self.spatial_features = [] + self.scores = [] + self.pids = [] + self.camids = [] + + def process(self, inputs, outputs): + self.pids.extend(inputs["targets"]) + self.camids.extend(inputs["camids"]) + self.features.append(F.normalize(outputs[0]).cpu()) + outputs1 = F.normalize(outputs[1].data).cpu() + self.spatial_features.append(outputs1) + self.scores.append(outputs[2]) + + def evaluate(self): + if comm.get_world_size() > 1: + comm.synchronize() + features = comm.gather(self.features) + features = sum(features, []) + + spatial_features = comm.gather(self.spatial_features) + spatial_features = sum(spatial_features, []) + + 
scores = comm.gather(self.scores) + scores = sum(scores, []) + + pids = comm.gather(self.pids) + pids = sum(pids, []) + + camids = comm.gather(self.camids) + camids = sum(camids, []) + + # fmt: off + if not comm.is_main_process(): return {} + # fmt: on + else: + features = self.features + spatial_features = self.spatial_features + scores = self.scores + pids = self.pids + camids = self.camids + + features = torch.cat(features, dim=0) + spatial_features = torch.cat(spatial_features, dim=0).numpy() + scores = torch.cat(scores, dim=0) + + # query feature, person ids and camera ids + query_features = features[:self._num_query] + query_pids = np.asarray(pids[:self._num_query]) + query_camids = np.asarray(camids[:self._num_query]) + + # gallery features, person ids and camera ids + gallery_features = features[self._num_query:] + gallery_pids = np.asarray(pids[self._num_query:]) + gallery_camids = np.asarray(camids[self._num_query:]) + + if self.cfg.TEST.METRIC == "cosine": + query_features = F.normalize(query_features, dim=1) + gallery_features = F.normalize(gallery_features, dim=1) + + dist = 1 - torch.mm(query_features, gallery_features.t()).numpy() + self._results = OrderedDict() + + query_features = query_features.numpy() + gallery_features = gallery_features.numpy() + if self.cfg.TEST.DSR.ENABLED: + logger.info("Testing with DSR setting") + dist = compute_dsr_dist(spatial_features[:self._num_query], spatial_features[self._num_query:], dist, + scores[:self._num_query]) + cmc, all_AP, all_INP = evaluate_rank(dist, query_features, gallery_features, query_pids, gallery_pids, + query_camids, gallery_camids, use_distmat=True) + else: + cmc, all_AP, all_INP = evaluate_rank(dist, query_features, gallery_features, query_pids, gallery_pids, + query_camids, gallery_camids, use_distmat=False) + mAP = np.mean(all_AP) + mINP = np.mean(all_INP) + + for r in [1, 5, 10]: + self._results['Rank-{}'.format(r)] = cmc[r - 1] + self._results['mAP'] = mAP + self._results['mINP'] = mINP + + if self.cfg.TEST.ROC_ENABLED: + scores, labels = evaluate_roc(dist, query_features, gallery_features, + query_pids, gallery_pids, query_camids, gallery_camids) + fprs, tprs, thres = metrics.roc_curve(labels, scores) + + for fpr in [1e-4, 1e-3, 1e-2]: + ind = np.argmin(np.abs(fprs - fpr)) + self._results["TPR@FPR={:.0e}".format(fpr)] = tprs[ind] + + return copy.deepcopy(self._results) diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_head.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_head.py new file mode 100644 index 0000000000000000000000000000000000000000..7009180221e949dc993d57a065c89bd630164c7e --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/dsr_head.py @@ -0,0 +1,153 @@ +# encoding: utf-8 +""" +@author: lingxiao he +@contact: helingxiao3@jd.com +""" + +import torch +import torch.nn.functional as F +from torch import nn + +from fastreid.layers import * +from fastreid.modeling.heads.build import REID_HEADS_REGISTRY +from fastreid.utils.weight_init import weights_init_classifier, weights_init_kaiming + + +class OcclusionUnit(nn.Module): + def __init__(self, in_planes=2048): + super(OcclusionUnit, self).__init__() + self.MaxPool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0) + self.MaxPool2 = nn.MaxPool2d(kernel_size=4, stride=2, padding=0) + self.MaxPool3 = nn.MaxPool2d(kernel_size=6, stride=2, padding=0) + self.MaxPool4 = nn.MaxPool2d(kernel_size=8, stride=2, padding=0) + self.mask_layer = nn.Linear(in_planes, 1, bias=False) + + def forward(self, x): + 
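# Four max-pool scales build a spatial-feature pyramid; a shared linear
# layer scores every location, sigmoid turns the scores into a
# foreground/occlusion mask, and the L1-normalized scores of the finest
# scale weight SpaFeat1 into a single pooled foreground feature.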
SpaFeat1 = self.MaxPool1(x) # shape: [n, c, h, w] + SpaFeat2 = self.MaxPool2(x) + SpaFeat3 = self.MaxPool3(x) + SpaFeat4 = self.MaxPool4(x) + + Feat1 = SpaFeat1.view(SpaFeat1.size(0), SpaFeat1.size(1), SpaFeat1.size(2) * SpaFeat1.size(3)) + Feat2 = SpaFeat2.view(SpaFeat2.size(0), SpaFeat2.size(1), SpaFeat2.size(2) * SpaFeat2.size(3)) + Feat3 = SpaFeat3.view(SpaFeat3.size(0), SpaFeat3.size(1), SpaFeat3.size(2) * SpaFeat3.size(3)) + Feat4 = SpaFeat4.view(SpaFeat4.size(0), SpaFeat4.size(1), SpaFeat4.size(2) * SpaFeat4.size(3)) + SpatialFeatAll = torch.cat((Feat1, Feat2, Feat3, Feat4), 2) + SpatialFeatAll = SpatialFeatAll.transpose(1, 2) # shape: [n, c, m] + y = self.mask_layer(SpatialFeatAll) + mask_weight = torch.sigmoid(y[:, :, 0]) + + feat_dim = SpaFeat1.size(2) * SpaFeat1.size(3) + mask_score = F.normalize(mask_weight[:, :feat_dim], p=1, dim=1) + mask_weight_norm = F.normalize(mask_weight, p=1, dim=1) + mask_score = mask_score.unsqueeze(1) + + SpaFeat1 = SpaFeat1.transpose(1, 2) + SpaFeat1 = SpaFeat1.transpose(2, 3) # shape: [n, h, w, c] + SpaFeat1 = SpaFeat1.view((SpaFeat1.size(0), SpaFeat1.size(1) * SpaFeat1.size(2), -1)) # shape: [n, h*w, c] + + global_feats = mask_score.matmul(SpaFeat1).view(SpaFeat1.shape[0], -1, 1, 1) + return global_feats, mask_weight, mask_weight_norm + + +@REID_HEADS_REGISTRY.register() +class DSRHead(nn.Module): + def __init__(self, cfg): + super().__init__() + + # fmt: off + feat_dim = cfg.MODEL.BACKBONE.FEAT_DIM + num_classes = cfg.MODEL.HEADS.NUM_CLASSES + neck_feat = cfg.MODEL.HEADS.NECK_FEAT + pool_type = cfg.MODEL.HEADS.POOL_LAYER + cls_type = cfg.MODEL.HEADS.CLS_LAYER + norm_type = cfg.MODEL.HEADS.NORM + + if pool_type == 'fastavgpool': self.pool_layer = FastGlobalAvgPool2d() + elif pool_type == 'avgpool': self.pool_layer = nn.AdaptiveAvgPool2d(1) + elif pool_type == 'maxpool': self.pool_layer = nn.AdaptiveMaxPool2d(1) + elif pool_type == 'gempoolP': self.pool_layer = GeneralizedMeanPoolingP() + elif pool_type == 'gempool': self.pool_layer = GeneralizedMeanPooling() + elif pool_type == "avgmaxpool": self.pool_layer = AdaptiveAvgMaxPool2d() + elif pool_type == 'clipavgpool': self.pool_layer = ClipGlobalAvgPool2d() + elif pool_type == "identity": self.pool_layer = nn.Identity() + elif pool_type == "flatten": self.pool_layer = Flatten() + else: raise KeyError(f"{pool_type} is not supported!") + # fmt: on + + self.neck_feat = neck_feat + + self.occ_unit = OcclusionUnit(in_planes=feat_dim) + self.MaxPool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0) + self.MaxPool2 = nn.MaxPool2d(kernel_size=4, stride=2, padding=0) + self.MaxPool3 = nn.MaxPool2d(kernel_size=6, stride=2, padding=0) + self.MaxPool4 = nn.MaxPool2d(kernel_size=8, stride=2, padding=0) + + self.bnneck = get_norm(norm_type, feat_dim, bias_freeze=True) + self.bnneck.apply(weights_init_kaiming) + + self.bnneck_occ = get_norm(norm_type, feat_dim, bias_freeze=True) + self.bnneck_occ.apply(weights_init_kaiming) + + # identity classification layer + if cls_type == 'linear': + self.classifier = nn.Linear(feat_dim, num_classes, bias=False) + self.classifier_occ = nn.Linear(feat_dim, num_classes, bias=False) + elif cls_type == 'arcSoftmax': + self.classifier = ArcSoftmax(cfg, feat_dim, num_classes) + self.classifier_occ = ArcSoftmax(cfg, feat_dim, num_classes) + elif cls_type == 'circleSoftmax': + self.classifier = CircleSoftmax(cfg, feat_dim, num_classes) + self.classifier_occ = CircleSoftmax(cfg, feat_dim, num_classes) + else: + raise KeyError(f"{cls_type} is invalid, please choose from " + 
f"'linear', 'arcSoftmax' and 'circleSoftmax'.") + + self.classifier.apply(weights_init_classifier) + self.classifier_occ.apply(weights_init_classifier) + + def forward(self, features, targets=None): + """ + See :class:`ReIDHeads.forward`. + """ + SpaFeat1 = self.MaxPool1(features) # shape: [n, c, h, w] + SpaFeat2 = self.MaxPool2(features) + SpaFeat3 = self.MaxPool3(features) + SpaFeat4 = self.MaxPool4(features) + + Feat1 = SpaFeat1.view(SpaFeat1.size(0), SpaFeat1.size(1), SpaFeat1.size(2) * SpaFeat1.size(3)) + Feat2 = SpaFeat2.view(SpaFeat2.size(0), SpaFeat2.size(1), SpaFeat2.size(2) * SpaFeat2.size(3)) + Feat3 = SpaFeat3.view(SpaFeat3.size(0), SpaFeat3.size(1), SpaFeat3.size(2) * SpaFeat3.size(3)) + Feat4 = SpaFeat4.view(SpaFeat4.size(0), SpaFeat4.size(1), SpaFeat4.size(2) * SpaFeat4.size(3)) + SpatialFeatAll = torch.cat((Feat1, Feat2, Feat3, Feat4), dim=2) + + foreground_feat, mask_weight, mask_weight_norm = self.occ_unit(features) + bn_foreground_feat = self.bnneck_occ(foreground_feat) + bn_foreground_feat = bn_foreground_feat[..., 0, 0] + + # Evaluation + if not self.training: + return bn_foreground_feat, SpatialFeatAll, mask_weight_norm + + # Training + global_feat = self.pool_layer(features) + bn_feat = self.bnneck(global_feat) + bn_feat = bn_feat[..., 0, 0] + + if self.classifier.__class__.__name__ == 'Linear': + cls_outputs = self.classifier(bn_feat) + fore_cls_outputs = self.classifier_occ(bn_foreground_feat) + pred_class_logits = F.linear(bn_feat, self.classifier.weight) + else: + cls_outputs = self.classifier(bn_feat, targets) + fore_cls_outputs = self.classifier_occ(bn_foreground_feat, targets) + pred_class_logits = self.classifier.s * F.linear(F.normalize(bn_feat), + F.normalize(self.classifier.weight)) + + return { + "cls_outputs": cls_outputs, + "fore_cls_outputs": fore_cls_outputs, + "pred_class_logits": pred_class_logits, + "global_features": global_feat[..., 0, 0], + "foreground_features": foreground_feat[..., 0, 0], + } diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/partial_dataset.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/partial_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..ecce97879596cda4d0c7d16a4d626dd552c131c3 --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/partial_dataset.py @@ -0,0 +1,74 @@ +# encoding: utf-8 + +""" +@author: lingxiao he +@contact: helingxiao3@jd.com +""" + +import glob +import os +import os.path as osp +import re + +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.datasets.bases import ImageDataset + +__all__ = ['PartialREID', 'PartialiLIDS', 'OccludedREID'] + + +def process_test(query_path, gallery_path): + query_img_paths = glob.glob(os.path.join(query_path, '*.jpg')) + gallery_img_paths = glob.glob(os.path.join(gallery_path, '*.jpg')) + query_paths = [] + pattern = re.compile(r'([-\d]+)_(\d*)') + for img_path in query_img_paths: + pid, camid = map(int, pattern.search(img_path).groups()) + query_paths.append([img_path, pid, camid]) + gallery_paths = [] + for img_path in gallery_img_paths: + pid, camid = map(int, pattern.search(img_path).groups()) + gallery_paths.append([img_path, pid, camid]) + return query_paths, gallery_paths + + +@DATASET_REGISTRY.register() +class PartialREID(ImageDataset): + + dataset_name = "partialreid" + + def __init__(self, root='datasets',): + self.root = root + + self.query_dir = osp.join(self.root, 'Partial_REID/partial_body_images') + self.gallery_dir = osp.join(self.root, 
'Partial_REID/whole_body_images') + query, gallery = process_test(self.query_dir, self.gallery_dir) + + ImageDataset.__init__(self, [], query, gallery) + + +@DATASET_REGISTRY.register() +class PartialiLIDS(ImageDataset): + dataset_name = "partialilids" + + def __init__(self, root='datasets',): + self.root = root + + self.query_dir = osp.join(self.root, 'PartialiLIDS/query') + self.gallery_dir = osp.join(self.root, 'PartialiLIDS/gallery') + query, gallery = process_test(self.query_dir, self.gallery_dir) + + ImageDataset.__init__(self, [], query, gallery) + + +@DATASET_REGISTRY.register() +class OccludedREID(ImageDataset): + dataset_name = "occludereid" + + def __init__(self, root='datasets',): + self.root = root + + self.query_dir = osp.join(self.root, 'OccludedREID/query') + self.gallery_dir = osp.join(self.root, 'OccludedREID/gallery') + query, gallery = process_test(self.query_dir, self.gallery_dir) + + ImageDataset.__init__(self, [], query, gallery) diff --git a/thirdparty/fast-reid/projects/PartialReID/partialreid/partialbaseline.py b/thirdparty/fast-reid/projects/PartialReID/partialreid/partialbaseline.py new file mode 100644 index 0000000000000000000000000000000000000000..088df16e1d8d0150a5dade18d9db5fdd96b6a651 --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/partialreid/partialbaseline.py @@ -0,0 +1,69 @@ +# encoding: utf-8 +""" +@authorr: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from fastreid.modeling.losses import * +from fastreid.modeling.meta_arch import Baseline +from fastreid.modeling.meta_arch.build import META_ARCH_REGISTRY + + +@META_ARCH_REGISTRY.register() +class PartialBaseline(Baseline): + + def losses(self, outs): + r""" + Compute loss from modeling's outputs, the loss function input arguments + must be the same as the outputs of the model forwarding. 
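Each loss named in MODEL.LOSSES.NAME is computed twice below: once on the
global average-pooled branch and once on the occlusion-aware foreground
branch produced by DSRHead.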
+ """ + # fmt: off + outputs = outs["outputs"] + gt_labels = outs["targets"] + # model predictions + pred_class_logits = outputs['pred_class_logits'].detach() + cls_outputs = outputs["cls_outputs"] + fore_cls_outputs = outputs["fore_cls_outputs"] + global_feat = outputs["global_features"] + fore_feat = outputs["foreground_features"] + # fmt: on + + # Log prediction accuracy + log_accuracy(pred_class_logits, gt_labels) + + loss_dict = {} + loss_names = self._cfg.MODEL.LOSSES.NAME + + if "CrossEntropyLoss" in loss_names: + loss_dict['loss_avg_branch_cls'] = cross_entropy_loss( + cls_outputs, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE + + loss_dict['loss_fore_branch_cls'] = cross_entropy_loss( + fore_cls_outputs, + gt_labels, + self._cfg.MODEL.LOSSES.CE.EPSILON, + self._cfg.MODEL.LOSSES.CE.ALPHA, + ) * self._cfg.MODEL.LOSSES.CE.SCALE + + if "TripletLoss" in loss_names: + loss_dict['loss_avg_branch_triplet'] = triplet_loss( + global_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE + + loss_dict['loss_fore_branch_triplet'] = triplet_loss( + fore_feat, + gt_labels, + self._cfg.MODEL.LOSSES.TRI.MARGIN, + self._cfg.MODEL.LOSSES.TRI.NORM_FEAT, + self._cfg.MODEL.LOSSES.TRI.HARD_MINING, + ) * self._cfg.MODEL.LOSSES.TRI.SCALE + return loss_dict + diff --git a/thirdparty/fast-reid/projects/PartialReID/train_net.py b/thirdparty/fast-reid/projects/PartialReID/train_net.py new file mode 100644 index 0000000000000000000000000000000000000000..d4ad2561519f860c54478180714d48d9caeee23f --- /dev/null +++ b/thirdparty/fast-reid/projects/PartialReID/train_net.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" + +import logging +import os +import sys + +sys.path.append('.') + +from fastreid.config import get_cfg +from fastreid.engine import DefaultTrainer, default_argument_parser, default_setup, launch +from fastreid.utils.checkpoint import Checkpointer +from fastreid.engine import hooks + +from partialreid import * + + +class Trainer(DefaultTrainer): + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_dir=None): + data_loader, num_query = cls.build_test_loader(cfg, dataset_name) + return data_loader, DsrEvaluator(cfg, num_query, output_dir) + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_partialreid_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + logger = logging.getLogger("fastreid.trainer") + cfg.defrost() + cfg.MODEL.BACKBONE.PRETRAIN = False + model = Trainer.build_model(cfg) + + Checkpointer(model).load(cfg.MODEL.WEIGHTS) # load trained model + + if cfg.TEST.PRECISE_BN.ENABLED and hooks.get_bn_modules(model): + prebn_cfg = cfg.clone() + prebn_cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN + prebn_cfg.DATASETS.NAMES = tuple([cfg.TEST.PRECISE_BN.DATASET]) # set dataset name for PreciseBN + logger.info("Prepare precise BN dataset") + hooks.PreciseBN( + # Run at the same freq as (but before) evaluation. 
+ model, + # Build a new data loader to not affect training + Trainer.build_train_loader(prebn_cfg), + cfg.TEST.PRECISE_BN.NUM_ITER, + ).update_stats() + res = Trainer.test(cfg, model) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/thirdparty/fast-reid/projects/README.md b/thirdparty/fast-reid/projects/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2481480df412d3534c4b8ae3e48784a1b1e8fad5 --- /dev/null +++ b/thirdparty/fast-reid/projects/README.md @@ -0,0 +1,17 @@ + +Here are a few projects that are built on fastreid. +They are examples of how to use fastrei as a library, to make your projects more maintainable. + +# Projects by JDAI + +Note that these are research projects, and therefore may not have the same level of support or stability of fastreid. + +- [Deep Spatial Feature Reconstruction for Partial Person Re-identification](https://github.com/JDAI-CV/fast-reid/tree/master/projects/PartialReID) +- [Distillation Person Re-identification](https://github.com/JDAI-CV/fast-reid/tree/master/projects/DistillReID) +- [Black Re-ID: A Head-shoulder Descriptor for the Challenging Problem of Person Re-Identification](https://github.com/JDAI-CV/fast-reid/tree/master/projects/HAA) +- [Person Attribute Recognition](https://github.com/JDAI-CV/fast-reid/tree/master/projects/attribute_recognition) + +# External Projects + +External projects in the community that use fastreid: + diff --git a/thirdparty/fast-reid/projects/attribute_recognition/README.md b/thirdparty/fast-reid/projects/attribute_recognition/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6efac098af44da3759a0c14419cfd7741af04d9e --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/README.md @@ -0,0 +1,26 @@ +# Person Attribute Recognition in FastReID + +## Training and Evaluation + +To train a model, run: + +```bash +python3 projects/PartialReID/train_net.py --config-file --num-gpus 1 +``` + +For example, to train the attribute recognition network with ResNet-50 Backbone in PA100k dataset, +one should execute: + +```bash +python3 projects/attribute_recognition/train_net.py --config-file projects/attribute_recognition/configs/pa100.yml --num-gpus 4 +``` + +## Results + +### PA100k + +| Method | mA | Accu | Prec | Recall | F1 | +|:--:|:--:|:--:|:--:|:--:|:--:| +| Strongbaseline | 77.76 | 77.59 | 88.38 | 84.35 | 86.32 | + +More datasets and test results are waiting to add, stay tune! 
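For reference, a minimal NumPy sketch (not code from this repo) of how the label-based mA reported above can be computed from 0/1 attribute matrices; a divide-by-zero guard is added that the project's own evaluator does not use:

```python
import numpy as np

def label_mA(gt, pred):
    """Mean over attributes of (TPR + TNR) / 2, mirroring AttrEvaluator.get_attr_metrics."""
    gt = np.asarray(gt, dtype=float)    # [num_samples, num_attrs], entries 0/1
    pred = np.asarray(pred, dtype=float)
    tpr = (pred * gt).sum(0) / np.maximum(gt.sum(0), 1)                    # correct_pos / real_pos
    tnr = ((1 - pred) * (1 - gt)).sum(0) / np.maximum((1 - gt).sum(0), 1)  # correct_neg / real_neg
    return float(((tpr + tnr) * 0.5).mean())

gt = np.array([[1, 0], [1, 1], [0, 1]])    # 3 samples, 2 attributes
pred = np.array([[1, 0], [0, 1], [0, 1]])  # thresholded predictions
print(label_mA(gt, pred))                  # -> 0.875
```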
diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/__init__.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a769ede462369f05625c4d7fd832273d7781388c --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/__init__.py @@ -0,0 +1,12 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +from .config import add_attr_config +from .datasets import * +from .attr_baseline import AttrBaseline +from .attr_evaluation import AttrEvaluator +from .data_build import build_attr_train_loader, build_attr_test_loader +from .attr_trainer import AttrTrainer diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_baseline.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_baseline.py new file mode 100644 index 0000000000000000000000000000000000000000..1961f68001c6351a80329e6b4b4338f08708da2f --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_baseline.py @@ -0,0 +1,41 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +from fastreid.modeling.meta_arch.baseline import Baseline +from fastreid.modeling.meta_arch.build import META_ARCH_REGISTRY +from .bce_loss import cross_entropy_sigmoid_loss + + +@META_ARCH_REGISTRY.register() +class AttrBaseline(Baseline): + + def losses(self, outs, sample_weight=None): + r""" + Compute loss from modeling's outputs, the loss function input arguments + must be the same as the outputs of the model forwarding. + """ + # fmt: off + outputs = outs["outputs"] + gt_labels = outs["targets"] + # model predictions + # pred_class_logits = outputs['pred_class_logits'].detach() + cls_outputs = outputs['cls_outputs'] + # fmt: on + + # Log prediction accuracy + # log_accuracy(pred_class_logits, gt_labels) + + loss_dict = {} + loss_names = self._cfg.MODEL.LOSSES.NAME + + if "BinaryCrossEntropyLoss" in loss_names: + loss_dict['loss_bce'] = cross_entropy_sigmoid_loss( + cls_outputs, + gt_labels, + sample_weight, + ) * self._cfg.MODEL.LOSSES.BCE.SCALE + + return loss_dict diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_evaluation.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..d1022843425d4e0dd0b0b7cf4e3eeb3263f6a7d3 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_evaluation.py @@ -0,0 +1,96 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" +import copy +import logging +from collections import OrderedDict + +import torch + +from fastreid.evaluation.evaluator import DatasetEvaluator +from fastreid.utils import comm + +logger = logging.getLogger(__name__) + + +class AttrEvaluator(DatasetEvaluator): + def __init__(self, cfg, attr_dict, thres=0.5, output_dir=None): + self.cfg = cfg + self.attr_dict = attr_dict + self.thres = thres + self._output_dir = output_dir + + self.pred_logits = [] + self.gt_labels = [] + + def reset(self): + self.pred_logits = [] + self.gt_labels = [] + + def process(self, inputs, outputs): + self.gt_labels.extend(inputs["targets"]) + self.pred_logits.extend(outputs.cpu()) + + @staticmethod + def get_attr_metrics(gt_labels, pred_logits, thres): + + pred_labels = 
copy.deepcopy(pred_logits) + pred_labels[pred_logits < thres] = 0 + pred_labels[pred_logits >= thres] = 1 + + # Compute label-based metric + overlaps = pred_labels * gt_labels + correct_pos = overlaps.sum(axis=0) + real_pos = gt_labels.sum(axis=0) + inv_overlaps = (1 - pred_labels) * (1 - gt_labels) + correct_neg = inv_overlaps.sum(axis=0) + real_neg = (1 - gt_labels).sum(axis=0) + + # Compute instance-based accuracy + pred_labels = pred_labels.astype(bool) + gt_labels = gt_labels.astype(bool) + intersect = (pred_labels & gt_labels).astype(float) + union = (pred_labels | gt_labels).astype(float) + ins_acc = (intersect.sum(axis=1) / union.sum(axis=1)).mean() + ins_prec = (intersect.sum(axis=1) / pred_labels.astype(float).sum(axis=1)).mean() + ins_rec = (intersect.sum(axis=1) / gt_labels.astype(float).sum(axis=1)).mean() + ins_f1 = (2 * ins_prec * ins_rec) / (ins_prec + ins_rec) + + term1 = correct_pos / real_pos + term2 = correct_neg / real_neg + label_mA_verbose = (term1 + term2) * 0.5 + label_mA = label_mA_verbose.mean() + + results = OrderedDict() + results["Accu"] = ins_acc + results["Prec"] = ins_prec + results["Recall"] = ins_rec + results["F1"] = ins_f1 + results["mA"] = label_mA + return results + + def evaluate(self): + if comm.get_world_size() > 1: + comm.synchronize() + pred_logits = comm.gather(self.pred_logits) + pred_logits = sum(pred_logits, []) + + gt_labels = comm.gather(self.gt_labels) + gt_labels = sum(gt_labels, []) + + if not comm.is_main_process(): + return {} + else: + pred_logits = self.pred_logits + gt_labels = self.gt_labels + + pred_logits = torch.stack(pred_logits, dim=0).numpy() + gt_labels = torch.stack(gt_labels, dim=0).numpy() + + # Pedestrian attribute metrics + thres = self.cfg.TEST.THRES + self._results = self.get_attr_metrics(gt_labels, pred_logits, thres) + + return copy.deepcopy(self._results) diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_trainer.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_trainer.py new file mode 100644 index 0000000000000000000000000000000000000000..d63f12415bfbd3835a93c82fafc386af1e74f471 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/attr_trainer.py @@ -0,0 +1,89 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import time +import torch +from torch.nn.parallel import DistributedDataParallel +from torch.cuda import amp +from fastreid.engine import DefaultTrainer +from .data_build import build_attr_train_loader, build_attr_test_loader +from .attr_evaluation import AttrEvaluator + + +class AttrTrainer(DefaultTrainer): + def __init__(self, cfg): + super().__init__(cfg) + + # Sample weight for attributed imbalanced classification + bce_weight_enabled = self.cfg.MODEL.LOSSES.BCE.WEIGHT_ENABLED + # fmt: off + if bce_weight_enabled: self.sample_weights = self.data_loader.dataset.sample_weights.to("cuda") + else: self.sample_weights = None + # fmt: on + + @classmethod + def build_train_loader(cls, cfg): + return build_attr_train_loader(cfg) + + @classmethod + def build_test_loader(cls, cfg, dataset_name): + return build_attr_test_loader(cfg, dataset_name) + + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + data_loader = cls.build_test_loader(cfg, dataset_name) + return data_loader, AttrEvaluator(cfg, output_folder) + + def run_step(self): + r""" + Implement the attribute model training logic. 
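On top of the default trainer's step, this forwards the per-attribute
sample weights (when BCE.WEIGHT_ENABLED) into ``losses`` to rebalance rare
attributes, under an optional AMP autocast/GradScaler path.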
+ """ + assert self.model.training, "[SimpleTrainer] model was changed to eval mode!" + start = time.perf_counter() + """ + If your want to do something with the data, you can wrap the dataloader. + """ + data = next(self._data_loader_iter) + data_time = time.perf_counter() - start + + """ + If your want to do something with the heads, you can wrap the model. + """ + + with amp.autocast(enabled=self.amp_enabled): + outs = self.model(data) + + # Compute loss + if isinstance(self.model, DistributedDataParallel): + loss_dict = self.model.module.losses(outs, self.sample_weights) + else: + loss_dict = self.model.losses(outs, self.sample_weights) + + losses = sum(loss_dict.values()) + + with torch.cuda.stream(torch.cuda.Stream()): + metrics_dict = loss_dict + metrics_dict["data_time"] = data_time + self._write_metrics(metrics_dict) + self._detect_anomaly(losses, loss_dict) + + """ + If you need accumulate gradients or something similar, you can + wrap the optimizer with your custom `zero_grad()` method. + """ + self.optimizer.zero_grad() + + if self.amp_enabled: + self.scaler.scale(losses).backward() + self.scaler.step(self.optimizer) + self.scaler.update() + else: + losses.backward() + """ + If you need gradient clipping/scaling or other processing, you can + wrap the optimizer with your custom `step()` method. + """ + self.optimizer.step() diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/bce_loss.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/bce_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..639c4a394cae7e27e8fb8a77d3914f4ebb0b584b --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/bce_loss.py @@ -0,0 +1,33 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import torch +import torch.nn.functional as F + + +def ratio2weight(targets, ratio): + pos_weights = targets * (1 - ratio) + neg_weights = (1 - targets) * ratio + weights = torch.exp(neg_weights + pos_weights) + + weights[targets > 1] = 0.0 + return weights + + +def cross_entropy_sigmoid_loss(pred_class_logits, gt_classes, sample_weight=None): + loss = F.binary_cross_entropy_with_logits(pred_class_logits, gt_classes, reduction='none') + + if sample_weight is not None: + targets_mask = torch.where(gt_classes.detach() > 0.5, + torch.ones(1, device="cuda"), torch.zeros(1, device="cuda")) # dtype float32 + weight = ratio2weight(targets_mask, sample_weight) + loss = loss * weight + + with torch.no_grad(): + non_zero_cnt = max(loss.nonzero(as_tuple=False).size(0), 1) + + loss = loss.sum() / non_zero_cnt + return loss diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/common_attr.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/common_attr.py new file mode 100644 index 0000000000000000000000000000000000000000..896ec551cfa1aa25d8d12d2348509d70563e20c3 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/common_attr.py @@ -0,0 +1,47 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import torch +from torch.utils.data import Dataset + +from fastreid.data.data_utils import read_image + + +class AttrDataset(Dataset): + """Image Person Attribute Dataset""" + + def __init__(self, img_items, attr_dict, transform=None): + self.img_items = img_items + self.attr_dict = attr_dict + self.transform = transform + + def __len__(self): + return 
len(self.img_items) + + def __getitem__(self, index): + img_path, labels = self.img_items[index] + img = read_image(img_path) + if self.transform is not None: img = self.transform(img) + + labels = torch.from_numpy(labels) + + return { + "images": img, + "targets": labels, + "img_paths": img_path, + } + + @property + def num_classes(self): + return len(self.attr_dict) + + @property + def sample_weights(self): + sample_weights = torch.zeros(self.num_classes, dtype=torch.float) + for _, attr in self.img_items: + sample_weights += torch.from_numpy(attr) + sample_weights /= len(self) + return sample_weights diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/config.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/config.py new file mode 100644 index 0000000000000000000000000000000000000000..5b69581c19086e47ee018da8ae7921c7687e9c6a --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/config.py @@ -0,0 +1,17 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +from fastreid.config import CfgNode as CN + + +def add_attr_config(cfg): + _C = cfg + + _C.MODEL.LOSSES.BCE = CN() + _C.MODEL.LOSSES.BCE.WEIGHT_ENABLED = True + _C.MODEL.LOSSES.BCE.SCALE = 1. + + _C.TEST.THRES = 0.5 diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/data_build.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/data_build.py new file mode 100644 index 0000000000000000000000000000000000000000..eb049224446ff4ffb53ff8bcf44f1f1fff5f2756 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/data_build.py @@ -0,0 +1,82 @@ +# encoding: utf-8 +""" +@author: l1aoxingyu +@contact: sherlockliao01@gmail.com +""" + +import os +import torch +from torch.utils.data import DataLoader +from fastreid.utils import comm + +from .common_attr import AttrDataset +from fastreid.data import samplers +from fastreid.data.build import fast_batch_collator +from fastreid.data.datasets import DATASET_REGISTRY +from fastreid.data.transforms import build_transforms + +_root = os.getenv("FASTREID_DATASETS", "datasets") + + +def build_attr_train_loader(cfg): + cfg = cfg.clone() + cfg.defrost() + + train_items = list() + attr_dict = None + for d in cfg.DATASETS.NAMES: + dataset = DATASET_REGISTRY.get(d)(root=_root, combineall=cfg.DATASETS.COMBINEALL) + if comm.is_main_process(): + dataset.show_train() + if attr_dict is not None: + assert attr_dict == dataset.attr_dict, "attr_dict in {} does not match with previous ones".format(d) + else: + attr_dict = dataset.attr_dict + train_items.extend(dataset.train) + + iters_per_epoch = len(train_items) // cfg.SOLVER.IMS_PER_BATCH + cfg.SOLVER.MAX_ITER *= iters_per_epoch + train_transforms = build_transforms(cfg, is_train=True) + train_set = AttrDataset(train_items, attr_dict, train_transforms) + + num_workers = cfg.DATALOADER.NUM_WORKERS + mini_batch_size = cfg.SOLVER.IMS_PER_BATCH // comm.get_world_size() + + data_sampler = samplers.TrainingSampler(len(train_set)) + batch_sampler = torch.utils.data.sampler.BatchSampler(data_sampler, mini_batch_size, True) + + train_loader = torch.utils.data.DataLoader( + train_set, + num_workers=num_workers, + batch_sampler=batch_sampler, + collate_fn=fast_batch_collator, + pin_memory=True, + ) + return train_loader + + +def build_attr_test_loader(cfg, dataset_name): + cfg = cfg.clone() + cfg.defrost() + + dataset = 
DATASET_REGISTRY.get(dataset_name)(root=_root, combineall=cfg.DATASETS.COMBINEALL) + if comm.is_main_process(): + dataset.show_test() + test_items = dataset.test + + test_transforms = build_transforms(cfg, is_train=False) + test_set = AttrDataset(test_items, dataset.attr_dict, test_transforms) + + mini_batch_size = cfg.TEST.IMS_PER_BATCH // comm.get_world_size() + data_sampler = samplers.InferenceSampler(len(test_set)) + batch_sampler = torch.utils.data.BatchSampler(data_sampler, mini_batch_size, False) + test_loader = DataLoader( + test_set, + batch_sampler=batch_sampler, + num_workers=0, # save some memory + collate_fn=fast_batch_collator, + pin_memory=True, + ) + return test_loader + + diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/__init__.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..18050a284ef63314ce82983d9723ac26a7567689 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/__init__.py @@ -0,0 +1,8 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +# Attributed datasets +from .pa100k import PA100K diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/bases.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/bases.py new file mode 100644 index 0000000000000000000000000000000000000000..ee0bbe8fe015706b95215b89b2d21bad5ca26b8e --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/bases.py @@ -0,0 +1,127 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import copy +import logging +import os + +from tabulate import tabulate +from termcolor import colored + +logger = logging.getLogger("fastreid." + __name__) + + +class Dataset(object): + + def __init__( + self, + train, + val, + test, + attr_dict, + mode='train', + verbose=True, + **kwargs, + ): + self.train = train + self.val = val + self.test = test + self._attr_dict = attr_dict + self._num_attrs = len(self.attr_dict) + + if mode == 'train': + self.data = self.train + elif mode == 'val': + self.data = self.val + else: + self.data = self.test + + @property + def num_attrs(self): + return self._num_attrs + + @property + def attr_dict(self): + return self._attr_dict + + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + raise NotImplementedError + + def check_before_run(self, required_files): + """Checks if required files exist before going deeper. + Args: + required_files (str or list): string file name(s). 
+ """ + if isinstance(required_files, str): + required_files = [required_files] + + for fpath in required_files: + if not os.path.exists(fpath): + raise RuntimeError('"{}" is not found'.format(fpath)) + + def combine_all(self): + """Combines train, val and test in a dataset for training.""" + combined = copy.deepcopy(self.train) + + def _combine_data(data): + for img_path, pid, camid in data: + if pid in self._junk_pids: + continue + pid = self.dataset_name + "_" + str(pid) + camid = self.dataset_name + "_" + str(camid) + combined.append((img_path, pid, camid)) + + _combine_data(self.query) + _combine_data(self.gallery) + + self.train = combined + self.num_train_pids = self.get_num_pids(self.train) + + def show_train(self): + num_train = len(self.train) + num_val = len(self.val) + num_total = num_train + num_val + + headers = ['subset', '# images'] + csv_results = [ + ['train', num_train], + ['val', num_val], + ['total', num_total], + ] + + # tabulate it + table = tabulate( + csv_results, + tablefmt="pipe", + headers=headers, + numalign="left", + ) + logger.info(f"=> Loaded {self.__class__.__name__} in csv format: \n" + colored(table, "cyan")) + logger.info("attributes:") + for label, attr in self.attr_dict.items(): + logger.info('{:3d}: {}'.format(label, attr)) + logger.info("------------------------------") + logger.info("# attributes: {}".format(len(self.attr_dict))) + + def show_test(self): + num_test = len(self.test) + + headers = ['subset', '# images'] + csv_results = [ + ['test', num_test], + ] + + # tabulate it + table = tabulate( + csv_results, + tablefmt="pipe", + headers=headers, + numalign="left", + ) + logger.info(f"=> Loaded {self.__class__.__name__} in csv format: \n" + colored(table, "cyan")) diff --git a/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/pa100k.py b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/pa100k.py new file mode 100644 index 0000000000000000000000000000000000000000..5d6d154121cee1b45196c14bfd06bfc53a7ef2c6 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/attribute_baseline/datasets/pa100k.py @@ -0,0 +1,65 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import os.path as osp + +import numpy as np +from scipy.io import loadmat + +from fastreid.data.datasets import DATASET_REGISTRY + +from .bases import Dataset + + +@DATASET_REGISTRY.register() +class PA100K(Dataset): + """Pedestrian attribute dataset. + 80k training images + 20k test images. 
+ The folder structure should be: + pa100k/ + data/ # images + annotation.mat + """ + dataset_dir = 'PA-100K' + + def __init__(self, root='', **kwargs): + self.root = root + self.dataset_dir = osp.join(self.root, self.dataset_dir) + self.data_dir = osp.join(self.dataset_dir, 'data') + self.anno_mat_path = osp.join( + self.dataset_dir, 'annotation.mat' + ) + + required_files = [self.data_dir, self.anno_mat_path] + self.check_before_run(required_files) + + train, val, test, attr_dict = self.extract_data() + super(PA100K, self).__init__(train, val, test, attr_dict=attr_dict, **kwargs) + + def extract_data(self): + # anno_mat is a dictionary with keys: ['test_images_name', 'val_images_name', + # 'train_images_name', 'val_label', 'attributes', 'test_label', 'train_label'] + anno_mat = loadmat(self.anno_mat_path) + + def _extract(key_name, key_label): + names = anno_mat[key_name] + labels = anno_mat[key_label] + num_imgs = names.shape[0] + data = [] + for i in range(num_imgs): + name = names[i, 0][0] + attrs = labels[i, :].astype(np.float32) + img_path = osp.join(self.data_dir, name) + data.append((img_path, attrs)) + return data + + train = _extract('train_images_name', 'train_label') + val = _extract('val_images_name', 'val_label') + test = _extract('test_images_name', 'test_label') + attrs = anno_mat['attributes'] + attr_dict = {i: str(attr[0][0]) for i, attr in enumerate(attrs)} + + return train, val, test, attr_dict diff --git a/thirdparty/fast-reid/projects/attribute_recognition/configs/Base-attribute.yml b/thirdparty/fast-reid/projects/attribute_recognition/configs/Base-attribute.yml new file mode 100644 index 0000000000000000000000000000000000000000..ba6da2792ae156036273dab6c9796439b9efb61b --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/configs/Base-attribute.yml @@ -0,0 +1,63 @@ +MODEL: + META_ARCHITECTURE: "AttrBaseline" + + BACKBONE: + NAME: "build_resnet_backbone" + NORM: "BN" + DEPTH: "50x" + LAST_STRIDE: 2 + FEAT_DIM: 2048 + WITH_IBN: False + PRETRAIN: True + PRETRAIN_PATH: "/export/home/lxy/.cache/torch/checkpoints/resnet50-19c8e357.pth" + + HEADS: + NAME: "AttrHead" + NORM: "BN" + WITH_BNNECK: True + POOL_LAYER: "fastavgpool" + CLS_LAYER: "linear" + NUM_CLASSES: 26 + + LOSSES: + NAME: ("BinaryCrossEntropyLoss",) + + BCE: + WEIGHT_ENABLED: True + SCALE: 1. + +INPUT: + SIZE_TRAIN: [256, 128] + SIZE_TEST: [256, 128] + REA: + ENABLED: False + DO_PAD: True + +DATALOADER: + NUM_WORKERS: 8 + +SOLVER: + OPT: "SGD" + MAX_ITER: 30 + BASE_LR: 0.01 + BIAS_LR_FACTOR: 2. + HEADS_LR_FACTOR: 10. 
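+ # Assumed semantics (based on how fast-reid usually builds its optimizer, + # not stated in this file): BASE_LR is multiplied by BIAS_LR_FACTOR for + # bias parameters and by HEADS_LR_FACTOR for the head, i.e. effective + # learning rates of 0.02 and 0.1 with the values above.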
+ WEIGHT_DECAY: 0.0005 + WEIGHT_DECAY_BIAS: 0.0005 + IMS_PER_BATCH: 64 + + SCHED: "WarmupCosineAnnealingLR" + DELAY_ITERS: 5 + ETA_MIN_LR: 0.00001 + + WARMUP_FACTOR: 0.01 + WARMUP_ITERS: 5 + + CHECKPOINT_PERIOD: 10 + +TEST: + EVAL_PERIOD: 10 + IMS_PER_BATCH: 256 + +CUDNN_BENCHMARK: True + diff --git a/thirdparty/fast-reid/projects/attribute_recognition/configs/pa100.yml b/thirdparty/fast-reid/projects/attribute_recognition/configs/pa100.yml new file mode 100644 index 0000000000000000000000000000000000000000..b7de5ee8a726bc6354b08a66f3ed863cf883c2c9 --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/configs/pa100.yml @@ -0,0 +1,7 @@ +_BASE_: "Base-attribute.yml" + +DATASETS: + NAMES: ("PA100K",) + TESTS: ("PA100K",) + +OUTPUT_DIR: "projects/attribute_recognition/logs/pa100k/strong_baseline" \ No newline at end of file diff --git a/thirdparty/fast-reid/projects/attribute_recognition/train_net.py b/thirdparty/fast-reid/projects/attribute_recognition/train_net.py new file mode 100644 index 0000000000000000000000000000000000000000..1aa97d6a18680339c231b9cba3efa4d51d7217ca --- /dev/null +++ b/thirdparty/fast-reid/projects/attribute_recognition/train_net.py @@ -0,0 +1,58 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" +import sys + +sys.path.append('.') + +from fastreid.config import get_cfg +from fastreid.engine import default_argument_parser, default_setup, launch +from fastreid.utils.checkpoint import Checkpointer + +from attribute_baseline import * + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_attr_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + cfg.defrost() + cfg.MODEL.BACKBONE.PRETRAIN = False + model = AttrTrainer.build_model(cfg) + + Checkpointer(model).load(cfg.MODEL.WEIGHTS) # load trained model + + res = AttrTrainer.test(cfg, model) + return res + + trainer = AttrTrainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/thirdparty/fast-reid/tests/__init__.py b/thirdparty/fast-reid/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e404ab17f8d406d501cbc8f8ec4850ae764e018d --- /dev/null +++ b/thirdparty/fast-reid/tests/__init__.py @@ -0,0 +1,5 @@ +# encoding: utf-8 +""" +@author: sherlock +@contact: sherlockliao01@gmail.com +""" diff --git a/thirdparty/fast-reid/tests/dataset_test.py b/thirdparty/fast-reid/tests/dataset_test.py new file mode 100644 index 0000000000000000000000000000000000000000..46f6d37fefec6b8afc2642154d267717beab9e06 --- /dev/null +++ b/thirdparty/fast-reid/tests/dataset_test.py @@ -0,0 +1,42 @@ +# encoding: utf-8 +""" +@author: liaoxingyu +@contact: sherlockliao01@gmail.com +""" + +import sys +sys.path.append('.') +from data import get_dataloader +from config import cfg +import argparse +from data.datasets import init_dataset +# cfg.DATALOADER.SAMPLER = 'triplet' +cfg.DATASETS.NAMES = ("market1501", "dukemtmc", "cuhk03", "msmt17",) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description="ReID Baseline Training") + parser.add_argument( + 
'-cfg', "--config_file", + default="", + metavar="FILE", + help="path to config file", + type=str + ) + # parser.add_argument("--local_rank", type=int, default=0) + parser.add_argument("opts", help="Modify config options using the command-line", default=None, + nargs=argparse.REMAINDER) + args = parser.parse_args() + cfg.merge_from_list(args.opts) + + # dataset = init_dataset('msmt17', combineall=True) + get_dataloader(cfg) + # tng_dataloader, val_dataloader, num_classes, num_query = get_dataloader(cfg) + # def get_ex(): return open_image('datasets/beijingStation/query/000245_c10s2_1561732033722.000000.jpg') + # im = get_ex() + # print(data.train_ds[0]) + # print(data.test_ds[0]) + # a = next(iter(data.train_dl)) + # from IPython import embed; embed() + # from ipdb import set_trace; set_trace() + # im.apply_tfms(crop_pad(size=(300, 300))) diff --git a/thirdparty/fast-reid/tests/feature_align.py b/thirdparty/fast-reid/tests/feature_align.py new file mode 100644 index 0000000000000000000000000000000000000000..00af20c1d30f7e74148873268e252aa4dea4749d --- /dev/null +++ b/thirdparty/fast-reid/tests/feature_align.py @@ -0,0 +1,30 @@ +import unittest +import numpy as np +import os +from glob import glob + + +class TestFeatureAlign(unittest.TestCase): + def test_caffe_pytorch_feat_align(self): + caffe_feat_path = "/export/home/lxy/cvpalgo-fast-reid/tools/deploy/caffe_R50_output" + pytorch_feat_path = "/export/home/lxy/cvpalgo-fast-reid/demo/logs/R50_256x128_pytorch_feat_output" + feat_filenames = os.listdir(caffe_feat_path) + for feat_name in feat_filenames: + caffe_feat = np.load(os.path.join(caffe_feat_path, feat_name)) + pytorch_feat = np.load(os.path.join(pytorch_feat_path, feat_name)) + sim = np.dot(caffe_feat, pytorch_feat.transpose())[0][0] + assert sim > 0.97, f"Got similarity {sim} and feature of {feat_name} is not aligned" + + def test_model_performance(self): + caffe_feat_path = "/export/home/lxy/cvpalgo-fast-reid/tools/deploy/caffe_R50_output" + feat_filenames = os.listdir(caffe_feat_path) + feats = [] + for feat_name in feat_filenames: + caffe_feat = np.load(os.path.join(caffe_feat_path, feat_name)) + feats.append(caffe_feat) + from ipdb import set_trace; set_trace() + + + +if __name__ == '__main__': + unittest.main() diff --git a/thirdparty/fast-reid/tests/interp_test.py b/thirdparty/fast-reid/tests/interp_test.py new file mode 100644 index 0000000000000000000000000000000000000000..92f8730512c3d02dfd5edcf74ab7494be81bd8f1 --- /dev/null +++ b/thirdparty/fast-reid/tests/interp_test.py @@ -0,0 +1,23 @@ +import torch +from fastai.vision import * +from fastai.basic_data import * +from fastai.layers import * + +import sys +sys.path.append('.') +from engine.interpreter import ReidInterpretation + +from data import get_data_bunch +from modeling import build_model +from config import cfg +cfg.DATASETS.NAMES = ('market1501',) +cfg.DATASETS.TEST_NAMES = 'market1501' +cfg.MODEL.BACKBONE = 'resnet50' + +data_bunch, test_labels, num_query = get_data_bunch(cfg) + +model = build_model(cfg, 10) +model.load_params_wo_fc(torch.load('logs/2019.8.14/market/baseline/models/model_149.pth')['model']) +learn = Learner(data_bunch, model) + +feats, _ = learn.get_preds(DatasetType.Test, activ=Lambda(lambda x: x)) \ No newline at end of file diff --git a/thirdparty/fast-reid/tests/lr_scheduler_test.py b/thirdparty/fast-reid/tests/lr_scheduler_test.py new file mode 100644 index 0000000000000000000000000000000000000000..eb9ee4a879009fd3b21187ada93f778aa1abf652 --- /dev/null +++ 
b/thirdparty/fast-reid/tests/lr_scheduler_test.py @@ -0,0 +1,26 @@ +import sys +import unittest + +import torch +from torch import nn + +sys.path.append('.') +from solver.lr_scheduler import WarmupMultiStepLR +from solver.build import make_optimizer +from config import cfg + + +class MyTestCase(unittest.TestCase): + def test_something(self): + net = nn.Linear(10, 10) + optimizer = make_optimizer(cfg, net) + lr_scheduler = WarmupMultiStepLR(optimizer, [20, 40], warmup_iters=10) + for i in range(50): + lr_scheduler.step() + for j in range(3): + print(i, lr_scheduler.get_lr()[0]) + optimizer.step() + + +if __name__ == '__main__': + unittest.main() diff --git a/thirdparty/fast-reid/tests/model_test.py b/thirdparty/fast-reid/tests/model_test.py new file mode 100644 index 0000000000000000000000000000000000000000..fa13e87c655d060af63be3326365476f597e548b --- /dev/null +++ b/thirdparty/fast-reid/tests/model_test.py @@ -0,0 +1,38 @@ +import unittest + +import torch + +import sys +sys.path.append('.') +from fastreid.config import cfg +from fastreid.modeling.backbones import build_resnet_backbone +from fastreid.modeling.backbones.resnet_ibn_a import se_resnet101_ibn_a +from torch import nn + + +class MyTestCase(unittest.TestCase): + def test_se_resnet101(self): + cfg.MODEL.BACKBONE.NAME = 'resnet101' + cfg.MODEL.BACKBONE.DEPTH = 101 + cfg.MODEL.BACKBONE.WITH_IBN = True + cfg.MODEL.BACKBONE.WITH_SE = True + cfg.MODEL.BACKBONE.PRETRAIN_PATH = '/export/home/lxy/.cache/torch/checkpoints/se_resnet101_ibn_a.pth.tar' + + net1 = build_resnet_backbone(cfg) + net1.cuda() + net2 = nn.DataParallel(se_resnet101_ibn_a()) + res = net2.load_state_dict(torch.load(cfg.MODEL.BACKBONE.PRETRAIN_PATH)['state_dict'], strict=False) + net2.cuda() + x = torch.randn(10, 3, 256, 128).cuda() + y1 = net1(x) + y2 = net2(x) + assert y1.sum() == y2.sum(), 'train mode problem' + net1.eval() + net2.eval() + y1 = net1(x) + y2 = net2(x) + assert y1.sum() == y2.sum(), 'eval mode problem' + + +if __name__ == '__main__': + unittest.main() diff --git a/thirdparty/fast-reid/tests/sampler_test.py b/thirdparty/fast-reid/tests/sampler_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c6430279e861e7b32228fcf6f2d456ae31524e23 --- /dev/null +++ b/thirdparty/fast-reid/tests/sampler_test.py @@ -0,0 +1,16 @@ +import unittest +import sys +sys.path.append('.') +from fastreid.data.samplers import TrainingSampler + + +class SamplerTestCase(unittest.TestCase): + def test_training_sampler(self): + sampler = TrainingSampler(5) + for i in sampler: + from ipdb import set_trace; set_trace() + print(i) + + +if __name__ == '__main__': + unittest.main() diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/ReadMe.md b/thirdparty/fast-reid/tools/deploy/Caffe/ReadMe.md new file mode 100644 index 0000000000000000000000000000000000000000..7f992044974a9d9d299256e9e5aed1492a20c251 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/Caffe/ReadMe.md @@ -0,0 +1,21 @@ +# The Caffe module in nn_tools provides some convenient APIs +If there is a problem parsing your prototxt or caffemodel, please replace +the caffe.proto with your own version and compile it with the command + `protoc --python_out ./ caffe.proto` + +## caffe_net.py +Use `from nn_tools.Caffe import caffe_net` to import this module. +### Prototxt ++ `net=caffe_net.Prototxt(file_name)` to open a prototxt file ++ `net.init_caffemodel(caffe_cmd_path='caffe')` to generate a caffemodel file in the current working directory \ if your `caffe` command is not on the `$PATH`, specify its path with the 
`caffe_cmd_path` kwarg. +### Caffemodel ++ `net=caffe_net.Caffemodel(file_name)` to open a caffemodel ++ `net.save_prototxt(path)` to save the caffemodel to a prototxt file (not containing the weight data) ++ `net.get_layer_data(layer_name)` returns the numpy ndarray data of the layer ++ `net.set_layer_date(layer_name, datas)` sets the data of one layer in the caffemodel; `datas` is normally a list of numpy ndarrays `[weights, bias]` ++ `net.save(path)` saves the changed caffemodel +### Functions for both Prototxt and Caffemodel ++ `net.add_layer(layer_params,before='',after='')` adds a new layer from a `Layer_Param` object ++ `net.remove_layer_by_name(layer_name)` ++ `net.get_layer_by_name(layer_name)` or `net.layer(layer_name)` gets the raw Layer object defined in caffe_pb2 diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/__init__.py b/thirdparty/fast-reid/tools/deploy/Caffe/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/caffe.proto b/thirdparty/fast-reid/tools/deploy/Caffe/caffe.proto new file mode 100644 index 0000000000000000000000000000000000000000..4ea08113ed60739eff5f017871d6985a39e78bf3 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/Caffe/caffe.proto @@ -0,0 +1,2368 @@ +syntax = "proto2"; + +package caffe; + +// Specifies the shape (dimensions) of a Blob. +message BlobShape { + repeated int64 dim = 1 [packed = true]; +} + +message BlobProto { + optional BlobShape shape = 7; + repeated float data = 5 [packed = true]; + repeated float diff = 6 [packed = true]; + repeated double double_data = 8 [packed = true]; + repeated double double_diff = 9 [packed = true]; + + // 4D dimensions -- deprecated. Use "shape" instead. + optional int32 num = 1 [default = 0]; + optional int32 channels = 2 [default = 0]; + optional int32 height = 3 [default = 0]; + optional int32 width = 4 [default = 0]; +} + +// The BlobProtoVector is simply a way to pass multiple blobproto instances +// around. +message BlobProtoVector { + repeated BlobProto blobs = 1; +} + +message Datum { + optional int32 channels = 1; + optional int32 height = 2; + optional int32 width = 3; + // the actual image data, in bytes + optional bytes data = 4; + optional int32 label = 5; + // Optionally, the datum could also hold float data. + repeated float float_data = 6; + // If true, data contains an encoded image that needs to be decoded + optional bool encoded = 7 [default = false]; + repeated float labels = 8; +} + +// *******************add by xia for ssd****************** +// The label (display) name and label id. +message LabelMapItem { + // Both name and label are required. + optional string name = 1; + optional int32 label = 2; + // display_name is optional. + optional string display_name = 3; +} + +message LabelMap { + repeated LabelMapItem item = 1; +} + +// Sample a bbox in the normalized space [0, 1] with provided constraints. +message Sampler { + // Minimum scale of the sampled bbox. + optional float min_scale = 1 [default = 1.]; + // Maximum scale of the sampled bbox. + optional float max_scale = 2 [default = 1.]; + + // Minimum aspect ratio of the sampled bbox. + optional float min_aspect_ratio = 3 [default = 1.]; + // Maximum aspect ratio of the sampled bbox. + optional float max_aspect_ratio = 4 [default = 1.]; +}
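+ +// Illustrative usage (an assumption based on SSD-style train prototxts, not +// part of the original schema comments): together with SampleConstraint and +// BatchSampler defined below, a typical instantiation looks like +// +// batch_sampler { +// sampler { min_scale: 0.3 max_scale: 1.0 min_aspect_ratio: 0.5 max_aspect_ratio: 2.0 } +// sample_constraint { min_jaccard_overlap: 0.7 } +// max_sample: 1 +// max_trials: 50 +// } +// +// i.e. draw up to one random crop per image that overlaps some ground-truth +// box by at least 0.7 Jaccard, giving up after 50 trials.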
+ +// Constraints for selecting sampled bbox. +message SampleConstraint { + // Minimum Jaccard overlap between sampled bbox and all bboxes in + // AnnotationGroup. + optional float min_jaccard_overlap = 1; + // Maximum Jaccard overlap between sampled bbox and all bboxes in + // AnnotationGroup. + optional float max_jaccard_overlap = 2; + + // Minimum coverage of sampled bbox by all bboxes in AnnotationGroup. + optional float min_sample_coverage = 3; + // Maximum coverage of sampled bbox by all bboxes in AnnotationGroup. + optional float max_sample_coverage = 4; + + // Minimum coverage of all bboxes in AnnotationGroup by sampled bbox. + optional float min_object_coverage = 5; + // Maximum coverage of all bboxes in AnnotationGroup by sampled bbox. + optional float max_object_coverage = 6; +} + +// Sample a batch of bboxes with provided constraints. +message BatchSampler { + // Use original image as the source for sampling. + optional bool use_original_image = 1 [default = true]; + + // Constraints for sampling bbox. + optional Sampler sampler = 2; + + // Constraints for determining if a sampled bbox is positive or negative. + optional SampleConstraint sample_constraint = 3; + + // If provided, break when a certain number of samples satisfying the + // sample_constraint has been found. + optional uint32 max_sample = 4; + + // Maximum number of trials for sampling to avoid an infinite loop. + optional uint32 max_trials = 5 [default = 100]; +} + +// Condition for emitting annotations. +message EmitConstraint { + enum EmitType { + CENTER = 0; + MIN_OVERLAP = 1; + } + optional EmitType emit_type = 1 [default = CENTER]; + // If emit_type is MIN_OVERLAP, provide the emit_overlap. + optional float emit_overlap = 2; +} + +// The normalized bounding box [0, 1] w.r.t. the input image size. +message NormalizedBBox { + optional float xmin = 1; + optional float ymin = 2; + optional float xmax = 3; + optional float ymax = 4; + optional int32 label = 5; + optional bool difficult = 6; + optional float score = 7; + optional float size = 8; +} + +// Annotation for each object instance. +message Annotation { + optional int32 instance_id = 1 [default = 0]; + optional NormalizedBBox bbox = 2; +} + +// Group of annotations for a particular label. +message AnnotationGroup { + optional int32 group_label = 1; + repeated Annotation annotation = 2; +} + +// An extension of Datum which contains "rich" annotations. +message AnnotatedDatum { + enum AnnotationType { + BBOX = 0; + } + optional Datum datum = 1; + // If there are "rich" annotations, specify the type of annotation. + // Currently it only supports bounding box. + // If there are no "rich" annotations, use label in datum instead. + optional AnnotationType type = 2; + // Each group contains annotation for a particular class. + repeated AnnotationGroup annotation_group = 3; +} + +// *******************add by xia for mtcnn****************** +message MTCNNBBox { + optional float xmin = 1; + optional float ymin = 2; + optional float xmax = 3; + optional float ymax = 4; +} + +message MTCNNDatum { + optional Datum datum = 1; + //repeated MTCNNBBox rois = 2; + optional MTCNNBBox roi = 2; + repeated float pts = 3; +} +//************************************************************** + +message FillerParameter {
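+ // Illustrative usage (an assumption based on standard Caffe prototxt + // conventions), e.g. inside a convolution layer definition: + // weight_filler { type: "xavier" } + // bias_filler { type: "constant" value: 0 } + + // The filler type.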
+ optional string type = 1 [default = 'constant']; + optional float value = 2 [default = 0]; // the value in constant filler + optional float min = 3 [default = 0]; // the min value in uniform filler + optional float max = 4 [default = 1]; // the max value in uniform filler + optional float mean = 5 [default = 0]; // the mean value in Gaussian filler + optional float std = 6 [default = 1]; // the std value in Gaussian filler + // The expected number of non-zero output weights for a given input in + // Gaussian filler -- the default -1 means don't perform sparsification. + optional int32 sparse = 7 [default = -1]; + // Normalize the filler variance by fan_in, fan_out, or their average. + // Applies to 'xavier' and 'msra' fillers. + enum VarianceNorm { + FAN_IN = 0; + FAN_OUT = 1; + AVERAGE = 2; + } + optional VarianceNorm variance_norm = 8 [default = FAN_IN]; + // added by me + optional string file = 9; +} + +message NetParameter { + optional string name = 1; // consider giving the network a name + // The input blobs to the network. + repeated string input = 3; + // The shape of the input blobs. + repeated BlobShape input_shape = 8; + + // 4D input dimensions -- deprecated. Use "shape" instead. + // If specified, for each input blob there should be four + // values specifying the num, channels, height and width of the input blob. + // Thus, there should be a total of (4 * #input) numbers. + repeated int32 input_dim = 4; + + // Whether the network will force every layer to carry out backward operation. + // If set False, then whether to carry out backward is determined + // automatically according to the net structure and learning rates. + optional bool force_backward = 5 [default = false]; + // The current "state" of the network, including the phase, level, and stage. + // Some layers may be included/excluded depending on this state and the states + // specified in the layers' include and exclude fields. + optional NetState state = 6; + + // Print debugging information about results while running Net::Forward, + // Net::Backward, and Net::Update. + optional bool debug_info = 7 [default = false]; + + // The layers that make up the net. Each of their configurations, including + // connectivity and behavior, is specified as a LayerParameter. + repeated LayerParameter layer = 100; // ID 100 so layers are printed last. + + // DEPRECATED: use 'layer' instead. + repeated V1LayerParameter layers = 2; +} + +// NOTE +// Update the next available ID when you add a new SolverParameter field. +// +// SolverParameter next available ID: 41 (last added: type) +message SolverParameter { + ////////////////////////////////////////////////////////////////////////////// + // Specifying the train and test networks + // + // Exactly one train net must be specified using one of the following fields: + // train_net_param, train_net, net_param, net + // One or more test nets may be specified using any of the following fields: + // test_net_param, test_net, net_param, net + // If more than one test net field is specified (e.g., both net and + // test_net are specified), they will be evaluated in the field order given + // above: (1) test_net_param, (2) test_net, (3) net_param/net. + // A test_iter must be specified for each test_net. + // A test_level and/or a test_stage may also be specified for each test_net. + ////////////////////////////////////////////////////////////////////////////// + + // Proto filename for the train net, possibly combined with one or more + // test nets. 
+ optional string net = 24; + // Inline train net param, possibly combined with one or more test nets. + optional NetParameter net_param = 25; + + optional string train_net = 1; // Proto filename for the train net. + repeated string test_net = 2; // Proto filenames for the test nets. + optional NetParameter train_net_param = 21; // Inline train net params. + repeated NetParameter test_net_param = 22; // Inline test net params. + + // The states for the train/test nets. Must be unspecified or + // specified once per net. + // + // By default, all states will have solver = true; + // train_state will have phase = TRAIN, + // and all test_state's will have phase = TEST. + // Other defaults are set according to the NetState defaults. + optional NetState train_state = 26; + repeated NetState test_state = 27; + + // The number of iterations for each test net. + repeated int32 test_iter = 3; + + // The number of iterations between two testing phases. + optional int32 test_interval = 4 [default = 0]; + optional bool test_compute_loss = 19 [default = false]; + // If true, run an initial test pass before the first iteration, + // ensuring memory availability and printing the starting value of the loss. + optional bool test_initialization = 32 [default = true]; + optional float base_lr = 5; // The base learning rate + // the number of iterations between displaying info. If display = 0, no info + // will be displayed. + optional int32 display = 6; + // Display the loss averaged over the last average_loss iterations + optional int32 average_loss = 33 [default = 1]; + optional int32 max_iter = 7; // the maximum number of iterations + // accumulate gradients over `iter_size` x `batch_size` instances + optional int32 iter_size = 36 [default = 1]; + + // The learning rate decay policy. The currently implemented learning rate + // policies are as follows: + // - fixed: always return base_lr. + // - step: return base_lr * gamma ^ (floor(iter / stepsize)) + // - exp: return base_lr * gamma ^ iter + // - inv: return base_lr * (1 + gamma * iter) ^ (- power) + // - multistep: similar to step but it allows non-uniform steps defined by + // stepvalue + // - poly: the effective learning rate follows a polynomial decay, to be + // zero by the max_iter: return base_lr * (1 - iter/max_iter) ^ power + // - sigmoid: the effective learning rate follows a sigmoid decay: + // return base_lr * (1 / (1 + exp(-gamma * (iter - stepsize)))) + // + // where base_lr, max_iter, gamma, stepsize, stepvalue and power are defined + // in the solver parameter protocol buffer, and iter is the current iteration. + optional string lr_policy = 8; + optional float gamma = 9; // The parameter to compute the learning rate. + optional float power = 10; // The parameter to compute the learning rate. + optional float momentum = 11; // The momentum value. + optional float weight_decay = 12; // The weight decay. + // regularization types supported: L1 and L2 + // controlled by weight_decay + optional string regularization_type = 29 [default = "L2"]; + // the stepsize for learning rate policy "step" + optional int32 stepsize = 13; + // the stepsize for learning rate policy "multistep" + repeated int32 stepvalue = 34; + // for rate policy "multifixed" + repeated float stagelr = 50; + repeated int32 stageiter = 51; +
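+ // A worked example (illustrative, not from the original comments): with + // lr_policy: "step", base_lr: 0.01, gamma: 0.1 and stepsize: 20, the + // effective learning rate is 0.01 for iterations [0, 20), 0.001 for + // [20, 40), 0.0001 for [40, 60), and so on. +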
+ // Set clip_gradients to >= 0 to clip parameter gradients to that L2 norm, + // whenever their actual L2 norm is larger. + optional float clip_gradients = 35 [default = -1]; + + optional int32 snapshot = 14 [default = 0]; // The snapshot interval + optional string snapshot_prefix = 15; // The prefix for the snapshot. + // whether to snapshot diff in the results or not. Snapshotting diff will help + // debugging but the final protocol buffer size will be much larger. + optional bool snapshot_diff = 16 [default = false]; + enum SnapshotFormat { + HDF5 = 0; + BINARYPROTO = 1; + } + optional SnapshotFormat snapshot_format = 37 [default = BINARYPROTO]; + // the mode the solver will use: 0 for CPU and 1 for GPU. GPU is used by default. + enum SolverMode { + CPU = 0; + GPU = 1; + } + optional SolverMode solver_mode = 17 [default = GPU]; + // the device_id that will be used in GPU mode; device_id = 0 by default. + optional int32 device_id = 18 [default = 0]; + // If non-negative, the seed with which the Solver will initialize the Caffe + // random number generator -- useful for reproducible results. Otherwise, + // (and by default) initialize using a seed derived from the system clock. + optional int64 random_seed = 20 [default = -1]; + + // type of the solver + optional string type = 40 [default = "SGD"]; + + // numerical stability for RMSProp, AdaGrad, AdaDelta and Adam + optional float delta = 31 [default = 1e-8]; + // parameters for the Adam solver + optional float momentum2 = 39 [default = 0.999]; + + // RMSProp decay value + // MeanSquare(t) = rms_decay*MeanSquare(t-1) + (1-rms_decay)*SquareGradient(t) + optional float rms_decay = 38; + + // If true, print information about the state of the net that may help with + // debugging learning problems. + optional bool debug_info = 23 [default = false]; + + // If false, don't save a snapshot after training finishes. + optional bool snapshot_after_train = 28 [default = true]; + + // DEPRECATED: old solver enum types, use string instead + enum SolverType { + SGD = 0; + NESTEROV = 1; + ADAGRAD = 2; + RMSPROP = 3; + ADADELTA = 4; + ADAM = 5; + } + // DEPRECATED: use type instead of solver_type + optional SolverType solver_type = 30 [default = SGD]; +} + +// A message that stores the solver snapshots +message SolverState { + optional int32 iter = 1; // The current iteration + optional string learned_net = 2; // The file that stores the learned net. + repeated BlobProto history = 3; // The history for sgd solvers + optional int32 current_step = 4 [default = 0]; // The current step for learning rate +} + +enum Phase { + TRAIN = 0; + TEST = 1; +} + +message NetState { + optional Phase phase = 1 [default = TEST]; + optional int32 level = 2 [default = 0]; + repeated string stage = 3; +} + +message NetStateRule { + // Set phase to require the NetState to have a particular phase (TRAIN or TEST) + // to meet this rule. + optional Phase phase = 1; + + // Set the minimum and/or maximum levels in which the layer should be used. + // Leave undefined to meet the rule regardless of level. + optional int32 min_level = 2; + optional int32 max_level = 3; + + // Customizable sets of stages to include or exclude. + // The net must have ALL of the specified stages and NONE of the specified + // "not_stage"s to meet the rule. + // (Use multiple NetStateRules to specify conjunctions of stages.) 
+ repeated string stage = 4; + repeated string not_stage = 5; +} + +// added by Me +message SpatialTransformerParameter { + + // How to use the parameters passed by the localisation network + optional string transform_type = 1 [default = "affine"]; + // Which sampling technique to use + optional string sampler_type = 2 [default = "bilinear"]; + + // If not set, stays the same as the input dimensions H and W + optional int32 output_H = 3; + optional int32 output_W = 4; + + // If false, only compute dTheta, DO NOT compute dU + optional bool to_compute_dU = 5 [default = true]; + + // The default value for some parameters + optional double theta_1_1 = 6; + optional double theta_1_2 = 7; + optional double theta_1_3 = 8; + optional double theta_2_1 = 9; + optional double theta_2_2 = 10; + optional double theta_2_3 = 11; +} + +// added by Me +message STLossParameter { + + // Indicate the resolution of the output images after ST transformation + required int32 output_H = 1; + required int32 output_W = 2; +} + +// Specifies training parameters (multipliers on global learning constants, +// and the name and other settings used for weight sharing). +message ParamSpec { + // The names of the parameter blobs -- useful for sharing parameters among + // layers, but never required otherwise. To share a parameter between two + // layers, give it a (non-empty) name. + optional string name = 1; + + // Whether to require shared weights to have the same shape, or just the same + // count -- defaults to STRICT if unspecified. + optional DimCheckMode share_mode = 2; + enum DimCheckMode { + // STRICT (default) requires that num, channels, height, width each match. + STRICT = 0; + // PERMISSIVE requires only the count (num*channels*height*width) to match. + PERMISSIVE = 1; + } + + // The multiplier on the global learning rate for this parameter. + optional float lr_mult = 3 [default = 1.0]; + + // The multiplier on the global weight decay for this parameter. + optional float decay_mult = 4 [default = 1.0]; +} + +// NOTE +// Update the next available ID when you add a new LayerParameter field. +// +// LayerParameter next available layer-specific ID: 143 (last added: scale_param) + +message LayerParameter { + optional string name = 1; // the layer name + optional string type = 2; // the layer type + repeated string bottom = 3; // the name of each bottom blob + repeated string top = 4; // the name of each top blob + + // The train / test phase for computation. + optional Phase phase = 10; + + // The amount of weight to assign each top blob in the objective. + // Each layer assigns a default value, usually of either 0 or 1, + // to each top blob. + repeated float loss_weight = 5; + + // Specifies training parameters (multipliers on global learning constants, + // and the name and other settings used for weight sharing). + repeated ParamSpec param = 6; + + // The blobs containing the numeric parameters of the layer. + repeated BlobProto blobs = 7; + + // Specifies on which bottoms the backpropagation should be skipped. + // The size must be either 0 or equal to the number of bottoms. + repeated bool propagate_down = 11; + + // Rules controlling whether and when a layer is included in the network, + // based on the current NetState. You may specify a non-zero number of rules + // to include OR exclude, but not both. If no include or exclude rules are + // specified, the layer is always included. If the current NetState meets + // ANY (i.e., one or more) of the specified rules, the layer is + // included/excluded. 
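+ // Illustrative usage (an assumption based on standard Caffe prototxts): a + // training-only layer can declare + // include { phase: TRAIN } + // while a test-only accuracy layer would use include { phase: TEST }.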
+ repeated NetStateRule include = 8; + repeated NetStateRule exclude = 9; + + // Parameters for data pre-processing. + optional TransformationParameter transform_param = 100; + + // Parameters shared by loss layers. + optional LossParameter loss_param = 101; + + + // Yolo detection loss layer + optional DetectionLossParameter detection_loss_param = 200; + // Yolo detection evaluation layer + optional EvalDetectionParameter eval_detection_param = 201; + // Yolo 9000 + optional RegionLossParameter region_loss_param = 202; + optional ReorgParameter reorg_param = 203; + + // Layer type-specific parameters. + // + // Note: certain layers may have more than one computational engine + // for their implementation. These layers include an Engine type and + // engine parameter for selecting the implementation. + // The default for the engine is set by the ENGINE switch at compile-time. + optional AccuracyParameter accuracy_param = 102; + optional ArgMaxParameter argmax_param = 103; + optional BatchNormParameter batch_norm_param = 139; + optional BiasParameter bias_param = 141; + optional ConcatParameter concat_param = 104; + optional ContrastiveLossParameter contrastive_loss_param = 105; + optional ConvolutionParameter convolution_param = 106; + optional DataParameter data_param = 107; + optional DropoutParameter dropout_param = 108; + optional DummyDataParameter dummy_data_param = 109; + optional EltwiseParameter eltwise_param = 110; + optional ELUParameter elu_param = 140; + optional EmbedParameter embed_param = 137; + optional ExpParameter exp_param = 111; + optional FlattenParameter flatten_param = 135; + optional HDF5DataParameter hdf5_data_param = 112; + optional HDF5OutputParameter hdf5_output_param = 113; + optional HingeLossParameter hinge_loss_param = 114; + optional ImageDataParameter image_data_param = 115; + optional InfogainLossParameter infogain_loss_param = 116; + optional InnerProductParameter inner_product_param = 117; + optional InputParameter input_param = 143; + optional LogParameter log_param = 134; + optional LRNParameter lrn_param = 118; + optional MemoryDataParameter memory_data_param = 119; + optional MVNParameter mvn_param = 120; + optional PoolingParameter pooling_param = 121; + optional PowerParameter power_param = 122; + optional PReLUParameter prelu_param = 131; + optional PythonParameter python_param = 130; + optional RecurrentParameter recurrent_param = 146; + optional ReductionParameter reduction_param = 136; + optional ReLUParameter relu_param = 123; + optional ReshapeParameter reshape_param = 133; + optional ROIPoolingParameter roi_pooling_param = 8266711; //roi pooling + optional ScaleParameter scale_param = 142; + optional SigmoidParameter sigmoid_param = 124; + optional SmoothL1LossParameter smooth_l1_loss_param = 8266712; + optional SoftmaxParameter softmax_param = 125; + optional SPPParameter spp_param = 132; + optional SliceParameter slice_param = 126; + optional TanHParameter tanh_param = 127; + optional ThresholdParameter threshold_param = 128; + optional TileParameter tile_param = 138; + optional WindowDataParameter window_data_param = 129; + + // added by Me + optional SpatialTransformerParameter st_param = 148; + optional STLossParameter st_loss_param = 145; + //***************add by xia************************** + optional RPNParameter rpn_param = 150; // rpn + optional FocalLossParameter focal_loss_param = 155; // Focal Loss layer + + optional AsdnDataParameter asdn_data_param = 159; //asdn + + optional BNParameter bn_param = 160; //bn + optional 
MTCNNDataParameter mtcnn_data_param = 161; //mtcnn + + optional InterpParameter interp_param = 162; //Interp + + optional PSROIPoolingParameter psroi_pooling_param = 163; //rfcn + + //**************************ssd******************************************* + optional AnnotatedDataParameter annotated_data_param = 164; //ssd + optional PriorBoxParameter prior_box_param = 165; + optional CropParameter crop_param = 167; + optional DetectionEvaluateParameter detection_evaluate_param = 168; + optional DetectionOutputParameter detection_output_param = 169; + //optional NormalizeParameter normalize_param = 170; + optional MultiBoxLossParameter multibox_loss_param = 171; + optional PermuteParameter permute_param = 172; + optional VideoDataParameter video_data_param = 173; + + //*************************a softmax loss*********************************** + optional MarginInnerProductParameter margin_inner_product_param = 174; + + //*************************center loss*********************************** + optional CenterLossParameter center_loss_param = 175; + + //*************************deformabel conv*********************************** + optional DeformableConvolutionParameter deformable_convolution_param = 176; + + //***************Additive Margin Softmax for Face Verification*************** + optional LabelSpecificAddParameter label_specific_add_param = 177; + + optional AdditiveMarginInnerProductParameter additive_margin_inner_product_param = 178; + optional CosinAddmParameter cosin_add_m_param = 179; + optional CosinMulmParameter cosin_mul_m_param = 180; + optional ChannelScaleParameter channel_scale_param = 181; + optional FlipParameter flip_param = 182; + optional TripletLossParameter triplet_loss_param = 183; + optional CoupledClusterLossParameter coupled_cluster_loss_param = 184; + optional GeneralTripletParameter general_triplet_loss_param = 185; + + optional ROIAlignParameter roi_align_param = 186; + + //**************add by wdd*************** + optional UpsampleParameter upsample_param = 100003; + optional MatMulParameter matmul_param = 100005; + optional PassThroughParameter pass_through_param = 100004; + optional NormalizeParameter norm_param = 100001; +} + +//*********************add by wdd****************** +message UpsampleParameter { + optional uint32 scale = 1 [default = 2]; + optional uint32 scale_h = 2; + optional uint32 scale_w = 3; + optional bool pad_out_h = 4 [default = false]; + optional bool pad_out_w = 5 [default = false]; + optional uint32 upsample_h = 6; + optional uint32 upsample_w = 7; +} + +message MatMulParameter { + optional uint32 dim_1 = 1;//row of input matrix one + optional uint32 dim_2 = 2;//column of input matrix one and row of input matrix two + optional uint32 dim_3 = 3;//column of input matrix two +} + +message PassThroughParameter { + optional uint32 num_output = 1 [default = 0]; + optional uint32 block_height = 2 [default = 0]; + optional uint32 block_width = 3 [default = 0]; +} + +message NormalizeParameter{ +optional bool across_spatial = 1 [default = true]; +optional FillerParameter scale_filler = 2; +optional bool channel_shared = 3 [default = true]; +optional float eps = 4 [default = 1e-10]; +optional float sqrt_a = 5 [default = 1]; +} + + + +//*******************add by xia****ssd data********* +message AnnotatedDataParameter { + // Define the sampler. + repeated BatchSampler batch_sampler = 1; + // Store label name and label id in LabelMap format. 
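+ // An illustrative label map file (assumed, following the SSD convention): + // item { name: "none_of_the_above" label: 0 display_name: "background" } + // item { name: "person" label: 1 display_name: "person" }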
+ optional string label_map_file = 2; + // If provided, it will replace the AnnotationType stored in each + // AnnotatedDatum. + optional AnnotatedDatum.AnnotationType anno_type = 3; +} + +//*******************add by xia****asdn data********* +message AsdnDataParameter{ + optional int32 count_drop = 1 [default = 15]; + optional int32 permute_count = 2 [default = 20]; + optional int32 count_drop_neg = 3 [default = 0]; + optional int32 channels = 4 [default = 1024]; + optional int32 iter_size = 5 [default = 2]; + optional int32 maintain_before = 6 [default = 1]; +} + +//*******************add by xia****mtcnn********* +message MTCNNDataParameter{ + optional bool augmented = 1 [default = true]; + optional bool flip = 2 [default = true]; + + // -1 means batch_size + optional int32 num_positive = 3 [default = -1]; + optional int32 num_negitive = 4 [default = -1]; + optional int32 num_part = 5 [default = -1]; + optional uint32 resize_width = 6 [default = 0]; + optional uint32 resize_height = 7 [default = 0]; + optional float min_negitive_scale = 8 [default = 0.5]; + optional float max_negitive_scale = 9 [default = 1.5]; +} + +//***************add by xia******InterpLayer********* +message InterpParameter { + optional int32 height = 1 [default = 0]; // Height of output + optional int32 width = 2 [default = 0]; // Width of output + optional int32 zoom_factor = 3 [default = 1]; // zoom factor + optional int32 shrink_factor = 4 [default = 1]; // shrink factor + optional int32 pad_beg = 5 [default = 0]; // padding at begin of input + optional int32 pad_end = 6 [default = 0]; // padding at end of input +} +//*******************add by xia******rfcn******************************** + +message PSROIPoolingParameter { + required float spatial_scale = 1; + required int32 output_dim = 2; // output channel number + required int32 group_size = 3; // number of groups to encode position-sensitive score maps +} +//*************************************************** +message FlipParameter { + optional bool flip_width = 1 [default = true]; + optional bool flip_height = 2 [default = false]; +} + +message BNParameter { + optional FillerParameter slope_filler = 1; + optional FillerParameter bias_filler = 2; + optional float momentum = 3 [default = 0.9]; + optional float eps = 4 [default = 1e-5]; + // If true, will use the moving average mean and std for training and test. + // Will override the lr_param and freeze all the parameters. + // Make sure to initialize the layer properly with pretrained parameters. 
+ optional bool frozen = 5 [default = false]; + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 6 [default = DEFAULT]; +} + +//************************add by xia******************************* +// Focal Loss for Dense Object Detection +message FocalLossParameter { + enum Type { + ORIGIN = 0; // FL(p_t) = -(1 - p_t) ^ gamma * log(p_t), where p_t = p if y == 1 else 1 - p, where p = sigmoid(x) + LINEAR = 1; // FL*(p_t) = -log(p_t) / gamma, where p_t = sigmoid(gamma * x_t + beta), where x_t = x * y, y is the ground truth label {-1, 1} + } + optional Type type = 1 [default = ORIGIN]; + optional float gamma = 2 [default = 2]; + // cross-category weights to solve the imbalance problem + optional float alpha = 3 [default = 0.25]; + optional float beta = 4 [default = 1.0]; +} +//**************************FocalLoss**************************************** + +// Message that stores parameters used to apply transformation +// to the data layer's data +message TransformationParameter { + // For data pre-processing, we can do simple scaling and subtracting the + // data mean, if provided. Note that the mean subtraction is always carried + // out before scaling. + optional float scale = 1 [default = 1]; + // Specify if we want to randomly mirror data. + optional bool mirror = 2 [default = false]; + // Specify if we would like to randomly crop an image. + optional uint32 crop_size = 3 [default = 0]; + optional uint32 crop_h = 11 [default = 0]; + optional uint32 crop_w = 12 [default = 0]; + + // mean_file and mean_value cannot be specified at the same time + optional string mean_file = 4; + // if specified can be repeated once (would subtract it from all the channels) + // or can be repeated the same number of times as channels + // (would subtract them from the corresponding channel) + repeated float mean_value = 5; + // Force the decoded image to have 3 color channels. + optional bool force_color = 6 [default = false]; + // Force the decoded image to have 1 color channel. + optional bool force_gray = 7 [default = false]; + + // Resize policy + optional ResizeParameter resize_param = 8; + // Noise policy + optional NoiseParameter noise_param = 9; + // Distortion policy + optional DistortionParameter distort_param = 13; + // Expand policy + optional ExpansionParameter expand_param = 14; + // Constraint for emitting the annotation after transformation. + optional EmitConstraint emit_constraint = 10; +} + +//*******************add by xia****ssd****************************************************** +// Message that stores parameters used by data transformer for resize policy +message ResizeParameter { + //Probability of using this resize policy + optional float prob = 1 [default = 1]; + + enum Resize_mode { + WARP = 1; + FIT_SMALL_SIZE = 2; + FIT_LARGE_SIZE_AND_PAD = 3; + } + optional Resize_mode resize_mode = 2 [default = WARP]; + optional uint32 height = 3 [default = 0]; + optional uint32 width = 4 [default = 0];
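+ // Illustrative resize_param block (an assumption following SSD-style + // prototxts): warp every sample to a fixed 300x300 input: + // resize_param { prob: 1.0 resize_mode: WARP height: 300 width: 300 interp_mode: LINEAR }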
+ // A parameter used to update bbox in FIT_SMALL_SIZE mode. + optional uint32 height_scale = 8 [default = 0]; + optional uint32 width_scale = 9 [default = 0]; + + enum Pad_mode { + CONSTANT = 1; + MIRRORED = 2; + REPEAT_NEAREST = 3; + } + // Padding mode for FIT_LARGE_SIZE_AND_PAD mode and object centering + optional Pad_mode pad_mode = 5 [default = CONSTANT]; + // if specified can be repeated once (would fill all the channels) + // or can be repeated the same number of times as channels + // (would use them for the corresponding channels) + repeated float pad_value = 6; + + enum Interp_mode { //Same as in OpenCV + LINEAR = 1; + AREA = 2; + NEAREST = 3; + CUBIC = 4; + LANCZOS4 = 5; + } + // interpolation for resizing + repeated Interp_mode interp_mode = 7; +} + +message SaltPepperParameter { + //Percentage of pixels + optional float fraction = 1 [default = 0]; + repeated float value = 2; +} + +// Message that stores parameters used by data transformer for transformation +// policy +message NoiseParameter { + //Probability of using this noise policy + optional float prob = 1 [default = 0]; + // Histogram equalized + optional bool hist_eq = 2 [default = false]; + // Color inversion + optional bool inverse = 3 [default = false]; + // Grayscale + optional bool decolorize = 4 [default = false]; + // Gaussian blur + optional bool gauss_blur = 5 [default = false]; + + // JPEG compression quality (-1 = no compression) + optional float jpeg = 6 [default = -1]; + + // Posterization + optional bool posterize = 7 [default = false]; + + // Erosion + optional bool erode = 8 [default = false]; + + // Salt-and-pepper noise + optional bool saltpepper = 9 [default = false]; + + optional SaltPepperParameter saltpepper_param = 10; + + // Local histogram equalization + optional bool clahe = 11 [default = false]; + + // Color space conversion + optional bool convert_to_hsv = 12 [default = false]; + + // Color space conversion + optional bool convert_to_lab = 13 [default = false]; +} + +// Message that stores parameters used by data transformer for distortion policy +message DistortionParameter { + // The probability of adjusting brightness. + optional float brightness_prob = 1 [default = 0.0]; + // Amount to add to the pixel values within [-delta, delta]. + // The possible value is within [0, 255]. Recommend 32. + optional float brightness_delta = 2 [default = 0.0]; + + // The probability of adjusting contrast. + optional float contrast_prob = 3 [default = 0.0]; + // Lower bound for random contrast factor. Recommend 0.5. + optional float contrast_lower = 4 [default = 0.0]; + // Upper bound for random contrast factor. Recommend 1.5. + optional float contrast_upper = 5 [default = 0.0]; + + // The probability of adjusting hue. + optional float hue_prob = 6 [default = 0.0]; + // Amount to add to the hue channel within [-delta, delta]. + // The possible value is within [0, 180]. Recommend 36. + optional float hue_delta = 7 [default = 0.0]; + + // The probability of adjusting saturation. + optional float saturation_prob = 8 [default = 0.0]; + // Lower bound for the random saturation factor. Recommend 0.5. + optional float saturation_lower = 9 [default = 0.0]; + // Upper bound for the random saturation factor. Recommend 1.5. + optional float saturation_upper = 10 [default = 0.0]; + + // The probability of randomly ordering the image channels. 
+ optional float random_order_prob = 11 [default = 0.0]; +} + +// Message that stores parameters used by data transformer for expansion policy +message ExpansionParameter { + //Probability of using this expansion policy + optional float prob = 1 [default = 1]; + + // The ratio to expand the image. + optional float max_expand_ratio = 2 [default = 1.]; +} + +//************************************************************************************************** + +// Message that stores parameters shared by loss layers +message LossParameter { + // If specified, ignore instances with the given label. + optional int32 ignore_label = 1; + // How to normalize the loss for loss layers that aggregate across batches, + // spatial dimensions, or other dimensions. Currently only implemented in + // SoftmaxWithLoss layer. + enum NormalizationMode { + // Divide by the number of examples in the batch times spatial dimensions. + // Outputs that receive the ignore label will NOT be ignored in computing + // the normalization factor. + FULL = 0; + // Divide by the total number of output locations that do not take the + // ignore_label. If ignore_label is not set, this behaves like FULL. + VALID = 1; + // Divide by the batch size. + BATCH_SIZE = 2; + // Do not normalize the loss. + NONE = 3; + } + optional NormalizationMode normalization = 3 [default = VALID]; + // Deprecated. Ignored if normalization is specified. If normalization + // is not specified, then setting this to false will be equivalent to + // normalization = BATCH_SIZE to be consistent with previous behavior. + optional bool normalize = 2; +} + +// Messages that store parameters used by individual layer types follow, in +// alphabetical order. + +message AccuracyParameter { + // When computing accuracy, count as correct by comparing the true label to + // the top k scoring classes. By default, only compare to the top scoring + // class (i.e. argmax). + optional uint32 top_k = 1 [default = 1]; + + // The "label" axis of the prediction blob, whose argmax corresponds to the + // predicted label -- may be negative to index from the end (e.g., -1 for the + // last axis). For example, if axis == 1 and the predictions are + // (N x C x H x W), the label blob is expected to contain N*H*W ground truth + // labels with integer values in {0, 1, ..., C-1}. + optional int32 axis = 2 [default = 1]; + + // If specified, ignore instances with the given label. + optional int32 ignore_label = 3; +} + +message ArgMaxParameter { + // If true produce pairs (argmax, maxval) + optional bool out_max_val = 1 [default = false]; + optional uint32 top_k = 2 [default = 1]; + // The axis along which to maximise -- may be negative to index from the + // end (e.g., -1 for the last axis). + // By default ArgMaxLayer maximizes over the flattened trailing dimensions + // for each index of the first / num dimension. + optional int32 axis = 3; +} + +message ConcatParameter { + // The axis along which to concatenate -- may be negative to index from the + // end (e.g., -1 for the last axis). Other axes must have the + // same dimension for all the bottom blobs. + // By default, ConcatLayer concatenates blobs along the "channels" axis (1). + optional int32 axis = 2 [default = 1]; + + // DEPRECATED: alias for "axis" -- does not support negative indexing. + optional uint32 concat_dim = 1 [default = 1]; +} + +message BatchNormParameter { + // If false, accumulate global mean/variance values via a moving average. 
If + // true, use those accumulated values instead of computing mean/variance + // across the batch. + optional bool use_global_stats = 1; + // How much does the moving average decay each iteration? + optional float moving_average_fraction = 2 [default = .999]; + // Small value to add to the variance estimate so that we don't divide by + // zero. + optional float eps = 3 [default = 1e-5]; +} + +message BiasParameter { + // The first axis of bottom[0] (the first input Blob) along which to apply + // bottom[1] (the second input Blob). May be negative to index from the end + // (e.g., -1 for the last axis). + // + // For example, if bottom[0] is 4D with shape 100x3x40x60, the output + // top[0] will have the same shape, and bottom[1] may have any of the + // following shapes (for the given value of axis): + // (axis == 0 == -4) 100; 100x3; 100x3x40; 100x3x40x60 + // (axis == 1 == -3) 3; 3x40; 3x40x60 + // (axis == 2 == -2) 40; 40x60 + // (axis == 3 == -1) 60 + // Furthermore, bottom[1] may have the empty shape (regardless of the value of + // "axis") -- a scalar bias. + optional int32 axis = 1 [default = 1]; + + // (num_axes is ignored unless just one bottom is given and the bias is + // a learned parameter of the layer. Otherwise, num_axes is determined by the + // number of axes by the second bottom.) + // The number of axes of the input (bottom[0]) covered by the bias + // parameter, or -1 to cover all axes of bottom[0] starting from `axis`. + // Set num_axes := 0, to add a zero-axis Blob: a scalar. + optional int32 num_axes = 2 [default = 1]; + + // (filler is ignored unless just one bottom is given and the bias is + // a learned parameter of the layer.) + // The initialization for the learned bias parameter. + // Default is the zero (0) initialization, resulting in the BiasLayer + // initially performing the identity operation. + optional FillerParameter filler = 3; +} + +message ContrastiveLossParameter { + // margin for dissimilar pair + optional float margin = 1 [default = 1.0]; + // The first implementation of this cost did not exactly match the cost of + // Hadsell et al 2006 -- using (margin - d^2) instead of (margin - d)^2. + // legacy_version = false (the default) uses (margin - d)^2 as proposed in the + // Hadsell paper. New models should probably use this version. + // legacy_version = true uses (margin - d^2). 
This is kept to support / + // reproduce existing models and results + optional bool legacy_version = 2 [default = false]; +} + +message DetectionLossParameter { + // Yolo detection loss layer + optional uint32 side = 1 [default = 7]; + optional uint32 num_class = 2 [default = 20]; + optional uint32 num_object = 3 [default = 2]; + optional float object_scale = 4 [default = 1.0]; + optional float noobject_scale = 5 [default = 0.5]; + optional float class_scale = 6 [default = 1.0]; + optional float coord_scale = 7 [default = 5.0]; + optional bool sqrt = 8 [default = true]; + optional bool constriant = 9 [default = false]; +} + +message RegionLossParameter{ + //Yolo 9000 + optional uint32 side = 1 [default = 13]; + optional uint32 num_class = 2 [default = 20]; + optional uint32 bias_match = 3 [default = 1]; + optional uint32 coords = 4 [default = 4]; + optional uint32 num = 5 [default = 5]; + optional uint32 softmax = 6 [default = 1]; + optional float jitter = 7 [default = 0.2]; + optional uint32 rescore = 8 [default = 1]; + + optional float object_scale = 9 [default = 1.0]; + optional float class_scale = 10 [default = 1.0]; + optional float noobject_scale = 11 [default = 0.5]; + optional float coord_scale = 12 [default = 5.0]; + optional uint32 absolute = 13 [default = 1]; + optional float thresh = 14 [default = 0.2]; + optional uint32 random = 15 [default = 1]; + repeated float biases = 16; + optional string softmax_tree = 17; + optional string class_map = 18; +} + +message ReorgParameter { + optional uint32 stride = 1; + optional bool reverse = 2 [default = false]; +} + +message EvalDetectionParameter { + enum ScoreType { + OBJ = 0; + PROB = 1; + MULTIPLY = 2; + } + // Yolo detection evaluation layer + optional uint32 side = 1 [default = 7]; + optional uint32 num_class = 2 [default = 20]; + optional uint32 num_object = 3 [default = 2]; + optional float threshold = 4 [default = 0.5]; + optional bool sqrt = 5 [default = true]; + optional bool constriant = 6 [default = true]; + optional ScoreType score_type = 7 [default = MULTIPLY]; + optional float nms = 8 [default = -1]; + repeated float biases = 9; +} + + +message ConvolutionParameter { + optional uint32 num_output = 1; // The number of outputs for the layer + optional bool bias_term = 2 [default = true]; // whether to have bias terms + + // Pad, kernel size, and stride are all given as a single value for equal + // dimensions in all spatial dimensions, or once per spatial dimension. + repeated uint32 pad = 3; // The padding size; defaults to 0 + repeated uint32 kernel_size = 4; // The kernel size + repeated uint32 stride = 6; // The stride; defaults to 1 + // Factor used to dilate the kernel, (implicitly) zero-filling the resulting + // holes. (Kernel dilation is sometimes referred to by its use in the + // algorithme à trous from Holschneider et al. 1987.) + repeated uint32 dilation = 18; // The dilation; defaults to 1 + + // For 2D convolution only, the *_h and *_w versions may also be used to + // specify both spatial dimensions. 
+ optional uint32 pad_h = 9 [default = 0]; // The padding height (2D only) + optional uint32 pad_w = 10 [default = 0]; // The padding width (2D only) + optional uint32 kernel_h = 11; // The kernel height (2D only) + optional uint32 kernel_w = 12; // The kernel width (2D only) + optional uint32 stride_h = 13; // The stride height (2D only) + optional uint32 stride_w = 14; // The stride width (2D only) + + optional uint32 group = 5 [default = 1]; // The group size for group conv + + optional FillerParameter weight_filler = 7; // The filler for the weight + optional FillerParameter bias_filler = 8; // The filler for the bias + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 15 [default = DEFAULT]; + + // The axis to interpret as "channels" when performing convolution. + // Preceding dimensions are treated as independent inputs; + // succeeding dimensions are treated as "spatial". + // With (N, C, H, W) inputs, and axis == 1 (the default), we perform + // N independent 2D convolutions, sliding C-channel (or (C/g)-channels, for + // groups g>1) filters across the spatial axes (H, W) of the input. + // With (N, C, D, H, W) inputs, and axis == 1, we perform + // N independent 3D convolutions, sliding (C/g)-channels + // filters across the spatial axes (D, H, W) of the input. + optional int32 axis = 16 [default = 1]; + + // Whether to force use of the general ND convolution, even if a specific + // implementation for blobs of the appropriate number of spatial dimensions + // is available. (Currently, there is only a 2D-specific convolution + // implementation; for input blobs with num_axes != 2, this option is + // ignored and the ND implementation will be used.) + optional bool force_nd_im2col = 17 [default = false]; +} + +message CropParameter { + // To crop, elements of the first bottom are selected to fit the dimensions + // of the second, reference bottom. The crop is configured by + // - the crop `axis` to pick the dimensions for cropping + // - the crop `offset` to set the shift for all/each dimension + // to align the cropped bottom with the reference bottom. + // All dimensions up to but excluding `axis` are preserved, while + // the dimensions including and trailing `axis` are cropped. + // If only one `offset` is set, then all dimensions are offset by this amount. + // Otherwise, the number of offsets must equal the number of cropped axes to + // shift the crop in each dimension accordingly. + // Note: standard dimensions are N,C,H,W so the default is a spatial crop, + // and `axis` may be negative to index from the end (e.g., -1 for the last + // axis). + optional int32 axis = 1 [default = 2]; + repeated uint32 offset = 2; +} + + +message DataParameter { + enum DB { + LEVELDB = 0; + LMDB = 1; + } + // Specify the data source. + optional string source = 1; + // Specify the batch size. + optional uint32 batch_size = 4; + // The rand_skip variable is for the data layer to skip a few data points + // to avoid all asynchronous sgd clients to start at the same point. The skip + // point would be set as rand_skip * rand(0,1). Note that rand_skip should not + // be larger than the number of keys in the database. + // DEPRECATED. Each solver accesses a different subset of the database. + optional uint32 rand_skip = 7 [default = 0]; + optional DB backend = 8 [default = LEVELDB]; + // DEPRECATED. See TransformationParameter. For data pre-processing, we can do + // simple scaling and subtracting the data mean, if provided. 
Note that the + // mean subtraction is always carried out before scaling. + optional float scale = 2 [default = 1]; + optional string mean_file = 3; + // DEPRECATED. See TransformationParameter. Specify if we would like to randomly + // crop an image. + optional uint32 crop_size = 5 [default = 0]; + // DEPRECATED. See TransformationParameter. Specify if we want to randomly mirror + // data. + optional bool mirror = 6 [default = false]; + // Force the encoded image to have 3 color channels + optional bool force_encoded_color = 9 [default = false]; + // Prefetch queue (Number of batches to prefetch to host memory, increase if + // data access bandwidth varies). + optional uint32 prefetch = 10 [default = 4]; + + repeated uint32 side = 11; +} + +//**********************************ssd******************************************* + +// Message that store parameters used by DetectionEvaluateLayer +message DetectionEvaluateParameter { + // Number of classes that are actually predicted. Required! + optional uint32 num_classes = 1; + // Label id for background class. Needed for sanity check so that + // background class is neither in the ground truth nor the detections. + optional uint32 background_label_id = 2 [default = 0]; + // Threshold for deciding true/false positive. + optional float overlap_threshold = 3 [default = 0.5]; + // If true, also consider difficult ground truth for evaluation. + optional bool evaluate_difficult_gt = 4 [default = true]; + // A file which contains a list of names and sizes with same order + // of the input DB. The file is in the following format: + // name height width + // ... + // If provided, we will scale the prediction and ground truth NormalizedBBox + // for evaluation. + optional string name_size_file = 5; + // The resize parameter used in converting NormalizedBBox to original image. + optional ResizeParameter resize_param = 6; +} + +message NonMaximumSuppressionParameter { + // Threshold to be used in nms. + optional float nms_threshold = 1 [default = 0.3]; + // Maximum number of results to be kept. + optional int32 top_k = 2; + // Parameter for adaptive nms. + optional float eta = 3 [default = 1.0]; +} + +message SaveOutputParameter { + // Output directory. If not empty, we will save the results. + optional string output_directory = 1; + // Output name prefix. + optional string output_name_prefix = 2; + // Output format. + // VOC - PASCAL VOC output format. + // COCO - MS COCO output format. + optional string output_format = 3; + // If you want to output results, must also provide the following two files. + // Otherwise, we will ignore saving results. + // label map file. + optional string label_map_file = 4; + // A file which contains a list of names and sizes with same order + // of the input DB. The file is in the following format: + // name height width + // ... + optional string name_size_file = 5; + // Number of test images. It can be less than the lines specified in + // name_size_file. For example, when we only want to evaluate on part + // of the test images. + optional uint32 num_test_image = 6; + // The resize parameter used in saving the data. + optional ResizeParameter resize_param = 7; +} + + +// Message that store parameters used by DetectionOutputLayer +message DetectionOutputParameter { + // Number of classes to be predicted. Required! + optional uint32 num_classes = 1; + // If true, bounding box are shared among different classes. + optional bool share_location = 2 [default = true]; + // Background label id. 
If there is no background class, + // set it as -1. + optional int32 background_label_id = 3 [default = 0]; + // Parameters used for non maximum suppression. + optional NonMaximumSuppressionParameter nms_param = 4; + // Parameters used for saving detection results. + optional SaveOutputParameter save_output_param = 5; + // Type of coding method for bbox. + optional PriorBoxParameter.CodeType code_type = 6 [default = CORNER]; + // If true, variance is encoded in target; otherwise we need to adjust the + // predicted offset accordingly. + optional bool variance_encoded_in_target = 8 [default = false]; + // Number of total bboxes to be kept per image after nms step. + // -1 means keeping all bboxes after nms step. + optional int32 keep_top_k = 7 [default = -1]; + // Only consider detections whose confidences are larger than a threshold. + // If not provided, consider all boxes. + optional float confidence_threshold = 9; + // If true, visualize the detection results. + optional bool visualize = 10 [default = false]; + // The threshold used to visualize the detection results. + optional float visualize_threshold = 11; + // If provided, save outputs to video file. + optional string save_file = 12; +} +//******************************************************************************* + +message DropoutParameter { + optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio + optional bool scale_train = 2 [default = true]; // scale train or test phase +} + +// DummyDataLayer fills any number of arbitrarily shaped blobs with random +// (or constant) data generated by "Fillers" (see "message FillerParameter"). +message DummyDataParameter { + // This layer produces N >= 1 top blobs. DummyDataParameter must specify 1 or N + // shape fields, and 0, 1 or N data_fillers. + // + // If 0 data_fillers are specified, ConstantFiller with a value of 0 is used. + // If 1 data_filler is specified, it is applied to all top blobs. If N are + // specified, the ith is applied to the ith top blob. + repeated FillerParameter data_filler = 1; + repeated BlobShape shape = 6; + + // 4D dimensions -- deprecated. Use "shape" instead. + repeated uint32 num = 2; + repeated uint32 channels = 3; + repeated uint32 height = 4; + repeated uint32 width = 5; +} + +message EltwiseParameter { + enum EltwiseOp { + PROD = 0; + SUM = 1; + MAX = 2; + } + optional EltwiseOp operation = 1 [default = SUM]; // element-wise operation + repeated float coeff = 2; // blob-wise coefficient for SUM operation + + // Whether to use an asymptotically slower (for >2 inputs) but stabler method + // of computing the gradient for the PROD operation. (No effect for SUM op.) + optional bool stable_prod_grad = 3 [default = true]; +} + +// Message that stores parameters used by ELULayer +message ELUParameter { + // Described in: + // Clevert, D.-A., Unterthiner, T., & Hochreiter, S. (2015). Fast and Accurate + // Deep Network Learning by Exponential Linear Units (ELUs). arXiv + optional float alpha = 1 [default = 1]; +} + +// Message that stores parameters used by EmbedLayer +message EmbedParameter { + optional uint32 num_output = 1; // The number of outputs for the layer + // The input is given as integers to be interpreted as one-hot + // vector indices with dimension num_input. Hence num_input should be + // 1 greater than the maximum possible input value. 
+ optional uint32 input_dim = 2; + + optional bool bias_term = 3 [default = true]; // Whether to use a bias term + optional FillerParameter weight_filler = 4; // The filler for the weight + optional FillerParameter bias_filler = 5; // The filler for the bias + +} + +// Message that stores parameters used by ExpLayer +message ExpParameter { + // ExpLayer computes outputs y = base ^ (shift + scale * x), for base > 0. + // Or if base is set to the default (-1), base is set to e, + // so y = exp(shift + scale * x). + optional float base = 1 [default = -1.0]; + optional float scale = 2 [default = 1.0]; + optional float shift = 3 [default = 0.0]; +} + +/// Message that stores parameters used by FlattenLayer +message FlattenParameter { + // The first axis to flatten: all preceding axes are retained in the output. + // May be negative to index from the end (e.g., -1 for the last axis). + optional int32 axis = 1 [default = 1]; + + // The last axis to flatten: all following axes are retained in the output. + // May be negative to index from the end (e.g., the default -1 for the last + // axis). + optional int32 end_axis = 2 [default = -1]; +} + +// Message that stores parameters used by HDF5DataLayer +message HDF5DataParameter { + // Specify the data source. + optional string source = 1; + // Specify the batch size. + optional uint32 batch_size = 2; + + // Specify whether to shuffle the data. + // If shuffle == true, the ordering of the HDF5 files is shuffled, + // and the ordering of data within any given HDF5 file is shuffled, + // but data between different files are not interleaved; all of a file's + // data are output (in a random order) before moving onto another file. + optional bool shuffle = 3 [default = false]; +} + +message HDF5OutputParameter { + optional string file_name = 1; +} + +message HingeLossParameter { + enum Norm { + L1 = 1; + L2 = 2; + } + // Specify the Norm to use L1 or L2 + optional Norm norm = 1 [default = L1]; +} + +message ImageDataParameter { + // Specify the data source. + optional string source = 1; + // Specify the batch size. + optional uint32 batch_size = 4 [default = 1]; + // The rand_skip variable is for the data layer to skip a few data points + // to avoid all asynchronous sgd clients to start at the same point. The skip + // point would be set as rand_skip * rand(0,1). Note that rand_skip should not + // be larger than the number of keys in the database. + optional uint32 rand_skip = 7 [default = 0]; + // Whether or not ImageLayer should shuffle the list of files at every epoch. + optional bool shuffle = 8 [default = false]; + // It will also resize images if new_height or new_width are not zero. + optional uint32 new_height = 9 [default = 0]; + optional uint32 new_width = 10 [default = 0]; + // Specify if the images are color or gray + optional bool is_color = 11 [default = true]; + // DEPRECATED. See TransformationParameter. For data pre-processing, we can do + // simple scaling and subtracting the data mean, if provided. Note that the + // mean subtraction is always carried out before scaling. + optional float scale = 2 [default = 1]; + optional string mean_file = 3; + // DEPRECATED. See TransformationParameter. Specify if we would like to randomly + // crop an image. + optional uint32 crop_size = 5 [default = 0]; + // DEPRECATED. See TransformationParameter. Specify if we want to randomly mirror + // data. 
+ optional bool mirror = 6 [default = false]; + optional string root_folder = 12 [default = ""]; +} + +message InfogainLossParameter { + // Specify the infogain matrix source. + optional string source = 1; +} + +message InnerProductParameter { + optional uint32 num_output = 1; // The number of outputs for the layer + optional bool bias_term = 2 [default = true]; // whether to have bias terms + optional FillerParameter weight_filler = 3; // The filler for the weight + optional FillerParameter bias_filler = 4; // The filler for the bias + + // The first axis to be lumped into a single inner product computation; + // all preceding axes are retained in the output. + // May be negative to index from the end (e.g., -1 for the last axis). + optional int32 axis = 5 [default = 1]; + // Specify whether to transpose the weight matrix or not. + // If transpose == true, any operations will be performed on the transpose + // of the weight matrix. The weight matrix itself is not going to be transposed + // but rather the transfer flag of operations will be toggled accordingly. + optional bool transpose = 6 [default = false]; + optional bool normalize = 7 [default = false]; +} + +message InputParameter { + // This layer produces N >= 1 top blob(s) to be assigned manually. + // Define N shapes to set a shape for each top. + // Define 1 shape to set the same shape for every top. + // Define no shape to defer to reshaping manually. + repeated BlobShape shape = 1; +} + + +// Message that stores parameters used by LogLayer +message LogParameter { + // LogLayer computes outputs y = log_base(shift + scale * x), for base > 0. + // Or if base is set to the default (-1), base is set to e, + // so y = ln(shift + scale * x) = log_e(shift + scale * x) + optional float base = 1 [default = -1.0]; + optional float scale = 2 [default = 1.0]; + optional float shift = 3 [default = 0.0]; +} + +// Message that stores parameters used by LRNLayer +message LRNParameter { + optional uint32 local_size = 1 [default = 5]; + optional float alpha = 2 [default = 1.]; + optional float beta = 3 [default = 0.75]; + enum NormRegion { + ACROSS_CHANNELS = 0; + WITHIN_CHANNEL = 1; + } + optional NormRegion norm_region = 4 [default = ACROSS_CHANNELS]; + optional float k = 5 [default = 1.]; + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 6 [default = DEFAULT]; +} + +message MemoryDataParameter { + optional uint32 batch_size = 1; + optional uint32 channels = 2; + optional uint32 height = 3; + optional uint32 width = 4; +} +//**************************ssd******************************************** + +// Message that store parameters used by MultiBoxLossLayer +message MultiBoxLossParameter { + // Localization loss type. + enum LocLossType { + L2 = 0; + SMOOTH_L1 = 1; + } + optional LocLossType loc_loss_type = 1 [default = SMOOTH_L1]; + // Confidence loss type. + enum ConfLossType { + SOFTMAX = 0; + LOGISTIC = 1; + } + optional ConfLossType conf_loss_type = 2 [default = SOFTMAX]; + // Weight for localization loss. + optional float loc_weight = 3 [default = 1.0]; + // Number of classes to be predicted. Required! + optional uint32 num_classes = 4; + // If true, bounding box are shared among different classes. + optional bool share_location = 5 [default = true]; + // Matching method during training. 
+  enum MatchType {
+    BIPARTITE = 0;
+    PER_PREDICTION = 1;
+  }
+  optional MatchType match_type = 6 [default = PER_PREDICTION];
+  // If match_type is PER_PREDICTION, use overlap_threshold to
+  // determine the extra matching bboxes.
+  optional float overlap_threshold = 7 [default = 0.5];
+  // Use prior for matching.
+  optional bool use_prior_for_matching = 8 [default = true];
+  // Background label id.
+  optional uint32 background_label_id = 9 [default = 0];
+  // If true, also consider difficult ground truth.
+  optional bool use_difficult_gt = 10 [default = true];
+  // If true, perform negative mining.
+  // DEPRECATED: use mining_type instead.
+  optional bool do_neg_mining = 11;
+  // The negative/positive ratio.
+  optional float neg_pos_ratio = 12 [default = 3.0];
+  // The negative overlap upper bound for the unmatched predictions.
+  optional float neg_overlap = 13 [default = 0.5];
+  // Type of coding method for bbox.
+  optional PriorBoxParameter.CodeType code_type = 14 [default = CORNER];
+  // If true, encode the variance of prior box in the loc loss target instead of
+  // in bbox.
+  optional bool encode_variance_in_target = 16 [default = false];
+  // If true, map all object classes to an agnostic class. It is useful for
+  // learning an objectness detector.
+  optional bool map_object_to_agnostic = 17 [default = false];
+  // If true, ignore cross-boundary bboxes during matching.
+  // A cross-boundary bbox is one that lies outside the image region.
+  optional bool ignore_cross_boundary_bbox = 18 [default = false];
+  // If true, only backpropagate on corners which are inside of the image
+  // region when encode_type is CORNER or CORNER_SIZE.
+  optional bool bp_inside = 19 [default = false];
+  // Mining type during training.
+  //   NONE : use all negatives.
+  //   MAX_NEGATIVE : select negatives based on the score.
+  //   HARD_EXAMPLE : select hard examples based on "Training Region-based
+  //     Object Detectors with Online Hard Example Mining", Shrivastava et al.
+  enum MiningType {
+    NONE = 0;
+    MAX_NEGATIVE = 1;
+    HARD_EXAMPLE = 2;
+  }
+  optional MiningType mining_type = 20 [default = MAX_NEGATIVE];
+  // Parameters used for non-maximum suppression during hard example mining.
+  optional NonMaximumSuppressionParameter nms_param = 21;
+  optional int32 sample_size = 22 [default = 64];
+  optional bool use_prior_for_nms = 23 [default = false];
+}
+
+// Message that stores parameters used by NormalizeLayer
+//message NormalizeParameter {
+//  //optional bool across_spatial = 1 [default = true];
+//  // Initial value of scale. Default is 1.0 for all
+//  //optional FillerParameter scale_filler = 2;
+//  // Whether or not scale parameters are shared across channels.
+//  //optional bool channel_shared = 3 [default = true];
+//  // Epsilon for not dividing by zero while normalizing variance
+//  //optional float eps = 4 [default = 1e-10];
+//  //**************************************************
+//  optional string normalize_type = 1 [default = "L2"];
+//  optional bool fix_gradient = 2 [default = false];
+//  optional bool bp_norm = 3 [default = false];
+//}
+
+message PermuteParameter {
+  // The new orders of the axes of data. Notice it should be within
+  // the same range as the input data, and it starts from 0.
+  // Do not provide repeated order.
+  repeated uint32 order = 1;
+}
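+
+// Illustrative sketch (not part of the original file): a permute_param that
+// reorders an N x C x H x W blob to N x H x W x C:
+//   permute_param {
+//     order: 0
+//     order: 2
+//     order: 3
+//     order: 1
+//   }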
+//**************************end***********************************************
+
+message MVNParameter {
+  // This parameter can be set to false to normalize mean only
+  optional bool normalize_variance = 1 [default = true];
+
+  // This parameter can be set to true to perform DNN-like MVN
+  optional bool across_channels = 2 [default = false];
+
+  // Epsilon for not dividing by zero while normalizing variance
+  optional float eps = 3 [default = 1e-9];
+}
+
+message ParameterParameter {
+  optional BlobShape shape = 1;
+}
+
+
+message PoolingParameter {
+  enum PoolMethod {
+    MAX = 0;
+    AVE = 1;
+    STOCHASTIC = 2;
+  }
+  optional PoolMethod pool = 1 [default = MAX]; // The pooling method
+  // Pad, kernel size, and stride are all given as a single value for equal
+  // dimensions in height and width or as Y, X pairs.
+  optional uint32 pad = 4 [default = 0]; // The padding size (equal in Y, X)
+  optional uint32 pad_h = 9 [default = 0]; // The padding height
+  optional uint32 pad_w = 10 [default = 0]; // The padding width
+  optional uint32 kernel_size = 2; // The kernel size (square)
+  optional uint32 kernel_h = 5; // The kernel height
+  optional uint32 kernel_w = 6; // The kernel width
+  optional uint32 stride = 3 [default = 1]; // The stride (equal in Y, X)
+  optional uint32 stride_h = 7; // The stride height
+  optional uint32 stride_w = 8; // The stride width
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 11 [default = DEFAULT];
+  // If global_pooling then it will pool over the size of the bottom by doing
+  // kernel_h = bottom->height and kernel_w = bottom->width
+  optional bool global_pooling = 12 [default = false];
+
+  ///////////////////////
+  // Specify floor/ceil mode
+  optional bool ceil_mode = 13 [default = true];
+  ///////////////////////////////
+}
+
+message PowerParameter {
+  // PowerLayer computes outputs y = (shift + scale * x) ^ power.
+  optional float power = 1 [default = 1.0];
+  optional float scale = 2 [default = 1.0];
+  optional float shift = 3 [default = 0.0];
+}
+
+//*************ssd********************************************************************
+// Message that stores parameters used by PriorBoxLayer
+message PriorBoxParameter {
+  // Encode/decode type.
+  enum CodeType {
+    CORNER = 1;
+    CENTER_SIZE = 2;
+    CORNER_SIZE = 3;
+  }
+  // Minimum box size (in pixels). Required!
+  repeated float min_size = 1;
+  // Maximum box size (in pixels). Required!
+  repeated float max_size = 2;
+  // Various aspect ratios. Duplicate ratios will be ignored.
+  // If none is provided, we use default ratio 1.
+  repeated float aspect_ratio = 3;
+  // If true, will flip each aspect ratio.
+  // For example, if there is aspect ratio "r",
+  // we will generate aspect ratio "1.0/r" as well.
+  optional bool flip = 4 [default = true];
+  // If true, will clip the prior so that it is within [0, 1]
+  optional bool clip = 5 [default = false];
+  // Variance for adjusting the prior bboxes.
+  repeated float variance = 6;
+  // By default, we calculate img_height, img_width, step_x, step_y based on
+  // bottom[0] (feat) and bottom[1] (img), unless these values are explicitly
+  // provided.
+  // Explicitly provide the img_size.
+  optional uint32 img_size = 7;
+  // Either img_size or img_h/img_w should be specified; not both.
+  optional uint32 img_h = 8;
+  optional uint32 img_w = 9;
+
+  // Explicitly provide the step size.
+  optional float step = 10;
+  // Either step or step_h/step_w should be specified; not both.
+ optional float step_h = 11; + optional float step_w = 12; + + // Offset to the top left corner of each cell. + optional float offset = 13 [default = 0.5]; +} +//********************************************************************************* +message PythonParameter { + optional string module = 1; + optional string layer = 2; + // This value is set to the attribute `param_str` of the `PythonLayer` object + // in Python before calling the `setup()` method. This could be a number, + // string, dictionary in Python dict format, JSON, etc. You may parse this + // string in `setup` method and use it in `forward` and `backward`. + optional string param_str = 3 [default = '']; + // Whether this PythonLayer is shared among worker solvers during data parallelism. + // If true, each worker solver sequentially run forward from this layer. + // This value should be set true if you are using it as a data layer. + optional bool share_in_parallel = 4 [default = false]; +} + +message RecurrentParameter { + // The dimension of the output (and usually hidden state) representation -- + // must be explicitly set to non-zero. + optional uint32 num_output = 1 [default = 0]; + + optional FillerParameter weight_filler = 2; // The filler for the weight + optional FillerParameter bias_filler = 3; // The filler for the bias + + // Whether to enable displaying debug_info in the unrolled recurrent net. + optional bool debug_info = 4 [default = false]; + + // Whether to add as additional inputs (bottoms) the initial hidden state + // blobs, and add as additional outputs (tops) the final timestep hidden state + // blobs. The number of additional bottom/top blobs required depends on the + // recurrent architecture -- e.g., 1 for RNNs, 2 for LSTMs. + optional bool expose_hidden = 5 [default = false]; +} + + +// Message that stores parameters used by ReductionLayer +message ReductionParameter { + enum ReductionOp { + SUM = 1; + ASUM = 2; + SUMSQ = 3; + MEAN = 4; + } + + optional ReductionOp operation = 1 [default = SUM]; // reduction operation + + // The first axis to reduce to a scalar -- may be negative to index from the + // end (e.g., -1 for the last axis). + // (Currently, only reduction along ALL "tail" axes is supported; reduction + // of axis M through N, where N < num_axes - 1, is unsupported.) + // Suppose we have an n-axis bottom Blob with shape: + // (d0, d1, d2, ..., d(m-1), dm, d(m+1), ..., d(n-1)). + // If axis == m, the output Blob will have shape + // (d0, d1, d2, ..., d(m-1)), + // and the ReductionOp operation is performed (d0 * d1 * d2 * ... * d(m-1)) + // times, each including (dm * d(m+1) * ... * d(n-1)) individual data. + // If axis == 0 (the default), the output Blob always has the empty shape + // (count 1), performing reduction across the entire input -- + // often useful for creating new loss functions. + optional int32 axis = 2 [default = 0]; + + optional float coeff = 3 [default = 1.0]; // coefficient for output +} + +// Message that stores parameters used by ReLULayer +message ReLUParameter { + // Allow non-zero slope for negative inputs to speed up optimization + // Described in: + // Maas, A. L., Hannun, A. Y., & Ng, A. Y. (2013). Rectifier nonlinearities + // improve neural network acoustic models. In ICML Workshop on Deep Learning + // for Audio, Speech, and Language Processing. 
+ optional float negative_slope = 1 [default = 0]; + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 2 [default = DEFAULT]; +} + +message ReshapeParameter { + // Specify the output dimensions. If some of the dimensions are set to 0, + // the corresponding dimension from the bottom layer is used (unchanged). + // Exactly one dimension may be set to -1, in which case its value is + // inferred from the count of the bottom blob and the remaining dimensions. + // For example, suppose we want to reshape a 2D blob "input" with shape 2 x 8: + // + // layer { + // type: "Reshape" bottom: "input" top: "output" + // reshape_param { ... } + // } + // + // If "input" is 2D with shape 2 x 8, then the following reshape_param + // specifications are all equivalent, producing a 3D blob "output" with shape + // 2 x 2 x 4: + // + // reshape_param { shape { dim: 2 dim: 2 dim: 4 } } + // reshape_param { shape { dim: 0 dim: 2 dim: 4 } } + // reshape_param { shape { dim: 0 dim: 2 dim: -1 } } + // reshape_param { shape { dim: -1 dim: 0 dim: 2 } } + // + optional BlobShape shape = 1; + + // axis and num_axes control the portion of the bottom blob's shape that are + // replaced by (included in) the reshape. By default (axis == 0 and + // num_axes == -1), the entire bottom blob shape is included in the reshape, + // and hence the shape field must specify the entire output shape. + // + // axis may be non-zero to retain some portion of the beginning of the input + // shape (and may be negative to index from the end; e.g., -1 to begin the + // reshape after the last axis, including nothing in the reshape, + // -2 to include only the last axis, etc.). + // + // For example, suppose "input" is a 2D blob with shape 2 x 8. + // Then the following ReshapeLayer specifications are all equivalent, + // producing a blob "output" with shape 2 x 2 x 4: + // + // reshape_param { shape { dim: 2 dim: 2 dim: 4 } } + // reshape_param { shape { dim: 2 dim: 4 } axis: 1 } + // reshape_param { shape { dim: 2 dim: 4 } axis: -3 } + // + // num_axes specifies the extent of the reshape. + // If num_axes >= 0 (and axis >= 0), the reshape will be performed only on + // input axes in the range [axis, axis+num_axes]. + // num_axes may also be -1, the default, to include all remaining axes + // (starting from axis). + // + // For example, suppose "input" is a 2D blob with shape 2 x 8. + // Then the following ReshapeLayer specifications are equivalent, + // producing a blob "output" with shape 1 x 2 x 8. + // + // reshape_param { shape { dim: 1 dim: 2 dim: 8 } } + // reshape_param { shape { dim: 1 dim: 2 } num_axes: 1 } + // reshape_param { shape { dim: 1 } num_axes: 0 } + // + // On the other hand, these would produce output blob shape 2 x 1 x 8: + // + // reshape_param { shape { dim: 2 dim: 1 dim: 8 } } + // reshape_param { shape { dim: 1 } axis: 1 num_axes: 0 } + // + optional int32 axis = 2 [default = 0]; + optional int32 num_axes = 3 [default = -1]; +} + +// Message that stores parameters used by ROIPoolingLayer +message ROIPoolingParameter { + // Pad, kernel size, and stride are all given as a single value for equal + // dimensions in height and width or as Y, X pairs. 
+ optional uint32 pooled_h = 1 [default = 0]; // The pooled output height + optional uint32 pooled_w = 2 [default = 0]; // The pooled output width + // Multiplicative spatial scale factor to translate ROI coords from their + // input scale to the scale used when pooling + optional float spatial_scale = 3 [default = 1]; +} + +message ScaleParameter { + // The first axis of bottom[0] (the first input Blob) along which to apply + // bottom[1] (the second input Blob). May be negative to index from the end + // (e.g., -1 for the last axis). + // + // For example, if bottom[0] is 4D with shape 100x3x40x60, the output + // top[0] will have the same shape, and bottom[1] may have any of the + // following shapes (for the given value of axis): + // (axis == 0 == -4) 100; 100x3; 100x3x40; 100x3x40x60 + // (axis == 1 == -3) 3; 3x40; 3x40x60 + // (axis == 2 == -2) 40; 40x60 + // (axis == 3 == -1) 60 + // Furthermore, bottom[1] may have the empty shape (regardless of the value of + // "axis") -- a scalar multiplier. + optional int32 axis = 1 [default = 1]; + + // (num_axes is ignored unless just one bottom is given and the scale is + // a learned parameter of the layer. Otherwise, num_axes is determined by the + // number of axes by the second bottom.) + // The number of axes of the input (bottom[0]) covered by the scale + // parameter, or -1 to cover all axes of bottom[0] starting from `axis`. + // Set num_axes := 0, to multiply with a zero-axis Blob: a scalar. + optional int32 num_axes = 2 [default = 1]; + + // (filler is ignored unless just one bottom is given and the scale is + // a learned parameter of the layer.) + // The initialization for the learned scale parameter. + // Default is the unit (1) initialization, resulting in the ScaleLayer + // initially performing the identity operation. + optional FillerParameter filler = 3; + + // Whether to also learn a bias (equivalent to a ScaleLayer+BiasLayer, but + // may be more efficient). Initialized with bias_filler (defaults to 0). + optional bool bias_term = 4 [default = false]; + optional FillerParameter bias_filler = 5; + optional float min_value = 6; + optional float max_value = 7; +} + +message SigmoidParameter { + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 1 [default = DEFAULT]; +} + +message SmoothL1LossParameter { + // SmoothL1Loss(x) = + // 0.5 * (sigma * x) ** 2 -- if x < 1.0 / sigma / sigma + // |x| - 0.5 / sigma / sigma -- otherwise + optional float sigma = 1 [default = 1]; +} + +message SliceParameter { + // The axis along which to slice -- may be negative to index from the end + // (e.g., -1 for the last axis). + // By default, SliceLayer concatenates blobs along the "channels" axis (1). + optional int32 axis = 3 [default = 1]; + repeated uint32 slice_point = 2; + + // DEPRECATED: alias for "axis" -- does not support negative indexing. + optional uint32 slice_dim = 1 [default = 1]; +} + +// Message that stores parameters used by SoftmaxLayer, SoftmaxWithLossLayer +message SoftmaxParameter { + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 1 [default = DEFAULT]; + + // The axis along which to perform the softmax -- may be negative to index + // from the end (e.g., -1 for the last axis). + // Any other axes will be evaluated as independent softmaxes. 
+ optional int32 axis = 2 [default = 1]; +} + +message TanHParameter { + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 1 [default = DEFAULT]; +} + +// Message that stores parameters used by TileLayer +message TileParameter { + // The index of the axis to tile. + optional int32 axis = 1 [default = 1]; + + // The number of copies (tiles) of the blob to output. + optional int32 tiles = 2; +} + +// Message that stores parameters used by ThresholdLayer +message ThresholdParameter { + optional float threshold = 1 [default = 0]; // Strictly positive values +} + +message WindowDataParameter { + // Specify the data source. + optional string source = 1; + // For data pre-processing, we can do simple scaling and subtracting the + // data mean, if provided. Note that the mean subtraction is always carried + // out before scaling. + optional float scale = 2 [default = 1]; + optional string mean_file = 3; + // Specify the batch size. + optional uint32 batch_size = 4; + // Specify if we would like to randomly crop an image. + optional uint32 crop_size = 5 [default = 0]; + // Specify if we want to randomly mirror data. + optional bool mirror = 6 [default = false]; + // Foreground (object) overlap threshold + optional float fg_threshold = 7 [default = 0.5]; + // Background (non-object) overlap threshold + optional float bg_threshold = 8 [default = 0.5]; + // Fraction of batch that should be foreground objects + optional float fg_fraction = 9 [default = 0.25]; + // Amount of contextual padding to add around a window + // (used only by the window_data_layer) + optional uint32 context_pad = 10 [default = 0]; + // Mode for cropping out a detection window + // warp: cropped window is warped to a fixed size and aspect ratio + // square: the tightest square around the window is cropped + optional string crop_mode = 11 [default = "warp"]; + // cache_images: will load all images in memory for faster access + optional bool cache_images = 12 [default = false]; + // append root_folder to locate images + optional string root_folder = 13 [default = ""]; +} + +message SPPParameter { + enum PoolMethod { + MAX = 0; + AVE = 1; + STOCHASTIC = 2; + } + optional uint32 pyramid_height = 1; + optional PoolMethod pool = 2 [default = MAX]; // The pooling method + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 6 [default = DEFAULT]; +} + +// DEPRECATED: use LayerParameter. 
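+// (V1LayerParameter is kept only so that old serialized nets can still be
+// parsed and upgraded; new prototxts should define "layer" blocks instead.)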
+message V1LayerParameter { + repeated string bottom = 2; + repeated string top = 3; + optional string name = 4; + repeated NetStateRule include = 32; + repeated NetStateRule exclude = 33; + enum LayerType { + NONE = 0; + ABSVAL = 35; + ACCURACY = 1; + ARGMAX = 30; + BNLL = 2; + CONCAT = 3; + CONTRASTIVE_LOSS = 37; + CONVOLUTION = 4; + DATA = 5; + DECONVOLUTION = 39; + DROPOUT = 6; + DUMMY_DATA = 32; + EUCLIDEAN_LOSS = 7; + ELTWISE = 25; + EXP = 38; + FLATTEN = 8; + HDF5_DATA = 9; + HDF5_OUTPUT = 10; + HINGE_LOSS = 28; + IM2COL = 11; + IMAGE_DATA = 12; + INFOGAIN_LOSS = 13; + INNER_PRODUCT = 14; + LRN = 15; + MEMORY_DATA = 29; + MULTINOMIAL_LOGISTIC_LOSS = 16; + MVN = 34; + POOLING = 17; + POWER = 26; + RELU = 18; + SIGMOID = 19; + SIGMOID_CROSS_ENTROPY_LOSS = 27; + SILENCE = 36; + SOFTMAX = 20; + SOFTMAX_LOSS = 21; + SPLIT = 22; + SLICE = 33; + TANH = 23; + WINDOW_DATA = 24; + THRESHOLD = 31; + } + optional LayerType type = 5; + repeated BlobProto blobs = 6; + repeated string param = 1001; + repeated DimCheckMode blob_share_mode = 1002; + enum DimCheckMode { + STRICT = 0; + PERMISSIVE = 1; + } + repeated float blobs_lr = 7; + repeated float weight_decay = 8; + repeated float loss_weight = 35; + optional AccuracyParameter accuracy_param = 27; + optional ArgMaxParameter argmax_param = 23; + optional ConcatParameter concat_param = 9; + optional ContrastiveLossParameter contrastive_loss_param = 40; + optional ConvolutionParameter convolution_param = 10; + optional DataParameter data_param = 11; + optional DropoutParameter dropout_param = 12; + optional DummyDataParameter dummy_data_param = 26; + optional EltwiseParameter eltwise_param = 24; + optional ExpParameter exp_param = 41; + optional HDF5DataParameter hdf5_data_param = 13; + optional HDF5OutputParameter hdf5_output_param = 14; + optional HingeLossParameter hinge_loss_param = 29; + optional ImageDataParameter image_data_param = 15; + optional InfogainLossParameter infogain_loss_param = 16; + optional InnerProductParameter inner_product_param = 17; + optional LRNParameter lrn_param = 18; + optional MemoryDataParameter memory_data_param = 22; + optional MVNParameter mvn_param = 34; + optional PoolingParameter pooling_param = 19; + optional PowerParameter power_param = 21; + optional ReLUParameter relu_param = 30; + optional SigmoidParameter sigmoid_param = 38; + optional SoftmaxParameter softmax_param = 39; + optional SliceParameter slice_param = 31; + optional TanHParameter tanh_param = 37; + optional ThresholdParameter threshold_param = 25; + optional WindowDataParameter window_data_param = 20; + optional TransformationParameter transform_param = 36; + optional LossParameter loss_param = 42; + optional DetectionLossParameter detection_loss_param = 200; + optional EvalDetectionParameter eval_detection_param = 201; + optional V0LayerParameter layer = 1; +} + +// DEPRECATED: V0LayerParameter is the old way of specifying layer parameters +// in Caffe. We keep this message type around for legacy support. +message V0LayerParameter { + optional string name = 1; // the layer name + optional string type = 2; // the string to specify the layer type + + // Parameters to specify layers with inner products. 
+ optional uint32 num_output = 3; // The number of outputs for the layer + optional bool biasterm = 4 [default = true]; // whether to have bias terms + optional FillerParameter weight_filler = 5; // The filler for the weight + optional FillerParameter bias_filler = 6; // The filler for the bias + + optional uint32 pad = 7 [default = 0]; // The padding size + optional uint32 kernelsize = 8; // The kernel size + optional uint32 group = 9 [default = 1]; // The group size for group conv + optional uint32 stride = 10 [default = 1]; // The stride + enum PoolMethod { + MAX = 0; + AVE = 1; + STOCHASTIC = 2; + } + optional PoolMethod pool = 11 [default = MAX]; // The pooling method + optional float dropout_ratio = 12 [default = 0.5]; // dropout ratio + + optional uint32 local_size = 13 [default = 5]; // for local response norm + optional float alpha = 14 [default = 1.]; // for local response norm + optional float beta = 15 [default = 0.75]; // for local response norm + optional float k = 22 [default = 1.]; + + // For data layers, specify the data source + optional string source = 16; + // For data pre-processing, we can do simple scaling and subtracting the + // data mean, if provided. Note that the mean subtraction is always carried + // out before scaling. + optional float scale = 17 [default = 1]; + optional string meanfile = 18; + // For data layers, specify the batch size. + optional uint32 batchsize = 19; + // For data layers, specify if we would like to randomly crop an image. + optional uint32 cropsize = 20 [default = 0]; + // For data layers, specify if we want to randomly mirror data. + optional bool mirror = 21 [default = false]; + + // The blobs containing the numeric parameters of the layer + repeated BlobProto blobs = 50; + // The ratio that is multiplied on the global learning rate. If you want to + // set the learning ratio for one blob, you need to set it for all blobs. + repeated float blobs_lr = 51; + // The weight decay that is multiplied on the global weight decay. + repeated float weight_decay = 52; + + // The rand_skip variable is for the data layer to skip a few data points + // to avoid all asynchronous sgd clients to start at the same point. The skip + // point would be set as rand_skip * rand(0,1). Note that rand_skip should not + // be larger than the number of keys in the database. + optional uint32 rand_skip = 53 [default = 0]; + + // Fields related to detection (det_*) + // foreground (object) overlap threshold + optional float det_fg_threshold = 54 [default = 0.5]; + // background (non-object) overlap threshold + optional float det_bg_threshold = 55 [default = 0.5]; + // Fraction of batch that should be foreground objects + optional float det_fg_fraction = 56 [default = 0.25]; + + // optional bool OBSOLETE_can_clobber = 57 [default = true]; + + // Amount of contextual padding to add around a window + // (used only by the window_data_layer) + optional uint32 det_context_pad = 58 [default = 0]; + + // Mode for cropping out a detection window + // warp: cropped window is warped to a fixed size and aspect ratio + // square: the tightest square around the window is cropped + optional string det_crop_mode = 59 [default = "warp"]; + + // For ReshapeLayer, one needs to specify the new dimensions. + optional int32 new_num = 60 [default = 0]; + optional int32 new_channels = 61 [default = 0]; + optional int32 new_height = 62 [default = 0]; + optional int32 new_width = 63 [default = 0]; + + // Whether or not ImageLayer should shuffle the list of files at every epoch. 
+  // It will also resize images if new_height or new_width are not zero.
+  optional bool shuffle_images = 64 [default = false];
+
+  // For ConcatLayer, one needs to specify the dimension for concatenation, and
+  // the other dimensions must be the same for all the bottom blobs.
+  // By default it will concatenate blobs along the channels dimension.
+  optional uint32 concat_dim = 65 [default = 1];
+
+  optional HDF5OutputParameter hdf5_output_param = 1001;
+}
+
+message PReLUParameter {
+  // Parametric ReLU described in K. He et al., Delving Deep into Rectifiers:
+  // Surpassing Human-Level Performance on ImageNet Classification, 2015.
+
+  // Initial value of a_i. Default is a_i=0.25 for all i.
+  optional FillerParameter filler = 1;
+  // Whether or not slope parameters are shared across channels.
+  optional bool channel_shared = 2 [default = false];
+}
+
+
+//********added by xia****************
+message RPNParameter {
+  optional uint32 feat_stride = 1;
+  optional uint32 basesize = 2;
+  repeated uint32 scale = 3;
+  repeated float ratio = 4;
+  optional uint32 boxminsize = 5;
+  optional uint32 per_nms_topn = 9;
+  optional uint32 post_nms_topn = 11;
+  optional float nms_thresh = 8;
+}
+
+message VideoDataParameter {
+  enum VideoType {
+    WEBCAM = 0;
+    VIDEO = 1;
+  }
+  optional VideoType video_type = 1 [default = WEBCAM];
+  optional int32 device_id = 2 [default = 0];
+  optional string video_file = 3;
+  // Number of frames to be skipped before processing a frame.
+  optional uint32 skip_frames = 4 [default = 0];
+}
+
+message CenterLossParameter {
+  optional uint32 num_output = 1; // The number of outputs for the layer
+  optional FillerParameter center_filler = 2; // The filler for the centers
+  // The first axis to be lumped into a single inner product computation;
+  // all preceding axes are retained in the output.
+  // May be negative to index from the end (e.g., -1 for the last axis).
+  optional int32 axis = 3 [default = 1];
+}
+
+message MarginInnerProductParameter {
+  optional uint32 num_output = 1; // The number of outputs for the layer
+  enum MarginType {
+    SINGLE = 0;
+    DOUBLE = 1;
+    TRIPLE = 2;
+    QUADRUPLE = 3;
+  }
+  optional MarginType type = 2 [default = SINGLE];
+  optional FillerParameter weight_filler = 3; // The filler for the weight
+
+  // The first axis to be lumped into a single inner product computation;
+  // all preceding axes are retained in the output.
+  // May be negative to index from the end (e.g., -1 for the last axis).
+ optional int32 axis = 4 [default = 1]; + optional float base = 5 [default = 1]; + optional float gamma = 6 [default = 0]; + optional float power = 7 [default = 1]; + optional int32 iteration = 8 [default = 0]; + optional float lambda_min = 9 [default = 0]; +} + +message AdditiveMarginInnerProductParameter { + optional uint32 num_output = 1; // The number of outputs for the layer + optional FillerParameter weight_filler = 2; // The filler for the weight + optional float m = 3 [default = 0.35]; + optional int32 axis = 4 [default = 1]; +} + +message DeformableConvolutionParameter { + optional uint32 num_output = 1; + optional bool bias_term = 2 [default = true]; + repeated uint32 pad = 3; // The padding size; defaults to 0 + repeated uint32 kernel_size = 4; // The kernel size + repeated uint32 stride = 6; // The stride; defaults to 1 + repeated uint32 dilation = 18; // The dilation; defaults to 1 + optional uint32 pad_h = 9 [default = 0]; // The padding height (2D only) + optional uint32 pad_w = 10 [default = 0]; // The padding width (2D only) + optional uint32 kernel_h = 11; // The kernel height (2D only) + optional uint32 kernel_w = 12; // The kernel width (2D only) + optional uint32 stride_h = 13; // The stride height (2D only) + optional uint32 stride_w = 14; // The stride width (2D only) + optional uint32 group = 5 [default = 4]; + optional uint32 deformable_group = 25 [default = 4]; + optional FillerParameter weight_filler = 7; // The filler for the weight + optional FillerParameter bias_filler = 8; // The filler for the bias + enum Engine { + DEFAULT = 0; + CAFFE = 1; + CUDNN = 2; + } + optional Engine engine = 15 [default = DEFAULT]; + optional int32 axis = 16 [default = 1]; + optional bool force_nd_im2col = 17 [default = false]; +} + +message LabelSpecificAddParameter { + optional float bias = 1 [default = 0.0]; + optional bool transform_test = 2 [default = false]; +} + +message ChannelScaleParameter{ + optional bool do_forward = 1 [default = true]; + optional bool do_backward_feature = 2 [default = true]; + optional bool do_backward_scale = 3 [default = true]; + optional bool global_scale = 4 [default = false]; + optional float max_global_scale = 5 [default = 1000.0]; + optional float min_global_scale = 6 [default = 0.0]; + optional float init_global_scale = 7 [default = 1.0]; +} + +message CosinAddmParameter { + optional float m = 1 [default = 0.5]; + optional bool transform_test = 2 [default = false]; +} +message CosinMulmParameter { + optional float m = 1 [default = 4]; + optional bool transform_test = 2 [default = false]; +} + +message CoupledClusterLossParameter { + optional float margin = 1 [default = 1]; + optional int32 group_size = 2 [default = 3]; + optional float scale = 3 [default = 1]; + optional bool log_flag = 4 [default = false]; + // optional int32 pos_num = 3 [default = 1]; + // optional int32 neg_num = 4 [default = 1]; +} + +message TripletLossParameter { + optional float margin = 1 [default = 1]; + optional int32 group_size = 2 [default = 3]; + optional float scale = 3 [default = 1]; + // optional int32 pos_num = 3 [default = 1]; + // optional int32 neg_num = 4 [default = 1]; +} + +message GeneralTripletParameter { + optional float margin = 1 [default = 0.2]; + optional bool add_center_loss = 2 [default = true]; + optional bool hardest_only = 3 [default = false]; + optional bool positive_first = 4 [default = false]; + optional float positive_upper_bound = 5 [default = 1.0]; + optional float positive_weight = 6 [default = 1.0]; + optional float negative_weight = 
7 [default = 1.0];
+}
+
+message ROIAlignParameter {
+  optional uint32 pooled_h = 1 [default = 0]; // The pooled output height
+  optional uint32 pooled_w = 2 [default = 0]; // The pooled output width
+  optional float spatial_scale = 3 [default = 1];
+}
+
diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/caffe_lmdb.py b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_lmdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c425c3520135deddb64391418268a8fb95fbe5a
--- /dev/null
+++ b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_lmdb.py
@@ -0,0 +1,35 @@
+import lmdb
+from Caffe import caffe_pb2 as pb2
+import numpy as np
+
+class Read_Caffe_LMDB():
+    def __init__(self, path, dtype=np.uint8):
+        self.env = lmdb.open(path, readonly=True)
+        self.dtype = dtype
+        self.txn = self.env.begin()
+        self.cursor = self.txn.cursor()
+
+    @staticmethod
+    def to_numpy(value, dtype=np.uint8):
+        # Decode one serialized Datum into a (channels, height, width) array
+        # plus its integer label.
+        datum = pb2.Datum()
+        datum.ParseFromString(value)
+        # np.fromstring is deprecated; np.frombuffer decodes the raw bytes
+        flat_x = np.frombuffer(datum.data, dtype=dtype)
+        data = flat_x.reshape(datum.channels, datum.height, datum.width)
+        label = datum.label
+        return data, label
+
+    def iterator(self):
+        while True:
+            value = self.cursor.value()
+            yield self.to_numpy(value, self.dtype)
+            if not self.cursor.next():
+                return
+
+    def __iter__(self):
+        self.cursor.first()
+        return self.iterator()
+
+    def __len__(self):
+        return int(self.env.stat()['entries'])
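+
+# Minimal usage sketch (illustration only, not part of the original tool).
+# It assumes an LMDB at './lmdb_dir' whose values are serialized pb2.Datum
+# records; kept commented out so importing this module has no side effects.
+#   reader = Read_Caffe_LMDB('./lmdb_dir')
+#   print(len(reader))              # number of entries in the database
+#   for data, label in reader:      # data is a (C, H, W) array of self.dtype
+#       print(data.shape, label)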
diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/caffe_net.py b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_net.py
new file mode 100644
index 0000000000000000000000000000000000000000..d68ce47f295beced11b67a77e59a1ac956c467e2
--- /dev/null
+++ b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_net.py
@@ -0,0 +1,139 @@
+from __future__ import absolute_import
+from . import caffe_pb2 as pb
+import google.protobuf.text_format as text_format
+import numpy as np
+from .layer_param import Layer_param
+
+class _Net(object):
+    def __init__(self):
+        self.net = pb.NetParameter()
+
+    def layer_index(self, layer_name):
+        # Find a layer's index by name; return its position in the net,
+        # or -1 if no layer with that name exists.
+        for i, layer in enumerate(self.net.layer):
+            if layer.name == layer_name:
+                return i
+        return -1
+
+    def add_layer(self, layer_params, before='', after=''):
+        # find the position of the before/after layer
+        index = -1
+        if after != '':
+            index = self.layer_index(after) + 1
+        if before != '':
+            index = self.layer_index(before)
+        new_layer = pb.LayerParameter()
+        new_layer.CopyFrom(layer_params.param)
+        # insert the layer into the layer proto list
+        if index != -1:
+            self.net.layer.add()
+            for i in range(len(self.net.layer) - 1, index, -1):
+                self.net.layer[i].CopyFrom(self.net.layer[i - 1])
+            self.net.layer[index].CopyFrom(new_layer)
+        else:
+            self.net.layer.extend([new_layer])
+
+    def remove_layer_by_name(self, layer_name):
+        for i, layer in enumerate(self.net.layer):
+            if layer.name == layer_name:
+                del self.net.layer[i]
+                return
+        raise AttributeError("cannot find layer %s" % str(layer_name))
+
+    def get_layer_by_name(self, layer_name):
+        # get the layer by layer_name
+        for layer in self.net.layer:
+            if layer.name == layer_name:
+                return layer
+        raise AttributeError("cannot find layer %s" % str(layer_name))
+
+    def save_prototxt(self, path):
+        prototxt = pb.NetParameter()
+        prototxt.CopyFrom(self.net)
+        for layer in prototxt.layer:
+            del layer.blobs[:]
+        with open(path, 'w') as f:
+            f.write(text_format.MessageToString(prototxt))
+
+    def layer(self, layer_name):
+        return self.get_layer_by_name(layer_name)
+
+    def layers(self):
+        return list(self.net.layer)
+
+
+class Prototxt(_Net):
+    def __init__(self, file_name=''):
+        super(Prototxt, self).__init__()
+        self.file_name = file_name
+        if file_name != '':
+            with open(file_name, 'r') as f:
+                text_format.Parse(f.read(), self.net)
+
+    def init_caffemodel(self, caffe_cmd_path='caffe'):
+        """
+        :param caffe_cmd_path: the caffe executable, normally build/tools/caffe under the Caffe root
+        """
+        s = pb.SolverParameter()
+        s.train_net = self.file_name
+        s.max_iter = 0
+        s.base_lr = 1
+        s.solver_mode = pb.SolverParameter.CPU
+        s.snapshot_prefix = './nn'
+        with open('/tmp/nn_tools_solver.prototxt', 'w') as f:
+            f.write(str(s))
+        import os
+        os.system('%s train --solver /tmp/nn_tools_solver.prototxt' % caffe_cmd_path)
+
+class Caffemodel(_Net):
+    def __init__(self, file_name=''):
+        super(Caffemodel, self).__init__()
+        # load a serialized caffemodel if a path is given
+        if file_name != '':
+            with open(file_name, 'rb') as f:
+                self.net.ParseFromString(f.read())
+
+    def save(self, path):
+        with open(path, 'wb') as f:
+            f.write(self.net.SerializeToString())
+
+    def add_layer_with_data(self, layer_params, datas, before='', after=''):
+        """
+        Args:
+            layer_params: a Layer_param object
+            datas: a list of numpy arrays with fixed dimensions
+            after: put the layer after a specified layer
+            before: put the layer before a specified layer
+        """
+        self.add_layer(layer_params, before, after)
+        new_layer = self.layer(layer_params.name)
+
+        # process blobs
+        del new_layer.blobs[:]
+        for data in datas:
+            new_blob = new_layer.blobs.add()
+            for dim in data.shape:
+                new_blob.shape.dim.append(dim)
+            new_blob.data.extend(data.flatten().astype(float))
+
+    def get_layer_data(self, layer_name):
+        layer = self.layer(layer_name)
+        datas = []
+        for blob in layer.blobs:
+            shape = list(blob.shape.dim)
+            data = np.array(blob.data).reshape(shape)
+            datas.append(data)
+        return datas
+
+    def set_layer_data(self, layer_name, datas):
+        # datas is normally a list of [weights, bias]
+        layer = self.layer(layer_name)
+        for blob, data in zip(layer.blobs, datas):
+            blob.data[:] = data.flatten()
+
+class Net():
+    def __init__(self, *args, **kwargs):
+        raise TypeError('the class Net is no longer used, please
use Caffemodel or Prototxt instead') \ No newline at end of file diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/caffe_pb2.py b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..faaae0b9d9314fff9b7ae83ec059d61f6dda1ffa --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/Caffe/caffe_pb2.py @@ -0,0 +1,10970 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: caffe.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='caffe.proto', + package='caffe', + syntax='proto2', + serialized_options=None, + serialized_pb=_b('\n\x0b\x63\x61\x66\x66\x65.proto\x12\x05\x63\x61\x66\x66\x65\"\x1c\n\tBlobShape\x12\x0f\n\x03\x64im\x18\x01 \x03(\x03\x42\x02\x10\x01\"\xcc\x01\n\tBlobProto\x12\x1f\n\x05shape\x18\x07 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x10\n\x04\x64\x61ta\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x10\n\x04\x64iff\x18\x06 \x03(\x02\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x08 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_diff\x18\t \x03(\x01\x42\x02\x10\x01\x12\x0e\n\x03num\x18\x01 \x01(\x05:\x01\x30\x12\x13\n\x08\x63hannels\x18\x02 \x01(\x05:\x01\x30\x12\x11\n\x06height\x18\x03 \x01(\x05:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\x05:\x01\x30\"2\n\x0f\x42lobProtoVector\x12\x1f\n\x05\x62lobs\x18\x01 \x03(\x0b\x32\x10.caffe.BlobProto\"\x91\x01\n\x05\x44\x61tum\x12\x10\n\x08\x63hannels\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x12\n\nfloat_data\x18\x06 \x03(\x02\x12\x16\n\x07\x65ncoded\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x0e\n\x06labels\x18\x08 \x03(\x02\"A\n\x0cLabelMapItem\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\x05\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\"-\n\x08LabelMap\x12!\n\x04item\x18\x01 \x03(\x0b\x32\x13.caffe.LabelMapItem\"o\n\x07Sampler\x12\x14\n\tmin_scale\x18\x01 \x01(\x02:\x01\x31\x12\x14\n\tmax_scale\x18\x02 \x01(\x02:\x01\x31\x12\x1b\n\x10min_aspect_ratio\x18\x03 \x01(\x02:\x01\x31\x12\x1b\n\x10max_aspect_ratio\x18\x04 \x01(\x02:\x01\x31\"\xc0\x01\n\x10SampleConstraint\x12\x1b\n\x13min_jaccard_overlap\x18\x01 \x01(\x02\x12\x1b\n\x13max_jaccard_overlap\x18\x02 \x01(\x02\x12\x1b\n\x13min_sample_coverage\x18\x03 \x01(\x02\x12\x1b\n\x13max_sample_coverage\x18\x04 \x01(\x02\x12\x1b\n\x13min_object_coverage\x18\x05 \x01(\x02\x12\x1b\n\x13max_object_coverage\x18\x06 \x01(\x02\"\xb2\x01\n\x0c\x42\x61tchSampler\x12 \n\x12use_original_image\x18\x01 \x01(\x08:\x04true\x12\x1f\n\x07sampler\x18\x02 \x01(\x0b\x32\x0e.caffe.Sampler\x12\x32\n\x11sample_constraint\x18\x03 \x01(\x0b\x32\x17.caffe.SampleConstraint\x12\x12\n\nmax_sample\x18\x04 \x01(\r\x12\x17\n\nmax_trials\x18\x05 \x01(\r:\x03\x31\x30\x30\"\x8a\x01\n\x0e\x45mitConstraint\x12\x39\n\temit_type\x18\x01 \x01(\x0e\x32\x1e.caffe.EmitConstraint.EmitType:\x06\x43\x45NTER\x12\x14\n\x0c\x65mit_overlap\x18\x02 
\x01(\x02\"\'\n\x08\x45mitType\x12\n\n\x06\x43\x45NTER\x10\x00\x12\x0f\n\x0bMIN_OVERLAP\x10\x01\"\x87\x01\n\x0eNormalizedBBox\x12\x0c\n\x04xmin\x18\x01 \x01(\x02\x12\x0c\n\x04ymin\x18\x02 \x01(\x02\x12\x0c\n\x04xmax\x18\x03 \x01(\x02\x12\x0c\n\x04ymax\x18\x04 \x01(\x02\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x11\n\tdifficult\x18\x06 \x01(\x08\x12\r\n\x05score\x18\x07 \x01(\x02\x12\x0c\n\x04size\x18\x08 \x01(\x02\"I\n\nAnnotation\x12\x16\n\x0binstance_id\x18\x01 \x01(\x05:\x01\x30\x12#\n\x04\x62\x62ox\x18\x02 \x01(\x0b\x32\x15.caffe.NormalizedBBox\"M\n\x0f\x41nnotationGroup\x12\x13\n\x0bgroup_label\x18\x01 \x01(\x05\x12%\n\nannotation\x18\x02 \x03(\x0b\x32\x11.caffe.Annotation\"\xaf\x01\n\x0e\x41nnotatedDatum\x12\x1b\n\x05\x64\x61tum\x18\x01 \x01(\x0b\x32\x0c.caffe.Datum\x12\x32\n\x04type\x18\x02 \x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\x12\x30\n\x10\x61nnotation_group\x18\x03 \x03(\x0b\x32\x16.caffe.AnnotationGroup\"\x1a\n\x0e\x41nnotationType\x12\x08\n\x04\x42\x42OX\x10\x00\"C\n\tMTCNNBBox\x12\x0c\n\x04xmin\x18\x01 \x01(\x02\x12\x0c\n\x04ymin\x18\x02 \x01(\x02\x12\x0c\n\x04xmax\x18\x03 \x01(\x02\x12\x0c\n\x04ymax\x18\x04 \x01(\x02\"U\n\nMTCNNDatum\x12\x1b\n\x05\x64\x61tum\x18\x01 \x01(\x0b\x32\x0c.caffe.Datum\x12\x1d\n\x03roi\x18\x02 \x01(\x0b\x32\x10.caffe.MTCNNBBox\x12\x0b\n\x03pts\x18\x03 \x03(\x02\"\x98\x02\n\x0f\x46illerParameter\x12\x16\n\x04type\x18\x01 \x01(\t:\x08\x63onstant\x12\x10\n\x05value\x18\x02 \x01(\x02:\x01\x30\x12\x0e\n\x03min\x18\x03 \x01(\x02:\x01\x30\x12\x0e\n\x03max\x18\x04 \x01(\x02:\x01\x31\x12\x0f\n\x04mean\x18\x05 \x01(\x02:\x01\x30\x12\x0e\n\x03std\x18\x06 \x01(\x02:\x01\x31\x12\x12\n\x06sparse\x18\x07 \x01(\x05:\x02-1\x12\x42\n\rvariance_norm\x18\x08 \x01(\x0e\x32#.caffe.FillerParameter.VarianceNorm:\x06\x46\x41N_IN\x12\x0c\n\x04\x66ile\x18\t \x01(\t\"4\n\x0cVarianceNorm\x12\n\n\x06\x46\x41N_IN\x10\x00\x12\x0b\n\x07\x46\x41N_OUT\x10\x01\x12\x0b\n\x07\x41VERAGE\x10\x02\"\x8e\x02\n\x0cNetParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12%\n\x0binput_shape\x18\x08 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x11\n\tinput_dim\x18\x04 \x03(\x05\x12\x1d\n\x0e\x66orce_backward\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x05state\x18\x06 \x01(\x0b\x32\x0f.caffe.NetState\x12\x19\n\ndebug_info\x18\x07 \x01(\x08:\x05\x66\x61lse\x12$\n\x05layer\x18\x64 \x03(\x0b\x32\x15.caffe.LayerParameter\x12\'\n\x06layers\x18\x02 \x03(\x0b\x32\x17.caffe.V1LayerParameter\"\xc0\n\n\x0fSolverParameter\x12\x0b\n\x03net\x18\x18 \x01(\t\x12&\n\tnet_param\x18\x19 \x01(\x0b\x32\x13.caffe.NetParameter\x12\x11\n\ttrain_net\x18\x01 \x01(\t\x12\x10\n\x08test_net\x18\x02 \x03(\t\x12,\n\x0ftrain_net_param\x18\x15 \x01(\x0b\x32\x13.caffe.NetParameter\x12+\n\x0etest_net_param\x18\x16 \x03(\x0b\x32\x13.caffe.NetParameter\x12$\n\x0btrain_state\x18\x1a \x01(\x0b\x32\x0f.caffe.NetState\x12#\n\ntest_state\x18\x1b \x03(\x0b\x32\x0f.caffe.NetState\x12\x11\n\ttest_iter\x18\x03 \x03(\x05\x12\x18\n\rtest_interval\x18\x04 \x01(\x05:\x01\x30\x12 \n\x11test_compute_loss\x18\x13 \x01(\x08:\x05\x66\x61lse\x12!\n\x13test_initialization\x18 \x01(\x08:\x04true\x12\x0f\n\x07\x62\x61se_lr\x18\x05 \x01(\x02\x12\x0f\n\x07\x64isplay\x18\x06 \x01(\x05\x12\x17\n\x0c\x61verage_loss\x18! 
\x01(\x05:\x01\x31\x12\x10\n\x08max_iter\x18\x07 \x01(\x05\x12\x14\n\titer_size\x18$ \x01(\x05:\x01\x31\x12\x11\n\tlr_policy\x18\x08 \x01(\t\x12\r\n\x05gamma\x18\t \x01(\x02\x12\r\n\x05power\x18\n \x01(\x02\x12\x10\n\x08momentum\x18\x0b \x01(\x02\x12\x14\n\x0cweight_decay\x18\x0c \x01(\x02\x12\x1f\n\x13regularization_type\x18\x1d \x01(\t:\x02L2\x12\x10\n\x08stepsize\x18\r \x01(\x05\x12\x11\n\tstepvalue\x18\" \x03(\x05\x12\x0f\n\x07stagelr\x18\x32 \x03(\x02\x12\x11\n\tstageiter\x18\x33 \x03(\x05\x12\x1a\n\x0e\x63lip_gradients\x18# \x01(\x02:\x02-1\x12\x13\n\x08snapshot\x18\x0e \x01(\x05:\x01\x30\x12\x17\n\x0fsnapshot_prefix\x18\x0f \x01(\t\x12\x1c\n\rsnapshot_diff\x18\x10 \x01(\x08:\x05\x66\x61lse\x12K\n\x0fsnapshot_format\x18% \x01(\x0e\x32%.caffe.SolverParameter.SnapshotFormat:\x0b\x42INARYPROTO\x12;\n\x0bsolver_mode\x18\x11 \x01(\x0e\x32!.caffe.SolverParameter.SolverMode:\x03GPU\x12\x14\n\tdevice_id\x18\x12 \x01(\x05:\x01\x30\x12\x17\n\x0brandom_seed\x18\x14 \x01(\x03:\x02-1\x12\x11\n\x04type\x18( \x01(\t:\x03SGD\x12\x14\n\x05\x64\x65lta\x18\x1f \x01(\x02:\x05\x31\x65-08\x12\x18\n\tmomentum2\x18\' \x01(\x02:\x05\x30.999\x12\x11\n\trms_decay\x18& \x01(\x02\x12\x19\n\ndebug_info\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\"\n\x14snapshot_after_train\x18\x1c \x01(\x08:\x04true\x12;\n\x0bsolver_type\x18\x1e \x01(\x0e\x32!.caffe.SolverParameter.SolverType:\x03SGD\"+\n\x0eSnapshotFormat\x12\x08\n\x04HDF5\x10\x00\x12\x0f\n\x0b\x42INARYPROTO\x10\x01\"\x1e\n\nSolverMode\x12\x07\n\x03\x43PU\x10\x00\x12\x07\n\x03GPU\x10\x01\"U\n\nSolverType\x12\x07\n\x03SGD\x10\x00\x12\x0c\n\x08NESTEROV\x10\x01\x12\x0b\n\x07\x41\x44\x41GRAD\x10\x02\x12\x0b\n\x07RMSPROP\x10\x03\x12\x0c\n\x08\x41\x44\x41\x44\x45LTA\x10\x04\x12\x08\n\x04\x41\x44\x41M\x10\x05\"l\n\x0bSolverState\x12\x0c\n\x04iter\x18\x01 \x01(\x05\x12\x13\n\x0blearned_net\x18\x02 \x01(\t\x12!\n\x07history\x18\x03 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x17\n\x0c\x63urrent_step\x18\x04 \x01(\x05:\x01\x30\"N\n\x08NetState\x12!\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase:\x04TEST\x12\x10\n\x05level\x18\x02 \x01(\x05:\x01\x30\x12\r\n\x05stage\x18\x03 \x03(\t\"s\n\x0cNetStateRule\x12\x1b\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase\x12\x11\n\tmin_level\x18\x02 \x01(\x05\x12\x11\n\tmax_level\x18\x03 \x01(\x05\x12\r\n\x05stage\x18\x04 \x03(\t\x12\x11\n\tnot_stage\x18\x05 \x03(\t\"\x90\x02\n\x1bSpatialTransformerParameter\x12\x1e\n\x0etransform_type\x18\x01 \x01(\t:\x06\x61\x66\x66ine\x12\x1e\n\x0csampler_type\x18\x02 \x01(\t:\x08\x62ilinear\x12\x10\n\x08output_H\x18\x03 \x01(\x05\x12\x10\n\x08output_W\x18\x04 \x01(\x05\x12\x1b\n\rto_compute_dU\x18\x05 \x01(\x08:\x04true\x12\x11\n\ttheta_1_1\x18\x06 \x01(\x01\x12\x11\n\ttheta_1_2\x18\x07 \x01(\x01\x12\x11\n\ttheta_1_3\x18\x08 \x01(\x01\x12\x11\n\ttheta_2_1\x18\t \x01(\x01\x12\x11\n\ttheta_2_2\x18\n \x01(\x01\x12\x11\n\ttheta_2_3\x18\x0b \x01(\x01\"5\n\x0fSTLossParameter\x12\x10\n\x08output_H\x18\x01 \x02(\x05\x12\x10\n\x08output_W\x18\x02 \x02(\x05\"\xa3\x01\n\tParamSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\nshare_mode\x18\x02 \x01(\x0e\x32\x1d.caffe.ParamSpec.DimCheckMode\x12\x12\n\x07lr_mult\x18\x03 \x01(\x02:\x01\x31\x12\x15\n\ndecay_mult\x18\x04 \x01(\x02:\x01\x31\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\x95%\n\x0eLayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06\x62ottom\x18\x03 \x03(\t\x12\x0b\n\x03top\x18\x04 \x03(\t\x12\x1b\n\x05phase\x18\n \x01(\x0e\x32\x0c.caffe.Phase\x12\x13\n\x0bloss_weight\x18\x05 
\x03(\x02\x12\x1f\n\x05param\x18\x06 \x03(\x0b\x32\x10.caffe.ParamSpec\x12\x1f\n\x05\x62lobs\x18\x07 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x16\n\x0epropagate_down\x18\x0b \x03(\x08\x12$\n\x07include\x18\x08 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18\t \x03(\x0b\x32\x13.caffe.NetStateRule\x12\x37\n\x0ftransform_param\x18\x64 \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18\x65 \x01(\x0b\x32\x14.caffe.LossParameter\x12<\n\x14\x64\x65tection_loss_param\x18\xc8\x01 \x01(\x0b\x32\x1d.caffe.DetectionLossParameter\x12<\n\x14\x65val_detection_param\x18\xc9\x01 \x01(\x0b\x32\x1d.caffe.EvalDetectionParameter\x12\x36\n\x11region_loss_param\x18\xca\x01 \x01(\x0b\x32\x1a.caffe.RegionLossParameter\x12+\n\x0breorg_param\x18\xcb\x01 \x01(\x0b\x32\x15.caffe.ReorgParameter\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x66 \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18g \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12\x34\n\x10\x62\x61tch_norm_param\x18\x8b\x01 \x01(\x0b\x32\x19.caffe.BatchNormParameter\x12)\n\nbias_param\x18\x8d\x01 \x01(\x0b\x32\x14.caffe.BiasParameter\x12,\n\x0c\x63oncat_param\x18h \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18i \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18j \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12(\n\ndata_param\x18k \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18l \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18m \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18n \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12\'\n\telu_param\x18\x8c\x01 \x01(\x0b\x32\x13.caffe.ELUParameter\x12+\n\x0b\x65mbed_param\x18\x89\x01 \x01(\x0b\x32\x15.caffe.EmbedParameter\x12&\n\texp_param\x18o \x01(\x0b\x32\x13.caffe.ExpParameter\x12/\n\rflatten_param\x18\x87\x01 \x01(\x0b\x32\x17.caffe.FlattenParameter\x12\x31\n\x0fhdf5_data_param\x18p \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18q \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18r \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18s \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18t \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18u \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12+\n\x0binput_param\x18\x8f\x01 \x01(\x0b\x32\x15.caffe.InputParameter\x12\'\n\tlog_param\x18\x86\x01 \x01(\x0b\x32\x13.caffe.LogParameter\x12&\n\tlrn_param\x18v \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18w \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18x \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\rpooling_param\x18y \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18z \x01(\x0b\x32\x15.caffe.PowerParameter\x12+\n\x0bprelu_param\x18\x83\x01 \x01(\x0b\x32\x15.caffe.PReLUParameter\x12-\n\x0cpython_param\x18\x82\x01 \x01(\x0b\x32\x16.caffe.PythonParameter\x12\x33\n\x0frecurrent_param\x18\x92\x01 \x01(\x0b\x32\x19.caffe.RecurrentParameter\x12\x33\n\x0freduction_param\x18\x88\x01 \x01(\x0b\x32\x19.caffe.ReductionParameter\x12(\n\nrelu_param\x18{ \x01(\x0b\x32\x14.caffe.ReLUParameter\x12/\n\rreshape_param\x18\x85\x01 \x01(\x0b\x32\x17.caffe.ReshapeParameter\x12\x38\n\x11roi_pooling_param\x18\xd7\xc7\xf8\x03 \x01(\x0b\x32\x1a.caffe.ROIPoolingParameter\x12+\n\x0bscale_param\x18\x8e\x01 \x01(\x0b\x32\x15.caffe.ScaleParameter\x12.\n\rsigmoid_param\x18| 
\x01(\x0b\x32\x17.caffe.SigmoidParameter\x12=\n\x14smooth_l1_loss_param\x18\xd8\xc7\xf8\x03 \x01(\x0b\x32\x1c.caffe.SmoothL1LossParameter\x12.\n\rsoftmax_param\x18} \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12\'\n\tspp_param\x18\x84\x01 \x01(\x0b\x32\x13.caffe.SPPParameter\x12*\n\x0bslice_param\x18~ \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18\x7f \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x33\n\x0fthreshold_param\x18\x80\x01 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12)\n\ntile_param\x18\x8a\x01 \x01(\x0b\x32\x14.caffe.TileParameter\x12\x36\n\x11window_data_param\x18\x81\x01 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12\x35\n\x08st_param\x18\x94\x01 \x01(\x0b\x32\".caffe.SpatialTransformerParameter\x12.\n\rst_loss_param\x18\x91\x01 \x01(\x0b\x32\x16.caffe.STLossParameter\x12\'\n\trpn_param\x18\x96\x01 \x01(\x0b\x32\x13.caffe.RPNParameter\x12\x34\n\x10\x66ocal_loss_param\x18\x9b\x01 \x01(\x0b\x32\x19.caffe.FocalLossParameter\x12\x32\n\x0f\x61sdn_data_param\x18\x9f\x01 \x01(\x0b\x32\x18.caffe.AsdnDataParameter\x12%\n\x08\x62n_param\x18\xa0\x01 \x01(\x0b\x32\x12.caffe.BNParameter\x12\x34\n\x10mtcnn_data_param\x18\xa1\x01 \x01(\x0b\x32\x19.caffe.MTCNNDataParameter\x12-\n\x0cinterp_param\x18\xa2\x01 \x01(\x0b\x32\x16.caffe.InterpParameter\x12:\n\x13psroi_pooling_param\x18\xa3\x01 \x01(\x0b\x32\x1c.caffe.PSROIPoolingParameter\x12<\n\x14\x61nnotated_data_param\x18\xa4\x01 \x01(\x0b\x32\x1d.caffe.AnnotatedDataParameter\x12\x32\n\x0fprior_box_param\x18\xa5\x01 \x01(\x0b\x32\x18.caffe.PriorBoxParameter\x12)\n\ncrop_param\x18\xa7\x01 \x01(\x0b\x32\x14.caffe.CropParameter\x12\x44\n\x18\x64\x65tection_evaluate_param\x18\xa8\x01 \x01(\x0b\x32!.caffe.DetectionEvaluateParameter\x12@\n\x16\x64\x65tection_output_param\x18\xa9\x01 \x01(\x0b\x32\x1f.caffe.DetectionOutputParameter\x12:\n\x13multibox_loss_param\x18\xab\x01 \x01(\x0b\x32\x1c.caffe.MultiBoxLossParameter\x12/\n\rpermute_param\x18\xac\x01 \x01(\x0b\x32\x17.caffe.PermuteParameter\x12\x34\n\x10video_data_param\x18\xad\x01 \x01(\x0b\x32\x19.caffe.VideoDataParameter\x12G\n\x1amargin_inner_product_param\x18\xae\x01 \x01(\x0b\x32\".caffe.MarginInnerProductParameter\x12\x36\n\x11\x63\x65nter_loss_param\x18\xaf\x01 \x01(\x0b\x32\x1a.caffe.CenterLossParameter\x12L\n\x1c\x64\x65\x66ormable_convolution_param\x18\xb0\x01 \x01(\x0b\x32%.caffe.DeformableConvolutionParameter\x12\x43\n\x18label_specific_add_param\x18\xb1\x01 \x01(\x0b\x32 .caffe.LabelSpecificAddParameter\x12X\n#additive_margin_inner_product_param\x18\xb2\x01 \x01(\x0b\x32*.caffe.AdditiveMarginInnerProductParameter\x12\x35\n\x11\x63osin_add_m_param\x18\xb3\x01 \x01(\x0b\x32\x19.caffe.CosinAddmParameter\x12\x35\n\x11\x63osin_mul_m_param\x18\xb4\x01 \x01(\x0b\x32\x19.caffe.CosinMulmParameter\x12:\n\x13\x63hannel_scale_param\x18\xb5\x01 \x01(\x0b\x32\x1c.caffe.ChannelScaleParameter\x12)\n\nflip_param\x18\xb6\x01 \x01(\x0b\x32\x14.caffe.FlipParameter\x12\x38\n\x12triplet_loss_param\x18\xb7\x01 \x01(\x0b\x32\x1b.caffe.TripletLossParameter\x12G\n\x1a\x63oupled_cluster_loss_param\x18\xb8\x01 \x01(\x0b\x32\".caffe.CoupledClusterLossParameter\x12\x43\n\x1ageneral_triplet_loss_param\x18\xb9\x01 \x01(\x0b\x32\x1e.caffe.GeneralTripletParameter\x12\x32\n\x0froi_align_param\x18\xba\x01 \x01(\x0b\x32\x18.caffe.ROIAlignParameter\x12\x32\n\x0eupsample_param\x18\xa3\x8d\x06 \x01(\x0b\x32\x18.caffe.UpsampleParameter\x12.\n\x0cmatmul_param\x18\xa5\x8d\x06 \x01(\x0b\x32\x16.caffe.MatMulParameter\x12\x39\n\x12pass_through_param\x18\xa4\x8d\x06 
\x01(\x0b\x32\x1b.caffe.PassThroughParameter\x12/\n\nnorm_param\x18\xa1\x8d\x06 \x01(\x0b\x32\x19.caffe.NormalizeParameter\"\xa3\x01\n\x11UpsampleParameter\x12\x10\n\x05scale\x18\x01 \x01(\r:\x01\x32\x12\x0f\n\x07scale_h\x18\x02 \x01(\r\x12\x0f\n\x07scale_w\x18\x03 \x01(\r\x12\x18\n\tpad_out_h\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tpad_out_w\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x12\n\nupsample_h\x18\x06 \x01(\r\x12\x12\n\nupsample_w\x18\x07 \x01(\r\">\n\x0fMatMulParameter\x12\r\n\x05\x64im_1\x18\x01 \x01(\r\x12\r\n\x05\x64im_2\x18\x02 \x01(\r\x12\r\n\x05\x64im_3\x18\x03 \x01(\r\"^\n\x14PassThroughParameter\x12\x15\n\nnum_output\x18\x01 \x01(\r:\x01\x30\x12\x17\n\x0c\x62lock_height\x18\x02 \x01(\r:\x01\x30\x12\x16\n\x0b\x62lock_width\x18\x03 \x01(\r:\x01\x30\"\xa5\x01\n\x12NormalizeParameter\x12\x1c\n\x0e\x61\x63ross_spatial\x18\x01 \x01(\x08:\x04true\x12,\n\x0cscale_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1c\n\x0e\x63hannel_shared\x18\x03 \x01(\x08:\x04true\x12\x12\n\x03\x65ps\x18\x04 \x01(\x02:\x05\x31\x65-10\x12\x11\n\x06sqrt_a\x18\x05 \x01(\x02:\x01\x31\"\x95\x01\n\x16\x41nnotatedDataParameter\x12*\n\rbatch_sampler\x18\x01 \x03(\x0b\x32\x13.caffe.BatchSampler\x12\x16\n\x0elabel_map_file\x18\x02 \x01(\t\x12\x37\n\tanno_type\x18\x03 \x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\"\xab\x01\n\x11\x41sdnDataParameter\x12\x16\n\ncount_drop\x18\x01 \x01(\x05:\x02\x31\x35\x12\x19\n\rpermute_count\x18\x02 \x01(\x05:\x02\x32\x30\x12\x19\n\x0e\x63ount_drop_neg\x18\x03 \x01(\x05:\x01\x30\x12\x16\n\x08\x63hannels\x18\x04 \x01(\x05:\x04\x31\x30\x32\x34\x12\x14\n\titer_size\x18\x05 \x01(\x05:\x01\x32\x12\x1a\n\x0fmaintain_before\x18\x06 \x01(\x05:\x01\x31\"\x80\x02\n\x12MTCNNDataParameter\x12\x17\n\taugmented\x18\x01 \x01(\x08:\x04true\x12\x12\n\x04\x66lip\x18\x02 \x01(\x08:\x04true\x12\x18\n\x0cnum_positive\x18\x03 \x01(\x05:\x02-1\x12\x18\n\x0cnum_negitive\x18\x04 \x01(\x05:\x02-1\x12\x14\n\x08num_part\x18\x05 \x01(\x05:\x02-1\x12\x17\n\x0cresize_width\x18\x06 \x01(\r:\x01\x30\x12\x18\n\rresize_height\x18\x07 \x01(\r:\x01\x30\x12\x1f\n\x12min_negitive_scale\x18\x08 \x01(\x02:\x03\x30.5\x12\x1f\n\x12max_negitive_scale\x18\t \x01(\x02:\x03\x31.5\"\x90\x01\n\x0fInterpParameter\x12\x11\n\x06height\x18\x01 \x01(\x05:\x01\x30\x12\x10\n\x05width\x18\x02 \x01(\x05:\x01\x30\x12\x16\n\x0bzoom_factor\x18\x03 \x01(\x05:\x01\x31\x12\x18\n\rshrink_factor\x18\x04 \x01(\x05:\x01\x31\x12\x12\n\x07pad_beg\x18\x05 \x01(\x05:\x01\x30\x12\x12\n\x07pad_end\x18\x06 \x01(\x05:\x01\x30\"V\n\x15PSROIPoolingParameter\x12\x15\n\rspatial_scale\x18\x01 \x02(\x02\x12\x12\n\noutput_dim\x18\x02 \x02(\x05\x12\x12\n\ngroup_size\x18\x03 \x02(\x05\"E\n\rFlipParameter\x12\x18\n\nflip_width\x18\x01 \x01(\x08:\x04true\x12\x1a\n\x0b\x66lip_height\x18\x02 \x01(\x08:\x05\x66\x61lse\"\x8b\x02\n\x0b\x42NParameter\x12,\n\x0cslope_filler\x18\x01 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x15\n\x08momentum\x18\x03 \x01(\x02:\x03\x30.9\x12\x12\n\x03\x65ps\x18\x04 \x01(\x02:\x05\x31\x65-05\x12\x15\n\x06\x66rozen\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x32\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x19.caffe.BNParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"\xa2\x01\n\x12\x46ocalLossParameter\x12\x34\n\x04type\x18\x01 \x01(\x0e\x32\x1e.caffe.FocalLossParameter.Type:\x06ORIGIN\x12\x10\n\x05gamma\x18\x02 
\x01(\x02:\x01\x32\x12\x13\n\x05\x61lpha\x18\x03 \x01(\x02:\x04\x30.25\x12\x0f\n\x04\x62\x65ta\x18\x04 \x01(\x02:\x01\x31\"\x1e\n\x04Type\x12\n\n\x06ORIGIN\x10\x00\x12\n\n\x06LINEAR\x10\x01\"\xca\x03\n\x17TransformationParameter\x12\x10\n\x05scale\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\x06mirror\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x14\n\tcrop_size\x18\x03 \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_h\x18\x0b \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_w\x18\x0c \x01(\r:\x01\x30\x12\x11\n\tmean_file\x18\x04 \x01(\t\x12\x12\n\nmean_value\x18\x05 \x03(\x02\x12\x1a\n\x0b\x66orce_color\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nforce_gray\x18\x07 \x01(\x08:\x05\x66\x61lse\x12,\n\x0cresize_param\x18\x08 \x01(\x0b\x32\x16.caffe.ResizeParameter\x12*\n\x0bnoise_param\x18\t \x01(\x0b\x32\x15.caffe.NoiseParameter\x12\x31\n\rdistort_param\x18\r \x01(\x0b\x32\x1a.caffe.DistortionParameter\x12/\n\x0c\x65xpand_param\x18\x0e \x01(\x0b\x32\x19.caffe.ExpansionParameter\x12.\n\x0f\x65mit_constraint\x18\n \x01(\x0b\x32\x15.caffe.EmitConstraint\"\x90\x04\n\x0fResizeParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12=\n\x0bresize_mode\x18\x02 \x01(\x0e\x32\".caffe.ResizeParameter.Resize_mode:\x04WARP\x12\x11\n\x06height\x18\x03 \x01(\r:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\r:\x01\x30\x12\x17\n\x0cheight_scale\x18\x08 \x01(\r:\x01\x30\x12\x16\n\x0bwidth_scale\x18\t \x01(\r:\x01\x30\x12;\n\x08pad_mode\x18\x05 \x01(\x0e\x32\x1f.caffe.ResizeParameter.Pad_mode:\x08\x43ONSTANT\x12\x11\n\tpad_value\x18\x06 \x03(\x02\x12\x37\n\x0binterp_mode\x18\x07 \x03(\x0e\x32\".caffe.ResizeParameter.Interp_mode\"G\n\x0bResize_mode\x12\x08\n\x04WARP\x10\x01\x12\x12\n\x0e\x46IT_SMALL_SIZE\x10\x02\x12\x1a\n\x16\x46IT_LARGE_SIZE_AND_PAD\x10\x03\":\n\x08Pad_mode\x12\x0c\n\x08\x43ONSTANT\x10\x01\x12\x0c\n\x08MIRRORED\x10\x02\x12\x12\n\x0eREPEAT_NEAREST\x10\x03\"I\n\x0bInterp_mode\x12\n\n\x06LINEAR\x10\x01\x12\x08\n\x04\x41REA\x10\x02\x12\x0b\n\x07NEAREST\x10\x03\x12\t\n\x05\x43UBIC\x10\x04\x12\x0c\n\x08LANCZOS4\x10\x05\"9\n\x13SaltPepperParameter\x12\x13\n\x08\x66raction\x18\x01 \x01(\x02:\x01\x30\x12\r\n\x05value\x18\x02 \x03(\x02\"\xee\x02\n\x0eNoiseParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x30\x12\x16\n\x07hist_eq\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x16\n\x07inverse\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndecolorize\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ngauss_blur\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x04jpeg\x18\x06 \x01(\x02:\x02-1\x12\x18\n\tposterize\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x05\x65rode\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nsaltpepper\x18\t \x01(\x08:\x05\x66\x61lse\x12\x34\n\x10saltpepper_param\x18\n \x01(\x0b\x32\x1a.caffe.SaltPepperParameter\x12\x14\n\x05\x63lahe\x18\x0b \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_hsv\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_lab\x18\r \x01(\x08:\x05\x66\x61lse\"\xbd\x02\n\x13\x44istortionParameter\x12\x1a\n\x0f\x62rightness_prob\x18\x01 \x01(\x02:\x01\x30\x12\x1b\n\x10\x62rightness_delta\x18\x02 \x01(\x02:\x01\x30\x12\x18\n\rcontrast_prob\x18\x03 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_lower\x18\x04 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_upper\x18\x05 \x01(\x02:\x01\x30\x12\x13\n\x08hue_prob\x18\x06 \x01(\x02:\x01\x30\x12\x14\n\thue_delta\x18\x07 \x01(\x02:\x01\x30\x12\x1a\n\x0fsaturation_prob\x18\x08 \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_lower\x18\t \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_upper\x18\n \x01(\x02:\x01\x30\x12\x1c\n\x11random_order_prob\x18\x0b 
\x01(\x02:\x01\x30\"B\n\x12\x45xpansionParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12\x1b\n\x10max_expand_ratio\x18\x02 \x01(\x02:\x01\x31\"\xc2\x01\n\rLossParameter\x12\x14\n\x0cignore_label\x18\x01 \x01(\x05\x12\x44\n\rnormalization\x18\x03 \x01(\x0e\x32&.caffe.LossParameter.NormalizationMode:\x05VALID\x12\x11\n\tnormalize\x18\x02 \x01(\x08\"B\n\x11NormalizationMode\x12\x08\n\x04\x46ULL\x10\x00\x12\t\n\x05VALID\x10\x01\x12\x0e\n\nBATCH_SIZE\x10\x02\x12\x08\n\x04NONE\x10\x03\"L\n\x11\x41\x63\x63uracyParameter\x12\x10\n\x05top_k\x18\x01 \x01(\r:\x01\x31\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x14\n\x0cignore_label\x18\x03 \x01(\x05\"M\n\x0f\x41rgMaxParameter\x12\x1a\n\x0bout_max_val\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x05top_k\x18\x02 \x01(\r:\x01\x31\x12\x0c\n\x04\x61xis\x18\x03 \x01(\x05\"9\n\x0f\x43oncatParameter\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x15\n\nconcat_dim\x18\x01 \x01(\r:\x01\x31\"j\n\x12\x42\x61tchNormParameter\x12\x18\n\x10use_global_stats\x18\x01 \x01(\x08\x12&\n\x17moving_average_fraction\x18\x02 \x01(\x02:\x05\x30.999\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-05\"]\n\rBiasParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\"L\n\x18\x43ontrastiveLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x1d\n\x0elegacy_version\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xec\x01\n\x16\x44\x65tectionLossParameter\x12\x0f\n\x04side\x18\x01 \x01(\r:\x01\x37\x12\x15\n\tnum_class\x18\x02 \x01(\r:\x02\x32\x30\x12\x15\n\nnum_object\x18\x03 \x01(\r:\x01\x32\x12\x17\n\x0cobject_scale\x18\x04 \x01(\x02:\x01\x31\x12\x1b\n\x0enoobject_scale\x18\x05 \x01(\x02:\x03\x30.5\x12\x16\n\x0b\x63lass_scale\x18\x06 \x01(\x02:\x01\x31\x12\x16\n\x0b\x63oord_scale\x18\x07 \x01(\x02:\x01\x35\x12\x12\n\x04sqrt\x18\x08 \x01(\x08:\x04true\x12\x19\n\nconstriant\x18\t \x01(\x08:\x05\x66\x61lse\"\x91\x03\n\x13RegionLossParameter\x12\x10\n\x04side\x18\x01 \x01(\r:\x02\x31\x33\x12\x15\n\tnum_class\x18\x02 \x01(\r:\x02\x32\x30\x12\x15\n\nbias_match\x18\x03 \x01(\r:\x01\x31\x12\x11\n\x06\x63oords\x18\x04 \x01(\r:\x01\x34\x12\x0e\n\x03num\x18\x05 \x01(\r:\x01\x35\x12\x12\n\x07softmax\x18\x06 \x01(\r:\x01\x31\x12\x13\n\x06jitter\x18\x07 \x01(\x02:\x03\x30.2\x12\x12\n\x07rescore\x18\x08 \x01(\r:\x01\x31\x12\x17\n\x0cobject_scale\x18\t \x01(\x02:\x01\x31\x12\x16\n\x0b\x63lass_scale\x18\n \x01(\x02:\x01\x31\x12\x1b\n\x0enoobject_scale\x18\x0b \x01(\x02:\x03\x30.5\x12\x16\n\x0b\x63oord_scale\x18\x0c \x01(\x02:\x01\x35\x12\x13\n\x08\x61\x62solute\x18\r \x01(\r:\x01\x31\x12\x13\n\x06thresh\x18\x0e \x01(\x02:\x03\x30.2\x12\x11\n\x06random\x18\x0f \x01(\r:\x01\x31\x12\x0e\n\x06\x62iases\x18\x10 \x03(\x02\x12\x14\n\x0csoftmax_tree\x18\x11 \x01(\t\x12\x11\n\tclass_map\x18\x12 \x01(\t\"8\n\x0eReorgParameter\x12\x0e\n\x06stride\x18\x01 \x01(\r\x12\x16\n\x07reverse\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xb3\x02\n\x16\x45valDetectionParameter\x12\x0f\n\x04side\x18\x01 \x01(\r:\x01\x37\x12\x15\n\tnum_class\x18\x02 \x01(\r:\x02\x32\x30\x12\x15\n\nnum_object\x18\x03 \x01(\r:\x01\x32\x12\x16\n\tthreshold\x18\x04 \x01(\x02:\x03\x30.5\x12\x12\n\x04sqrt\x18\x05 \x01(\x08:\x04true\x12\x18\n\nconstriant\x18\x06 \x01(\x08:\x04true\x12\x45\n\nscore_type\x18\x07 \x01(\x0e\x32\'.caffe.EvalDetectionParameter.ScoreType:\x08MULTIPLY\x12\x0f\n\x03nms\x18\x08 \x01(\x02:\x02-1\x12\x0e\n\x06\x62iases\x18\t 
\x03(\x02\",\n\tScoreType\x12\x07\n\x03OBJ\x10\x00\x12\x08\n\x04PROB\x10\x01\x12\x0c\n\x08MULTIPLY\x10\x02\"\xfc\x03\n\x14\x43onvolutionParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12\x0b\n\x03pad\x18\x03 \x03(\r\x12\x13\n\x0bkernel_size\x18\x04 \x03(\r\x12\x0e\n\x06stride\x18\x06 \x03(\r\x12\x10\n\x08\x64ilation\x18\x12 \x03(\r\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x10\n\x08kernel_h\x18\x0b \x01(\r\x12\x10\n\x08kernel_w\x18\x0c \x01(\r\x12\x10\n\x08stride_h\x18\r \x01(\r\x12\x10\n\x08stride_w\x18\x0e \x01(\r\x12\x10\n\x05group\x18\x05 \x01(\r:\x01\x31\x12-\n\rweight_filler\x18\x07 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x08 \x01(\x0b\x32\x16.caffe.FillerParameter\x12;\n\x06\x65ngine\x18\x0f \x01(\x0e\x32\".caffe.ConvolutionParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x10 \x01(\x05:\x01\x31\x12\x1e\n\x0f\x66orce_nd_im2col\x18\x11 \x01(\x08:\x05\x66\x61lse\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"0\n\rCropParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x32\x12\x0e\n\x06offset\x18\x02 \x03(\r\"\xb2\x02\n\rDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x31\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0e\x32\x17.caffe.DataParameter.DB:\x07LEVELDB\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x66orce_encoded_color\x18\t \x01(\x08:\x05\x66\x61lse\x12\x13\n\x08prefetch\x18\n \x01(\r:\x01\x34\x12\x0c\n\x04side\x18\x0b \x03(\r\"\x1b\n\x02\x44\x42\x12\x0b\n\x07LEVELDB\x10\x00\x12\x08\n\x04LMDB\x10\x01\"\xdc\x01\n\x1a\x44\x65tectionEvaluateParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x02 \x01(\r:\x01\x30\x12\x1e\n\x11overlap_threshold\x18\x03 \x01(\x02:\x03\x30.5\x12#\n\x15\x65valuate_difficult_gt\x18\x04 \x01(\x08:\x04true\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12,\n\x0cresize_param\x18\x06 \x01(\x0b\x32\x16.caffe.ResizeParameter\"[\n\x1eNonMaximumSuppressionParameter\x12\x1a\n\rnms_threshold\x18\x01 \x01(\x02:\x03\x30.3\x12\r\n\x05top_k\x18\x02 \x01(\x05\x12\x0e\n\x03\x65ta\x18\x03 \x01(\x02:\x01\x31\"\xd8\x01\n\x13SaveOutputParameter\x12\x18\n\x10output_directory\x18\x01 \x01(\t\x12\x1a\n\x12output_name_prefix\x18\x02 \x01(\t\x12\x15\n\routput_format\x18\x03 \x01(\t\x12\x16\n\x0elabel_map_file\x18\x04 \x01(\t\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12\x16\n\x0enum_test_image\x18\x06 \x01(\r\x12,\n\x0cresize_param\x18\x07 \x01(\x0b\x32\x16.caffe.ResizeParameter\"\xc7\x03\n\x18\x44\x65tectionOutputParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1c\n\x0eshare_location\x18\x02 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x03 \x01(\x05:\x01\x30\x12\x38\n\tnms_param\x18\x04 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x35\n\x11save_output_param\x18\x05 \x01(\x0b\x32\x1a.caffe.SaveOutputParameter\x12<\n\tcode_type\x18\x06 \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12)\n\x1avariance_encoded_in_target\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x16\n\nkeep_top_k\x18\x07 \x01(\x05:\x02-1\x12\x1c\n\x14\x63onfidence_threshold\x18\t \x01(\x02\x12\x18\n\tvisualize\x18\n 
\x01(\x08:\x05\x66\x61lse\x12\x1b\n\x13visualize_threshold\x18\x0b \x01(\x02\x12\x11\n\tsave_file\x18\x0c \x01(\t\"I\n\x10\x44ropoutParameter\x12\x1a\n\rdropout_ratio\x18\x01 \x01(\x02:\x03\x30.5\x12\x19\n\x0bscale_train\x18\x02 \x01(\x08:\x04true\"\xa0\x01\n\x12\x44ummyDataParameter\x12+\n\x0b\x64\x61ta_filler\x18\x01 \x03(\x0b\x32\x16.caffe.FillerParameter\x12\x1f\n\x05shape\x18\x06 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x0b\n\x03num\x18\x02 \x03(\r\x12\x10\n\x08\x63hannels\x18\x03 \x03(\r\x12\x0e\n\x06height\x18\x04 \x03(\r\x12\r\n\x05width\x18\x05 \x03(\r\"\xa5\x01\n\x10\x45ltwiseParameter\x12\x39\n\toperation\x18\x01 \x01(\x0e\x32!.caffe.EltwiseParameter.EltwiseOp:\x03SUM\x12\r\n\x05\x63oeff\x18\x02 \x03(\x02\x12\x1e\n\x10stable_prod_grad\x18\x03 \x01(\x08:\x04true\"\'\n\tEltwiseOp\x12\x08\n\x04PROD\x10\x00\x12\x07\n\x03SUM\x10\x01\x12\x07\n\x03MAX\x10\x02\" \n\x0c\x45LUParameter\x12\x10\n\x05\x61lpha\x18\x01 \x01(\x02:\x01\x31\"\xac\x01\n\x0e\x45mbedParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x11\n\tinput_dim\x18\x02 \x01(\r\x12\x17\n\tbias_term\x18\x03 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"D\n\x0c\x45xpParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"9\n\x10\x46lattenParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x14\n\x08\x65nd_axis\x18\x02 \x01(\x05:\x02-1\"O\n\x11HDF5DataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x02 \x01(\r\x12\x16\n\x07shuffle\x18\x03 \x01(\x08:\x05\x66\x61lse\"(\n\x13HDF5OutputParameter\x12\x11\n\tfile_name\x18\x01 \x01(\t\"^\n\x12HingeLossParameter\x12\x30\n\x04norm\x18\x01 \x01(\x0e\x32\x1e.caffe.HingeLossParameter.Norm:\x02L1\"\x16\n\x04Norm\x12\x06\n\x02L1\x10\x01\x12\x06\n\x02L2\x10\x02\"\x97\x02\n\x12ImageDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x15\n\nbatch_size\x18\x04 \x01(\r:\x01\x31\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x16\n\x07shuffle\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x15\n\nnew_height\x18\t \x01(\r:\x01\x30\x12\x14\n\tnew_width\x18\n \x01(\r:\x01\x30\x12\x16\n\x08is_color\x18\x0b \x01(\x08:\x04true\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\x0c \x01(\t:\x00\"\'\n\x15InfogainLossParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\"\xe5\x01\n\x15InnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x05 \x01(\x05:\x01\x31\x12\x18\n\ttranspose\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tnormalize\x18\x07 \x01(\x08:\x05\x66\x61lse\"1\n\x0eInputParameter\x12\x1f\n\x05shape\x18\x01 \x03(\x0b\x32\x10.caffe.BlobShape\"D\n\x0cLogParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb8\x02\n\x0cLRNParameter\x12\x15\n\nlocal_size\x18\x01 \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x02 \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x03 \x01(\x02:\x04\x30.75\x12\x44\n\x0bnorm_region\x18\x04 
\x01(\x0e\x32\x1e.caffe.LRNParameter.NormRegion:\x0f\x41\x43ROSS_CHANNELS\x12\x0c\n\x01k\x18\x05 \x01(\x02:\x01\x31\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.LRNParameter.Engine:\x07\x44\x45\x46\x41ULT\"5\n\nNormRegion\x12\x13\n\x0f\x41\x43ROSS_CHANNELS\x10\x00\x12\x12\n\x0eWITHIN_CHANNEL\x10\x01\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x13MemoryDataParameter\x12\x12\n\nbatch_size\x18\x01 \x01(\r\x12\x10\n\x08\x63hannels\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\r\n\x05width\x18\x04 \x01(\r\"\xe8\x08\n\x15MultiBoxLossParameter\x12J\n\rloc_loss_type\x18\x01 \x01(\x0e\x32(.caffe.MultiBoxLossParameter.LocLossType:\tSMOOTH_L1\x12J\n\x0e\x63onf_loss_type\x18\x02 \x01(\x0e\x32).caffe.MultiBoxLossParameter.ConfLossType:\x07SOFTMAX\x12\x15\n\nloc_weight\x18\x03 \x01(\x02:\x01\x31\x12\x13\n\x0bnum_classes\x18\x04 \x01(\r\x12\x1c\n\x0eshare_location\x18\x05 \x01(\x08:\x04true\x12J\n\nmatch_type\x18\x06 \x01(\x0e\x32&.caffe.MultiBoxLossParameter.MatchType:\x0ePER_PREDICTION\x12\x1e\n\x11overlap_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12$\n\x16use_prior_for_matching\x18\x08 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\t \x01(\r:\x01\x30\x12\x1e\n\x10use_difficult_gt\x18\n \x01(\x08:\x04true\x12\x15\n\rdo_neg_mining\x18\x0b \x01(\x08\x12\x18\n\rneg_pos_ratio\x18\x0c \x01(\x02:\x01\x33\x12\x18\n\x0bneg_overlap\x18\r \x01(\x02:\x03\x30.5\x12<\n\tcode_type\x18\x0e \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12(\n\x19\x65ncode_variance_in_target\x18\x10 \x01(\x08:\x05\x66\x61lse\x12%\n\x16map_object_to_agnostic\x18\x11 \x01(\x08:\x05\x66\x61lse\x12)\n\x1aignore_cross_boundary_bbox\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tbp_inside\x18\x13 \x01(\x08:\x05\x66\x61lse\x12J\n\x0bmining_type\x18\x14 \x01(\x0e\x32\'.caffe.MultiBoxLossParameter.MiningType:\x0cMAX_NEGATIVE\x12\x38\n\tnms_param\x18\x15 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x17\n\x0bsample_size\x18\x16 \x01(\x05:\x02\x36\x34\x12 \n\x11use_prior_for_nms\x18\x17 \x01(\x08:\x05\x66\x61lse\"$\n\x0bLocLossType\x12\x06\n\x02L2\x10\x00\x12\r\n\tSMOOTH_L1\x10\x01\")\n\x0c\x43onfLossType\x12\x0b\n\x07SOFTMAX\x10\x00\x12\x0c\n\x08LOGISTIC\x10\x01\".\n\tMatchType\x12\r\n\tBIPARTITE\x10\x00\x12\x12\n\x0ePER_PREDICTION\x10\x01\":\n\nMiningType\x12\x08\n\x04NONE\x10\x00\x12\x10\n\x0cMAX_NEGATIVE\x10\x01\x12\x10\n\x0cHARD_EXAMPLE\x10\x02\"!\n\x10PermuteParameter\x12\r\n\x05order\x18\x01 \x03(\r\"d\n\x0cMVNParameter\x12 \n\x12normalize_variance\x18\x01 \x01(\x08:\x04true\x12\x1e\n\x0f\x61\x63ross_channels\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-09\"5\n\x12ParameterParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\"\xbb\x03\n\x10PoolingParameter\x12\x35\n\x04pool\x18\x01 \x01(\x0e\x32\".caffe.PoolingParameter.PoolMethod:\x03MAX\x12\x0e\n\x03pad\x18\x04 \x01(\r:\x01\x30\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x13\n\x0bkernel_size\x18\x02 \x01(\r\x12\x10\n\x08kernel_h\x18\x05 \x01(\r\x12\x10\n\x08kernel_w\x18\x06 \x01(\r\x12\x11\n\x06stride\x18\x03 \x01(\r:\x01\x31\x12\x10\n\x08stride_h\x18\x07 \x01(\r\x12\x10\n\x08stride_w\x18\x08 \x01(\r\x12\x37\n\x06\x65ngine\x18\x0b \x01(\x0e\x32\x1e.caffe.PoolingParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x1d\n\x0eglobal_pooling\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x17\n\tceil_mode\x18\r 
\x01(\x08:\x04true\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"F\n\x0ePowerParameter\x12\x10\n\x05power\x18\x01 \x01(\x02:\x01\x31\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb5\x02\n\x11PriorBoxParameter\x12\x10\n\x08min_size\x18\x01 \x03(\x02\x12\x10\n\x08max_size\x18\x02 \x03(\x02\x12\x14\n\x0c\x61spect_ratio\x18\x03 \x03(\x02\x12\x12\n\x04\x66lip\x18\x04 \x01(\x08:\x04true\x12\x13\n\x04\x63lip\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x08variance\x18\x06 \x03(\x02\x12\x10\n\x08img_size\x18\x07 \x01(\r\x12\r\n\x05img_h\x18\x08 \x01(\r\x12\r\n\x05img_w\x18\t \x01(\r\x12\x0c\n\x04step\x18\n \x01(\x02\x12\x0e\n\x06step_h\x18\x0b \x01(\x02\x12\x0e\n\x06step_w\x18\x0c \x01(\x02\x12\x13\n\x06offset\x18\r \x01(\x02:\x03\x30.5\"8\n\x08\x43odeType\x12\n\n\x06\x43ORNER\x10\x01\x12\x0f\n\x0b\x43\x45NTER_SIZE\x10\x02\x12\x0f\n\x0b\x43ORNER_SIZE\x10\x03\"g\n\x0fPythonParameter\x12\x0e\n\x06module\x18\x01 \x01(\t\x12\r\n\x05layer\x18\x02 \x01(\t\x12\x13\n\tparam_str\x18\x03 \x01(\t:\x00\x12 \n\x11share_in_parallel\x18\x04 \x01(\x08:\x05\x66\x61lse\"\xc0\x01\n\x12RecurrentParameter\x12\x15\n\nnum_output\x18\x01 \x01(\r:\x01\x30\x12-\n\rweight_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x19\n\ndebug_info\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\rexpose_hidden\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xad\x01\n\x12ReductionParameter\x12=\n\toperation\x18\x01 \x01(\x0e\x32%.caffe.ReductionParameter.ReductionOp:\x03SUM\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x10\n\x05\x63oeff\x18\x03 \x01(\x02:\x01\x31\"5\n\x0bReductionOp\x12\x07\n\x03SUM\x10\x01\x12\x08\n\x04\x41SUM\x10\x02\x12\t\n\x05SUMSQ\x10\x03\x12\x08\n\x04MEAN\x10\x04\"\x8d\x01\n\rReLUParameter\x12\x19\n\x0enegative_slope\x18\x01 \x01(\x02:\x01\x30\x12\x34\n\x06\x65ngine\x18\x02 \x01(\x0e\x32\x1b.caffe.ReLUParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x10ReshapeParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x14\n\x08num_axes\x18\x03 \x01(\x05:\x02-1\"Y\n\x13ROIPoolingParameter\x12\x13\n\x08pooled_h\x18\x01 \x01(\r:\x01\x30\x12\x13\n\x08pooled_w\x18\x02 \x01(\r:\x01\x30\x12\x18\n\rspatial_scale\x18\x03 \x01(\x02:\x01\x31\"\xcb\x01\n\x0eScaleParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x18\n\tbias_term\x18\x04 \x01(\x08:\x05\x66\x61lse\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x11\n\tmin_value\x18\x06 \x01(\x02\x12\x11\n\tmax_value\x18\x07 \x01(\x02\"x\n\x10SigmoidParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SigmoidParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\")\n\x15SmoothL1LossParameter\x12\x10\n\x05sigma\x18\x01 \x01(\x02:\x01\x31\"L\n\x0eSliceParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\x12\x13\n\x0bslice_point\x18\x02 \x03(\r\x12\x14\n\tslice_dim\x18\x01 
\x01(\r:\x01\x31\"\x89\x01\n\x10SoftmaxParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SoftmaxParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"r\n\rTanHParameter\x12\x34\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1b.caffe.TanHParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"/\n\rTileParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\r\n\x05tiles\x18\x02 \x01(\x05\"*\n\x12ThresholdParameter\x12\x14\n\tthreshold\x18\x01 \x01(\x02:\x01\x30\"\xc1\x02\n\x13WindowDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x0c\x66g_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12\x19\n\x0c\x62g_threshold\x18\x08 \x01(\x02:\x03\x30.5\x12\x19\n\x0b\x66g_fraction\x18\t \x01(\x02:\x04\x30.25\x12\x16\n\x0b\x63ontext_pad\x18\n \x01(\r:\x01\x30\x12\x17\n\tcrop_mode\x18\x0b \x01(\t:\x04warp\x12\x1b\n\x0c\x63\x61\x63he_images\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\r \x01(\t:\x00\"\xeb\x01\n\x0cSPPParameter\x12\x16\n\x0epyramid_height\x18\x01 \x01(\r\x12\x31\n\x04pool\x18\x02 \x01(\x0e\x32\x1e.caffe.SPPParameter.PoolMethod:\x03MAX\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.SPPParameter.Engine:\x07\x44\x45\x46\x41ULT\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"\xdc\x14\n\x10V1LayerParameter\x12\x0e\n\x06\x62ottom\x18\x02 \x03(\t\x12\x0b\n\x03top\x18\x03 \x03(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12$\n\x07include\x18 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18! 
\x03(\x0b\x32\x13.caffe.NetStateRule\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.caffe.V1LayerParameter.LayerType\x12\x1f\n\x05\x62lobs\x18\x06 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x0e\n\x05param\x18\xe9\x07 \x03(\t\x12>\n\x0f\x62lob_share_mode\x18\xea\x07 \x03(\x0e\x32$.caffe.V1LayerParameter.DimCheckMode\x12\x10\n\x08\x62lobs_lr\x18\x07 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x08 \x03(\x02\x12\x13\n\x0bloss_weight\x18# \x03(\x02\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x1b \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18\x17 \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12,\n\x0c\x63oncat_param\x18\t \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18( \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18\n \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12(\n\ndata_param\x18\x0b \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18\x0c \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18\x1a \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18\x18 \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12&\n\texp_param\x18) \x01(\x0b\x32\x13.caffe.ExpParameter\x12\x31\n\x0fhdf5_data_param\x18\r \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18\x0e \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18\x1d \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18\x0f \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18\x10 \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18\x11 \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12&\n\tlrn_param\x18\x12 \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18\x16 \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18\" \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\rpooling_param\x18\x13 \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18\x15 \x01(\x0b\x32\x15.caffe.PowerParameter\x12(\n\nrelu_param\x18\x1e \x01(\x0b\x32\x14.caffe.ReLUParameter\x12.\n\rsigmoid_param\x18& \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18\' \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12*\n\x0bslice_param\x18\x1f \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18% \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x32\n\x0fthreshold_param\x18\x19 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12\x35\n\x11window_data_param\x18\x14 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12\x37\n\x0ftransform_param\x18$ \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18* \x01(\x0b\x32\x14.caffe.LossParameter\x12<\n\x14\x64\x65tection_loss_param\x18\xc8\x01 \x01(\x0b\x32\x1d.caffe.DetectionLossParameter\x12<\n\x14\x65val_detection_param\x18\xc9\x01 \x01(\x0b\x32\x1d.caffe.EvalDetectionParameter\x12&\n\x05layer\x18\x01 \x01(\x0b\x32\x17.caffe.V0LayerParameter\"\xd8\x04\n\tLayerType\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06\x41\x42SVAL\x10#\x12\x0c\n\x08\x41\x43\x43URACY\x10\x01\x12\n\n\x06\x41RGMAX\x10\x1e\x12\x08\n\x04\x42NLL\x10\x02\x12\n\n\x06\x43ONCAT\x10\x03\x12\x14\n\x10\x43ONTRASTIVE_LOSS\x10%\x12\x0f\n\x0b\x43ONVOLUTION\x10\x04\x12\x08\n\x04\x44\x41TA\x10\x05\x12\x11\n\rDECONVOLUTION\x10\'\x12\x0b\n\x07\x44ROPOUT\x10\x06\x12\x0e\n\nDUMMY_DATA\x10 
\x12\x12\n\x0e\x45UCLIDEAN_LOSS\x10\x07\x12\x0b\n\x07\x45LTWISE\x10\x19\x12\x07\n\x03\x45XP\x10&\x12\x0b\n\x07\x46LATTEN\x10\x08\x12\r\n\tHDF5_DATA\x10\t\x12\x0f\n\x0bHDF5_OUTPUT\x10\n\x12\x0e\n\nHINGE_LOSS\x10\x1c\x12\n\n\x06IM2COL\x10\x0b\x12\x0e\n\nIMAGE_DATA\x10\x0c\x12\x11\n\rINFOGAIN_LOSS\x10\r\x12\x11\n\rINNER_PRODUCT\x10\x0e\x12\x07\n\x03LRN\x10\x0f\x12\x0f\n\x0bMEMORY_DATA\x10\x1d\x12\x1d\n\x19MULTINOMIAL_LOGISTIC_LOSS\x10\x10\x12\x07\n\x03MVN\x10\"\x12\x0b\n\x07POOLING\x10\x11\x12\t\n\x05POWER\x10\x1a\x12\x08\n\x04RELU\x10\x12\x12\x0b\n\x07SIGMOID\x10\x13\x12\x1e\n\x1aSIGMOID_CROSS_ENTROPY_LOSS\x10\x1b\x12\x0b\n\x07SILENCE\x10$\x12\x0b\n\x07SOFTMAX\x10\x14\x12\x10\n\x0cSOFTMAX_LOSS\x10\x15\x12\t\n\x05SPLIT\x10\x16\x12\t\n\x05SLICE\x10!\x12\x08\n\x04TANH\x10\x17\x12\x0f\n\x0bWINDOW_DATA\x10\x18\x12\r\n\tTHRESHOLD\x10\x1f\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xfd\x07\n\x10V0LayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x12\n\nnum_output\x18\x03 \x01(\r\x12\x16\n\x08\x62iasterm\x18\x04 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x06 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0e\n\x03pad\x18\x07 \x01(\r:\x01\x30\x12\x12\n\nkernelsize\x18\x08 \x01(\r\x12\x10\n\x05group\x18\t \x01(\r:\x01\x31\x12\x11\n\x06stride\x18\n \x01(\r:\x01\x31\x12\x35\n\x04pool\x18\x0b \x01(\x0e\x32\".caffe.V0LayerParameter.PoolMethod:\x03MAX\x12\x1a\n\rdropout_ratio\x18\x0c \x01(\x02:\x03\x30.5\x12\x15\n\nlocal_size\x18\r \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x0e \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x0f \x01(\x02:\x04\x30.75\x12\x0c\n\x01k\x18\x16 \x01(\x02:\x01\x31\x12\x0e\n\x06source\x18\x10 \x01(\t\x12\x10\n\x05scale\x18\x11 \x01(\x02:\x01\x31\x12\x10\n\x08meanfile\x18\x12 \x01(\t\x12\x11\n\tbatchsize\x18\x13 \x01(\r\x12\x13\n\x08\x63ropsize\x18\x14 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x05\x62lobs\x18\x32 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x10\n\x08\x62lobs_lr\x18\x33 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x34 \x03(\x02\x12\x14\n\trand_skip\x18\x35 \x01(\r:\x01\x30\x12\x1d\n\x10\x64\x65t_fg_threshold\x18\x36 \x01(\x02:\x03\x30.5\x12\x1d\n\x10\x64\x65t_bg_threshold\x18\x37 \x01(\x02:\x03\x30.5\x12\x1d\n\x0f\x64\x65t_fg_fraction\x18\x38 \x01(\x02:\x04\x30.25\x12\x1a\n\x0f\x64\x65t_context_pad\x18: \x01(\r:\x01\x30\x12\x1b\n\rdet_crop_mode\x18; \x01(\t:\x04warp\x12\x12\n\x07new_num\x18< \x01(\x05:\x01\x30\x12\x17\n\x0cnew_channels\x18= \x01(\x05:\x01\x30\x12\x15\n\nnew_height\x18> \x01(\x05:\x01\x30\x12\x14\n\tnew_width\x18? 
\x01(\x05:\x01\x30\x12\x1d\n\x0eshuffle_images\x18@ \x01(\x08:\x05\x66\x61lse\x12\x15\n\nconcat_dim\x18\x41 \x01(\r:\x01\x31\x12\x36\n\x11hdf5_output_param\x18\xe9\x07 \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"W\n\x0ePReLUParameter\x12&\n\x06\x66iller\x18\x01 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1d\n\x0e\x63hannel_shared\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xa8\x01\n\x0cRPNParameter\x12\x13\n\x0b\x66\x65\x61t_stride\x18\x01 \x01(\r\x12\x10\n\x08\x62\x61sesize\x18\x02 \x01(\r\x12\r\n\x05scale\x18\x03 \x03(\r\x12\r\n\x05ratio\x18\x04 \x03(\x02\x12\x12\n\nboxminsize\x18\x05 \x01(\r\x12\x14\n\x0cper_nms_topn\x18\t \x01(\r\x12\x15\n\rpost_nms_topn\x18\x0b \x01(\r\x12\x12\n\nnms_thresh\x18\x08 \x01(\x02\"\xbb\x01\n\x12VideoDataParameter\x12?\n\nvideo_type\x18\x01 \x01(\x0e\x32#.caffe.VideoDataParameter.VideoType:\x06WEBCAM\x12\x14\n\tdevice_id\x18\x02 \x01(\x05:\x01\x30\x12\x12\n\nvideo_file\x18\x03 \x01(\t\x12\x16\n\x0bskip_frames\x18\x04 \x01(\r:\x01\x30\"\"\n\tVideoType\x12\n\n\x06WEBCAM\x10\x00\x12\t\n\x05VIDEO\x10\x01\"i\n\x13\x43\x65nterLossParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12-\n\rcenter_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\"\xd9\x02\n\x1bMarginInnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x43\n\x04type\x18\x02 \x01(\x0e\x32-.caffe.MarginInnerProductParameter.MarginType:\x06SINGLE\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x04 \x01(\x05:\x01\x31\x12\x0f\n\x04\x62\x61se\x18\x05 \x01(\x02:\x01\x31\x12\x10\n\x05gamma\x18\x06 \x01(\x02:\x01\x30\x12\x10\n\x05power\x18\x07 \x01(\x02:\x01\x31\x12\x14\n\titeration\x18\x08 \x01(\x05:\x01\x30\x12\x15\n\nlambda_min\x18\t \x01(\x02:\x01\x30\"?\n\nMarginType\x12\n\n\x06SINGLE\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\n\n\x06TRIPLE\x10\x02\x12\r\n\tQUADRUPLE\x10\x03\"\x8a\x01\n#AdditiveMarginInnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12-\n\rweight_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x01m\x18\x03 \x01(\x02:\x04\x30.35\x12\x0f\n\x04\x61xis\x18\x04 \x01(\x05:\x01\x31\"\xad\x04\n\x1e\x44\x65\x66ormableConvolutionParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12\x0b\n\x03pad\x18\x03 \x03(\r\x12\x13\n\x0bkernel_size\x18\x04 \x03(\r\x12\x0e\n\x06stride\x18\x06 \x03(\r\x12\x10\n\x08\x64ilation\x18\x12 \x03(\r\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x10\n\x08kernel_h\x18\x0b \x01(\r\x12\x10\n\x08kernel_w\x18\x0c \x01(\r\x12\x10\n\x08stride_h\x18\r \x01(\r\x12\x10\n\x08stride_w\x18\x0e \x01(\r\x12\x10\n\x05group\x18\x05 \x01(\r:\x01\x34\x12\x1b\n\x10\x64\x65\x66ormable_group\x18\x19 \x01(\r:\x01\x34\x12-\n\rweight_filler\x18\x07 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x08 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x45\n\x06\x65ngine\x18\x0f \x01(\x0e\x32,.caffe.DeformableConvolutionParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x10 \x01(\x05:\x01\x31\x12\x1e\n\x0f\x66orce_nd_im2col\x18\x11 \x01(\x08:\x05\x66\x61lse\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"K\n\x19LabelSpecificAddParameter\x12\x0f\n\x04\x62ias\x18\x01 \x01(\x02:\x01\x30\x12\x1d\n\x0etransform_test\x18\x02 
\x01(\x08:\x05\x66\x61lse\"\xed\x01\n\x15\x43hannelScaleParameter\x12\x18\n\ndo_forward\x18\x01 \x01(\x08:\x04true\x12!\n\x13\x64o_backward_feature\x18\x02 \x01(\x08:\x04true\x12\x1f\n\x11\x64o_backward_scale\x18\x03 \x01(\x08:\x04true\x12\x1b\n\x0cglobal_scale\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10max_global_scale\x18\x05 \x01(\x02:\x04\x31\x30\x30\x30\x12\x1b\n\x10min_global_scale\x18\x06 \x01(\x02:\x01\x30\x12\x1c\n\x11init_global_scale\x18\x07 \x01(\x02:\x01\x31\"C\n\x12\x43osinAddmParameter\x12\x0e\n\x01m\x18\x01 \x01(\x02:\x03\x30.5\x12\x1d\n\x0etransform_test\x18\x02 \x01(\x08:\x05\x66\x61lse\"A\n\x12\x43osinMulmParameter\x12\x0c\n\x01m\x18\x01 \x01(\x02:\x01\x34\x12\x1d\n\x0etransform_test\x18\x02 \x01(\x08:\x05\x66\x61lse\"r\n\x1b\x43oupledClusterLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\ngroup_size\x18\x02 \x01(\x05:\x01\x33\x12\x10\n\x05scale\x18\x03 \x01(\x02:\x01\x31\x12\x17\n\x08log_flag\x18\x04 \x01(\x08:\x05\x66\x61lse\"R\n\x14TripletLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\ngroup_size\x18\x02 \x01(\x05:\x01\x33\x12\x10\n\x05scale\x18\x03 \x01(\x02:\x01\x31\"\xe2\x01\n\x17GeneralTripletParameter\x12\x13\n\x06margin\x18\x01 \x01(\x02:\x03\x30.2\x12\x1d\n\x0f\x61\x64\x64_center_loss\x18\x02 \x01(\x08:\x04true\x12\x1b\n\x0chardest_only\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0epositive_first\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x14positive_upper_bound\x18\x05 \x01(\x02:\x01\x31\x12\x1a\n\x0fpositive_weight\x18\x06 \x01(\x02:\x01\x31\x12\x1a\n\x0fnegative_weight\x18\x07 \x01(\x02:\x01\x31\"W\n\x11ROIAlignParameter\x12\x13\n\x08pooled_h\x18\x01 \x01(\r:\x01\x30\x12\x13\n\x08pooled_w\x18\x02 \x01(\r:\x01\x30\x12\x18\n\rspatial_scale\x18\x03 \x01(\x02:\x01\x31*\x1c\n\x05Phase\x12\t\n\x05TRAIN\x10\x00\x12\x08\n\x04TEST\x10\x01') +) + +_PHASE = _descriptor.EnumDescriptor( + name='Phase', + full_name='caffe.Phase', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TRAIN', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TEST', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=29109, + serialized_end=29137, +) +_sym_db.RegisterEnumDescriptor(_PHASE) + +Phase = enum_type_wrapper.EnumTypeWrapper(_PHASE) +TRAIN = 0 +TEST = 1 + + +_EMITCONSTRAINT_EMITTYPE = _descriptor.EnumDescriptor( + name='EmitType', + full_name='caffe.EmitConstraint.EmitType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CENTER', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MIN_OVERLAP', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1162, + serialized_end=1201, +) +_sym_db.RegisterEnumDescriptor(_EMITCONSTRAINT_EMITTYPE) + +_ANNOTATEDDATUM_ANNOTATIONTYPE = _descriptor.EnumDescriptor( + name='AnnotationType', + full_name='caffe.AnnotatedDatum.AnnotationType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='BBOX', index=0, number=0, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1645, + serialized_end=1671, +) +_sym_db.RegisterEnumDescriptor(_ANNOTATEDDATUM_ANNOTATIONTYPE) + +_FILLERPARAMETER_VARIANCENORM = _descriptor.EnumDescriptor( + name='VarianceNorm', + 
full_name='caffe.FillerParameter.VarianceNorm', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FAN_IN', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FAN_OUT', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AVERAGE', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2058, + serialized_end=2110, +) +_sym_db.RegisterEnumDescriptor(_FILLERPARAMETER_VARIANCENORM) + +_SOLVERPARAMETER_SNAPSHOTFORMAT = _descriptor.EnumDescriptor( + name='SnapshotFormat', + full_name='caffe.SolverParameter.SnapshotFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='HDF5', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BINARYPROTO', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=3568, + serialized_end=3611, +) +_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SNAPSHOTFORMAT) + +_SOLVERPARAMETER_SOLVERMODE = _descriptor.EnumDescriptor( + name='SolverMode', + full_name='caffe.SolverParameter.SolverMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CPU', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GPU', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=3613, + serialized_end=3643, +) +_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERMODE) + +_SOLVERPARAMETER_SOLVERTYPE = _descriptor.EnumDescriptor( + name='SolverType', + full_name='caffe.SolverParameter.SolverType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SGD', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NESTEROV', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADAGRAD', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RMSPROP', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADADELTA', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADAM', index=5, number=5, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=3645, + serialized_end=3730, +) +_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERTYPE) + +_PARAMSPEC_DIMCHECKMODE = _descriptor.EnumDescriptor( + name='DimCheckMode', + full_name='caffe.ParamSpec.DimCheckMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STRICT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERMISSIVE', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=4491, + serialized_end=4533, +) +_sym_db.RegisterEnumDescriptor(_PARAMSPEC_DIMCHECKMODE) + +_BNPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.BNParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, 
number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_BNPARAMETER_ENGINE) + +_FOCALLOSSPARAMETER_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='caffe.FocalLossParameter.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ORIGIN', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LINEAR', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=11083, + serialized_end=11113, +) +_sym_db.RegisterEnumDescriptor(_FOCALLOSSPARAMETER_TYPE) + +_RESIZEPARAMETER_RESIZE_MODE = _descriptor.EnumDescriptor( + name='Resize_mode', + full_name='caffe.ResizeParameter.Resize_mode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='WARP', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIT_SMALL_SIZE', index=1, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIT_LARGE_SIZE_AND_PAD', index=2, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=11899, + serialized_end=11970, +) +_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_RESIZE_MODE) + +_RESIZEPARAMETER_PAD_MODE = _descriptor.EnumDescriptor( + name='Pad_mode', + full_name='caffe.ResizeParameter.Pad_mode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CONSTANT', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MIRRORED', index=1, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPEAT_NEAREST', index=2, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=11972, + serialized_end=12030, +) +_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_PAD_MODE) + +_RESIZEPARAMETER_INTERP_MODE = _descriptor.EnumDescriptor( + name='Interp_mode', + full_name='caffe.ResizeParameter.Interp_mode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='LINEAR', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AREA', index=1, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEAREST', index=2, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUBIC', index=3, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LANCZOS4', index=4, number=5, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=12032, + serialized_end=12105, +) +_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_INTERP_MODE) + +_LOSSPARAMETER_NORMALIZATIONMODE = _descriptor.EnumDescriptor( + name='NormalizationMode', + full_name='caffe.LossParameter.NormalizationMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FULL', index=0, 
number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VALID', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BATCH_SIZE', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NONE', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=13052, + serialized_end=13118, +) +_sym_db.RegisterEnumDescriptor(_LOSSPARAMETER_NORMALIZATIONMODE) + +_EVALDETECTIONPARAMETER_SCORETYPE = _descriptor.EnumDescriptor( + name='ScoreType', + full_name='caffe.EvalDetectionParameter.ScoreType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OBJ', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROB', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MULTIPLY', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=14582, + serialized_end=14626, +) +_sym_db.RegisterEnumDescriptor(_EVALDETECTIONPARAMETER_SCORETYPE) + +_CONVOLUTIONPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.ConvolutionParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_CONVOLUTIONPARAMETER_ENGINE) + +_DATAPARAMETER_DB = _descriptor.EnumDescriptor( + name='DB', + full_name='caffe.DataParameter.DB', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='LEVELDB', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LMDB', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=15469, + serialized_end=15496, +) +_sym_db.RegisterEnumDescriptor(_DATAPARAMETER_DB) + +_ELTWISEPARAMETER_ELTWISEOP = _descriptor.EnumDescriptor( + name='EltwiseOp', + full_name='caffe.EltwiseParameter.EltwiseOp', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PROD', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUM', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAX', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=16856, + serialized_end=16895, +) +_sym_db.RegisterEnumDescriptor(_ELTWISEPARAMETER_ELTWISEOP) + +_HINGELOSSPARAMETER_NORM = _descriptor.EnumDescriptor( + name='Norm', + full_name='caffe.HingeLossParameter.Norm', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='L1', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='L2', index=1, number=2, + serialized_options=None, + type=None), + ], + 
containing_type=None, + serialized_options=None, + serialized_start=17430, + serialized_end=17452, +) +_sym_db.RegisterEnumDescriptor(_HINGELOSSPARAMETER_NORM) + +_LRNPARAMETER_NORMREGION = _descriptor.EnumDescriptor( + name='NormRegion', + full_name='caffe.LRNParameter.NormRegion', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ACROSS_CHANNELS', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WITHIN_CHANNEL', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=18345, + serialized_end=18398, +) +_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_NORMREGION) + +_LRNPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.LRNParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_ENGINE) + +_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE = _descriptor.EnumDescriptor( + name='LocLossType', + full_name='caffe.MultiBoxLossParameter.LocLossType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='L2', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SMOOTH_L1', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=19479, + serialized_end=19515, +) +_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE) + +_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE = _descriptor.EnumDescriptor( + name='ConfLossType', + full_name='caffe.MultiBoxLossParameter.ConfLossType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SOFTMAX', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOGISTIC', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=19517, + serialized_end=19558, +) +_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE) + +_MULTIBOXLOSSPARAMETER_MATCHTYPE = _descriptor.EnumDescriptor( + name='MatchType', + full_name='caffe.MultiBoxLossParameter.MatchType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='BIPARTITE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PER_PREDICTION', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=19560, + serialized_end=19606, +) +_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MATCHTYPE) + +_MULTIBOXLOSSPARAMETER_MININGTYPE = _descriptor.EnumDescriptor( + name='MiningType', + full_name='caffe.MultiBoxLossParameter.MiningType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAX_NEGATIVE', index=1, 
number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HARD_EXAMPLE', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=19608, + serialized_end=19666, +) +_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MININGTYPE) + +_POOLINGPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor( + name='PoolMethod', + full_name='caffe.PoolingParameter.PoolMethod', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MAX', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AVE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STOCHASTIC', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=20213, + serialized_end=20259, +) +_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_POOLMETHOD) + +_POOLINGPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.PoolingParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_ENGINE) + +_PRIORBOXPARAMETER_CODETYPE = _descriptor.EnumDescriptor( + name='CodeType', + full_name='caffe.PriorBoxParameter.CodeType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CORNER', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CENTER_SIZE', index=1, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CORNER_SIZE', index=2, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=20632, + serialized_end=20688, +) +_sym_db.RegisterEnumDescriptor(_PRIORBOXPARAMETER_CODETYPE) + +_REDUCTIONPARAMETER_REDUCTIONOP = _descriptor.EnumDescriptor( + name='ReductionOp', + full_name='caffe.ReductionParameter.ReductionOp', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SUM', index=0, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ASUM', index=1, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUMSQ', index=2, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MEAN', index=3, number=4, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=21111, + serialized_end=21164, +) +_sym_db.RegisterEnumDescriptor(_REDUCTIONPARAMETER_REDUCTIONOP) + +_RELUPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.ReLUParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + 
serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_RELUPARAMETER_ENGINE) + +_SIGMOIDPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.SigmoidParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_SIGMOIDPARAMETER_ENGINE) + +_SOFTMAXPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.SoftmaxParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_SOFTMAXPARAMETER_ENGINE) + +_TANHPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.TanHParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_TANHPARAMETER_ENGINE) + +_SPPPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor( + name='PoolMethod', + full_name='caffe.SPPParameter.PoolMethod', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MAX', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AVE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STOCHASTIC', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=20213, + serialized_end=20259, +) +_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_POOLMETHOD) + +_SPPPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.SPPParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + 
serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_ENGINE) + +_V1LAYERPARAMETER_LAYERTYPE = _descriptor.EnumDescriptor( + name='LayerType', + full_name='caffe.V1LayerParameter.LayerType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ABSVAL', index=1, number=35, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACCURACY', index=2, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ARGMAX', index=3, number=30, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BNLL', index=4, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONCAT', index=5, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONTRASTIVE_LOSS', index=6, number=37, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONVOLUTION', index=7, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATA', index=8, number=5, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DECONVOLUTION', index=9, number=39, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DROPOUT', index=10, number=6, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUMMY_DATA', index=11, number=32, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EUCLIDEAN_LOSS', index=12, number=7, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ELTWISE', index=13, number=25, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXP', index=14, number=38, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLATTEN', index=15, number=8, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HDF5_DATA', index=16, number=9, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HDF5_OUTPUT', index=17, number=10, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HINGE_LOSS', index=18, number=28, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IM2COL', index=19, number=11, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMAGE_DATA', index=20, number=12, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INFOGAIN_LOSS', index=21, number=13, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INNER_PRODUCT', index=22, number=14, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LRN', index=23, number=15, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MEMORY_DATA', index=24, number=29, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MULTINOMIAL_LOGISTIC_LOSS', index=25, number=16, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MVN', index=26, number=34, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POOLING', index=27, number=17, + serialized_options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='POWER', index=28, number=26, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RELU', index=29, number=18, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SIGMOID', index=30, number=19, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SIGMOID_CROSS_ENTROPY_LOSS', index=31, number=27, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SILENCE', index=32, number=36, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SOFTMAX', index=33, number=20, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SOFTMAX_LOSS', index=34, number=21, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPLIT', index=35, number=22, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SLICE', index=36, number=33, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TANH', index=37, number=23, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WINDOW_DATA', index=38, number=24, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THRESHOLD', index=39, number=31, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=24862, + serialized_end=25462, +) +_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_LAYERTYPE) + +_V1LAYERPARAMETER_DIMCHECKMODE = _descriptor.EnumDescriptor( + name='DimCheckMode', + full_name='caffe.V1LayerParameter.DimCheckMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STRICT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERMISSIVE', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=4491, + serialized_end=4533, +) +_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_DIMCHECKMODE) + +_V0LAYERPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor( + name='PoolMethod', + full_name='caffe.V0LayerParameter.PoolMethod', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MAX', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AVE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STOCHASTIC', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=20213, + serialized_end=20259, +) +_sym_db.RegisterEnumDescriptor(_V0LAYERPARAMETER_POOLMETHOD) + +_VIDEODATAPARAMETER_VIDEOTYPE = _descriptor.EnumDescriptor( + name='VideoType', + full_name='caffe.VideoDataParameter.VideoType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='WEBCAM', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VIDEO', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=26946, + serialized_end=26980, +) +_sym_db.RegisterEnumDescriptor(_VIDEODATAPARAMETER_VIDEOTYPE) + +_MARGININNERPRODUCTPARAMETER_MARGINTYPE = _descriptor.EnumDescriptor( + name='MarginType', + 
full_name='caffe.MarginInnerProductParameter.MarginType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SINGLE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOUBLE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRIPLE', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUADRUPLE', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=27372, + serialized_end=27435, +) +_sym_db.RegisterEnumDescriptor(_MARGININNERPRODUCTPARAMETER_MARGINTYPE) + +_DEFORMABLECONVOLUTIONPARAMETER_ENGINE = _descriptor.EnumDescriptor( + name='Engine', + full_name='caffe.DeformableConvolutionParameter.Engine', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DEFAULT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAFFE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUDNN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=10905, + serialized_end=10948, +) +_sym_db.RegisterEnumDescriptor(_DEFORMABLECONVOLUTIONPARAMETER_ENGINE) + + +_BLOBSHAPE = _descriptor.Descriptor( + name='BlobShape', + full_name='caffe.BlobShape', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dim', full_name='caffe.BlobShape.dim', index=0, + number=1, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22, + serialized_end=50, +) + + +_BLOBPROTO = _descriptor.Descriptor( + name='BlobProto', + full_name='caffe.BlobProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='shape', full_name='caffe.BlobProto.shape', index=0, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data', full_name='caffe.BlobProto.data', index=1, + number=5, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='diff', full_name='caffe.BlobProto.diff', index=2, + number=6, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_data', full_name='caffe.BlobProto.double_data', index=3, + number=8, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_diff', full_name='caffe.BlobProto.double_diff', index=4, + number=9, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num', full_name='caffe.BlobProto.num', index=5, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channels', full_name='caffe.BlobProto.channels', index=6, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='caffe.BlobProto.height', index=7, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='caffe.BlobProto.width', index=8, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=53, + serialized_end=257, +) + + +_BLOBPROTOVECTOR = _descriptor.Descriptor( + name='BlobProtoVector', + full_name='caffe.BlobProtoVector', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='blobs', full_name='caffe.BlobProtoVector.blobs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=259, + serialized_end=309, +) + + +_DATUM = _descriptor.Descriptor( + name='Datum', + full_name='caffe.Datum', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='channels', full_name='caffe.Datum.channels', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='caffe.Datum.height', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='width', full_name='caffe.Datum.width', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data', full_name='caffe.Datum.data', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label', full_name='caffe.Datum.label', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='float_data', full_name='caffe.Datum.float_data', index=5, + number=6, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='encoded', full_name='caffe.Datum.encoded', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='labels', full_name='caffe.Datum.labels', index=7, + number=8, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=312, + serialized_end=457, +) + + +_LABELMAPITEM = _descriptor.Descriptor( + name='LabelMapItem', + full_name='caffe.LabelMapItem', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='caffe.LabelMapItem.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label', full_name='caffe.LabelMapItem.label', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='display_name', full_name='caffe.LabelMapItem.display_name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=459, + 
serialized_end=524, +) + + +_LABELMAP = _descriptor.Descriptor( + name='LabelMap', + full_name='caffe.LabelMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='item', full_name='caffe.LabelMap.item', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=526, + serialized_end=571, +) + + +_SAMPLER = _descriptor.Descriptor( + name='Sampler', + full_name='caffe.Sampler', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='min_scale', full_name='caffe.Sampler.min_scale', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_scale', full_name='caffe.Sampler.max_scale', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_aspect_ratio', full_name='caffe.Sampler.min_aspect_ratio', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_aspect_ratio', full_name='caffe.Sampler.max_aspect_ratio', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=573, + serialized_end=684, +) + + +_SAMPLECONSTRAINT = _descriptor.Descriptor( + name='SampleConstraint', + full_name='caffe.SampleConstraint', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='min_jaccard_overlap', full_name='caffe.SampleConstraint.min_jaccard_overlap', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_jaccard_overlap', full_name='caffe.SampleConstraint.max_jaccard_overlap', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_sample_coverage', full_name='caffe.SampleConstraint.min_sample_coverage', index=2, + number=3, type=2, cpp_type=6, 
label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_sample_coverage', full_name='caffe.SampleConstraint.max_sample_coverage', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_object_coverage', full_name='caffe.SampleConstraint.min_object_coverage', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_object_coverage', full_name='caffe.SampleConstraint.max_object_coverage', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=687, + serialized_end=879, +) + + +_BATCHSAMPLER = _descriptor.Descriptor( + name='BatchSampler', + full_name='caffe.BatchSampler', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='use_original_image', full_name='caffe.BatchSampler.use_original_image', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sampler', full_name='caffe.BatchSampler.sampler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sample_constraint', full_name='caffe.BatchSampler.sample_constraint', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_sample', full_name='caffe.BatchSampler.max_sample', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_trials', full_name='caffe.BatchSampler.max_trials', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=100, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + 
syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=882, + serialized_end=1060, +) + + +_EMITCONSTRAINT = _descriptor.Descriptor( + name='EmitConstraint', + full_name='caffe.EmitConstraint', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='emit_type', full_name='caffe.EmitConstraint.emit_type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='emit_overlap', full_name='caffe.EmitConstraint.emit_overlap', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _EMITCONSTRAINT_EMITTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1063, + serialized_end=1201, +) + + +_NORMALIZEDBBOX = _descriptor.Descriptor( + name='NormalizedBBox', + full_name='caffe.NormalizedBBox', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='xmin', full_name='caffe.NormalizedBBox.xmin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ymin', full_name='caffe.NormalizedBBox.ymin', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='xmax', full_name='caffe.NormalizedBBox.xmax', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ymax', full_name='caffe.NormalizedBBox.ymax', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label', full_name='caffe.NormalizedBBox.label', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='difficult', full_name='caffe.NormalizedBBox.difficult', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='score', full_name='caffe.NormalizedBBox.score', index=6, + number=7, type=2, cpp_type=6, label=1, + 
has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='size', full_name='caffe.NormalizedBBox.size', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1204, + serialized_end=1339, +) + + +_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='caffe.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='instance_id', full_name='caffe.Annotation.instance_id', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bbox', full_name='caffe.Annotation.bbox', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1341, + serialized_end=1414, +) + + +_ANNOTATIONGROUP = _descriptor.Descriptor( + name='AnnotationGroup', + full_name='caffe.AnnotationGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='group_label', full_name='caffe.AnnotationGroup.group_label', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='annotation', full_name='caffe.AnnotationGroup.annotation', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1416, + serialized_end=1493, +) + + +_ANNOTATEDDATUM = _descriptor.Descriptor( + name='AnnotatedDatum', + full_name='caffe.AnnotatedDatum', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='datum', full_name='caffe.AnnotatedDatum.datum', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.AnnotatedDatum.type', index=1, + number=2, type=14, cpp_type=8, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='annotation_group', full_name='caffe.AnnotatedDatum.annotation_group', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ANNOTATEDDATUM_ANNOTATIONTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1496, + serialized_end=1671, +) + + +_MTCNNBBOX = _descriptor.Descriptor( + name='MTCNNBBox', + full_name='caffe.MTCNNBBox', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='xmin', full_name='caffe.MTCNNBBox.xmin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ymin', full_name='caffe.MTCNNBBox.ymin', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='xmax', full_name='caffe.MTCNNBBox.xmax', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ymax', full_name='caffe.MTCNNBBox.ymax', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1673, + serialized_end=1740, +) + + +_MTCNNDATUM = _descriptor.Descriptor( + name='MTCNNDatum', + full_name='caffe.MTCNNDatum', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='datum', full_name='caffe.MTCNNDatum.datum', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='roi', full_name='caffe.MTCNNDatum.roi', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pts', full_name='caffe.MTCNNDatum.pts', index=2, + number=3, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1742, + serialized_end=1827, +) + + +_FILLERPARAMETER = _descriptor.Descriptor( + name='FillerParameter', + full_name='caffe.FillerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='caffe.FillerParameter.type', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("constant").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='caffe.FillerParameter.value', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min', full_name='caffe.FillerParameter.min', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max', full_name='caffe.FillerParameter.max', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean', full_name='caffe.FillerParameter.mean', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='std', full_name='caffe.FillerParameter.std', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sparse', full_name='caffe.FillerParameter.sparse', index=6, + number=7, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='variance_norm', full_name='caffe.FillerParameter.variance_norm', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='file', full_name='caffe.FillerParameter.file', index=8, + number=9, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, 
file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILLERPARAMETER_VARIANCENORM, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1830, + serialized_end=2110, +) + + +_NETPARAMETER = _descriptor.Descriptor( + name='NetParameter', + full_name='caffe.NetParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='caffe.NetParameter.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='input', full_name='caffe.NetParameter.input', index=1, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='input_shape', full_name='caffe.NetParameter.input_shape', index=2, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='input_dim', full_name='caffe.NetParameter.input_dim', index=3, + number=4, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_backward', full_name='caffe.NetParameter.force_backward', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='state', full_name='caffe.NetParameter.state', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='debug_info', full_name='caffe.NetParameter.debug_info', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='layer', full_name='caffe.NetParameter.layer', index=7, + number=100, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='layers', full_name='caffe.NetParameter.layers', index=8, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2113, + serialized_end=2383, +) + + +_SOLVERPARAMETER = _descriptor.Descriptor( + name='SolverParameter', + full_name='caffe.SolverParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='net', full_name='caffe.SolverParameter.net', index=0, + number=24, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='net_param', full_name='caffe.SolverParameter.net_param', index=1, + number=25, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='train_net', full_name='caffe.SolverParameter.train_net', index=2, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_net', full_name='caffe.SolverParameter.test_net', index=3, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='train_net_param', full_name='caffe.SolverParameter.train_net_param', index=4, + number=21, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_net_param', full_name='caffe.SolverParameter.test_net_param', index=5, + number=22, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='train_state', full_name='caffe.SolverParameter.train_state', index=6, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_state', full_name='caffe.SolverParameter.test_state', index=7, + number=27, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_iter', full_name='caffe.SolverParameter.test_iter', index=8, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_interval', 
full_name='caffe.SolverParameter.test_interval', index=9, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_compute_loss', full_name='caffe.SolverParameter.test_compute_loss', index=10, + number=19, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_initialization', full_name='caffe.SolverParameter.test_initialization', index=11, + number=32, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='base_lr', full_name='caffe.SolverParameter.base_lr', index=12, + number=5, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='display', full_name='caffe.SolverParameter.display', index=13, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='average_loss', full_name='caffe.SolverParameter.average_loss', index=14, + number=33, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_iter', full_name='caffe.SolverParameter.max_iter', index=15, + number=7, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='iter_size', full_name='caffe.SolverParameter.iter_size', index=16, + number=36, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lr_policy', full_name='caffe.SolverParameter.lr_policy', index=17, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='gamma', full_name='caffe.SolverParameter.gamma', index=18, + number=9, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='power', full_name='caffe.SolverParameter.power', index=19, + number=10, type=2, cpp_type=6, label=1, + 
has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='momentum', full_name='caffe.SolverParameter.momentum', index=20, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_decay', full_name='caffe.SolverParameter.weight_decay', index=21, + number=12, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='regularization_type', full_name='caffe.SolverParameter.regularization_type', index=22, + number=29, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("L2").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stepsize', full_name='caffe.SolverParameter.stepsize', index=23, + number=13, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stepvalue', full_name='caffe.SolverParameter.stepvalue', index=24, + number=34, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stagelr', full_name='caffe.SolverParameter.stagelr', index=25, + number=50, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stageiter', full_name='caffe.SolverParameter.stageiter', index=26, + number=51, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='clip_gradients', full_name='caffe.SolverParameter.clip_gradients', index=27, + number=35, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(-1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshot', full_name='caffe.SolverParameter.snapshot', index=28, + number=14, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshot_prefix', full_name='caffe.SolverParameter.snapshot_prefix', index=29, + number=15, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshot_diff', full_name='caffe.SolverParameter.snapshot_diff', index=30, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshot_format', full_name='caffe.SolverParameter.snapshot_format', index=31, + number=37, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='solver_mode', full_name='caffe.SolverParameter.solver_mode', index=32, + number=17, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_id', full_name='caffe.SolverParameter.device_id', index=33, + number=18, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='random_seed', full_name='caffe.SolverParameter.random_seed', index=34, + number=20, type=3, cpp_type=2, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.SolverParameter.type', index=35, + number=40, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("SGD").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='delta', full_name='caffe.SolverParameter.delta', index=36, + number=31, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1e-08), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='momentum2', full_name='caffe.SolverParameter.momentum2', index=37, + number=39, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.999), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rms_decay', full_name='caffe.SolverParameter.rms_decay', index=38, + number=38, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='debug_info', full_name='caffe.SolverParameter.debug_info', index=39, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshot_after_train', full_name='caffe.SolverParameter.snapshot_after_train', index=40, + number=28, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='solver_type', full_name='caffe.SolverParameter.solver_type', index=41, + number=30, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SOLVERPARAMETER_SNAPSHOTFORMAT, + _SOLVERPARAMETER_SOLVERMODE, + _SOLVERPARAMETER_SOLVERTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2386, + serialized_end=3730, +) + + +_SOLVERSTATE = _descriptor.Descriptor( + name='SolverState', + full_name='caffe.SolverState', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='iter', full_name='caffe.SolverState.iter', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='learned_net', full_name='caffe.SolverState.learned_net', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='history', full_name='caffe.SolverState.history', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='current_step', full_name='caffe.SolverState.current_step', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3732, + serialized_end=3840, +) + + +_NETSTATE = _descriptor.Descriptor( + name='NetState', + full_name='caffe.NetState', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='phase', full_name='caffe.NetState.phase', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='level', full_name='caffe.NetState.level', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stage', full_name='caffe.NetState.stage', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3842, + serialized_end=3920, +) + + +_NETSTATERULE = _descriptor.Descriptor( + name='NetStateRule', + full_name='caffe.NetStateRule', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='phase', full_name='caffe.NetStateRule.phase', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_level', full_name='caffe.NetStateRule.min_level', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_level', full_name='caffe.NetStateRule.max_level', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stage', full_name='caffe.NetStateRule.stage', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='not_stage', full_name='caffe.NetStateRule.not_stage', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3922, + serialized_end=4037, +) + + +_SPATIALTRANSFORMERPARAMETER = _descriptor.Descriptor( + name='SpatialTransformerParameter', + full_name='caffe.SpatialTransformerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='transform_type', full_name='caffe.SpatialTransformerParameter.transform_type', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("affine").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sampler_type', full_name='caffe.SpatialTransformerParameter.sampler_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, 
default_value=_b("bilinear").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_H', full_name='caffe.SpatialTransformerParameter.output_H', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_W', full_name='caffe.SpatialTransformerParameter.output_W', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='to_compute_dU', full_name='caffe.SpatialTransformerParameter.to_compute_dU', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_1_1', full_name='caffe.SpatialTransformerParameter.theta_1_1', index=5, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_1_2', full_name='caffe.SpatialTransformerParameter.theta_1_2', index=6, + number=7, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_1_3', full_name='caffe.SpatialTransformerParameter.theta_1_3', index=7, + number=8, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_2_1', full_name='caffe.SpatialTransformerParameter.theta_2_1', index=8, + number=9, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_2_2', full_name='caffe.SpatialTransformerParameter.theta_2_2', index=9, + number=10, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='theta_2_3', full_name='caffe.SpatialTransformerParameter.theta_2_3', index=10, + number=11, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=4040, + serialized_end=4312, +) + + +_STLOSSPARAMETER = _descriptor.Descriptor( + name='STLossParameter', + full_name='caffe.STLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='output_H', full_name='caffe.STLossParameter.output_H', index=0, + number=1, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_W', full_name='caffe.STLossParameter.output_W', index=1, + number=2, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4314, + serialized_end=4367, +) + + +_PARAMSPEC = _descriptor.Descriptor( + name='ParamSpec', + full_name='caffe.ParamSpec', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='caffe.ParamSpec.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='share_mode', full_name='caffe.ParamSpec.share_mode', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lr_mult', full_name='caffe.ParamSpec.lr_mult', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='decay_mult', full_name='caffe.ParamSpec.decay_mult', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PARAMSPEC_DIMCHECKMODE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4370, + serialized_end=4533, +) + + +_LAYERPARAMETER = _descriptor.Descriptor( + name='LayerParameter', + full_name='caffe.LayerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='caffe.LayerParameter.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.LayerParameter.type', index=1, + number=2, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bottom', full_name='caffe.LayerParameter.bottom', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='top', full_name='caffe.LayerParameter.top', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='phase', full_name='caffe.LayerParameter.phase', index=4, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loss_weight', full_name='caffe.LayerParameter.loss_weight', index=5, + number=5, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='param', full_name='caffe.LayerParameter.param', index=6, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blobs', full_name='caffe.LayerParameter.blobs', index=7, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='propagate_down', full_name='caffe.LayerParameter.propagate_down', index=8, + number=11, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='include', full_name='caffe.LayerParameter.include', index=9, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='exclude', full_name='caffe.LayerParameter.exclude', index=10, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transform_param', full_name='caffe.LayerParameter.transform_param', index=11, + number=100, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loss_param', full_name='caffe.LayerParameter.loss_param', index=12, + number=101, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='detection_loss_param', full_name='caffe.LayerParameter.detection_loss_param', index=13, + number=200, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eval_detection_param', full_name='caffe.LayerParameter.eval_detection_param', index=14, + number=201, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='region_loss_param', full_name='caffe.LayerParameter.region_loss_param', index=15, + number=202, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reorg_param', full_name='caffe.LayerParameter.reorg_param', index=16, + number=203, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='accuracy_param', full_name='caffe.LayerParameter.accuracy_param', index=17, + number=102, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='argmax_param', full_name='caffe.LayerParameter.argmax_param', index=18, + number=103, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batch_norm_param', full_name='caffe.LayerParameter.batch_norm_param', index=19, + number=139, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_param', full_name='caffe.LayerParameter.bias_param', index=20, + number=141, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='concat_param', full_name='caffe.LayerParameter.concat_param', index=21, + number=104, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='contrastive_loss_param', full_name='caffe.LayerParameter.contrastive_loss_param', index=22, + number=105, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='convolution_param', full_name='caffe.LayerParameter.convolution_param', index=23, + number=106, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data_param', full_name='caffe.LayerParameter.data_param', index=24, + number=107, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dropout_param', full_name='caffe.LayerParameter.dropout_param', index=25, + number=108, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dummy_data_param', full_name='caffe.LayerParameter.dummy_data_param', index=26, + number=109, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eltwise_param', full_name='caffe.LayerParameter.eltwise_param', index=27, + number=110, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='elu_param', full_name='caffe.LayerParameter.elu_param', index=28, + number=140, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='embed_param', full_name='caffe.LayerParameter.embed_param', index=29, + number=137, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='exp_param', full_name='caffe.LayerParameter.exp_param', index=30, + number=111, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='flatten_param', full_name='caffe.LayerParameter.flatten_param', index=31, + number=135, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdf5_data_param', full_name='caffe.LayerParameter.hdf5_data_param', index=32, + number=112, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdf5_output_param', full_name='caffe.LayerParameter.hdf5_output_param', index=33, + number=113, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hinge_loss_param', full_name='caffe.LayerParameter.hinge_loss_param', index=34, + number=114, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='image_data_param', full_name='caffe.LayerParameter.image_data_param', index=35, + number=115, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='infogain_loss_param', full_name='caffe.LayerParameter.infogain_loss_param', index=36, + number=116, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='inner_product_param', full_name='caffe.LayerParameter.inner_product_param', index=37, + number=117, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='input_param', full_name='caffe.LayerParameter.input_param', index=38, + number=143, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='log_param', full_name='caffe.LayerParameter.log_param', index=39, + number=134, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lrn_param', full_name='caffe.LayerParameter.lrn_param', index=40, + number=118, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='memory_data_param', full_name='caffe.LayerParameter.memory_data_param', index=41, + number=119, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mvn_param', full_name='caffe.LayerParameter.mvn_param', index=42, + number=120, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pooling_param', full_name='caffe.LayerParameter.pooling_param', index=43, + number=121, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='power_param', full_name='caffe.LayerParameter.power_param', index=44, + number=122, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prelu_param', full_name='caffe.LayerParameter.prelu_param', index=45, + number=131, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='python_param', full_name='caffe.LayerParameter.python_param', index=46, + number=130, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='recurrent_param', full_name='caffe.LayerParameter.recurrent_param', index=47, + number=146, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reduction_param', full_name='caffe.LayerParameter.reduction_param', index=48, + number=136, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='relu_param', full_name='caffe.LayerParameter.relu_param', index=49, + number=123, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reshape_param', full_name='caffe.LayerParameter.reshape_param', index=50, + number=133, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='roi_pooling_param', full_name='caffe.LayerParameter.roi_pooling_param', index=51, + number=8266711, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale_param', full_name='caffe.LayerParameter.scale_param', index=52, + number=142, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sigmoid_param', full_name='caffe.LayerParameter.sigmoid_param', index=53, + number=124, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='smooth_l1_loss_param', full_name='caffe.LayerParameter.smooth_l1_loss_param', index=54, + number=8266712, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='softmax_param', full_name='caffe.LayerParameter.softmax_param', index=55, + number=125, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='spp_param', full_name='caffe.LayerParameter.spp_param', index=56, + number=132, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='slice_param', full_name='caffe.LayerParameter.slice_param', index=57, + number=126, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tanh_param', full_name='caffe.LayerParameter.tanh_param', index=58, + number=127, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='threshold_param', full_name='caffe.LayerParameter.threshold_param', index=59, + number=128, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tile_param', full_name='caffe.LayerParameter.tile_param', index=60, + number=138, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='window_data_param', full_name='caffe.LayerParameter.window_data_param', index=61, + number=129, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, 
+ serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='st_param', full_name='caffe.LayerParameter.st_param', index=62, + number=148, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='st_loss_param', full_name='caffe.LayerParameter.st_loss_param', index=63, + number=145, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rpn_param', full_name='caffe.LayerParameter.rpn_param', index=64, + number=150, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='focal_loss_param', full_name='caffe.LayerParameter.focal_loss_param', index=65, + number=155, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='asdn_data_param', full_name='caffe.LayerParameter.asdn_data_param', index=66, + number=159, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bn_param', full_name='caffe.LayerParameter.bn_param', index=67, + number=160, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mtcnn_data_param', full_name='caffe.LayerParameter.mtcnn_data_param', index=68, + number=161, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='interp_param', full_name='caffe.LayerParameter.interp_param', index=69, + number=162, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='psroi_pooling_param', full_name='caffe.LayerParameter.psroi_pooling_param', index=70, + number=163, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='annotated_data_param', full_name='caffe.LayerParameter.annotated_data_param', index=71, + number=164, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prior_box_param', full_name='caffe.LayerParameter.prior_box_param', index=72, + number=165, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_param', full_name='caffe.LayerParameter.crop_param', index=73, + number=167, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='detection_evaluate_param', full_name='caffe.LayerParameter.detection_evaluate_param', index=74, + number=168, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='detection_output_param', full_name='caffe.LayerParameter.detection_output_param', index=75, + number=169, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='multibox_loss_param', full_name='caffe.LayerParameter.multibox_loss_param', index=76, + number=171, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='permute_param', full_name='caffe.LayerParameter.permute_param', index=77, + number=172, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='video_data_param', full_name='caffe.LayerParameter.video_data_param', index=78, + number=173, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='margin_inner_product_param', full_name='caffe.LayerParameter.margin_inner_product_param', index=79, + number=174, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='center_loss_param', full_name='caffe.LayerParameter.center_loss_param', index=80, + number=175, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='deformable_convolution_param', full_name='caffe.LayerParameter.deformable_convolution_param', index=81, + number=176, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label_specific_add_param', full_name='caffe.LayerParameter.label_specific_add_param', index=82, + number=177, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='additive_margin_inner_product_param', full_name='caffe.LayerParameter.additive_margin_inner_product_param', index=83, + number=178, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cosin_add_m_param', full_name='caffe.LayerParameter.cosin_add_m_param', index=84, + number=179, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cosin_mul_m_param', full_name='caffe.LayerParameter.cosin_mul_m_param', index=85, + number=180, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channel_scale_param', full_name='caffe.LayerParameter.channel_scale_param', index=86, + number=181, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='flip_param', full_name='caffe.LayerParameter.flip_param', index=87, + number=182, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='triplet_loss_param', full_name='caffe.LayerParameter.triplet_loss_param', index=88, + number=183, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='coupled_cluster_loss_param', full_name='caffe.LayerParameter.coupled_cluster_loss_param', index=89, + number=184, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='general_triplet_loss_param', full_name='caffe.LayerParameter.general_triplet_loss_param', index=90, + number=185, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='roi_align_param', 
full_name='caffe.LayerParameter.roi_align_param', index=91, + number=186, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='upsample_param', full_name='caffe.LayerParameter.upsample_param', index=92, + number=100003, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='matmul_param', full_name='caffe.LayerParameter.matmul_param', index=93, + number=100005, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pass_through_param', full_name='caffe.LayerParameter.pass_through_param', index=94, + number=100004, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='norm_param', full_name='caffe.LayerParameter.norm_param', index=95, + number=100001, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4536, + serialized_end=9293, +) + + +_UPSAMPLEPARAMETER = _descriptor.Descriptor( + name='UpsampleParameter', + full_name='caffe.UpsampleParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.UpsampleParameter.scale', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale_h', full_name='caffe.UpsampleParameter.scale_h', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale_w', full_name='caffe.UpsampleParameter.scale_w', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_out_h', full_name='caffe.UpsampleParameter.pad_out_h', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_out_w', 
full_name='caffe.UpsampleParameter.pad_out_w', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='upsample_h', full_name='caffe.UpsampleParameter.upsample_h', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='upsample_w', full_name='caffe.UpsampleParameter.upsample_w', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9296, + serialized_end=9459, +) + + +_MATMULPARAMETER = _descriptor.Descriptor( + name='MatMulParameter', + full_name='caffe.MatMulParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dim_1', full_name='caffe.MatMulParameter.dim_1', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dim_2', full_name='caffe.MatMulParameter.dim_2', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dim_3', full_name='caffe.MatMulParameter.dim_3', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9461, + serialized_end=9523, +) + + +_PASSTHROUGHPARAMETER = _descriptor.Descriptor( + name='PassThroughParameter', + full_name='caffe.PassThroughParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.PassThroughParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='block_height', full_name='caffe.PassThroughParameter.block_height', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='block_width', 
full_name='caffe.PassThroughParameter.block_width', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9525, + serialized_end=9619, +) + + +_NORMALIZEPARAMETER = _descriptor.Descriptor( + name='NormalizeParameter', + full_name='caffe.NormalizeParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='across_spatial', full_name='caffe.NormalizeParameter.across_spatial', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale_filler', full_name='caffe.NormalizeParameter.scale_filler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channel_shared', full_name='caffe.NormalizeParameter.channel_shared', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eps', full_name='caffe.NormalizeParameter.eps', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1e-10), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sqrt_a', full_name='caffe.NormalizeParameter.sqrt_a', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9622, + serialized_end=9787, +) + + +_ANNOTATEDDATAPARAMETER = _descriptor.Descriptor( + name='AnnotatedDataParameter', + full_name='caffe.AnnotatedDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='batch_sampler', full_name='caffe.AnnotatedDataParameter.batch_sampler', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label_map_file', full_name='caffe.AnnotatedDataParameter.label_map_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='anno_type', full_name='caffe.AnnotatedDataParameter.anno_type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9790, + serialized_end=9939, +) + + +_ASDNDATAPARAMETER = _descriptor.Descriptor( + name='AsdnDataParameter', + full_name='caffe.AsdnDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='count_drop', full_name='caffe.AsdnDataParameter.count_drop', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=15, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='permute_count', full_name='caffe.AsdnDataParameter.permute_count', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=20, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='count_drop_neg', full_name='caffe.AsdnDataParameter.count_drop_neg', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channels', full_name='caffe.AsdnDataParameter.channels', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1024, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='iter_size', full_name='caffe.AsdnDataParameter.iter_size', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='maintain_before', full_name='caffe.AsdnDataParameter.maintain_before', index=5, + number=6, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9942, + serialized_end=10113, +) + + +_MTCNNDATAPARAMETER = _descriptor.Descriptor( + name='MTCNNDataParameter', + full_name='caffe.MTCNNDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='augmented', full_name='caffe.MTCNNDataParameter.augmented', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='flip', full_name='caffe.MTCNNDataParameter.flip', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_positive', full_name='caffe.MTCNNDataParameter.num_positive', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_negitive', full_name='caffe.MTCNNDataParameter.num_negitive', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_part', full_name='caffe.MTCNNDataParameter.num_part', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resize_width', full_name='caffe.MTCNNDataParameter.resize_width', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resize_height', full_name='caffe.MTCNNDataParameter.resize_height', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_negitive_scale', full_name='caffe.MTCNNDataParameter.min_negitive_scale', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_negitive_scale', full_name='caffe.MTCNNDataParameter.max_negitive_scale', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10116, + serialized_end=10372, +) + + +_INTERPPARAMETER = _descriptor.Descriptor( + name='InterpParameter', + full_name='caffe.InterpParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='height', full_name='caffe.InterpParameter.height', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='caffe.InterpParameter.width', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='zoom_factor', full_name='caffe.InterpParameter.zoom_factor', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shrink_factor', full_name='caffe.InterpParameter.shrink_factor', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_beg', full_name='caffe.InterpParameter.pad_beg', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_end', full_name='caffe.InterpParameter.pad_end', index=5, + number=6, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10375, + serialized_end=10519, +) + + +_PSROIPOOLINGPARAMETER = _descriptor.Descriptor( + name='PSROIPoolingParameter', + full_name='caffe.PSROIPoolingParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='spatial_scale', full_name='caffe.PSROIPoolingParameter.spatial_scale', index=0, + number=1, type=2, cpp_type=6, label=2, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_dim', full_name='caffe.PSROIPoolingParameter.output_dim', index=1, + number=2, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group_size', full_name='caffe.PSROIPoolingParameter.group_size', index=2, + number=3, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10521, + serialized_end=10607, +) + + +_FLIPPARAMETER = _descriptor.Descriptor( + 
name='FlipParameter', + full_name='caffe.FlipParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='flip_width', full_name='caffe.FlipParameter.flip_width', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='flip_height', full_name='caffe.FlipParameter.flip_height', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10609, + serialized_end=10678, +) + + +_BNPARAMETER = _descriptor.Descriptor( + name='BNParameter', + full_name='caffe.BNParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='slope_filler', full_name='caffe.BNParameter.slope_filler', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.BNParameter.bias_filler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='momentum', full_name='caffe.BNParameter.momentum', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.9), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eps', full_name='caffe.BNParameter.eps', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1e-05), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='frozen', full_name='caffe.BNParameter.frozen', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.BNParameter.engine', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _BNPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10681, + serialized_end=10948, +) + + +_FOCALLOSSPARAMETER = _descriptor.Descriptor( + name='FocalLossParameter', + 
full_name='caffe.FocalLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='caffe.FocalLossParameter.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='gamma', full_name='caffe.FocalLossParameter.gamma', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(2), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='alpha', full_name='caffe.FocalLossParameter.alpha', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.25), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='beta', full_name='caffe.FocalLossParameter.beta', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FOCALLOSSPARAMETER_TYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10951, + serialized_end=11113, +) + + +_TRANSFORMATIONPARAMETER = _descriptor.Descriptor( + name='TransformationParameter', + full_name='caffe.TransformationParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.TransformationParameter.scale', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mirror', full_name='caffe.TransformationParameter.mirror', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_size', full_name='caffe.TransformationParameter.crop_size', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_h', full_name='caffe.TransformationParameter.crop_h', index=3, + number=11, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_w', full_name='caffe.TransformationParameter.crop_w', index=4, + number=12, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_file', full_name='caffe.TransformationParameter.mean_file', index=5, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_value', full_name='caffe.TransformationParameter.mean_value', index=6, + number=5, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_color', full_name='caffe.TransformationParameter.force_color', index=7, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_gray', full_name='caffe.TransformationParameter.force_gray', index=8, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resize_param', full_name='caffe.TransformationParameter.resize_param', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='noise_param', full_name='caffe.TransformationParameter.noise_param', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='distort_param', full_name='caffe.TransformationParameter.distort_param', index=11, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='expand_param', full_name='caffe.TransformationParameter.expand_param', index=12, + number=14, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='emit_constraint', full_name='caffe.TransformationParameter.emit_constraint', index=13, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11116, + serialized_end=11574, +) + + +_RESIZEPARAMETER = 
_descriptor.Descriptor( + name='ResizeParameter', + full_name='caffe.ResizeParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prob', full_name='caffe.ResizeParameter.prob', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resize_mode', full_name='caffe.ResizeParameter.resize_mode', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='caffe.ResizeParameter.height', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='caffe.ResizeParameter.width', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height_scale', full_name='caffe.ResizeParameter.height_scale', index=4, + number=8, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width_scale', full_name='caffe.ResizeParameter.width_scale', index=5, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_mode', full_name='caffe.ResizeParameter.pad_mode', index=6, + number=5, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_value', full_name='caffe.ResizeParameter.pad_value', index=7, + number=6, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='interp_mode', full_name='caffe.ResizeParameter.interp_mode', index=8, + number=7, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RESIZEPARAMETER_RESIZE_MODE, + _RESIZEPARAMETER_PAD_MODE, + _RESIZEPARAMETER_INTERP_MODE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11577, + serialized_end=12105, +) + + +_SALTPEPPERPARAMETER = 
_descriptor.Descriptor( + name='SaltPepperParameter', + full_name='caffe.SaltPepperParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fraction', full_name='caffe.SaltPepperParameter.fraction', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='caffe.SaltPepperParameter.value', index=1, + number=2, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12107, + serialized_end=12164, +) + + +_NOISEPARAMETER = _descriptor.Descriptor( + name='NoiseParameter', + full_name='caffe.NoiseParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prob', full_name='caffe.NoiseParameter.prob', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hist_eq', full_name='caffe.NoiseParameter.hist_eq', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='inverse', full_name='caffe.NoiseParameter.inverse', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='decolorize', full_name='caffe.NoiseParameter.decolorize', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='gauss_blur', full_name='caffe.NoiseParameter.gauss_blur', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='jpeg', full_name='caffe.NoiseParameter.jpeg', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(-1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='posterize', full_name='caffe.NoiseParameter.posterize', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='erode', full_name='caffe.NoiseParameter.erode', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='saltpepper', full_name='caffe.NoiseParameter.saltpepper', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='saltpepper_param', full_name='caffe.NoiseParameter.saltpepper_param', index=9, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='clahe', full_name='caffe.NoiseParameter.clahe', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='convert_to_hsv', full_name='caffe.NoiseParameter.convert_to_hsv', index=11, + number=12, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='convert_to_lab', full_name='caffe.NoiseParameter.convert_to_lab', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12167, + serialized_end=12533, +) + + +_DISTORTIONPARAMETER = _descriptor.Descriptor( + name='DistortionParameter', + full_name='caffe.DistortionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='brightness_prob', full_name='caffe.DistortionParameter.brightness_prob', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='brightness_delta', full_name='caffe.DistortionParameter.brightness_delta', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='contrast_prob', full_name='caffe.DistortionParameter.contrast_prob', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='contrast_lower', full_name='caffe.DistortionParameter.contrast_lower', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='contrast_upper', full_name='caffe.DistortionParameter.contrast_upper', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hue_prob', full_name='caffe.DistortionParameter.hue_prob', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hue_delta', full_name='caffe.DistortionParameter.hue_delta', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='saturation_prob', full_name='caffe.DistortionParameter.saturation_prob', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='saturation_lower', full_name='caffe.DistortionParameter.saturation_lower', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='saturation_upper', full_name='caffe.DistortionParameter.saturation_upper', index=9, + number=10, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='random_order_prob', full_name='caffe.DistortionParameter.random_order_prob', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12536, + serialized_end=12853, +) + + +_EXPANSIONPARAMETER = _descriptor.Descriptor( + name='ExpansionParameter', + full_name='caffe.ExpansionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prob', full_name='caffe.ExpansionParameter.prob', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_expand_ratio', full_name='caffe.ExpansionParameter.max_expand_ratio', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12855, + serialized_end=12921, +) + + +_LOSSPARAMETER = _descriptor.Descriptor( + name='LossParameter', + full_name='caffe.LossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ignore_label', full_name='caffe.LossParameter.ignore_label', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='normalization', full_name='caffe.LossParameter.normalization', index=1, + number=3, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='normalize', full_name='caffe.LossParameter.normalize', index=2, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _LOSSPARAMETER_NORMALIZATIONMODE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12924, + serialized_end=13118, +) + + +_ACCURACYPARAMETER = _descriptor.Descriptor( + name='AccuracyParameter', + full_name='caffe.AccuracyParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='top_k', full_name='caffe.AccuracyParameter.top_k', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.AccuracyParameter.axis', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ignore_label', full_name='caffe.AccuracyParameter.ignore_label', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=13120, + serialized_end=13196, +) + + +_ARGMAXPARAMETER = _descriptor.Descriptor( + name='ArgMaxParameter', + full_name='caffe.ArgMaxParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='out_max_val', full_name='caffe.ArgMaxParameter.out_max_val', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='top_k', full_name='caffe.ArgMaxParameter.top_k', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.ArgMaxParameter.axis', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13198, + serialized_end=13275, +) + + +_CONCATPARAMETER = _descriptor.Descriptor( + name='ConcatParameter', + full_name='caffe.ConcatParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.ConcatParameter.axis', index=0, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='concat_dim', full_name='caffe.ConcatParameter.concat_dim', index=1, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13277, + serialized_end=13334, +) + + +_BATCHNORMPARAMETER = _descriptor.Descriptor( + name='BatchNormParameter', + full_name='caffe.BatchNormParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='use_global_stats', full_name='caffe.BatchNormParameter.use_global_stats', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='moving_average_fraction', full_name='caffe.BatchNormParameter.moving_average_fraction', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.999), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eps', 
full_name='caffe.BatchNormParameter.eps', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1e-05), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13336, + serialized_end=13442, +) + + +_BIASPARAMETER = _descriptor.Descriptor( + name='BiasParameter', + full_name='caffe.BiasParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.BiasParameter.axis', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_axes', full_name='caffe.BiasParameter.num_axes', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='filler', full_name='caffe.BiasParameter.filler', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13444, + serialized_end=13537, +) + + +_CONTRASTIVELOSSPARAMETER = _descriptor.Descriptor( + name='ContrastiveLossParameter', + full_name='caffe.ContrastiveLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='margin', full_name='caffe.ContrastiveLossParameter.margin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='legacy_version', full_name='caffe.ContrastiveLossParameter.legacy_version', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13539, + serialized_end=13615, +) + + +_DETECTIONLOSSPARAMETER = _descriptor.Descriptor( + name='DetectionLossParameter', + full_name='caffe.DetectionLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='side', full_name='caffe.DetectionLossParameter.side', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_class', full_name='caffe.DetectionLossParameter.num_class', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=20, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_object', full_name='caffe.DetectionLossParameter.num_object', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='object_scale', full_name='caffe.DetectionLossParameter.object_scale', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='noobject_scale', full_name='caffe.DetectionLossParameter.noobject_scale', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='class_scale', full_name='caffe.DetectionLossParameter.class_scale', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='coord_scale', full_name='caffe.DetectionLossParameter.coord_scale', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sqrt', full_name='caffe.DetectionLossParameter.sqrt', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='constriant', full_name='caffe.DetectionLossParameter.constriant', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13618, + serialized_end=13854, +) + + +_REGIONLOSSPARAMETER = _descriptor.Descriptor( + name='RegionLossParameter', + full_name='caffe.RegionLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='side', full_name='caffe.RegionLossParameter.side', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=13, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_class', full_name='caffe.RegionLossParameter.num_class', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=20, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_match', full_name='caffe.RegionLossParameter.bias_match', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='coords', full_name='caffe.RegionLossParameter.coords', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num', full_name='caffe.RegionLossParameter.num', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='softmax', full_name='caffe.RegionLossParameter.softmax', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='jitter', full_name='caffe.RegionLossParameter.jitter', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.2), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rescore', full_name='caffe.RegionLossParameter.rescore', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='object_scale', full_name='caffe.RegionLossParameter.object_scale', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='class_scale', full_name='caffe.RegionLossParameter.class_scale', index=9, + number=10, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='noobject_scale', full_name='caffe.RegionLossParameter.noobject_scale', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='coord_scale', full_name='caffe.RegionLossParameter.coord_scale', index=11, + number=12, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='absolute', full_name='caffe.RegionLossParameter.absolute', index=12, + number=13, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='thresh', full_name='caffe.RegionLossParameter.thresh', index=13, + number=14, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.2), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='random', full_name='caffe.RegionLossParameter.random', index=14, + number=15, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='biases', full_name='caffe.RegionLossParameter.biases', index=15, + number=16, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='softmax_tree', full_name='caffe.RegionLossParameter.softmax_tree', index=16, + number=17, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='class_map', full_name='caffe.RegionLossParameter.class_map', index=17, + number=18, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=13857, + serialized_end=14258, +) + + +_REORGPARAMETER = _descriptor.Descriptor( + name='ReorgParameter', + full_name='caffe.ReorgParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='stride', full_name='caffe.ReorgParameter.stride', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reverse', full_name='caffe.ReorgParameter.reverse', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + 
], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=14260, + serialized_end=14316, +) + + +_EVALDETECTIONPARAMETER = _descriptor.Descriptor( + name='EvalDetectionParameter', + full_name='caffe.EvalDetectionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='side', full_name='caffe.EvalDetectionParameter.side', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_class', full_name='caffe.EvalDetectionParameter.num_class', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=20, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_object', full_name='caffe.EvalDetectionParameter.num_object', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='threshold', full_name='caffe.EvalDetectionParameter.threshold', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sqrt', full_name='caffe.EvalDetectionParameter.sqrt', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='constriant', full_name='caffe.EvalDetectionParameter.constriant', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='score_type', full_name='caffe.EvalDetectionParameter.score_type', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='nms', full_name='caffe.EvalDetectionParameter.nms', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(-1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='biases', full_name='caffe.EvalDetectionParameter.biases', index=8, + number=9, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + 
_EVALDETECTIONPARAMETER_SCORETYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=14319, + serialized_end=14626, +) + + +_CONVOLUTIONPARAMETER = _descriptor.Descriptor( + name='ConvolutionParameter', + full_name='caffe.ConvolutionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.ConvolutionParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_term', full_name='caffe.ConvolutionParameter.bias_term', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad', full_name='caffe.ConvolutionParameter.pad', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_size', full_name='caffe.ConvolutionParameter.kernel_size', index=3, + number=4, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride', full_name='caffe.ConvolutionParameter.stride', index=4, + number=6, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dilation', full_name='caffe.ConvolutionParameter.dilation', index=5, + number=18, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_h', full_name='caffe.ConvolutionParameter.pad_h', index=6, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_w', full_name='caffe.ConvolutionParameter.pad_w', index=7, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_h', full_name='caffe.ConvolutionParameter.kernel_h', index=8, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_w', 
full_name='caffe.ConvolutionParameter.kernel_w', index=9, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_h', full_name='caffe.ConvolutionParameter.stride_h', index=10, + number=13, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_w', full_name='caffe.ConvolutionParameter.stride_w', index=11, + number=14, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group', full_name='caffe.ConvolutionParameter.group', index=12, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.ConvolutionParameter.weight_filler', index=13, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.ConvolutionParameter.bias_filler', index=14, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.ConvolutionParameter.engine', index=15, + number=15, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.ConvolutionParameter.axis', index=16, + number=16, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_nd_im2col', full_name='caffe.ConvolutionParameter.force_nd_im2col', index=17, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _CONVOLUTIONPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=14629, + serialized_end=15137, +) + + +_CROPPARAMETER = _descriptor.Descriptor( + name='CropParameter', + full_name='caffe.CropParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', 
full_name='caffe.CropParameter.axis', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='offset', full_name='caffe.CropParameter.offset', index=1, + number=2, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15139, + serialized_end=15187, +) + + +_DATAPARAMETER = _descriptor.Descriptor( + name='DataParameter', + full_name='caffe.DataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='caffe.DataParameter.source', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batch_size', full_name='caffe.DataParameter.batch_size', index=1, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rand_skip', full_name='caffe.DataParameter.rand_skip', index=2, + number=7, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='backend', full_name='caffe.DataParameter.backend', index=3, + number=8, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.DataParameter.scale', index=4, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_file', full_name='caffe.DataParameter.mean_file', index=5, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_size', full_name='caffe.DataParameter.crop_size', index=6, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mirror', full_name='caffe.DataParameter.mirror', index=7, + number=6, type=8, cpp_type=7, label=1, + 
has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_encoded_color', full_name='caffe.DataParameter.force_encoded_color', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prefetch', full_name='caffe.DataParameter.prefetch', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='side', full_name='caffe.DataParameter.side', index=10, + number=11, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DATAPARAMETER_DB, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15190, + serialized_end=15496, +) + + +_DETECTIONEVALUATEPARAMETER = _descriptor.Descriptor( + name='DetectionEvaluateParameter', + full_name='caffe.DetectionEvaluateParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_classes', full_name='caffe.DetectionEvaluateParameter.num_classes', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='background_label_id', full_name='caffe.DetectionEvaluateParameter.background_label_id', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='overlap_threshold', full_name='caffe.DetectionEvaluateParameter.overlap_threshold', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='evaluate_difficult_gt', full_name='caffe.DetectionEvaluateParameter.evaluate_difficult_gt', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name_size_file', full_name='caffe.DetectionEvaluateParameter.name_size_file', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='resize_param', full_name='caffe.DetectionEvaluateParameter.resize_param', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15499, + serialized_end=15719, +) + + +_NONMAXIMUMSUPPRESSIONPARAMETER = _descriptor.Descriptor( + name='NonMaximumSuppressionParameter', + full_name='caffe.NonMaximumSuppressionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nms_threshold', full_name='caffe.NonMaximumSuppressionParameter.nms_threshold', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.3), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='top_k', full_name='caffe.NonMaximumSuppressionParameter.top_k', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eta', full_name='caffe.NonMaximumSuppressionParameter.eta', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15721, + serialized_end=15812, +) + + +_SAVEOUTPUTPARAMETER = _descriptor.Descriptor( + name='SaveOutputParameter', + full_name='caffe.SaveOutputParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='output_directory', full_name='caffe.SaveOutputParameter.output_directory', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_name_prefix', full_name='caffe.SaveOutputParameter.output_name_prefix', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='output_format', full_name='caffe.SaveOutputParameter.output_format', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label_map_file', full_name='caffe.SaveOutputParameter.label_map_file', index=3, + number=4, 
type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name_size_file', full_name='caffe.SaveOutputParameter.name_size_file', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_test_image', full_name='caffe.SaveOutputParameter.num_test_image', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resize_param', full_name='caffe.SaveOutputParameter.resize_param', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15815, + serialized_end=16031, +) + + +_DETECTIONOUTPUTPARAMETER = _descriptor.Descriptor( + name='DetectionOutputParameter', + full_name='caffe.DetectionOutputParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_classes', full_name='caffe.DetectionOutputParameter.num_classes', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='share_location', full_name='caffe.DetectionOutputParameter.share_location', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='background_label_id', full_name='caffe.DetectionOutputParameter.background_label_id', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='nms_param', full_name='caffe.DetectionOutputParameter.nms_param', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='save_output_param', full_name='caffe.DetectionOutputParameter.save_output_param', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='code_type', full_name='caffe.DetectionOutputParameter.code_type', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='variance_encoded_in_target', full_name='caffe.DetectionOutputParameter.variance_encoded_in_target', index=6, + number=8, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='keep_top_k', full_name='caffe.DetectionOutputParameter.keep_top_k', index=7, + number=7, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='confidence_threshold', full_name='caffe.DetectionOutputParameter.confidence_threshold', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='visualize', full_name='caffe.DetectionOutputParameter.visualize', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='visualize_threshold', full_name='caffe.DetectionOutputParameter.visualize_threshold', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='save_file', full_name='caffe.DetectionOutputParameter.save_file', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16034, + serialized_end=16489, +) + + +_DROPOUTPARAMETER = _descriptor.Descriptor( + name='DropoutParameter', + full_name='caffe.DropoutParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dropout_ratio', full_name='caffe.DropoutParameter.dropout_ratio', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale_train', full_name='caffe.DropoutParameter.scale_train', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16491, + serialized_end=16564, +) + + +_DUMMYDATAPARAMETER = _descriptor.Descriptor( + name='DummyDataParameter', + full_name='caffe.DummyDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data_filler', full_name='caffe.DummyDataParameter.data_filler', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shape', full_name='caffe.DummyDataParameter.shape', index=1, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num', full_name='caffe.DummyDataParameter.num', index=2, + number=2, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channels', full_name='caffe.DummyDataParameter.channels', index=3, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='caffe.DummyDataParameter.height', index=4, + number=4, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='caffe.DummyDataParameter.width', index=5, + number=5, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16567, + serialized_end=16727, +) + + +_ELTWISEPARAMETER = _descriptor.Descriptor( + name='EltwiseParameter', + full_name='caffe.EltwiseParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='operation', full_name='caffe.EltwiseParameter.operation', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='coeff', full_name='caffe.EltwiseParameter.coeff', index=1, + number=2, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stable_prod_grad', full_name='caffe.EltwiseParameter.stable_prod_grad', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ELTWISEPARAMETER_ELTWISEOP, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16730, + serialized_end=16895, +) + + +_ELUPARAMETER = _descriptor.Descriptor( + name='ELUParameter', + full_name='caffe.ELUParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='alpha', full_name='caffe.ELUParameter.alpha', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16897, + serialized_end=16929, +) + + +_EMBEDPARAMETER = _descriptor.Descriptor( + name='EmbedParameter', + full_name='caffe.EmbedParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.EmbedParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='input_dim', full_name='caffe.EmbedParameter.input_dim', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_term', full_name='caffe.EmbedParameter.bias_term', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.EmbedParameter.weight_filler', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.EmbedParameter.bias_filler', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=16932, + serialized_end=17104, +) + + 
+_EXPPARAMETER = _descriptor.Descriptor( + name='ExpParameter', + full_name='caffe.ExpParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='base', full_name='caffe.ExpParameter.base', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(-1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.ExpParameter.scale', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shift', full_name='caffe.ExpParameter.shift', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17106, + serialized_end=17174, +) + + +_FLATTENPARAMETER = _descriptor.Descriptor( + name='FlattenParameter', + full_name='caffe.FlattenParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.FlattenParameter.axis', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='end_axis', full_name='caffe.FlattenParameter.end_axis', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17176, + serialized_end=17233, +) + + +_HDF5DATAPARAMETER = _descriptor.Descriptor( + name='HDF5DataParameter', + full_name='caffe.HDF5DataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='caffe.HDF5DataParameter.source', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batch_size', full_name='caffe.HDF5DataParameter.batch_size', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shuffle', full_name='caffe.HDF5DataParameter.shuffle', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17235, + serialized_end=17314, +) + + +_HDF5OUTPUTPARAMETER = _descriptor.Descriptor( + name='HDF5OutputParameter', + full_name='caffe.HDF5OutputParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file_name', full_name='caffe.HDF5OutputParameter.file_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17316, + serialized_end=17356, +) + + +_HINGELOSSPARAMETER = _descriptor.Descriptor( + name='HingeLossParameter', + full_name='caffe.HingeLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='norm', full_name='caffe.HingeLossParameter.norm', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _HINGELOSSPARAMETER_NORM, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17358, + serialized_end=17452, +) + + +_IMAGEDATAPARAMETER = _descriptor.Descriptor( + name='ImageDataParameter', + full_name='caffe.ImageDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='caffe.ImageDataParameter.source', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batch_size', full_name='caffe.ImageDataParameter.batch_size', index=1, + number=4, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rand_skip', full_name='caffe.ImageDataParameter.rand_skip', index=2, + number=7, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shuffle', full_name='caffe.ImageDataParameter.shuffle', index=3, + number=8, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_height', 
full_name='caffe.ImageDataParameter.new_height', index=4, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_width', full_name='caffe.ImageDataParameter.new_width', index=5, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_color', full_name='caffe.ImageDataParameter.is_color', index=6, + number=11, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.ImageDataParameter.scale', index=7, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_file', full_name='caffe.ImageDataParameter.mean_file', index=8, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_size', full_name='caffe.ImageDataParameter.crop_size', index=9, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mirror', full_name='caffe.ImageDataParameter.mirror', index=10, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='root_folder', full_name='caffe.ImageDataParameter.root_folder', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17455, + serialized_end=17734, +) + + +_INFOGAINLOSSPARAMETER = _descriptor.Descriptor( + name='InfogainLossParameter', + full_name='caffe.InfogainLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='caffe.InfogainLossParameter.source', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17736, + serialized_end=17775, +) + + +_INNERPRODUCTPARAMETER = _descriptor.Descriptor( + name='InnerProductParameter', + full_name='caffe.InnerProductParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.InnerProductParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_term', full_name='caffe.InnerProductParameter.bias_term', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.InnerProductParameter.weight_filler', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.InnerProductParameter.bias_filler', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.InnerProductParameter.axis', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transpose', full_name='caffe.InnerProductParameter.transpose', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='normalize', full_name='caffe.InnerProductParameter.normalize', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=17778, + serialized_end=18007, +) + + +_INPUTPARAMETER = _descriptor.Descriptor( + name='InputParameter', + full_name='caffe.InputParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='shape', full_name='caffe.InputParameter.shape', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18009, + serialized_end=18058, +) + + +_LOGPARAMETER = _descriptor.Descriptor( + name='LogParameter', + full_name='caffe.LogParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='base', full_name='caffe.LogParameter.base', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(-1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.LogParameter.scale', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shift', full_name='caffe.LogParameter.shift', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18060, + serialized_end=18128, +) + + +_LRNPARAMETER = _descriptor.Descriptor( + name='LRNParameter', + full_name='caffe.LRNParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='local_size', full_name='caffe.LRNParameter.local_size', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='alpha', full_name='caffe.LRNParameter.alpha', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='beta', full_name='caffe.LRNParameter.beta', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.75), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='norm_region', full_name='caffe.LRNParameter.norm_region', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='k', full_name='caffe.LRNParameter.k', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.LRNParameter.engine', index=5, + 
number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _LRNPARAMETER_NORMREGION, + _LRNPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18131, + serialized_end=18443, +) + + +_MEMORYDATAPARAMETER = _descriptor.Descriptor( + name='MemoryDataParameter', + full_name='caffe.MemoryDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='batch_size', full_name='caffe.MemoryDataParameter.batch_size', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channels', full_name='caffe.MemoryDataParameter.channels', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='caffe.MemoryDataParameter.height', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='caffe.MemoryDataParameter.width', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18445, + serialized_end=18535, +) + + +_MULTIBOXLOSSPARAMETER = _descriptor.Descriptor( + name='MultiBoxLossParameter', + full_name='caffe.MultiBoxLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='loc_loss_type', full_name='caffe.MultiBoxLossParameter.loc_loss_type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='conf_loss_type', full_name='caffe.MultiBoxLossParameter.conf_loss_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loc_weight', full_name='caffe.MultiBoxLossParameter.loc_weight', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='num_classes', full_name='caffe.MultiBoxLossParameter.num_classes', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='share_location', full_name='caffe.MultiBoxLossParameter.share_location', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='match_type', full_name='caffe.MultiBoxLossParameter.match_type', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='overlap_threshold', full_name='caffe.MultiBoxLossParameter.overlap_threshold', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='use_prior_for_matching', full_name='caffe.MultiBoxLossParameter.use_prior_for_matching', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='background_label_id', full_name='caffe.MultiBoxLossParameter.background_label_id', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='use_difficult_gt', full_name='caffe.MultiBoxLossParameter.use_difficult_gt', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='do_neg_mining', full_name='caffe.MultiBoxLossParameter.do_neg_mining', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='neg_pos_ratio', full_name='caffe.MultiBoxLossParameter.neg_pos_ratio', index=11, + number=12, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(3), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='neg_overlap', full_name='caffe.MultiBoxLossParameter.neg_overlap', index=12, + number=13, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='code_type', full_name='caffe.MultiBoxLossParameter.code_type', index=13, + number=14, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='encode_variance_in_target', full_name='caffe.MultiBoxLossParameter.encode_variance_in_target', index=14, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='map_object_to_agnostic', full_name='caffe.MultiBoxLossParameter.map_object_to_agnostic', index=15, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ignore_cross_boundary_bbox', full_name='caffe.MultiBoxLossParameter.ignore_cross_boundary_bbox', index=16, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bp_inside', full_name='caffe.MultiBoxLossParameter.bp_inside', index=17, + number=19, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mining_type', full_name='caffe.MultiBoxLossParameter.mining_type', index=18, + number=20, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='nms_param', full_name='caffe.MultiBoxLossParameter.nms_param', index=19, + number=21, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sample_size', full_name='caffe.MultiBoxLossParameter.sample_size', index=20, + number=22, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=64, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='use_prior_for_nms', full_name='caffe.MultiBoxLossParameter.use_prior_for_nms', index=21, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MULTIBOXLOSSPARAMETER_LOCLOSSTYPE, + _MULTIBOXLOSSPARAMETER_CONFLOSSTYPE, + _MULTIBOXLOSSPARAMETER_MATCHTYPE, + _MULTIBOXLOSSPARAMETER_MININGTYPE, + ], + serialized_options=None, + 
is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18538, + serialized_end=19666, +) + + +_PERMUTEPARAMETER = _descriptor.Descriptor( + name='PermuteParameter', + full_name='caffe.PermuteParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='order', full_name='caffe.PermuteParameter.order', index=0, + number=1, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19668, + serialized_end=19701, +) + + +_MVNPARAMETER = _descriptor.Descriptor( + name='MVNParameter', + full_name='caffe.MVNParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='normalize_variance', full_name='caffe.MVNParameter.normalize_variance', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='across_channels', full_name='caffe.MVNParameter.across_channels', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eps', full_name='caffe.MVNParameter.eps', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1e-09), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19703, + serialized_end=19803, +) + + +_PARAMETERPARAMETER = _descriptor.Descriptor( + name='ParameterParameter', + full_name='caffe.ParameterParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='shape', full_name='caffe.ParameterParameter.shape', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19805, + serialized_end=19858, +) + + +_POOLINGPARAMETER = _descriptor.Descriptor( + name='PoolingParameter', + full_name='caffe.PoolingParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pool', full_name='caffe.PoolingParameter.pool', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad', full_name='caffe.PoolingParameter.pad', index=1, + number=4, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_h', full_name='caffe.PoolingParameter.pad_h', index=2, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_w', full_name='caffe.PoolingParameter.pad_w', index=3, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_size', full_name='caffe.PoolingParameter.kernel_size', index=4, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_h', full_name='caffe.PoolingParameter.kernel_h', index=5, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_w', full_name='caffe.PoolingParameter.kernel_w', index=6, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride', full_name='caffe.PoolingParameter.stride', index=7, + number=3, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_h', full_name='caffe.PoolingParameter.stride_h', index=8, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_w', full_name='caffe.PoolingParameter.stride_w', index=9, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.PoolingParameter.engine', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='global_pooling', full_name='caffe.PoolingParameter.global_pooling', index=11, + number=12, type=8, cpp_type=7, 
label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ceil_mode', full_name='caffe.PoolingParameter.ceil_mode', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _POOLINGPARAMETER_POOLMETHOD, + _POOLINGPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19861, + serialized_end=20304, +) + + +_POWERPARAMETER = _descriptor.Descriptor( + name='PowerParameter', + full_name='caffe.PowerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='power', full_name='caffe.PowerParameter.power', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.PowerParameter.scale', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shift', full_name='caffe.PowerParameter.shift', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20306, + serialized_end=20376, +) + + +_PRIORBOXPARAMETER = _descriptor.Descriptor( + name='PriorBoxParameter', + full_name='caffe.PriorBoxParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='min_size', full_name='caffe.PriorBoxParameter.min_size', index=0, + number=1, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_size', full_name='caffe.PriorBoxParameter.max_size', index=1, + number=2, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='aspect_ratio', full_name='caffe.PriorBoxParameter.aspect_ratio', index=2, + number=3, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='flip', full_name='caffe.PriorBoxParameter.flip', index=3, + 
number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='clip', full_name='caffe.PriorBoxParameter.clip', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='variance', full_name='caffe.PriorBoxParameter.variance', index=5, + number=6, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='img_size', full_name='caffe.PriorBoxParameter.img_size', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='img_h', full_name='caffe.PriorBoxParameter.img_h', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='img_w', full_name='caffe.PriorBoxParameter.img_w', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='step', full_name='caffe.PriorBoxParameter.step', index=9, + number=10, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='step_h', full_name='caffe.PriorBoxParameter.step_h', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='step_w', full_name='caffe.PriorBoxParameter.step_w', index=11, + number=12, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='offset', full_name='caffe.PriorBoxParameter.offset', index=12, + number=13, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PRIORBOXPARAMETER_CODETYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20379, + serialized_end=20688, +) + + 
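+# Illustrative note (editor's sketch, not part of the protoc output): the
+# _PRIORBOXPARAMETER descriptor above corresponds to the SSD-style
+# PriorBoxParameter message; its fields and defaults (flip=true, clip=false,
+# offset=0.5) match what a prototxt prior_box_param block would configure.
+# The numeric values below are hypothetical examples, not defaults:
+#   prior_box_param {
+#     min_size: 30.0      # repeated float, field 1
+#     max_size: 60.0      # repeated float, field 2
+#     aspect_ratio: 2.0   # repeated float, field 3
+#     flip: true          # bool, field 4, default true
+#     clip: false         # bool, field 5, default false
+#     variance: 0.1       # repeated float, field 6
+#     offset: 0.5         # float, field 13, default 0.5
+#   }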
+_PYTHONPARAMETER = _descriptor.Descriptor( + name='PythonParameter', + full_name='caffe.PythonParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='module', full_name='caffe.PythonParameter.module', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='layer', full_name='caffe.PythonParameter.layer', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='param_str', full_name='caffe.PythonParameter.param_str', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='share_in_parallel', full_name='caffe.PythonParameter.share_in_parallel', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20690, + serialized_end=20793, +) + + +_RECURRENTPARAMETER = _descriptor.Descriptor( + name='RecurrentParameter', + full_name='caffe.RecurrentParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.RecurrentParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.RecurrentParameter.weight_filler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.RecurrentParameter.bias_filler', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='debug_info', full_name='caffe.RecurrentParameter.debug_info', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='expose_hidden', full_name='caffe.RecurrentParameter.expose_hidden', index=4, + number=5, type=8, cpp_type=7, 
label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20796, + serialized_end=20988, +) + + +_REDUCTIONPARAMETER = _descriptor.Descriptor( + name='ReductionParameter', + full_name='caffe.ReductionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='operation', full_name='caffe.ReductionParameter.operation', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.ReductionParameter.axis', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='coeff', full_name='caffe.ReductionParameter.coeff', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _REDUCTIONPARAMETER_REDUCTIONOP, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20991, + serialized_end=21164, +) + + +_RELUPARAMETER = _descriptor.Descriptor( + name='ReLUParameter', + full_name='caffe.ReLUParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='negative_slope', full_name='caffe.ReLUParameter.negative_slope', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.ReLUParameter.engine', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RELUPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21167, + serialized_end=21308, +) + + +_RESHAPEPARAMETER = _descriptor.Descriptor( + name='ReshapeParameter', + full_name='caffe.ReshapeParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='shape', full_name='caffe.ReshapeParameter.shape', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='axis', full_name='caffe.ReshapeParameter.axis', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_axes', full_name='caffe.ReshapeParameter.num_axes', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21310, + serialized_end=21400, +) + + +_ROIPOOLINGPARAMETER = _descriptor.Descriptor( + name='ROIPoolingParameter', + full_name='caffe.ROIPoolingParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pooled_h', full_name='caffe.ROIPoolingParameter.pooled_h', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pooled_w', full_name='caffe.ROIPoolingParameter.pooled_w', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='spatial_scale', full_name='caffe.ROIPoolingParameter.spatial_scale', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21402, + serialized_end=21491, +) + + +_SCALEPARAMETER = _descriptor.Descriptor( + name='ScaleParameter', + full_name='caffe.ScaleParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.ScaleParameter.axis', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_axes', full_name='caffe.ScaleParameter.num_axes', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='filler', full_name='caffe.ScaleParameter.filler', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_term', 
full_name='caffe.ScaleParameter.bias_term', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.ScaleParameter.bias_filler', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_value', full_name='caffe.ScaleParameter.min_value', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_value', full_name='caffe.ScaleParameter.max_value', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21494, + serialized_end=21697, +) + + +_SIGMOIDPARAMETER = _descriptor.Descriptor( + name='SigmoidParameter', + full_name='caffe.SigmoidParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.SigmoidParameter.engine', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SIGMOIDPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21699, + serialized_end=21819, +) + + +_SMOOTHL1LOSSPARAMETER = _descriptor.Descriptor( + name='SmoothL1LossParameter', + full_name='caffe.SmoothL1LossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sigma', full_name='caffe.SmoothL1LossParameter.sigma', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21821, + serialized_end=21862, +) + + +_SLICEPARAMETER = _descriptor.Descriptor( + name='SliceParameter', + full_name='caffe.SliceParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.SliceParameter.axis', index=0, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='slice_point', full_name='caffe.SliceParameter.slice_point', index=1, + number=2, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='slice_dim', full_name='caffe.SliceParameter.slice_dim', index=2, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21864, + serialized_end=21940, +) + + +_SOFTMAXPARAMETER = _descriptor.Descriptor( + name='SoftmaxParameter', + full_name='caffe.SoftmaxParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.SoftmaxParameter.engine', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.SoftmaxParameter.axis', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SOFTMAXPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21943, + serialized_end=22080, +) + + +_TANHPARAMETER = _descriptor.Descriptor( + name='TanHParameter', + full_name='caffe.TanHParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.TanHParameter.engine', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _TANHPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22082, + serialized_end=22196, +) + + +_TILEPARAMETER = _descriptor.Descriptor( + name='TileParameter', + full_name='caffe.TileParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.TileParameter.axis', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tiles', full_name='caffe.TileParameter.tiles', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22198, + serialized_end=22245, +) + + +_THRESHOLDPARAMETER = _descriptor.Descriptor( + name='ThresholdParameter', + full_name='caffe.ThresholdParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='threshold', full_name='caffe.ThresholdParameter.threshold', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22247, + serialized_end=22289, +) + + +_WINDOWDATAPARAMETER = _descriptor.Descriptor( + name='WindowDataParameter', + full_name='caffe.WindowDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='caffe.WindowDataParameter.source', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.WindowDataParameter.scale', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_file', full_name='caffe.WindowDataParameter.mean_file', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batch_size', full_name='caffe.WindowDataParameter.batch_size', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_size', full_name='caffe.WindowDataParameter.crop_size', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mirror', full_name='caffe.WindowDataParameter.mirror', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fg_threshold', full_name='caffe.WindowDataParameter.fg_threshold', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, 
default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bg_threshold', full_name='caffe.WindowDataParameter.bg_threshold', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fg_fraction', full_name='caffe.WindowDataParameter.fg_fraction', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.25), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='context_pad', full_name='caffe.WindowDataParameter.context_pad', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='crop_mode', full_name='caffe.WindowDataParameter.crop_mode', index=10, + number=11, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("warp").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cache_images', full_name='caffe.WindowDataParameter.cache_images', index=11, + number=12, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='root_folder', full_name='caffe.WindowDataParameter.root_folder', index=12, + number=13, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22292, + serialized_end=22613, +) + + +_SPPPARAMETER = _descriptor.Descriptor( + name='SPPParameter', + full_name='caffe.SPPParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pyramid_height', full_name='caffe.SPPParameter.pyramid_height', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pool', full_name='caffe.SPPParameter.pool', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.SPPParameter.engine', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, 
default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SPPPARAMETER_POOLMETHOD, + _SPPPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22616, + serialized_end=22851, +) + + +_V1LAYERPARAMETER = _descriptor.Descriptor( + name='V1LayerParameter', + full_name='caffe.V1LayerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bottom', full_name='caffe.V1LayerParameter.bottom', index=0, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='top', full_name='caffe.V1LayerParameter.top', index=1, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='caffe.V1LayerParameter.name', index=2, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='include', full_name='caffe.V1LayerParameter.include', index=3, + number=32, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='exclude', full_name='caffe.V1LayerParameter.exclude', index=4, + number=33, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.V1LayerParameter.type', index=5, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blobs', full_name='caffe.V1LayerParameter.blobs', index=6, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='param', full_name='caffe.V1LayerParameter.param', index=7, + number=1001, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blob_share_mode', full_name='caffe.V1LayerParameter.blob_share_mode', index=8, + number=1002, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blobs_lr', full_name='caffe.V1LayerParameter.blobs_lr', index=9, + number=7, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_decay', full_name='caffe.V1LayerParameter.weight_decay', index=10, + number=8, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loss_weight', full_name='caffe.V1LayerParameter.loss_weight', index=11, + number=35, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='accuracy_param', full_name='caffe.V1LayerParameter.accuracy_param', index=12, + number=27, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='argmax_param', full_name='caffe.V1LayerParameter.argmax_param', index=13, + number=23, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='concat_param', full_name='caffe.V1LayerParameter.concat_param', index=14, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='contrastive_loss_param', full_name='caffe.V1LayerParameter.contrastive_loss_param', index=15, + number=40, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='convolution_param', full_name='caffe.V1LayerParameter.convolution_param', index=16, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data_param', full_name='caffe.V1LayerParameter.data_param', index=17, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dropout_param', full_name='caffe.V1LayerParameter.dropout_param', index=18, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dummy_data_param', full_name='caffe.V1LayerParameter.dummy_data_param', index=19, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eltwise_param', full_name='caffe.V1LayerParameter.eltwise_param', index=20, + number=24, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='exp_param', full_name='caffe.V1LayerParameter.exp_param', index=21, + number=41, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdf5_data_param', full_name='caffe.V1LayerParameter.hdf5_data_param', index=22, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdf5_output_param', full_name='caffe.V1LayerParameter.hdf5_output_param', index=23, + number=14, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hinge_loss_param', full_name='caffe.V1LayerParameter.hinge_loss_param', index=24, + number=29, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='image_data_param', full_name='caffe.V1LayerParameter.image_data_param', index=25, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='infogain_loss_param', full_name='caffe.V1LayerParameter.infogain_loss_param', index=26, + number=16, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='inner_product_param', full_name='caffe.V1LayerParameter.inner_product_param', index=27, + number=17, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lrn_param', full_name='caffe.V1LayerParameter.lrn_param', index=28, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='memory_data_param', full_name='caffe.V1LayerParameter.memory_data_param', index=29, + number=22, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mvn_param', full_name='caffe.V1LayerParameter.mvn_param', index=30, + number=34, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pooling_param', full_name='caffe.V1LayerParameter.pooling_param', index=31, + number=19, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='power_param', full_name='caffe.V1LayerParameter.power_param', index=32, + number=21, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='relu_param', full_name='caffe.V1LayerParameter.relu_param', index=33, + number=30, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sigmoid_param', full_name='caffe.V1LayerParameter.sigmoid_param', index=34, + number=38, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='softmax_param', full_name='caffe.V1LayerParameter.softmax_param', index=35, + number=39, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='slice_param', full_name='caffe.V1LayerParameter.slice_param', index=36, + number=31, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tanh_param', full_name='caffe.V1LayerParameter.tanh_param', index=37, + number=37, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='threshold_param', full_name='caffe.V1LayerParameter.threshold_param', index=38, + number=25, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='window_data_param', full_name='caffe.V1LayerParameter.window_data_param', index=39, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transform_param', full_name='caffe.V1LayerParameter.transform_param', index=40, + number=36, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loss_param', full_name='caffe.V1LayerParameter.loss_param', index=41, + number=42, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='detection_loss_param', full_name='caffe.V1LayerParameter.detection_loss_param', index=42, + number=200, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eval_detection_param', full_name='caffe.V1LayerParameter.eval_detection_param', index=43, + number=201, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='layer', full_name='caffe.V1LayerParameter.layer', index=44, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _V1LAYERPARAMETER_LAYERTYPE, + _V1LAYERPARAMETER_DIMCHECKMODE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22854, + serialized_end=25506, +) + + +_V0LAYERPARAMETER = _descriptor.Descriptor( + name='V0LayerParameter', + full_name='caffe.V0LayerParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='caffe.V0LayerParameter.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.V0LayerParameter.type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.V0LayerParameter.num_output', index=2, + 
number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='biasterm', full_name='caffe.V0LayerParameter.biasterm', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.V0LayerParameter.weight_filler', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.V0LayerParameter.bias_filler', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad', full_name='caffe.V0LayerParameter.pad', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernelsize', full_name='caffe.V0LayerParameter.kernelsize', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group', full_name='caffe.V0LayerParameter.group', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride', full_name='caffe.V0LayerParameter.stride', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pool', full_name='caffe.V0LayerParameter.pool', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dropout_ratio', full_name='caffe.V0LayerParameter.dropout_ratio', index=11, + number=12, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='local_size', full_name='caffe.V0LayerParameter.local_size', index=12, + number=13, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='alpha', full_name='caffe.V0LayerParameter.alpha', index=13, + number=14, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='beta', full_name='caffe.V0LayerParameter.beta', index=14, + number=15, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.75), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='k', full_name='caffe.V0LayerParameter.k', index=15, + number=22, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='caffe.V0LayerParameter.source', index=16, + number=16, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.V0LayerParameter.scale', index=17, + number=17, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='meanfile', full_name='caffe.V0LayerParameter.meanfile', index=18, + number=18, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='batchsize', full_name='caffe.V0LayerParameter.batchsize', index=19, + number=19, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cropsize', full_name='caffe.V0LayerParameter.cropsize', index=20, + number=20, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mirror', full_name='caffe.V0LayerParameter.mirror', index=21, + number=21, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blobs', full_name='caffe.V0LayerParameter.blobs', index=22, + number=50, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='blobs_lr', full_name='caffe.V0LayerParameter.blobs_lr', index=23, + number=51, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_decay', full_name='caffe.V0LayerParameter.weight_decay', index=24, + number=52, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rand_skip', full_name='caffe.V0LayerParameter.rand_skip', index=25, + number=53, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='det_fg_threshold', full_name='caffe.V0LayerParameter.det_fg_threshold', index=26, + number=54, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='det_bg_threshold', full_name='caffe.V0LayerParameter.det_bg_threshold', index=27, + number=55, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='det_fg_fraction', full_name='caffe.V0LayerParameter.det_fg_fraction', index=28, + number=56, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.25), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='det_context_pad', full_name='caffe.V0LayerParameter.det_context_pad', index=29, + number=58, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='det_crop_mode', full_name='caffe.V0LayerParameter.det_crop_mode', index=30, + number=59, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("warp").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_num', full_name='caffe.V0LayerParameter.new_num', index=31, + number=60, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_channels', full_name='caffe.V0LayerParameter.new_channels', index=32, + number=61, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_height', 
full_name='caffe.V0LayerParameter.new_height', index=33, + number=62, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_width', full_name='caffe.V0LayerParameter.new_width', index=34, + number=63, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shuffle_images', full_name='caffe.V0LayerParameter.shuffle_images', index=35, + number=64, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='concat_dim', full_name='caffe.V0LayerParameter.concat_dim', index=36, + number=65, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdf5_output_param', full_name='caffe.V0LayerParameter.hdf5_output_param', index=37, + number=1001, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _V0LAYERPARAMETER_POOLMETHOD, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=25509, + serialized_end=26530, +) + + +_PRELUPARAMETER = _descriptor.Descriptor( + name='PReLUParameter', + full_name='caffe.PReLUParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filler', full_name='caffe.PReLUParameter.filler', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='channel_shared', full_name='caffe.PReLUParameter.channel_shared', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=26532, + serialized_end=26619, +) + + +_RPNPARAMETER = _descriptor.Descriptor( + name='RPNParameter', + full_name='caffe.RPNParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='feat_stride', full_name='caffe.RPNParameter.feat_stride', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='basesize', full_name='caffe.RPNParameter.basesize', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.RPNParameter.scale', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ratio', full_name='caffe.RPNParameter.ratio', index=3, + number=4, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='boxminsize', full_name='caffe.RPNParameter.boxminsize', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='per_nms_topn', full_name='caffe.RPNParameter.per_nms_topn', index=5, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='post_nms_topn', full_name='caffe.RPNParameter.post_nms_topn', index=6, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='nms_thresh', full_name='caffe.RPNParameter.nms_thresh', index=7, + number=8, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=26622, + serialized_end=26790, +) + + +_VIDEODATAPARAMETER = _descriptor.Descriptor( + name='VideoDataParameter', + full_name='caffe.VideoDataParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='video_type', full_name='caffe.VideoDataParameter.video_type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_id', full_name='caffe.VideoDataParameter.device_id', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='video_file', 
full_name='caffe.VideoDataParameter.video_file', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='skip_frames', full_name='caffe.VideoDataParameter.skip_frames', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _VIDEODATAPARAMETER_VIDEOTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=26793, + serialized_end=26980, +) + + +_CENTERLOSSPARAMETER = _descriptor.Descriptor( + name='CenterLossParameter', + full_name='caffe.CenterLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.CenterLossParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='center_filler', full_name='caffe.CenterLossParameter.center_filler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.CenterLossParameter.axis', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=26982, + serialized_end=27087, +) + + +_MARGININNERPRODUCTPARAMETER = _descriptor.Descriptor( + name='MarginInnerProductParameter', + full_name='caffe.MarginInnerProductParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.MarginInnerProductParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='caffe.MarginInnerProductParameter.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.MarginInnerProductParameter.weight_filler', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.MarginInnerProductParameter.axis', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='base', full_name='caffe.MarginInnerProductParameter.base', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='gamma', full_name='caffe.MarginInnerProductParameter.gamma', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='power', full_name='caffe.MarginInnerProductParameter.power', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='iteration', full_name='caffe.MarginInnerProductParameter.iteration', index=7, + number=8, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lambda_min', full_name='caffe.MarginInnerProductParameter.lambda_min', index=8, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MARGININNERPRODUCTPARAMETER_MARGINTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=27090, + serialized_end=27435, +) + + +_ADDITIVEMARGININNERPRODUCTPARAMETER = _descriptor.Descriptor( + name='AdditiveMarginInnerProductParameter', + full_name='caffe.AdditiveMarginInnerProductParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.AdditiveMarginInnerProductParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.AdditiveMarginInnerProductParameter.weight_filler', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='m', full_name='caffe.AdditiveMarginInnerProductParameter.m', index=2, + number=3, type=2, 
cpp_type=6, label=1, + has_default_value=True, default_value=float(0.35), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', full_name='caffe.AdditiveMarginInnerProductParameter.axis', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=27438, + serialized_end=27576, +) + + +_DEFORMABLECONVOLUTIONPARAMETER = _descriptor.Descriptor( + name='DeformableConvolutionParameter', + full_name='caffe.DeformableConvolutionParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='num_output', full_name='caffe.DeformableConvolutionParameter.num_output', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_term', full_name='caffe.DeformableConvolutionParameter.bias_term', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad', full_name='caffe.DeformableConvolutionParameter.pad', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_size', full_name='caffe.DeformableConvolutionParameter.kernel_size', index=3, + number=4, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride', full_name='caffe.DeformableConvolutionParameter.stride', index=4, + number=6, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dilation', full_name='caffe.DeformableConvolutionParameter.dilation', index=5, + number=18, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_h', full_name='caffe.DeformableConvolutionParameter.pad_h', index=6, + number=9, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pad_w', 
full_name='caffe.DeformableConvolutionParameter.pad_w', index=7, + number=10, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_h', full_name='caffe.DeformableConvolutionParameter.kernel_h', index=8, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kernel_w', full_name='caffe.DeformableConvolutionParameter.kernel_w', index=9, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_h', full_name='caffe.DeformableConvolutionParameter.stride_h', index=10, + number=13, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stride_w', full_name='caffe.DeformableConvolutionParameter.stride_w', index=11, + number=14, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group', full_name='caffe.DeformableConvolutionParameter.group', index=12, + number=5, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='deformable_group', full_name='caffe.DeformableConvolutionParameter.deformable_group', index=13, + number=25, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weight_filler', full_name='caffe.DeformableConvolutionParameter.weight_filler', index=14, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bias_filler', full_name='caffe.DeformableConvolutionParameter.bias_filler', index=15, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='engine', full_name='caffe.DeformableConvolutionParameter.engine', index=16, + number=15, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='axis', 
full_name='caffe.DeformableConvolutionParameter.axis', index=17, + number=16, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='force_nd_im2col', full_name='caffe.DeformableConvolutionParameter.force_nd_im2col', index=18, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DEFORMABLECONVOLUTIONPARAMETER_ENGINE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=27579, + serialized_end=28136, +) + + +_LABELSPECIFICADDPARAMETER = _descriptor.Descriptor( + name='LabelSpecificAddParameter', + full_name='caffe.LabelSpecificAddParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bias', full_name='caffe.LabelSpecificAddParameter.bias', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transform_test', full_name='caffe.LabelSpecificAddParameter.transform_test', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28138, + serialized_end=28213, +) + + +_CHANNELSCALEPARAMETER = _descriptor.Descriptor( + name='ChannelScaleParameter', + full_name='caffe.ChannelScaleParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='do_forward', full_name='caffe.ChannelScaleParameter.do_forward', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='do_backward_feature', full_name='caffe.ChannelScaleParameter.do_backward_feature', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='do_backward_scale', full_name='caffe.ChannelScaleParameter.do_backward_scale', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='global_scale', full_name='caffe.ChannelScaleParameter.global_scale', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_global_scale', full_name='caffe.ChannelScaleParameter.max_global_scale', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1000), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_global_scale', full_name='caffe.ChannelScaleParameter.min_global_scale', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='init_global_scale', full_name='caffe.ChannelScaleParameter.init_global_scale', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28216, + serialized_end=28453, +) + + +_COSINADDMPARAMETER = _descriptor.Descriptor( + name='CosinAddmParameter', + full_name='caffe.CosinAddmParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='m', full_name='caffe.CosinAddmParameter.m', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.5), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transform_test', full_name='caffe.CosinAddmParameter.transform_test', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28455, + serialized_end=28522, +) + + +_COSINMULMPARAMETER = _descriptor.Descriptor( + name='CosinMulmParameter', + full_name='caffe.CosinMulmParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='m', full_name='caffe.CosinMulmParameter.m', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(4), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='transform_test', full_name='caffe.CosinMulmParameter.transform_test', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28524, + serialized_end=28589, +) + + +_COUPLEDCLUSTERLOSSPARAMETER = _descriptor.Descriptor( + name='CoupledClusterLossParameter', + full_name='caffe.CoupledClusterLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='margin', full_name='caffe.CoupledClusterLossParameter.margin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group_size', full_name='caffe.CoupledClusterLossParameter.group_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=3, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.CoupledClusterLossParameter.scale', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='log_flag', full_name='caffe.CoupledClusterLossParameter.log_flag', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28591, + serialized_end=28705, +) + + +_TRIPLETLOSSPARAMETER = _descriptor.Descriptor( + name='TripletLossParameter', + full_name='caffe.TripletLossParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='margin', full_name='caffe.TripletLossParameter.margin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group_size', full_name='caffe.TripletLossParameter.group_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=3, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='scale', full_name='caffe.TripletLossParameter.scale', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28707, + serialized_end=28789, +) + + +_GENERALTRIPLETPARAMETER = _descriptor.Descriptor( + name='GeneralTripletParameter', + 
full_name='caffe.GeneralTripletParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='margin', full_name='caffe.GeneralTripletParameter.margin', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(0.2), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='add_center_loss', full_name='caffe.GeneralTripletParameter.add_center_loss', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hardest_only', full_name='caffe.GeneralTripletParameter.hardest_only', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='positive_first', full_name='caffe.GeneralTripletParameter.positive_first', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='positive_upper_bound', full_name='caffe.GeneralTripletParameter.positive_upper_bound', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='positive_weight', full_name='caffe.GeneralTripletParameter.positive_weight', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='negative_weight', full_name='caffe.GeneralTripletParameter.negative_weight', index=6, + number=7, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=28792, + serialized_end=29018, +) + + +_ROIALIGNPARAMETER = _descriptor.Descriptor( + name='ROIAlignParameter', + full_name='caffe.ROIAlignParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pooled_h', full_name='caffe.ROIAlignParameter.pooled_h', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pooled_w', full_name='caffe.ROIAlignParameter.pooled_w', index=1, + number=2, type=13, cpp_type=3, label=1, + 
has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='spatial_scale', full_name='caffe.ROIAlignParameter.spatial_scale', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=float(1), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=29020, + serialized_end=29107, +) + +_BLOBPROTO.fields_by_name['shape'].message_type = _BLOBSHAPE +_BLOBPROTOVECTOR.fields_by_name['blobs'].message_type = _BLOBPROTO +_LABELMAP.fields_by_name['item'].message_type = _LABELMAPITEM +_BATCHSAMPLER.fields_by_name['sampler'].message_type = _SAMPLER +_BATCHSAMPLER.fields_by_name['sample_constraint'].message_type = _SAMPLECONSTRAINT +_EMITCONSTRAINT.fields_by_name['emit_type'].enum_type = _EMITCONSTRAINT_EMITTYPE +_EMITCONSTRAINT_EMITTYPE.containing_type = _EMITCONSTRAINT +_ANNOTATION.fields_by_name['bbox'].message_type = _NORMALIZEDBBOX +_ANNOTATIONGROUP.fields_by_name['annotation'].message_type = _ANNOTATION +_ANNOTATEDDATUM.fields_by_name['datum'].message_type = _DATUM +_ANNOTATEDDATUM.fields_by_name['type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE +_ANNOTATEDDATUM.fields_by_name['annotation_group'].message_type = _ANNOTATIONGROUP +_ANNOTATEDDATUM_ANNOTATIONTYPE.containing_type = _ANNOTATEDDATUM +_MTCNNDATUM.fields_by_name['datum'].message_type = _DATUM +_MTCNNDATUM.fields_by_name['roi'].message_type = _MTCNNBBOX +_FILLERPARAMETER.fields_by_name['variance_norm'].enum_type = _FILLERPARAMETER_VARIANCENORM +_FILLERPARAMETER_VARIANCENORM.containing_type = _FILLERPARAMETER +_NETPARAMETER.fields_by_name['input_shape'].message_type = _BLOBSHAPE +_NETPARAMETER.fields_by_name['state'].message_type = _NETSTATE +_NETPARAMETER.fields_by_name['layer'].message_type = _LAYERPARAMETER +_NETPARAMETER.fields_by_name['layers'].message_type = _V1LAYERPARAMETER +_SOLVERPARAMETER.fields_by_name['net_param'].message_type = _NETPARAMETER +_SOLVERPARAMETER.fields_by_name['train_net_param'].message_type = _NETPARAMETER +_SOLVERPARAMETER.fields_by_name['test_net_param'].message_type = _NETPARAMETER +_SOLVERPARAMETER.fields_by_name['train_state'].message_type = _NETSTATE +_SOLVERPARAMETER.fields_by_name['test_state'].message_type = _NETSTATE +_SOLVERPARAMETER.fields_by_name['snapshot_format'].enum_type = _SOLVERPARAMETER_SNAPSHOTFORMAT +_SOLVERPARAMETER.fields_by_name['solver_mode'].enum_type = _SOLVERPARAMETER_SOLVERMODE +_SOLVERPARAMETER.fields_by_name['solver_type'].enum_type = _SOLVERPARAMETER_SOLVERTYPE +_SOLVERPARAMETER_SNAPSHOTFORMAT.containing_type = _SOLVERPARAMETER +_SOLVERPARAMETER_SOLVERMODE.containing_type = _SOLVERPARAMETER +_SOLVERPARAMETER_SOLVERTYPE.containing_type = _SOLVERPARAMETER +_SOLVERSTATE.fields_by_name['history'].message_type = _BLOBPROTO +_NETSTATE.fields_by_name['phase'].enum_type = _PHASE +_NETSTATERULE.fields_by_name['phase'].enum_type = _PHASE +_PARAMSPEC.fields_by_name['share_mode'].enum_type = _PARAMSPEC_DIMCHECKMODE +_PARAMSPEC_DIMCHECKMODE.containing_type = _PARAMSPEC +_LAYERPARAMETER.fields_by_name['phase'].enum_type = _PHASE +_LAYERPARAMETER.fields_by_name['param'].message_type = _PARAMSPEC 
+_LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO +_LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE +_LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE +_LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER +_LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER +_LAYERPARAMETER.fields_by_name['detection_loss_param'].message_type = _DETECTIONLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['eval_detection_param'].message_type = _EVALDETECTIONPARAMETER +_LAYERPARAMETER.fields_by_name['region_loss_param'].message_type = _REGIONLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['reorg_param'].message_type = _REORGPARAMETER +_LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER +_LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER +_LAYERPARAMETER.fields_by_name['batch_norm_param'].message_type = _BATCHNORMPARAMETER +_LAYERPARAMETER.fields_by_name['bias_param'].message_type = _BIASPARAMETER +_LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER +_LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER +_LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER +_LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER +_LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER +_LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER +_LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER +_LAYERPARAMETER.fields_by_name['elu_param'].message_type = _ELUPARAMETER +_LAYERPARAMETER.fields_by_name['embed_param'].message_type = _EMBEDPARAMETER +_LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER +_LAYERPARAMETER.fields_by_name['flatten_param'].message_type = _FLATTENPARAMETER +_LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER +_LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER +_LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER +_LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER +_LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER +_LAYERPARAMETER.fields_by_name['input_param'].message_type = _INPUTPARAMETER +_LAYERPARAMETER.fields_by_name['log_param'].message_type = _LOGPARAMETER +_LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER +_LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER +_LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER +_LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER +_LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER +_LAYERPARAMETER.fields_by_name['prelu_param'].message_type = _PRELUPARAMETER +_LAYERPARAMETER.fields_by_name['python_param'].message_type = _PYTHONPARAMETER +_LAYERPARAMETER.fields_by_name['recurrent_param'].message_type = _RECURRENTPARAMETER +_LAYERPARAMETER.fields_by_name['reduction_param'].message_type = _REDUCTIONPARAMETER +_LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER +_LAYERPARAMETER.fields_by_name['reshape_param'].message_type = _RESHAPEPARAMETER 
+_LAYERPARAMETER.fields_by_name['roi_pooling_param'].message_type = _ROIPOOLINGPARAMETER +_LAYERPARAMETER.fields_by_name['scale_param'].message_type = _SCALEPARAMETER +_LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER +_LAYERPARAMETER.fields_by_name['smooth_l1_loss_param'].message_type = _SMOOTHL1LOSSPARAMETER +_LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER +_LAYERPARAMETER.fields_by_name['spp_param'].message_type = _SPPPARAMETER +_LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER +_LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER +_LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER +_LAYERPARAMETER.fields_by_name['tile_param'].message_type = _TILEPARAMETER +_LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER +_LAYERPARAMETER.fields_by_name['st_param'].message_type = _SPATIALTRANSFORMERPARAMETER +_LAYERPARAMETER.fields_by_name['st_loss_param'].message_type = _STLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['rpn_param'].message_type = _RPNPARAMETER +_LAYERPARAMETER.fields_by_name['focal_loss_param'].message_type = _FOCALLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['asdn_data_param'].message_type = _ASDNDATAPARAMETER +_LAYERPARAMETER.fields_by_name['bn_param'].message_type = _BNPARAMETER +_LAYERPARAMETER.fields_by_name['mtcnn_data_param'].message_type = _MTCNNDATAPARAMETER +_LAYERPARAMETER.fields_by_name['interp_param'].message_type = _INTERPPARAMETER +_LAYERPARAMETER.fields_by_name['psroi_pooling_param'].message_type = _PSROIPOOLINGPARAMETER +_LAYERPARAMETER.fields_by_name['annotated_data_param'].message_type = _ANNOTATEDDATAPARAMETER +_LAYERPARAMETER.fields_by_name['prior_box_param'].message_type = _PRIORBOXPARAMETER +_LAYERPARAMETER.fields_by_name['crop_param'].message_type = _CROPPARAMETER +_LAYERPARAMETER.fields_by_name['detection_evaluate_param'].message_type = _DETECTIONEVALUATEPARAMETER +_LAYERPARAMETER.fields_by_name['detection_output_param'].message_type = _DETECTIONOUTPUTPARAMETER +_LAYERPARAMETER.fields_by_name['multibox_loss_param'].message_type = _MULTIBOXLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['permute_param'].message_type = _PERMUTEPARAMETER +_LAYERPARAMETER.fields_by_name['video_data_param'].message_type = _VIDEODATAPARAMETER +_LAYERPARAMETER.fields_by_name['margin_inner_product_param'].message_type = _MARGININNERPRODUCTPARAMETER +_LAYERPARAMETER.fields_by_name['center_loss_param'].message_type = _CENTERLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['deformable_convolution_param'].message_type = _DEFORMABLECONVOLUTIONPARAMETER +_LAYERPARAMETER.fields_by_name['label_specific_add_param'].message_type = _LABELSPECIFICADDPARAMETER +_LAYERPARAMETER.fields_by_name['additive_margin_inner_product_param'].message_type = _ADDITIVEMARGININNERPRODUCTPARAMETER +_LAYERPARAMETER.fields_by_name['cosin_add_m_param'].message_type = _COSINADDMPARAMETER +_LAYERPARAMETER.fields_by_name['cosin_mul_m_param'].message_type = _COSINMULMPARAMETER +_LAYERPARAMETER.fields_by_name['channel_scale_param'].message_type = _CHANNELSCALEPARAMETER +_LAYERPARAMETER.fields_by_name['flip_param'].message_type = _FLIPPARAMETER +_LAYERPARAMETER.fields_by_name['triplet_loss_param'].message_type = _TRIPLETLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['coupled_cluster_loss_param'].message_type = _COUPLEDCLUSTERLOSSPARAMETER +_LAYERPARAMETER.fields_by_name['general_triplet_loss_param'].message_type = _GENERALTRIPLETPARAMETER 
+_LAYERPARAMETER.fields_by_name['roi_align_param'].message_type = _ROIALIGNPARAMETER +_LAYERPARAMETER.fields_by_name['upsample_param'].message_type = _UPSAMPLEPARAMETER +_LAYERPARAMETER.fields_by_name['matmul_param'].message_type = _MATMULPARAMETER +_LAYERPARAMETER.fields_by_name['pass_through_param'].message_type = _PASSTHROUGHPARAMETER +_LAYERPARAMETER.fields_by_name['norm_param'].message_type = _NORMALIZEPARAMETER +_NORMALIZEPARAMETER.fields_by_name['scale_filler'].message_type = _FILLERPARAMETER +_ANNOTATEDDATAPARAMETER.fields_by_name['batch_sampler'].message_type = _BATCHSAMPLER +_ANNOTATEDDATAPARAMETER.fields_by_name['anno_type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE +_BNPARAMETER.fields_by_name['slope_filler'].message_type = _FILLERPARAMETER +_BNPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_BNPARAMETER.fields_by_name['engine'].enum_type = _BNPARAMETER_ENGINE +_BNPARAMETER_ENGINE.containing_type = _BNPARAMETER +_FOCALLOSSPARAMETER.fields_by_name['type'].enum_type = _FOCALLOSSPARAMETER_TYPE +_FOCALLOSSPARAMETER_TYPE.containing_type = _FOCALLOSSPARAMETER +_TRANSFORMATIONPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER +_TRANSFORMATIONPARAMETER.fields_by_name['noise_param'].message_type = _NOISEPARAMETER +_TRANSFORMATIONPARAMETER.fields_by_name['distort_param'].message_type = _DISTORTIONPARAMETER +_TRANSFORMATIONPARAMETER.fields_by_name['expand_param'].message_type = _EXPANSIONPARAMETER +_TRANSFORMATIONPARAMETER.fields_by_name['emit_constraint'].message_type = _EMITCONSTRAINT +_RESIZEPARAMETER.fields_by_name['resize_mode'].enum_type = _RESIZEPARAMETER_RESIZE_MODE +_RESIZEPARAMETER.fields_by_name['pad_mode'].enum_type = _RESIZEPARAMETER_PAD_MODE +_RESIZEPARAMETER.fields_by_name['interp_mode'].enum_type = _RESIZEPARAMETER_INTERP_MODE +_RESIZEPARAMETER_RESIZE_MODE.containing_type = _RESIZEPARAMETER +_RESIZEPARAMETER_PAD_MODE.containing_type = _RESIZEPARAMETER +_RESIZEPARAMETER_INTERP_MODE.containing_type = _RESIZEPARAMETER +_NOISEPARAMETER.fields_by_name['saltpepper_param'].message_type = _SALTPEPPERPARAMETER +_LOSSPARAMETER.fields_by_name['normalization'].enum_type = _LOSSPARAMETER_NORMALIZATIONMODE +_LOSSPARAMETER_NORMALIZATIONMODE.containing_type = _LOSSPARAMETER +_BIASPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER +_EVALDETECTIONPARAMETER.fields_by_name['score_type'].enum_type = _EVALDETECTIONPARAMETER_SCORETYPE +_EVALDETECTIONPARAMETER_SCORETYPE.containing_type = _EVALDETECTIONPARAMETER +_CONVOLUTIONPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_CONVOLUTIONPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_CONVOLUTIONPARAMETER.fields_by_name['engine'].enum_type = _CONVOLUTIONPARAMETER_ENGINE +_CONVOLUTIONPARAMETER_ENGINE.containing_type = _CONVOLUTIONPARAMETER +_DATAPARAMETER.fields_by_name['backend'].enum_type = _DATAPARAMETER_DB +_DATAPARAMETER_DB.containing_type = _DATAPARAMETER +_DETECTIONEVALUATEPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER +_SAVEOUTPUTPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER +_DETECTIONOUTPUTPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER +_DETECTIONOUTPUTPARAMETER.fields_by_name['save_output_param'].message_type = _SAVEOUTPUTPARAMETER +_DETECTIONOUTPUTPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE +_DUMMYDATAPARAMETER.fields_by_name['data_filler'].message_type = _FILLERPARAMETER 
+_DUMMYDATAPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE +_ELTWISEPARAMETER.fields_by_name['operation'].enum_type = _ELTWISEPARAMETER_ELTWISEOP +_ELTWISEPARAMETER_ELTWISEOP.containing_type = _ELTWISEPARAMETER +_EMBEDPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_EMBEDPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_HINGELOSSPARAMETER.fields_by_name['norm'].enum_type = _HINGELOSSPARAMETER_NORM +_HINGELOSSPARAMETER_NORM.containing_type = _HINGELOSSPARAMETER +_INNERPRODUCTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_INNERPRODUCTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_INPUTPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE +_LRNPARAMETER.fields_by_name['norm_region'].enum_type = _LRNPARAMETER_NORMREGION +_LRNPARAMETER.fields_by_name['engine'].enum_type = _LRNPARAMETER_ENGINE +_LRNPARAMETER_NORMREGION.containing_type = _LRNPARAMETER +_LRNPARAMETER_ENGINE.containing_type = _LRNPARAMETER +_MULTIBOXLOSSPARAMETER.fields_by_name['loc_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_LOCLOSSTYPE +_MULTIBOXLOSSPARAMETER.fields_by_name['conf_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_CONFLOSSTYPE +_MULTIBOXLOSSPARAMETER.fields_by_name['match_type'].enum_type = _MULTIBOXLOSSPARAMETER_MATCHTYPE +_MULTIBOXLOSSPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE +_MULTIBOXLOSSPARAMETER.fields_by_name['mining_type'].enum_type = _MULTIBOXLOSSPARAMETER_MININGTYPE +_MULTIBOXLOSSPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER +_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER +_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER +_MULTIBOXLOSSPARAMETER_MATCHTYPE.containing_type = _MULTIBOXLOSSPARAMETER +_MULTIBOXLOSSPARAMETER_MININGTYPE.containing_type = _MULTIBOXLOSSPARAMETER +_PARAMETERPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE +_POOLINGPARAMETER.fields_by_name['pool'].enum_type = _POOLINGPARAMETER_POOLMETHOD +_POOLINGPARAMETER.fields_by_name['engine'].enum_type = _POOLINGPARAMETER_ENGINE +_POOLINGPARAMETER_POOLMETHOD.containing_type = _POOLINGPARAMETER +_POOLINGPARAMETER_ENGINE.containing_type = _POOLINGPARAMETER +_PRIORBOXPARAMETER_CODETYPE.containing_type = _PRIORBOXPARAMETER +_RECURRENTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_RECURRENTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_REDUCTIONPARAMETER.fields_by_name['operation'].enum_type = _REDUCTIONPARAMETER_REDUCTIONOP +_REDUCTIONPARAMETER_REDUCTIONOP.containing_type = _REDUCTIONPARAMETER +_RELUPARAMETER.fields_by_name['engine'].enum_type = _RELUPARAMETER_ENGINE +_RELUPARAMETER_ENGINE.containing_type = _RELUPARAMETER +_RESHAPEPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE +_SCALEPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER +_SCALEPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_SIGMOIDPARAMETER.fields_by_name['engine'].enum_type = _SIGMOIDPARAMETER_ENGINE +_SIGMOIDPARAMETER_ENGINE.containing_type = _SIGMOIDPARAMETER +_SOFTMAXPARAMETER.fields_by_name['engine'].enum_type = _SOFTMAXPARAMETER_ENGINE +_SOFTMAXPARAMETER_ENGINE.containing_type = _SOFTMAXPARAMETER +_TANHPARAMETER.fields_by_name['engine'].enum_type = _TANHPARAMETER_ENGINE +_TANHPARAMETER_ENGINE.containing_type = _TANHPARAMETER +_SPPPARAMETER.fields_by_name['pool'].enum_type = _SPPPARAMETER_POOLMETHOD 
+_SPPPARAMETER.fields_by_name['engine'].enum_type = _SPPPARAMETER_ENGINE +_SPPPARAMETER_POOLMETHOD.containing_type = _SPPPARAMETER +_SPPPARAMETER_ENGINE.containing_type = _SPPPARAMETER +_V1LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE +_V1LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE +_V1LAYERPARAMETER.fields_by_name['type'].enum_type = _V1LAYERPARAMETER_LAYERTYPE +_V1LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO +_V1LAYERPARAMETER.fields_by_name['blob_share_mode'].enum_type = _V1LAYERPARAMETER_DIMCHECKMODE +_V1LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER +_V1LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER +_V1LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER +_V1LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER +_V1LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER +_V1LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER +_V1LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER +_V1LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER +_V1LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER +_V1LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER +_V1LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER +_V1LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER +_V1LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER +_V1LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER +_V1LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER +_V1LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER +_V1LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER +_V1LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER +_V1LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER +_V1LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER +_V1LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER +_V1LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER +_V1LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER +_V1LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER +_V1LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER +_V1LAYERPARAMETER.fields_by_name['detection_loss_param'].message_type = _DETECTIONLOSSPARAMETER +_V1LAYERPARAMETER.fields_by_name['eval_detection_param'].message_type = _EVALDETECTIONPARAMETER +_V1LAYERPARAMETER.fields_by_name['layer'].message_type = _V0LAYERPARAMETER +_V1LAYERPARAMETER_LAYERTYPE.containing_type = _V1LAYERPARAMETER +_V1LAYERPARAMETER_DIMCHECKMODE.containing_type = _V1LAYERPARAMETER 
+_V0LAYERPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_V0LAYERPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_V0LAYERPARAMETER.fields_by_name['pool'].enum_type = _V0LAYERPARAMETER_POOLMETHOD +_V0LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO +_V0LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER +_V0LAYERPARAMETER_POOLMETHOD.containing_type = _V0LAYERPARAMETER +_PRELUPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER +_VIDEODATAPARAMETER.fields_by_name['video_type'].enum_type = _VIDEODATAPARAMETER_VIDEOTYPE +_VIDEODATAPARAMETER_VIDEOTYPE.containing_type = _VIDEODATAPARAMETER +_CENTERLOSSPARAMETER.fields_by_name['center_filler'].message_type = _FILLERPARAMETER +_MARGININNERPRODUCTPARAMETER.fields_by_name['type'].enum_type = _MARGININNERPRODUCTPARAMETER_MARGINTYPE +_MARGININNERPRODUCTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_MARGININNERPRODUCTPARAMETER_MARGINTYPE.containing_type = _MARGININNERPRODUCTPARAMETER +_ADDITIVEMARGININNERPRODUCTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_DEFORMABLECONVOLUTIONPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER +_DEFORMABLECONVOLUTIONPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER +_DEFORMABLECONVOLUTIONPARAMETER.fields_by_name['engine'].enum_type = _DEFORMABLECONVOLUTIONPARAMETER_ENGINE +_DEFORMABLECONVOLUTIONPARAMETER_ENGINE.containing_type = _DEFORMABLECONVOLUTIONPARAMETER +DESCRIPTOR.message_types_by_name['BlobShape'] = _BLOBSHAPE +DESCRIPTOR.message_types_by_name['BlobProto'] = _BLOBPROTO +DESCRIPTOR.message_types_by_name['BlobProtoVector'] = _BLOBPROTOVECTOR +DESCRIPTOR.message_types_by_name['Datum'] = _DATUM +DESCRIPTOR.message_types_by_name['LabelMapItem'] = _LABELMAPITEM +DESCRIPTOR.message_types_by_name['LabelMap'] = _LABELMAP +DESCRIPTOR.message_types_by_name['Sampler'] = _SAMPLER +DESCRIPTOR.message_types_by_name['SampleConstraint'] = _SAMPLECONSTRAINT +DESCRIPTOR.message_types_by_name['BatchSampler'] = _BATCHSAMPLER +DESCRIPTOR.message_types_by_name['EmitConstraint'] = _EMITCONSTRAINT +DESCRIPTOR.message_types_by_name['NormalizedBBox'] = _NORMALIZEDBBOX +DESCRIPTOR.message_types_by_name['Annotation'] = _ANNOTATION +DESCRIPTOR.message_types_by_name['AnnotationGroup'] = _ANNOTATIONGROUP +DESCRIPTOR.message_types_by_name['AnnotatedDatum'] = _ANNOTATEDDATUM +DESCRIPTOR.message_types_by_name['MTCNNBBox'] = _MTCNNBBOX +DESCRIPTOR.message_types_by_name['MTCNNDatum'] = _MTCNNDATUM +DESCRIPTOR.message_types_by_name['FillerParameter'] = _FILLERPARAMETER +DESCRIPTOR.message_types_by_name['NetParameter'] = _NETPARAMETER +DESCRIPTOR.message_types_by_name['SolverParameter'] = _SOLVERPARAMETER +DESCRIPTOR.message_types_by_name['SolverState'] = _SOLVERSTATE +DESCRIPTOR.message_types_by_name['NetState'] = _NETSTATE +DESCRIPTOR.message_types_by_name['NetStateRule'] = _NETSTATERULE +DESCRIPTOR.message_types_by_name['SpatialTransformerParameter'] = _SPATIALTRANSFORMERPARAMETER +DESCRIPTOR.message_types_by_name['STLossParameter'] = _STLOSSPARAMETER +DESCRIPTOR.message_types_by_name['ParamSpec'] = _PARAMSPEC +DESCRIPTOR.message_types_by_name['LayerParameter'] = _LAYERPARAMETER +DESCRIPTOR.message_types_by_name['UpsampleParameter'] = _UPSAMPLEPARAMETER +DESCRIPTOR.message_types_by_name['MatMulParameter'] = _MATMULPARAMETER +DESCRIPTOR.message_types_by_name['PassThroughParameter'] = _PASSTHROUGHPARAMETER 
+DESCRIPTOR.message_types_by_name['NormalizeParameter'] = _NORMALIZEPARAMETER +DESCRIPTOR.message_types_by_name['AnnotatedDataParameter'] = _ANNOTATEDDATAPARAMETER +DESCRIPTOR.message_types_by_name['AsdnDataParameter'] = _ASDNDATAPARAMETER +DESCRIPTOR.message_types_by_name['MTCNNDataParameter'] = _MTCNNDATAPARAMETER +DESCRIPTOR.message_types_by_name['InterpParameter'] = _INTERPPARAMETER +DESCRIPTOR.message_types_by_name['PSROIPoolingParameter'] = _PSROIPOOLINGPARAMETER +DESCRIPTOR.message_types_by_name['FlipParameter'] = _FLIPPARAMETER +DESCRIPTOR.message_types_by_name['BNParameter'] = _BNPARAMETER +DESCRIPTOR.message_types_by_name['FocalLossParameter'] = _FOCALLOSSPARAMETER +DESCRIPTOR.message_types_by_name['TransformationParameter'] = _TRANSFORMATIONPARAMETER +DESCRIPTOR.message_types_by_name['ResizeParameter'] = _RESIZEPARAMETER +DESCRIPTOR.message_types_by_name['SaltPepperParameter'] = _SALTPEPPERPARAMETER +DESCRIPTOR.message_types_by_name['NoiseParameter'] = _NOISEPARAMETER +DESCRIPTOR.message_types_by_name['DistortionParameter'] = _DISTORTIONPARAMETER +DESCRIPTOR.message_types_by_name['ExpansionParameter'] = _EXPANSIONPARAMETER +DESCRIPTOR.message_types_by_name['LossParameter'] = _LOSSPARAMETER +DESCRIPTOR.message_types_by_name['AccuracyParameter'] = _ACCURACYPARAMETER +DESCRIPTOR.message_types_by_name['ArgMaxParameter'] = _ARGMAXPARAMETER +DESCRIPTOR.message_types_by_name['ConcatParameter'] = _CONCATPARAMETER +DESCRIPTOR.message_types_by_name['BatchNormParameter'] = _BATCHNORMPARAMETER +DESCRIPTOR.message_types_by_name['BiasParameter'] = _BIASPARAMETER +DESCRIPTOR.message_types_by_name['ContrastiveLossParameter'] = _CONTRASTIVELOSSPARAMETER +DESCRIPTOR.message_types_by_name['DetectionLossParameter'] = _DETECTIONLOSSPARAMETER +DESCRIPTOR.message_types_by_name['RegionLossParameter'] = _REGIONLOSSPARAMETER +DESCRIPTOR.message_types_by_name['ReorgParameter'] = _REORGPARAMETER +DESCRIPTOR.message_types_by_name['EvalDetectionParameter'] = _EVALDETECTIONPARAMETER +DESCRIPTOR.message_types_by_name['ConvolutionParameter'] = _CONVOLUTIONPARAMETER +DESCRIPTOR.message_types_by_name['CropParameter'] = _CROPPARAMETER +DESCRIPTOR.message_types_by_name['DataParameter'] = _DATAPARAMETER +DESCRIPTOR.message_types_by_name['DetectionEvaluateParameter'] = _DETECTIONEVALUATEPARAMETER +DESCRIPTOR.message_types_by_name['NonMaximumSuppressionParameter'] = _NONMAXIMUMSUPPRESSIONPARAMETER +DESCRIPTOR.message_types_by_name['SaveOutputParameter'] = _SAVEOUTPUTPARAMETER +DESCRIPTOR.message_types_by_name['DetectionOutputParameter'] = _DETECTIONOUTPUTPARAMETER +DESCRIPTOR.message_types_by_name['DropoutParameter'] = _DROPOUTPARAMETER +DESCRIPTOR.message_types_by_name['DummyDataParameter'] = _DUMMYDATAPARAMETER +DESCRIPTOR.message_types_by_name['EltwiseParameter'] = _ELTWISEPARAMETER +DESCRIPTOR.message_types_by_name['ELUParameter'] = _ELUPARAMETER +DESCRIPTOR.message_types_by_name['EmbedParameter'] = _EMBEDPARAMETER +DESCRIPTOR.message_types_by_name['ExpParameter'] = _EXPPARAMETER +DESCRIPTOR.message_types_by_name['FlattenParameter'] = _FLATTENPARAMETER +DESCRIPTOR.message_types_by_name['HDF5DataParameter'] = _HDF5DATAPARAMETER +DESCRIPTOR.message_types_by_name['HDF5OutputParameter'] = _HDF5OUTPUTPARAMETER +DESCRIPTOR.message_types_by_name['HingeLossParameter'] = _HINGELOSSPARAMETER +DESCRIPTOR.message_types_by_name['ImageDataParameter'] = _IMAGEDATAPARAMETER +DESCRIPTOR.message_types_by_name['InfogainLossParameter'] = _INFOGAINLOSSPARAMETER +DESCRIPTOR.message_types_by_name['InnerProductParameter'] = 
_INNERPRODUCTPARAMETER +DESCRIPTOR.message_types_by_name['InputParameter'] = _INPUTPARAMETER +DESCRIPTOR.message_types_by_name['LogParameter'] = _LOGPARAMETER +DESCRIPTOR.message_types_by_name['LRNParameter'] = _LRNPARAMETER +DESCRIPTOR.message_types_by_name['MemoryDataParameter'] = _MEMORYDATAPARAMETER +DESCRIPTOR.message_types_by_name['MultiBoxLossParameter'] = _MULTIBOXLOSSPARAMETER +DESCRIPTOR.message_types_by_name['PermuteParameter'] = _PERMUTEPARAMETER +DESCRIPTOR.message_types_by_name['MVNParameter'] = _MVNPARAMETER +DESCRIPTOR.message_types_by_name['ParameterParameter'] = _PARAMETERPARAMETER +DESCRIPTOR.message_types_by_name['PoolingParameter'] = _POOLINGPARAMETER +DESCRIPTOR.message_types_by_name['PowerParameter'] = _POWERPARAMETER +DESCRIPTOR.message_types_by_name['PriorBoxParameter'] = _PRIORBOXPARAMETER +DESCRIPTOR.message_types_by_name['PythonParameter'] = _PYTHONPARAMETER +DESCRIPTOR.message_types_by_name['RecurrentParameter'] = _RECURRENTPARAMETER +DESCRIPTOR.message_types_by_name['ReductionParameter'] = _REDUCTIONPARAMETER +DESCRIPTOR.message_types_by_name['ReLUParameter'] = _RELUPARAMETER +DESCRIPTOR.message_types_by_name['ReshapeParameter'] = _RESHAPEPARAMETER +DESCRIPTOR.message_types_by_name['ROIPoolingParameter'] = _ROIPOOLINGPARAMETER +DESCRIPTOR.message_types_by_name['ScaleParameter'] = _SCALEPARAMETER +DESCRIPTOR.message_types_by_name['SigmoidParameter'] = _SIGMOIDPARAMETER +DESCRIPTOR.message_types_by_name['SmoothL1LossParameter'] = _SMOOTHL1LOSSPARAMETER +DESCRIPTOR.message_types_by_name['SliceParameter'] = _SLICEPARAMETER +DESCRIPTOR.message_types_by_name['SoftmaxParameter'] = _SOFTMAXPARAMETER +DESCRIPTOR.message_types_by_name['TanHParameter'] = _TANHPARAMETER +DESCRIPTOR.message_types_by_name['TileParameter'] = _TILEPARAMETER +DESCRIPTOR.message_types_by_name['ThresholdParameter'] = _THRESHOLDPARAMETER +DESCRIPTOR.message_types_by_name['WindowDataParameter'] = _WINDOWDATAPARAMETER +DESCRIPTOR.message_types_by_name['SPPParameter'] = _SPPPARAMETER +DESCRIPTOR.message_types_by_name['V1LayerParameter'] = _V1LAYERPARAMETER +DESCRIPTOR.message_types_by_name['V0LayerParameter'] = _V0LAYERPARAMETER +DESCRIPTOR.message_types_by_name['PReLUParameter'] = _PRELUPARAMETER +DESCRIPTOR.message_types_by_name['RPNParameter'] = _RPNPARAMETER +DESCRIPTOR.message_types_by_name['VideoDataParameter'] = _VIDEODATAPARAMETER +DESCRIPTOR.message_types_by_name['CenterLossParameter'] = _CENTERLOSSPARAMETER +DESCRIPTOR.message_types_by_name['MarginInnerProductParameter'] = _MARGININNERPRODUCTPARAMETER +DESCRIPTOR.message_types_by_name['AdditiveMarginInnerProductParameter'] = _ADDITIVEMARGININNERPRODUCTPARAMETER +DESCRIPTOR.message_types_by_name['DeformableConvolutionParameter'] = _DEFORMABLECONVOLUTIONPARAMETER +DESCRIPTOR.message_types_by_name['LabelSpecificAddParameter'] = _LABELSPECIFICADDPARAMETER +DESCRIPTOR.message_types_by_name['ChannelScaleParameter'] = _CHANNELSCALEPARAMETER +DESCRIPTOR.message_types_by_name['CosinAddmParameter'] = _COSINADDMPARAMETER +DESCRIPTOR.message_types_by_name['CosinMulmParameter'] = _COSINMULMPARAMETER +DESCRIPTOR.message_types_by_name['CoupledClusterLossParameter'] = _COUPLEDCLUSTERLOSSPARAMETER +DESCRIPTOR.message_types_by_name['TripletLossParameter'] = _TRIPLETLOSSPARAMETER +DESCRIPTOR.message_types_by_name['GeneralTripletParameter'] = _GENERALTRIPLETPARAMETER +DESCRIPTOR.message_types_by_name['ROIAlignParameter'] = _ROIALIGNPARAMETER +DESCRIPTOR.enum_types_by_name['Phase'] = _PHASE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +BlobShape = 
_reflection.GeneratedProtocolMessageType('BlobShape', (_message.Message,), dict( + DESCRIPTOR = _BLOBSHAPE, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BlobShape) + )) +_sym_db.RegisterMessage(BlobShape) + +BlobProto = _reflection.GeneratedProtocolMessageType('BlobProto', (_message.Message,), dict( + DESCRIPTOR = _BLOBPROTO, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BlobProto) + )) +_sym_db.RegisterMessage(BlobProto) + +BlobProtoVector = _reflection.GeneratedProtocolMessageType('BlobProtoVector', (_message.Message,), dict( + DESCRIPTOR = _BLOBPROTOVECTOR, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BlobProtoVector) + )) +_sym_db.RegisterMessage(BlobProtoVector) + +Datum = _reflection.GeneratedProtocolMessageType('Datum', (_message.Message,), dict( + DESCRIPTOR = _DATUM, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.Datum) + )) +_sym_db.RegisterMessage(Datum) + +LabelMapItem = _reflection.GeneratedProtocolMessageType('LabelMapItem', (_message.Message,), dict( + DESCRIPTOR = _LABELMAPITEM, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LabelMapItem) + )) +_sym_db.RegisterMessage(LabelMapItem) + +LabelMap = _reflection.GeneratedProtocolMessageType('LabelMap', (_message.Message,), dict( + DESCRIPTOR = _LABELMAP, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LabelMap) + )) +_sym_db.RegisterMessage(LabelMap) + +Sampler = _reflection.GeneratedProtocolMessageType('Sampler', (_message.Message,), dict( + DESCRIPTOR = _SAMPLER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.Sampler) + )) +_sym_db.RegisterMessage(Sampler) + +SampleConstraint = _reflection.GeneratedProtocolMessageType('SampleConstraint', (_message.Message,), dict( + DESCRIPTOR = _SAMPLECONSTRAINT, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SampleConstraint) + )) +_sym_db.RegisterMessage(SampleConstraint) + +BatchSampler = _reflection.GeneratedProtocolMessageType('BatchSampler', (_message.Message,), dict( + DESCRIPTOR = _BATCHSAMPLER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BatchSampler) + )) +_sym_db.RegisterMessage(BatchSampler) + +EmitConstraint = _reflection.GeneratedProtocolMessageType('EmitConstraint', (_message.Message,), dict( + DESCRIPTOR = _EMITCONSTRAINT, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.EmitConstraint) + )) +_sym_db.RegisterMessage(EmitConstraint) + +NormalizedBBox = _reflection.GeneratedProtocolMessageType('NormalizedBBox', (_message.Message,), dict( + DESCRIPTOR = _NORMALIZEDBBOX, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NormalizedBBox) + )) +_sym_db.RegisterMessage(NormalizedBBox) + +Annotation = _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATION, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.Annotation) + )) +_sym_db.RegisterMessage(Annotation) + +AnnotationGroup = _reflection.GeneratedProtocolMessageType('AnnotationGroup', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATIONGROUP, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.AnnotationGroup) + )) +_sym_db.RegisterMessage(AnnotationGroup) + +AnnotatedDatum = _reflection.GeneratedProtocolMessageType('AnnotatedDatum', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATEDDATUM, + __module__ = 'caffe_pb2' + # 
@@protoc_insertion_point(class_scope:caffe.AnnotatedDatum) + )) +_sym_db.RegisterMessage(AnnotatedDatum) + +MTCNNBBox = _reflection.GeneratedProtocolMessageType('MTCNNBBox', (_message.Message,), dict( + DESCRIPTOR = _MTCNNBBOX, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MTCNNBBox) + )) +_sym_db.RegisterMessage(MTCNNBBox) + +MTCNNDatum = _reflection.GeneratedProtocolMessageType('MTCNNDatum', (_message.Message,), dict( + DESCRIPTOR = _MTCNNDATUM, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MTCNNDatum) + )) +_sym_db.RegisterMessage(MTCNNDatum) + +FillerParameter = _reflection.GeneratedProtocolMessageType('FillerParameter', (_message.Message,), dict( + DESCRIPTOR = _FILLERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.FillerParameter) + )) +_sym_db.RegisterMessage(FillerParameter) + +NetParameter = _reflection.GeneratedProtocolMessageType('NetParameter', (_message.Message,), dict( + DESCRIPTOR = _NETPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NetParameter) + )) +_sym_db.RegisterMessage(NetParameter) + +SolverParameter = _reflection.GeneratedProtocolMessageType('SolverParameter', (_message.Message,), dict( + DESCRIPTOR = _SOLVERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SolverParameter) + )) +_sym_db.RegisterMessage(SolverParameter) + +SolverState = _reflection.GeneratedProtocolMessageType('SolverState', (_message.Message,), dict( + DESCRIPTOR = _SOLVERSTATE, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SolverState) + )) +_sym_db.RegisterMessage(SolverState) + +NetState = _reflection.GeneratedProtocolMessageType('NetState', (_message.Message,), dict( + DESCRIPTOR = _NETSTATE, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NetState) + )) +_sym_db.RegisterMessage(NetState) + +NetStateRule = _reflection.GeneratedProtocolMessageType('NetStateRule', (_message.Message,), dict( + DESCRIPTOR = _NETSTATERULE, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NetStateRule) + )) +_sym_db.RegisterMessage(NetStateRule) + +SpatialTransformerParameter = _reflection.GeneratedProtocolMessageType('SpatialTransformerParameter', (_message.Message,), dict( + DESCRIPTOR = _SPATIALTRANSFORMERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SpatialTransformerParameter) + )) +_sym_db.RegisterMessage(SpatialTransformerParameter) + +STLossParameter = _reflection.GeneratedProtocolMessageType('STLossParameter', (_message.Message,), dict( + DESCRIPTOR = _STLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.STLossParameter) + )) +_sym_db.RegisterMessage(STLossParameter) + +ParamSpec = _reflection.GeneratedProtocolMessageType('ParamSpec', (_message.Message,), dict( + DESCRIPTOR = _PARAMSPEC, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ParamSpec) + )) +_sym_db.RegisterMessage(ParamSpec) + +LayerParameter = _reflection.GeneratedProtocolMessageType('LayerParameter', (_message.Message,), dict( + DESCRIPTOR = _LAYERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LayerParameter) + )) +_sym_db.RegisterMessage(LayerParameter) + +UpsampleParameter = _reflection.GeneratedProtocolMessageType('UpsampleParameter', (_message.Message,), dict( + DESCRIPTOR = _UPSAMPLEPARAMETER, + __module__ = 'caffe_pb2' + # 
@@protoc_insertion_point(class_scope:caffe.UpsampleParameter) + )) +_sym_db.RegisterMessage(UpsampleParameter) + +MatMulParameter = _reflection.GeneratedProtocolMessageType('MatMulParameter', (_message.Message,), dict( + DESCRIPTOR = _MATMULPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MatMulParameter) + )) +_sym_db.RegisterMessage(MatMulParameter) + +PassThroughParameter = _reflection.GeneratedProtocolMessageType('PassThroughParameter', (_message.Message,), dict( + DESCRIPTOR = _PASSTHROUGHPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PassThroughParameter) + )) +_sym_db.RegisterMessage(PassThroughParameter) + +NormalizeParameter = _reflection.GeneratedProtocolMessageType('NormalizeParameter', (_message.Message,), dict( + DESCRIPTOR = _NORMALIZEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NormalizeParameter) + )) +_sym_db.RegisterMessage(NormalizeParameter) + +AnnotatedDataParameter = _reflection.GeneratedProtocolMessageType('AnnotatedDataParameter', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATEDDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.AnnotatedDataParameter) + )) +_sym_db.RegisterMessage(AnnotatedDataParameter) + +AsdnDataParameter = _reflection.GeneratedProtocolMessageType('AsdnDataParameter', (_message.Message,), dict( + DESCRIPTOR = _ASDNDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.AsdnDataParameter) + )) +_sym_db.RegisterMessage(AsdnDataParameter) + +MTCNNDataParameter = _reflection.GeneratedProtocolMessageType('MTCNNDataParameter', (_message.Message,), dict( + DESCRIPTOR = _MTCNNDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MTCNNDataParameter) + )) +_sym_db.RegisterMessage(MTCNNDataParameter) + +InterpParameter = _reflection.GeneratedProtocolMessageType('InterpParameter', (_message.Message,), dict( + DESCRIPTOR = _INTERPPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.InterpParameter) + )) +_sym_db.RegisterMessage(InterpParameter) + +PSROIPoolingParameter = _reflection.GeneratedProtocolMessageType('PSROIPoolingParameter', (_message.Message,), dict( + DESCRIPTOR = _PSROIPOOLINGPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PSROIPoolingParameter) + )) +_sym_db.RegisterMessage(PSROIPoolingParameter) + +FlipParameter = _reflection.GeneratedProtocolMessageType('FlipParameter', (_message.Message,), dict( + DESCRIPTOR = _FLIPPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.FlipParameter) + )) +_sym_db.RegisterMessage(FlipParameter) + +BNParameter = _reflection.GeneratedProtocolMessageType('BNParameter', (_message.Message,), dict( + DESCRIPTOR = _BNPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BNParameter) + )) +_sym_db.RegisterMessage(BNParameter) + +FocalLossParameter = _reflection.GeneratedProtocolMessageType('FocalLossParameter', (_message.Message,), dict( + DESCRIPTOR = _FOCALLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.FocalLossParameter) + )) +_sym_db.RegisterMessage(FocalLossParameter) + +TransformationParameter = _reflection.GeneratedProtocolMessageType('TransformationParameter', (_message.Message,), dict( + DESCRIPTOR = _TRANSFORMATIONPARAMETER, + __module__ = 'caffe_pb2' + # 
@@protoc_insertion_point(class_scope:caffe.TransformationParameter) + )) +_sym_db.RegisterMessage(TransformationParameter) + +ResizeParameter = _reflection.GeneratedProtocolMessageType('ResizeParameter', (_message.Message,), dict( + DESCRIPTOR = _RESIZEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ResizeParameter) + )) +_sym_db.RegisterMessage(ResizeParameter) + +SaltPepperParameter = _reflection.GeneratedProtocolMessageType('SaltPepperParameter', (_message.Message,), dict( + DESCRIPTOR = _SALTPEPPERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SaltPepperParameter) + )) +_sym_db.RegisterMessage(SaltPepperParameter) + +NoiseParameter = _reflection.GeneratedProtocolMessageType('NoiseParameter', (_message.Message,), dict( + DESCRIPTOR = _NOISEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NoiseParameter) + )) +_sym_db.RegisterMessage(NoiseParameter) + +DistortionParameter = _reflection.GeneratedProtocolMessageType('DistortionParameter', (_message.Message,), dict( + DESCRIPTOR = _DISTORTIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DistortionParameter) + )) +_sym_db.RegisterMessage(DistortionParameter) + +ExpansionParameter = _reflection.GeneratedProtocolMessageType('ExpansionParameter', (_message.Message,), dict( + DESCRIPTOR = _EXPANSIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ExpansionParameter) + )) +_sym_db.RegisterMessage(ExpansionParameter) + +LossParameter = _reflection.GeneratedProtocolMessageType('LossParameter', (_message.Message,), dict( + DESCRIPTOR = _LOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LossParameter) + )) +_sym_db.RegisterMessage(LossParameter) + +AccuracyParameter = _reflection.GeneratedProtocolMessageType('AccuracyParameter', (_message.Message,), dict( + DESCRIPTOR = _ACCURACYPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.AccuracyParameter) + )) +_sym_db.RegisterMessage(AccuracyParameter) + +ArgMaxParameter = _reflection.GeneratedProtocolMessageType('ArgMaxParameter', (_message.Message,), dict( + DESCRIPTOR = _ARGMAXPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ArgMaxParameter) + )) +_sym_db.RegisterMessage(ArgMaxParameter) + +ConcatParameter = _reflection.GeneratedProtocolMessageType('ConcatParameter', (_message.Message,), dict( + DESCRIPTOR = _CONCATPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ConcatParameter) + )) +_sym_db.RegisterMessage(ConcatParameter) + +BatchNormParameter = _reflection.GeneratedProtocolMessageType('BatchNormParameter', (_message.Message,), dict( + DESCRIPTOR = _BATCHNORMPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BatchNormParameter) + )) +_sym_db.RegisterMessage(BatchNormParameter) + +BiasParameter = _reflection.GeneratedProtocolMessageType('BiasParameter', (_message.Message,), dict( + DESCRIPTOR = _BIASPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.BiasParameter) + )) +_sym_db.RegisterMessage(BiasParameter) + +ContrastiveLossParameter = _reflection.GeneratedProtocolMessageType('ContrastiveLossParameter', (_message.Message,), dict( + DESCRIPTOR = _CONTRASTIVELOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ContrastiveLossParameter) + )) 
+_sym_db.RegisterMessage(ContrastiveLossParameter) + +DetectionLossParameter = _reflection.GeneratedProtocolMessageType('DetectionLossParameter', (_message.Message,), dict( + DESCRIPTOR = _DETECTIONLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DetectionLossParameter) + )) +_sym_db.RegisterMessage(DetectionLossParameter) + +RegionLossParameter = _reflection.GeneratedProtocolMessageType('RegionLossParameter', (_message.Message,), dict( + DESCRIPTOR = _REGIONLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.RegionLossParameter) + )) +_sym_db.RegisterMessage(RegionLossParameter) + +ReorgParameter = _reflection.GeneratedProtocolMessageType('ReorgParameter', (_message.Message,), dict( + DESCRIPTOR = _REORGPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ReorgParameter) + )) +_sym_db.RegisterMessage(ReorgParameter) + +EvalDetectionParameter = _reflection.GeneratedProtocolMessageType('EvalDetectionParameter', (_message.Message,), dict( + DESCRIPTOR = _EVALDETECTIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.EvalDetectionParameter) + )) +_sym_db.RegisterMessage(EvalDetectionParameter) + +ConvolutionParameter = _reflection.GeneratedProtocolMessageType('ConvolutionParameter', (_message.Message,), dict( + DESCRIPTOR = _CONVOLUTIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ConvolutionParameter) + )) +_sym_db.RegisterMessage(ConvolutionParameter) + +CropParameter = _reflection.GeneratedProtocolMessageType('CropParameter', (_message.Message,), dict( + DESCRIPTOR = _CROPPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.CropParameter) + )) +_sym_db.RegisterMessage(CropParameter) + +DataParameter = _reflection.GeneratedProtocolMessageType('DataParameter', (_message.Message,), dict( + DESCRIPTOR = _DATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DataParameter) + )) +_sym_db.RegisterMessage(DataParameter) + +DetectionEvaluateParameter = _reflection.GeneratedProtocolMessageType('DetectionEvaluateParameter', (_message.Message,), dict( + DESCRIPTOR = _DETECTIONEVALUATEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DetectionEvaluateParameter) + )) +_sym_db.RegisterMessage(DetectionEvaluateParameter) + +NonMaximumSuppressionParameter = _reflection.GeneratedProtocolMessageType('NonMaximumSuppressionParameter', (_message.Message,), dict( + DESCRIPTOR = _NONMAXIMUMSUPPRESSIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.NonMaximumSuppressionParameter) + )) +_sym_db.RegisterMessage(NonMaximumSuppressionParameter) + +SaveOutputParameter = _reflection.GeneratedProtocolMessageType('SaveOutputParameter', (_message.Message,), dict( + DESCRIPTOR = _SAVEOUTPUTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SaveOutputParameter) + )) +_sym_db.RegisterMessage(SaveOutputParameter) + +DetectionOutputParameter = _reflection.GeneratedProtocolMessageType('DetectionOutputParameter', (_message.Message,), dict( + DESCRIPTOR = _DETECTIONOUTPUTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DetectionOutputParameter) + )) +_sym_db.RegisterMessage(DetectionOutputParameter) + +DropoutParameter = _reflection.GeneratedProtocolMessageType('DropoutParameter', (_message.Message,), dict( + DESCRIPTOR = _DROPOUTPARAMETER, + 
__module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DropoutParameter) + )) +_sym_db.RegisterMessage(DropoutParameter) + +DummyDataParameter = _reflection.GeneratedProtocolMessageType('DummyDataParameter', (_message.Message,), dict( + DESCRIPTOR = _DUMMYDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DummyDataParameter) + )) +_sym_db.RegisterMessage(DummyDataParameter) + +EltwiseParameter = _reflection.GeneratedProtocolMessageType('EltwiseParameter', (_message.Message,), dict( + DESCRIPTOR = _ELTWISEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.EltwiseParameter) + )) +_sym_db.RegisterMessage(EltwiseParameter) + +ELUParameter = _reflection.GeneratedProtocolMessageType('ELUParameter', (_message.Message,), dict( + DESCRIPTOR = _ELUPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ELUParameter) + )) +_sym_db.RegisterMessage(ELUParameter) + +EmbedParameter = _reflection.GeneratedProtocolMessageType('EmbedParameter', (_message.Message,), dict( + DESCRIPTOR = _EMBEDPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.EmbedParameter) + )) +_sym_db.RegisterMessage(EmbedParameter) + +ExpParameter = _reflection.GeneratedProtocolMessageType('ExpParameter', (_message.Message,), dict( + DESCRIPTOR = _EXPPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ExpParameter) + )) +_sym_db.RegisterMessage(ExpParameter) + +FlattenParameter = _reflection.GeneratedProtocolMessageType('FlattenParameter', (_message.Message,), dict( + DESCRIPTOR = _FLATTENPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.FlattenParameter) + )) +_sym_db.RegisterMessage(FlattenParameter) + +HDF5DataParameter = _reflection.GeneratedProtocolMessageType('HDF5DataParameter', (_message.Message,), dict( + DESCRIPTOR = _HDF5DATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.HDF5DataParameter) + )) +_sym_db.RegisterMessage(HDF5DataParameter) + +HDF5OutputParameter = _reflection.GeneratedProtocolMessageType('HDF5OutputParameter', (_message.Message,), dict( + DESCRIPTOR = _HDF5OUTPUTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.HDF5OutputParameter) + )) +_sym_db.RegisterMessage(HDF5OutputParameter) + +HingeLossParameter = _reflection.GeneratedProtocolMessageType('HingeLossParameter', (_message.Message,), dict( + DESCRIPTOR = _HINGELOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.HingeLossParameter) + )) +_sym_db.RegisterMessage(HingeLossParameter) + +ImageDataParameter = _reflection.GeneratedProtocolMessageType('ImageDataParameter', (_message.Message,), dict( + DESCRIPTOR = _IMAGEDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ImageDataParameter) + )) +_sym_db.RegisterMessage(ImageDataParameter) + +InfogainLossParameter = _reflection.GeneratedProtocolMessageType('InfogainLossParameter', (_message.Message,), dict( + DESCRIPTOR = _INFOGAINLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.InfogainLossParameter) + )) +_sym_db.RegisterMessage(InfogainLossParameter) + +InnerProductParameter = _reflection.GeneratedProtocolMessageType('InnerProductParameter', (_message.Message,), dict( + DESCRIPTOR = _INNERPRODUCTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.InnerProductParameter) + )) 
+_sym_db.RegisterMessage(InnerProductParameter) + +InputParameter = _reflection.GeneratedProtocolMessageType('InputParameter', (_message.Message,), dict( + DESCRIPTOR = _INPUTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.InputParameter) + )) +_sym_db.RegisterMessage(InputParameter) + +LogParameter = _reflection.GeneratedProtocolMessageType('LogParameter', (_message.Message,), dict( + DESCRIPTOR = _LOGPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LogParameter) + )) +_sym_db.RegisterMessage(LogParameter) + +LRNParameter = _reflection.GeneratedProtocolMessageType('LRNParameter', (_message.Message,), dict( + DESCRIPTOR = _LRNPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LRNParameter) + )) +_sym_db.RegisterMessage(LRNParameter) + +MemoryDataParameter = _reflection.GeneratedProtocolMessageType('MemoryDataParameter', (_message.Message,), dict( + DESCRIPTOR = _MEMORYDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MemoryDataParameter) + )) +_sym_db.RegisterMessage(MemoryDataParameter) + +MultiBoxLossParameter = _reflection.GeneratedProtocolMessageType('MultiBoxLossParameter', (_message.Message,), dict( + DESCRIPTOR = _MULTIBOXLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MultiBoxLossParameter) + )) +_sym_db.RegisterMessage(MultiBoxLossParameter) + +PermuteParameter = _reflection.GeneratedProtocolMessageType('PermuteParameter', (_message.Message,), dict( + DESCRIPTOR = _PERMUTEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PermuteParameter) + )) +_sym_db.RegisterMessage(PermuteParameter) + +MVNParameter = _reflection.GeneratedProtocolMessageType('MVNParameter', (_message.Message,), dict( + DESCRIPTOR = _MVNPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MVNParameter) + )) +_sym_db.RegisterMessage(MVNParameter) + +ParameterParameter = _reflection.GeneratedProtocolMessageType('ParameterParameter', (_message.Message,), dict( + DESCRIPTOR = _PARAMETERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ParameterParameter) + )) +_sym_db.RegisterMessage(ParameterParameter) + +PoolingParameter = _reflection.GeneratedProtocolMessageType('PoolingParameter', (_message.Message,), dict( + DESCRIPTOR = _POOLINGPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PoolingParameter) + )) +_sym_db.RegisterMessage(PoolingParameter) + +PowerParameter = _reflection.GeneratedProtocolMessageType('PowerParameter', (_message.Message,), dict( + DESCRIPTOR = _POWERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PowerParameter) + )) +_sym_db.RegisterMessage(PowerParameter) + +PriorBoxParameter = _reflection.GeneratedProtocolMessageType('PriorBoxParameter', (_message.Message,), dict( + DESCRIPTOR = _PRIORBOXPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PriorBoxParameter) + )) +_sym_db.RegisterMessage(PriorBoxParameter) + +PythonParameter = _reflection.GeneratedProtocolMessageType('PythonParameter', (_message.Message,), dict( + DESCRIPTOR = _PYTHONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PythonParameter) + )) +_sym_db.RegisterMessage(PythonParameter) + +RecurrentParameter = _reflection.GeneratedProtocolMessageType('RecurrentParameter', (_message.Message,), dict( + DESCRIPTOR = 
_RECURRENTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.RecurrentParameter) + )) +_sym_db.RegisterMessage(RecurrentParameter) + +ReductionParameter = _reflection.GeneratedProtocolMessageType('ReductionParameter', (_message.Message,), dict( + DESCRIPTOR = _REDUCTIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ReductionParameter) + )) +_sym_db.RegisterMessage(ReductionParameter) + +ReLUParameter = _reflection.GeneratedProtocolMessageType('ReLUParameter', (_message.Message,), dict( + DESCRIPTOR = _RELUPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ReLUParameter) + )) +_sym_db.RegisterMessage(ReLUParameter) + +ReshapeParameter = _reflection.GeneratedProtocolMessageType('ReshapeParameter', (_message.Message,), dict( + DESCRIPTOR = _RESHAPEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ReshapeParameter) + )) +_sym_db.RegisterMessage(ReshapeParameter) + +ROIPoolingParameter = _reflection.GeneratedProtocolMessageType('ROIPoolingParameter', (_message.Message,), dict( + DESCRIPTOR = _ROIPOOLINGPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ROIPoolingParameter) + )) +_sym_db.RegisterMessage(ROIPoolingParameter) + +ScaleParameter = _reflection.GeneratedProtocolMessageType('ScaleParameter', (_message.Message,), dict( + DESCRIPTOR = _SCALEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ScaleParameter) + )) +_sym_db.RegisterMessage(ScaleParameter) + +SigmoidParameter = _reflection.GeneratedProtocolMessageType('SigmoidParameter', (_message.Message,), dict( + DESCRIPTOR = _SIGMOIDPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SigmoidParameter) + )) +_sym_db.RegisterMessage(SigmoidParameter) + +SmoothL1LossParameter = _reflection.GeneratedProtocolMessageType('SmoothL1LossParameter', (_message.Message,), dict( + DESCRIPTOR = _SMOOTHL1LOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SmoothL1LossParameter) + )) +_sym_db.RegisterMessage(SmoothL1LossParameter) + +SliceParameter = _reflection.GeneratedProtocolMessageType('SliceParameter', (_message.Message,), dict( + DESCRIPTOR = _SLICEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SliceParameter) + )) +_sym_db.RegisterMessage(SliceParameter) + +SoftmaxParameter = _reflection.GeneratedProtocolMessageType('SoftmaxParameter', (_message.Message,), dict( + DESCRIPTOR = _SOFTMAXPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SoftmaxParameter) + )) +_sym_db.RegisterMessage(SoftmaxParameter) + +TanHParameter = _reflection.GeneratedProtocolMessageType('TanHParameter', (_message.Message,), dict( + DESCRIPTOR = _TANHPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.TanHParameter) + )) +_sym_db.RegisterMessage(TanHParameter) + +TileParameter = _reflection.GeneratedProtocolMessageType('TileParameter', (_message.Message,), dict( + DESCRIPTOR = _TILEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.TileParameter) + )) +_sym_db.RegisterMessage(TileParameter) + +ThresholdParameter = _reflection.GeneratedProtocolMessageType('ThresholdParameter', (_message.Message,), dict( + DESCRIPTOR = _THRESHOLDPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ThresholdParameter) + )) 
+_sym_db.RegisterMessage(ThresholdParameter) + +WindowDataParameter = _reflection.GeneratedProtocolMessageType('WindowDataParameter', (_message.Message,), dict( + DESCRIPTOR = _WINDOWDATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.WindowDataParameter) + )) +_sym_db.RegisterMessage(WindowDataParameter) + +SPPParameter = _reflection.GeneratedProtocolMessageType('SPPParameter', (_message.Message,), dict( + DESCRIPTOR = _SPPPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.SPPParameter) + )) +_sym_db.RegisterMessage(SPPParameter) + +V1LayerParameter = _reflection.GeneratedProtocolMessageType('V1LayerParameter', (_message.Message,), dict( + DESCRIPTOR = _V1LAYERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.V1LayerParameter) + )) +_sym_db.RegisterMessage(V1LayerParameter) + +V0LayerParameter = _reflection.GeneratedProtocolMessageType('V0LayerParameter', (_message.Message,), dict( + DESCRIPTOR = _V0LAYERPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.V0LayerParameter) + )) +_sym_db.RegisterMessage(V0LayerParameter) + +PReLUParameter = _reflection.GeneratedProtocolMessageType('PReLUParameter', (_message.Message,), dict( + DESCRIPTOR = _PRELUPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.PReLUParameter) + )) +_sym_db.RegisterMessage(PReLUParameter) + +RPNParameter = _reflection.GeneratedProtocolMessageType('RPNParameter', (_message.Message,), dict( + DESCRIPTOR = _RPNPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.RPNParameter) + )) +_sym_db.RegisterMessage(RPNParameter) + +VideoDataParameter = _reflection.GeneratedProtocolMessageType('VideoDataParameter', (_message.Message,), dict( + DESCRIPTOR = _VIDEODATAPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.VideoDataParameter) + )) +_sym_db.RegisterMessage(VideoDataParameter) + +CenterLossParameter = _reflection.GeneratedProtocolMessageType('CenterLossParameter', (_message.Message,), dict( + DESCRIPTOR = _CENTERLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.CenterLossParameter) + )) +_sym_db.RegisterMessage(CenterLossParameter) + +MarginInnerProductParameter = _reflection.GeneratedProtocolMessageType('MarginInnerProductParameter', (_message.Message,), dict( + DESCRIPTOR = _MARGININNERPRODUCTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.MarginInnerProductParameter) + )) +_sym_db.RegisterMessage(MarginInnerProductParameter) + +AdditiveMarginInnerProductParameter = _reflection.GeneratedProtocolMessageType('AdditiveMarginInnerProductParameter', (_message.Message,), dict( + DESCRIPTOR = _ADDITIVEMARGININNERPRODUCTPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.AdditiveMarginInnerProductParameter) + )) +_sym_db.RegisterMessage(AdditiveMarginInnerProductParameter) + +DeformableConvolutionParameter = _reflection.GeneratedProtocolMessageType('DeformableConvolutionParameter', (_message.Message,), dict( + DESCRIPTOR = _DEFORMABLECONVOLUTIONPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.DeformableConvolutionParameter) + )) +_sym_db.RegisterMessage(DeformableConvolutionParameter) + +LabelSpecificAddParameter = _reflection.GeneratedProtocolMessageType('LabelSpecificAddParameter', (_message.Message,), dict( + DESCRIPTOR = _LABELSPECIFICADDPARAMETER, + 
__module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.LabelSpecificAddParameter) + )) +_sym_db.RegisterMessage(LabelSpecificAddParameter) + +ChannelScaleParameter = _reflection.GeneratedProtocolMessageType('ChannelScaleParameter', (_message.Message,), dict( + DESCRIPTOR = _CHANNELSCALEPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ChannelScaleParameter) + )) +_sym_db.RegisterMessage(ChannelScaleParameter) + +CosinAddmParameter = _reflection.GeneratedProtocolMessageType('CosinAddmParameter', (_message.Message,), dict( + DESCRIPTOR = _COSINADDMPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.CosinAddmParameter) + )) +_sym_db.RegisterMessage(CosinAddmParameter) + +CosinMulmParameter = _reflection.GeneratedProtocolMessageType('CosinMulmParameter', (_message.Message,), dict( + DESCRIPTOR = _COSINMULMPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.CosinMulmParameter) + )) +_sym_db.RegisterMessage(CosinMulmParameter) + +CoupledClusterLossParameter = _reflection.GeneratedProtocolMessageType('CoupledClusterLossParameter', (_message.Message,), dict( + DESCRIPTOR = _COUPLEDCLUSTERLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.CoupledClusterLossParameter) + )) +_sym_db.RegisterMessage(CoupledClusterLossParameter) + +TripletLossParameter = _reflection.GeneratedProtocolMessageType('TripletLossParameter', (_message.Message,), dict( + DESCRIPTOR = _TRIPLETLOSSPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.TripletLossParameter) + )) +_sym_db.RegisterMessage(TripletLossParameter) + +GeneralTripletParameter = _reflection.GeneratedProtocolMessageType('GeneralTripletParameter', (_message.Message,), dict( + DESCRIPTOR = _GENERALTRIPLETPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.GeneralTripletParameter) + )) +_sym_db.RegisterMessage(GeneralTripletParameter) + +ROIAlignParameter = _reflection.GeneratedProtocolMessageType('ROIAlignParameter', (_message.Message,), dict( + DESCRIPTOR = _ROIALIGNPARAMETER, + __module__ = 'caffe_pb2' + # @@protoc_insertion_point(class_scope:caffe.ROIAlignParameter) + )) +_sym_db.RegisterMessage(ROIAlignParameter) + + +_BLOBSHAPE.fields_by_name['dim']._options = None +_BLOBPROTO.fields_by_name['data']._options = None +_BLOBPROTO.fields_by_name['diff']._options = None +_BLOBPROTO.fields_by_name['double_data']._options = None +_BLOBPROTO.fields_by_name['double_diff']._options = None +# @@protoc_insertion_point(module_scope) diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/layer_param.py b/thirdparty/fast-reid/tools/deploy/Caffe/layer_param.py new file mode 100644 index 0000000000000000000000000000000000000000..1e13a2426a9043321a9f20e15113a2177de9f66e --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/Caffe/layer_param.py @@ -0,0 +1,158 @@ +from __future__ import absolute_import +from . 
import caffe_pb2 as pb +import numpy as np + +def pair_process(item,strict_one=True): + if hasattr(item,'__iter__'): + for i in item: + if i!=item[0]: + if strict_one: + raise ValueError("number in item {} must be the same".format(item)) + else: + print("IMPORTANT WARNING: number in item {} must be the same".format(item)) + return item[0] + return item + +def pair_reduce(item): + if hasattr(item,'__iter__'): + for i in item: + if i!=item[0]: + return item + return [item[0]] + return [item] + +class Layer_param(): + def __init__(self,name='',type='',top=(),bottom=()): + self.param=pb.LayerParameter() + self.name=self.param.name=name + self.type=self.param.type=type + + self.top=self.param.top + self.top.extend(top) + self.bottom=self.param.bottom + self.bottom.extend(bottom) + + def fc_param(self, num_output, weight_filler='xavier', bias_filler='constant',has_bias=True): + if self.type != 'InnerProduct': + raise TypeError('the layer type must be InnerProduct if you want set fc param') + fc_param = pb.InnerProductParameter() + fc_param.num_output = num_output + fc_param.weight_filler.type = weight_filler + fc_param.bias_term = has_bias + if has_bias: + fc_param.bias_filler.type = bias_filler + self.param.inner_product_param.CopyFrom(fc_param) + + def conv_param(self, num_output, kernel_size, stride=(1), pad=(0,), + weight_filler_type='xavier', bias_filler_type='constant', + bias_term=True, dilation=None,groups=None): + """ + add a conv_param layer if you spec the layer type "Convolution" + Args: + num_output: a int + kernel_size: int list + stride: a int list + weight_filler_type: the weight filer type + bias_filler_type: the bias filler type + Returns: + """ + if self.type not in ['Convolution','Deconvolution']: + raise TypeError('the layer type must be Convolution or Deconvolution if you want set conv param') + conv_param=pb.ConvolutionParameter() + conv_param.num_output=num_output + conv_param.kernel_size.extend(pair_reduce(kernel_size)) + conv_param.stride.extend(pair_reduce(stride)) + conv_param.pad.extend(pair_reduce(pad)) + conv_param.bias_term=bias_term + conv_param.weight_filler.type=weight_filler_type + if bias_term: + conv_param.bias_filler.type = bias_filler_type + if dilation: + conv_param.dilation.extend(pair_reduce(dilation)) + if groups: + conv_param.group=groups + self.param.convolution_param.CopyFrom(conv_param) + + def pool_param(self,type='MAX',kernel_size=2,stride=2,pad=None, ceil_mode = False): + pool_param=pb.PoolingParameter() + pool_param.pool=pool_param.PoolMethod.Value(type) + pool_param.kernel_size=pair_process(kernel_size) + pool_param.stride=pair_process(stride) + pool_param.ceil_mode=ceil_mode + if pad: + if isinstance(pad,tuple): + pool_param.pad_h = pad[0] + pool_param.pad_w = pad[1] + else: + pool_param.pad=pad + self.param.pooling_param.CopyFrom(pool_param) + + def batch_norm_param(self,use_global_stats=0,moving_average_fraction=None,eps=None): + bn_param=pb.BatchNormParameter() + bn_param.use_global_stats=use_global_stats + if moving_average_fraction: + bn_param.moving_average_fraction=moving_average_fraction + if eps: + bn_param.eps = eps + self.param.batch_norm_param.CopyFrom(bn_param) + + # layer + # { + # name: "upsample_layer" + # type: "Upsample" + # bottom: "some_input_feature_map" + # bottom: "some_input_pool_index" + # top: "some_output" + # upsample_param { + # upsample_h: 224 + # upsample_w: 224 + # } + # } + def upsample_param(self,size=None, scale_factor=None): + upsample_param=pb.UpsampleParameter() + if scale_factor: + if 
isinstance(scale_factor,int): + upsample_param.scale = scale_factor + else: + upsample_param.scale_h = scale_factor[0] + upsample_param.scale_w = scale_factor[1] + + if size: + if isinstance(size,int): + upsample_param.upsample_h = size + else: + upsample_param.upsample_h = size[0] + upsample_param.upsample_w = size[1] + #upsample_param.upsample_h = size[0] * scale_factor + #upsample_param.upsample_w = size[1] * scale_factor + self.param.upsample_param.CopyFrom(upsample_param) + def interp_param(self,size=None, scale_factor=None): + interp_param=pb.InterpParameter() + if scale_factor: + if isinstance(scale_factor,int): + interp_param.zoom_factor = scale_factor + + if size: + print('size:', size) + interp_param.height = size[0] + interp_param.width = size[1] + self.param.interp_param.CopyFrom(interp_param) + + def add_data(self,*args): + """Args are data numpy array + """ + del self.param.blobs[:] + for data in args: + new_blob = self.param.blobs.add() + for dim in data.shape: + new_blob.shape.dim.append(dim) + new_blob.data.extend(data.flatten().astype(float)) + + def set_params_by_dict(self,dic): + pass + + def copy_from(self,layer_param): + pass + +def set_enum(param,key,value): + setattr(param,key,param.Value(value)) diff --git a/thirdparty/fast-reid/tools/deploy/Caffe/net.py b/thirdparty/fast-reid/tools/deploy/Caffe/net.py new file mode 100644 index 0000000000000000000000000000000000000000..d2291f5768a53fd1cd8e5ab8112191582895451e --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/Caffe/net.py @@ -0,0 +1 @@ +raise ImportError,'the nn_tools.Caffe.net is no longer used, please use nn_tools.Caffe.caffe_net' \ No newline at end of file diff --git a/thirdparty/fast-reid/tools/deploy/README.md b/thirdparty/fast-reid/tools/deploy/README.md new file mode 100644 index 0000000000000000000000000000000000000000..81db7807fd5c04f38187afc389561c957b856d3a --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/README.md @@ -0,0 +1,160 @@ +# Model Deployment + +This directory contains: + +1. The scripts that convert a fastreid model to Caffe/ONNX/TRT format. + +2. The exmpales that load a R50 baseline model in Caffe/ONNX/TRT and run inference. + +## Tutorial + +### Caffe Convert + +
+Step-by-step pipeline for Caffe conversion
+
+This is a small example that converts the fastreid baseline in `meta_arch` to a Caffe model; if you want to convert a more complex architecture, you will need to customize more things.
+
+1. Run `caffe_export.py` to get the converted Caffe model,
+
+   ```bash
+   python caffe_export.py --config-file root-path/market1501/bagtricks_R50/config.yml --name "baseline_R50" --output outputs/caffe_model --opts MODEL.WEIGHTS root-path/logs/market1501/bagtricks_R50/model_final.pth
+   ```
+
+   then you can check the Caffe model and prototxt in `outputs/caffe_model`.
+
+2. Change the `prototxt` in the following three steps:
+
+   1) Edit `max_pooling` in `baseline_R50.prototxt` like this
+
+      ```prototxt
+      layer {
+        name: "max_pool1"
+        type: "Pooling"
+        bottom: "relu_blob1"
+        top: "max_pool_blob1"
+        pooling_param {
+          pool: MAX
+          kernel_size: 3
+          stride: 2
+          pad: 0 # 1
+          # ceil_mode: false
+        }
+      }
+      ```
+
+   2) Add `avg_pooling` at the right place in `baseline_R50.prototxt`
+
+      ```prototxt
+      layer {
+        name: "avgpool1"
+        type: "Pooling"
+        bottom: "relu_blob49"
+        top: "avgpool_blob1"
+        pooling_param {
+          pool: AVE
+          global_pooling: true
+        }
+      }
+      ```
+
+   3) Change the last layer's `top` name to `output`
+
+      ```prototxt
+      layer {
+        name: "bn_scale54"
+        type: "Scale"
+        bottom: "batch_norm_blob54"
+        top: "output" # bn_norm_blob54
+        scale_param {
+          bias_term: true
+        }
+      }
+      ```
+
+3. (optional) You can open [Netscope](https://ethereon.github.io/netscope/quickstart.html), then paste your network `prototxt` there to visualize the network.
+
+4. Run `caffe_inference.py` to save the Caffe model's features for the input images
+
+   ```bash
+   python caffe_inference.py --model-def outputs/caffe_model/baseline_R50.prototxt \
+   --model-weights outputs/caffe_model/baseline_R50.caffemodel \
+   --input test_data/*.jpg --output caffe_output
+   ```
+
+5. Run `demo/demo.py` to get the fastreid model's features for the same input images, then verify that Caffe and PyTorch compute the same values for the network.
+
+   ```python
+   np.testing.assert_allclose(torch_out, caffe_out, rtol=1e-3, atol=1e-6)
+   ```
+
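+A quick way to run this check is to compare the saved feature files directly. The snippet below is a minimal sketch, assuming both sides dumped L2-normalized features to `.npy` files with matching names; the `torch_output` directory is a hypothetical location for the PyTorch features:
+
+   ```python
+   import numpy as np
+
+   # caffe_output/*.npy is written by caffe_inference.py; torch_output is an assumption
+   torch_out = np.load("torch_output/0022_c6s1_002976_01.npy")
+   caffe_out = np.load("caffe_output/0022_c6s1_002976_01.npy")
+
+   # both features are L2-normalized, so their dot product is the cosine similarity
+   cos = float((torch_out * caffe_out).sum())
+   print(f"cosine similarity: {cos:.6f}")  # should be very close to 1.0
+   np.testing.assert_allclose(torch_out, caffe_out, rtol=1e-3, atol=1e-6)
+   ```
+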
+ +### ONNX Convert + +
+Step-by-step pipeline for ONNX conversion
+
+This is a small example that converts the fastreid baseline in `meta_arch` to an ONNX model. ONNX supports most PyTorch operators; if an operator is not supported by ONNX, you will need to customize it.
+
+1. Run `onnx_export.py` to get the converted ONNX model,
+
+   ```bash
+   python onnx_export.py --config-file root-path/bagtricks_R50/config.yml --name "baseline_R50" --output outputs/onnx_model --opts MODEL.WEIGHTS root-path/logs/market1501/bagtricks_R50/model_final.pth
+   ```
+
+   then you can check the ONNX model in `outputs/onnx_model`.
+
+2. (optional) You can use [Netron](https://github.com/lutzroeder/netron) to visualize the network.
+
+3. Run `onnx_inference.py` to save the ONNX model's features for the input images
+
+   ```bash
+   python onnx_inference.py --model-path outputs/onnx_model/baseline_R50.onnx \
+   --input test_data/*.jpg --output onnx_output
+   ```
+
+4. Run `demo/demo.py` to get the fastreid model's features for the same input images, then verify that ONNX Runtime and PyTorch compute the same values for the network.
+
+   ```python
+   np.testing.assert_allclose(torch_out, ort_out, rtol=1e-3, atol=1e-6)
+   ```
+
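+For a one-off sanity check without the helper script, the snippet below is a minimal sketch of running the exported model with ONNX Runtime, mirroring what `onnx_inference.py` does; the image path is only an example:
+
+   ```python
+   import cv2
+   import numpy as np
+   import onnxruntime
+
+   sess = onnxruntime.InferenceSession("outputs/onnx_model/baseline_R50.onnx")
+   input_name = sess.get_inputs()[0].name
+
+   # BGR -> RGB, resize to width 128 / height 256, then NCHW float32 with a batch dim
+   img = cv2.imread("test_data/0022_c6s1_002976_01.jpg")[:, :, ::-1]
+   img = cv2.resize(img, (128, 256), interpolation=cv2.INTER_CUBIC)
+   img = img.astype("float32").transpose(2, 0, 1)[np.newaxis]
+
+   feat = sess.run(None, {input_name: img})[0]
+   print(feat.shape)  # (1, feature_dim)
+   ```
+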
+ +### TensorRT Convert + +
+Step-by-step pipeline for TensorRT conversion
+
+This is a small example that converts the fastreid baseline in `meta_arch` to a TRT model. We use [tiny-tensorrt](https://github.com/zerollzeng/tiny-tensorrt), a simple and easy-to-use NVIDIA TensorRT wrapper, to convert the model to TensorRT.
+
+First you need to convert the PyTorch model to ONNX format following [ONNX Convert](https://github.com/JDAI-CV/fast-reid/tree/master/tools/deploy#onnx-convert), and you need to remember your `output` name. Then you can convert the ONNX model to TensorRT following the instructions below.
+
+1. Run the command line below to get the converted TRT model from the ONNX model,
+
+   ```bash
+   python trt_export.py --name "baseline_R50" --output outputs/trt_model --onnx-model outputs/onnx_model/baseline.onnx --height 256 --width 128
+   ```
+
+   then you can check the TRT model in `outputs/trt_model`.
+
+2. Run `trt_inference.py` to save the TRT model's features for the input images
+
+   ```bash
+   python trt_inference.py --model-path outputs/trt_model/baseline.engine \
+   --input test_data/*.jpg --output trt_output --output-name trt_model_outputname
+   ```
+
+3. Run `demo/demo.py` to get the fastreid model's features for the same input images, then verify that TensorRT and PyTorch compute the same values for the network.
+
+   ```python
+   np.testing.assert_allclose(torch_out, trt_out, rtol=1e-3, atol=1e-6)
+   ```
+
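+Both steps can also be scripted directly against pytrt. The sketch below mirrors the calls made by `trt_export.py` and `trt_inference.py` in this directory; the paths and the output blob name are assumptions for illustration:
+
+   ```python
+   import numpy as np
+   import pytrt
+
+   trt = pytrt.Trt()
+   # build (or load) the engine; pass "" as the ONNX path to load an existing engine
+   trt.CreateEngine(
+       "outputs/onnx_model/baseline.onnx",   # onnxModel
+       "outputs/trt_model/baseline.engine",  # engineFile
+       [],                                   # customOutput
+       1,                                    # maxBatchSize
+       2,                                    # mode (same value the repo's scripts pass)
+       [],                                   # calibratorData
+   )
+
+   # run one dummy image through the engine and fetch the named output blob
+   dummy = np.zeros((1, 3, 256, 128), dtype=np.float32)
+   trt.DoInference(dummy)
+   feat = trt.GetOutput("trt_model_outputname")  # assumed output blob name
+   print(feat.shape)
+   ```
+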
+ +## Acknowledgements + +Thank to [CPFLAME](https://github.com/CPFLAME), [gcong18](https://github.com/gcong18), [YuxiangJohn](https://github.com/YuxiangJohn) and [wiggin66](https://github.com/wiggin66) at JDAI Model Acceleration Group for help in PyTorch model converting. diff --git a/thirdparty/fast-reid/tools/deploy/caffe_export.py b/thirdparty/fast-reid/tools/deploy/caffe_export.py new file mode 100644 index 0000000000000000000000000000000000000000..e6651b75ede2fbacefc3ab1185f6abf30900b33a --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/caffe_export.py @@ -0,0 +1,78 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import argparse + +import torch +import sys +sys.path.append('../../') + +import pytorch_to_caffe +from fastreid.config import get_cfg +from fastreid.modeling.meta_arch import build_model +from fastreid.utils.file_io import PathManager +from fastreid.utils.checkpoint import Checkpointer +from fastreid.utils.logger import setup_logger + +logger = setup_logger(name='caffe_export') + + +def setup_cfg(args): + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + return cfg + + +def get_parser(): + parser = argparse.ArgumentParser(description="Convert Pytorch to Caffe model") + + parser.add_argument( + "--config-file", + metavar="FILE", + help="path to config file", + ) + parser.add_argument( + "--name", + default="baseline", + help="name for converted model" + ) + parser.add_argument( + "--output", + default='caffe_model', + help='path to save converted caffe model' + ) + parser.add_argument( + "--opts", + help="Modify config options using the command-line 'KEY VALUE' pairs", + default=[], + nargs=argparse.REMAINDER, + ) + return parser + + +if __name__ == '__main__': + args = get_parser().parse_args() + cfg = setup_cfg(args) + + cfg.defrost() + cfg.MODEL.BACKBONE.PRETRAIN = False + cfg.MODEL.HEADS.POOL_LAYER = "identity" + cfg.MODEL.BACKBONE.WITH_NL = False + + model = build_model(cfg) + Checkpointer(model).load(cfg.MODEL.WEIGHTS) + model.eval() + logger.info(model) + + inputs = torch.randn(1, 3, cfg.INPUT.SIZE_TEST[0], cfg.INPUT.SIZE_TEST[1]).to(torch.device(cfg.MODEL.DEVICE)) + PathManager.mkdirs(args.output) + pytorch_to_caffe.trans_net(model, inputs, args.name) + pytorch_to_caffe.save_prototxt(f"{args.output}/{args.name}.prototxt") + pytorch_to_caffe.save_caffemodel(f"{args.output}/{args.name}.caffemodel") + + logger.info(f"Export caffe model in {args.output} sucessfully!") diff --git a/thirdparty/fast-reid/tools/deploy/caffe_inference.py b/thirdparty/fast-reid/tools/deploy/caffe_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..2956816814d1129f98198f0e5f3560f33d6cd169 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/caffe_inference.py @@ -0,0 +1,95 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import caffe +import tqdm +import glob +import os +import cv2 +import numpy as np + +caffe.set_mode_gpu() + +import argparse + + +def get_parser(): + parser = argparse.ArgumentParser(description="Caffe model inference") + + parser.add_argument( + "--model-def", + default="logs/test_caffe/baseline_R50.prototxt", + help="caffe model prototxt" + ) + parser.add_argument( + "--model-weights", + default="logs/test_caffe/baseline_R50.caffemodel", + help="caffe model weights" + ) + parser.add_argument( + "--input", + nargs="+", + help="A list of space separated input images; " + "or a single glob pattern such 
as 'directory/*.jpg'", + ) + parser.add_argument( + "--output", + default='caffe_output', + help='path to save converted caffe model' + ) + parser.add_argument( + "--height", + type=int, + default=256, + help="height of image" + ) + parser.add_argument( + "--width", + type=int, + default=128, + help="width of image" + ) + return parser + + +def preprocess(image_path, image_height, image_width): + original_image = cv2.imread(image_path) + # the model expects RGB inputs + original_image = original_image[:, :, ::-1] + + # Apply pre-processing to image. + image = cv2.resize(original_image, (image_width, image_height), interpolation=cv2.INTER_CUBIC) + image = image.astype("float32").transpose(2, 0, 1)[np.newaxis] # (1, 3, h, w) + image = (image - np.array([0.485 * 255, 0.456 * 255, 0.406 * 255]).reshape((1, -1, 1, 1))) / np.array( + [0.229 * 255, 0.224 * 255, 0.225 * 255]).reshape((1, -1, 1, 1)) + return image + + +def normalize(nparray, order=2, axis=-1): + """Normalize a N-D numpy array along the specified axis.""" + norm = np.linalg.norm(nparray, ord=order, axis=axis, keepdims=True) + return nparray / (norm + np.finfo(np.float32).eps) + + +if __name__ == "__main__": + args = get_parser().parse_args() + + net = caffe.Net(args.model_def, args.model_weights, caffe.TEST) + net.blobs['blob1'].reshape(1, 3, args.height, args.width) + + if not os.path.exists(args.output): os.makedirs(args.output) + + if args.input: + if os.path.isdir(args.input[0]): + args.input = glob.glob(os.path.expanduser(args.input[0])) + assert args.input, "The input path(s) was not found" + for path in tqdm.tqdm(args.input): + image = preprocess(path, args.height, args.width) + net.blobs['blob1'].data[...] = image + feat = net.forward()['output'] + feat = normalize(feat[..., 0, 0], axis=1) + np.save(os.path.join(args.output, path.replace('.jpg', '.npy').split('/')[-1]), feat) + diff --git a/thirdparty/fast-reid/tools/deploy/onnx_export.py b/thirdparty/fast-reid/tools/deploy/onnx_export.py new file mode 100644 index 0000000000000000000000000000000000000000..449db3a6b0e427435c9d832d381c8071f4f5426c --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/onnx_export.py @@ -0,0 +1,146 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import argparse +import io +import sys + +import onnx +import torch +from onnxsim import simplify +from torch.onnx import OperatorExportTypes + +sys.path.append('../../') + +from fastreid.config import get_cfg +from fastreid.modeling.meta_arch import build_model +from fastreid.utils.file_io import PathManager +from fastreid.utils.checkpoint import Checkpointer +from fastreid.utils.logger import setup_logger + +logger = setup_logger(name='onnx_export') + + +def setup_cfg(args): + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + return cfg + + +def get_parser(): + parser = argparse.ArgumentParser(description="Convert Pytorch to ONNX model") + + parser.add_argument( + "--config-file", + metavar="FILE", + help="path to config file", + ) + parser.add_argument( + "--name", + default="baseline", + help="name for converted model" + ) + parser.add_argument( + "--output", + default='onnx_model', + help='path to save converted onnx model' + ) + parser.add_argument( + "--opts", + help="Modify config options using the command-line 'KEY VALUE' pairs", + default=[], + nargs=argparse.REMAINDER, + ) + return parser + + +def remove_initializer_from_input(model): + if model.ir_version < 4: + print( + 'Model with 
ir_version below 4 requires to include initilizer in graph input' + ) + return + + inputs = model.graph.input + name_to_input = {} + for input in inputs: + name_to_input[input.name] = input + + for initializer in model.graph.initializer: + if initializer.name in name_to_input: + inputs.remove(name_to_input[initializer.name]) + + return model + + +def export_onnx_model(model, inputs): + """ + Trace and export a model to onnx format. + Args: + model (nn.Module): + inputs (torch.Tensor): the model will be called by `model(*inputs)` + Returns: + an onnx model + """ + assert isinstance(model, torch.nn.Module) + + # make sure all modules are in eval mode, onnx may change the training state + # of the module if the states are not consistent + def _check_eval(module): + assert not module.training + + model.apply(_check_eval) + + # Export the model to ONNX + with torch.no_grad(): + with io.BytesIO() as f: + torch.onnx.export( + model, + inputs, + f, + operator_export_type=OperatorExportTypes.ONNX_ATEN_FALLBACK, + # verbose=True, # NOTE: uncomment this for debugging + # export_params=True, + ) + onnx_model = onnx.load_from_string(f.getvalue()) + + # Apply ONNX's Optimization + all_passes = onnx.optimizer.get_available_passes() + passes = ["extract_constant_to_initializer", "eliminate_unused_initializer", "fuse_bn_into_conv"] + assert all(p in all_passes for p in passes) + onnx_model = onnx.optimizer.optimize(onnx_model, passes) + return onnx_model + + +if __name__ == '__main__': + args = get_parser().parse_args() + cfg = setup_cfg(args) + + cfg.defrost() + cfg.MODEL.BACKBONE.PRETRAIN = False + if cfg.MODEL.HEADS.POOL_LAYER == 'fastavgpool': + cfg.MODEL.HEADS.POOL_LAYER = 'avgpool' + model = build_model(cfg) + Checkpointer(model).load(cfg.MODEL.WEIGHTS) + model.eval() + logger.info(model) + + inputs = torch.randn(1, 3, cfg.INPUT.SIZE_TEST[0], cfg.INPUT.SIZE_TEST[1]) + onnx_model = export_onnx_model(model, inputs) + + model_simp, check = simplify(onnx_model) + + model_simp = remove_initializer_from_input(model_simp) + + assert check, "Simplified ONNX model could not be validated" + + PathManager.mkdirs(args.output) + + onnx.save_model(model_simp, f"{args.output}/{args.name}.onnx") + + logger.info(f"Export onnx model in {args.output} successfully!") diff --git a/thirdparty/fast-reid/tools/deploy/onnx_inference.py b/thirdparty/fast-reid/tools/deploy/onnx_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..2e29f620b1fb6f87d66fdd28624d96370d7807eb --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/onnx_inference.py @@ -0,0 +1,85 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import argparse +import glob +import os + +import cv2 +import numpy as np +import onnxruntime +import tqdm + + +def get_parser(): + parser = argparse.ArgumentParser(description="onnx model inference") + + parser.add_argument( + "--model-path", + default="onnx_model/baseline.onnx", + help="onnx model path" + ) + parser.add_argument( + "--input", + nargs="+", + help="A list of space separated input images; " + "or a single glob pattern such as 'directory/*.jpg'", + ) + parser.add_argument( + "--output", + default='onnx_output', + help='path to save converted caffe model' + ) + parser.add_argument( + "--height", + type=int, + default=256, + help="height of image" + ) + parser.add_argument( + "--width", + type=int, + default=128, + help="width of image" + ) + return parser + + +def preprocess(image_path, image_height, image_width): + original_image = 
cv2.imread(image_path)
+    # the model expects RGB inputs
+    original_image = original_image[:, :, ::-1]
+
+    # Apply pre-processing to image.
+    img = cv2.resize(original_image, (image_width, image_height), interpolation=cv2.INTER_CUBIC)
+    img = img.astype("float32").transpose(2, 0, 1)[np.newaxis]  # (1, 3, h, w)
+    return img
+
+
+def normalize(nparray, order=2, axis=-1):
+    """Normalize a N-D numpy array along the specified axis."""
+    norm = np.linalg.norm(nparray, ord=order, axis=axis, keepdims=True)
+    return nparray / (norm + np.finfo(np.float32).eps)
+
+
+if __name__ == "__main__":
+    args = get_parser().parse_args()
+
+    ort_sess = onnxruntime.InferenceSession(args.model_path)
+
+    input_name = ort_sess.get_inputs()[0].name
+
+    if not os.path.exists(args.output): os.makedirs(args.output)
+
+    if args.input:
+        if os.path.isdir(args.input[0]):
+            args.input = glob.glob(os.path.expanduser(args.input[0]))
+            assert args.input, "The input path(s) was not found"
+        for path in tqdm.tqdm(args.input):
+            image = preprocess(path, args.height, args.width)
+            feat = ort_sess.run(None, {input_name: image})[0]
+            feat = normalize(feat, axis=1)
+            np.save(os.path.join(args.output, path.replace('.jpg', '.npy').split('/')[-1]), feat)
diff --git a/thirdparty/fast-reid/tools/deploy/pytorch_to_caffe.py b/thirdparty/fast-reid/tools/deploy/pytorch_to_caffe.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a1aea976d28a8fe85fcd0575299ca7dbfb95bdb
--- /dev/null
+++ b/thirdparty/fast-reid/tools/deploy/pytorch_to_caffe.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import torch
+import torch.nn as nn
+import traceback
+from Caffe import caffe_net
+import torch.nn.functional as F
+from torch.autograd import Variable
+from Caffe import layer_param
+from torch.nn.modules.utils import _pair
+import numpy as np
+import math
+from torch.nn.modules.utils import _list_with_default
+
+"""
+How to support a new layer type:
+    layer_name = log.add_layer(layer_type_name)
+    top_blobs = log.add_blobs()
+    layer = caffe_net.Layer_param(xxx)
+    [set the layer-specific parameters here]
+    log.cnet.add_layer(layer)
+
+Please mute (replace) in-place operations; otherwise the source blob of an
+operation cannot be found in the recorded graph.
+"""
+
+
+# TODO: support the inplace output of the layers
+
+class Blob_LOG():
+    def __init__(self):
+        self.data = {}
+
+    def __setitem__(self, key, value):
+        self.data[key] = value
+
+    def __getitem__(self, key):
+        return self.data[key]
+
+    def __len__(self):
+        return len(self.data)
+
+
+NET_INITTED = False
+
+
+# How the conversion works: the Caffe net is rebuilt by recording every traced operator call.
+class TransLog(object):
+    def __init__(self):
+        """
+        Call init() with the input Variables before using this object.
+        """
+        self.layers = {}
+        self.detail_layers = {}
+        self.detail_blobs = {}
+        self._blobs = Blob_LOG()
+        self._blobs_data = []
+        self.cnet = caffe_net.Caffemodel('')
+        self.debug = True
+
+    def init(self, inputs):
+        """
+        :param inputs: a list of input variables
+        """
+        self.add_blobs(inputs)
+
+    def add_layer(self, name='layer'):
+        if name in self.layers:
+            return self.layers[name]
+        if name not in self.detail_layers.keys():
+            self.detail_layers[name] = 0
+        self.detail_layers[name] += 1
+        name = '{}{}'.format(name, self.detail_layers[name])
+        self.layers[name] = name
+        if self.debug:
+            print("{} was added to layers".format(self.layers[name]))
+        return self.layers[name]
+
+    def add_blobs(self, blobs, name='blob', with_num=True):
+        rst = []
+        for blob in blobs:
+            self._blobs_data.append(blob)  # keep a reference so the blob's memory address is not reused
+            blob_id = int(id(blob))
+            if name not in self.detail_blobs.keys():
+                self.detail_blobs[name] = 0
+            
self.detail_blobs[name] += 1 + if with_num: + rst.append('{}{}'.format(name, self.detail_blobs[name])) + else: + rst.append('{}'.format(name)) + if self.debug: + print("{}:{} was added to blobs".format(blob_id, rst[-1])) + print('Add blob {} : {}'.format(rst[-1].center(21), blob.size())) + self._blobs[blob_id] = rst[-1] + return rst + + def blobs(self, var): + var = id(var) + if self.debug: + print("{}:{} getting".format(var, self._blobs[var])) + try: + return self._blobs[var] + except: + print("WARNING: CANNOT FOUND blob {}".format(var)) + return None + + +log = TransLog() + +layer_names = {} + + +def _conv2d(raw, input, weight, bias=None, stride=1, padding=0, dilation=1, groups=1): + x = raw(input, weight, bias, stride, padding, dilation, groups) + name = log.add_layer(name='conv') + log.add_blobs([x], name='conv_blob') + layer = caffe_net.Layer_param(name=name, type='Convolution', + bottom=[log.blobs(input)], top=[log.blobs(x)]) + layer.conv_param(x.size()[1], weight.size()[2:], stride=_pair(stride), + pad=_pair(padding), dilation=_pair(dilation), bias_term=bias is not None, groups=groups) + if bias is not None: + layer.add_data(weight.cpu().data.numpy(), bias.cpu().data.numpy()) + #print('conv2d weight, bias: ',weight.cpu().data.numpy(), bias.cpu().data.numpy()) + + else: + layer.param.convolution_param.bias_term = False + layer.add_data(weight.cpu().data.numpy()) + log.cnet.add_layer(layer) + return x + + +def _conv_transpose2d(raw, input, weight, bias=None, stride=1, padding=0, output_padding=0, groups=1, dilation=1): + x = raw(input, weight, bias, stride, padding, output_padding, groups, dilation) + name = log.add_layer(name='conv_transpose') + log.add_blobs([x], name='conv_transpose_blob') + layer = caffe_net.Layer_param(name=name, type='Deconvolution', + bottom=[log.blobs(input)], top=[log.blobs(x)]) + layer.conv_param(x.size()[1], weight.size()[2:], stride=_pair(stride), + pad=_pair(padding), dilation=_pair(dilation), bias_term=bias is not None) + if bias is not None: + layer.add_data(weight.cpu().data.numpy(), bias.cpu().data.numpy()) + else: + layer.param.convolution_param.bias_term = False + layer.add_data(weight.cpu().data.numpy()) + log.cnet.add_layer(layer) + return x + + +def _linear(raw, input, weight, bias=None): + x = raw(input, weight, bias) + layer_name = log.add_layer(name='fc') + top_blobs = log.add_blobs([x], name='fc_blob') + layer = caffe_net.Layer_param(name=layer_name, type='InnerProduct', + bottom=[log.blobs(input)], top=top_blobs) + layer.fc_param(x.size()[1], has_bias=bias is not None) + if bias is not None: + layer.add_data(weight.cpu().data.numpy(), bias.cpu().data.numpy()) + else: + layer.add_data(weight.cpu().data.numpy()) + log.cnet.add_layer(layer) + return x + + +def _split(raw, tensor, split_size, dim=0): + # split in pytorch is slice in caffe + x = raw(tensor, split_size, dim) + layer_name = log.add_layer('split') + top_blobs = log.add_blobs(x, name='split_blob') + layer = caffe_net.Layer_param(name=layer_name, type='Slice', + bottom=[log.blobs(tensor)], top=top_blobs) + slice_num = int(np.floor(tensor.size()[dim] / split_size)) + slice_param = caffe_net.pb.SliceParameter(axis=dim, slice_point=[split_size * i for i in range(1, slice_num)]) + layer.param.slice_param.CopyFrom(slice_param) + log.cnet.add_layer(layer) + return x + + +def _pool(type, raw, input, x, kernel_size, stride, padding, ceil_mode): + # TODO dilation,ceil_mode,return indices + layer_name = log.add_layer(name='{}_pool'.format(type)) + top_blobs = log.add_blobs([x], 
name='{}_pool_blob'.format(type))
+    layer = caffe_net.Layer_param(name=layer_name, type='Pooling', bottom=[log.blobs(input)], top=top_blobs)
+
+    # TODO: support different kernel, stride and padding for w and h
+    # handle ceil mode
+    layer.pool_param(kernel_size=kernel_size, stride=kernel_size if stride is None else stride,
+                     pad=padding, type=type.upper())
+    log.cnet.add_layer(layer)
+    if not ceil_mode and stride is not None:
+        oheight = (input.size()[2] - _pair(kernel_size)[0] + 2 * _pair(padding)[0]) % (_pair(stride)[0])
+        owidth = (input.size()[3] - _pair(kernel_size)[1] + 2 * _pair(padding)[1]) % (_pair(stride)[1])
+        if oheight != 0 or owidth != 0:
+            caffe_out = raw(input, kernel_size, stride, padding, ceil_mode=False)
+            print("WARNING: output shape mismatch at {}: "
+                  "input {} output---PyTorch:{}---Caffe:{}\n"
+                  "This is caused by Caffe using ceil mode while PyTorch uses floor mode for pooling.\n"
+                  "You can add a crop/clip layer to the Caffe prototxt manually if a shape-mismatch error occurs in Caffe.".format(
+                layer_name, input.size(), x.size(), caffe_out.size()))
+
+
+def _max_pool2d(raw, input, kernel_size, stride=None, padding=0, dilation=1,
+                ceil_mode=False, return_indices=False):
+    x = raw(input, kernel_size, stride, padding, dilation, ceil_mode, return_indices)
+    _pool('max', raw, input, x, kernel_size, stride, padding, ceil_mode)
+    return x
+
+
+def _avg_pool2d(raw, input, kernel_size, stride=None, padding=0, ceil_mode=False, count_include_pad=True):
+    x = raw(input, kernel_size, stride, padding, ceil_mode, count_include_pad)
+    _pool('ave', raw, input, x, kernel_size, stride, padding, ceil_mode)
+    return x
+
+
+def _max(raw, *args):
+    x = raw(*args)
+    if len(args) == 1:
+        # TODO: max over a single tensor
+        raise NotImplementedError("torch.max over a single tensor is not implemented")
+    else:
+        bottom_blobs = []
+        for arg in args:
+            bottom_blobs.append(log.blobs(arg))
+        layer_name = log.add_layer(name='max')
+        top_blobs = log.add_blobs([x], name='max_blob')
+        layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                      bottom=bottom_blobs, top=top_blobs)
+        layer.param.eltwise_param.operation = 2
+        log.cnet.add_layer(layer)
+    return x
+
+
+def _cat(raw, inputs, dimension=0):
+    x = raw(inputs, dimension)
+    bottom_blobs = []
+    for input in inputs:
+        bottom_blobs.append(log.blobs(input))
+    layer_name = log.add_layer(name='cat')
+    top_blobs = log.add_blobs([x], name='cat_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Concat',
+                                  bottom=bottom_blobs, top=top_blobs)
+    layer.param.concat_param.axis = dimension
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _dropout(raw, input, p=0.5, training=False, inplace=False):
+    x = raw(input, p, training, inplace)
+    bottom_blobs = [log.blobs(input)]
+    layer_name = log.add_layer(name='dropout')
+    top_blobs = log.add_blobs([x], name=bottom_blobs[0], with_num=False)
+    layer = caffe_net.Layer_param(name=layer_name, type='Dropout',
+                                  bottom=bottom_blobs, top=top_blobs)
+    layer.param.dropout_param.dropout_ratio = p
+    layer.param.include.extend([caffe_net.pb.NetStateRule(phase=0)])  # 1 for test, 0 for train
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _threshold(raw, input, threshold, value, inplace=False):
+    # for threshold or relu
+    if threshold == 0 and value == 0:
+        x = raw(input, threshold, value, inplace)
+        bottom_blobs = [log.blobs(input)]
+        name = log.add_layer(name='relu')
+        log.add_blobs([x], name='relu_blob')
+        layer = caffe_net.Layer_param(name=name, type='ReLU',
+                                      bottom=bottom_blobs, top=[log.blobs(x)])
+        log.cnet.add_layer(layer)
+        return x
+    if value != 0:
+        raise NotImplementedError("value != 0 is not implemented in caffe")
+    x = raw(input, threshold, value, inplace)
+    bottom_blobs = [log.blobs(input)]
+    layer_name = log.add_layer(name='threshold')
+    top_blobs = log.add_blobs([x], name='threshold_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Threshold',
+                                  bottom=bottom_blobs, top=top_blobs)
+    layer.param.threshold_param.threshold = threshold
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _relu(raw, input, inplace=False):
+    # for relu
+    x = raw(input, False)
+    name = log.add_layer(name='relu')
+    log.add_blobs([x], name='relu_blob')
+    layer = caffe_net.Layer_param(name=name, type='ReLU',
+                                  bottom=[log.blobs(input)], top=[log.blobs(x)])
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _prelu(raw, input, weight):
+    # for prelu
+    x = raw(input, weight)
+    bottom_blobs = [log.blobs(input)]
+    name = log.add_layer(name='prelu')
+    log.add_blobs([x], name='prelu_blob')
+    layer = caffe_net.Layer_param(name=name, type='PReLU',
+                                  bottom=bottom_blobs, top=[log.blobs(x)])
+    if weight.size()[0] == 1:
+        layer.param.prelu_param.channel_shared = True
+        layer.add_data(weight.cpu().data.numpy()[0])
+    else:
+        layer.add_data(weight.cpu().data.numpy())
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _leaky_relu(raw, input, negative_slope=0.01, inplace=False):
+    x = raw(input, negative_slope)
+    name = log.add_layer(name='leaky_relu')
+    log.add_blobs([x], name='leaky_relu_blob')
+    layer = caffe_net.Layer_param(name=name, type='ReLU',
+                                  bottom=[log.blobs(input)], top=[log.blobs(x)])
+    layer.param.relu_param.negative_slope = negative_slope
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _tanh(raw, input):
+    # for tanh activation
+    x = raw(input)
+    name = log.add_layer(name='tanh')
+    log.add_blobs([x], name='tanh_blob')
+    layer = caffe_net.Layer_param(name=name, type='TanH',
+                                  bottom=[log.blobs(input)], top=[log.blobs(x)])
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _softmax(raw, input, dim=None, _stacklevel=3):
+    # for F.softmax
+    x = raw(input, dim=dim)
+    if dim is None:
+        dim = F._get_softmax_dim('softmax', input.dim(), _stacklevel)
+    bottom_blobs = [log.blobs(input)]
+    name = log.add_layer(name='softmax')
+    log.add_blobs([x], name='softmax_blob')
+    layer = caffe_net.Layer_param(name=name, type='Softmax',
+                                  bottom=bottom_blobs, top=[log.blobs(x)])
+    layer.param.softmax_param.axis = dim
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _sigmoid(raw, input):
+    # for sigmoid activation
+    x = raw(input)
+    name = log.add_layer(name='Sigmoid')
+    log.add_blobs([x], name='Sigmoid_blob')
+    layer = caffe_net.Layer_param(name=name, type='Sigmoid',
+                                  bottom=[log.blobs(input)], top=[log.blobs(x)])
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _batch_norm(raw, input, running_mean, running_var, weight=None, bias=None,
+                training=False, momentum=0.1, eps=1e-5):
+    # running_mean and running_var are modified by the _batch_norm call, so save the parameters first
+
+    x = raw(input, running_mean, running_var, weight, bias,
+            training, momentum, eps)
+    bottom_blobs = [log.blobs(input)]
+    layer_name1 = log.add_layer(name='batch_norm')
+    top_blobs = log.add_blobs([x], name='batch_norm_blob')
+    layer1 = caffe_net.Layer_param(name=layer_name1, type='BatchNorm',
+                                   bottom=bottom_blobs, top=top_blobs)
+    if running_mean is None or running_var is None:
+        # global_stats is not used; normalization is performed over the current mini-batch
+        layer1.batch_norm_param(use_global_stats=0, eps=eps)
+    else:
+        
layer1.batch_norm_param(use_global_stats=1, eps=eps)
+        running_mean_clone = running_mean.clone()
+        running_var_clone = running_var.clone()
+        layer1.add_data(running_mean_clone.cpu().numpy(), running_var_clone.cpu().numpy(), np.array([1.0]))
+        # print('running_mean: ', running_mean_clone.cpu().numpy())
+        # print('running_var: ', running_var_clone.cpu().numpy())
+    log.cnet.add_layer(layer1)
+    if weight is not None and bias is not None:
+        layer_name2 = log.add_layer(name='bn_scale')
+        layer2 = caffe_net.Layer_param(name=layer_name2, type='Scale',
+                                       bottom=top_blobs, top=top_blobs)
+        layer2.param.scale_param.bias_term = True
+        layer2.add_data(weight.cpu().data.numpy(), bias.cpu().data.numpy())
+        log.cnet.add_layer(layer2)
+        # print('scale weight: ', weight.cpu().data.numpy())
+        # print('scale bias: ', bias.cpu().data.numpy())
+    return x
+
+
+def _instance_norm(raw, input, running_mean=None, running_var=None, weight=None,
+                   bias=None, use_input_stats=True, momentum=0.1, eps=1e-5):
+    # TODO: handle batch size != 1 (view operations)
+    print("WARNING: Instance Normalization is transferred to Caffe as BatchNorm, so the batch size should be 1")
+    if running_var is not None or weight is not None:
+        # TODO: the affine=True or track_running_stats=True case
+        raise NotImplementedError("InstanceNorm with affine=True or track_running_stats=True is not implemented")
+    x = torch.batch_norm(
+        input, weight, bias, running_mean, running_var,
+        use_input_stats, momentum, eps, torch.backends.cudnn.enabled)
+    bottom_blobs = [log.blobs(input)]
+    layer_name1 = log.add_layer(name='instance_norm')
+    top_blobs = log.add_blobs([x], name='instance_norm_blob')
+    layer1 = caffe_net.Layer_param(name=layer_name1, type='BatchNorm',
+                                   bottom=bottom_blobs, top=top_blobs)
+    if running_mean is None or running_var is None:
+        # global_stats is not used; normalization is performed over the current mini-batch
+        layer1.batch_norm_param(use_global_stats=0, eps=eps)
+        running_mean = torch.zeros(input.size()[1])
+        running_var = torch.ones(input.size()[1])
+    else:
+        layer1.batch_norm_param(use_global_stats=1, eps=eps)
+    running_mean_clone = running_mean.clone()
+    running_var_clone = running_var.clone()
+    layer1.add_data(running_mean_clone.cpu().numpy(), running_var_clone.cpu().numpy(), np.array([1.0]))
+    log.cnet.add_layer(layer1)
+    if weight is not None and bias is not None:
+        layer_name2 = log.add_layer(name='bn_scale')
+        layer2 = caffe_net.Layer_param(name=layer_name2, type='Scale',
+                                       bottom=top_blobs, top=top_blobs)
+        layer2.param.scale_param.bias_term = True
+        layer2.add_data(weight.cpu().data.numpy(), bias.cpu().data.numpy())
+        log.cnet.add_layer(layer2)
+    return x
+
+
+# upsample layer
+def _interpolate(raw, input, size=None, scale_factor=None, mode='nearest', align_corners=None):
+    # Supported parameters: scale, the output/input size ratio (e.g. 2); scale_h and
+    # scale_w, the same ratio given separately for the h and w directions; pad_out_h and
+    # pad_out_w, extra padding applied to the output in the h/w directions (only useful
+    # when scale is 2); and upsample_h/upsample_w, the exact output size. In the
+    # Upsample-related code it is recommended to define the layer's output size only via
+    # upsample_h/upsample_w; all of the other parameters are deprecated.
+    '''
+    if mode == 'bilinear':
+        x = raw(input, size, scale_factor, mode)
+        name = log.add_layer(name='conv_transpose')
+        log.add_blobs([x], name='conv_transpose_blob')
+        layer = caffe_net.Layer_param(name=name, type='Deconvolution',
+                                      bottom=[log.blobs(input)], top=[log.blobs(x)])
+        print('Deconv: ', name)
+        print(input.shape)
+        print(x.size())
+        print(size)
+        factor = float(size[0]) / input.shape[2]
+        C = x.size()[1]
+        print(factor, C)
+        kernel_size = int(2 * factor - factor % 2)
+        stride = 
int(factor) + num_output = C + group = C + pad = math.ceil((factor-1) / 2.) + print('kernel_size, stride, num_output, group, pad') + print(kernel_size, stride, num_output, group, pad) + layer.conv_param(num_output, kernel_size, stride=stride, + pad=pad, weight_filler_type='bilinear', bias_term=False, groups=group) + + layer.param.convolution_param.bias_term = False + log.cnet.add_layer(layer) + return x + ''' + # transfer bilinear align_corners=True to caffe-interp + if mode == "bilinear" and align_corners == True: + x = raw(input, size, scale_factor, mode) + name = log.add_layer(name='interp') + log.add_blobs([x], name='interp_blob') + layer = caffe_net.Layer_param(name=name, type='Interp', + bottom=[log.blobs(input)], top=[log.blobs(x)]) + layer.interp_param(size=size, scale_factor=scale_factor) + log.cnet.add_layer(layer) + return x + + # for nearest _interpolate + if mode != "nearest" or align_corners != None: + raise NotImplementedError("not implement F.interpolate totoaly") + x = raw(input, size, scale_factor, mode) + layer_name = log.add_layer(name='upsample') + top_blobs = log.add_blobs([x], name='upsample_blob'.format(type)) + layer = caffe_net.Layer_param(name=layer_name, type='Upsample', + bottom=[log.blobs(input)], top=top_blobs) + #layer.upsample_param(size=(input.size(2), input.size(3)), scale_factor=scale_factor) + #layer.upsample_param(size=size, scale_factor=scale_factor) + layer.upsample_param(size=None, scale_factor=size[0]) + + log.cnet.add_layer(layer) + return x + + +# ----- for Variable operations -------- + +def _view(input, *args): + x = raw_view(input, *args) + if not NET_INITTED: + return x + layer_name = log.add_layer(name='view') + top_blobs = log.add_blobs([x], name='view_blob') + + # print('*'*60) + # print('input={}'.format(input)) + # print('layer_name={}'.format(layer_name)) + # print('top_blobs={}'.format(top_blobs)) + + layer = caffe_net.Layer_param(name=layer_name, type='Reshape', bottom=[log.blobs(input)], top=top_blobs) + # TODO: reshpae added to nn_tools layer + dims = list(args) + dims[0] = 0 # the first dim should be batch_size + layer.param.reshape_param.shape.CopyFrom(caffe_net.pb.BlobShape(dim=dims)) + log.cnet.add_layer(layer) + return x + + +def _mean(input, *args, **kwargs): + x = raw_mean(input, *args, **kwargs) + if not NET_INITTED: + return x + layer_name = log.add_layer(name='mean') + top_blobs = log.add_blobs([x], name='mean_blob') + layer = caffe_net.Layer_param(name=layer_name, type='Reduction', + bottom=[log.blobs(input)], top=top_blobs) + if len(args) == 1: + dim = args[0] + elif 'dim' in kwargs: + dim = kwargs['dim'] + else: + raise NotImplementedError('mean operation must specify a dim') + layer.param.reduction_param.operation = 4 + layer.param.reduction_param.axis = dim + log.cnet.add_layer(layer) + return x + + +def _add(input, *args): + # check if add a const value + if isinstance(args[0], int): + print('value: ',args[0]) + x = raw__add__(input, *args) + #x = raw(input) + layer_name = log.add_layer(name='scale') + log.add_blobs([x], name='Scale_blob') + layer = caffe_net.Layer_param(name=layer_name, type='Scale', + bottom=[log.blobs(input)], top=[log.blobs(x)]) + dim = x.shape[1] + layer.param.scale_param.bias_term = True + weight = np.ones(dim, dtype=np.float32) + bias = args[0] * np.ones(dim, dtype=np.float32) + layer.add_data(weight, bias) + log.cnet.add_layer(layer) + return x + # otherwise add a tensor + x = raw__add__(input, *args) + if not NET_INITTED: + return x + layer_name = log.add_layer(name='add') + top_blobs = 
log.add_blobs([x], name='add_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 1  # sum is 1
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _iadd(input, *args):
+    x = raw__iadd__(input, *args)
+    if not NET_INITTED:
+        return x
+    x = x.clone()
+    layer_name = log.add_layer(name='add')
+    top_blobs = log.add_blobs([x], name='add_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 1  # sum is 1
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _sub(input, *args):
+    x = raw__sub__(input, *args)
+    if not NET_INITTED:
+        return x
+    layer_name = log.add_layer(name='sub')
+    top_blobs = log.add_blobs([x], name='sub_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 1  # sum is 1
+    layer.param.eltwise_param.coeff.extend([1., -1.])
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _isub(input, *args):
+    x = raw__isub__(input, *args)
+    if not NET_INITTED:
+        return x
+    x = x.clone()
+    layer_name = log.add_layer(name='sub')
+    top_blobs = log.add_blobs([x], name='sub_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 1  # sum is 1
+    layer.param.eltwise_param.coeff.extend([1., -1.])  # was missing: needed to turn the sum into a subtraction
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _mul(input, *args):
+    x = raw__mul__(input, *args)  # was raw__sub__, which computed the wrong result
+    if not NET_INITTED:
+        return x
+    layer_name = log.add_layer(name='mul')
+    top_blobs = log.add_blobs([x], name='mul_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 0  # product is 0
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _imul(input, *args):
+    x = raw__imul__(input, *args)  # was raw__isub__, which computed the wrong result
+    if not NET_INITTED:
+        return x
+    x = x.clone()
+    layer_name = log.add_layer(name='mul')
+    top_blobs = log.add_blobs([x], name='mul_blob')
+    layer = caffe_net.Layer_param(name=layer_name, type='Eltwise',
+                                  bottom=[log.blobs(input), log.blobs(args[0])], top=top_blobs)
+    layer.param.eltwise_param.operation = 0  # product is 0; coeff only applies to SUM, so it is not set here
+    log.cnet.add_layer(layer)
+    return x
+
+
+def _adaptive_avg_pool2d(raw, input, output_size):
+    _output_size = _list_with_default(output_size, input.size())
+    x = raw(input, _output_size)
+    _pool('ave', raw, input, x, input.shape[2], input.shape[2], 0, False)
+    return x
+
+
+# Core component: this class wraps the operators in torch's functional API so that
+# their inputs, outputs and parameters can be captured during tracing.
+class Rp(object):
+    def __init__(self, raw, replace, **kwargs):
+        # replace the raw function with the replace function
+        self.obj = replace
+        self.raw = raw
+
+    def __call__(self, *args, **kwargs):
+        if not NET_INITTED:
+            return self.raw(*args, **kwargs)
+        for stack in traceback.walk_stack(None):
+            if 'self' in stack[0].f_locals:
+                layer = stack[0].f_locals['self']
+                if layer in layer_names:
+                    log.pytorch_layer_name = layer_names[layer]
+                    print(layer_names[layer])
+                    break
+        out = self.obj(self.raw, *args, **kwargs)
+        # if isinstance(out,Variable):
+        #     out=[out]
+        return out
+
+
+F.conv2d = Rp(F.conv2d, _conv2d)
+F.linear = Rp(F.linear, _linear)
+F.relu = Rp(F.relu, _relu)
+
+F.leaky_relu = Rp(F.leaky_relu, _leaky_relu)
+F.max_pool2d = Rp(F.max_pool2d, _max_pool2d)
+F.avg_pool2d = Rp(F.avg_pool2d, 
_avg_pool2d) +F.dropout = Rp(F.dropout, _dropout) +F.threshold = Rp(F.threshold, _threshold) +F.prelu = Rp(F.prelu, _prelu) +F.batch_norm = Rp(F.batch_norm, _batch_norm) +F.instance_norm = Rp(F.instance_norm, _instance_norm) +F.softmax = Rp(F.softmax, _softmax) +F.conv_transpose2d = Rp(F.conv_transpose2d, _conv_transpose2d) +F.interpolate = Rp(F.interpolate, _interpolate) +F.adaptive_avg_pool2d = Rp(F.adaptive_avg_pool2d, _adaptive_avg_pool2d) + +torch.split = Rp(torch.split, _split) +torch.max = Rp(torch.max, _max) +torch.cat = Rp(torch.cat, _cat) +torch.sigmoid = Rp(torch.sigmoid, _sigmoid) + +# TODO: other types of the view function +try: + raw_view = Variable.view + Variable.view = _view + raw_mean = Variable.mean + Variable.mean = _mean + raw__add__ = Variable.__add__ + Variable.__add__ = _add + raw__iadd__ = Variable.__iadd__ + Variable.__iadd__ = _iadd + raw__sub__ = Variable.__sub__ + Variable.__sub__ = _sub + raw__isub__ = Variable.__isub__ + Variable.__isub__ = _isub + raw__mul__ = Variable.__mul__ + Variable.__mul__ = _mul + raw__imul__ = Variable.__imul__ + Variable.__imul__ = _imul +except: + # for new version 0.4.0 and later version + for t in [torch.Tensor]: + raw_view = t.view + t.view = _view + raw_mean = t.mean + t.mean = _mean + raw__add__ = t.__add__ + t.__add__ = _add + raw__iadd__ = t.__iadd__ + t.__iadd__ = _iadd + raw__sub__ = t.__sub__ + t.__sub__ = _sub + raw__isub__ = t.__isub__ + t.__isub__ = _isub + raw__mul__ = t.__mul__ + t.__mul__ = _mul + raw__imul__ = t.__imul__ + t.__imul__ = _imul + + +def trans_net(net, input_var, name='TransferedPytorchModel'): + print('Starting Transform, This will take a while') + log.init([input_var]) + log.cnet.net.name = name + log.cnet.net.input.extend([log.blobs(input_var)]) + log.cnet.net.input_dim.extend(input_var.size()) + global NET_INITTED + NET_INITTED = True + for name, layer in net.named_modules(): + layer_names[layer] = name + print("torch ops name:", layer_names) + out = net.forward(input_var) + print('Transform Completed') + + +def save_prototxt(save_name): + log.cnet.save_prototxt(save_name) + + +def save_caffemodel(save_name): + log.cnet.save(save_name) diff --git a/thirdparty/fast-reid/tools/deploy/run_export.sh b/thirdparty/fast-reid/tools/deploy/run_export.sh new file mode 100644 index 0000000000000000000000000000000000000000..48f5c9bf3157aa4e400373f78cc6e5aee74c6de2 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/run_export.sh @@ -0,0 +1,4 @@ +python caffe_export.py --config-file /export/home/lxy/cvpalgo-fast-reid/logs/dukemtmc/R34/config.yaml \ +--name "baseline_R34" \ +--output logs/caffe_R34 \ +--opts MODEL.WEIGHTS /export/home/lxy/cvpalgo-fast-reid/logs/dukemtmc/R34/model_final.pth diff --git a/thirdparty/fast-reid/tools/deploy/test_data/0022_c6s1_002976_01.jpg b/thirdparty/fast-reid/tools/deploy/test_data/0022_c6s1_002976_01.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0c715d5bd329711c2f55f2122cee872c8f55c532 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/test_data/0022_c6s1_002976_01.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b9854c9592914f991d785531045f10d355a45b3ac3d4c423f74c78519635496 +size 2223 diff --git a/thirdparty/fast-reid/tools/deploy/test_data/0027_c2s2_091032_02.jpg b/thirdparty/fast-reid/tools/deploy/test_data/0027_c2s2_091032_02.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d4ad36df2bb89e65246952e2f1fb019790cf88b8 --- /dev/null +++ 
b/thirdparty/fast-reid/tools/deploy/test_data/0027_c2s2_091032_02.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdedfc7eabdcddb505818e929804ce1d766b2e8241c06b5abe853c72e67f6dc5 +size 2137 diff --git a/thirdparty/fast-reid/tools/deploy/test_data/0032_c6s1_002851_01.jpg b/thirdparty/fast-reid/tools/deploy/test_data/0032_c6s1_002851_01.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c8f55c1f06af9991e9364f4f5848cbf5828cd673 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/test_data/0032_c6s1_002851_01.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e57a5a8609d8f2af0801705d9412d662e363258b26b373a216a785b5af9f3afa +size 2640 diff --git a/thirdparty/fast-reid/tools/deploy/test_data/0048_c1s1_005351_01.jpg b/thirdparty/fast-reid/tools/deploy/test_data/0048_c1s1_005351_01.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9b412848cfdf186327a261ef33f32b2c657ead20 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/test_data/0048_c1s1_005351_01.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d86981e26e4f85f91e89731b0d1f06301a6728a137959b6decef845738c41e79 +size 2149 diff --git a/thirdparty/fast-reid/tools/deploy/test_data/0065_c6s1_009501_02.jpg b/thirdparty/fast-reid/tools/deploy/test_data/0065_c6s1_009501_02.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3f84545e31eeba81a75a5656b33dcc8c7b839f07 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/test_data/0065_c6s1_009501_02.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:95c7e9b1f73398f3e90feb5a76d1a91a5995ad0b53dac52e905bbb52c5a48cde +size 1890 diff --git a/thirdparty/fast-reid/tools/deploy/trt_export.py b/thirdparty/fast-reid/tools/deploy/trt_export.py new file mode 100644 index 0000000000000000000000000000000000000000..edc9f1e925d0605af6ab253cffeb55e915b63866 --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/trt_export.py @@ -0,0 +1,82 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" + +import argparse +import os +import numpy as np +import sys + +sys.path.append('../../') +sys.path.append("/export/home/lxy/runtimelib-tensorrt-tiny/build") + +import pytrt +from fastreid.utils.logger import setup_logger +from fastreid.utils.file_io import PathManager + + +logger = setup_logger(name='trt_export') + + +def get_parser(): + parser = argparse.ArgumentParser(description="Convert ONNX to TRT model") + + parser.add_argument( + "--name", + default="baseline", + help="name for converted model" + ) + parser.add_argument( + "--output", + default='outputs/trt_model', + help='path to save converted trt model' + ) + parser.add_argument( + "--onnx-model", + default='outputs/onnx_model/baseline.onnx', + help='path to onnx model' + ) + parser.add_argument( + "--height", + type=int, + default=256, + help="height of image" + ) + parser.add_argument( + "--width", + type=int, + default=128, + help="width of image" + ) + return parser + + +def export_trt_model(onnxModel, engineFile, input_numpy_array): + r""" + Export a model to trt format. 
+ """ + + trt = pytrt.Trt() + + customOutput = [] + maxBatchSize = 1 + calibratorData = [] + mode = 2 + trt.CreateEngine(onnxModel, engineFile, customOutput, maxBatchSize, mode, calibratorData) + trt.DoInference(input_numpy_array) # slightly different from c++ + return 0 + + +if __name__ == '__main__': + args = get_parser().parse_args() + + inputs = np.zeros(shape=(32, args.height, args.width, 3)) + onnxModel = args.onnx_model + engineFile = os.path.join(args.output, args.name+'.engine') + + PathManager.mkdirs(args.output) + export_trt_model(onnxModel, engineFile, inputs) + + logger.info(f"Export trt model in {args.output} successfully!") diff --git a/thirdparty/fast-reid/tools/deploy/trt_inference.py b/thirdparty/fast-reid/tools/deploy/trt_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..0775364e4ae01959d42e05da91ee2d7afdf9acdb --- /dev/null +++ b/thirdparty/fast-reid/tools/deploy/trt_inference.py @@ -0,0 +1,99 @@ +# encoding: utf-8 +""" +@author: xingyu liao +@contact: sherlockliao01@gmail.com +""" +import argparse +import glob +import os +import sys + +import cv2 +import numpy as np +# import tqdm + +sys.path.append("/export/home/lxy/runtimelib-tensorrt-tiny/build") + +import pytrt + + +def get_parser(): + parser = argparse.ArgumentParser(description="trt model inference") + + parser.add_argument( + "--model-path", + default="outputs/trt_model/baseline.engine", + help="trt model path" + ) + parser.add_argument( + "--input", + nargs="+", + help="A list of space separated input images; " + "or a single glob pattern such as 'directory/*.jpg'", + ) + parser.add_argument( + "--output", + default="trt_output", + help="path to save trt model inference results" + ) + parser.add_argument( + "--output-name", + help="tensorRT model output name" + ) + parser.add_argument( + "--height", + type=int, + default=256, + help="height of image" + ) + parser.add_argument( + "--width", + type=int, + default=128, + help="width of image" + ) + return parser + + +def preprocess(image_path, image_height, image_width): + original_image = cv2.imread(image_path) + # the model expects RGB inputs + original_image = original_image[:, :, ::-1] + + # Apply pre-processing to image. 
+    img = cv2.resize(original_image, (image_width, image_height), interpolation=cv2.INTER_CUBIC)
+    img = img.astype("float32").transpose(2, 0, 1)[np.newaxis]  # (1, 3, h, w)
+    return img
+
+
+def normalize(nparray, order=2, axis=-1):
+    """Normalize a N-D numpy array along the specified axis."""
+    norm = np.linalg.norm(nparray, ord=order, axis=axis, keepdims=True)
+    return nparray / (norm + np.finfo(np.float32).eps)
+
+
+if __name__ == "__main__":
+    args = get_parser().parse_args()
+
+    trt = pytrt.Trt()
+
+    onnxModel = ""
+    engineFile = args.model_path
+    customOutput = []
+    maxBatchSize = 1
+    calibratorData = []
+    mode = 2
+    trt.CreateEngine(onnxModel, engineFile, customOutput, maxBatchSize, mode, calibratorData)
+
+    if not os.path.exists(args.output): os.makedirs(args.output)
+
+    if args.input:
+        if os.path.isdir(args.input[0]):
+            args.input = glob.glob(os.path.expanduser(args.input[0]))
+        assert args.input, "The input path(s) were not found"
+        for path in args.input:
+            input_numpy_array = preprocess(path, args.height, args.width)
+            trt.DoInference(input_numpy_array)
+            feat = trt.GetOutput(args.output_name)
+            feat = normalize(feat, axis=1)
+            np.save(os.path.join(args.output, path.replace('.jpg', '.npy').split('/')[-1]), feat)
diff --git a/thirdparty/fast-reid/tools/train_net.py b/thirdparty/fast-reid/tools/train_net.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a0a3c7f979a822df988ffe0eba91e4973833a61
--- /dev/null
+++ b/thirdparty/fast-reid/tools/train_net.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""
+@author: sherlock
+@contact: sherlockliao01@gmail.com
+"""
+
+import sys
+
+sys.path.append('.')
+
+from fastreid.config import get_cfg
+from fastreid.engine import DefaultTrainer, default_argument_parser, default_setup, launch
+from fastreid.utils.checkpoint import Checkpointer
+
+
+def setup(args):
+    """
+    Create configs and perform basic setups.
+    """
+    cfg = get_cfg()
+    cfg.merge_from_file(args.config_file)
+    cfg.merge_from_list(args.opts)
+    cfg.freeze()
+    default_setup(cfg, args)
+    return cfg
+
+
+def main(args):
+    cfg = setup(args)
+
+    if args.eval_only:
+        cfg.defrost()
+        cfg.MODEL.BACKBONE.PRETRAIN = False
+        model = DefaultTrainer.build_model(cfg)
+
+        Checkpointer(model).load(cfg.MODEL.WEIGHTS)  # load trained model
+
+        res = DefaultTrainer.test(cfg, model)
+        return res
+
+    trainer = DefaultTrainer(cfg)
+
+    trainer.resume_or_load(resume=args.resume)
+    return trainer.train()
+
+
+if __name__ == "__main__":
+    args = default_argument_parser().parse_args()
+    print("Command Line Args:", args)
+    launch(
+        main,
+        args.num_gpus,
+        num_machines=args.num_machines,
+        machine_rank=args.machine_rank,
+        dist_url=args.dist_url,
+        args=(args,),
+    )
diff --git a/thirdparty/mmdetection/.dev_scripts/batch_test.py b/thirdparty/mmdetection/.dev_scripts/batch_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf8eacd220b712d8b4b24225a50ee6e0a357bb92
--- /dev/null
+++ b/thirdparty/mmdetection/.dev_scripts/batch_test.py
@@ -0,0 +1,212 @@
+"""
+some instructions
+1. Fill the models that need to be checked in the modelzoo_dict
+2. Arrange the structure of the directory as follows; the script will find the
+   corresponding config itself:
+    model_dir/model_family/checkpoints
+    e.g.: models/faster_rcnn/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth
+          models/faster_rcnn/faster_rcnn_r101_fpn_1x_coco_20200130-047c8118.pth
+3. Execute the batch_test.sh
+"""
+
+import argparse
+import json
+import os
+import subprocess
+
+import mmcv
+import torch
+from mmcv import Config, get_logger
+from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
+from mmcv.runner import (get_dist_info, init_dist, load_checkpoint,
+                         wrap_fp16_model)
+
+from mmdet.apis import multi_gpu_test, single_gpu_test
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+from mmdet.models import build_detector

+modelzoo_dict = {
+    'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py': {
+        'bbox': 0.374
+    },
+    'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py': {
+        'bbox': 0.382,
+        'segm': 0.347
+    },
+    'configs/rpn/rpn_r50_fpn_1x_coco.py': {
+        'AR@1000': 0.582
+    }
+}
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='The script used for checking the correctness \
+            of batch inference')
+    parser.add_argument('model_dir', help='directory of models')
+    parser.add_argument(
+        'json_out', help='the output json records test information like mAP')
+    parser.add_argument(
+        '--launcher',
+        choices=['none', 'pytorch', 'slurm', 'mpi'],
+        default='none',
+        help='job launcher')
+    parser.add_argument('--local_rank', type=int, default=0)
+    args = parser.parse_args()
+    if 'LOCAL_RANK' not in os.environ:
+        os.environ['LOCAL_RANK'] = str(args.local_rank)
+    return args
+
+
+def check_finish(all_model_dict, result_file):
+    # check if all models are checked
+    tested_cfgs = []
+    with open(result_file, 'r+') as f:
+        for line in f:
+            line = json.loads(line)
+            tested_cfgs.append(line['cfg'])
+    is_finish = True
+    for cfg in sorted(all_model_dict.keys()):
+        if cfg not in tested_cfgs:
+            return cfg
+    if is_finish:
+        with open(result_file, 'a+') as f:
+            f.write('finished\n')
+
+
+def dump_dict(record_dict, json_out):
+    # dump result json dict
+    with open(json_out, 'a+') as f:
+        mmcv.dump(record_dict, f, file_format='json')
+        f.write('\n')
+
+
+def main():
+    args = parse_args()
+    # touch the output json if it does not exist
+    with open(args.json_out, 'a+'):
+        pass
+    # init distributed env first, since the logger depends on the dist
+    # info.
+    if args.launcher == 'none':
+        distributed = False
+    else:
+        distributed = True
+        init_dist(args.launcher, backend='nccl')
+    # rank is needed below even in the non-distributed case
+    rank, world_size = get_dist_info()
+
+    logger = get_logger('root')
+
+    # read info of checkpoints and config
+    result_dict = dict()
+    for model_family_dir in os.listdir(args.model_dir):
+        for model in os.listdir(
+                os.path.join(args.model_dir, model_family_dir)):
+            # cpt: rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth
+            # cfg: rpn_r50_fpn_1x_coco.py
+            cfg = model.split('.')[0][:-18] + '.py'
+            cfg_path = os.path.join('configs', model_family_dir, cfg)
+            assert os.path.isfile(
+                cfg_path), f'{cfg_path} is not a valid config path'
+            cpt_path = os.path.join(args.model_dir, model_family_dir, model)
+            result_dict[cfg_path] = cpt_path
+            assert cfg_path in modelzoo_dict, f'please fill the ' \
+                f'performance of cfg: {cfg_path}'
+    cfg = check_finish(result_dict, args.json_out)
+    cpt = result_dict[cfg]
+    try:
+        cfg_name = cfg
+        logger.info(f'evaluate {cfg}')
+        record = dict(cfg=cfg, cpt=cpt)
+        cfg = Config.fromfile(cfg)
+        # cfg.data.test.ann_file = 'data/val_0_10.json'
+        # set cudnn_benchmark
+        if cfg.get('cudnn_benchmark', False):
+            torch.backends.cudnn.benchmark = True
+        cfg.model.pretrained = None
+        if cfg.model.get('neck'):
+            if isinstance(cfg.model.neck, list):
+                for neck_cfg in cfg.model.neck:
+                    if neck_cfg.get('rfp_backbone'):
+                        if neck_cfg.rfp_backbone.get('pretrained'):
+                            neck_cfg.rfp_backbone.pretrained = None
+            elif cfg.model.neck.get('rfp_backbone'):
+                if cfg.model.neck.rfp_backbone.get('pretrained'):
+                    cfg.model.neck.rfp_backbone.pretrained = None
+
+        # in case the test dataset is concatenated
+        if isinstance(cfg.data.test, dict):
+            cfg.data.test.test_mode = True
+        elif isinstance(cfg.data.test, list):
+            for ds_cfg in cfg.data.test:
+                ds_cfg.test_mode = True
+
+        # build the dataloader
+        samples_per_gpu = 2  # hack: test with 2 images per GPU
+        if samples_per_gpu > 1:
+            # Replace 'ImageToTensor' with 'DefaultFormatBundle'
+            cfg.data.test.pipeline = replace_ImageToTensor(
+                cfg.data.test.pipeline)
+        dataset = build_dataset(cfg.data.test)
+        data_loader = build_dataloader(
+            dataset,
+            samples_per_gpu=samples_per_gpu,
+            workers_per_gpu=cfg.data.workers_per_gpu,
+            dist=distributed,
+            shuffle=False)
+
+        # build the model and load checkpoint
+        model = build_detector(
+            cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
+        fp16_cfg = cfg.get('fp16', None)
+        if fp16_cfg is not None:
+            wrap_fp16_model(model)
+
+        checkpoint = load_checkpoint(model, cpt, map_location='cpu')
+        # old versions did not save class info in checkpoints,
+        # this workaround is for backward compatibility
+        if 'CLASSES' in checkpoint['meta']:
+            model.CLASSES = checkpoint['meta']['CLASSES']
+        else:
+            model.CLASSES = dataset.CLASSES
+
+        if not distributed:
+            model = MMDataParallel(model, device_ids=[0])
+            outputs = single_gpu_test(model, data_loader)
+        else:
+            model = MMDistributedDataParallel(
+                model.cuda(),
+                device_ids=[torch.cuda.current_device()],
+                broadcast_buffers=False)
+            outputs = multi_gpu_test(model, data_loader, 'tmp')
+        if rank == 0:
+            ref_mAP_dict = modelzoo_dict[cfg_name]
+            metrics = list(ref_mAP_dict.keys())
+            metrics = [
+                m if m != 'AR@1000' else 'proposal_fast' for m in metrics
+            ]
+            eval_results = dataset.evaluate(outputs, metrics)
+            print(eval_results)
+            for metric in metrics:
+                if metric == 'proposal_fast':
+                    ref_metric = modelzoo_dict[cfg_name]['AR@1000']
+                    eval_metric = eval_results['AR@1000']
+                else:
+                    ref_metric = modelzoo_dict[cfg_name][metric]
+                    eval_metric = eval_results[f'{metric}_mAP']
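+                # flag this model as abnormal when the measured metric
+                # deviates from the modelzoo_dict reference by more than
+                # 0.003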
+                if abs(ref_metric - eval_metric) > 0.003:
+                    record['is_normal'] = False
+            dump_dict(record, args.json_out)
+            check_finish(result_dict, args.json_out)
+    except Exception as e:
+        logger.error(f'rank: {rank} test failed with error: {e}')
+        record['terminate'] = True
+        dump_dict(record, args.json_out)
+        check_finish(result_dict, args.json_out)
+        # hack: run an invalid command so an error is raised here instead of
+        # letting the job hang
+        subprocess.call('xxx')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/thirdparty/mmdetection/.dev_scripts/batch_test.sh b/thirdparty/mmdetection/.dev_scripts/batch_test.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8da8fef1235de8895cecfe0fbb7dd5a830a77de3
--- /dev/null
+++ b/thirdparty/mmdetection/.dev_scripts/batch_test.sh
@@ -0,0 +1,19 @@
+export PYTHONPATH=${PWD}
+
+partition=$1
+model_dir=$2
+json_out=$3
+job_name=batch_test
+gpus=8
+gpu_per_node=8
+
+touch $json_out
+lastLine=$(tail -n 1 $json_out)
+while [ "$lastLine" != "finished" ]
+do
+    srun -p ${partition} --gres=gpu:${gpu_per_node} -n${gpus} --ntasks-per-node=${gpu_per_node} \
+         --job-name=${job_name} --kill-on-bad-exit=1 \
+         python .dev_scripts/batch_test.py $model_dir $json_out --launcher='slurm'
+    lastLine=$(tail -n 1 $json_out)
+    echo $lastLine
+done
diff --git a/thirdparty/mmdetection/.dev_scripts/benchmark_filter.py b/thirdparty/mmdetection/.dev_scripts/benchmark_filter.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a8b5c7be40ac023c757c2b47d33d52e72a73b8b
--- /dev/null
+++ b/thirdparty/mmdetection/.dev_scripts/benchmark_filter.py
@@ -0,0 +1,132 @@
+import argparse
+import os
+import os.path as osp
+
+import mmcv
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Filter configs to train')
+    parser.add_argument(
+        '--basic-arch',
+        action='store_true',
+        help='to train models in basic arch')
+    parser.add_argument(
+        '--datasets', action='store_true', help='to train models in dataset')
+    parser.add_argument(
+        '--data-pipeline',
+        action='store_true',
+        help='to train models related to data pipeline, e.g. 
augmentations') + parser.add_argument( + '--nn-module', + action='store_true', + help='to train models related to neural network modules') + + args = parser.parse_args() + return args + + +basic_arch_root = [ + 'cascade_rcnn', 'double_heads', 'fcos', 'foveabox', 'free_anchor', + 'grid_rcnn', 'guided_anchoring', 'htc', 'libra_rcnn', 'atss', 'mask_rcnn', + 'ms_rcnn', 'nas_fpn', 'reppoints', 'retinanet', 'ssd', 'gn', 'ghm', 'fsaf', + 'point_rend', 'nas_fcos', 'pisa', 'dynamic_rcnn', 'gfl', 'sabl', 'paa', + 'yolo' +] + +datasets_root = ['wider_face', 'pascal_voc', 'cityscapes', 'mask_rcnn'] + +data_pipeline_root = [ + 'albu_example', 'instaboost', 'ssd', 'mask_rcnn', 'nas_fpn' +] + +nn_module_root = [ + 'carafe', 'dcn', 'empirical_attention', 'gcnet', 'gn+ws', 'hrnet', 'pafpn', + 'nas_fpn', 'regnet' +] + +benchmark_pool = [ + 'configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py', + 'configs/htc/htc_r50_fpn_1x_coco.py', + 'configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py', + 'configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py', + 'configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py', + 'configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x.py', + 'configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py', + 'configs/regnet/mask_rcnn_regnetx-3GF_fpn_1x_coco.py', + 'configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py', + 'configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py', + 'configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py', + 'configs/rpn/rpn_r50_fpn_1x_coco.py', + 'configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py', + 'configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py', + 'configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py', + 'configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py', + 'configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py', + 'configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py', + 'configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py', + 'configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py', + 'configs/ssd/ssd300_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py', + 'configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py', # noqa + 'configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py', + 'configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py', + 'configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py', + 'configs/fsaf/fsaf_r50_fpn_1x_coco.py', + 'configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py', + 'configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py', + 'configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py', + 'configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py', + 'configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py', + 'configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py', + 'configs/wider_face/ssd300_wider_face.py', + 'configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py', + 'configs/fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py', + 'configs/atss/atss_r50_fpn_1x_coco.py', + 'configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py', + 'configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py', + 'configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py', + 'configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py', + 'configs/pascal_voc/ssd300_voc0712.py', + 'configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py', + 'configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py', + 'configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py', + 'configs/gfl/gfl_r50_fpn_1x_coco.py', + 
'configs/paa/paa_r50_fpn_1x_coco.py', + 'configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py', + 'configs/yolo/yolov3_d53_320_273e_coco.py' +] + + +def main(): + args = parse_args() + + benchmark_type = [] + if args.basic_arch: + benchmark_type += basic_arch_root + if args.datasets: + benchmark_type += datasets_root + if args.data_pipeline: + benchmark_type += data_pipeline_root + if args.nn_module: + benchmark_type += nn_module_root + + config_dpath = 'configs/' + benchmark_configs = [] + for cfg_root in benchmark_type: + cfg_dir = osp.join(config_dpath, cfg_root) + configs = os.scandir(cfg_dir) + for cfg in configs: + config_path = osp.join(cfg_dir, cfg.name) + if (config_path in benchmark_pool + and config_path not in benchmark_configs): + benchmark_configs.append(config_path) + + print(f'Totally found {len(benchmark_configs)} configs to benchmark') + config_dicts = dict(models=benchmark_configs) + mmcv.dump(config_dicts, 'regression_test_configs.json') + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/.dev_scripts/gather_models.py b/thirdparty/mmdetection/.dev_scripts/gather_models.py new file mode 100644 index 0000000000000000000000000000000000000000..6586d4fdef15124bdf386ed71e1191cf747bf2c6 --- /dev/null +++ b/thirdparty/mmdetection/.dev_scripts/gather_models.py @@ -0,0 +1,171 @@ +import argparse +import glob +import json +import os.path as osp +import shutil +import subprocess + +import mmcv +import torch + +# build schedule look-up table to automatically find the final model +SCHEDULES_LUT = { + '_1x_': 12, + '_2x_': 24, + '_20e_': 20, + '_3x_': 36, + '_4x_': 48, + '_24e_': 24, + '_6x_': 73 +} +RESULTS_LUT = ['bbox_mAP', 'segm_mAP'] + + +def process_checkpoint(in_file, out_file): + checkpoint = torch.load(in_file, map_location='cpu') + # remove optimizer for smaller file size + if 'optimizer' in checkpoint: + del checkpoint['optimizer'] + # if it is necessary to remove some sensitive data in checkpoint['meta'], + # add the code here. 
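+    # e.g. a hypothetical scrub of build/environment details (illustrative
+    # only; 'env_info' is an assumed key, so this stays commented out):
+    # checkpoint['meta'].pop('env_info', None)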
+    torch.save(checkpoint, out_file)
+    sha = subprocess.check_output(['sha256sum', out_file]).decode()
+    # strip the '.pth' suffix explicitly; str.rstrip('.pth') would strip
+    # characters, not the suffix
+    out_file_name = out_file[:-len('.pth')] if out_file.endswith(
+        '.pth') else out_file
+    final_file = out_file_name + '-{}.pth'.format(sha[:8])
+    subprocess.Popen(['mv', out_file, final_file])
+    return final_file
+
+
+def get_final_epoch(config):
+    if config.find('grid_rcnn') != -1 and config.find('2x') != -1:
+        # grid_rcnn 2x trains 25 epochs
+        return 25
+
+    for schedule_name, epoch_num in SCHEDULES_LUT.items():
+        if config.find(schedule_name) != -1:
+            return epoch_num
+
+
+def get_final_results(log_json_path, epoch):
+    result_dict = dict()
+    with open(log_json_path, 'r') as f:
+        for line in f.readlines():
+            log_line = json.loads(line)
+            if 'mode' not in log_line.keys():
+                continue
+
+            if log_line['mode'] == 'train' and log_line['epoch'] == epoch:
+                result_dict['memory'] = log_line['memory']
+
+            if log_line['mode'] == 'val' and log_line['epoch'] == epoch:
+                result_dict.update({
+                    key: log_line[key]
+                    for key in RESULTS_LUT if key in log_line
+                })
+                return result_dict
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Gather benchmarked models')
+    parser.add_argument(
+        'root',
+        type=str,
+        help='root path of benchmarked models to be gathered')
+    parser.add_argument(
+        'out', type=str, help='output path of gathered models to be stored')
+
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    models_root = args.root
+    models_out = args.out
+    mmcv.mkdir_or_exist(models_out)
+
+    # find all models in the root directory to be gathered
+    raw_configs = list(mmcv.scandir('./configs', '.py', recursive=True))
+
+    # filter out configs that were not trained in the experiments dir
+    used_configs = []
+    for raw_config in raw_configs:
+        if osp.exists(osp.join(models_root, raw_config)):
+            used_configs.append(raw_config)
+    print(f'Found {len(used_configs)} models to be gathered')
+
+    # find final_ckpt and log file for each trained config
+    # and parse the best performance
+    model_infos = []
+    for used_config in used_configs:
+        exp_dir = osp.join(models_root, used_config)
+        # check whether the experiment is finished
+        final_epoch = get_final_epoch(used_config)
+        final_model = 'epoch_{}.pth'.format(final_epoch)
+        model_path = osp.join(exp_dir, final_model)
+
+        # skip if the model is still training
+        if not osp.exists(model_path):
+            continue
+
+        # get logs
+        log_json_path = glob.glob(osp.join(exp_dir, '*.log.json'))[0]
+        log_txt_path = glob.glob(osp.join(exp_dir, '*.log'))[0]
+        model_performance = get_final_results(log_json_path, final_epoch)
+
+        if model_performance is None:
+            continue
+
+        model_time = osp.split(log_txt_path)[-1].split('.')[0]
+        model_infos.append(
+            dict(
+                config=used_config,
+                results=model_performance,
+                epochs=final_epoch,
+                model_time=model_time,
+                log_json_path=osp.split(log_json_path)[-1]))
+
+    # publish model for each checkpoint
+    publish_model_infos = []
+    for model in model_infos:
+        # use splitext instead of rstrip('.py'), which strips characters
+        # rather than the suffix
+        model_publish_dir = osp.join(models_out,
+                                     osp.splitext(model['config'])[0])
+        mmcv.mkdir_or_exist(model_publish_dir)
+
+        model_name = osp.split(model['config'])[-1].split('.')[0]
+
+        model_name += '_' + model['model_time']
+        publish_model_path = osp.join(model_publish_dir, model_name)
+        trained_model_path = osp.join(models_root, model['config'],
+                                      'epoch_{}.pth'.format(model['epochs']))
+
+        # convert model
+        final_model_path = process_checkpoint(trained_model_path,
+                                              publish_model_path)
+
+        # copy log
+        shutil.copy(
+            osp.join(models_root, model['config'], model['log_json_path']),
+            osp.join(model_publish_dir, f'{model_name}.log.json'))
+        # splitext turns 'xxx.log.json' into 'xxx.log'
+        shutil.copy(
+            osp.join(models_root, model['config'],
+                     osp.splitext(model['log_json_path'])[0]),
+            osp.join(model_publish_dir, f'{model_name}.log'))
+
+        # copy config to guarantee reproducibility
+        config_path = model['config']
+        config_path = osp.join(
+            'configs',
+            config_path) if 'configs' not in config_path else config_path
+        target_config_path = osp.split(config_path)[-1]
+        shutil.copy(config_path,
+                    osp.join(model_publish_dir, target_config_path))
+
+        model['model_path'] = final_model_path
+        publish_model_infos.append(model)
+
+    models = dict(models=publish_model_infos)
+    print(f'Gathered {len(publish_model_infos)} models in total')
+    mmcv.dump(models, osp.join(models_out, 'model_info.json'))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/thirdparty/mmdetection/.dev_scripts/linter.sh b/thirdparty/mmdetection/.dev_scripts/linter.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b0fe0acfa492820d6e556cf76d6d48e46c64e5e0
--- /dev/null
+++ b/thirdparty/mmdetection/.dev_scripts/linter.sh
@@ -0,0 +1,3 @@
+yapf -r -i mmdet/ configs/ tests/ tools/
+isort -rc mmdet/ configs/ tests/ tools/
+flake8 .
diff --git a/thirdparty/mmdetection/.github/CODE_OF_CONDUCT.md b/thirdparty/mmdetection/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000000000000000000000000000000000..efd4305798630a5cd7b17d7cf893b9a811d5501f
--- /dev/null
+++ b/thirdparty/mmdetection/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,76 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+  advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at chenkaidev@gmail.com. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
diff --git a/thirdparty/mmdetection/.github/CONTRIBUTING.md b/thirdparty/mmdetection/.github/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..086f417804f06ebfc204a98ea0af7229a3cba4b3
--- /dev/null
+++ b/thirdparty/mmdetection/.github/CONTRIBUTING.md
@@ -0,0 +1,54 @@
+# Contributing to mmdetection
+
+All kinds of contributions are welcome, including but not limited to the following.
+
+- Fixes (typos, bugs)
+- New features and components
+
+## Workflow
+
+1. fork and pull the latest mmdetection
+2. checkout a new branch (do not use master branch for PRs)
+3. commit your changes
+4. create a PR
+
+Note
+- If you plan to add some new features that involve large changes, it is encouraged to open an issue for discussion first.
+- If you are the author of some papers and would like to include your method in mmdetection,
+please let us know (open an issue or contact the maintainers). We would much appreciate your contribution.
+- For new features and new modules, unit tests are required to improve the code's robustness.
+
+## Code style
+
+### Python
+We adopt [PEP8](https://www.python.org/dev/peps/pep-0008/) as the preferred code style.
+
+We use the following tools for linting and formatting:
+- [flake8](http://flake8.pycqa.org/en/latest/): linter
+- [yapf](https://github.com/google/yapf): formatter
+- [isort](https://github.com/timothycrosley/isort): sort imports
+
+Style configurations of yapf and isort can be found in [setup.cfg](../setup.cfg).
+
+We use a [pre-commit hook](https://pre-commit.com/) that checks and formats for `flake8`, `yapf`, `isort`, `trailing whitespaces`,
+ fixes `end-of-files`, and sorts `requirements.txt` automatically on every commit.
+The config for a pre-commit hook is stored in [.pre-commit-config](../.pre-commit-config.yaml).
+
+After you clone the repository, you will need to install and initialize the pre-commit hook.
+
+```
+pip install -U pre-commit
+```
+
+From the repository folder
+```
+pre-commit install
+```
+
+After this, the code linters and formatter will be enforced on every commit.
+
+
+>Before you create a PR, make sure that your code lints and is formatted by yapf.
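+
+As a convenience, all hooks can also be run manually against the whole codebase before opening a PR (standard pre-commit usage; the CI workflow in `.github/workflows/build.yml` runs the same command):
+
+```
+pre-commit run --all-files
+```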
+
+### C++ and CUDA
+We follow the [Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html).
diff --git a/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/config.yml b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000000000000000000000000000000000000..56bbd88fddfd7a1ceb50a1cb406e80318a8d0370
--- /dev/null
+++ b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,9 @@
+blank_issues_enabled: false
+
+contact_links:
+  - name: Common Issues
+    url: https://mmdetection.readthedocs.io/en/latest/faq.html
+    about: Check if your issue already has solutions
+  - name: MMDetection Documentation
+    url: https://mmdetection.readthedocs.io/en/latest/
+    about: Check if your question is answered in docs
diff --git a/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/error-report.md b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/error-report.md
new file mode 100644
index 0000000000000000000000000000000000000000..acdfd3521487eebfd77104a4c776537cc301bce2
--- /dev/null
+++ b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/error-report.md
@@ -0,0 +1,41 @@
+---
+name: Error report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+Thanks for your error report and we appreciate it a lot.
+
+**Checklist**
+1. I have searched related issues but cannot get the expected help.
+2. The bug has not been fixed in the latest version.
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**Reproduction**
+1. What command or script did you run?
+```
+A placeholder for the command.
+```
+2. Did you make any modifications to the code or config? Did you understand what you have modified?
+3. What dataset did you use?
+
+**Environment**
+
+1. Please run `python mmdet/utils/collect_env.py` to collect necessary environment information and paste it here.
+2. You may add additional information that may be helpful for locating the problem, such as
+    - How you installed PyTorch [e.g., pip, conda, source]
+    - Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.)
+
+**Error traceback**
+If applicable, paste the error traceback here.
+```
+A placeholder for the traceback.
+```
+
+**Bug fix**
+If you have already identified the reason, you can provide the information here. If you are willing to create a PR to fix it, please also leave a comment here and that would be much appreciated!
diff --git a/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/feature_request.md b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000000000000000000000000000000000000..33f9d5f2354dcb019cee5f6fbddf36f3a408fba3
--- /dev/null
+++ b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,22 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the feature**
+
+**Motivation**
+A clear and concise description of the motivation of the feature.
+Ex1. It is inconvenient when [....].
+Ex2. There is a recent paper [....], which is very helpful for [....].
+
+**Related resources**
+If there is an official code release or third-party implementations, please also provide the information here, which would be very helpful.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
+If you would like to implement the feature and create a PR, please leave a comment here and that would be much appreciated.
diff --git a/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/general_questions.md b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/general_questions.md
new file mode 100644
index 0000000000000000000000000000000000000000..b5a6451a6cbf81ad1c4d0cae8541621b7991b99b
--- /dev/null
+++ b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/general_questions.md
@@ -0,0 +1,8 @@
+---
+name: General questions
+about: Ask general questions to get help
+title: ''
+labels: ''
+assignees: ''
+
+---
diff --git a/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/reimplementation_questions.md b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/reimplementation_questions.md
new file mode 100644
index 0000000000000000000000000000000000000000..58ffdeb3f0f0556a167d5c5107124064aab10315
--- /dev/null
+++ b/thirdparty/mmdetection/.github/ISSUE_TEMPLATE/reimplementation_questions.md
@@ -0,0 +1,59 @@
+---
+name: Reimplementation Questions
+about: Ask questions about model reimplementation
+title: ''
+labels: 'reimplementation'
+assignees: ''
+
+---
+
+**Notice**
+
+There are several common situations in reimplementation issues, as listed below
+1. Reimplement a model in the model zoo using the provided configs
+2. Reimplement a model in the model zoo on another dataset (e.g., custom datasets)
+3. Reimplement a custom model where all the components are implemented in MMDetection
+4. Reimplement a custom model with new modules implemented by yourself
+
+There are several things to do in the different cases, as below.
+- For cases 1 & 3, please follow the steps in the following sections so that we can help to quickly identify the issue.
+- For cases 2 & 4, please understand that we are not able to offer much help here because we usually do not know the full code, and the users should be responsible for the code they write.
+- One suggestion for cases 2 & 4 is to first check whether the bug lies in the self-implemented code or the original code. For example, users can first make sure that the same model runs well on supported datasets. If you still need help, please describe what you have done and what you obtained in the issue, follow the steps in the following sections, and be as clear as possible so that we can better help you.
+
+**Checklist**
+1. I have searched related issues but cannot get the expected help.
+2. The issue has not been fixed in the latest version.
+
+**Describe the issue**
+
+A clear and concise description of the problem you met and what you have done.
+
+**Reproduction**
+1. What command or script did you run?
+```
+A placeholder for the command.
+```
+2. What config did you run?
+```
+A placeholder for the config.
+```
+3. Did you make any modifications to the code or config? Did you understand what you have modified?
+4. What dataset did you use?
+
+**Environment**
+
+1. Please run `python mmdet/utils/collect_env.py` to collect necessary environment information and paste it here.
+2. You may add additional information that may be helpful for locating the problem, such as
+    - How you installed PyTorch [e.g., pip, conda, source]
+    - Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.)
+
+**Results**
+
+If applicable, paste the related results here, e.g., what you expect and what you get.
+```
+A placeholder for results comparison
+```
+
+**Issue fix**
+
+If you have already identified the reason, you can provide the information here. If you are willing to create a PR to fix it, please also leave a comment here and that would be much appreciated!
diff --git a/thirdparty/mmdetection/.github/workflows/build.yml b/thirdparty/mmdetection/.github/workflows/build.yml new file mode 100644 index 0000000000000000000000000000000000000000..537128d4c1183bcbcd449b5f68804518536d6edc --- /dev/null +++ b/thirdparty/mmdetection/.github/workflows/build.yml @@ -0,0 +1,161 @@ +name: build + +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install pre-commit hook + run: | + pip install pre-commit + pre-commit install + - name: Linting + run: pre-commit run --all-files + - name: Check docstring coverage + run: | + pip install interrogate + interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmdet + + build_cpu: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + torch: [1.3.0, 1.4.0, 1.5.0, 1.6.0] + include: + - torch: 1.3.0 + torchvision: 0.4.2 + - torch: 1.4.0 + torchvision: 0.5.0 + - torch: 1.5.0 + torchvision: 0.6.1 + - torch: 1.6.0 + torchvision: 0.7.0 + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install Pillow + run: pip install Pillow==6.2.2 + if: ${{matrix.torchvision == '0.4.2'}} + - name: Install PyTorch + run: pip install torch==${{matrix.torch}}+cpu torchvision==${{matrix.torchvision}}+cpu -f https://download.pytorch.org/whl/torch_stable.html + - name: Install MMCV + run: | + pip install mmcv-full==latest+torch${{matrix.torch}}+cpu -f https://openmmlab.oss-accelerate.aliyuncs.com/mmcv/dist/index.html + python -c 'import mmcv; print(mmcv.__version__)' + - name: Install unittest dependencies + run: pip install -r requirements/tests.txt -r requirements/optional.txt + - name: Build and install + run: rm -rf .eggs && pip install -e . 
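+    # the editable install above is what the following unit tests exercise;
+    # coverage is collected for the mmdet package only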
+ - name: Run unittests and generate coverage report + run: | + coverage run --branch --source mmdet -m pytest tests/ + coverage xml + coverage report -m + + build_cuda: + runs-on: ubuntu-latest + + env: + CUDA: 10.1.105-1 + CUDA_SHORT: 10.1 + UBUNTU_VERSION: ubuntu1804 + strategy: + matrix: + python-version: [3.7] + torch: [1.3.1, 1.5.1+cu101, 1.6.0+cu101] + include: + - torch: 1.3.1 + torchvision: 0.4.2 + mmcv: "latest+torch1.3.0+cu101" + - torch: 1.5.1+cu101 + torchvision: 0.6.1+cu101 + mmcv: "latest+torch1.5.0+cu101" + - torch: 1.6.0+cu101 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + - torch: 1.6.0+cu101 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + python-version: 3.6 + - torch: 1.6.0+cu101 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + python-version: 3.8 + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install CUDA + run: | + export INSTALLER=cuda-repo-${UBUNTU_VERSION}_${CUDA}_amd64.deb + wget http://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/${INSTALLER} + sudo dpkg -i ${INSTALLER} + wget https://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/7fa2af80.pub + sudo apt-key add 7fa2af80.pub + sudo apt update -qq + sudo apt install -y cuda-${CUDA_SHORT/./-} cuda-cufft-dev-${CUDA_SHORT/./-} + sudo apt clean + export CUDA_HOME=/usr/local/cuda-${CUDA_SHORT} + export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} + export PATH=${CUDA_HOME}/bin:${PATH} + - name: Install Pillow + run: pip install Pillow==6.2.2 + if: ${{matrix.torchvision < 0.5}} + - name: Install PyTorch + run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + - name: Install mmdet dependencies + run: | + pip install mmcv-full==${{matrix.mmcv}} -f https://openmmlab.oss-accelerate.aliyuncs.com/mmcv/dist/index.html + pip install -r requirements.txt + python -c 'import mmcv; print(mmcv.__version__)' + - name: Build and install + run: | + rm -rf .eggs + python setup.py check -m -s + TORCH_CUDA_ARCH_LIST=7.0 pip install . + - name: Run unittests and generate coverage report + run: | + coverage run --branch --source mmdet -m pytest tests/ + coverage xml + coverage report -m + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1.0.10 + with: + file: ./coverage.xml + flags: unittests + env_vars: OS,PYTHON + name: codecov-umbrella + fail_ci_if_error: false + + build_parrots: + runs-on: ubuntu-latest + container: + image: ghcr.io/sunnyxiaohu/parrots-mmcv:1.2.1 + credentials: + username: sunnyxiaohu + password: ${{secrets.CR_PAT}} + + steps: + - uses: actions/checkout@v2 + - name: Install mmdet dependencies + run: | + git clone https://github.com/open-mmlab/mmcv.git && cd mmcv + MMCV_WITH_OPS=1 python setup.py install + cd .. && rm -rf mmcv + python -c 'import mmcv; print(mmcv.__version__)' + pip install -r requirements.txt + - name: Build and install + run: rm -rf .eggs && pip install -e . 
diff --git a/thirdparty/mmdetection/.github/workflows/deploy.yml b/thirdparty/mmdetection/.github/workflows/deploy.yml new file mode 100644 index 0000000000000000000000000000000000000000..2f9458b95c74c4778543022df5c7b8fcff159d8d --- /dev/null +++ b/thirdparty/mmdetection/.github/workflows/deploy.yml @@ -0,0 +1,24 @@ +name: deploy + +on: push + +jobs: + build-n-publish: + runs-on: ubuntu-latest + if: startsWith(github.event.ref, 'refs/tags') + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install torch + run: pip install torch + - name: Install wheel + run: pip install wheel + - name: Build MMDetection + run: python setup.py sdist bdist_wheel + - name: Publish distribution to PyPI + run: | + pip install twine + twine upload dist/* -u __token__ -p ${{ secrets.pypi_password }} diff --git a/thirdparty/mmdetection/.gitignore b/thirdparty/mmdetection/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..77ca0d7c808c77d27777041e64cd8a01054433fc --- /dev/null +++ b/thirdparty/mmdetection/.gitignore @@ -0,0 +1,121 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +data/ +data +.vscode +.idea +.DS_Store + +# custom +*.pkl +*.pkl.json +*.log.json +work_dirs/ + +# Pytorch +*.pth +*.py~ +*.sh~ diff --git a/thirdparty/mmdetection/.pre-commit-config.yaml b/thirdparty/mmdetection/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9e6d30895b03a8d4664849a999cc0fb118c2f7a5 --- /dev/null +++ b/thirdparty/mmdetection/.pre-commit-config.yaml @@ -0,0 +1,35 @@ +repos: + - repo: https://gitlab.com/pycqa/flake8.git + rev: 3.8.3 + hooks: + - id: flake8 + - repo: https://github.com/asottile/seed-isort-config + rev: v2.2.0 + hooks: + - id: seed-isort-config + - repo: https://github.com/timothycrosley/isort + rev: 4.3.21 + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-yapf + rev: v0.30.0 + hooks: + - id: yapf + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.1.0 + hooks: + - id: trailing-whitespace + - id: check-yaml + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: double-quote-string-fixer + - id: check-merge-conflict + - id: fix-encoding-pragma + args: ["--remove"] + - 
id: mixed-line-ending + args: ["--fix=lf"] + - repo: https://github.com/myint/docformatter + rev: v1.3.1 + hooks: + - id: docformatter + args: ["--in-place", "--wrap-descriptions", "79"] diff --git a/thirdparty/mmdetection/.readthedocs.yml b/thirdparty/mmdetection/.readthedocs.yml new file mode 100644 index 0000000000000000000000000000000000000000..73ea4cb7e95530cd18ed94895ca38edd531f0d94 --- /dev/null +++ b/thirdparty/mmdetection/.readthedocs.yml @@ -0,0 +1,7 @@ +version: 2 + +python: + version: 3.7 + install: + - requirements: requirements/docs.txt + - requirements: requirements/readthedocs.txt diff --git a/thirdparty/mmdetection/LICENSE b/thirdparty/mmdetection/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..04adf5cbc620ad190547b092fa449e36df5f7bf4 --- /dev/null +++ b/thirdparty/mmdetection/LICENSE @@ -0,0 +1,203 @@ +Copyright 2018-2019 Open-MMLab. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2019 Open-MMLab. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/thirdparty/mmdetection/README.md b/thirdparty/mmdetection/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1a01c4e2c16fa9d571806e5262abafff0c2dad36 --- /dev/null +++ b/thirdparty/mmdetection/README.md @@ -0,0 +1,153 @@ +
+ +
+ +**News**: We released the technical report on [ArXiv](https://arxiv.org/abs/1906.07155). + +Documentation: https://mmdetection.readthedocs.io/ + +## Introduction + +MMDetection is an open source object detection toolbox based on PyTorch. It is +a part of the OpenMMLab project developed by [Multimedia Laboratory, CUHK](http://mmlab.ie.cuhk.edu.hk/). + +The master branch works with **PyTorch 1.3 to 1.6**. +The old v1.x branch works with PyTorch 1.1 to 1.4, but v2.0 is strongly recommended for faster speed, higher performance, better design and more friendly usage. + +![demo image](resources/coco_test_12510.jpg) + +### Major features + +- **Modular Design** + + We decompose the detection framework into different components and one can easily construct a customized object detection framework by combining different modules. + +- **Support of multiple frameworks out of box** + + The toolbox directly supports popular and contemporary detection frameworks, *e.g.* Faster RCNN, Mask RCNN, RetinaNet, etc. + +- **High efficiency** + + All basic bbox and mask operations run on GPUs. The training speed is faster than or comparable to other codebases, including [Detectron2](https://github.com/facebookresearch/detectron2), [maskrcnn-benchmark](https://github.com/facebookresearch/maskrcnn-benchmark) and [SimpleDet](https://github.com/TuSimple/simpledet). + +- **State of the art** + + The toolbox stems from the codebase developed by the *MMDet* team, who won [COCO Detection Challenge](http://cocodataset.org/#detection-leaderboard) in 2018, and we keep pushing it forward. + +Apart from MMDetection, we also released a library [mmcv](https://github.com/open-mmlab/mmcv) for computer vision research, which is heavily depended on by this toolbox. + +## License + +This project is released under the [Apache 2.0 license](LICENSE). + +## Changelog + +v2.7.0 was released in 30/11/2020. +Please refer to [changelog.md](docs/changelog.md) for details and release history. +A comparison between v1.x and v2.0 codebases can be found in [compatibility.md](docs/compatibility.md). + +## Benchmark and model zoo + +Results and models are available in the [model zoo](docs/model_zoo.md). 
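+
+As a quick illustration of the config/checkpoint pairing used throughout the model zoo, here is a minimal inference sketch (the config and checkpoint file names are examples only; see [get_started.md](docs/get_started.md) for the supported workflow):
+
+```python
+from mmdet.apis import inference_detector, init_detector
+
+config = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+checkpoint = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
+model = init_detector(config, checkpoint, device='cuda:0')
+result = inference_detector(model, 'demo/demo.jpg')  # bbox (and mask) results
+```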
+ +Supported backbones: +- [x] ResNet +- [x] ResNeXt +- [x] VGG +- [x] HRNet +- [x] RegNet +- [x] Res2Net +- [x] ResNeSt + +Supported methods: +- [x] [RPN](configs/rpn) +- [x] [Fast R-CNN](configs/fast_rcnn) +- [x] [Faster R-CNN](configs/faster_rcnn) +- [x] [Mask R-CNN](configs/mask_rcnn) +- [x] [Cascade R-CNN](configs/cascade_rcnn) +- [x] [Cascade Mask R-CNN](configs/cascade_rcnn) +- [x] [SSD](configs/ssd) +- [x] [RetinaNet](configs/retinanet) +- [x] [GHM](configs/ghm) +- [x] [Mask Scoring R-CNN](configs/ms_rcnn) +- [x] [Double-Head R-CNN](configs/double_heads) +- [x] [Hybrid Task Cascade](configs/htc) +- [x] [Libra R-CNN](configs/libra_rcnn) +- [x] [Guided Anchoring](configs/guided_anchoring) +- [x] [FCOS](configs/fcos) +- [x] [RepPoints](configs/reppoints) +- [x] [Foveabox](configs/foveabox) +- [x] [FreeAnchor](configs/free_anchor) +- [x] [NAS-FPN](configs/nas_fpn) +- [x] [ATSS](configs/atss) +- [x] [FSAF](configs/fsaf) +- [x] [PAFPN](configs/pafpn) +- [x] [Dynamic R-CNN](configs/dynamic_rcnn) +- [x] [PointRend](configs/point_rend) +- [x] [CARAFE](configs/carafe/README.md) +- [x] [DCNv2](configs/dcn/README.md) +- [x] [Group Normalization](configs/gn/README.md) +- [x] [Weight Standardization](configs/gn+ws/README.md) +- [x] [OHEM](configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py) +- [x] [Soft-NMS](configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py) +- [x] [Generalized Attention](configs/empirical_attention/README.md) +- [x] [GCNet](configs/gcnet/README.md) +- [x] [Mixed Precision (FP16) Training](configs/fp16/README.md) +- [x] [InstaBoost](configs/instaboost/README.md) +- [x] [GRoIE](configs/groie/README.md) +- [x] [DetectoRS](configs/detectors/README.md) +- [x] [Generalized Focal Loss](configs/gfl/README.md) +- [x] [CornerNet](configs/cornernet/README.md) +- [x] [Side-Aware Boundary Localization](configs/sabl/README.md) +- [x] [YOLOv3](configs/yolo/README.md) +- [x] [PAA](configs/paa/README.md) +- [x] [YOLACT](configs/yolact/README.md) +- [x] [CentripetalNet](configs/centripetalnet/README.md) +- [x] [VFNet](configs/vfnet/README.md) +- [x] [DETR](configs/detr/README.md) + +Some other methods are also supported in [projects using MMDetection](./docs/projects.md). + +## Installation + +Please refer to [get_started.md](docs/get_started.md) for installation. + +## Getting Started + +Please see [get_started.md](docs/get_started.md) for the basic usage of MMDetection. +We provide a [Colab tutorial](demo/MMDet_Tutorial.ipynb), plus full guides for a quick run [with an existing dataset](docs/1_exist_data_model.md) and [with a new dataset](docs/2_new_data_model.md) for beginners. +There are also tutorials for [finetuning models](docs/tutorials/finetune.md), [adding a new dataset](docs/tutorials/new_dataset.md), [designing data pipelines](docs/tutorials/data_pipeline.md), [customizing models](docs/tutorials/customize_models.md), [customizing runtime settings](docs/tutorials/customize_runtime.md) and [useful tools](docs/useful_tools.md). + +Please refer to [FAQ](docs/faq.md) for frequently asked questions. + +## Contributing + +We appreciate all contributions to improve MMDetection. Please refer to [CONTRIBUTING.md](.github/CONTRIBUTING.md) for the contributing guidelines. + +## Acknowledgement + +MMDetection is an open source project built by researchers and engineers from various colleges and companies. We appreciate all the contributors who implement their methods or add new features, as well as the users who give valuable feedback.
+We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their own new detectors. + +## Citation + +If you use this toolbox or benchmark in your research, please cite this project. + +``` +@article{mmdetection, + title = {{MMDetection}: Open MMLab Detection Toolbox and Benchmark}, + author = {Chen, Kai and Wang, Jiaqi and Pang, Jiangmiao and Cao, Yuhang and + Xiong, Yu and Li, Xiaoxiao and Sun, Shuyang and Feng, Wansen and + Liu, Ziwei and Xu, Jiarui and Zhang, Zheng and Cheng, Dazhi and + Zhu, Chenchen and Cheng, Tianheng and Zhao, Qijie and Li, Buyu and + Lu, Xin and Zhu, Rui and Wu, Yue and Dai, Jifeng and Wang, Jingdong + and Shi, Jianping and Ouyang, Wanli and Loy, Chen Change and Lin, Dahua}, + journal= {arXiv preprint arXiv:1906.07155}, + year={2019} +} +``` + +## Contact + +This repo is currently maintained by Kai Chen ([@hellock](http://github.com/hellock)), Yuhang Cao ([@yhcao6](https://github.com/yhcao6)), Wenwei Zhang ([@ZwwWayne](https://github.com/ZwwWayne)), +Jiarui Xu ([@xvjiarui](https://github.com/xvjiarui)). Other core developers include Jiangmiao Pang ([@OceanPang](https://github.com/OceanPang)) and Jiaqi Wang ([@myownskyW7](https://github.com/myownskyW7)). diff --git a/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_detection.py b/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..156aca02588a96a4e279de2e647864b0739e476d --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_detection.py @@ -0,0 +1,55 @@ +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_instance.py b/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_instance.py new file mode 100644 index 
0000000000000000000000000000000000000000..3c5472aab09acdd5efa2cee206d94824f06058f9 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/cityscapes_instance.py @@ -0,0 +1,55 @@ +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/coco_detection.py b/thirdparty/mmdetection/configs/_base_/datasets/coco_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..09a75c404687223c71dcdf0abc7af827f2e498a6 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/coco_detection.py @@ -0,0 +1,48 @@ +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + 
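+ # The test split below reuses the val2017 annotations; held-out COCO test-dev scoring is done on the official evaluation server.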
test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/thirdparty/mmdetection/configs/_base_/datasets/coco_instance.py b/thirdparty/mmdetection/configs/_base_/datasets/coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..f6ea4f4562a8118275a444879a884717b55caa15 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/coco_instance.py @@ -0,0 +1,48 @@ +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/coco_instance_semantic.py b/thirdparty/mmdetection/configs/_base_/datasets/coco_instance_semantic.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c072ec92731af85952840128f6527bc799913a --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/coco_instance_semantic.py @@ -0,0 +1,53 @@ +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + 
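+ # Instance + semantic setup: the semantic branch reads stuff/thing maps via the extra seg_prefix under the train split below.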
samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/deepfashion.py b/thirdparty/mmdetection/configs/_base_/datasets/deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..308b4b2ac4d9e3516ba4a57e9d3b6af91e97f24b --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/deepfashion.py @@ -0,0 +1,53 @@ +# dataset settings +dataset_type = 'DeepFashionDataset' +data_root = 'data/DeepFashion/In-shop/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(750, 1101), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(750, 1101), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + imgs_per_gpu=2, + workers_per_gpu=1, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=train_pipeline, + data_root=data_root), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/DeepFashion_segmentation_gallery.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/lvis_v0.5_instance.py b/thirdparty/mmdetection/configs/_base_/datasets/lvis_v0.5_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..f3da861d6df05b8da58f361815892a416987a927 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/lvis_v0.5_instance.py @@ -0,0 +1,23 @@ +_base_ = 'coco_instance.py' +dataset_type = 'LVISV05Dataset' +data_root = 'data/lvis_v0.5/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_train.json', + img_prefix=data_root + 'train2017/')), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/'), + test=dict( + type=dataset_type, + 
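+ # LVIS v0.5 is evaluated on the val annotations; this setup has no separate test json.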
ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/')) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/lvis_v1_instance.py b/thirdparty/mmdetection/configs/_base_/datasets/lvis_v1_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..e8c5d1b14594a6ea38b215635686c04995338ed7 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/lvis_v1_instance.py @@ -0,0 +1,23 @@ +_base_ = 'coco_instance.py' +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/_base_/datasets/voc0712.py b/thirdparty/mmdetection/configs/_base_/datasets/voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..ae09acdd5c9580217815300abbad9f08b71b37ed --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/voc0712.py @@ -0,0 +1,55 @@ +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=[ + data_root + 'VOC2007/ImageSets/Main/trainval.txt', + data_root + 'VOC2012/ImageSets/Main/trainval.txt' + ], + img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='mAP') diff --git a/thirdparty/mmdetection/configs/_base_/datasets/wider_face.py b/thirdparty/mmdetection/configs/_base_/datasets/wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d649be42bca2955fb56a784fe80bcc2fdce4e1 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/datasets/wider_face.py @@ -0,0 +1,63 @@ +# dataset settings +dataset_type = 'WIDERFaceDataset' +data_root = 'data/WIDERFace/' 
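+ # SSD-style preprocessing for WIDER FACE: mean subtraction only (std of 1) and fixed 300x300 inputs.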
+img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=60, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=2, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'train.txt', + img_prefix=data_root + 'WIDER_train/', + min_size=17, + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/_base_/default_runtime.py b/thirdparty/mmdetection/configs/_base_/default_runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..594de8dcc99b9e4fc0208f327a05910a95a1793c --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/default_runtime.py @@ -0,0 +1,14 @@ +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/thirdparty/mmdetection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..f90b78cef38815b004175d94eee023d3b5ef5e25 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py @@ -0,0 +1,200 @@ +# model settings +model = dict( + type='CascadeRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + 
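+ # Cascade head: three bbox stages trained at increasing IoU thresholds (0.5, 0.6, 0.7) with progressively tighter target_stds.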
type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + 
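+ # Final detection filtering: keep scores above 0.05, NMS at IoU 0.5, at most 100 detections per image; masks binarized at 0.5.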
rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5)) diff --git a/thirdparty/mmdetection/configs/_base_/models/cascade_rcnn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/cascade_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..303276b845fecd041d093e240046de08b6016638 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/cascade_rcnn_r50_fpn.py @@ -0,0 +1,183 @@ +# model settings +model = dict( + type='CascadeRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ])) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + 
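+ # match_low_quality=False: a GT box is not force-assigned to its best proposal when their IoU is below pos_iou_thr.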
ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False) + ]) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/thirdparty/mmdetection/configs/_base_/models/fast_rcnn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/fast_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..b8d9570deeaaf0cf42b0e16619a1dfc22d38ae5d --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/fast_rcnn_r50_fpn.py @@ -0,0 +1,62 @@ +# model settings +model = dict( + type='FastRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)) +test_cfg = dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..5a381636382bdd82dc7650e199ef26a3602513e3 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py @@ -0,0 +1,116 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + 
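+ # C4 variant: the backbone stops at conv4 (out_indices=(2,)); conv5 is applied later inside the RoI head as a shared ResLayer.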
frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe'), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=12000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=6000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py new file mode 100644 index 0000000000000000000000000000000000000000..5b4e4c3d663d84cc124c2389c53a3026dbdc451f --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py @@ -0,0 +1,107 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + strides=(1, 2, 2, 1), + dilations=(1, 1, 1, 2), + out_indices=(3, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe'), + rpn_head=dict( + type='RPNHead', + in_channels=2048, + feat_channels=2048, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + 
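+ # DC5 variant: conv5 runs at stride 1 with dilation 2, so the single feature map stays at 1/16 of the input resolution.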
roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=2048, + featmap_strides=[16]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=2048, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=12000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=6000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..338a5c6b604d4bfe316ad35ab51d6b997f74ba9e --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/faster_rcnn_r50_fpn.py @@ -0,0 +1,111 @@ +model = dict( + type='FasterRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + 
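+ # Sampling scheme: 256 anchors per image for the RPN (up to half positive), 512 proposals for the R-CNN head (up to a quarter positive).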
rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) + # soft-nms is also supported for rcnn testing + # e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05) +) diff --git a/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py b/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..b9b29b0b99de34caadd1d906b1b9367659524c89 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py @@ -0,0 +1,127 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='MaskRCNN', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe'), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=None, + mask_head=dict( + type='FCNMaskHead', + num_convs=0, + in_channels=2048, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + 
num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=12000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=14, + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=6000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5)) diff --git a/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..4472bd0a80d7426278cbb05ab4be9bf411eaef0f --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/mask_rcnn_r50_fpn.py @@ -0,0 +1,124 @@ +# model settings +model = dict( + type='MaskRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + 
min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5)) diff --git a/thirdparty/mmdetection/configs/_base_/models/retinanet_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/retinanet_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..a08b14f60992a8a5c00c668b37eb9a4dbf0ac7a3 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/retinanet_r50_fpn.py @@ -0,0 +1,60 @@ +# model settings +model = dict( + type='RetinaNet', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) diff --git a/thirdparty/mmdetection/configs/_base_/models/rpn_r50_caffe_c4.py b/thirdparty/mmdetection/configs/_base_/models/rpn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..bd5d665e0331711adfb2cb3eeea113ed4762e5db --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/rpn_r50_caffe_c4.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe'), + neck=None, + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + 
type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=12000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/_base_/models/rpn_r50_fpn.py b/thirdparty/mmdetection/configs/_base_/models/rpn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..13e96191deb243d1f625d99ac85bf17503f1f8a8 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/rpn_r50_fpn.py @@ -0,0 +1,60 @@ +# model settings +model = dict( + type='RPN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/_base_/models/ssd300.py b/thirdparty/mmdetection/configs/_base_/models/ssd300.py new file mode 100644 index 0000000000000000000000000000000000000000..ee7cf3adc8aaced804031196c3901f90b0b0d140 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/models/ssd300.py @@ -0,0 +1,49 @@ +# model settings +input_size = 300 +model = dict( + type='SingleStageDetector', + pretrained='open-mmlab://vgg16_caffe', + backbone=dict( + type='SSDVGG', + input_size=input_size, + depth=16, + with_last_pool=False, + ceil_mode=True, + out_indices=(3, 4), + out_feature_indices=(22, 34), + l2_norm_scale=20), + neck=None, + bbox_head=dict( + type='SSDHead', + in_channels=(512, 1024, 512, 256, 256, 256), + num_classes=80, + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]))) +cudnn_benchmark = True +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False) +test_cfg = dict( + nms=dict(type='nms', iou_threshold=0.45), + min_bbox_size=0, + 
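+ # Dense single-stage output: a lower score threshold (0.02) and a larger per-image cap (200) than the two-stage defaults.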
score_thr=0.02, + max_per_img=200) diff --git a/thirdparty/mmdetection/configs/_base_/schedules/schedule_1x.py b/thirdparty/mmdetection/configs/_base_/schedules/schedule_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..12694c87aa0a9fedd9badd4aff2b23280403f15f --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/schedules/schedule_1x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +total_epochs = 12 diff --git a/thirdparty/mmdetection/configs/_base_/schedules/schedule_20e.py b/thirdparty/mmdetection/configs/_base_/schedules/schedule_20e.py new file mode 100644 index 0000000000000000000000000000000000000000..0559030c24ed097d86918bbd589a6a12f8dd8bd5 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/schedules/schedule_20e.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/_base_/schedules/schedule_2x.py b/thirdparty/mmdetection/configs/_base_/schedules/schedule_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..e34095ff2b5ffdb1f9ba07380a6948504715e3d8 --- /dev/null +++ b/thirdparty/mmdetection/configs/_base_/schedules/schedule_2x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/albu_example/README.md b/thirdparty/mmdetection/configs/albu_example/README.md new file mode 100644 index 0000000000000000000000000000000000000000..25e8a0742739fd7e96ee53744141e59863537466 --- /dev/null +++ b/thirdparty/mmdetection/configs/albu_example/README.md @@ -0,0 +1,7 @@ +# Albu Example + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.4 | 16.6 | 38.0 | 34.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208-ab203bcd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208_225520.log.json) | diff --git a/thirdparty/mmdetection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py b/thirdparty/mmdetection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3f879a6c573871ea17b2bf158173aadf14457b6 --- /dev/null +++ b/thirdparty/mmdetection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py @@ -0,0 +1,73 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +albu_train_transforms = [ 
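+ # Albumentations transforms applied during training: geometric jitter, photometric shifts, JPEG compression, channel shuffle and blur.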
+ dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict( + type='OneOf', + transforms=[ + dict( + type='RGBShift', + r_shift_limit=10, + g_shift_limit=10, + b_shift_limit=10, + p=1.0), + dict( + type='HueSaturationValue', + hue_shift_limit=20, + sat_shift_limit=30, + val_shift_limit=20, + p=1.0) + ], + p=0.1), + dict(type='JpegCompression', quality_lower=85, quality_upper=95, p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), +] +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='Pad', size_divisor=32), + dict( + type='Albu', + transforms=albu_train_transforms, + bbox_params=dict( + type='BboxParams', + format='pascal_voc', + label_fields=['gt_labels'], + min_visibility=0.0, + filter_lost_elements=True), + keymap={ + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + }, + update_pad_shape=False, + skip_img_without_anno=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'pad_shape', 'scale_factor')) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/thirdparty/mmdetection/configs/atss/README.md b/thirdparty/mmdetection/configs/atss/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b34307f436dfa5082da4b7e320578dd6fa16bc22 --- /dev/null +++ b/thirdparty/mmdetection/configs/atss/README.md @@ -0,0 +1,21 @@ +# Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection + + +## Introduction + +``` +@article{zhang2019bridging, + title = {Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection}, + author = {Zhang, Shifeng and Chi, Cheng and Yao, Yongqiang and Lei, Zhen and Li, Stan Z.}, + journal = {arXiv preprint arXiv:1912.02424}, + year = {2019} +} +``` + + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.7 | 19.7 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209_102539.log.json) | +| R-101 | pytorch | 1x | 5.6 | 12.3 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.log.json) | diff --git a/thirdparty/mmdetection/configs/atss/atss_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/atss/atss_r101_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..695779ab31b5f848f8c85c13cc4ca637c8590ba7 --- /dev/null +++ b/thirdparty/mmdetection/configs/atss/atss_r101_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = './atss_r50_fpn_1x_coco.py' +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), +) diff --git a/thirdparty/mmdetection/configs/atss/atss_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/atss/atss_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e787622c24b5e3b424ca3400eab31efb3d7876af --- /dev/null +++ b/thirdparty/mmdetection/configs/atss/atss_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='ATSSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/carafe/README.md b/thirdparty/mmdetection/configs/carafe/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0cb572588b9d9d3a4529db7ffebb7474dd519335 --- /dev/null +++ b/thirdparty/mmdetection/configs/carafe/README.md @@ -0,0 +1,30 @@ +# CARAFE: Content-Aware ReAssembly of FEatures + +## Introduction + +We provide config files to reproduce the object detection & instance segmentation results in the ICCV 2019 Oral paper for [CARAFE: Content-Aware ReAssembly of FEatures](https://arxiv.org/abs/1905.02188). + +``` +@inproceedings{Wang_2019_ICCV, + title = {CARAFE: Content-Aware ReAssembly of FEatures}, + author = {Wang, Jiaqi and Chen, Kai and Xu, Rui and Liu, Ziwei and Loy, Chen Change and Lin, Dahua}, + booktitle = {The IEEE International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2019} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below.
+ +| Method | Backbone | Style | Lr schd | Test Proposal Num | Inf time (fps) | Box AP | Mask AP | Config | Download | +|:--------------------:|:--------:|:-------:|:-------:|:-----------------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 16.5 | 38.6 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_20200504_175733.log.json) | +| - | - | - | - | 2000 | | | | | +| Mask R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 14.0 | 39.3 | 35.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_20200503_135957.log.json) | +| - | - | - | - | 2000 | | | | | + +## Implementation + +The CUDA implementation of CARAFE can be found at https://github.com/myownskyW7/CARAFE. diff --git a/thirdparty/mmdetection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py b/thirdparty/mmdetection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dedac3f46b4710d16a8bc66f00663e379b2ebdc7 --- /dev/null +++ b/thirdparty/mmdetection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py b/thirdparty/mmdetection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index
0000000000000000000000000000000000000000..668c023981b9d421e5b51a48757c3819d090307f --- /dev/null +++ b/thirdparty/mmdetection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)), + roi_head=dict( + mask_head=dict( + upsample_cfg=dict( + type='carafe', + scale_factor=2, + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/README.md b/thirdparty/mmdetection/configs/cascade_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3ad625a8e4aa03cd5096bd3ae85221c1a1eb27d6 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/README.md @@ -0,0 +1,52 @@ +# Cascade R-CNN: High Quality Object Detection and Instance Segmentation + +## Introduction +``` +@article{Cai_2019, + title={Cascade R-CNN: High Quality Object Detection and Instance Segmentation}, + ISSN={1939-3539}, + url={http://dx.doi.org/10.1109/tpami.2019.2956516}, + DOI={10.1109/tpami.2019.2956516}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Cai, Zhaowei and Vasconcelos, Nuno}, + year={2019}, + pages={1–1} +} +``` + +## Results and models + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: |:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.2 | | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_20200504_174853.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 40.3 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316_214748.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_20200504_175131.log.json) | +| R-101-FPN | caffe | 1x | 6.2 | | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_20200504_175649.log.json) | +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317_101744.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_20200504_231812.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 10.9 | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316_055608.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 7.6 | | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | | 44.7 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 10.7 | | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357.log.json)| + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 5.9 | | 41.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_20200504_174659.log.json) | +| R-50-FPN | pytorch | 1x | 6.0 | 11.2 | 41.2 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203_170449.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.9 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_20200504_174711.log.json)| +| R-101-FPN | caffe | 1x | 7.8 | | 43.2 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_20200504_174813.log.json)| +| R-101-FPN | pytorch | 1x | 7.9 | 9.8 | 42.9 | 37.3 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203_092521.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 43.4 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_20200504_174836.log.json)| +| X-101-32x4d-FPN | pytorch | 1x | 9.2 | 8.6 | 44.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201_052416.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 9.2 | - | 45.0 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 12.2 | 6.7 | 45.3 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203_044059.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 12.2 | | 45.6 |39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033.log.json)| + +**Notes:** + +- The `20e` schedule in Cascade (Mask) R-CNN indicates decreasing the lr at 16 and 19 epochs, with a total of 20 epochs. 
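As a reading aid for the `20e` note above: with `schedule_20e.py` (SGD base lr 0.02, linear warmup over 500 iterations with `warmup_ratio=0.001`, `step=[16, 19]`), the learning rate follows a warmup-then-step curve. The sketch below is illustrative only, not mmdetection API; the helper name `step_lr` is made up, and the 10x decay factor assumes mmcv's default `gamma=0.1`, which these configs do not override.

```python
# Sketch of the step lr policy with linear warmup behind the "20e" schedule.
# Names are illustrative; only the constants come from schedule_20e.py.
def step_lr(epoch, it, base_lr=0.02, warmup_iters=500,
            warmup_ratio=0.001, steps=(16, 19), gamma=0.1):
    if it < warmup_iters:
        # linear warmup: ramp from base_lr * warmup_ratio up to base_lr
        k = (1 - it / warmup_iters) * (1 - warmup_ratio)
        return base_lr * (1 - k)
    # step decay: one gamma factor per milestone epoch already reached
    passed = sum(epoch >= s for s in steps)
    return base_lr * gamma ** passed

assert abs(step_lr(0, 0) - 0.02 * 0.001) < 1e-9   # warmup start
assert step_lr(10, 10_000) == 0.02                # plateau at base lr
assert abs(step_lr(16, 60_000) - 0.002) < 1e-9    # decreased at epoch 16
assert abs(step_lr(19, 70_000) - 0.0002) < 1e-9   # decreased again at 19
```

Under these assumptions, the `20e` schedule keeps the base lr for 16 epochs, then trains 3 epochs at 0.002 and a final epoch at 0.0002, for 20 epochs in total.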
diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f42165d9fd14600858681e695de7927aac865652 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9212dda4992b4d18cef9a4916b765ef37850237f --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d069f8c9fdbaa55cbc44065740187c242cfa2903 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,2 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b371ed757bf7dd95ef9ecfc2e609ca5ab03795d6 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_1x_coco.py'] + +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..49ab539aa4cdf7c396b6f109efe2dc7a6d596a2a --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1296dc45dd89da9c0801e1242080c67957cace74 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_20e.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d05eb50c7cd501a5bab4ec403a98137b31b9b51b --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0cfc7d78a79836ed06cf242f5f5c32af7f065249 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..33629ee6cc2b903407372d68c6d7ab599fe6598e --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e64c22cdb062a43c082360803caf399fa4141d60 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e8b830fd544b73d2da7a359ea208178a37fc324 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..66666517ad6c7a8427d59cb3efaf33712ef7ed83 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9cb3581910f74063eb1c62b9345a6493098d4a4a --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,2 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c576c7496928eed58400ba11d71af8f4edc1c4b5 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' + +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict(norm_cfg=dict(requires_grad=False), style='caffe')) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + 
train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..87e21fbff82763caf0e14ba641493870a15578b1 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..74f24a202074effdf11661f71af32316b4480fb6 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1fbe6ce9f8a91151f2dfb656e90c9586b6dd35e3 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1afeeef1212db831dd1f097d30b0354e459daa97 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b249bfa0df6037f1433ef6d41f7da16b10645aa2 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='CascadeRCNN', + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py 
b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..500b48cf7882d3e2ecbe6534e2955948bddb6825 --- /dev/null +++ b/thirdparty/mmdetection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + type='CascadeRCNN', + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/centripetalnet/README.md b/thirdparty/mmdetection/configs/centripetalnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5c83422291a2cb0627576315e4edbb578832a4d6 --- /dev/null +++ b/thirdparty/mmdetection/configs/centripetalnet/README.md @@ -0,0 +1,22 @@ +# CentripetalNet + +## Introduction +``` +@InProceedings{Dong_2020_CVPR, +author = {Dong, Zhiwei and Li, Guoxuan and Liao, Yue and Wang, Fei and Ren, Pengju and Qian, Chen}, +title = {CentripetalNet: Pursuing High-Quality Keypoint Pairs for Object Detection}, +booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, +month = {June}, +year = {2020} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [16 x 6](./centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | 190/210 | 16.7 | 3.7 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804.log.json) | + +Note: +- TTA setting is single-scale and `flip=True`. +- The model we released is the best checkpoint rather than the latest checkpoint (box AP 44.8 vs 44.6 in our experiment). 
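For orientation, the `16 x 6` batch notation above means 16 GPUs with `samples_per_gpu=6`, i.e. 96 images per optimizer step. The snippet below is a hedged sketch, not mmdetection code: it only expands that notation and shows how the Adam lr of 0.0005 from the config would change under the linear scaling rule if the GPU count changed. Applying the linear scaling rule (derived for SGD) to Adam is an assumption to validate, not a recommendation.

```python
# Illustrative only: expand the "16 x 6" notation and rescale the lr
# linearly with total batch size when training on a different GPU count.
RELEASED_GPUS, SAMPLES_PER_GPU = 16, 6
RELEASED_BATCH = RELEASED_GPUS * SAMPLES_PER_GPU  # 96 images per step

def scaled_lr(base_lr, base_batch, new_batch):
    """Linear scaling rule: lr grows in proportion to the batch size."""
    return base_lr * new_batch / base_batch

# e.g. 8 GPUs with samples_per_gpu=6 halves the batch to 48 images,
# so a linearly scaled lr would be 0.00025 instead of 0.0005.
print(scaled_lr(0.0005, RELEASED_BATCH, 8 * 6))
```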
diff --git a/thirdparty/mmdetection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py b/thirdparty/mmdetection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..510e5abfdaa392b7bc161b83c34d64aa2e85eb1e --- /dev/null +++ b/thirdparty/mmdetection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CentripetalHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=0, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1), + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# training and testing settings +train_cfg = None +test_cfg = dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms_cfg=dict(type='soft_nms', iou_threshold=0.5, method='gaussian')) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[190]) +total_epochs = 210 diff --git a/thirdparty/mmdetection/configs/cityscapes/README.md b/thirdparty/mmdetection/configs/cityscapes/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..80ce589c5622f1697f3efd97e05a791cef9ebdf5 --- /dev/null +++ b/thirdparty/mmdetection/configs/cityscapes/README.md @@ -0,0 +1,23 @@ +# Cityscapes Dataset + +## Common settings + +- All baselines were trained using 8 GPUs with a batch size of 8 (1 image per GPU), applying the [linear scaling rule](https://arxiv.org/abs/1706.02677) to scale the learning rate. +- All models were trained on `cityscapes_train`, and tested on `cityscapes_val`. +- The 1x training schedule indicates 64 epochs, which corresponds to slightly less than the 24k iterations reported in the original schedule from the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870). +- COCO pre-trained weights are used for initialization. +- A conversion [script](../../tools/convert_datasets/cityscapes.py) is provided to convert Cityscapes into COCO format. Please refer to [install.md](../../docs/install.md#prepare-datasets) for details. +- `CityscapesDataset` implements three evaluation methods. `bbox` and `segm` are standard COCO bbox/mask AP. `cityscapes` is the official Cityscapes evaluation, whose numbers may be slightly higher than the COCO ones. + + +### Faster R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :---: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.2 | - | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502-829424c0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502_114915.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------: | :------------: | :----: | :-----: | :------: | :------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.3 | - | 41.0 | 35.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20200502-6ea77f0e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20200502_114915.log.json) | diff --git a/thirdparty/mmdetection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py b/thirdparty/mmdetection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..a7cfcaa0dd0747587a9e1bb90cf28ce45e46fc2e --- /dev/null +++ b/thirdparty/mmdetection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_detection.py', + '../_base_/default_runtime.py' +] +model = dict( + pretrained=None, + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer =
dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +total_epochs = 8 # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' # noqa diff --git a/thirdparty/mmdetection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py b/thirdparty/mmdetection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..b17735366f145029d345c91df9ce2689d9e73dc0 --- /dev/null +++ b/thirdparty/mmdetection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,45 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained=None, + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=8, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +total_epochs = 8 # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth' # noqa diff --git a/thirdparty/mmdetection/configs/cornernet/README.md b/thirdparty/mmdetection/configs/cornernet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..457200c80886a201133baba1edfc93c86677326a --- /dev/null +++ b/thirdparty/mmdetection/configs/cornernet/README.md @@ -0,0 +1,29 @@ +# CornerNet + +## Introduction +``` +@inproceedings{law2018cornernet, + title={Cornernet: Detecting objects as paired keypoints}, + author={Law, Hei and Deng, Jia}, + booktitle={15th European Conference on Computer Vision, ECCV 2018}, + pages={765--781}, + year={2018}, + organization={Springer Verlag} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [10 x 5](./cornernet_hourglass104_mstest_10x5_210e_coco.py) | 180/210 | 13.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720.log.json) | +| HourglassNet-104 | [8 x 6](./cornernet_hourglass104_mstest_8x6_210e_coco.py) | 180/210 | 15.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618.log.json) | +| HourglassNet-104 | [32 x 3](./cornernet_hourglass104_mstest_32x3_210e_coco.py) | 180/210 | 9.5 | 3.9 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110.log.json) | + +Note: +- TTA setting is single-scale and `flip=True`. +- Experiments with `images_per_gpu=6` are conducted on Tesla V100-SXM2-32GB GPUs, while experiments with `images_per_gpu=3` are conducted on GeForce GTX 1080 Ti GPUs. +- Here are the descriptions of each experiment setting (the resulting total batch sizes are worked out in the sketch below): + - 10 x 5: 10 GPUs with 5 images per GPU. This is the same setting as that reported in the original paper. + - 8 x 6: 8 GPUs with 6 images per GPU. The total batch size is similar to that in the paper, and only 1 node is needed for training. + - 32 x 3: 32 GPUs with 3 images per GPU. This is the default setting for GTX 1080 Ti GPUs and needs 4 nodes to train.
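Below is the arithmetic referenced in the list above, as a tiny illustrative Python sketch (no mmdetection API involved):

```python
# Total batch size for each setting is simply GPUs * images_per_gpu:
# "10 x 5" and "8 x 6" are nearly identical (50 vs. 48 images), while
# "32 x 3" roughly doubles the batch to 96 images per optimizer step.
settings = {"10 x 5": (10, 5), "8 x 6": (8, 6), "32 x 3": (32, 3)}
for name, (gpus, per_gpu) in settings.items():
    print(f"{name}: {gpus * per_gpu} images per optimizer step")
```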
diff --git a/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b0d8771606c8784f6ac1c3343491a2f22a697976 --- /dev/null +++ b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=5, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# training and testing settings +train_cfg = None +test_cfg = dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms_cfg=dict(type='soft_nms', iou_threshold=0.5, method='gaussian')) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +total_epochs = 210 diff --git a/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..b025785df1b2219e993e4588a16fb4fa140ff06f --- /dev/null +++ b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=3, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# training and testing settings +train_cfg = None +test_cfg = dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms_cfg=dict(type='soft_nms', iou_threshold=0.5, method='gaussian')) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +total_epochs = 210 diff --git a/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0e607d4c6440f405d9f5238e701100385e2ece06 --- /dev/null +++ b/thirdparty/mmdetection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', 
'../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# training and testing settings +train_cfg = None +test_cfg = dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms_cfg=dict(type='soft_nms', iou_threshold=0.5, method='gaussian')) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +total_epochs = 210 diff --git a/thirdparty/mmdetection/configs/dcn/README.md b/thirdparty/mmdetection/configs/dcn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..94bec674ed063fae642ed11dd3b8dbbfe7aedb73 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/README.md @@ -0,0 +1,46 @@ +# Deformable Convolutional Networks + +# Introduction + +``` +@inproceedings{dai2017deformable, + title={Deformable Convolutional Networks}, + author={Dai, Jifeng and Qi, Haozhi and Xiong, Yuwen and Li, Yi and Zhang, Guodong and Hu, Han and Wei, Yichen}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} + +@article{zhu2018deformable, + title={Deformable ConvNets v2: More Deformable, Better Results}, + 
author={Zhu, Xizhou and Hu, Han and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1811.11168}, + year={2018} +} +``` + +## Results and Models + +| Backbone | Model | Style | Conv | Pool | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:----------------:|:------------:|:-------:|:-------------:|:------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 4.0 | 17.8 | 41.3 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_212941.log.json) | +| R-50-FPN | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.1 | 17.6 | 41.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130_222144.log.json) | +| *R-50-FPN (dg=4) | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.2 | 17.4 | 41.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130_222058.log.json) | +| R-50-FPN | Faster | pytorch | - | dpool | 1x | 5.0 | 17.2 | 38.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307_203250.log.json) | +| R-50-FPN | Faster | pytorch | - | mdpool | 1x | 5.8 | 16.6 | 38.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307_203304.log.json) | +| R-101-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 12.5 | 42.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_230019.log.json) | +| X-101-32x4d-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 7.3 | 10.0 | 44.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203_001325.log.json) | +| R-50-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 15.4 | 41.8 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203_061339.log.json) | +| R-50-FPN | Mask | pytorch | mdconv(c3-c5) | - | 1x | 4.5 | 15.1 | 41.5 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203_063443.log.json) | +| R-101-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 6.5 | 11.7 | 43.5 | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216_191601.log.json) | +| R-50-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 14.6 | 43.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_220843.log.json) | +| R-101-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 6.4 | 11.0 | 45.0 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_224829.log.json) | +| R-50-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 10.0 | 44.4 | 38.6 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202_010309.log.json) | +| R-101-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 8.0 | 8.6 | 45.8 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204_134006.log.json) | +| X-101-32x4d-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 9.2 | | 47.3 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-20200606_183737.log.json) | + +**Notes:** + +- `dconv` and `mdconv` denote (modulated) deformable convolution, `c3-c5` means adding dconv in resnet stage 3 to 5. `dpool` and `mdpool` denote (modulated) deformable roi pooling. +- The dcn ops are modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch, which should be more memory efficient and slightly faster. +- (*) For R-50-FPN (dg=4), dg is short for deformable_group. This model is trained and tested on Amazon EC2 p3dn.24xlarge instance. 
+- **Memory, Train/Inf time is outdated.** diff --git a/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..081b998f6f54d3d805dbab38b26750a378c0d93f --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3b3683af235f46df36d8793e52c2b9c52e0defeb --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..daaa4729c8280107b19107607ec399230713cf93 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a01df33c94e1f8b5f51a51a780b30a77ce99b2c0 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aa664bd61c78873a74af229caa8f62feca8daa5e --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f5fee7e13cdfd531bf24d7c261e843855124f762 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ 
= '../faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8787088f27a09a3f8fd0d05a1144c0abdedd0a21 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b695f0e19049dc91b7656d7684df151896b7727 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='DeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d1bcf3c102fb660641eda2a1398db3df520caa3a --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ab89c261f970e16a9c4407620bd16a0df9e9e9 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=4, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad7b0346a63dfa3c3ca246b624155fc4fd331a3f --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='ModulatedDeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/thirdparty/mmdetection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py 
b/thirdparty/mmdetection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8357766f50ff638f13ca56bd79d1b1c64e96f3dd --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,15 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cb340022ea27f563b8c4a570cf89b5f09e6434cd --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ababe58dc3fdfbbc6c366f48271db31bf6e2e9e2 --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ca2a67cde62bff078b7c4c0d696a585265e4c3a --- /dev/null +++ b/thirdparty/mmdetection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/deepfashion/README.md b/thirdparty/mmdetection/configs/deepfashion/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c087575b73a1f0efcb0b29c0102aec8fb1ded127 --- /dev/null +++ b/thirdparty/mmdetection/configs/deepfashion/README.md @@ -0,0 +1,43 @@ +# DeepFashion + +MMFashion(https://github.com/open-mmlab/mmfashion) develops "fashion parsing and segmentation" module +based on the dataset +[DeepFashion-Inshop](https://drive.google.com/drive/folders/0B7EVK8r0v71pVDZFQXRsMDZCX1E?usp=sharing). +Its annotation follows COCO style. +To use it, you need to first download the data. Note that we only use "img_highres" in this task. 
+The file tree should be like this: + +```sh +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── DeepFashion +│ │ ├── In-shop +│ │ ├── Anno +│ │ │   ├── segmentation +│ │ │   | ├── DeepFashion_segmentation_train.json +│ │ │   | ├── DeepFashion_segmentation_query.json +│ │ │   | ├── DeepFashion_segmentation_gallery.json +│ │ │   ├── list_bbox_inshop.txt +│ │ │   ├── list_description_inshop.json +│ │ │   ├── list_item_inshop.txt +│ │ │   └── list_landmarks_inshop.txt +│ │ ├── Eval +│ │ │ └── list_eval_partition.txt +│ │ ├── Img +│ │ │ ├── img +│ │ │ │ ├──XXX.jpg +│ │ │ ├── img_highres +│ │ │ └── ├──XXX.jpg + +``` + +After that you can train the Mask RCNN r50 on DeepFashion-In-shop dataset by launching training with the `mask_rcnn_r50_fpn_1x.py` config +or creating your own config file. + +## Model Zoo +| Backbone | Model type | Dataset | bbox detection Average Precision | segmentation Average Precision | Config | Download (Google) | +| :---------: | :----------: | :-----------------: | :--------------------------------: | :----------------------------: | :---------:| :-------------------------: | +| ResNet50 | Mask RCNN | DeepFashion-In-shop | 0.599 | 0.584 |[config](https://github.com/open-mmlab/mmdetection/blob/master/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py)| [model](https://drive.google.com/open?id=1q6zF7J6Gb-FFgM87oIORIt6uBozaXp5r) | [log](https://drive.google.com/file/d/1qTK4Dr4FFLa9fkdI6UVko408gkrfTRLP/view?usp=sharing) | diff --git a/thirdparty/mmdetection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py b/thirdparty/mmdetection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..72e1afce8097f20364622f99b285bf6ee2321f06 --- /dev/null +++ b/thirdparty/mmdetection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py @@ -0,0 +1,10 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/deepfashion.py', '../_base_/schedules/schedule_1x.py', + '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=15), mask_head=dict(num_classes=15))) +# runtime settings +total_epochs = 15 diff --git a/thirdparty/mmdetection/configs/detectors/README.md b/thirdparty/mmdetection/configs/detectors/README.md new file mode 100644 index 0000000000000000000000000000000000000000..103c1cb4631c77ab43671606348f9b70cd59f31b --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/README.md @@ -0,0 +1,37 @@ +# DetectoRS + +## Introduction + +We provide the config files for [DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution](https://arxiv.org/pdf/2006.02334.pdf). + +```BibTeX +@article{qiao2020detectors, + title={DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution}, + author={Qiao, Siyuan and Chen, Liang-Chieh and Yuille, Alan}, + journal={arXiv preprint arXiv:2006.02334}, + year={2020} +} +``` + +## Results and Models + +DetectoRS includes two major components: + +- Recursive Feature Pyramid (RFP). +- Switchable Atrous Convolution (SAC). + +They can be used independently. +Combining them together results in DetectoRS. +The results on COCO 2017 val are shown in the below table. 
+ +| Method | Detector | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| RFP | Cascade + ResNet-50 | 1x | 7.5 | - | 44.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco_20200624_104126.log.json) | +| SAC | Cascade + ResNet-50 | 1x | 5.6 | - | 45.0| | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco_20200624_104402.log.json) | +| DetectoRS | Cascade + ResNet-50 | 1x | 9.9 | - | 47.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco_20200706_001203.log.json) | +| RFP | HTC + ResNet-50 | 1x | 11.2 | - | 46.6 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_rfp_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco_20200624_103053.log.json) | +| SAC | HTC + ResNet-50 | 1x | 9.3 | - | 46.4 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_sac_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco_20200624_103111.log.json) | +| DetectoRS | HTC + ResNet-50 | 1x | 13.6 | - | 49.1 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r50_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco_20200624_103659.log.json) | + +*Note*: This is a re-implementation based on MMDetection-V2. +The original implementation is based on MMDetection-V1. 
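As a quick sanity check that these vendored configs resolve, the detector can be instantiated through the mmdetection v2 API. A minimal sketch, assuming the `thirdparty/mmdetection` submodule is installed so that `mmdet` is importable; the path refers to the DetectoRS HTC config added in this diff:

```python
# Minimal sketch: build the DetectoRS (HTC + ResNet-50) detector from the
# vendored config. Assumes thirdparty/mmdetection is installed.
from mmcv import Config
from mmdet.models import build_detector

cfg = Config.fromfile(
    'thirdparty/mmdetection/configs/detectors/detectors_htc_r50_1x_coco.py')

# In this config generation, train_cfg/test_cfg are top-level keys rather
# than nested under `model`, so they are passed through explicitly.
model = build_detector(
    cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))
print(type(model).__name__)  # HybridTaskCascade with a DetectoRS_ResNet backbone
```

The same pattern applies to any of the RFP/SAC configs in this directory.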
diff --git a/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py b/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4430d8a677e48f84552eb23403bc874c56bda506 --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py b/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ccd9319b2d1badebf3b891c8e3bdd55a435a4b7c --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py b/thirdparty/mmdetection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f76040434f1ff07608c83202f779dfacfe91c323 --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/thirdparty/mmdetection/configs/detectors/detectors_htc_r50_1x_coco.py b/thirdparty/mmdetection/configs/detectors/detectors_htc_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d2fc4f77fcca715c1dfb613306d214b636aa0c0 --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/detectors_htc_r50_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + 
conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/thirdparty/mmdetection/configs/detectors/htc_r50_rfp_1x_coco.py b/thirdparty/mmdetection/configs/detectors/htc_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..496104e12550a1985f9c9e3748a343f69d7df6d8 --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/htc_r50_rfp_1x_coco.py @@ -0,0 +1,24 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/thirdparty/mmdetection/configs/detectors/htc_r50_sac_1x_coco.py b/thirdparty/mmdetection/configs/detectors/htc_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..72d4db963ffd95851b945911b3db9941426583ab --- /dev/null +++ b/thirdparty/mmdetection/configs/detectors/htc_r50_sac_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/detr/README.md b/thirdparty/mmdetection/configs/detr/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ef85706174bcf15989577c4a484e3095e933f2df --- /dev/null +++ b/thirdparty/mmdetection/configs/detr/README.md @@ -0,0 +1,25 @@ +# DETR + +## Introduction + +We provide the config files for DETR: [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872). 
+ +```BibTeX +@inproceedings{detr, + author = {Nicolas Carion and + Francisco Massa and + Gabriel Synnaeve and + Nicolas Usunier and + Alexander Kirillov and + Sergey Zagoruyko}, + title = {End-to-End Object Detection with Transformers}, + booktitle = {ECCV}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Model | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | DETR |150e || | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detr/detr_r50_8x4_150e_coco.py) | [model]() | [log]() | diff --git a/thirdparty/mmdetection/configs/detr/detr_r50_8x4_150e_coco.py b/thirdparty/mmdetection/configs/detr/detr_r50_8x4_150e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4fa060efc21c8036bf7d888d8b60496d3f3b6a5c --- /dev/null +++ b/thirdparty/mmdetection/configs/detr/detr_r50_8x4_150e_coco.py @@ -0,0 +1,129 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DETR', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch'), + bbox_head=dict( + type='TransformerHead', + num_classes=80, + in_channels=2048, + num_fcs=2, + transformer=dict( + type='Transformer', + embed_dims=256, + num_heads=8, + num_encoder_layers=6, + num_decoder_layers=6, + feedforward_channels=2048, + dropout=0.1, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2, + pre_norm=False, + return_intermediate_dec=True), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + loss_cls=dict( + type='CrossEntropyLoss', + bg_cls_weight=0.1, + use_sigmoid=False, + loss_weight=1.0, + class_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='HungarianAssigner', cls_weight=1., bbox_weight=5., + iou_weight=2.)) +test_cfg = dict(max_per_img=100) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[100]) +total_epochs = 150 diff --git a/thirdparty/mmdetection/configs/double_heads/README.md b/thirdparty/mmdetection/configs/double_heads/README.md new file mode 100644 index 0000000000000000000000000000000000000000..049dad5dca88d86768c3a4031c49ce2d57157f84 --- /dev/null +++ b/thirdparty/mmdetection/configs/double_heads/README.md @@ -0,0 +1,19 @@ +# Rethinking Classification and Localization for Object Detection + +## Introduction +``` +@article{wu2019rethinking, + title={Rethinking Classification and Localization for Object Detection}, + author={Yue Wu and Yinpeng Chen and Lu Yuan and Zicheng Liu and Lijuan Wang and Hongzhi Li and Yun Fu}, + year={2019}, + eprint={1904.06493}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 6.8 | 9.5 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130_220238.log.json) | diff --git a/thirdparty/mmdetection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9b8118b4b633c78120c370f877f47e951c2fdb38 --- /dev/null +++ b/thirdparty/mmdetection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DoubleHeadRoIHead', + reg_roi_scale_factor=1.3, + bbox_head=dict( + _delete_=True, + type='DoubleConvFCBBoxHead', + num_convs=4, + num_fcs=2, + in_channels=256, + conv_out_channels=1024, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0)))) diff --git a/thirdparty/mmdetection/configs/dynamic_rcnn/README.md b/thirdparty/mmdetection/configs/dynamic_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d3237747ca02dc97f8def5a09f6d73c8bf753230 --- /dev/null +++ b/thirdparty/mmdetection/configs/dynamic_rcnn/README.md @@ -0,0 +1,18 @@ +# Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training + +## Introduction + +``` +@article{DynamicRCNN, + author = {Hongkai Zhang and Hong Chang and Bingpeng Ma and Naiyan Wang and Xilin Chen}, + title = {Dynamic {R-CNN}: Towards High Quality Object Detection via Dynamic Training}, + journal = {arXiv preprint arXiv:2004.06002}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.8 | | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x_20200618_095048.log.json) | diff --git a/thirdparty/mmdetection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x.py b/thirdparty/mmdetection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..60f9c5043a6d8e7da0c6038aca868ad7e966c534 --- /dev/null +++ b/thirdparty/mmdetection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x.py @@ -0,0 +1,28 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DynamicRoIHead', + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +train_cfg = dict( + rpn_proposal=dict(nms_thr=0.85), + rcnn=dict( + dynamic_rcnn=dict( + iou_topk=75, + 
beta_topk=10, + update_iter_interval=100, + initial_iou=0.4, + initial_beta=1.0))) +test_cfg = dict(rpn=dict(nms_thr=0.85)) diff --git a/thirdparty/mmdetection/configs/empirical_attention/README.md b/thirdparty/mmdetection/configs/empirical_attention/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e737ea005304a3d942fe8935061b08134eef2aa --- /dev/null +++ b/thirdparty/mmdetection/configs/empirical_attention/README.md @@ -0,0 +1,22 @@ +# An Empirical Study of Spatial Attention Mechanisms in Deep Networks + +## Introduction + +``` +@article{zhu2019empirical, + title={An Empirical Study of Spatial Attention Mechanisms in Deep Networks}, + author={Zhu, Xizhou and Cheng, Dazhi and Zhang, Zheng and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1904.05873}, + year={2019} +} +``` + + +## Results and Models + +| Backbone | Attention Component | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------------:|:----:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 1111 | N | 1x | 8.0 | 13.8 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130_210344.log.json) | +| R-50 | 0010 | N | 1x | 4.2 | 18.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130_210125.log.json) | +| R-50 | 1111 | Y | 1x | 8.0 | 12.7 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130_204442.log.json) | +| R-50 | 0010 | Y | 1x | 4.2 | 17.1 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130_210410.log.json) | diff --git a/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py 
b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a544e3ab636aea0efe56007a0ea40608b6e71ad4 --- /dev/null +++ b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bbefd27aa02f427e27068b37ecf4d30fbd49b519 --- /dev/null +++ b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..13a4645bfdb50d5a2f04cee49ecc5f7647d10acf --- /dev/null +++ b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1f26c081da27811f856fe9973eb444c82604727 --- /dev/null +++ b/thirdparty/mmdetection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/fast_rcnn/README.md b/thirdparty/mmdetection/configs/fast_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b01c4b5956d7beb18a4ebbdfd3845d7156dce63d --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/README.md @@ -0,0 +1,13 @@ +# Fast R-CNN + +## Introduction +``` +@inproceedings{girshick2015fast, + title={Fast r-cnn}, + 
author={Girshick, Ross}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2015} +} +``` + +## Results and models diff --git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6db24b1e8aa26de5b153f4adcc8ae8dbd885186b --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fast_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a76b3997fbbed5883adde2122dc17ee2262fa80 --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c9d5b4bef7cf527dc9af1856b6773fc061bda2a7 --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fast_rcnn_r50_fpn_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..178deb6036e365815944620bce335aaf1233d3af --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(type='BN', requires_grad=False), style='caffe')) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff 
--git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f080e9d3b1ddade22341aa38c6258eaee78a50 --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/fast_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..03a87c70454d3a2b2f19762f0ca78c15220f8b5b --- /dev/null +++ b/thirdparty/mmdetection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/faster_rcnn/README.md b/thirdparty/mmdetection/configs/faster_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a331ccd3bf4b2590958e47b08ff8dc2d38534ca9 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/README.md @@ -0,0 +1,49 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Introduction +``` +@article{Ren_2017, + title={Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + year={2017}, + month={Jun}, +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | 
:------: | :------------: | :----: | :------: | :--------: | +| R-50-DC5 | caffe | 1x | - | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909.log.json) | +| R-50-FPN | caffe | 1x | 3.8 | | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_20200504_180032.log.json) | +| R-50-FPN | pytorch | 1x | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_20200504_210434.log.json) | +| R-101-FPN | caffe | 1x | 5.7 | | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_20200504_180057.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 15.6 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130_204655.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_20200504_210455.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 13.8 | 41.2 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203_000520.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_20200506_041400.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 9.4 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204_134340.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033.log.json) | + +## Different regression loss +We trained Faster R-CNN with the R-50-FPN (PyTorch-style) backbone on the 1x schedule, varying only the bounding box regression loss.
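+Each loss variant below only overrides `loss_bbox` in the RoI bbox head; the IoU-based losses also set `reg_decoded_bbox=True` so the loss is computed on decoded boxes rather than regression deltas. A minimal sketch, matching the `faster_rcnn_r50_fpn_giou_1x_coco.py` config added later in this diff:
+```python
+_base_ = './faster_rcnn_r50_fpn_1x_coco.py'
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(
+            # decode proposals into absolute boxes before computing the loss,
+            # since IoU-based losses operate on box coordinates, not deltas
+            reg_decoded_bbox=True,
+            loss_bbox=dict(type='GIoULoss', loss_weight=10.0))))
+```
+The table below lists the results obtained with each loss.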
+ +| Backbone | Loss type | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-------: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | L1Loss | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | IoULoss | | | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco_20200506_095954.log.json) | +| R-50-FPN | GIoULoss | | | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco_20200505_161120.log.json) | +| R-50-FPN | BoundedIoULoss | | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco_20200505_160738.log.json) | + +## Pre-trained Models +We also trained some models with longer schedules and multi-scale training; users can fine-tune them for downstream tasks.
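+Fine-tuning follows the same pattern as the `faster_rcnn_r50_fpn_1x_coco-person.py` config in this diff: inherit a config, override the class list and head size, and point `load_from` at a released checkpoint. A minimal sketch (the checkpoint URL is a placeholder; take the real one from the table below):
+```python
+_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py'
+# shrink the detection head to a single 'person' class
+model = dict(roi_head=dict(bbox_head=dict(num_classes=1)))
+classes = ('person', )
+data = dict(
+    train=dict(classes=classes),
+    val=dict(classes=classes),
+    test=dict(classes=classes))
+# initialize from the released COCO checkpoint instead of ImageNet weights
+load_from = 'http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/...'  # placeholder
+```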
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | caffe | 1x | - | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851.log.json) +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | caffe | 3x | - | | 38.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107.log.json) +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | caffe | 2x | 4.3 | | 39.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_20200504_231813.log.json) +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 4.3 | | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20200504_163323.log.json) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..95c7238fcf38a274900599dae6c804829bb600ab --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d2edab113649c38cac3c7dc3ff425462f7c40ffd --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = 
dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9367a3c83aeb1e05f38f4db9fb0110e731dd859c --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..92344a151be9af53659845b51e4ece7f0a7b636f --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee2010c64a4c24e18b81c0be7e002ea474c57a44 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + 
dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..14eaef2dffea606027001b69d12d11cb46693e1c --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c10a8d4f9ad7231ead68c987200cf79db33e539c --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..762c72be00b94445897adb8b49420628fec9c33b --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', 
size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4b87b2ce58b2efc2461046df897038fdd5128cee --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ef34b92683bd58c9527cc560811e793cdd4bc428 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d95ed61c4bcbba59a93cc46cabf14b4c0b9fa11 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git 
a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car.py new file mode 100644 index 0000000000000000000000000000000000000000..7f40dd8da2b57565642b2b2ea8d03520d2ed81dc --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=3))) +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) +# TODO: Update model url after bumping to V2.0 +load_from = 'https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/faster_rcnn_r50_fpn_1x_20181010-3d1b3351.pth' # noqa diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person.py new file mode 100644 index 0000000000000000000000000000000000000000..cab20dedde1ed6c8b978b989dc26f48971194792 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person.py @@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=1))) +classes = ('person', ) +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..009bd93d06b3284c7b31f33f82d636f774e86b74 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e77a7fa8d6b8c1ad7fe293bc932d621464287e0c --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..648081f19ca7d3ca9a7362a4a41e514d753ce4e8 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='BoundedIoULoss', loss_weight=10.0)))) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5556c4977e221182b013b68fef4b73d1b0605bf3 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)))) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ddf663e4f0e1525490a493674b32b3dc4c781bb2 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='IoULoss', loss_weight=10.0)))) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f79ee70dcdf24497681c57e8a22b9127b050db0f --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +train_cfg = dict(rcnn=dict(sampler=dict(type='OHEMSampler'))) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba6b017ff6269824cb960700732b6116d2a3981 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +test_cfg = dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='soft_nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c536fccc5efbc3a0c58d5bdc5df9be8579d15571 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..927609206e1323dcf1173c4a5393e3f03d534c0a --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git 
a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b588b4eca3df7de341c346aa9ecd0b171194f329 --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e87d21a4e6a241f5af892eb11aa82e2c6012a31c --- /dev/null +++ b/thirdparty/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/fcos/README.md b/thirdparty/mmdetection/configs/fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d7c0d419736e205914c6e02a14da603ca7b94b12 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/README.md @@ -0,0 +1,37 @@ +# FCOS: Fully Convolutional One-Stage Object Detection + +## Introduction + +``` +@article{tian2019fcos, + title={FCOS: Fully Convolutional One-Stage Object Detection}, + author={Tian, Zhi and Shen, Chunhua and Chen, Hao and He, Tong}, + journal={arXiv preprint arXiv:1904.01355}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | GN | MS train | Tricks | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | N | N | N | N | 1x | 5.2 | 22.9 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_4x4_1x_coco/fcos_r50_caffe_fpn_1x_4gpu_20200218-c229552f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_4x4_1x_coco/20200224_230410.log.json) | +| R-50 | caffe | Y | N | N | N | 1x | 6.5 | 22.7 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco/fcos_r50_caffe_fpn_gn_1x_4gpu_20200218-7831950c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco/20200130_004230.log.json) | +| R-50 | caffe | Y | N | Y | N | 1x | - | - | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco_20200603-67b3859f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco_20200603.log.json)| +| R-50 | caffe | Y | N | Y | Y | 1x | - | - | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco_20200603-ed16da04.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco_20200603.log.json)| +| R-50 | caffe | Y | N | N | N | 2x | - | - | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco/fcos_r50_caffe_fpn_gn_2x_4gpu_20200218-8ceb5c76.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco/20200130_004232.log.json) | +| R-101 | caffe | Y | N | N | N | 1x | 10.2 | 17.3 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco/fcos_r101_caffe_fpn_gn_1x_4gpu_20200218-13e2cc55.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco/20200130_004231.log.json) | +| R-101 | caffe | Y | N | N | N | 2x | - | - | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco/fcos_r101_caffe_fpn_gn_2x_4gpu_20200218-d2261033.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco/20200130_004231.log.json) | + + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | Y | 2x | 6.5 | 22.9 | 38.7 | | | +| R-101 | caffe | Y | Y | 2x | 10.2 | 17.3 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fcos_mstrain_640_800_r101_caffe_fpn_gn_2x_4gpu_20200218-d8a4f4cf.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco/20200130_004232.log.json) | +| X-101 | pytorch | Y | Y | 2x | 10.0 | 9.3 | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco_20200229-11f8c079.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco_20200229_222104.log.json) | + +**Notes:** +- To be consistent with the author's implementation, we use 4 GPUs with 4 images/GPU for R-50 and R-101 models, and 8 GPUs with 2 image/GPU for X-101 models. +- The X-101 backbone is X-101-64x4d. +- Tricks means setting `norm_on_bbox`, `centerness_on_reg`, `center_sampling` as `True`. +- DCN means using `DCNv2` in both backbone and head. diff --git a/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d83fa17f17379067c2f3f659ac9ed37ccf8e20ee --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,51 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' + +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=False, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0))) +# training and testing settings +test_cfg = dict(nms=dict(type='nms', iou_threshold=0.6)) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67edb415c5feabe8a1eb1bfefb6a7368e3a0b2b1 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_4x4_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' + +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, 
True)), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=True, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0))) +# training and testing settings +test_cfg = dict(nms=dict(type='nms', iou_threshold=0.6)) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/thirdparty/mmdetection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..42b030b636cb670a7acd68ddf836e8db59428f16 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict(bbox_head=dict(center_sampling=True, center_sample_radius=1.5)) diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1bab973547ed59c36ab14e493f171cca1492e613 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6632b0c9991468cf0ac99408e8d56050e37b2cf1 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = ['./fcos_r50_caffe_fpn_gn-head_4x4_2x_coco.py'] +model = dict( + pretrained='open-mmlab://detectron/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..472f7269e46d8f3730b09db5443420ac971058b4 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,44 @@ +_base_ = 
'./fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron/resnet101_caffe', + backbone=dict(depth=101)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4697e9e7efc86771b6dfc6dabd36b8e2b1788b09 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_4x4_1x_coco.py @@ -0,0 +1,106 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FCOS', + pretrained='open-mmlab://detectron/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + extra_convs_on_inputs=False, # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=None, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), 
+] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='constant', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[8, 11]) +total_epochs = 12 diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b0bcad9e101e4a661f8995d7aba54ef86517ba59 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FCOS', + pretrained='open-mmlab://detectron/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + extra_convs_on_inputs=False, # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + 
samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='constant', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[8, 11]) +total_epochs = 12 diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3a3ccc149b9458bec0e133692e771473d6cd0c18 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5983c00f9a005779d71dac9ee84e590e2ee16ec7 --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco.py b/thirdparty/mmdetection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dc27edd6084d867f4b7bb048cd87492fd6d7ed3c --- /dev/null +++ b/thirdparty/mmdetection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_4x2_2x_coco.py @@ -0,0 +1,59 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/foveabox/README.md b/thirdparty/mmdetection/configs/foveabox/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4ea751e930b3bdca48dea1d016237528e245bae5 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/README.md @@ -0,0 +1,36 @@ +# FoveaBox: Beyond Anchor-based Object Detector + +FoveaBox is an accurate, flexible and completely anchor-free object detection framework, as presented in our paper [https://arxiv.org/abs/1904.03797](https://arxiv.org/abs/1904.03797): +Different from previous anchor-based methods, FoveaBox directly learns the probability that an object exists and the bounding box coordinates, without anchor references. This is achieved by (a) predicting category-sensitive semantic maps for the object existence probability, and (b) producing a category-agnostic bounding box for each position that potentially contains an object.
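+In config terms this corresponds to the `FOVEA` detector type, whose `bbox_head` directly predicts per-position class maps and boxes; the "align" variants in the results below only add deformable convolution and group normalization to that head. A sketch of the override, matching the `fovea_align_r50_fpn_gn-head_4x4_2x_coco.py` config later in this diff:
+```python
+_base_ = './fovea_r50_fpn_4x4_1x_coco.py'
+model = dict(
+    bbox_head=dict(
+        # align the classification branch with deformable convolution
+        with_deform=True,
+        norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)))
+# 2x learning policy
+lr_config = dict(step=[16, 22])
+total_epochs = 24
+optimizer_config = dict(
+    _delete_=True, grad_clip=dict(max_norm=35, norm_type=2))
+```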
+ +## Main Results +### Results on R50/101-FPN + +| Backbone | Style | align | ms-train| Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | 5.6 | 24.1 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219_223025.log.json) | +| R-50 | pytorch | N | N | 2x | 5.6 | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203_112043.log.json) | +| R-50 | pytorch | Y | N | 2x | 8.1 | 19.4 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203_134252.log.json) | +| R-50 | pytorch | Y | Y | 2x | 8.1 | 18.3 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205_112557.log.json) | +| R-101 | pytorch | N | N | 1x | 9.2 | 17.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219_011740.log.json) | +| R-101 | pytorch | N | N | 2x | 11.7 | - | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208_202059.log.json) | +| R-101 | pytorch | Y | N | 2x | 11.7 | 14.7 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208_203337.log.json) | +| R-101 | pytorch | Y | Y | 2x | 11.7 | 14.7 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208_202124.log.json) | + +[1] *1x and 2x mean the model is trained for 12 and 24 epochs, respectively.* \ +[2] *Align means utilizing deformable convolution to align the cls branch.* \ +[3] *All results are obtained with a single model and without any test time data augmentation.*\ +[4] *We use 4 GPUs for training.* + +Any pull requests or issues are welcome. + +## Citations +Please consider citing our paper in your publications if the project helps your research. BibTeX reference is as follows. +``` +@article{kong2019foveabox, + title={FoveaBox: Beyond Anchor-based Object Detector}, + author={Kong, Tao and Sun, Fuchun and Liu, Huaping and Jiang, Yuning and Shi, Jianbo}, + journal={arXiv preprint arXiv:1904.03797}, + year={2019} +} +``` diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..49a99af2b1ce205c70df26b877345b9fccbbdd16 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3f35dd6d5c207c66ebb0514035290eb05818c1a2 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,27 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) 
+total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..47cf1125fcca6e0b06774377ea10a62c864a13ca --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e80310eab6bbaf0b716f3961408e6586ae2d41d2 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,25 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..907bede158c7043d2a3b0d9daf64a0b6a13bc83c --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..92963935466ab2db968a8f241420c9795ab2b1b0 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fovea_r50_fpn_4x4_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4b62c81212e77fedc8581a855077f9b541ff67a2 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FOVEA', + 
pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + num_outs=5, + add_extra_convs='on_input'), + bbox_head=dict( + type='FoveaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + base_edge_list=[16, 32, 64, 128, 256], + scale_ranges=((1, 64), (32, 128), (64, 256), (128, 512), (256, 2048)), + sigma=0.4, + with_deform=False, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=1.50, + alpha=0.4, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) +# training and testing settings +train_cfg = dict() +test_cfg = dict( + nms_pre=1000, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +data = dict(samples_per_gpu=4, workers_per_gpu=4) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py b/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b4559bb3d9ee631f6e3ca38a9692ac886431a7c8 --- /dev/null +++ b/thirdparty/mmdetection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/fp16/README.md b/thirdparty/mmdetection/configs/fp16/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e8ec8721084a1b9be11fd2003c2e52c505d079fd --- /dev/null +++ b/thirdparty/mmdetection/configs/fp16/README.md @@ -0,0 +1,19 @@ +# Mixed Precision Training + +## Introduction +``` +@article{micikevicius2017mixed, + title={Mixed precision training}, + author={Micikevicius, Paulius and Narang, Sharan and Alben, Jonah and Diamos, Gregory and Elsen, Erich and Garcia, David and Ginsburg, Boris and Houston, Michael and Kuchaiev, Oleksii and Venkatesh, Ganesh and others}, + journal={arXiv preprint arXiv:1710.03740}, + year={2017} +} +``` + +## Results and Models + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN | R-50 | pytorch | 1x | 3.4 | 28.8 | 37.5 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204_143530.log.json) | +| Mask R-CNN | R-50 | pytorch | 1x | 3.6 | 24.1 | 38.1 | 34.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205_130539.log.json) | 
+| RetinaNet | R-50 | pytorch | 1x | 2.8 | 31.6 | 36.4 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702-0dbfb212.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702_020127.log.json) |
diff --git a/thirdparty/mmdetection/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py b/thirdparty/mmdetection/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..78fa5b6c6a895cb04e1813462ed6a7eefd8c1fa6
--- /dev/null
+++ b/thirdparty/mmdetection/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py
@@ -0,0 +1,3 @@
+_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+# fp16 settings
+fp16 = dict(loss_scale=512.)
diff --git a/thirdparty/mmdetection/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py b/thirdparty/mmdetection/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..f506ea815fedd6faefad9a06d7f466b86e8d2622
--- /dev/null
+++ b/thirdparty/mmdetection/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py
@@ -0,0 +1,3 @@
+_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py'
+# fp16 settings
+fp16 = dict(loss_scale=512.)
diff --git a/thirdparty/mmdetection/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py b/thirdparty/mmdetection/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..519c4dbacb1a876dcd973f2a82ddeef98787619d
--- /dev/null
+++ b/thirdparty/mmdetection/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py
@@ -0,0 +1,3 @@
+_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py'
+# fp16 settings
+fp16 = dict(loss_scale=512.)
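The `loss_scale=512.` entry in these fp16 configs enables static loss scaling: the loss is multiplied by a constant before the backward pass so that small fp16 gradients do not underflow, and the gradients are divided by the same constant before the optimizer step. Below is a minimal PyTorch sketch of that idea, using a toy model on an assumed CUDA device; it is not mmcv's `Fp16OptimizerHook`, which additionally maintains fp32 master copies of the weights.

```python
import torch

scale = 512.0                                # mirrors loss_scale=512. above
model = torch.nn.Linear(8, 1).cuda().half()  # toy fp16 model (GPU assumed)
opt = torch.optim.SGD(model.parameters(), lr=0.01)

x = torch.randn(4, 8, device='cuda', dtype=torch.half)
loss = model(x).float().mean()               # accumulate the loss in fp32

(loss * scale).backward()                    # scale up before backward
for p in model.parameters():
    if p.grad is not None:
        p.grad /= scale                      # unscale before the step
opt.step()
```

When the `fp16` dict is present, mmdetection's training entry point installs the corresponding optimizer hook automatically, so no training-code changes are needed to use these configs.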
diff --git a/thirdparty/mmdetection/configs/free_anchor/README.md b/thirdparty/mmdetection/configs/free_anchor/README.md new file mode 100644 index 0000000000000000000000000000000000000000..85a675e92c3148f0e855f5ffc6681c62bb7b48f1 --- /dev/null +++ b/thirdparty/mmdetection/configs/free_anchor/README.md @@ -0,0 +1,24 @@ +# FreeAnchor: Learning to Match Anchors for Visual Object Detection + +## Introduction + +``` +@inproceedings{zhang2019freeanchor, + title = {{FreeAnchor}: Learning to Match Anchors for Visual Object Detection}, + author = {Zhang, Xiaosong and Wan, Fang and Liu, Chang and Ji, Rongrong and Ye, Qixiang}, + booktitle = {Neural Information Processing Systems}, + year = {2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:--------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.9 | 18.4 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130_095625.log.json) | +| R-101 | pytorch | 1x | 6.8 | 14.9 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130_100723.log.json) | +| X-101-32x4d | pytorch | 1x | 8.1 | 11.1 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130_095627.log.json) | + +**Notes:** +- We use 8 GPUs with 2 images/GPU. +- For more settings and models, please refer to the [official repo](https://github.com/zhangxiaosong18/FreeAnchor). 
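As with the other model-zoo entries, a downloaded FreeAnchor checkpoint can be exercised through mmdetection's high-level inference API. A short sketch (the local paths are placeholders; the checkpoint filename comes from the table above):

```python
from mmdet.apis import inference_detector, init_detector

# Placeholder paths: download the checkpoint linked in the table above first.
config = 'configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py'
checkpoint = 'checkpoints/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth'

model = init_detector(config, checkpoint, device='cuda:0')
result = inference_detector(model, 'demo/demo.jpg')  # per-class bbox arrays
```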
diff --git a/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..9917d5c4dc8b9c0149a963e24ecfa1098c1a9995
--- /dev/null
+++ b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py
@@ -0,0 +1,2 @@
+_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py'
+model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101))
diff --git a/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..28f983c29edd071b32a50f18ac7b3f5c1bfdda88
--- /dev/null
+++ b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py
@@ -0,0 +1,22 @@
+_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py'
+model = dict(
+ bbox_head=dict(
+ _delete_=True,
+ type='FreeAnchorRetinaHead',
+ num_classes=80,
+ in_channels=256,
+ stacked_convs=4,
+ feat_channels=256,
+ anchor_generator=dict(
+ type='AnchorGenerator',
+ octave_base_scale=4,
+ scales_per_octave=3,
+ ratios=[0.5, 1.0, 2.0],
+ strides=[8, 16, 32, 64, 128]),
+ bbox_coder=dict(
+ type='DeltaXYWHBBoxCoder',
+ target_means=[.0, .0, .0, .0],
+ target_stds=[0.1, 0.1, 0.2, 0.2]),
+ loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.75)))
+optimizer_config = dict(
+ _delete_=True, grad_clip=dict(max_norm=35, norm_type=2))
diff --git a/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2640c07e86db2d8cc2e6654c78077df10789b4c
--- /dev/null
+++ b/thirdparty/mmdetection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py
@@ -0,0 +1,12 @@
+_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py'
+model = dict(
+ pretrained='open-mmlab://resnext101_32x4d',
+ backbone=dict(
+ type='ResNeXt',
+ depth=101,
+ groups=32,
+ base_width=4,
+ num_stages=4,
+ out_indices=(0, 1, 2, 3),
+ frozen_stages=1,
+ style='pytorch'))
diff --git a/thirdparty/mmdetection/configs/fsaf/README.md b/thirdparty/mmdetection/configs/fsaf/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8039bfb36ed39081208b0f3d23843489bd6da4f7
--- /dev/null
+++ b/thirdparty/mmdetection/configs/fsaf/README.md
@@ -0,0 +1,39 @@
+# Feature Selective Anchor-Free Module for Single-Shot Object Detection
+
+FSAF is an anchor-free method published in CVPR2019 ([https://arxiv.org/pdf/1903.00621.pdf](https://arxiv.org/pdf/1903.00621.pdf)).
+In practice it is equivalent to an anchor-based method with a single anchor at each feature-map position in each FPN level, which is how it is implemented here.
+Only the anchor-free branch is released, for better compatibility with the current framework and a smaller computational budget.
+
+In the original paper, feature-map positions within the central 0.2-0.5 area of a ground-truth box are tagged as ignored. However,
+it was found empirically that a hard threshold (0.2-0.2) gives a further performance gain
(see the table below).
+
+## Main Results
+### Results on R50/R101/X101-FPN
+
+| Backbone | ignore range | ms-train | Lr schd | Train Mem (GB) | Train time (s/iter) | Inf time (fps) | box AP | Config | Download |
+|:----------:| :-------: |:-------:|:-------:|:------------:|:---------------:|:--------------:|:-------------:|:------:|:--------:|
+| R-50 | 0.2-0.5 | N | 1x | 3.15 | 0.43 | 12.3 | 36.0 (35.9) | - | [model](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715-b555b0e0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715_094657.log.json) |
+| R-50 | 0.2-0.2 | N | 1x | 3.15 | 0.43 | 13.0 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco_20200428_072327.log.json)|
+| R-101 | 0.2-0.2 | N | 1x | 5.08 | 0.58 | 10.8 | 39.3 (37.9) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco_20200428_160348.log.json)|
+| X-101 | 0.2-0.2 | N | 1x | 9.38 | 1.23 | 5.6 | 42.4 (41.0) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco_20200428_160424.log.json)|
+
+**Notes:**
+ - *1x means the model is trained for 12 epochs.*
+ - *AP values in brackets are those reported in the original paper.*
+ - *All results are obtained with a single model and single-scale testing.*
+ - *The X-101 backbone is ResNeXt-101-64x4d.*
+ - *All pretrained backbones use the pytorch style.*
+ - *All models are trained on 8 Titan Xp GPUs and tested on a single GPU.*
+
+## Citations
+BibTeX reference is as follows.
+``` +@inproceedings{zhu2019feature, + title={Feature Selective Anchor-Free Module for Single-Shot Object Detection}, + author={Zhu, Chenchen and He, Yihui and Savvides, Marios}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={840--849}, + year={2019} +} +``` diff --git a/thirdparty/mmdetection/configs/fsaf/fsaf_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fsaf/fsaf_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..95a7ae2de598f5c89ddf8f0f82be653aa85bd3e6 --- /dev/null +++ b/thirdparty/mmdetection/configs/fsaf/fsaf_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/fsaf/fsaf_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fsaf/fsaf_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..deb14528efc266e1850e22fb6c171c40e6f7b997 --- /dev/null +++ b/thirdparty/mmdetection/configs/fsaf/fsaf_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='FSAF', + bbox_head=dict( + type='FSAFHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + reg_decoded_bbox=True, + # Only anchor-free branch is implemented. The anchor generator only + # generates 1 anchor at each feature point, as a substitute of the + # grid of features. + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(_delete_=True, type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + _delete_=True, + type='IoULoss', + eps=1e-6, + loss_weight=1.0, + reduction='none'), + )) + +# training and testing settings +train_cfg = dict( + assigner=dict( + _delete_=True, + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=10, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b966f24969a60b95878b0b86bb8dae7b8cb3f1ae --- /dev/null +++ b/thirdparty/mmdetection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/gcnet/README.md b/thirdparty/mmdetection/configs/gcnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7c9e29c1ba948b29a3927ff5977a020fb2b14aad --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/README.md @@ -0,0 +1,56 @@ +# GCNet for Object Detection + +By [Yue Cao](http://yue-cao.me), [Jiarui Xu](http://jerryxu.net), [Stephen Lin](https://scholar.google.com/citations?user=c3PYmxUAAAAJ&hl=en), Fangyun Wei, [Han 
Hu](https://sites.google.com/site/hanhushomepage/).
+
+We provide config files to reproduce the results in the paper for
+["GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond"](https://arxiv.org/abs/1904.11492) on COCO object detection.
+
+## Introduction
+
+**GCNet** was initially described in an [arXiv paper](https://arxiv.org/abs/1904.11492). By absorbing the advantages of Non-Local Networks (NLNet) and Squeeze-Excitation Networks (SENet), GCNet provides a simple, fast and effective approach for global context modeling, which generally outperforms both NLNet and SENet on major benchmarks for various recognition tasks.
+
+## Citing GCNet
+
+```
+@article{cao2019GCNet,
+  title={GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond},
+  author={Cao, Yue and Xu, Jiarui and Lin, Stephen and Wei, Fangyun and Hu, Han},
+  journal={arXiv preprint arXiv:1904.11492},
+  year={2019}
+}
+```
+
+## Results and models
+The results on COCO 2017val are shown in the table below.
+
+| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: |
+| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | | 39.7 | 35.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915.log.json) |
+| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.0 | 39.9 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204_024626.log.json) |
+| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 11.4 | 41.3 | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205_192835.log.json) |
+| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.6 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206_112128.log.json) |
+
+| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----:
| :-----: | :------: | :-------: | +| R-50-FPN | Mask | - | 1x | 4.4 | 16.6 | 38.4 | 34.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202_214122.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | 15.5 | 40.4 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202_174907.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.1 | 40.7 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-101-FPN | Mask | - | 1x | 6.4 | 13.3 | 40.5 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210_220422.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 12.0 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207_015330.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.8 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| X-101-FPN | Mask | - | 1x | 7.6 | 11.3 | 
42.4 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211_054326.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r16) | 1x | 8.8 | 9.8 | 43.5 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_164715.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r4) | 1x | 9.0 | 9.7 | 43.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212_070942.log.json) | +| X-101-FPN | Cascade Mask | - | 1x | 9.2 | 8.4 | 44.7 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310_115217.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r16) | 1x | 10.3 | 7.7 | 46.2 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_184154.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r4) | 1x | 10.6 | | 46.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653.log.json) |
+| X-101-FPN | DCN Cascade Mask | - | 1x | | | 44.9 | 38.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20200516_182249-680fc3f2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20200516_182249.log.json)|
+| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r16) | 1x | | | 44.6 | |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20200516_015634-08f56b56.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20200516_015634.log.json) |
+| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r4) | 1x | | | 45.7 | 39.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20200518_041145-24cabcfd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20200518_041145.log.json) |
+
+**Notes:**
+
+- `SyncBN` is added to the backbone for all models in **Table 2**.
+- `GC` denotes that a Global Context (GC) block is inserted after the 1x1 conv of the backbone.
+- `DCN` denotes that the 3x3 conv is replaced with a 3x3 deformable convolution in stages `c3-c5` of the backbone.
+- `r4` and `r16` denote ratios 4 and 16 in the GC block, respectively.
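To illustrate what the `ContextBlock` plugin in the configs below computes, here is a simplified, self-contained PyTorch sketch of a GC block. It is a reading of the paper rather than mmcv's actual `ContextBlock` (which supports more pooling and fusion options); the class name is made up, and `ratio` plays the same role as the `r4`/`r16` settings above.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class SimpleContextBlock(nn.Module):
    """Global context pooling via a softmax attention map, followed by a
    bottleneck transform whose width is channels * ratio, added back in."""

    def __init__(self, channels, ratio=1. / 16):
        super().__init__()
        hidden = int(channels * ratio)       # r16 -> C/16, r4 -> C/4
        self.attn = nn.Conv2d(channels, 1, kernel_size=1)
        self.transform = nn.Sequential(
            nn.Conv2d(channels, hidden, 1),
            nn.LayerNorm([hidden, 1, 1]),
            nn.ReLU(inplace=True),
            nn.Conv2d(hidden, channels, 1))

    def forward(self, x):
        b, c, h, w = x.shape
        # context modelling: softmax-weighted global pooling over positions
        weights = F.softmax(self.attn(x).view(b, 1, h * w), dim=-1)
        context = torch.bmm(x.view(b, c, h * w), weights.transpose(1, 2))
        context = context.view(b, c, 1, 1)
        # broadcast the pooled context back to every position
        return x + self.transform(context)

x = torch.randn(2, 256, 32, 32)
print(SimpleContextBlock(256, ratio=1. / 4)(x).shape)  # torch.Size([2, 256, 32, 32])
```

In the configs, `stages=(False, True, True, True)` inserts the block only in stages c3-c5, and `position='after_conv3'` places it after the final 1x1 conv of each bottleneck, matching the notes above.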
diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5118895f00345a42fdbc6d2edba084ccd3f1a3c8 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..464aef787de3c932dc3244a93e62cc3df83002ec --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fa4b6f12f36be74c6e1f7182db110893f9f4f0c4 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b76e3e6bab7a32e95aec352829324b8865e63631 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50883ffeb16369ea6210f2ece8fc2d7e084b0134 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31fdd070595ac0512a39075bb045dd18035d3f14 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad6ad47696e6aeb2b3505abab0bd2d49d3b7aa83 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f91674c6d54bfa6fdcfcb5b7e2ec2a2bbf81fa --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6e1c5d0cadfb9fb3a4f8645e28a8e67fc499e900 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..781dba78d68e77fa7eee15f5bbcc539731f8378d --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..32972de857b3c4f43170dcd3e7fbce76425f094d --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d299b69f576a2547de1f7d9edd171d56ab002d0a --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ac908e60c1f964bdd6c3e61933a37c04d487bfb --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0308a567c147413688c9da679d06f93b0e154d88 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e04780c50f96929997c279b23fe5fa427657039b --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..980f8191d4c07eb35e338bd87e3b73b06b3214ad --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f0c96e58b6131f2958f28c56b9d8384d5b4746f7 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7fb8e82ece225ab6f88f1f4f83bea56a42cf1a57 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1ddbee3b4b79e79bb2a3faf30604f2465612728 --- /dev/null +++ b/thirdparty/mmdetection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/thirdparty/mmdetection/configs/gfl/README.md b/thirdparty/mmdetection/configs/gfl/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b379f6850ed2d9b50d40de5f321ad39af8244e20 --- /dev/null +++ b/thirdparty/mmdetection/configs/gfl/README.md @@ -0,0 +1,32 @@ +# Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection + + +## Introduction + +We provide config files to reproduce the object detection results in the paper [Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection](https://arxiv.org/abs/2006.04388) + +``` +@article{li2020generalized, + title={Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection}, + author={Li, Xiang and Wang, Wenhai and Wu, Lijun and Chen, Shuo and Hu, Xiaolin and Li, Jun and Tang, Jinhui and Yang, Jian}, + journal={arXiv preprint arXiv:2006.04388}, + year={2020} +} +``` + + +## Results and Models + +| Backbone | Style | Lr schd | Multi-scale Training| Inf time (fps) | box AP | Config | Download | +|:-----------------:|:-------:|:-------:|:-------------------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | No | 19.5 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244.log.json) | +| R-50 | pytorch | 2x | Yes | 19.5 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802.log.json) | +| R-101 | pytorch | 2x | Yes | 14.7 | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126.log.json) | +| R-101-dcnv2 | pytorch | 2x | Yes | 12.9 | 47.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002.log.json) |
+| X-101-32x4d | pytorch | 2x | Yes | 12.1 | 45.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002.log.json) |
+| X-101-32x4d-dcnv2 | pytorch | 2x | Yes | 10.7 | 48.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002.log.json) |
+
+[1] *1x and 2x mean the model is trained for 90K and 180K iterations, respectively.* \
+[2] *All results are obtained with a single model and without any test-time data augmentation such as multi-scale testing or flipping.* \
+[3] *`dcnv2` denotes deformable convolutional networks v2.* \
+[4] *FPS is tested with a single GeForce RTX 2080Ti GPU, using a batch size of 1.*
diff --git a/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..eab622b2e8bdc03c717b9b04d043da46f25a7cb3
--- /dev/null
+++ b/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py
@@ -0,0 +1,14 @@
+_base_ = './gfl_r50_fpn_mstrain_2x_coco.py'
+model = dict(
+ pretrained='torchvision://resnet101',
+ backbone=dict(
+ type='ResNet',
+ depth=101,
+ num_stages=4,
+ out_indices=(0, 1, 2, 3),
+ frozen_stages=1,
+ norm_cfg=dict(type='BN', requires_grad=True),
+ dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False),
+ stage_with_dcn=(False, True, True, True),
+ norm_eval=True,
+ style='pytorch'))
diff --git a/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..c972d0c159676a81d997e033e4db0a2a6d9b87e2
--- /dev/null
+++ b/thirdparty/mmdetection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py
@@ -0,0 +1,12 @@
+_base_ = './gfl_r50_fpn_mstrain_2x_coco.py'
+model = dict(
+ pretrained='torchvision://resnet101',
+ backbone=dict(
+ type='ResNet',
+ depth=101,
+ num_stages=4,
+ out_indices=(0, 1, 2, 3),
+ frozen_stages=1,
+ norm_cfg=dict(type='BN', requires_grad=True),
+ norm_eval=True,
+ style='pytorch'))
diff --git a/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a15ebce3761fe435dcb3c2bc97dd1300ba6633
--- /dev/null
+++ b/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_1x_coco.py
@@ -0,0 +1,57 @@
+_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='GFL', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='GFLHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0))) +# training and testing settings +train_cfg = dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bebfee9f8fdebb8da3bf791a65b0dab8de3fb582 --- /dev/null +++ b/thirdparty/mmdetection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,22 @@ +_base_ = './gfl_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a2370e234dfec0099aaf74c46a3a85052d882385 --- /dev/null +++ b/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, False, True, True), + norm_eval=True, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..4e00a059f8d2e58d23d6b77764456be351bd3115 --- /dev/null +++ b/thirdparty/mmdetection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/ghm/README.md b/thirdparty/mmdetection/configs/ghm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9f8d702ceea2217eb67685d153d434b1080caea2 --- /dev/null +++ b/thirdparty/mmdetection/configs/ghm/README.md @@ -0,0 +1,21 @@ +# Gradient Harmonized Single-stage Detector + +## Introduction + +``` +@inproceedings{li2019gradient, + title={Gradient Harmonized Single-stage Detector}, + author={Li, Buyu and Liu, Yu and Wang, Xiaogang}, + booktitle={AAAI Conference on Artificial Intelligence}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 4.0 | 3.3 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130_004213.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 4.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130_145259.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 5.1 | 40.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131_113653.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 5.2 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131_113723.log.json) | diff --git a/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..18f899a9b456383a8f74053e4716aee50ee5ec8c --- /dev/null +++ 
b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..61b9751057f10f2173b8e7edde12cca53ebbd2d0 --- /dev/null +++ b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py @@ -0,0 +1,19 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + loss_cls=dict( + _delete_=True, + type='GHMC', + bins=30, + momentum=0.75, + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + _delete_=True, + type='GHMR', + mu=0.02, + bins=10, + momentum=0.7, + loss_weight=10.0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a89fc1389ce0f1f9712b4b5d684e632aaee25ce8 --- /dev/null +++ b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..88013f5ffa2334fe3eccd30616a0b033c258ad87 --- /dev/null +++ b/thirdparty/mmdetection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/gn+ws/README.md b/thirdparty/mmdetection/configs/gn+ws/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1487ae86d70e4e904e9cac5ea1930b1fa6ae00e3 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/README.md @@ -0,0 +1,42 @@ +# Weight Standardization + +## Introduction + +``` +@article{weightstandardization, + author = {Siyuan Qiao and Huiyu Wang and Chenxi Liu and Wei Shen and Alan Yuille}, + title = {Weight Standardization}, + journal = {arXiv preprint arXiv:1903.10520}, + year = {2019}, +} +``` + +## Results and Models + +Faster R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 1x | 5.9 | 11.7 | 39.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130_210936.log.json) | +| R-101-FPN | pytorch | GN+WS | 1x | 8.9 | 9.0 | 41.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205_232146.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 1x | 7.0 | 10.3 | 40.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203_220113.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 1x | 10.8 | 7.6 | 42.1 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212_195302.log.json) | + +Mask R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:---------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 2x | 7.3 | 10.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226_062128.log.json) | +| R-101-FPN | pytorch | GN+WS | 2x | 10.3 | 8.6 | 42.0 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212_213627.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 2x | 8.4 | 9.3 | 41.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216_201500.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 2x | 12.2 | 7.1 | 42.1 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319_104101.log.json) | +| R-50-FPN | pytorch | GN+WS | 20-23-24e | 7.3 | - | 41.1 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213_035123.log.json) | +| R-101-FPN | pytorch | GN+WS | 20-23-24e | 10.3 | - | 43.1 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213_130142.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 8.4 | - | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226_093732.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 12.2 | - | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316_013741.log.json) | + +Note: + +- GN+WS requires about 5% more memory than GN, and it is only 5% slower than GN. +- In the paper, a 20-23-24e lr schedule is used instead of 2x. +- The X-50-GN and X-101-GN pretrained models are also shared by the authors. 
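Before the per-file configs, a quick illustration of what WS actually does may help. The sketch below is a minimal PyTorch rendition of weight standardization written for this note only (the class name `Conv2dWS` is ours, not the repo's implementation, which the configs in this directory enable via `conv_cfg=dict(type='ConvWS')`): each convolution re-normalizes its weight tensor to zero mean and unit variance per output filter before convolving, which is why it pairs naturally with the GN layers configured above.

```python
# Minimal sketch of Weight Standardization (WS), for illustration only;
# the configs below select the repo's implementation via conv_cfg=dict(type='ConvWS').
import torch
import torch.nn as nn
import torch.nn.functional as F


class Conv2dWS(nn.Conv2d):
    """Conv2d whose weight is standardized on every forward pass."""

    def forward(self, x):
        w = self.weight
        # Zero mean and unit variance over each output filter,
        # i.e. over the C_in * kH * kW weights of every filter.
        mean = w.mean(dim=(1, 2, 3), keepdim=True)
        std = w.std(dim=(1, 2, 3), keepdim=True) + 1e-5
        return F.conv2d(x, (w - mean) / std, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)


# WS is paired with GroupNorm (num_groups=32, matching norm_cfg in the configs).
block = nn.Sequential(Conv2dWS(64, 64, 3, padding=1),
                      nn.GroupNorm(32, 64),
                      nn.ReLU())
print(block(torch.randn(2, 64, 56, 56)).shape)  # torch.Size([2, 64, 56, 56])
```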
diff --git a/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a5f6bd2292f4c1dfbd59de968e0dc3acf7579424 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +model = dict( + pretrained='open-mmlab://jhu/resnet101_gn_ws', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..497267b6b50b3c160a4f8807230d4f986cf8eb3f --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnet50_gn_ws', + backbone=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg))) diff --git a/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..061ca6993606fe2c7bdb020eaf3b5ea8b91a9b8e --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnext101_32x4d_gn_ws', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) diff --git a/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1268980615b69009a33b785eeb59322372633d10 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnext50_32x4d_gn_ws', + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0912329cbe7c8da1b100945c978a274d60254aaa --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py' +# learning 
policy +lr_config = dict(step=[20, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4be68176d2ed6f9b209823187f1367d204fe67d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +model = dict( + pretrained='open-mmlab://jhu/resnet101_gn_ws', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..938910482f764e5a7ad31c29e9db9e29d65c2db7 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2032b932b1da461180ca9be08c56b5cd66d25873 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnet50_gn_ws', + backbone=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg), + mask_head=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d3084e5cad5e0e909c18a2738e9cfd4e9586a48b --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dbe88770ae5dffbed5229ed4a4e62f10b1c8d12b --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnext101_32x4d_gn_ws', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) diff --git 
a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..68792e16c9e3533cb2e0e4d02c6eb049f0f72ed2 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9bbc86ead7003ab75264f8cf0cd18edb735fe9fd --- /dev/null +++ b/thirdparty/mmdetection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://jhu/resnext50_32x4d_gn_ws', + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) diff --git a/thirdparty/mmdetection/configs/gn/README.md b/thirdparty/mmdetection/configs/gn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..205892afcd7ab5ad48c60dc23953a06e09d17a0a --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/README.md @@ -0,0 +1,28 @@ +# Group Normalization + +## Introduction + +``` +@inproceedings{wu2018group, + title={Group Normalization}, + author={Wu, Yuxin and He, Kaiming}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2018} +} +``` + +## Results and Models + +| Backbone | model | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN (d) | Mask R-CNN | 2x | 7.1 | 11.0 | 40.2 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206_050355.log.json) | +| R-50-FPN (d) | Mask R-CNN | 3x | 7.1 | - | 40.5 | 36.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214_063512.log.json) | +| R-101-FPN (d) | Mask R-CNN | 2x | 9.9 | 9.0 | 41.9 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205_234402.log.json) | +| R-101-FPN (d) | Mask R-CNN | 3x | 9.9 | - | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609.log.json) | +| R-50-FPN (c) | Mask R-CNN | 2x | 7.1 | 10.9 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207_225832.log.json) | +| R-50-FPN (c) | Mask R-CNN | 3x | 7.1 | - | 40.1 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225_235135.log.json) | + +**Notes:** +- (d) means pretrained model converted from Detectron, and (c) means the contributed model pretrained by [@thangvubk](https://github.com/thangvubk). +- The `3x` schedule decays the learning rate at epochs 28 and 34 and trains for 36 epochs in total.
+- **The memory and train/inference time figures are outdated.** diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py b/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0fcc558018b69beedbd05781163c8043d93f7277 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' +model = dict( + pretrained='open-mmlab://detectron/resnet101_gn', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py b/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67890c2a154e0e5c82bfeacd1d7355878bcdf19b --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r101_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7cede4147a32d374ca8d048513493429410f699c --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py @@ -0,0 +1,46 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://detectron/resnet50_gn', + backbone=dict(norm_cfg=norm_cfg), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c0b0013829909ea7b3b68415fd89f35037eb77a8 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py
b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3c690aecb9662b9e433200e4cd1e1ad3c330f3d9 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='open-mmlab://contrib/resnet50_gn', + backbone=dict(norm_cfg=norm_cfg), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6174861dfa53a5b3465d7e777a5a54b684077788 --- /dev/null +++ b/thirdparty/mmdetection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/grid_rcnn/README.md b/thirdparty/mmdetection/configs/grid_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d6f4966511a50840adb66bc951e5e5f09789c2e7 --- /dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/README.md @@ -0,0 +1,32 @@ +# Grid R-CNN + +## Introduction + +``` +@inproceedings{lu2019grid, + title={Grid r-cnn}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{lu2019grid, + title={Grid R-CNN Plus: Faster and Better}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + journal={arXiv preprint arXiv:1906.05688}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 2x | 5.1 | 15.0 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130_221140.log.json) | +| R-101 | 2x | 7.0 | 12.6 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309_164224.log.json) | +| X-101-32x4d | 2x | 8.3 | 10.8 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130_215413.log.json) | +| X-101-64x4d | 2x | 11.3 | 7.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204_080641.log.json) | + +**Notes:** +- All models are trained with 8 GPUs instead of 32 GPUs in the original paper. +- The warm-up lasts for 1 epoch, and `2x` here indicates 25 epochs. diff --git a/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cf8b648a4291db4a172bf031f301110963f38dd6 --- /dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' + +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc3e3ef594243be1335aa3b3d2f78f50f4477082 --- /dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = ['../grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py'] +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +checkpoint_config = dict(interval=1) +# runtime settings +total_epochs = 12 diff --git a/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b40e039c1e8fd584908794755385e62416dd38f --- /dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py @@ -0,0 +1,135 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='GridRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0,
loss_weight=1.0)), + roi_head=dict( + type='GridRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + with_reg=False, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False), + grid_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + grid_head=dict( + type='GridHead', + grid_points=9, + num_convs=8, + in_channels=256, + point_feat_channels=64, + norm_cfg=dict(type='GN', num_groups=36), + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=15)))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_radius=1, + pos_weight=-1, + max_num_grid=192, + debug=False)) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.03, + nms=dict(type='nms', iou_threshold=0.3), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +total_epochs = 25 diff --git a/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..515bbdf0aa8840c4bec273d1753f34faecf903c5 --- /dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,23 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch')) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +total_epochs = 25 diff --git a/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2fdc53c8c04c12bed16a31281127f9774bb70b64 --- 
/dev/null +++ b/thirdparty/mmdetection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,12 @@ +_base_ = './grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/groie/README.md b/thirdparty/mmdetection/configs/groie/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9ccb91285999d59c223594d4cb84fb0b339f58e8 --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/README.md @@ -0,0 +1,64 @@ +# GRoIE + +## A novel Region of Interest Extraction Layer for Instance Segmentation + +By Leonardo Rossi, Akbar Karimi and Andrea Prati from +[IMPLab](http://implab.ce.unipr.it/). + +We provide configs to reproduce the results reported in the paper +"*A novel Region of Interest Extraction Layer for Instance Segmentation*" +on COCO object detection. + +## Introduction + +This paper is motivated by the need to overcome the limitations of existing +RoI extractors, which select only one (the best) layer from FPN. + +Our intuition is that all the layers of FPN retain useful information. + +Therefore, the proposed layer (called Generic RoI Extractor - **GRoIE**) +introduces non-local building blocks and attention mechanisms to boost the +performance. + +## Results and models + +The results on COCO 2017 minival (5k images) are shown in the table below. +The trained models can be found +[here](https://drive.google.com/drive/folders/19ssstbq_h0Z1cgxHmJYFO8s1arf3QJbT). + +### Application of GRoIE to different architectures + +| Backbone | Method | Lr schd | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :-----: | :----: | :-----: | :-------:| :--------:| +| R-50-FPN | Faster Original | 1x | 37.4 | | [config](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | + GRoIE | 1x | 38.3 | | [config](./faster_rcnn_r50_fpn_groie_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | Grid R-CNN | 1x | 39.1 | | [config](./grid_rcnn_r50_fpn_gn-head_1x_coco.py)| [model](http://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059-64f00ee8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059.log.json) | +| R-50-FPN | + GRoIE | 1x | | | [config](./grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py)|| +| R-50-FPN | Mask R-CNN | 1x | 38.2 | 34.7 | [config](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | + GRoIE | 1x | 39.0 | 36.0 | [config](./mask_rcnn_r50_fpn_groie_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | GC-Net | 1x | 40.7 | 36.5 | [config](../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-50-FPN | + GRoIE | 1x | 41.0 | 37.8 | [config](./mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py) |[model](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | +| R-101-FPN | GC-Net | 1x | 42.2 | 37.8 | [config](../configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| R-101-FPN | + GRoIE | 1x | | | [config](./mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py)| [model](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507.log.json) | + + +## Citation + +If you use this work or benchmark in your research, please cite this project. + +``` +@misc{rossi2020novel, + title={A novel Region of Interest Extraction Layer for Instance Segmentation}, + author={Leonardo Rossi and Akbar Karimi and Andrea Prati}, + year={2020}, + eprint={2004.13665}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Contact + +The implementation of GRoIE is currently maintained by +[Leonardo Rossi](https://github.com/hachreak/).
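The config files that follow all revolve around `GenericRoIExtractor`. As a reading aid, here is a rough standalone sketch of the sum aggregation those configs describe, using only `torchvision` (the helper name `groie_pool` is ours, not an mmdetection API). Unlike the standard FPN extractor, which assigns each RoI to a single pyramid level, GRoIE pools every RoI from every level and merges the results; the real module additionally applies the per-level `pre_cfg` conv before, and the attention-based `post_cfg` after, the aggregation.

```python
# Rough sketch of GRoIE-style sum aggregation over all FPN levels,
# using torchvision only; pre_cfg/post_cfg processing is omitted.
import torch
from torchvision.ops import roi_align


def groie_pool(fpn_feats, rois, out_size=7, strides=(4, 8, 16, 32)):
    """fpn_feats: list of [N, C, H, W] maps; rois: [K, 5] rows of
    (batch_index, x1, y1, x2, y2) in image coordinates."""
    pooled = 0
    for feat, stride in zip(fpn_feats, strides):
        # Every level contributes to every RoI (no per-RoI level assignment).
        pooled = pooled + roi_align(feat, rois, output_size=out_size,
                                    spatial_scale=1.0 / stride,
                                    sampling_ratio=2)
    return pooled


# Toy FPN features for an 800x800 image at strides 4, 8, 16 and 32.
feats = [torch.randn(1, 256, 800 // s, 800 // s) for s in (4, 8, 16, 32)]
rois = torch.tensor([[0., 32., 32., 224., 224.]])  # one box in image 0
print(groie_pool(feats, rois).shape)  # torch.Size([1, 256, 7, 7])
```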
diff --git a/thirdparty/mmdetection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py b/thirdparty/mmdetection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0fc528bfd49bfc9a262692db78a5f94b46c285af --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,25 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/thirdparty/mmdetection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py b/thirdparty/mmdetection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e4b4ab23513a97adf4471ab3b33ca8abdb6dbe5 --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + grid_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/thirdparty/mmdetection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/thirdparty/mmdetection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8b83722197c69a51907f43bcb05883deedc37f0c --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + 
roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py b/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..81dfb4873bdb587626200a3007dc4d57a92c0fd9 --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..852c5ca7c5c4ba04f6a5f7dd6dbaf6b2c357a2fa --- /dev/null +++ b/thirdparty/mmdetection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/README.md b/thirdparty/mmdetection/configs/guided_anchoring/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..3bd1121b950e72056eb7bb55b4c00e6aed96e674 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/README.md @@ -0,0 +1,51 @@ +# Region Proposal by Guided Anchoring + +## Introduction + +We provide config files to reproduce the results of the CVPR 2019 paper [Region Proposal by Guided Anchoring](https://arxiv.org/abs/1901.03278). + +``` +@inproceedings{wang2019region, + title={Region Proposal by Guided Anchoring}, + author={Jiaqi Wang and Kai Chen and Shuo Yang and Chen Change Loy and Dahua Lin}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below (results on test-dev are usually slightly higher than on val). + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR 1000 | Config | Download | +| :----: | :-------------: | :-----: | :-----: | :------: | :------------: | :-----: | :------: | :--------: | +| GA-RPN | R-50-FPN | caffe | 1x | 5.3 | 15.8 | 68.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531_011819.log.json) | +| GA-RPN | R-101-FPN | caffe | 1x | 7.3 | 13.0 | 69.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531_011812.log.json) | +| GA-RPN | X-101-32x4d-FPN | pytorch | 1x | 8.5 | 10.0 | 70.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220_221326.log.json) | +| GA-RPN | X-101-64x4d-FPN | pytorch | 1x | 7.1 | 7.5 | 71.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225_152704.log.json) | + + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------------: | :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| GA-Faster RCNN | R-50-FPN | caffe | 1x | 5.5 | | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py) |
[model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718.log.json) | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | 7.5 | | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_20200505_115528.log.json) | +| GA-Faster RCNN | X-101-32x4d-FPN | pytorch | 1x | 8.7 | 9.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215_184547.log.json) | +| GA-Faster RCNN | X-101-64x4d-FPN | pytorch | 1x | 11.8 | 7.3 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215_104455.log.json) | +| GA-RetinaNet | R-50-FPN | caffe | 1x | 3.5 | 16.8 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020_225450.log.json) | +| GA-RetinaNet | R-101-FPN | caffe | 1x | 5.5 | 12.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531_012847.log.json) | +| GA-RetinaNet | X-101-32x4d-FPN | pytorch | 1x | 6.9 | 10.6 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219_223025.log.json) | +| GA-RetinaNet | X-101-64x4d-FPN | 
pytorch | 1x | 9.9 | 7.7 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226_221123.log.json) | + + + +- In the Guided Anchoring paper, `score_thr` is set to 0.001 in Fast/Faster RCNN and 0.05 in RetinaNet for both baselines and Guided Anchoring. + +- Performance on the COCO test-dev benchmark is shown as follows. + + +| Method | Backbone | Style | Lr schd | Aug Train | Score thr | AP | AP_50 | AP_75 | AP_small | AP_medium | AP_large | Download | +| :------------: | :-------: | :---: | :-----: | :-------: | :-------: | :---: | :---: | :---: | :------: | :-------: | :------: | :------: | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.001 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 2x | T | 0.05 | | | | | | | | diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a1258bd905aced4acfc17c4afb22958cb21d4104 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,63 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe'), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1])))) +# model training and testing settings +train_cfg = dict( + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(num=256))) +test_cfg = dict(rcnn=dict(score_thr=1e-3)) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root +
'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f438a4792e9aa4bcef35a42349156f1eab044477 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ga_faster_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..40e75128441c45ef77a77e00391c46e378b27a8c --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,64 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1])))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(max_num=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))) +test_cfg = dict(rpn=dict(max_num=300), rcnn=dict(score_thr=1e-3)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ab19e5b675f1aa1b3b03c2db51defe517f852444 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py @@ -0,0 +1,64 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + 
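# _delete_=True tells mmcv's config merge to discard the inherited rpn_head entirely, so the GARPNHead defined below replaces the base config's standard RPNHead instead of being merged into it. +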
_delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1])))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(max_num=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))) +test_cfg = dict(rpn=dict(max_num=300), rcnn=dict(score_thr=1e-3)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c9a035f15cfad12ddbbfa87ed0d579c1cde0c4ce --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..87bbfdc827eb17654527ad5305ec80bd9e84b78a --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0048965d5b4d2257eed860f9bd69256795b44fa6 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ga_retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..f6c487bf18fe6bcee9a9b7d62ca99a4d98cafa17 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py @@ -0,0 +1,172 @@ +# model settings +model = dict( + type='RetinaNet', + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5), + bbox_head=dict( + type='GARetinaHead', + num_classes=81, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + center_ratio=0.2, + ignore_ratio=0.5, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + keep_ratio=True, + multiscale_mode='range'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + 
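# MultiScaleFlipAug applies the transforms below once per (scale, flip) combination; with a single img_scale and flip=False that is exactly one pass. +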
dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[16, 22]) +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +# runtime settings +total_epochs = 24 +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/ga_retinanet_r101_caffe_fpn_mstrain_2x' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8c73cebe0f1c748ca0ac14065179aeceab4d54f8 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git 
a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a5b595d8bb351ed8f507d0aa349fe127d4fc0708 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..18daadd6a9d3024f30157aea1f1cef3e13326b5a --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b18c2ba41d1493380bab3515be8e29547988ebf --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py 
b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8d154763bf810dc9f668988f05f53dd32a354a31 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = './ga_rpn_r50_caffe_fpn_1x_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d61fba8abd471adbbbc029864be5909f4c8c7379 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9c6eb91890b78c7215852525d181e75db434582b --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = '../rpn/rpn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + 
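# loss_loc supervises Guided Anchoring's anchor-location branch; focal loss is used because positions that should host an anchor are rare relative to the background. +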
loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))) +# model training and testing settings +train_cfg = dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e0fe4931e9cb340fcf3b80a4f9380abee500238 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bf66b6b9283042ce6eabc437219f0b16be96d613 --- /dev/null +++ b/thirdparty/mmdetection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/hrnet/README.md b/thirdparty/mmdetection/configs/hrnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..450516658ae749d9ec63c95e14ef6a1dcb1bd800 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/README.md @@ -0,0 +1,92 @@ +# High-resolution networks (HRNets) for object detection + +## Introduction + +``` +@inproceedings{SunXLW19, + title={Deep High-Resolution Representation Learning for Human Pose Estimation}, + author={Ke Sun and Bin Xiao and Dong Liu and Jingdong Wang}, + booktitle={CVPR}, + year={2019} +} + +@article{SunZJCXLMWLW19, + title={High-Resolution Representations for Labeling Pixels and Regions}, + author={Ke Sun and Yang Zhao and Borui Jiang and Tianheng Cheng and Bin Xiao + and Dong Liu and Yadong Mu and Xinggang Wang and Wenyu Liu and Jingdong Wang}, + journal = {CoRR}, + volume = {abs/1904.04514}, + year={2019} +} +``` + +## Results and Models + + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:| :--------:| +| HRNetV2p-W18 | pytorch | 1x | 6.6 | 13.4 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130_211246.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 6.6 | | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.0 | 12.4 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130_204442.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.0 | | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.4 | 10.5 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210_125315.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.4 | | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 1x | 7.0 | 11.7 | 37.7 | 34.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205_232523.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 7.0 | - | 39.8 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212_134222.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.4 | 11.3 | 41.2 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207_055017.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.4 | - | 42.5 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213_150518.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.9 | | 42.1 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.9 | | 42.8 | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732.log.json) | + + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------: | :--------: | +| HRNetV2p-W18 | pytorch | 20e | 7.0 | 11.0 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210_105632.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 9.4 | 11.0 | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208_160511.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 10.8 | | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112.log.json) | + + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 8.5 | 8.5 |41.6 |36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210_093149.log.json) | +| HRNetV2p-W32 | pytorch | 20e | | 8.3 |44.3 |38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 12.5 | |45.1 |39.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 10.8 | 4.7 | 42.8 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210_182735.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 13.1 | 4.9 | 45.4 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207_193153.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 14.6 | | 46.4 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411.log.json) | + + +### FCOS + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:------:|:------:|:------:|:------:|:--------:| +|HRNetV2p-W18| pytorch | Y | N | 1x | 13.0 | 12.9 | 35.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20200316-c24bac34.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20200316_103815.log.json) | +|HRNetV2p-W18| pytorch | Y | N | 2x | 13.0 | - | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20200316-15348c5b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20200316_103815.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 1x | 17.5 | 12.9 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20200314-59a7807f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20200314_150555.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 2x | 17.5 | - | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20200314-faf8f0b8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20200314_145136.log.json) | +|HRNetV2p-W18| pytorch | Y | Y | 2x | 13.0 | 12.9 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20200316-a668468b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20200316_104027.log.json) | +|HRNetV2p-W32| pytorch | Y | Y | 2x | 17.5 | 12.4 | 41.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20200314-065d37a6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20200314_145356.log.json) | 
+|HRNetV2p-W48| pytorch | Y | Y | 2x | 20.3 | 10.8 | 42.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20200314-e201886d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20200314_150607.log.json) | + + + +**Note:** + +- The `28e` schedule in HTC indicates decreasing the lr at 24 and 27 epochs, with a total of 28 epochs. +- HRNetV2 ImageNet pretrained models are in [HRNets for Image Classification](https://github.com/HRNet/HRNet-Image-Classification). diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e8df265edefee1b7e5892fe373c1c0f80f59bf7b --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f394c886b0aedeb1c5f034cd46b0e1cae544da7 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,39 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29b1469fa9f455a3235b323fa3b1e39d5c095f3d --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + 
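# only the branch widths differ between HRNet variants: W40 means the highest-resolution branch carries 40 channels, doubling at each lower-resolution branch. +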
stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9585a4f35d9151b42beac05066a1a231dd1777a9 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c5746337a45bec7bf5ea0e8dc709c7c69685a7b2 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,39 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bd43e47254be7a153fadf26e734f0756d9b4b02e --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9907bcbf6464fb964664a318533bf9edda4e34fd --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# model settings +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + 
neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ff3e7cae4aeb1f380f00a7f7f72f1c1ed47e7583 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './faster_rcnn_hrnetv2p_w18_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..190e81c710b0e5e9eb34bafff01c9dd4a8ef130c --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2876e3fdae70a0398e7772d81e24d31d2bc1d6fb --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0fd9fa0284f17272c0785701f2ae81860bc04b6 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ddb4bd83381851456279541b7f6ed5a4f12ff0a3 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git 
a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..20bffb95616d4358007d0825820f4a91ea223649 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py @@ -0,0 +1,9 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7847fb438b9954327066535e4ff810aefba0f214 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b845128de51d2080f6444e2c849f4642a43ad942 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,9 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3640224511b4a1fd38e999a82f1723431dc5cb3 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = '../fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256, + stride=2, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..65717e3b2f942df98f17574c0442e343fb869782 --- 
/dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6866b1ae3d8399d69d5f875bca771a102af4e815 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..452b0fe2d89566a998744d9c7812e550596462e3 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..391636ff452471af367ed14be5faa49c0b7e1be6 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,9 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..aee78089b9e32d3c0bcd6a29f51c22d1af96d2ce --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,36 @@ +_base_ = '../htc/htc_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..abf6fb550e4dfff4e749e15b001c37e6db8ae476 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..40c97d1fdb1b5b86030d9aef436129d24b3dbb0e --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_hrnetv2p_w40_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +total_epochs = 28 diff --git a/thirdparty/mmdetection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py b/thirdparty/mmdetection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..459af318e785d119b5afef5f25a3095c1cd4e665 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +total_epochs = 28 diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..82a5f464ed9b31ec6a513efc6a9fa20953cf1689 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,9 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w18', + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144)))), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..afde2daa2729316d29a0a56c9c0380b8f2b8aa95 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f533af6d867466ff3ee70a3941b7bfbe90f5b3ba --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..24dce1ce5520060805f94cb0b9c6900912e44d0b --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5b10c166cf36601bdb895de81874970aebc83310 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w40', + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320)))), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fa7ec1c6e09742f5e4e92ed0fe066ac5ed75fe94 --- /dev/null +++ b/thirdparty/mmdetection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/htc/README.md b/thirdparty/mmdetection/configs/htc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..618dcbae10f4a2f450a60934c2e016c8021da463 --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/README.md @@ -0,0 +1,55 @@ +# Hybrid Task Cascade for Instance Segmentation + +## Introduction + +We provide 
config files to reproduce the results in the CVPR 2019 paper [Hybrid Task Cascade](https://arxiv.org/abs/1901.07518). + +``` +@inproceedings{chen2019hybrid, + title={Hybrid task cascade for instance segmentation}, + author={Chen, Kai and Pang, Jiangmiao and Wang, Jiaqi and Xiong, Yu and Li, Xiaoxiao and Sun, Shuyang and Feng, Wansen and Liu, Ziwei and Shi, Jianping and Ouyang, Wanli and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Dataset + +HTC requires the COCO and COCO-stuff datasets for training. You need to download COCO-stuff and extract it into the COCO dataset path. +The directory structure should look like this: + +``` +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +│ │ ├── stuffthingmaps +``` + +## Results and Models + +The results on COCO 2017val are shown in the table below. (Results on test-dev are usually slightly higher than those on val.) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 8.2 | 5.8 | 42.3 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317_070435.log.json) | +| R-50-FPN | pytorch | 20e | 8.2 | - | 43.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319_070313.log.json) | +| R-101-FPN | pytorch | 20e | 10.2 | 5.5 | 44.8 | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r101_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317_153107.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 11.4 | 5.0 | 46.1 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318_034519.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 14.5 | 4.4 | 47.0 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318_081711.log.json) | + +- In the HTC paper and COCO 2018 Challenge, `score_thr` is set to 0.001 for both baselines and HTC.
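+  For quick reference, this benchmark setting is what `htc_without_semantic_r50_fpn_1x_coco.py` below already uses; a sketch of the relevant `test_cfg` fragment (values copied from that config; raise `score_thr`, e.g. to 0.05, if you want fewer low-confidence detections in demos):
+
+  ```
+  test_cfg = dict(
+      rcnn=dict(
+          score_thr=0.001,  # challenge/benchmark setting from the HTC paper
+          nms=dict(type='nms', iou_threshold=0.5),
+          max_per_img=100,
+          mask_thr_binary=0.5))
+  ```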
+- We use 8 GPUs with 2 images/GPU for R-50 and R-101 models, and 16 GPUs with 1 image/GPU for X-101 models. +If you would like to train X-101 HTC with 8 GPUs, you need to change the lr from 0.02 to 0.01. + +We also provide a more powerful HTC model trained with DCN and multi-scale training. No test-time augmentation is used. + +| Backbone | Style | DCN | training scales | Lr schd | box AP | mask AP | Config | Download | +|:----------------:|:-------:|:-----:|:---------------:|:-------:|:------:|:-------:|:------:|:--------:| +| X-101-64x4d-FPN | pytorch | c3-c5 | 400~1400 | 20e | 50.4 | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312_203410.log.json) | diff --git a/thirdparty/mmdetection/configs/htc/htc_r101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/htc/htc_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d543f028fe7ee3984f498fd05c94ddb265070061 --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_r101_fpn_20e_coco.py @@ -0,0 +1,5 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/htc/htc_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/htc/htc_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..929cf464f6091f8380fd1057b282f29f4f7a8b5f --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_r50_fpn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = './htc_without_semantic_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='FusedSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2))) +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict( + seg_prefix=data_root + 
'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/htc/htc_r50_fpn_20e_coco.py b/thirdparty/mmdetection/configs/htc/htc_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b05a92cd8a4d45f6c8733b0d9a44d357cf8a3308 --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..81ed3a8a03a36fcc3d183844d7405b755cc03540 --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py @@ -0,0 +1,240 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='HybridTaskCascade', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='HybridTaskCascadeRoIHead', + interleaved=True, + mask_info_flow=True, + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + 
mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=[ + dict( + type='HTCMaskHead', + with_conv_res=False, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)) + ])) +# model training and testing settings +train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]) +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_num=1000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + score_thr=0.001, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + val=dict(pipeline=test_pipeline), test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py b/thirdparty/mmdetection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..19b3447cd71a7339669b3b18471858d0adae016a --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,18 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + 
base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py b/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e76cff2a21fec34eeef25ef65f053ad0a2cde16f --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,18 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py b/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4a98ff2858895b0e6730634b2a559eba1ce72ea4 --- /dev/null +++ b/thirdparty/mmdetection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py @@ -0,0 +1,42 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +data = dict( + samples_per_gpu=1, workers_per_gpu=1, train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/instaboost/README.md b/thirdparty/mmdetection/configs/instaboost/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1d4dbe5951c5f52337c9101d961a52ed878e9b3c --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/README.md @@ -0,0 +1,43 @@ +# InstaBoost for MMDetection + +Configs in this directory implement the ICCV 2019 paper "InstaBoost: Boosting Instance Segmentation via Probability Map Guided Copy-Pasting" and are provided by the authors of the paper. InstaBoost is a data augmentation method for object detection and instance segmentation. The paper has been released on [`arXiv`](https://arxiv.org/abs/1908.07801).
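+
+As a quick orientation before the details below, this sketch shows where InstaBoost sits in a training pipeline; the parameters are excerpted from `mask_rcnn_r50_fpn_instaboost_4x_coco.py` added later in this diff, with the remaining standard pipeline steps elided:
+
+```
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='InstaBoost',
+        action_candidate=('normal', 'horizontal', 'skip'),
+        action_prob=(1, 0, 0),  # probabilities matching the three candidates above
+        scale=(0.8, 1.2),
+        dx=15,
+        dy=15,
+        theta=(-1, 1),
+        color_prob=0.5,
+        hflag=False,
+        aug_ratio=0.5),  # portion of images to augment; see InstaBoostConfig for details
+    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
+    # ... Resize / RandomFlip / Normalize / Pad / formatting steps as usual
+]
+```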
+ +``` +@inproceedings{fang2019instaboost, + title={Instaboost: Boosting instance segmentation via probability map guided copy-pasting}, + author={Fang, Hao-Shu and Sun, Jianhua and Wang, Runzhong and Gou, Minghao and Li, Yong-Lu and Lu, Cewu}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + pages={682--691}, + year={2019} +} +``` + +## Usage + +### Requirements + +You need to install `instaboostfast` before using it. + +``` +pip install instaboostfast +``` + +The code and more details can be found [here](https://github.com/GothicAi/Instaboost). + +### Integration with MMDetection + +InstaBoost has already been integrated into the data pipeline, so all you need to do is add or modify the **InstaBoost** configuration after **LoadImageFromFile**, as in the pipeline sketch in the introduction above. We have provided examples like [this](mask_rcnn_r50_fpn_instaboost_4x#L121). You can refer to [`InstaBoostConfig`](https://github.com/GothicAi/InstaBoost-pypi#instaboostconfig) for more details. + +## Results and Models + + - All models were trained on `coco_2017_train` and tested on `coco_2017_val` for convenience of evaluation and comparison. In the paper, the results were obtained on `test-dev`. + - To balance accuracy and training time when using InstaBoost, models released on this page are all trained for 48 epochs. Other training and testing configs strictly follow the original framework. + - For results and models in MMDetection V1.x, please refer to [Instaboost](https://github.com/GothicAi/Instaboost). + + +| Network | Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :--------: | :-----: | :------: | :------------: | :------:| :-----: | :------: | :-----------------: | +| Mask R-CNN | R-50-FPN | 4x | 4.4 | 17.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223635.log.json) | +| Mask R-CNN | R-101-FPN | 4x | 6.4 | | 42.5 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738.log.json) | +| Mask R-CNN | X-101-64x4d-FPN | 4x | 10.7 | | 44.7 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947.log.json) | +| Cascade R-CNN | R-50-FPN | 4x | 6.0 | 12.0 | 43.7 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223646.log.json) | diff --git a/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..723ab0295f8457c03114ca535dede951e7d5b169 --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,3 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' + +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6c234b62aa439aac37cb0ea3867f73e42edf8d78 --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +total_epochs = 48 diff --git a/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7cf5f307442e56b29460fb5477cef64bfd3476b9 --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,13 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c2819477abb070b724d0295ccf028025918b263a --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,2 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = 
dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ca4b312fca68e02aeea331a59d5541a74e6723bc --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +total_epochs = 48 diff --git a/thirdparty/mmdetection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0acd088a469e682011a90b770efa51116f6c42ca --- /dev/null +++ b/thirdparty/mmdetection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/README.md b/thirdparty/mmdetection/configs/legacy_1.x/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9a0bb477a1e90949a620563ffda486a15cb948ce --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/README.md @@ -0,0 +1,49 @@ +# Legacy Configs in MMDetection V1.x + +Configs in this directory implement the legacy configs used by MMDetection V1.x and its model zoos. + +To help users convert their models from V1.x to MMDetection V2.0, we provide V1.x configs for running inference with the converted V1.x models. +Due to the BC-breaking changes in MMDetection V2.0 from MMDetection V1.x, running inference with the same model weights in these two versions will produce different results. The difference is within 1% absolute AP, as can be seen in the following table. + +## Usage + +To upgrade the model version, users need to take the following steps. + +### 1. Convert model weights +There are three main differences in the model weights between the V1.x and V2.0 codebases. + +1. Since the class order in every detector's classification branch has been reordered, all the legacy model weights need to go through the conversion process. +2. The regression and segmentation heads no longer contain the background channel. Weights in these background channels should be removed to fit the current codebase. +3. 
For two-stage detectors, their weights need to be upgraded since MMDetection V2.0 refactors all the two-stage detectors with `RoIHead`. + +Users can make the same modifications as described above for self-implemented +detectors. We provide a script `tools/upgrade_model_version.py` to convert the model weights in the V1.x model zoo. + +```bash +python tools/upgrade_model_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} --num-classes ${NUM_CLASSES} +``` +- OLD_MODEL_PATH: the path to load the model weights in the V1.x version. +- NEW_MODEL_PATH: the path to save the converted model weights in the V2.0 version. +- NUM_CLASSES: the number of classes of the original model weights, usually 81 for the COCO dataset and 21 for the VOC dataset. +The number of classes in the V2.0 models should equal that in the V1.x models minus 1. + +### 2. Use configs with legacy settings + +After converting the model weights, check out the v1.2 release to find the corresponding config file that uses the legacy settings. +The V1.x models usually need these three legacy modules: `LegacyAnchorGenerator`, `LegacyDeltaXYWHBBoxCoder`, and `RoIAlign(aligned=False)`; the legacy configs below show all three in context. +Models using ResNet-Caffe backbones also need to change the pretrained model name and the corresponding `img_norm_cfg`. +An example is [`retinanet_r50_caffe_fpn_1x_coco_v1.py`](retinanet_r50_caffe_fpn_1x_coco_v1.py). +Then use the config to test the model weights. For most models, the obtained results should be close to those in V1.x. +We provide configs of some common structures in this directory. + +## Performance + +The performance changes after converting the models in this directory are listed in the following table. + +| Method | Style | Lr schd | V1.x box AP | V1.x mask AP | V2.0 box AP | V2.0 mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------:| :-----: |:------:| :-----: | :-------: |:------------------------------------------------------------------------------------------------------------------------------: | +| Mask R-CNN R-50-FPN | pytorch | 1x | 37.3 | 34.2 | 36.8 | 33.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth) | +| RetinaNet R-50-FPN | caffe | 1x | 35.8 | - | 35.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_caffe_1x_coco_v1.py) | +| RetinaNet R-50-FPN | pytorch | 1x | 35.6 | - | 35.2 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/retinanet_r50_fpn_1x_20181125-7b0c2548.pth) | +| Cascade Mask R-CNN R-50-FPN | pytorch | 1x | 41.2 | 35.7 | 40.8 | 35.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/cascade_mask_rcnn_r50_fpn_1x_20181123-88b170c9.pth) | +| SSD300-VGG16 | caffe | 120e | 25.7 | - | 25.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/ssd300_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/ssd300_coco_vgg16_caffe_120e_20181221-84d7110b.pth) | diff --git a/thirdparty/mmdetection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py 
b/thirdparty/mmdetection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..5899444adf0c7309367fb52e1f6d135e788f2b57 --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='CascadeRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067])), + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +dist_params = dict(backend='nccl', port=29515) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py b/thirdparty/mmdetection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..1cb833cfbcdbe420deece2d5fd806b7b99df5a24 --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,37 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='FasterRCNN', + pretrained='torchvision://resnet50', + rpn_head=dict( + type='RPNHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + 
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn_proposal=dict(nms_post=2000, max_num=2000), + rcnn=dict(assigner=dict(match_low_quality=True))) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py b/thirdparty/mmdetection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0b200610191369da8d3581478f9013b4467755e4 --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,33 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn_proposal=dict(nms_post=2000, max_num=2000), + rcnn=dict(assigner=dict(match_low_quality=True))) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py b/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..ef9392f7e351f489d6d9e97936925b6a16d1212e --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py @@ -0,0 +1,37 @@ +_base_ = './retinanet_r50_fpn_1x_coco_v1.py' +model = dict( + pretrained='open-mmlab://detectron/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py b/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..6198b9717957374ce734ca74de5f54dda44123b9 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + type='RetinaHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) diff --git a/thirdparty/mmdetection/configs/legacy_1.x/ssd300_coco_v1.py b/thirdparty/mmdetection/configs/legacy_1.x/ssd300_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..b194e7651ede006c5101bff1056749edf4d249cd --- /dev/null +++ b/thirdparty/mmdetection/configs/legacy_1.x/ssd300_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +input_size = 300 +model = dict( + bbox_head=dict( + type='SSDHead', + anchor_generator=dict( + type='LegacySSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +dist_params = dict(backend='nccl', port=29555) diff --git a/thirdparty/mmdetection/configs/libra_rcnn/README.md b/thirdparty/mmdetection/configs/libra_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7f19d4a9eae882fae0fccc3ec0e2b5a41cda202d --- /dev/null +++ 
b/thirdparty/mmdetection/configs/libra_rcnn/README.md @@ -0,0 +1,26 @@ +# Libra R-CNN: Towards Balanced Learning for Object Detection + +## Introduction + +We provide config files to reproduce the results in the CVPR 2019 paper [Libra R-CNN](https://arxiv.org/pdf/1904.02701.pdf). + +``` +@inproceedings{pang2019libra, + title={Libra R-CNN: Towards Balanced Learning for Object Detection}, + author={Pang, Jiangmiao and Chen, Kai and Shi, Jianping and Feng, Huajun and Ouyang, Wanli and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the table below. (Results on test-dev are usually slightly higher than those on val.) + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 1x | 4.6 | 19.0 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| Fast R-CNN | R-50-FPN | pytorch | 1x | | | | | +| Faster R-CNN | R-101-FPN | pytorch | 1x | 6.5 | 14.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203_001405.log.json) | +| Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 10.8 | 8.5 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315_231625.log.json) | +| RetinaNet | R-50-FPN | pytorch | 1x | 4.2 | 17.7 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205_112757.log.json) | diff --git a/thirdparty/mmdetection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b416c8d035146edc68f0d7198f15aed0bc0093cd --- /dev/null +++ b/thirdparty/mmdetection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + 
dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3)))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +data = dict( + train=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_train2017.pkl'), + val=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'), + test=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl')) diff --git a/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e36c9b3a506eacd97bfadee8d167886eef74cb7 --- /dev/null +++ b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9e9b6172158af7f6c63e159916f85f3676096b6f --- /dev/null +++ b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0)))) +# model training and testing settings +train_cfg = dict( + rpn=dict(sampler=dict(neg_pos_ub=5), allowed_border=-1), + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3)))) diff --git a/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e94553294294fa49952f2dfe0e3c64a5e00bc878 --- /dev/null +++ b/thirdparty/mmdetection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', 
requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..be2742098fb8f1e46bbb16c9d3e2e20c2e3083aa --- /dev/null +++ b/thirdparty/mmdetection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,26 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=1, + refine_type='non_local') + ], + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=0.11, + loss_weight=1.0))) diff --git a/thirdparty/mmdetection/configs/lvis/README.md b/thirdparty/mmdetection/configs/lvis/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d7c106b5e25e46c9f0d7037261c809a512a9a5fc --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/README.md @@ -0,0 +1,43 @@ +# LVIS dataset + +## Introduction +``` +@inproceedings{gupta2019lvis, + title={{LVIS}: A Dataset for Large Vocabulary Instance Segmentation}, + author={Gupta, Agrim and Dollar, Piotr and Girshick, Ross}, + booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Common Settings +* Please follow the [install guide](../../docs/install.md#install-mmdetection) to install the open-mmlab forked cocoapi first. +* Run the following script to install our forked lvis-api: + ``` + # mmlvis is fully compatible with official lvis + pip install mmlvis + ``` + or + ``` + pip install -r requirements/optional.txt + ``` +* All experiments use the class-balanced oversampling strategy described [here](../../docs/tutorials/new_dataset.md#class-balanced-dataset) with an oversample threshold of `1e-3`, as sketched after this list. +* LVIS v0.5 is about half the size of COCO, so a `2x` schedule on LVIS runs roughly the same number of iterations as a `1x` schedule on COCO.
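+The oversampling is implemented by wrapping the LVIS dataset in mmdetection's `ClassBalancedDataset`, which repeats images that contain rare categories. Below is a minimal sketch, assuming the dataset type and paths used by the `lvis_v0.5_instance.py` base config; the nested `dataset` key is also why the configs in this folder reach the pipeline through `train=dict(dataset=dict(...))`:
+```python
+data = dict(
+    train=dict(
+        type='ClassBalancedDataset',
+        # repeat any image whose rarest category appears in fewer than
+        # 0.1% of training images
+        oversample_thr=1e-3,
+        dataset=dict(
+            type='LVISV05Dataset',
+            ann_file='data/lvis_v0.5/annotations/lvis_v0.5_train.json',
+            img_prefix='data/lvis_v0.5/train2017/')))
+```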
+ +## Results and models of LVIS v0.5 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: |:--------: | +| R-50-FPN | pytorch | 2x | - | - | 26.1 | 25.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis-dbd06831.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_20200531_160435.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 27.1 | 27.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis-54582ee2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_20200601_134748.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 26.7 | 26.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis-3cf55ea2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_20200531_221749.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 26.4 | 26.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis-1c99a5ad.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_20200601_194651.log.json) | + +## Results and models of LVIS v1 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 9.1 | - | 22.5 | 21.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-aa78ac3d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_061305.log.json) | +| R-101-FPN | pytorch | 1x | 10.8 | - | 24.6 | 23.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-ec55ce32.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_070959.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 11.8 | - | 26.7 | 25.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-ebbc5c81.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_071317.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 14.6 | - | 27.2 | 25.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-43d9edfe.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200830_060206.log.json) | diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..188186502d56674fa4e6073b39819a209b9a2c1f --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,2 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..2d2816c2dee68b60376e67e78e9fba277da826c0 --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,2 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..6ca6098f689f38a2be8e80b9ec944b1129ab0b46 --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1203), mask_head=dict(num_classes=1203))) +test_cfg = dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], 
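+    # ImageNet RGB statistics, matching the torchvision-pretrained backbone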
to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..ff1da67187d92ca3ca3cb9cdc9118b0d1584ec0f --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v0.5_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1230), mask_head=dict(num_classes=1230))) +test_cfg = dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..5abcc2e014fe57b862422fa2fe18dd651761b56e --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..439c39a93a8a12119ffa408987c8cea6d8cb313a --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 
2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..f77adba2f150f62900571f5f32b2083ee53b7003 --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..2136255464715bcee89b47f1437a9dd4040e04c7 --- /dev/null +++ b/thirdparty/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/README.md b/thirdparty/mmdetection/configs/mask_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..40533b7182acec1f49594bd424975ea880988f71 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/README.md @@ -0,0 +1,40 @@ +# Mask R-CNN + +## Introduction +``` +@article{He_2017, + title={Mask R-CNN}, + journal={2017 IEEE International Conference on Computer Vision (ICCV)}, + publisher={IEEE}, + author={He, Kaiming and Gkioxari, Georgia and Dollar, Piotr and Girshick, Ross}, + year={2017}, + month={Oct} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.3 | | 38.0 | 34.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_20200504_231812.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 39.2 | 35.4 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_20200505_003907.log.json) | +| R-101-FPN | caffe | 1x | | | 40.4 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758.log.json)| +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 40.8 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_20200505_071027.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 11.3 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_20200506_004702.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | 8.0 | 42.8 | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201_124310.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 42.7 | 38.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208.log.json)| +| X-101-32x8d-FPN | pytorch | 1x | - | - | 42.8 | 38.3 | | + + +## Pre-trained Models +We also train some models with longer schedules and multi-scale training. Users can fine-tune them for downstream tasks. + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | caffe | 2x | 4.3 | | 40.3 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_20200504_231822.log.json) +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 4.3 | | 40.8 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_20200504_163245.log.json) +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 1x | - | | 43.6 | 39.0 | +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | - | | 44.0 | 39.3 | diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..230181cbeeb9c070dad926892f62d8f482d0ab1e --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..db02d9b880c7de447da881efe184e532ad0ee215 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py new file mode
100644 index 0000000000000000000000000000000000000000..c8cb2d87eedae2777ac8727dff5f398e1c477ab1 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './mask_rcnn_r50_fpn_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a44c01831b508da0a5e1ca3720bb437bcea086d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0471fe86eb50b0fd644f10d77ab0ea7e150c95cf --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict(norm_cfg=dict(requires_grad=False), style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + 
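+    # val and test reuse the deterministic test_pipeline defined above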
val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5d6215d6f6e2f81fa284af0e639f3568429e3a75 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict(norm_cfg=dict(requires_grad=False), style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..374b86446af40b643c4e68501e8215c4817579cf --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..44f7e039fce0d1162c9f1bb11530dd7977439a11 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..86c5b13343b637ce218eed231240195a6768c5d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict(norm_cfg=dict(requires_grad=False), 
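+        # caffe-style ResNet: BN affine parameters stay frozen and the
+        # stride-2 downsampling sits on the 1x1 conv instead of the 3x3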
style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..431e5ab33675290d27e232f4fc5402279b7cf14c --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py @@ -0,0 +1,57 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnet50_caffe_bgr', + backbone=dict(norm_cfg=dict(requires_grad=False), style='caffe'), + rpn_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6a6c92460f1d58b8e8d361fb56ee123f2668ad9f --- /dev/null +++ 
b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..932b1f905155a0d3285daefc4891f5194705e30d --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb6d57e0d25370a59472a4ceb1a3b9da6574608 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0016d1f1df4534ae27de95c4f7ec9976b3ab6d0 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d4189c6fa2a6a3481bf666b713f6ab91812f3d86 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_r101_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py new file mode 100644 
index 0000000000000000000000000000000000000000..ee034b716d6e20bfad03abe769f91fa3cc44c5e9 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py @@ -0,0 +1,63 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnext101_32x8d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch')) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1c124328286c659d800d2c44a2c4e4fee15f26e5 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnext101_32x8d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch')) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + 
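+    # single-scale, no-flip testing: MultiScaleFlipAug is given one
+    # img_scale and flip=False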
dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f326441d6226c469ae544052c92ac0c6fd210159 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnext101_32x8d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch')) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31e5943216f19a87a2f1e6f666efead573f72626 --- /dev/null +++ b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9ba92c5b0b6dcaf10746aeacf7a868348133ff80 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/README.md b/thirdparty/mmdetection/configs/ms_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f71fda8dcca52f553f8cfa1823cf7f5ab85d16b2 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/README.md @@ -0,0 +1,24 @@ +# Mask Scoring R-CNN + +## Introduction + +``` +@inproceedings{huang2019msrcnn, + title={Mask Scoring R-CNN}, + author={Zhaojin Huang and Lichao Huang and Yongchao Gong and Chang Huang and Xinggang Wang}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019}, +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.5 | | 38.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848.log.json) | +| R-50-FPN | caffe | 2x | - | - | 38.8 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_20200506_004738.log.json) | +| R-101-FPN | caffe | 1x | 6.5 | | 40.4 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_20200506_004755.log.json) | +| R-101-FPN | caffe | 2x | - | - | 41.1 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_20200506_011134.log.json) | +| R-X101-32x4d | pytorch | 2x | 7.9 | 11.0 | 41.8 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py) |
[model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206_100113.log.json) | +| R-X101-64x4d | pytorch | 1x | 11.0 | 8.0 | 43.0 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206_091744.log.json) | +| R-X101-64x4d | pytorch | 2x | 11.0 | 8.0 | 42.6 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308_012247.log.json) | diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3bd33c40263fc3a5bc44d09f5e3368ea9a859b0f --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8d4a30a3f446d7af065ff0921667fc7a813b65a2 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r101_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f0781996623e48a475f2d3fb3cc77abebbf7aa2f --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80))) +# model training and testing settings +train_cfg = dict(rcnn=dict(mask_thr_binary=0.5)) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a14317ad90b31a6ecaf4a8452afa9df4ff5b66c0 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) 
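+# the 2x schedule only extends training; the MaskScoringRCNN model, whose
+# MaskIoUHead rescores each mask by its predicted mask IoU, is inherited
+# unchanged from the 1x config above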
+total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..daf4c37584b79a8017d040b0fd0f23d40989f6a0 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80))) +# model training and testing settings +train_cfg = dict(rcnn=dict(mask_thr_binary=0.5)) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4a78a252a9a49889c288ec6cb7d8114c78da5c57 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..61a0cefe4e20b55cd3caaab7dde325a111275726 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..92ce4fbdd88727ceed7c688cc6ec954380fd2cc9 --- /dev/null +++ b/thirdparty/mmdetection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_x101_64x4d_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/nas_fcos/README.md b/thirdparty/mmdetection/configs/nas_fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..87c58dcb9f1e1dfe00e005a5889417266f489d72 --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fcos/README.md @@ -0,0 +1,22 @@ +# NAS-FCOS: Fast Neural Architecture Search for Object Detection + +## Introduction + +``` +@article{wang2019fcos, + title={Nas-fcos: Fast neural architecture search for object detection}, + author={Wang, Ning and Gao, Yang and Chen, Hao and Wang, Peng and Tian, Zhi and Shen, Chunhua}, + journal={arXiv preprint arXiv:1906.04423}, + year={2019} +} +``` + +## Results and Models + +| Head | Backbone | Style | GN-head | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | 
+|:---------:|:---------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| NAS-FCOSHead | R-50 | caffe | Y | 1x | | | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520.log.json) | +| FCOSHead | R-50 | caffe | Y | 1x | | | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521.log.json) | + +**Notes:** +- To be consistent with the author's implementation, we use 4 GPUs with 4 images/GPU. diff --git a/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..76dde57d8a42d5bf9ce1a188270d98bc7fcdb49e --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,99 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe'), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) + +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', 
size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a22f8f1998c46b38f56223837330d2014029ca11 --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,98 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe'), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='NASFCOSHead', + num_classes=80, + in_channels=256, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) + +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + 
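+    # validation and test reuse the deterministic, flip-free test pipeline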
val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/thirdparty/mmdetection/configs/nas_fpn/README.md b/thirdparty/mmdetection/configs/nas_fpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c6e0a0c9c939db4718e6a1a2d69539bace3832ed --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fpn/README.md @@ -0,0 +1,25 @@ +# NAS-FPN: Learning Scalable Feature Pyramid Architecture for Object Detection + +## Introduction + +``` +@inproceedings{ghiasi2019fpn, + title={Nas-fpn: Learning scalable feature pyramid architecture for object detection}, + author={Ghiasi, Golnaz and Lin, Tsung-Yi and Le, Quoc V}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={7036--7045}, + year={2019} +} +``` + +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. RetinaNet is used in the paper. + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50-FPN | 50e | 12.9 | 22.9 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco_20200529_095329.log.json) | +| R-50-NASFPN | 50e | 13.2 | 23.0 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco_20200528_230008.log.json) | + + +**Note**: We find that it is unstable to train NAS-FPN and there is a small chance that results can be 3% mAP lower. 
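+
+As a condensed sketch (mirroring `retinanet_r50_nasfpn_crop640_50e_coco.py`
+below, not an additional config), the essential change relative to the FPN
+baseline is swapping the neck for the searched NAS-FPN:
+
+```python
+# Sketch: replace the hand-designed FPN neck with the searched NAS-FPN.
+_base_ = [
+    '../_base_/models/retinanet_r50_fpn.py',
+    '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py'
+]
+norm_cfg = dict(type='BN', requires_grad=True)
+model = dict(
+    # stack_times: how many searched merging cells are stacked in the neck
+    neck=dict(type='NASFPN', stack_times=7, norm_cfg=norm_cfg),
+    bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg))
+```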
diff --git a/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py b/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..feeabc7119ba72279dc0ad266ec19b7146aec3e6 --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + neck=dict( + relu_before_extra_convs=True, + no_norm_on_lateral=True, + norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg)) +# training and testing settings +train_cfg = dict(assigner=dict(neg_iou_thr=0.5)) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +total_epochs = 50 diff --git a/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py b/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..72fbb0445a4b778d86b935051042d98bac37538b --- /dev/null +++ b/thirdparty/mmdetection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +# model settings +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + type='RetinaNet', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + neck=dict(type='NASFPN', stack_times=7, norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, 
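+                   # RetinaSepBNHead shares the head convs across FPN levels
+                   # but keeps separate BN statistics per level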
norm_cfg=norm_cfg)) +# training and testing settings +train_cfg = dict(assigner=dict(neg_iou_thr=0.5)) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +total_epochs = 50 diff --git a/thirdparty/mmdetection/configs/paa/README.md b/thirdparty/mmdetection/configs/paa/README.md new file mode 100644 index 0000000000000000000000000000000000000000..19b2b4740f129e61373a4190239cac71d3545ffc --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/README.md @@ -0,0 +1,22 @@ +# Probabilistic Anchor Assignment with IoU Prediction for Object Detection + + + +## Results and Models +We provide config files to reproduce the object detection results in the +ECCV 2020 paper for Probabilistic Anchor Assignment with IoU +Prediction for Object Detection. 
+ +| Backbone | Lr schd | Mem (GB) | Score voting | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------------:|:------:|:------:|:--------:| +| R-50-FPN | 12e | 3.7 | True | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.log.json) | +| R-50-FPN | 12e | 3.7 | False | 40.2 | - | +| R-50-FPN | 18e | 3.7 | True | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1.5x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.log.json) | +| R-50-FPN | 18e | 3.7 | False | 41.2 | - | +| R-50-FPN | 24e | 3.7 | True | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.log.json) | +| R-101-FPN | 12e | 6.2 | True | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.log.json) | +| R-101-FPN | 12e | 6.2 | False | 42.4 | - | +| R-101-FPN | 24e | 6.2 | True | 43.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.log.json) | + +**Note**: +1. We find that the performance is unstable with 1x setting and may fluctuate by about 0.2 mAP. We report the best results. 
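+
+The "Score voting: False" rows above do not ship a separate config. A minimal
+sketch of how to reproduce them (reusing the `score_voting` flag of `PAAHead`,
+which `paa_r50_fpn_1x_coco.py` below sets to `True`):
+
+```python
+# Sketch: evaluate PAA without score voting, on top of the released 1x config.
+_base_ = './paa_r50_fpn_1x_coco.py'
+model = dict(bbox_head=dict(score_voting=False))
+```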
diff --git a/thirdparty/mmdetection/configs/paa/paa_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/paa/paa_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a64a012dd32c1c4b857a21bc996778c923c7c461 --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/paa_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@
+_base_ = './paa_r50_fpn_1x_coco.py'
+model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101))
diff --git a/thirdparty/mmdetection/configs/paa/paa_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/paa/paa_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a3bc60f91e42244876aee34a8f330af9e5711ea2 --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/paa_r101_fpn_2x_coco.py @@ -0,0 +1,3 @@
+_base_ = './paa_r101_fpn_1x_coco.py'
+lr_config = dict(step=[16, 22])
+total_epochs = 24
diff --git a/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1.5x_coco.py b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1.5x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7de45783b8114fe15892e9e9f242d5283e1fceea --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1.5x_coco.py @@ -0,0 +1,3 @@
+_base_ = './paa_r50_fpn_1x_coco.py'
+lr_config = dict(step=[12, 16])
+total_epochs = 18
diff --git a/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e66cd1b77968459a01eec82c819c33a0403a2358 --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_1x_coco.py @@ -0,0 +1,70 @@
+_base_ = [
+    '../_base_/datasets/coco_detection.py',
+    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
+]
+model = dict(
+    type='PAA',
+    pretrained='torchvision://resnet50',
+    backbone=dict(
+        type='ResNet',
+        depth=50,
+        num_stages=4,
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        norm_eval=True,
+        style='pytorch'),
+    neck=dict(
+        type='FPN',
+        in_channels=[256, 512, 1024, 2048],
+        out_channels=256,
+        start_level=1,
+        add_extra_convs='on_output',
+        num_outs=5),
+    bbox_head=dict(
+        type='PAAHead',
+        reg_decoded_bbox=True,
+        score_voting=True,
+        topk=9,
+        num_classes=80,
+        in_channels=256,
+        stacked_convs=4,
+        feat_channels=256,
+        anchor_generator=dict(
+            type='AnchorGenerator',
+            ratios=[1.0],
+            octave_base_scale=8,
+            scales_per_octave=1,
+            strides=[8, 16, 32, 64, 128]),
+        bbox_coder=dict(
+            type='DeltaXYWHBBoxCoder',
+            target_means=[.0, .0, .0, .0],
+            target_stds=[0.1, 0.1, 0.2, 0.2]),
+        loss_cls=dict(
+            type='FocalLoss',
+            use_sigmoid=True,
+            gamma=2.0,
+            alpha=0.25,
+            loss_weight=1.0),
+        loss_bbox=dict(type='GIoULoss', loss_weight=1.3),
+        loss_centerness=dict(
+            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)))
+# training and testing settings
+train_cfg = dict(
+    assigner=dict(
+        type='MaxIoUAssigner',
+        pos_iou_thr=0.1,
+        neg_iou_thr=0.1,
+        min_pos_iou=0,
+        ignore_iof_thr=-1),
+    allowed_border=-1,
+    pos_weight=-1,
+    debug=False)
+test_cfg = dict(
+    nms_pre=1000,
+    min_bbox_size=0,
+    score_thr=0.05,
+    nms=dict(type='nms', iou_threshold=0.6),
+    max_per_img=100)
+# optimizer
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
diff --git a/thirdparty/mmdetection/configs/paa/paa_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_2x_coco.py new file mode 100644 index
0000000000000000000000000000000000000000..529f07439e00789fe7f378b4d7b13da708db1fa6 --- /dev/null +++ b/thirdparty/mmdetection/configs/paa/paa_r50_fpn_2x_coco.py @@ -0,0 +1,3 @@
+_base_ = './paa_r50_fpn_1x_coco.py'
+lr_config = dict(step=[16, 22])
+total_epochs = 24
diff --git a/thirdparty/mmdetection/configs/pafpn/README.md b/thirdparty/mmdetection/configs/pafpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0d3ab9e1fc36ecaa54f1b87855436b542e48ffc0 --- /dev/null +++ b/thirdparty/mmdetection/configs/pafpn/README.md @@ -0,0 +1,22 @@
+# Path Aggregation Network for Instance Segmentation
+
+## Introduction
+
+```
+@inproceedings{liu2018path,
+  author = {Shu Liu and
+            Lu Qi and
+            Haifang Qin and
+            Jianping Shi and
+            Jiaya Jia},
+  title = {Path Aggregation Network for Instance Segmentation},
+  booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+  year = {2018}
+}
+```
+
+## Results and Models
+
+| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:|
+| R-50-FPN | pytorch | 1x | 4.0 | 17.2 | 37.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_20200503_105836.log.json) |
diff --git a/thirdparty/mmdetection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py b/thirdparty/mmdetection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b2fdef91c5cc8396baee9c2d8a09556162443078 --- /dev/null +++ b/thirdparty/mmdetection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py @@ -0,0 +1,8 @@
+_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+
+model = dict(
+    neck=dict(
+        type='PAFPN',
+        in_channels=[256, 512, 1024, 2048],
+        out_channels=256,
+        num_outs=5))
diff --git a/thirdparty/mmdetection/configs/pascal_voc/README.md b/thirdparty/mmdetection/configs/pascal_voc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c7cb1dce9888e30135d42367b4ccd982193439f6 --- /dev/null +++ b/thirdparty/mmdetection/configs/pascal_voc/README.md @@ -0,0 +1,8 @@
+# PASCAL VOC Dataset
+
+## Results and Models
+
+| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download |
+|:------------:|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:|
+| Faster R-CNN | R-50 | pytorch | 1x | 2.6 | - | 79.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/faster_rcnn_r50_fpn_1x_voc0712_20200624-c9895d40.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/20200623_015208.log.json) |
+| RetinaNet | R-50 | pytorch | 1x | 2.1 | - | 77.3 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py) |
[model](http://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200617-47cbdd0e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200616_014642.log.json) | diff --git a/thirdparty/mmdetection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py b/thirdparty/mmdetection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..b48203a54a5ee06b22f35c5c80b9da9647caec8d --- /dev/null +++ b/thirdparty/mmdetection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +total_epochs = 4 # actual epoch = 4 * 3 = 12 diff --git a/thirdparty/mmdetection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py b/thirdparty/mmdetection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..cf8b9bf6f69eedebd2d982b53a24a5bfa226a02c --- /dev/null +++ b/thirdparty/mmdetection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=20)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +total_epochs = 4 # actual epoch = 4 * 3 = 12 diff --git a/thirdparty/mmdetection/configs/pascal_voc/ssd300_voc0712.py b/thirdparty/mmdetection/configs/pascal_voc/ssd300_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..677ed07c3a590bc2ca8a2d5949194a9f282b6dc9 --- /dev/null +++ b/thirdparty/mmdetection/configs/pascal_voc/ssd300_voc0712.py @@ -0,0 +1,69 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + num_classes=20, anchor_generator=dict(basesize_ratio_range=(0.2, + 0.9)))) +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + 
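+        # test at a single fixed 300x300 scale; flip TTA is disabled below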
flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + type='RepeatDataset', times=10, dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 20]) +checkpoint_config = dict(interval=1) +# runtime settings +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/pascal_voc/ssd512_voc0712.py b/thirdparty/mmdetection/configs/pascal_voc/ssd512_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..365a65fc64bf693d812c97855942827b10bd8e64 --- /dev/null +++ b/thirdparty/mmdetection/configs/pascal_voc/ssd512_voc0712.py @@ -0,0 +1,53 @@ +_base_ = 'ssd300_voc0712.py' +input_size = 512 +model = dict( + backbone=dict(input_size=input_size), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + input_size=input_size, + strides=[8, 16, 32, 64, 128, 256, 512], + basesize_ratio_range=(0.15, 0.9), + ratios=([2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2])))) +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/pisa/README.md b/thirdparty/mmdetection/configs/pisa/README.md new file mode 100644 index 0000000000000000000000000000000000000000..75e58b7acef03b4bd719dc17a820be06f8ca89be --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/README.md @@ -0,0 +1,38 @@ +# Prime Sample Attention in Object Detection + +## Introduction + +``` +@inproceedings{cao2019prime, + title={Prime sample attention in object detection}, + author={Cao, Yuhang and Chen, Kai and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2020} +} +``` + +## Results and models + + +| PISA | Network | Backbone | Lr schd | box AP | mask AP | Config | Download | +|:----:|:-------:|:-------------------:|:-------:|:------:|:-------:|:------:|:--------:| +| × | Faster R-CNN | R-50-FPN | 1x 
| 36.4 | | - |
+| √ | Faster R-CNN | R-50-FPN | 1x | 38.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco_20200506_185619.log.json) |
+| × | Faster R-CNN | X101-32x4d-FPN | 1x | 40.1 | | - |
+| √ | Faster R-CNN | X101-32x4d-FPN | 1x | 41.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco_20200505_181503.log.json) |
+| × | Mask R-CNN | R-50-FPN | 1x | 37.3 | 34.2 | - |
+| √ | Mask R-CNN | R-50-FPN | 1x | 39.1 | 35.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco_20200508_150500.log.json) |
+| × | Mask R-CNN | X101-32x4d-FPN | 1x | 41.1 | 37.1 | - |
+| √ | Mask R-CNN | X101-32x4d-FPN | 1x | | | |
+| × | RetinaNet | R-50-FPN | 1x | 35.6 | | - |
+| √ | RetinaNet | R-50-FPN | 1x | 36.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco_20200504_014311.log.json) |
+| × | RetinaNet | X101-32x4d-FPN | 1x | 39.0 | | - |
+| √ | RetinaNet | X101-32x4d-FPN | 1x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco_20200505_001404.log.json) |
+| × | SSD300 | VGG16 | 1x | 25.6 | | - |
+| √ | SSD300 | VGG16 | 1x | 27.6 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd300_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco_20200504_144325.log.json) |
+| × | SSD512 | VGG16 | 1x | 29.3 | | - |
+| √ | SSD512 | VGG16 | 1x | 31.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd512_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco_20200508_131030.log.json) |
+
+**Notes:**
+- In the original paper, all models are trained and
tested on mmdet v1.x, thus results may not be exactly the same with this release on v2.0. +- It is noted PISA only modifies the training pipeline so the inference time remains the same with the baseline. diff --git a/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ab70f464ce45b27a27f2c4fde610b6a997ac0553 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) + +train_cfg = dict( + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))) + +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e735ecad36877f318ea97e9686378bd0ed0f11b1 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) + +train_cfg = dict( + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))) + +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d1008c3f0e6d7f004fed6dd6a93ed7f8a9ee7003 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) + +train_cfg = dict( + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))) + +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py 
b/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..568792588456ef57b6f90189bf5dfec2a5765236 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) + +train_cfg = dict( + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))) + +test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_num=2000, + nms_thr=0.7, + min_bbox_size=0)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b4aa4db51672eee8a5ab8d94522e0f9fadd28108 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) + +train_cfg = dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4f8f273d3976677aed3e8697dee4b39e808922c1 --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../retinanet/retinanet_x101_32x4d_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) + +train_cfg = dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_ssd300_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fe5f4f6d05cb4a9efddaae868d859490db53ae1c --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_ssd300_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd300_coco.py' + +model = dict(bbox_head=dict(type='PISASSDHead')) + +train_cfg = dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2)) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/pisa/pisa_ssd512_coco.py b/thirdparty/mmdetection/configs/pisa/pisa_ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1404ee05340523169562f93999e024561324940e --- /dev/null +++ b/thirdparty/mmdetection/configs/pisa/pisa_ssd512_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd512_coco.py' + +model = dict(bbox_head=dict(type='PISASSDHead')) + +train_cfg = dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2)) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/point_rend/README.md b/thirdparty/mmdetection/configs/point_rend/README.md 
new file mode 100644 index 0000000000000000000000000000000000000000..e946973e7002f99aadcb42bd2f077434e464b2f9 --- /dev/null +++ b/thirdparty/mmdetection/configs/point_rend/README.md @@ -0,0 +1,20 @@
+# PointRend
+
+## Introduction
+```
+@InProceedings{kirillov2019pointrend,
+  title={{PointRend}: Image Segmentation as Rendering},
+  author={Alexander Kirillov and Yuxin Wu and Kaiming He and Ross Girshick},
+  journal={ArXiv:1912.08193},
+  year={2019}
+}
+```
+
+## Results and models
+
+| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: |
+| R-50-FPN | caffe | 1x | 4.6 | | 38.4 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco_20200612_161407.log.json) |
+| R-50-FPN | caffe | 3x | 4.6 | | 41.0 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco_20200614_002632.log.json) |
+
+Note: All models are trained with multi-scale; the shorter side of the input image is randomly scaled to one of (640, 672, 704, 736, 768, 800).
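+
+As a sketch of the multi-scale resize step implied by this note (the actual
+pipeline lives in the base config `mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py`;
+the exact `Resize` fields below are assumed from the MMDetection 2.x pipeline
+API):
+
+```python
+# Sketch: randomly pick one of six shorter-side scales per training image.
+dict(
+    type='Resize',
+    img_scale=[(1333, 640), (1333, 672), (1333, 704),
+               (1333, 736), (1333, 768), (1333, 800)],
+    multiscale_mode='value',  # sample a scale from the list, not from a range
+    keep_ratio=True)
+```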
diff --git a/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dc7f97554b2ca905ad098b487cd7e0393d30cd1d --- /dev/null +++ b/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,42 @@
+_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py'
+# model settings
+model = dict(
+    type='PointRend',
+    roi_head=dict(
+        type='PointRendRoIHead',
+        mask_roi_extractor=dict(
+            type='GenericRoIExtractor',
+            aggregation='concat',
+            roi_layer=dict(
+                _delete_=True, type='SimpleRoIAlign', output_size=14),
+            out_channels=256,
+            featmap_strides=[4]),
+        mask_head=dict(
+            _delete_=True,
+            type='CoarseMaskHead',
+            num_fcs=2,
+            in_channels=256,
+            conv_out_channels=256,
+            fc_out_channels=1024,
+            num_classes=80,
+            loss_mask=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)),
+        point_head=dict(
+            type='MaskPointHead',
+            num_fcs=3,
+            in_channels=256,
+            fc_channels=256,
+            num_classes=80,
+            coarse_pred_each_layer=True,
+            loss_point=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
+# model training and testing settings
+train_cfg = dict(
+    rcnn=dict(
+        mask_size=7,
+        num_points=14 * 14,
+        oversample_ratio=3,
+        importance_sample_ratio=0.75))
+test_cfg = dict(
+    rcnn=dict(
+        subdivision_steps=5, subdivision_num_points=28 * 28, scale_factor=2))
diff --git a/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e00eb744c76a770b035ecb5f3751e95df02025a --- /dev/null +++ b/thirdparty/mmdetection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@
+_base_ = './point_rend_r50_caffe_fpn_mstrain_1x_coco.py'
+# learning policy
+lr_config = dict(step=[28, 34])
+total_epochs = 36
diff --git a/thirdparty/mmdetection/configs/regnet/README.md b/thirdparty/mmdetection/configs/regnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..67ba03d2de94dffdbd5c1abedfa1ff8f84d52661 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/README.md @@ -0,0 +1,90 @@
+# Designing Network Design Spaces
+
+## Introduction
+
+We implement RegNetX and RegNetY models in detection systems and provide their first results on Mask R-CNN, Faster R-CNN and RetinaNet.
+
+The pre-trained models are converted from the [model zoo of pycls](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md).
+
+```
+@article{radosavovic2020designing,
+    title={Designing Network Design Spaces},
+    author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár},
+    year={2020},
+    eprint={2003.13678},
+    archivePrefix={arXiv},
+    primaryClass={cs.CV}
+}
+```
+
+## Usage
+
+To use a RegNet model, there are two steps:
+1. Convert the model to the ResNet style supported by MMDetection
+2. Modify the backbone and neck in the config accordingly
+
+### Convert model
+
+We already provide models with FLOPs ranging from 400MF to 12GF in our model zoo.
+
+For more general usage, we also provide the script `regnet2mmdet.py` in the tools directory to convert the keys of models pretrained by [pycls](https://github.com/facebookresearch/pycls/) to
+ResNet-style checkpoints used in MMDetection.
+
+```bash
+python -u tools/regnet2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH}
+```
+This script converts a model from `PRETRAIN_PATH` and stores the converted model in `STORE_PATH`.
+
+
+### Modify config
+
+Users can modify the backbone's `depth` and the corresponding keys in `arch` in the config according to the configs in the [pycls model zoo](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md).
+The parameter `in_channels` in FPN can be found in Figures 15 & 16 of the paper (`wi` in the legend).
+This directory already provides some configs with their performance, using RegNetX models from the 800MF to 12GF level.
+For other pre-trained or self-implemented RegNet models, users are responsible for checking these parameters themselves.
+
+**Note**: Although Fig. 15 & 16 also provide `w0`, `wa`, `wm`, `group_w`, and `bot_mul` for `arch`, they are quantized and thus inaccurate; using them sometimes produces a backbone that does not match the keys in the pre-trained model.
+
+## Results
+
+### Mask R-CNN
+| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: |
+| [R-50-FPN](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.4 | 12.0 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) |
+|[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) |
+|[RegNetX-4.0GF-FPN](./mask_rcnn_regnetx-4GF_fpn_1x_coco.py)| pytorch | 1x |5.5||41.5|37.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217.log.json) |
+| [R-101-FPN](../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py)| pytorch | 1x | 6.4 | 10.3 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) |
+|[RegNetX-6.4GF-FPN](./mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py)| pytorch | 1x |6.1 ||41.0|37.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py) |
[model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439.log.json) | +| [X-101-32x4d-FPN](../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | pytorch | 1x | 7.6 | 9.4 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +|[RegNetX-8.0GF-FPN](./mask_rcnn_regnetx-8GF_fpn_1x_coco.py)| pytorch | 1x |6.4 ||41.7|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515.log.json) | +|[RegNetX-12GF-FPN](./mask_rcnn_regnetx-12GF_fpn_1x_coco.py)| pytorch | 1x |7.4 ||42.2|38|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552.log.json) | +|[RegNetX-3.2GF-FPN-DCN-C3-C5](./mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726.log.json) | + +### Faster R-CNN +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.0 | 18.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x | 4.5||39.9|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py)| pytorch | 2x | 4.5||41.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955.log.json) | + +### RetinaNet +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../retinanet/retinanet_r50_fpn_1x_coco.py) | pytorch | 1x | 3.8 | 16.6 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +|[RegNetX-800MF-FPN](./retinanet_regnetx-800MF_fpn_1x_coco.py)| pytorch | 1x |2.5||35.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-1.6GF-FPN](./retinanet_regnetx-1.6GF_fpn_1x_coco.py)| pytorch | 1x |3.3||37.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-3.2GF-FPN](./retinanet_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |4.2 ||39.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | + +### Pre-trained models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
+
+| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :-----: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: |
+|Faster RCNN |[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.0 ||42.2|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200520_224253-bf85ae3e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200520_224253.log.json) |
+|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) |
+
+### Notice
+1. These models are trained with a different weight decay, `weight_decay=5e-5`, following the setting used when training RegNets on ImageNet. This brings an improvement of at least 0.7 AP absolute for RegNet backbones but does not help models using ResNet-50.
+2. RetinaNets with RegNet backbones are trained with a learning rate of 0.02 and gradient clipping. We find that the 0.02 learning rate improves results by at least 0.7 AP absolute and that gradient clipping is necessary to stabilize training; neither change improves the ResNet-50-FPN RetinaNet. Both settings appear in the sketch below.
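+
+For reference, a minimal sketch of how these two settings are expressed (the `optimizer` and `optimizer_config` fields are exactly the ones used by the config files added below in this diff):
+
+```python
+# SGD with the smaller RegNet weight decay (5e-5 instead of the usual 1e-4)
+optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=5e-5)
+# gradient clipping, needed to stabilize RetinaNet-RegNet training
+optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
+```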
diff --git a/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4fc61a3b523e0b29447e858d98d683a9df00921a --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4645b694eb7b1d55361279d8fef965924f67b6aa --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..66e636ae5ceb9b6f012fc0e94207cb4c63fad8fc --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,63 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = 
dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..104d6d43bd958d49f75d54965b326ebac29ae330 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_12gf', + backbone=dict( + type='RegNet', + arch='regnetx_12gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[224, 448, 896, 2240], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..19168b54d9e22ddf7b48f753844b9983b68c47f1 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dd5153e6ef0ef16b8607279634ce6f1593bd3c1c --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = 'mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..59255b43483d85d582748ebf31a6047a51bc9794 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,65 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, 
momentum=0.9, weight_decay=0.00005) +lr_config = dict(step=[28, 34]) +total_epochs = 36 +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8830ef08481bae863bd1401223f4cbd14210e87f --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_4.0gf', + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7569ef3825737cfbf4c2680a655c1b197e0a8053 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_6.4gf', + backbone=dict( + type='RegNet', + arch='regnetx_6.4gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[168, 392, 784, 1624], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b5890264672f0996d98db422365746e85fcea8e6 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_8.0gf', + backbone=dict( + type='RegNet', + arch='regnetx_8.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[80, 240, 720, 1920], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4f2beb850ded95402d6b44c80553f224e15fb557 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_1.6gf', + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..8f483a17ace5c101548f640b95cc94030f37a0b3 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='open-mmlab://regnetx_3.2gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fe1d659f1a58ddb6e662d74a41c77005d2ee0638 --- /dev/null +++ b/thirdparty/mmdetection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://regnetx_800mf', + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/thirdparty/mmdetection/configs/reppoints/README.md b/thirdparty/mmdetection/configs/reppoints/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0c22aa84b6b44a74a1af8f17bf91b3d21c994e83 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/README.md @@ -0,0 +1,52 @@ +# RepPoints: Point Set Representation for Object Detection + +By [Ze Yang](https://yangze.tech/), [Shaohui Liu](http://b1ueber2y.me/), and [Han Hu](https://ancientmooner.github.io/). + +We provide code support and configuration files to reproduce the results in the paper for +["RepPoints: Point Set Representation for Object Detection"](https://arxiv.org/abs/1904.11490) on COCO object detection. 
+
+## Introduction
+
+**RepPoints**, initially described in [arXiv](https://arxiv.org/abs/1904.11490), is a new representation method for visual objects, on which visual understanding tasks are typically centered. Visual object representation, aiming at both geometric description and appearance feature extraction, is conventionally achieved by `bounding box + RoIPool (RoIAlign)`. The bounding box representation is convenient to use; however, it provides only a rectangular localization of objects that lacks geometric precision and may consequently degrade feature quality. Our new representation, RepPoints, models objects by a `point set` instead of a `bounding box`; the points learn to adaptively position themselves over an object so as to circumscribe the object's `spatial extent` and enable `semantically aligned feature extraction`. This richer and more flexible representation maintains the convenience of bounding boxes while facilitating various visual understanding applications. This repo demonstrates the effectiveness of RepPoints for COCO object detection.
+
+Another feature of this repo is the demonstration of an `anchor-free detector`, which can be as effective as state-of-the-art anchor-based detection methods. The anchor-free detector can utilize either `bounding box` or `RepPoints` as the basic object representation.
+
+![Learning RepPoints in Object Detection.](reppoints.png)
+
+
+## Citing RepPoints
+
+```
+@inproceedings{yang2019reppoints,
+    title={RepPoints: Point Set Representation for Object Detection},
+    author={Yang, Ze and Liu, Shaohui and Hu, Han and Wang, Liwei and Lin, Stephen},
+    booktitle={The IEEE International Conference on Computer Vision (ICCV)},
+    month={Oct},
+    year={2019}
+}
+```
+
+## Results and models
+
+The results on COCO 2017val are shown in the table below.
+
+| Method | Backbone | GN | Anchor | convert func | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download |
+|:---------:|:-------------:|:---:|:------:|:------------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:|
+| BBox | R-50-FPN | Y | single | - | 1x | 3.9 | 15.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329_145916.log.json) |
+| BBox | R-50-FPN | Y | none | - | 1x | 3.9 | 15.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330_233609.log.json) |
+| RepPoints | R-50-FPN | N | none | moment | 1x | 3.3 | 18.5 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330_233609.log.json) |
+| RepPoints | R-50-FPN | Y | none | moment | 1x | 3.9 | 17.5 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329_145952.log.json) |
+| RepPoints | R-50-FPN | Y | none | moment | 2x | 3.9 | - | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329_150020.log.json) |
+| RepPoints | R-101-FPN | Y | none | moment | 2x | 5.8 | 13.7 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329_132205.log.json) |
+| RepPoints | R-101-FPN-DCN | Y | none | moment | 2x | 5.9 | 12.1 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132134.log.json) |
+| RepPoints | X-101-FPN-DCN | Y | none | moment | 2x | 7.1 | 9.3 | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132201.log.json) |
+
+**Notes:**
+
+- `R-xx`, `X-xx` denote the ResNet and ResNeXt architectures, respectively.
+- `DCN` denotes replacing the 3x3 conv with a 3x3 deformable convolution in the `c3-c5` stages of the backbone.
+- `none` in the `anchor` column means a 2-d `center point` (x,y) is used to represent the initial object hypothesis; `single` denotes that one 4-d anchor box (x,y,w,h) with an IoU-based label assignment criterion is adopted.
+- `moment`, `partial MinMax` and `MinMax` in the `convert func` column are three functions that convert a point set to a pseudo box; see the sketch below.
+- Note that the results here are slightly different from those reported in the paper, due to the framework change. While the original paper uses an [MXNet](https://mxnet.apache.org/) implementation, we re-implement the method in [PyTorch](https://pytorch.org/) based on mmdetection.
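+
+A rough sketch of two of these convert functions (simplified for illustration; not the actual mmdetection implementation, and the function names are made up):
+
+```python
+import torch
+
+def points2bbox_minmax(pts: torch.Tensor) -> torch.Tensor:
+    """pts: (N, num_points, 2) -> pseudo boxes (N, 4) as (x1, y1, x2, y2)."""
+    return torch.cat([pts.min(dim=1).values, pts.max(dim=1).values], dim=1)
+
+def points2bbox_moment(pts: torch.Tensor, moment_transfer=None) -> torch.Tensor:
+    """Pseudo box from the first two moments: center = mean, half size = scaled std."""
+    if moment_transfer is None:
+        moment_transfer = torch.zeros(2)  # a learnable parameter in the real head
+    mean, std = pts.mean(dim=1), pts.std(dim=1)
+    half_wh = std * moment_transfer.exp()
+    return torch.cat([mean - half_wh, mean + half_wh], dim=1)
+```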
diff --git a/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b24c8db768423de12d1e8582bb26dd71218f52ee --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax', use_grid_points=True)) diff --git a/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f225a32080c749c2908360a998e383323fbd317c --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax', use_grid_points=True)) +# training and testing settings +train_cfg = dict( + init=dict( + assigner=dict( + _delete_=True, + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1))) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints.png b/thirdparty/mmdetection/configs/reppoints/reppoints.png new file mode 100644 index 0000000000000000000000000000000000000000..16d491b9ec62835d91b474b7d69c46bd25da25e5 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8c4c485b83297b7972632a0fc8dbc2b27a3620afecbc7b42aaf2183e3f98f6b +size 1198109 diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f56a46b3c002cdec630bb06df66a4fc9e7804a8 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax')) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..241754cfb45ed998e7c2e3bb8e662a49fa341e89 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + pretrained='torchvision://resnet101', + backbone=dict( + depth=101, + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..19efa0dd756993c9f51a3b9589e558beb2eb5f83 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = 
'./reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6d1c89b208217f71add73b76c7e2daeb67b23979 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py @@ -0,0 +1,67 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RepPointsDetector', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RepPointsHead', + num_classes=80, + in_channels=256, + feat_channels=256, + point_feat_channels=256, + stacked_convs=3, + num_points=9, + gradient_mul=0.1, + point_strides=[8, 16, 32, 64, 128], + point_base_scale=4, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5), + loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0), + transform_method='moment')) +# training and testing settings +train_cfg = dict( + init=dict( + assigner=dict(type='PointAssigner', scale=4, pos_num=1), + allowed_border=-1, + pos_weight=-1, + debug=False), + refine=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +optimizer = dict(lr=0.01) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..337f167c820979f345eef120a936195d8f5975c2 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './reppoints_moment_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict(neck=dict(norm_cfg=norm_cfg), bbox_head=dict(norm_cfg=norm_cfg)) +optimizer = dict(lr=0.01) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b9c712d998092bdd7bf7c2d03dac22c58f253c08 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 
100644 index 0000000000000000000000000000000000000000..c33019da0ccbc3b37bd58bfa4e6f2cfca68cbd48 --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py b/thirdparty/mmdetection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a63bd0862be6d5f363c5d481bade3e8e2e8433a --- /dev/null +++ b/thirdparty/mmdetection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='partial_minmax')) diff --git a/thirdparty/mmdetection/configs/res2net/README.md b/thirdparty/mmdetection/configs/res2net/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b326ba4a5ee512de6b77238f7f29519d693ac992 --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/README.md @@ -0,0 +1,52 @@ +# Res2Net for object detection and instance segmentation + +## Introduction + +We propose a novel building block for CNNs, namely Res2Net, by constructing hierarchical residual-like connections within one single residual block. The Res2Net represents multi-scale features at a granular level and increases the range of receptive fields for each network layer. + +| Backbone |Params. | GFLOPs | top-1 err. | top-5 err. | +| :-------------: |:----: | :-----: | :--------: | :--------: | +| ResNet-101 |44.6 M | 7.8 | 22.63 | 6.44 | +| ResNeXt-101-64x4d |83.5M | 15.5 | 20.40 | - | +| HRNetV2p-W48 | 77.5M | 16.1 | 20.70 | 5.50 | +| Res2Net-101 | 45.2M | 8.3 | 18.77 | 4.64 | + +Compared with other backbone networks, Res2Net requires fewer parameters and FLOPs. + +**Note:** +- GFLOPs for classification are calculated with image size (224x224). 
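+
+A minimal sketch of this idea, i.e. hierarchical residual-like connections inside one block (simplified from the paper; not the code vendored in this repo):
+
+```python
+import torch
+import torch.nn as nn
+
+class Res2NetUnit(nn.Module):
+    """Split channels into `scales` groups; each group's 3x3 conv also sees the
+    previous group's output, so the receptive field grows within the block."""
+
+    def __init__(self, channels: int, scales: int = 4):
+        super().__init__()
+        assert channels % scales == 0
+        width = channels // scales
+        # one 3x3 conv per split, except the first split which passes through
+        self.convs = nn.ModuleList(
+            nn.Conv2d(width, width, 3, padding=1) for _ in range(scales - 1))
+        self.scales = scales
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        xs = torch.chunk(x, self.scales, dim=1)
+        ys = [xs[0]]  # y1 = x1
+        for i, conv in enumerate(self.convs):
+            inp = xs[i + 1] if i == 0 else xs[i + 1] + ys[-1]
+            ys.append(conv(inp))  # y_{i+1} = K_{i+1}(x_{i+1} + y_i)
+        return torch.cat(ys, dim=1)
+```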
+ +``` +@article{gao2019res2net, + title={Res2Net: A New Multi-scale Backbone Architecture}, + author={Gao, Shang-Hua and Cheng, Ming-Ming and Zhao, Kai and Zhang, Xin-Yu and Yang, Ming-Hsuan and Torr, Philip}, + journal={IEEE TPAMI}, + year={2020}, + doi={10.1109/TPAMI.2019.2938758}, +} +``` +## Results and Models +### Faster R-CNN +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.4 | - | 43.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco_20200514_231734.log.json) | +### Mask R-CNN +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.9 | - | 43.6 | 38.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco-17f061e8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco_20200515_002413.log.json) | +### Cascade R-CNN +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 20e | 7.8 | - | 45.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco-f4b7b7db.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco_20200515_091644.log.json) | +### Cascade Mask R-CNN +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +R2-101-FPN | pytorch | 20e | 9.5 | - | 46.4 | 40.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco-8a7b41e1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco_20200515_091645.log.json) | +### Hybrid Task Cascade (HTC) +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R2-101-FPN | pytorch | 20e | - | - | 47.5 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/htc_r2_101_fpn_20e_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco-3a8d2112.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco_20200515_150029.log.json) | + + +- Res2Net ImageNet pretrained models are in [Res2Net-PretrainedModels](https://github.com/Res2Net/Res2Net-PretrainedModels). +- More applications of Res2Net are in [Res2Net-Github](https://github.com/Res2Net/). diff --git a/thirdparty/mmdetection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50df4e2db500d575eaddd7538b49cc808e30b50e --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict(type='Res2Net', depth=101, scales=4, base_width=26)) diff --git a/thirdparty/mmdetection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1cac759ab66323cf034f21a9afff770f79c10035 --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict(type='Res2Net', depth=101, scales=4, base_width=26)) diff --git a/thirdparty/mmdetection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..85004e02c31edeb487f765835815c6f80c18fb6f --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict(type='Res2Net', depth=101, scales=4, base_width=26)) diff --git a/thirdparty/mmdetection/configs/res2net/htc_r2_101_fpn_20e_coco.py b/thirdparty/mmdetection/configs/res2net/htc_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e7647a6a148615a6b72e6b7a11a8d7be0742b77 --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/htc_r2_101_fpn_20e_coco.py @@ -0,0 +1,7 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict(type='Res2Net', depth=101, scales=4, base_width=26)) +# learning policy +lr_config = dict(step=[16, 19]) +total_epochs = 20 diff --git a/thirdparty/mmdetection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a620188807218a9c80ad89ac6002dda3ea4b830c --- /dev/null +++ b/thirdparty/mmdetection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict(type='Res2Net', depth=101, scales=4, base_width=26)) diff --git a/thirdparty/mmdetection/configs/resnest/README.md b/thirdparty/mmdetection/configs/resnest/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..07c916407e82d6e5f468c258003085dff129fe1e --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/README.md @@ -0,0 +1,42 @@ +# ResNeSt: Split-Attention Networks + +## Introduction + +```latex +@article{zhang2020resnest, +title={ResNeSt: Split-Attention Networks}, +author={Zhang, Hang and Wu, Chongruo and Zhang, Zhongyue and Zhu, Yi and Zhang, Zhi and Lin, Haibin and Sun, Yue and He, Tong and Muller, Jonas and Manmatha, R. and Li, Mu and Smola, Alexander}, +journal={arXiv preprint arXiv:2004.08955}, +year={2020} +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 4.8 | - | 42.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20200926_125502-20289c16.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco-20200926_125502.log.json) | +|S-101-FPN | pytorch | 1x | 7.1 | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20201006_021058-421517f1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco-20201006_021058.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 5.5 | - | 42.6 | 38.1 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco_20200926_125503-8a2c3d47.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco-20200926_125503.log.json) | +|S-101-FPN | pytorch | 1x | 7.8 | - | 45.2 | 40.2 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco_20201005_215831-af60cdf9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco-20201005_215831.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | 
Mem (GB) | Inf time (fps) | box AP | Config | Download |
+| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: |
+|S-50-FPN | pytorch | 1x | - | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20201122_213640-763cc7b5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco-20201122_213640.log.json) |
+|S-101-FPN | pytorch | 1x | 8.4 | - | 46.8 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20201005_113242-b9459f8f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco-20201005_113242.log.json) |
+
+### Cascade Mask R-CNN
+
+| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: |
+|S-50-FPN | pytorch | 1x | - | - | 45.4 | 39.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco_20201122_104428-99eca4c7.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco-20201122_104428.log.json) |
+|S-101-FPN | pytorch | 1x | 10.5 | - | 47.7 | 41.4 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco_20201005_113243-42607475.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco-20201005_113243.log.json) |
diff --git a/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3995603a6cee82a7d7cff620cb8bffe14b15b6a1 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + 
pretrained='open-mmlab://resnest101', + backbone=dict(stem_channels=128, depth=101)) diff --git a/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f2cf444d4cd49220ea2e0f7cf25c81b57850a202 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,118 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + pretrained='open-mmlab://resnest50', + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + 
train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..53964a3e1d2c9fd2e9eb905c6d6f645f62ea9957 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnest101', + backbone=dict(stem_channels=128, depth=101)) diff --git a/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..78a154bba2e12e1daec0efaa6a1cb67016084671 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,116 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + pretrained='open-mmlab://resnest50', + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], )) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + 
dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/thirdparty/mmdetection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1915ab1b4f013efacaedcdae08e93176cfe3bd55 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnest101', + backbone=dict(stem_channels=128, depth=101)) diff --git a/thirdparty/mmdetection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/thirdparty/mmdetection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..422fbca1bb159d0e7f174eaa16680783c306386c --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + pretrained='open-mmlab://resnest50', + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git 
a/thirdparty/mmdetection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89e077d620f3539de86fb2e10c6f7e342ad4bf0c --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnest101', + backbone=dict(stem_channels=128, depth=101)) diff --git a/thirdparty/mmdetection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f21fd040614425e8b36415b660823ad6bd38e1 --- /dev/null +++ b/thirdparty/mmdetection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,64 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + pretrained='open-mmlab://resnest50', + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch'), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/retinanet/README.md b/thirdparty/mmdetection/configs/retinanet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b7953ffc9f73dfe125a3501bb00b5df85fc27708 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/README.md @@ -0,0 +1,26 @@ +# Focal Loss for Dense Object Detection + +## Introduction +``` +@inproceedings{lin2017focal, + title={Focal loss for dense object detection}, + author={Lin, Tsung-Yi and Goyal, Priya and Girshick, Ross and He, Kaiming and Doll{\'a}r, Piotr}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time 
(fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 18.6 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531-f11027c5.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531_012518.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 19.0 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131-fdb43119.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131_114738.log.json) | +| R-101-FPN | caffe | 1x | 5.5 | 14.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531-b428fa0f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531_012536.log.json) | +| R-101-FPN | pytorch | 1x | 5.7 | 15.0 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130-7a93545f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130_003055.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131-5560aee8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131_114859.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 12.1 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130-5c8b7ec4.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130_003004.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131-237fc5e1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131_114812.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.0 | 8.7 | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130-366f5af1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130_003008.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131-bca068ab.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131_114833.log.json) | diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..21d227b044728a30890b93fc769743d2124956c1 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e6f46340d551abaa22ff2176bec22824188d6cb --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c12088a266d7ccad31bd2233ee5a9ee90f4c2b14 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r101_fpn_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..028c1a3ad48f49ee22e0ee70d07555d58f3c73d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + 
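+    # Caffe-style weights expect BGR input (to_rgb=False) with mean subtraction only (std kept at 1).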
dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f2a0decf8fb46f0dde87e8e5f9d1608ce8ffe576 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a42c4925e10ef2fa591893aa2e05de3c47f18ab4 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2fb73e51ef02ca582b125387278ee50406d4ea1c --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ 
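+# 3x schedule: 36 epochs with lr steps at [28, 34], extending the 2x recipe above (24 epochs, steps [16, 23]).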
+_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +total_epochs = 36 diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..04bd696b9589e37ad34c9fdd035b97e271d3b214 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1c61d36404e712efdce5cbdb06cec6d0a3e1225a --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_r50_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9927f8f07510b2bc6d1c92f397bc2075e38c104c --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cd78b6df320aea7b23412b2f734e8684f84b9822 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc40f26020731817dd3c3ff702427280760e67d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..eac05a64a22f28d597eb4c8b1c31351b52829056 --- /dev/null +++ b/thirdparty/mmdetection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/rpn/README.md b/thirdparty/mmdetection/configs/rpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..773d5e3a3e50dc64525f52502f734f28e614805d --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/README.md @@ -0,0 +1,26 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Introduction +``` +@inproceedings{ren2015faster, + title={Faster r-cnn: Towards real-time object detection with region proposal networks}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + booktitle={Advances in neural information processing systems}, + year={2015} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR1000 | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 22.6 | 58.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531-5b903a37.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531_012334.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 22.3 | 58.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218_151240.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 58.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131-0728c9b3.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131_190631.log.json) | +| R-101-FPN | caffe | 1x | 5.4 | 17.3 | 60.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531-0629a2e2.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531_012345.log.json) | +| R-101-FPN | pytorch | 1x | 5.8 | 16.5 | 59.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131-2ace2249.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131_191000.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 60.2 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131-24e3db1a.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131_191106.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 13.0 | 60.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219-b02646c6.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219_012037.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 61.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208-d22bd0bb.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.1 | 9.1 | 61.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208-cde6f7dd.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 61.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208-c65f524f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208_200752.log.json) | diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e616fdf46ef82fb1de0519541d20156e789f03ec --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b2af6119319c03a8e213b2c352fc48e66bc8a822 --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6908d3001d89ee3efe2b1e508759fbda94b7bf7a --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r101_fpn_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git 
a/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6da0ee94906fd8febaf69786976e478ef8f35c9e --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/rpn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..398f3c14db1d63343b08bd5280d69aaae6c70a99 --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://detectron2/resnet50_caffe', + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..26f95a3402f9fd2d54c5919484e2f4958beb8a34 --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/rpn_r50_fpn.py', 
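+    # RPN-only training: the pipelines below load boxes without labels (RPN is class-agnostic) and evaluation reports proposal recall ('proposal_fast').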
'../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +data = dict(train=dict(pipeline=train_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_2x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3a92d8d3f65776c1fe72c9909c36fca428267afd --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..83bd70032cb24be6b96f988522ef84f7b4cc0e6a --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..979afb97073a92e228ed302dab161d8f9bbade32 --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bb7f0a630b9f2e9263183e003c288a33eb972e71 --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py b/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8c766f05f4ee61273670ce74ed60c91c89beb50e --- /dev/null +++ b/thirdparty/mmdetection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( 
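+    # ResNeXt-101 64x4d backbone: grouped convolutions with 64 groups at base width 4.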
+ pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/sabl/README.md b/thirdparty/mmdetection/configs/sabl/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1deb211ff13f3a32158db84b23695877d3457a64 --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/README.md @@ -0,0 +1,36 @@ +# Side-Aware Boundary Localization for More Precise Object Detection + +## Introduction + +We provide config files to reproduce the object detection results in the ECCV 2020 Spotlight paper for [Side-Aware Boundary Localization for More Precise Object Detection](https://arxiv.org/abs/1912.04260). + +``` +@inproceedings{Wang_2020_ECCV, + title = {Side-Aware Boundary Localization for More Precise Object Detection}, + author = {Wang, Jiaqi and Zhang, Wenwei and Cao, Yuhang and Chen, Kai and Pang, Jiangmiao and Gong, Tao and Shi, Jianping and Loy, Chen Change and Lin, Dahua}, + booktitle = {ECCV}, + year = {2020} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below (results on test-dev are usually slightly higher than on val). +Single-scale testing (1333x800) is adopted for all results. + + +| Method | Backbone | Lr schd | ms-train | box AP | Config | Download | +| :----------------: | :-------: | :-----: | :------: | :----: | :------: | :--------:| +| SABL Faster R-CNN | R-50-FPN | 1x | N | 39.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/20200830_130324.log.json) | +| SABL Faster R-CNN | R-101-FPN | 1x | N | 41.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/20200830_183949.log.json) | +| SABL Cascade R-CNN | R-50-FPN | 1x | N | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/20200831_033726.log.json) | +| SABL Cascade R-CNN | R-101-FPN | 1x | N | 43.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/20200831_141745.log.json) | + +| Method | Backbone | GN | Lr schd | ms-train | box AP | Config | Download | +| :------------: | :-------: |
:---: | :-----: | :---------: | :----: | :------: | :--------:| +| SABL RetinaNet | R-50-FPN | N | 1x | N | 37.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/20200830_053451.log.json) | +| SABL RetinaNet | R-50-FPN | Y | 1x | N | 38.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/20200831_141955.log.json) | +| SABL RetinaNet | R-101-FPN | N | 1x | N | 39.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/20200831_034256.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 1x | N | 40.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/20200830_201422.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (640~800) | 42.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/20200830_144807.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (480~960) | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py) | [model](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth) | [log](https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/20200830_164537.log.json) | diff --git a/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0322006464e158a238525e91449cc81a6143375c --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,88 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', 
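+    # The three default cascade bbox heads are replaced below by SABLHead stages.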
'../_base_/default_runtime.py' +] +# model settings +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4b28a59280e6701d31afeeaae7ae12cdbd4fb95e --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,86 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), 
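+        # Stage 2 of 3: each cascade stage uses a tighter bucketing scale_factor (1.7 -> 1.5 -> 1.3).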
+ dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4c797cad1c693ba3578fd6852f8d055d3e7406fe --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,36 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..732c7ba3f607e2ac68f16acceddd16b1269aa2cf --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,34 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + 
reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7504fe216056e7710caf29935e5cd4fdb1b695fb --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8143af21297eaf40f46217fa7fa65f7ecee2c11f --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + 
use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e2b71bfe673dea67263d0f9bf21a68f7abc48f4 --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py @@ -0,0 +1,71 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..013020105a06f18b4fee33dc65ed3ca5f3ccdcef --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py @@ -0,0 +1,71 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', 
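+    # 2x schedule (24 epochs) combined with the 640~800 multi-scale training defined in the pipeline below.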
'../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained='torchvision://resnet101', + backbone=dict(depth=101), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ce518306b570eba94f71da7da84967b5de7765fe --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, 
weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bb1dad59b6312e9df2742e7775f10635ebb13431 --- /dev/null +++ b/thirdparty/mmdetection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/thirdparty/mmdetection/configs/scratch/README.md b/thirdparty/mmdetection/configs/scratch/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a47ed52af08e7146c8d18bd599bb899df8ce12d1 --- /dev/null +++ b/thirdparty/mmdetection/configs/scratch/README.md @@ -0,0 +1,22 @@ +# Rethinking ImageNet Pre-training + +## Introduction + +``` +@article{he2018rethinking, + title={Rethinking imagenet pre-training}, + author={He, Kaiming and Girshick, Ross and Doll{\'a}r, Piotr}, + journal={arXiv preprint arXiv:1811.08883}, + year={2018} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 6x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_bbox_mAP-0.407_20200201_193013-90813d01.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_20200201_193013.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 6x | 41.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_bbox_mAP-0.412__segm_mAP-0.374_20200201_193051-1e190a40.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_20200201_193051.log.json) | + +Note: +- The above models are trained with 16 GPUs. diff --git a/thirdparty/mmdetection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/thirdparty/mmdetection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebe87d11f41f164882a1d787b26a8c9cc55b4107 --- /dev/null +++ b/thirdparty/mmdetection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,22 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained=None, + backbone=dict( + frozen_stages=-1, zero_init_residual=False, norm_cfg=norm_cfg), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +total_epochs = 73 diff --git a/thirdparty/mmdetection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/thirdparty/mmdetection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2da1750dd3842edcc1e9653e3efc635337941f76 --- /dev/null +++ b/thirdparty/mmdetection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + pretrained=None, + backbone=dict( + frozen_stages=-1, zero_init_residual=False, norm_cfg=norm_cfg), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +total_epochs = 73 diff --git a/thirdparty/mmdetection/configs/ssd/README.md b/thirdparty/mmdetection/configs/ssd/README.md new file mode 100644 index 0000000000000000000000000000000000000000..582292f2dbb6bacc8f3e5ed35cbb57755fd60dca --- /dev/null +++ b/thirdparty/mmdetection/configs/ssd/README.md @@ -0,0 +1,18 @@ +# SSD: Single Shot MultiBox Detector + +## Introduction +``` +@article{Liu_2016, + title={SSD: Single Shot MultiBox Detector}, + journal={ECCV}, + author={Liu, Wei and Anguelov, Dragomir and Erhan, Dumitru and Szegedy, Christian and Reed, Scott and Fu, Cheng-Yang and Berg, Alexander C.}, + year={2016}, +} +``` + +## Results and models + +| Backbone | Size | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------: | :---: | :---: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| VGG16 | 300 | caffe | 120e | 10.2 | 43.7 | 25.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd300_coco.py) | 
[model](http://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20200307-a92d2092.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20200307_174216.log.json) | +| VGG16 | 512 | caffe | 120e | 9.3 | 30.7 | 29.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd512_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20200308-038c5591.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20200308_134447.log.json) | diff --git a/thirdparty/mmdetection/configs/ssd/ssd300_coco.py b/thirdparty/mmdetection/configs/ssd/ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..75c5e4e5b81a320a7e6bd7bc31e7d5cf49a0b92d --- /dev/null +++ b/thirdparty/mmdetection/configs/ssd/ssd300_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) diff --git a/thirdparty/mmdetection/configs/ssd/ssd512_coco.py b/thirdparty/mmdetection/configs/ssd/ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..44d2920f4289c351c27e0d70dc03de0deb064a54 --- /dev/null +++ b/thirdparty/mmdetection/configs/ssd/ssd512_coco.py @@ -0,0 +1,71 @@ +_base_ = 'ssd300_coco.py' +input_size = 512 +model = dict( + backbone=dict(input_size=input_size), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.1, 0.9), + strides=[8, 16, 32, 64, 128, 256, 512], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2]]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 
116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) diff --git a/thirdparty/mmdetection/configs/vfnet/README.md b/thirdparty/mmdetection/configs/vfnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3d6aef3fbc873659583c97daff63682ab9b9dc5e --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/README.md @@ -0,0 +1,40 @@ +# VarifocalNet: An IoU-aware Dense Object Detector + +## Introduction +**VarifocalNet (VFNet)** learns to predict the IoU-aware classification score which mixes the object presence confidence and localization accuracy together as the detection score for a bounding box. The learning is supervised by the proposed Varifocal Loss (VFL), based on a new star-shaped bounding box feature representation (the features at nine yellow sampling points). Given the new representation, the object localization accuracy is further improved by refining the initially regressed bounding box. The full paper is available at: [https://arxiv.org/abs/2008.13367](https://arxiv.org/abs/2008.13367). + +
+ +*Figure: Learning to Predict the IoU-aware Classification Score.* +
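+As a concrete sketch of the loss described above (an illustration of the paper's formulation, not this repository's code; mmdetection wires it in as the `VarifocalLoss` module in the configs below, and the function name and argument convention here are assumptions), the weighting can be written in PyTorch as:
+
+```python
+import torch.nn.functional as F
+
+
+def varifocal_loss(logits, targets, alpha=0.75, gamma=2.0):
+    """Sketch of Varifocal Loss (Zhang et al., 2020).
+
+    targets (q in the paper): the IoU between a predicted box and its
+    ground-truth box for positive examples, 0 for negatives.
+    logits: raw per-class classification scores.
+    """
+    p = logits.sigmoid()
+    pos = (targets > 0).float()
+    # Positives are weighted by their target IoU q, so well-localized boxes
+    # dominate training; negatives get the asymmetric focal down-weighting
+    # alpha * p**gamma, applied to the negative branch only.
+    weight = targets * pos + alpha * p.pow(gamma) * (1 - pos)
+    bce = F.binary_cross_entropy_with_logits(logits, targets, reduction='none')
+    return (weight * bce).sum()
+```
+
+The `alpha=0.75` and `gamma=2.0` defaults match the `loss_cls` settings used in the VFNet configs in this directory.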
+ +## Citing VarifocalNet + +``` +@article{zhang2020varifocalnet, + title={VarifocalNet: An IoU-aware Dense Object Detector}, + author={Zhang, Haoyang and Wang, Ying and Dayoub, Feras and S{\"u}nderhauf, Niko}, + journal={arXiv preprint arXiv:2008.13367}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | DCN | MS train | Lr schd | Inf time (fps) | box AP (val) | box AP (test-dev) | Config | Download | +|:------------:|:---------:|:-------:|:--------:|:-------:|:-------------:|:------------:|:-----------------:|:------:|:--------:|
+| R-50 | pytorch | N | N | 1x | - | 41.6 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco.json)|
+| R-50 | pytorch | N | Y | 2x | - | 44.5 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco_20201027-7cc75bd2.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco.json)|
+| R-50 | pytorch | Y | Y | 2x | - | 47.8 | 48.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-6879c318.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.json)|
+| R-101 | pytorch | N | N | 1x | - | 43.0 | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco_20201027pth-c831ece7.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco.json)|
+| R-101 | pytorch | N | Y | 2x | - | 46.2 | 46.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco_20201027pth-4a5d53f1.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco.json)|
+| R-101 | pytorch | Y | Y | 2x | - | 49.0 | 49.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-7729adb5.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.json)|
+| X-101-32x4d | pytorch | Y | Y | 2x | - | 49.7 | 50.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-d300a6fc.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)|
+| X-101-64x4d | pytorch | Y | Y | 2x | - | 50.4 | 50.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-b5f6da5e.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)|
+ + +**Notes:** +- The MS-train scale range is 1333x[480:960] (`range` mode), and the inference scale is kept at 1333x800. +- DCN means using `DCNv2` in both the backbone and the head. +- Inference time will be updated soon. +- More results and pre-trained models can be found in [VarifocalNet-Github](https://github.com/hyz-xmaster/VarifocalNet).
diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_1x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..09521310523f38be90518e9c7db6856db1225c1b --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101))
diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0a1f569463972dc5b7fe10c35f8fb5d3321a261 --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) +lr_config = dict(step=[16, 22]) +total_epochs = 24
diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f8ef6ec092db2e454ca5359b6df89d31365672c0 --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + pretrained='torchvision://resnet101', + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True)))
diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..be7f075fea00a4570d50fd30f1685139b70a8bb6 --- /dev/null +++ 
b/thirdparty/mmdetection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,2 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8da3122657adc2785129c28a84473c25777abba3 --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2bcf779db008dbbf0c8f3b1fdc84a9940967f78a --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://res2net101_v1d_26w_4s', + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_1x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6875e5f38c4dae0d10888fa90ead55af736b67aa --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_1x_coco.py @@ -0,0 +1,114 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='VFNet', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + extra_convs_on_inputs=False, # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='VFNetHead', + num_classes=80, + in_channels=256, + stacked_convs=3, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + center_sampling=False, + dcn_on_last_conv=False, + use_atss=True, + use_vfl=True, + loss_cls=dict( + type='VarifocalLoss', + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.5), + loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0))) + +# training and testing settings +train_cfg = dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100) + +# 
data setting +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[8, 11]) +total_epochs = 12 + +# runtime +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..24d2093b8b537a365c3e07261921b120b422918c --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True)), + bbox_head=dict(dcn_on_last_conv=True)) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31b54fb8fe1ef3e620198adf851a97d8f9a071df --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) 
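+# Note: with multiscale_mode='range' in the train_pipeline above, Resize
+# samples a random short-edge scale between 480 and 960 at every training
+# iteration (the "MS train" 1333x[480:960] setting quoted in the VFNet
+# README), while the test_pipeline keeps a fixed (1333, 800) scale.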
+# learning policy +lr_config = dict(step=[16, 22]) +total_epochs = 24 diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebeef6ff6640e83378391d3ce7072aa296826c32 --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ed26504af131f3806426fcbd343bb7c4c9e229c --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2e19078e2830a2fa6dd2d3b703b0bbf711b7e1e4 --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4329b34bee03d219cdd94b600055eb5d5a7cc8ef --- /dev/null +++ b/thirdparty/mmdetection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch')) diff --git a/thirdparty/mmdetection/configs/wider_face/README.md b/thirdparty/mmdetection/configs/wider_face/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6eced922655447901bf411b952bd73cddfe0a7e5 --- /dev/null 
+++ b/thirdparty/mmdetection/configs/wider_face/README.md @@ -0,0 +1,32 @@ +# WIDER Face Dataset + +To use the WIDER Face dataset you need to download it +and extract it to the `data/WIDERFace` folder. Annotations in the VOC format +can be found in this [repo](https://github.com/sovrasov/wider-face-pascal-voc-annotations.git). +You should move the annotation files from the `WIDER_train_annotations` and `WIDER_val_annotations` folders +to the `Annotations` folders inside the corresponding directories `WIDER_train` and `WIDER_val`. +The annotation lists `val.txt` and `train.txt` should also be copied to `data/WIDERFace` from `WIDER_train_annotations` and `WIDER_val_annotations`. +The resulting directory structure should look like this: + +``` +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── WIDERFace +│ │ ├── WIDER_train +│ │ │ ├── 0--Parade +│ │ │ ├── ... +│ │ │ ├── Annotations +│ │ ├── WIDER_val +│ │ │ ├── 0--Parade +│ │ │ ├── ... +│ │ │ ├── Annotations +│ │ ├── val.txt +│ │ ├── train.txt + +``` + +After that, you can train SSD300 on WIDER Face by launching training with the `ssd300_wider_face.py` config, or +create your own config based on the provided one.
diff --git a/thirdparty/mmdetection/configs/wider_face/ssd300_wider_face.py b/thirdparty/mmdetection/configs/wider_face/ssd300_wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..d0e89a83d9828bf2188664da22b91ec87cbada74 --- /dev/null +++ b/thirdparty/mmdetection/configs/wider_face/ssd300_wider_face.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/wider_face.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=1)) +# optimizer +optimizer = dict(type='SGD', lr=0.012, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.001, + step=[16, 20]) +# runtime settings +total_epochs = 24 +log_config = dict(interval=1)
diff --git a/thirdparty/mmdetection/configs/yolact/README.md b/thirdparty/mmdetection/configs/yolact/README.md new file mode 100644 index 0000000000000000000000000000000000000000..37d2b46e26bb66d361b35b9d20ede9449702c8da --- /dev/null +++ b/thirdparty/mmdetection/configs/yolact/README.md @@ -0,0 +1,60 @@ +# **Y**ou **O**nly **L**ook **A**t **C**oefficien**T**s +``` + ██╗ ██╗ ██████╗ ██╗ █████╗ ██████╗████████╗ + ╚██╗ ██╔╝██╔═══██╗██║ ██╔══██╗██╔════╝╚══██╔══╝ + ╚████╔╝ ██║ ██║██║ ███████║██║ ██║ + ╚██╔╝ ██║ ██║██║ ██╔══██║██║ ██║ + ██║ ╚██████╔╝███████╗██║ ██║╚██████╗ ██║ + ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ +``` + +A simple, fully convolutional model for real-time instance segmentation. 
This is the code for our paper: + - [YOLACT: Real-time Instance Segmentation](https://arxiv.org/abs/1904.02689) + + +#### For a real-time demo, check out our ICCV video: +[![IMAGE ALT TEXT HERE](https://img.youtube.com/vi/0pMfmo8qfpQ/0.jpg)](https://www.youtube.com/watch?v=0pMfmo8qfpQ) + +# Evaluation +Here are our YOLACT models along with their FPS on a Titan Xp and mAP on COCO's `val` set: + +| Image Size | GPU x BS | Backbone | *FPS | mAP | Weights | Configs | Download | +|:----------:|:--------:|:-------------:|:-----:|:----:|:-------:|:------:|:--------:|
+| 550 | 1x8 | Resnet50-FPN | 42.5 | 29.0 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_1x8_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_1x8_coco_20200908-f38d58df.pth) |
+| 550 | 8x8 | Resnet50-FPN | 42.5 | 28.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_8x8_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_8x8_coco_20200908-ca34f5db.pth) |
+| 550 | 1x8 | Resnet101-FPN | 33.5 | 30.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r101_1x8_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r101_1x8_coco_20200908-4cbe9101.pth) |
+ +*Note: The FPS was measured with the [original implementation](https://github.com/dbolya/yolact). When calculating FPS, only the model inference time is taken into account; data loading and post-processing operations such as converting masks to RLE code, generating COCO JSON results, and image rendering are not included. + +# Training +All the aforementioned models are trained with a single GPU. Training typically takes ~12GB of VRAM with a resnet-101 backbone. If you want to try multi-GPU training, you may have to modify the configuration files accordingly, such as adjusting the training schedule and freezing batch norm. +```Shell +# Trains using the resnet-101 backbone with a batch size of 8 on a single GPU. +./tools/dist_train.sh configs/yolact/yolact_r101_1x8_coco.py 1 +``` + +# Testing +Please refer to [mmdetection/docs/getting_started.md](https://github.com/open-mmlab/mmdetection/blob/master/docs/getting_started.md#inference-with-pretrained-models). 
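+For a quick single-image check, the high-level mmdet Python API can also be used. A minimal sketch (the checkpoint filename is the R-50 1x8 entry from the table above, and `demo/demo.jpg` is the sample image that ships with mmdetection):
+
+```python
+from mmdet.apis import init_detector, inference_detector
+
+config_file = 'configs/yolact/yolact_r50_1x8_coco.py'
+checkpoint_file = 'yolact_r50_1x8_coco_20200908-f38d58df.pth'
+
+# Build the model from the config and load the released weights.
+model = init_detector(config_file, checkpoint_file, device='cuda:0')
+
+# Run inference; for YOLACT the result is a (bbox_results, mask_results) pair.
+result = inference_detector(model, 'demo/demo.jpg')
+model.show_result('demo/demo.jpg', result, out_file='result.jpg')
+```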
+ +# Citation +If you use YOLACT or this code base in your work, please cite +``` +@inproceedings{yolact-iccv2019, + author = {Daniel Bolya and Chong Zhou and Fanyi Xiao and Yong Jae Lee}, + title = {YOLACT: {Real-time} Instance Segmentation}, + booktitle = {ICCV}, + year = {2019}, +} +``` + + diff --git a/thirdparty/mmdetection/configs/yolact/yolact_r101_1x8_coco.py b/thirdparty/mmdetection/configs/yolact/yolact_r101_1x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2864b590b5538b735a16df3b2690b29a95384df8 --- /dev/null +++ b/thirdparty/mmdetection/configs/yolact/yolact_r101_1x8_coco.py @@ -0,0 +1,3 @@ +_base_ = './yolact_r50_1x8_coco.py' + +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/thirdparty/mmdetection/configs/yolact/yolact_r50_1x8_coco.py b/thirdparty/mmdetection/configs/yolact/yolact_r50_1x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9c9a8c8ff3449a013190765c8342cb3998c70dd5 --- /dev/null +++ b/thirdparty/mmdetection/configs/yolact/yolact_r50_1x8_coco.py @@ -0,0 +1,160 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +img_size = 550 +model = dict( + type='YOLACT', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, # do not freeze stem + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=False, # update the statistics of bn + zero_init_residual=False, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5, + upsample_cfg=dict(mode='bilinear')), + bbox_head=dict( + type='YOLACTHead', + num_classes=80, + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + base_sizes=[8, 16, 32, 64, 128], + ratios=[0.5, 1.0, 2.0], + strides=[550.0 / x for x in [69, 35, 18, 9, 5]], + centers=[(550 * 0.5 / x, 550 * 0.5 / x) + for x in [69, 35, 18, 9, 5]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True), + mask_head=dict( + type='YOLACTProtonet', + in_channels=256, + num_protos=32, + num_classes=80, + max_masks_to_train=100, + loss_mask_weight=6.125), + segm_head=dict( + type='YOLACTSegmHead', + num_classes=80, + in_channels=256, + loss_segm=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + # smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + iou_thr=0.5, + top_k=200, + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.68, 116.78, 103.94], std=[58.40, 57.12, 57.38], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(4.0, 4.0)), + dict( + 
type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(img_size, img_size), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(img_size, img_size), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) +total_epochs = 55 +cudnn_benchmark = True +evaluation = dict(metric=['bbox', 'segm']) diff --git a/thirdparty/mmdetection/configs/yolact/yolact_r50_8x8_coco.py b/thirdparty/mmdetection/configs/yolact/yolact_r50_8x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3adcb74a6155a0ab7303ab9ae90ee120f3eb4ad --- /dev/null +++ b/thirdparty/mmdetection/configs/yolact/yolact_r50_8x8_coco.py @@ -0,0 +1,11 @@ +_base_ = 'yolact_r50_1x8_coco.py' + +optimizer = dict(type='SGD', lr=8e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) diff --git a/thirdparty/mmdetection/configs/yolo/README.md b/thirdparty/mmdetection/configs/yolo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..83c17159d9e51d997cbdf934c863a7ae375f3926 --- /dev/null +++ b/thirdparty/mmdetection/configs/yolo/README.md @@ -0,0 +1,25 @@ +# YOLOv3 + +## Introduction +``` +@misc{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Joseph Redmon and Ali Farhadi}, + year={2018}, + eprint={1804.02767}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and Models + +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| DarkNet-53 | 320 | 273e | 2.7 | 63.9 | 27.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_320_273e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-421362b6.pth) | 
[log](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-20200819_172101.log.json) |
+| DarkNet-53 | 416 | 273e | 3.8 | 61.2 | 30.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-20200819_173424.log.json) |
+| DarkNet-53 | 608 | 273e | 7.1 | 48.1 | 33.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco-139f5633.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco-20200819_170820.log.json) |
+ + +## Credit +This implementation originates from the project of Haoyu Wu (@wuhy08) at Western Digital.
diff --git a/thirdparty/mmdetection/configs/yolo/yolov3_d53_320_273e_coco.py b/thirdparty/mmdetection/configs/yolo/yolov3_d53_320_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..87359f6fb66d94de10b8e3797ee3eec93a19cb26 --- /dev/null +++ b/thirdparty/mmdetection/configs/yolo/yolov3_d53_320_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(320, 320), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(320, 320), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline))
diff --git a/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py b/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d029b5cdd6b3dad09b16a6f2a23e66be684a6412 --- /dev/null +++ b/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + 
to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (416, 416)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(416, 416), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py b/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..049984d01cfbf78e09e609e8de381460747faa0b --- /dev/null +++ b/thirdparty/mmdetection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py @@ -0,0 +1,121 @@ +_base_ = '../_base_/default_runtime.py' +# model settings +model = dict( + type='YOLOV3', + pretrained='open-mmlab://darknet53', + backbone=dict(type='Darknet', depth=53, out_indices=(3, 4, 5)), + neck=dict( + type='YOLOV3Neck', + num_scales=3, + in_channels=[1024, 512, 256], + out_channels=[512, 256, 128]), + bbox_head=dict( + type='YOLOV3Head', + num_classes=80, + in_channels=[512, 256, 128], + out_channels=[1024, 512, 256], + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=2.0, + reduction='sum'), + loss_wh=dict(type='MSELoss', loss_weight=2.0, reduction='sum'))) +# training and testing settings +train_cfg = dict( + assigner=dict( + type='GridAssigner', pos_iou_thr=0.5, neg_iou_thr=0.5, min_pos_iou=0)) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + conf_thr=0.005, + nms=dict(type='nms', iou_threshold=0.45), + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (608, 608)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(608, 608), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.001, momentum=0.9, weight_decay=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=2000, # same as burn-in in darknet + warmup_ratio=0.1, + step=[218, 246]) +# runtime settings +total_epochs = 273 +evaluation = dict(interval=1, metric=['bbox']) diff --git a/thirdparty/mmdetection/demo/MMDet_Tutorial.ipynb b/thirdparty/mmdetection/demo/MMDet_Tutorial.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e5fa16217d7ee872273d9fe145caefe99a4a1e31 --- /dev/null +++ b/thirdparty/mmdetection/demo/MMDet_Tutorial.ipynb @@ -0,0 +1,1699 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "MMDet Tutorial.ipynb", + "provenance": [], + "collapsed_sections": [], + "toc_visible": true, + "authorship_tag": "ABX9TyP5/C3WJm9hrSsq7gTU5Ezx", + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU", + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "f774902af44d49cd9cd6049f9e1d9527": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_9748115624d645b7bca53c0170d13f20", + "_model_module": "@jupyter-widgets/controls", + "children": [ + "IPY_MODEL_ee35e58fb3794e68815a25d2798399ee", + "IPY_MODEL_a3110b0aab414ba3b6fcbfcb68884e17" + ] + } + }, + "9748115624d645b7bca53c0170d13f20": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, 
+ "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "ee35e58fb3794e68815a25d2798399ee": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_0cfe88e3a7224d988638ed69169d091d", + "_dom_classes": [], + "description": "100%", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 94284731, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 94284731, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_ad4b96f52e8345fca66b853b74f77e09" + } + }, + "a3110b0aab414ba3b6fcbfcb68884e17": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_8327d294e11c4ed0b1002f58122c16bb", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 89.9M/89.9M [00:24<00:00, 3.93MB/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_fb500fbfc668479ca7ee264a69fa3b6e" + } + }, + "0cfe88e3a7224d988638ed69169d091d": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "initial", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": "@jupyter-widgets/controls" + } + }, + "ad4b96f52e8345fca66b853b74f77e09": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "8327d294e11c4ed0b1002f58122c16bb": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + 
"_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "fb500fbfc668479ca7ee264a69fa3b6e": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + } + } + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aGYwt_UjIrqp", + "colab_type": "text" + }, + "source": [ + "# MMDetection Tutorial\n", + "\n", + "Welcome to MMDetection! This is the official colab tutorial for using MMDetection. In this tutorial, you will learn\n", + "- Perform inference with a MMDet detector.\n", + "- Train a new detector with a new dataset.\n", + "\n", + "Let's start!\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tJxJHruNLb7Y", + "colab_type": "text" + }, + "source": [ + "## Install MMDetection" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Wi4LPmsR66sy", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "outputId": "7405a303-9fc5-4ab1-ca4d-6f4a82590fce" + }, + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2019 NVIDIA Corporation\n", + "Built on Sun_Jul_28_19:07:16_PDT_2019\n", + "Cuda compilation tools, release 10.1, V10.1.243\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. 
There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "gkGnB9WyHSXB", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "91dd3587-20d8-4692-be03-288ab9c42a08" + }, + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.5.1+cu101 torchvision==0.6.1+cu101 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full\n", + "\n", + "# Install mmdetection\n", + "!rm -rf mmdetection\n", + "!git clone https://github.com/open-mmlab/mmdetection.git\n", + "%cd mmdetection\n", + "\n", + "!pip install -e .\n", + "\n", + "# install Pillow 7.0.0 back in order to avoid bug in colab\n", + "!pip install Pillow==7.0.0" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Requirement already up-to-date: torch==1.5.1+cu101 in /usr/local/lib/python3.6/dist-packages (1.5.1+cu101)\n", + "Requirement already up-to-date: torchvision==0.6.1+cu101 in /usr/local/lib/python3.6/dist-packages (0.6.1+cu101)\n", + "Requirement already satisfied, skipping upgrade: future in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (0.16.0)\n", + "Requirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (1.18.5)\n", + "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision==0.6.1+cu101) (7.0.0)\n", + "Collecting mmcv-full\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/af/ac/72d570062951e090139a790acfaca495d655f24f008c8ae0c2140cff0eb9/mmcv-full-1.0.2.tar.gz (234kB)\n", + "\u001b[K |████████████████████████████████| 235kB 12.3MB/s \n", + "\u001b[?25hCollecting addict\n", + " Downloading https://files.pythonhosted.org/packages/14/6f/beb258220417c1a0fe11e842f2e012a1be7eeeaa72a1d10ba17a804da367/addict-2.2.1-py3-none-any.whl\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmcv-full) (1.18.5)\n", + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from mmcv-full) (3.13)\n", + "Collecting yapf\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c1/5d/d84677fe852bc5e091739acda444a9b6700ffc6b11a21b00dd244c8caef0/yapf-0.30.0-py2.py3-none-any.whl (190kB)\n", + "\u001b[K |████████████████████████████████| 194kB 33.0MB/s \n", + "\u001b[?25hRequirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.6/dist-packages (from mmcv-full) (4.1.2.30)\n", + "Building wheels for collected packages: mmcv-full\n", + " Building wheel for mmcv-full (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for mmcv-full: filename=mmcv_full-1.0.2-cp36-cp36m-linux_x86_64.whl size=16253442 sha256=932fdc61b0b91e88fd72c32d3d63681ef303415bf8ae341309c270deb96a4f4a\n", + " Stored in directory: /root/.cache/pip/wheels/60/a8/df/94b37c3935d4ef3035e66106787bcf0096a86a92ec77f9fe6f\n", + "Successfully built mmcv-full\n", + "Installing collected packages: addict, yapf, mmcv-full\n", + "Successfully installed addict-2.2.1 mmcv-full-1.0.2 yapf-0.30.0\n", + "Cloning into 'mmdetection'...\n", + "remote: Enumerating objects: 18, done.\u001b[K\n", + "remote: Counting objects: 100% (18/18), done.\u001b[K\n", + "remote: Compressing objects: 100% (17/17), done.\u001b[K\n", + "remote: Total 12220 (delta 3), reused 3 (delta 0), pack-reused 12202\u001b[K\n", + "Receiving objects: 100% (12220/12220), 11.17 MiB | 3.78 MiB/s, done.\n", + "Resolving deltas: 100% (8365/8365), done.\n", + "/content/mmdetection\n", + "Obtaining file:///content/mmdetection\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (1.18.5)\n", + "Collecting Pillow<=6.2.2\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/8a/fd/bbbc569f98f47813c50a116b539d97b3b17a86ac7a309f83b2022d26caf2/Pillow-6.2.2-cp36-cp36m-manylinux1_x86_64.whl (2.1MB)\n", + "\u001b[K |████████████████████████████████| 2.1MB 13.1MB/s \n", + "\u001b[?25hRequirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (1.12.0)\n", + "Collecting terminaltables\n", + " Downloading https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz\n", + "Requirement already satisfied: torch>=1.3 in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (1.5.1+cu101)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (0.6.1+cu101)\n", + "Requirement already satisfied: pycocotools@ git+https://github.com/open-mmlab/cocoapi.git#subdirectory=pycocotools from git+https://github.com/open-mmlab/cocoapi.git#subdirectory=pycocotools in /usr/local/lib/python3.6/dist-packages (from mmdet==2.3.0rc0+5e69769) (2.0.1)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.3.0rc0+5e69769) (2.8.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.3.0rc0+5e69769) (1.2.0)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.3.0rc0+5e69769) (2.4.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.3.0rc0+5e69769) (0.10.0)\n", + "Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from torch>=1.3->mmdet==2.3.0rc0+5e69769) (0.16.0)\n", + "Requirement already satisfied: setuptools>=18.0 in /usr/local/lib/python3.6/dist-packages (from pycocotools@ git+https://github.com/open-mmlab/cocoapi.git#subdirectory=pycocotools->mmdet==2.3.0rc0+5e69769) (47.3.1)\n", + "Requirement already satisfied: cython>=0.27.3 in /usr/local/lib/python3.6/dist-packages (from pycocotools@ 
git+https://github.com/open-mmlab/cocoapi.git#subdirectory=pycocotools->mmdet==2.3.0rc0+5e69769) (0.29.20)\n", + "Building wheels for collected packages: terminaltables\n", + " Building wheel for terminaltables (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for terminaltables: filename=terminaltables-3.1.0-cp36-none-any.whl size=15356 sha256=321854d81cdc7fbc8ce194cc7bbb226b4bbb99344d52068a088e6ce1a63d0f86\n", + " Stored in directory: /root/.cache/pip/wheels/30/6b/50/6c75775b681fb36cdfac7f19799888ef9d8813aff9e379663e\n", + "Successfully built terminaltables\n", + "\u001b[31mERROR: albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Installing collected packages: Pillow, terminaltables, mmdet\n", + " Found existing installation: Pillow 7.0.0\n", + " Uninstalling Pillow-7.0.0:\n", + " Successfully uninstalled Pillow-7.0.0\n", + " Running setup.py develop for mmdet\n", + "Successfully installed Pillow-6.2.2 mmdet terminaltables-3.1.0\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "PIL" + ] + } + } + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Collecting Pillow==7.0.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/19/5e/23dcc0ce3cc2abe92efd3cd61d764bee6ccdf1b667a1fb566f45dc249953/Pillow-7.0.0-cp36-cp36m-manylinux1_x86_64.whl (2.1MB)\n", + "\r\u001b[K |▏ | 10kB 19.9MB/s eta 0:00:01\r\u001b[K |▎ | 20kB 27.4MB/s eta 0:00:01\r\u001b[K |▌ | 30kB 32.7MB/s eta 0:00:01\r\u001b[K |▋ | 40kB 24.4MB/s eta 0:00:01\r\u001b[K |▊ | 51kB 15.9MB/s eta 0:00:01\r\u001b[K |█ | 61kB 14.7MB/s eta 0:00:01\r\u001b[K |█ | 71kB 13.5MB/s eta 0:00:01\r\u001b[K |█▎ | 81kB 13.2MB/s eta 0:00:01\r\u001b[K |█▍ | 92kB 13.2MB/s eta 0:00:01\r\u001b[K |█▌ | 102kB 13.7MB/s eta 0:00:01\r\u001b[K |█▊ | 112kB 13.7MB/s eta 0:00:01\r\u001b[K |█▉ | 122kB 13.7MB/s eta 0:00:01\r\u001b[K |██ | 133kB 13.7MB/s eta 0:00:01\r\u001b[K |██▏ | 143kB 13.7MB/s eta 0:00:01\r\u001b[K |██▎ | 153kB 13.7MB/s eta 0:00:01\r\u001b[K |██▌ | 163kB 13.7MB/s eta 0:00:01\r\u001b[K |██▋ | 174kB 13.7MB/s eta 0:00:01\r\u001b[K |██▉ | 184kB 13.7MB/s eta 0:00:01\r\u001b[K |███ | 194kB 13.7MB/s eta 0:00:01\r\u001b[K |███ | 204kB 13.7MB/s eta 0:00:01\r\u001b[K |███▎ | 215kB 13.7MB/s eta 0:00:01\r\u001b[K |███▍ | 225kB 13.7MB/s eta 0:00:01\r\u001b[K |███▋ | 235kB 13.7MB/s eta 0:00:01\r\u001b[K |███▊ | 245kB 13.7MB/s eta 0:00:01\r\u001b[K |███▉ | 256kB 13.7MB/s eta 0:00:01\r\u001b[K |████ | 266kB 13.7MB/s eta 0:00:01\r\u001b[K |████▏ | 276kB 13.7MB/s eta 0:00:01\r\u001b[K |████▍ | 286kB 13.7MB/s eta 0:00:01\r\u001b[K |████▌ | 296kB 13.7MB/s eta 0:00:01\r\u001b[K |████▋ | 307kB 13.7MB/s eta 0:00:01\r\u001b[K |████▉ | 317kB 13.7MB/s eta 0:00:01\r\u001b[K |█████ | 327kB 13.7MB/s eta 0:00:01\r\u001b[K |█████▏ | 337kB 13.7MB/s eta 0:00:01\r\u001b[K |█████▎ | 348kB 13.7MB/s eta 0:00:01\r\u001b[K |█████▍ | 358kB 13.7MB/s eta 0:00:01\r\u001b[K |█████▋ | 368kB 13.7MB/s eta 0:00:01\r\u001b[K |█████▊ | 378kB 13.7MB/s eta 0:00:01\r\u001b[K |██████ | 389kB 13.7MB/s eta 0:00:01\r\u001b[K |██████ | 399kB 13.7MB/s eta 0:00:01\r\u001b[K |██████▏ | 409kB 13.7MB/s eta 0:00:01\r\u001b[K |██████▍ | 419kB 13.7MB/s eta 0:00:01\r\u001b[K |██████▌ | 430kB 13.7MB/s eta 0:00:01\r\u001b[K |██████▊ | 440kB 13.7MB/s eta 0:00:01\r\u001b[K |██████▉ | 450kB 13.7MB/s eta 0:00:01\r\u001b[K |███████ | 460kB 13.7MB/s eta 
0:00:01\r\u001b[K |███████▏ | 471kB 13.7MB/s eta 0:00:01\r\u001b[K |███████▎ | 481kB 13.7MB/s eta 0:00:01\r\u001b[K |███████▌ | 491kB 13.7MB/s eta 0:00:01\r\u001b[K |███████▋ | 501kB 13.7MB/s eta 0:00:01\r\u001b[K |███████▊ | 512kB 13.7MB/s eta 0:00:01\r\u001b[K |████████ | 522kB 13.7MB/s eta 0:00:01\r\u001b[K |████████ | 532kB 13.7MB/s eta 0:00:01\r\u001b[K |████████▎ | 542kB 13.7MB/s eta 0:00:01\r\u001b[K |████████▍ | 552kB 13.7MB/s eta 0:00:01\r\u001b[K |████████▌ | 563kB 13.7MB/s eta 0:00:01\r\u001b[K |████████▊ | 573kB 13.7MB/s eta 0:00:01\r\u001b[K |████████▉ | 583kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████ | 593kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████▏ | 604kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████▎ | 614kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████▌ | 624kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████▋ | 634kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████▉ | 645kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████ | 655kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████ | 665kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████▎ | 675kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████▍ | 686kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████▋ | 696kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████▊ | 706kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████▉ | 716kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████ | 727kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████▏ | 737kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████▍ | 747kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████▌ | 757kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████▋ | 768kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████▉ | 778kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████ | 788kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████▏ | 798kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████▎ | 808kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████▍ | 819kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████▋ | 829kB 13.7MB/s eta 0:00:01\r\u001b[K |████████████▊ | 839kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████ | 849kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████ | 860kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████▏ | 870kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████▍ | 880kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████▌ | 890kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████▊ | 901kB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████▉ | 911kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████ | 921kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████▏ | 931kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████▎ | 942kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████▌ | 952kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████▋ | 962kB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████▊ | 972kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████ | 983kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████ | 993kB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████▎ | 1.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████▍ | 1.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████▌ | 1.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████▊ | 1.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████▉ | 1.0MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████▏ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████▎ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████▌ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████▋ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████▉ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████▎ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K 
|█████████████████▍ | 1.1MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████▋ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████▊ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████▉ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████▏ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████▍ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████▌ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████▋ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████▉ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████ | 1.2MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████▏ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████▎ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████▍ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████▋ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████▊ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████▏ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████▍ | 1.3MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████▌ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████▊ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████▉ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▏ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▎ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▌ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▋ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▊ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████ | 1.4MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▎ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▍ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▌ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▊ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▉ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▏ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▎ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▌ | 1.5MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▋ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▊ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▎ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▍ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▌ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▊ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▉ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████ | 1.6MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▏ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▎ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▌ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▋ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▉ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K 
|██████████████████████████ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████▎ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████▍ | 1.7MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████▋ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████▊ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████▉ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████▏ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████▍ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████▌ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████▋ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████▉ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████ | 1.8MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████▏ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████▎ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████▍ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████▋ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████▊ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▏ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▍ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▌ | 1.9MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▊ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▉ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▏ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▎ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▌ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▋ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▊ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████ | 2.0MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▎| 2.1MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▍| 2.1MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▌| 2.1MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▊| 2.1MB 13.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▉| 2.1MB 13.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 2.1MB 13.7MB/s \n", + "\u001b[31mERROR: albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "\u001b[31mERROR: mmdet 2.3.0rc0+5e69769 has requirement Pillow<=6.2.2, but you'll have pillow 7.0.0 which is incompatible.\u001b[0m\n", + "\u001b[?25hInstalling collected packages: Pillow\n", + " Found existing installation: Pillow 6.2.2\n", + " Uninstalling Pillow-6.2.2:\n", + " Successfully uninstalled Pillow-6.2.2\n", + "Successfully installed Pillow-7.0.0\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "PIL" + ] + } + } + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "6hD0mmMixT0p", + "colab_type": 
"code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 85 + }, + "outputId": "ec4c578c-09f5-4c05-b255-6688af9fed50" + }, + "source": [ + "# Check Pytorch installation\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# Check MMDetection installation\n", + "import mmdet\n", + "print(mmdet.__version__)\n", + "\n", + "# Check mmcv installation\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "1.5.1+cu101 True\n", + "2.3.0rc0+5e69769\n", + "10.1\n", + "GCC 7.5\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gi9zw03oM4CH", + "colab_type": "text" + }, + "source": [ + "## Perform inference with a MMDet detector\n", + "MMDetection already provides high level APIs to do inference and training." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "j4doHX4exvS1", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 224 + }, + "outputId": "894b0cac-0749-4f7f-dd1c-b5b811ec4305" + }, + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth \\\n", + " -O checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--2020-07-13 14:48:13-- https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth\n", + "Resolving open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)... 52.219.56.132\n", + "Connecting to open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)|52.219.56.132|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 177867103 (170M) [application/x-www-form-urlencoded]\n", + "Saving to: ‘checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth’\n", + "\n", + "checkpoints/mask_rc 100%[===================>] 169.63M 28.9MB/s in 6.6s \n", + "\n", + "2020-07-13 14:48:20 (25.8 MB/s) - ‘checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth’ saved [177867103/177867103]\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "8M5KUnX7Np3h", + "colab_type": "code", + "colab": {} + }, + "source": [ + "from mmdet.apis import inference_detector, init_detector, show_result_pyplot\n", + "\n", + "# Choose to use a config and initialize the detector\n", + "config = 'configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py'\n", + "# Setup a checkpoint file to load\n", + "checkpoint = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "# initialize the detector\n", + "model = init_detector(config, checkpoint, device='cuda:0')" + ], + "execution_count": 5, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "Wi6DRpsQPEmV", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Use the detector to do inference\n", + "img = 'demo/demo.jpg'\n", + "result = inference_detector(model, img)" + ], + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "UsJU5D-QPX8L", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 595 + }, + "outputId": "81bf0470-65c8-4a2b-adcf-edcd77d9cd48" + }, + "source": [ + "# Let's plot the result\n", + "show_result_pyplot(model, img, result, score_thr=0.3)" + ], + "execution_count": 7, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA1cAAAJCCAYAAAAsrj1sAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9WayuWXrf9XvW9L7ftIcz1jk1d3V1Vye28WwTMDY4CQlB4oZIwA03yLkBrpDITbhB4gohFCEhcoGEhEh8ASIyMQ6O7Fgdk9htpz10u7u6uuaqM+xz9vgN77CGh4v17l1lZAck3HKEvn+Vzqm997ffYa1nPcP/+a9Voqrssccee+yxxx577LHHHnvs8f8N5k/7AfbYY4899thjjz322GOPPf7/gH1xtccee+yxxx577LHHHnvs8SeAfXG1xx577LHHHnvssccee+zxJ4B9cbXHHnvssccee+yxxx577PEngH1xtccee+yxxx577LHHHnvs8SeAfXG1xx577LHHHnvssccee+zxJ4DvWXElIn9JRN4Wke+KyF//Xt1njz322GOPPfbYY4899tjjnwfI9+L/cyUiFvgO8BeAT4CvAf+uqv7Bn/jN9thjjz322GOPPfbYY489/jnA96pz9ePAd1X1PVUdgb8D/Fvfo3vtsccee+yxxx577LHHHnv8qcN9j677IvDx577+BPiJP+7Dxona1iIAAiiAQUQREVQBTWj9AapQtP59AwW5/l2ZriuCtY5cMqogCPqHPwKqFAptEygpg4KxDjVKLgUtIApiFTFgEFAoWREpFARTpF5QBOsV5wIApRTGIVGKYm29uyqoGlzwaCloKZRSUDLNzBBHZYyKCDhrb55RFYz1WG/RXJ8LhEys71QKWurbGbF1rHT62kxjU6bLiWCMJ8aEqiKi10NIHaXrIVWcdZCBYrDGMaaIsRkx9do6jbkxgsjNqJJGUFWQen9VECP1Xp99bBq3m5uTxs89g9QZN4abdxABIwGloKqUQr2vCKrVVupj6GfXv7mBgZxRys23bt5VFVFhvjwg5UQpCS2JlPP0NNejY0AzOQn15QW0vmPO9R7GVNtVLdP3wDmLNUpRQ86gRVHyZLcyzcu13RfECmIAU6rdls+9YwZKAmsQKxgnGDFogRwTJWfK597tety4/pvrNUWdD70ZKBRFjFCK3gzh5yEiGGvxvpCL1ofWOle5KOb6PtQ1aqwh94WSpY6dGKDUaTcG5xzWG4RCjJkY8+fu+/kHEIyRula0rmD5vB3pZ9ark12KqT7ASH0fvRkURbXaq4hMz1rH2VqH94FSCruuo2kCzlrGcUQBayxFlZILRat/qrZfr1uKUnImNDNc8KQ8YCSSxmof1V7BoHhrsc5iraEfRxaLFYohxsSu21Wr02kdi2CMoZRyMzbV2q592mef+2z91j+NMdy4g5vfrHNTSsGK5eDgkKZpOD09JaWEdxYjMKaCakHEYKzHNyAFcqrfWywP6LorShkn+3CIsZjJ4FJK9H2PNQad5gbqvNQ1LDc2OpnRzdOLQNHqz6w10xqLgMcYU+NAqc+n6B/yXAjV75r6uyXX91YyxirWgfHVrww7JUdDCIG2sWw2XV23k9O8fsabJytUu3AOVaUfOprGT/OrN35YP2eX12sCMZ/7ntyMxTWu56T6NkWvDebGug0WoZhqw857vLOkNJJiZLlaoqps1htUqe9edPLzIGIo0+IXmGxKP3ePz0VHLdPvyc23uHmSaXytBYRSMta66bknmzEGI0Iu5bP1aWSa+/pMqvXaWpR21hBaT06ZlGq8/WzQa4zLeYpzYgjBVt8h9Z5DnxHnsGZy75N9cO1SJv/njKnr+SaBqDfS63ERIfiWcRymaZLpvXJdS/LZ71lbr1mKklO1O2Pqzer7VbsMjdxMfRwLYOrcIjfjW9c6N+Mkcj13hZsfaP1+9V2GoulzU6bX//JZYJ3sXgvWmpt30c/5CjGCtTXG5aSfjRXggpBTqTFHHWILJdWbyORDi5abOCvTpOm1V/rcetYCxoGx9bniAMboZ89YdLINbnIeEXNzLWuF0ASGPlFKvnnLa+dx7Vuu50aujeB6Dvnsv0Xk5hp/OM597nM3f2p9L5niy/8t8dQp/7l95zZ937Pd7ii51Me6+axM8c7e5LSlFHLKNb+xijFKKZacaw5QU02Z7OQPL8HrWFbj1+eeaVrjZpqX65xXpvfOudqwtQYQxnHAGHuzlpumwVrLdre9TmkBmfzwZ/YEmbZpKAopZsYxYqzUfPkmLbr29ebGLosqxlrMzfqvsdroZ/NYijI7rLlvjqAZ/ErQnKnrpsZSLRkbQNWhxaJq6toXoUzXSXHAmXpfjGCdRVQZ+4xMMabmp/KZH5wWqlzPN9W/GGsnO7SYybftrvrnqnqXPwLfq+Lq/xEi8nPAzwHYYLj7L9yqiYZ3KEIaDcKAaEFwHKwsu+GUcRyJMTMOSoqfFVpcJ+9mSs0LUKBpW4zx9WsFcYZkIpRMKZFUEmpmBGt4eHyfOYHhbMsJa5jB6y+9weuvvMrhg8Bu/S6ffPyE88eR7XOHbQvaj5SrEZOgWTToi4XLc8PB4QFHR3M09fzOrz/j4csNs5WloAx5R1gJZXRosoDl7LTj9quZq6vM5TOQ3tMeRkQdFnCmkGiYH0LrLHlUzs8TOVheeOFVln6OTZn1+ikigXZh2XY9J88vaRcDlogp1VBjghGh6w2lZMSA9cIYAWpSZVDGYrCDp1wW6MCoQWeBWy9E2lnGOCVqoJBwEsgjDN3I7dtLVDrWV8puU+cnhAyt4ltLCILTSLQWaxu0FNLYk5wQe4PmGlRyTvgADx+sEJSSlNu3jug6oesifSwMWTDS0DQLYkrknKeE1RJjRFGMKCIZUwKbk1NS32OcJV1ELHVlizVIO+PgtRfpux6RTNvA8WJGysp2t2bstmg/oMZgd4YyWoadcnHR441g24htBddaGCz9VWFMShHFN8KbD+9zNXTsNj19l0jG0MxA3IiYWsTFTUPfKdkUxCthJiwPVxAK4jxFhbHbYPuCzGZI4zGtYzFrOD87Y3u+Y+gUSsHIVKhJHY9mVvBtIo+OPDpKhHbWERO4ZLEqYCNjA0hDHjJ5yBixZJchWxTBWHhwx3N25RnOM04Kt14CsxqJPVgrGCuMKXN4d8XzdxL9qWKiEFoLRYlaiQkrAe8sNiQ0O9Jo6fs1YhJGLGh1mkrBGCGljJaCsTWI1/hZUM04BKeQyagpiFXamSWEJdvdQD+MiBUe3r/D6fOrmpw6T84jVmCzHnnpxVf5l/7cT7FcHvC3f/5/wjpD2zbsuiusceSsGOuw1jKMia7rWCwanDWUIWNkx1gs84N7NMs52Z1Q4prLZ56+i6QScbpgNd9wazmnbRaozrnYbPjiW2/w4OGLjGPmH/zKrxJjDb7WWrz3OOfYbrdTUCoUMjFnjBasdYgIMSasNTS+QYFUIkjixVdWXJ31bNeFlAPNIgAZJwFnPd4Jb37pTX7ndwduHxzx1utv8vZ33uHx+o
qc801yfnjb89rDB2x3O/oh8/CFL3N5dskYT0i5IyZlfaUc3Tlk1gaMFFLumM0bhu6Cq8uB9VUiZcWLJ5ZEISFSCGaGDYWUaiHrfSCmKwyBo6NbHBysKLqmDfe53J7jQ+Dw8DYpKm9/+3ex1uC8q/4kJlQTxnica/Cu4WJzxsFtw3xpCHPD7NacIW05eVfZPHc0zZIf+dHXeO+7H3L2bEO/izQNlOJIeQQtiChjhKNbS2azGaqZzTbzwsNjdpvEbhvp+4hzytDXos8YwTpHI54ihjgmck6IFW4fHzDENBU8YK0j5kxKiRQjYz+Qc01kTAg0sxm3Vwe8//EJaYyslgccHh6z2e7Ydicsbh3UYjQ0jGOPcYY4RlJKQCG4ifgpimBYrFasrzY3iawYQ2gb+r6rsQGwIjUB+lwF3IZAzjXpVFVyyjgXiDGSS6qJnQiz2YxhGLkuWlIeCcFNSa+gYhFjiTny4KX73H9wB+MMH3/yGB9aUlFyVl64d5flzPHk8WPOz87p+8h80fDCwzuERoh54NmzLaZZsWgblktLMxMungxcPlsTVRHrmTUzHtwzfOfbHzD0iuDQXHOHkq+LNs+d27c5v3jCwTG0raHvMk8fdTjnEbEYY5ktPC+/3qDmiO1V5vzkikFHbHagHUhCbaDre5zPWDuRPWqII4y9ImqwbiI5nOfg8BDrDOurc1QdkGtcm/xA13c4E/C+wXlHtx3BjKjWuXIu3JAIilDUoCWQuGI+a2naBmsdz08vyAV88ITG0cwtISinT3cEL8zmnssdNPOE1UIehNhbbi0du3VPjI6Sa3HnXFP9g1GMVYoKOYN3BhXICt74Gs/nGRMyfRw5mq3wLtGvE9uLTBkN1k3XMYIRS9dFshZWBwsWywbVyOYiE2M3FUeCtZ4iWotAlUoCeUFKLWBEwHtHyglEbgitYRjIOZFjAQVr7Q155Vz18apKSokQKplijIOieO/JOVNKJWubxnPr9jGb7QZjIcfqxzDXxSw0zYwiymKxxAdHTondesNyZZgvFefg7BRyXpLtFVlHclFK8cybFd56Ssps1htiP2ImEkumKqiUgnEWY2u8t1OBnXMmp0TKidDUn10X2c5TY2Cpa9k5z+HhioPYkHK1K+MM8/mcq8sdQz9SsjJrj/HeY2xkHHs2G8gT3afGghiMGhyGrIlMQrVQo5InGA8oUSPRNlB2NMFjiiVuR/7cv/4VuqvM1eOad730M0s+eNpjhyNmbUN7mPj4+TcJu1uUFBBmBH+M+OpbQ+uwVrn45EPOn3c0S4/xQqIw88LVScbZQggFYxPbLaQExlqc9UQtzBoPVMLLNRZlRcyXzGeWtnUYJ3znH/Qf/nE1zvequPoUePlzX780fe8Gqvq3gL8FEJZOvUystgp5Yr+EgqFgTWW1c65sedGpi2Qg5Ws2fWJQDFMAEDDCECPB2Zpo2socGApJEyJSF5RXch/p+oEwh9l9w2wL28uIbiOmF0x/i83jx2xOHN1lIg7KQORLr894+l5h2BTaI8vpruNqY/DNyCIGZFDStuCMMp+DaQ27wUGzYds58hhoZgbfBNYXW0pW2sYw7DIxKkrCmRr0ch4pKTFmQ461Gs89XD664O7Ld/ni669y68738/4Hj8hETk6fc3G2RooFYmU9RRBNxDTgrSVNzk/LNU+jiOrEdxjGPOCcYIJCGlkdLAihILY6rcYrKSumZHJU8k7pfGR5B5YH4J3QrR1xp1ifa1JYhJIF4yBrphQoailScKIkyZVh9MKsNagmvGuxvsVISzdsGIsFG/DeI8VjxOHtxIoaqTy2MYgWKmfuavCZNdjgcTawOTtDx1TZx1CIOXL17BnkWqSXpgbC2czQzuY4GxhlRxwTNIqQcSUynye8ERa3PBqUUadkyWVs9miCPkW2vbLrIuJgfuzRuQWTUCuVGFDB2UIEvHW1Q7COJDLmUHE2Yw1k6UEagqtsWhlG+jigMqI2gwGDxRmDC0IphqJCjIYwA+8Vo4WsghiH+IwmrUmGWmhqYYLRG0axqEGlMk05w2ZTiKNj3FGdabKEJpJTTcSy6ER0FHwTSEHJY6KkBKV2T4sWUuoYcczmDmMsgpBzxNZ2cU0QCpWhxVY2aWI+P2OaqQVgyVNHCKypY2qK0A8DQ4ykXLAY+on5rIlhJqWIbxt8CKRSOD27YBgyKpCyMsaMiCXlzNBHfFCcdxwcrBCBnEdiKhhxfOHl1/j48QlD3DKud0Td4K2h8RZNmTxMxaB1DGqJPaSYmK+OuHv/AfP5in64JKcEpXarxFT2bBiGyuiWQtaCAi+9+hrdeku32zKO/U1CkCfG/NrvpRRRUvWPuTJSOVfmL4SGW7dWPH7yESntuP/gLX74J38SN5vz6Ku/ymq+4P79uzx88TZf/Uf/J/7lOYuFIZVLPvr02xwt76HRM4w7tt2aBIg5JuWMtUozm7Fc3UF8xxATw6CYWH13M6tFo5HCsE0MMQIZa6rPNqbO627okZ1juQhoAec9t+7c5eWXvsAffPPbuMZMzG6haJm6OoFSDGNUinaoSWgRxq4wdo6xh23MpL4SMP2w4fRsy3w5Y7cd6HY9YgJCgWwmPrsgUohxvOkoFIWrqy0lfdZZ1Gv7lNo5dFbIJeFMAH/NGo837G2ZuhhN49l2fS1oy3XXUadOXSaOPZvOMvOWUSMxbrm8TFAMjXPsrjbVixdAC3GMlFxw1jCfL/CuMt5DN9L349R9ZVpHZUq68jXVXW2plKpgmApALULfJ4yRWiSK1J9TbYrruFoyYxxvOmbGGFI21C3Z1C7MNFjOWNYXWwTHg5fuo2UkDoYyMcdX6yukzFGdmGMSYx8ZukhtFxXmxx6JkPqeHQZNhm67wwZDExwUQ+oi47hAxZFLpOSEaCHIApFI8Ib5POBch/WF2cLSziDlgvcOY2vHzxpBJLG+gpQH4qioK5RUcGTEZNQIGI+zA1I8JQtl8k8lKwcHR5RS6LttzV8w6NRxERcZdwVjLNZ4XBNoGwcIKdYuQuMD0UVSyYhUH6VIJYBEQGr+ZKzBaINmIfWFbBOawUz/yJR3OReYrQRXIi5n5niO2rusLy6J3Ygp0EhLu5xjwpykcHr2nHGoXWtKgVxqAXrTKa9EdxZQUyjJ1G6lQju3SKkxm2IwrqpojBisqfbivSIl0QaPM47z80tUa3Ekhik2ZYw4MLUzZ43Bu0BO8aZ7IjKpOz7XMaldtilhVCXnzworY2pHtn7tJ3uunXM/dWivr+2mzs+TJ0+BqRtsagvHiCDWYqeO59gPWCPk5K+9BCU7Yg9RCnGIqAyINXjX4EWI46RwIk+Ex/U7XcdnboorzdwUV6hSUsYYAWdIWadu2HUHuM6Vau3g5ZxJacSYMtlTASmUCJs19LuCFoOxSs6R3W6knV13FR1liKidOq0qVZ1jlKzXY2GQDEkSuUyFt4BTAQKk2lgJjefe4iHvPX3C7Chw69Vqmz/0lR9m+1zZbq+47D5liJAGi+iU+9Ez9pD6DVYdzdKyWhjW6+pFJAsOZYjjpMSo019EGFNd11qULBkTD
Niqh8BarJ0hwTBvDLZUn9En/0fVPjf4XhVXXwPeFJHXqUXVvwP8e3/sp4tiS0HVkHIipoxki5GCtYo1he12TSHVdqjWat3Y+ru1uDKIVmNDp3apNbVqzxGcx9m6aDQmNNVOjBNLkh7rA5urK5TAvS8csnItm0c9Z5+c8lGccXwR+OjtgU3vyHgwmSFuePnNQ64uI51k7L3A9qMR7AIVJQ4jw7OIKYqziWYOzcKg2hDDlpRg6BTrI4tl4OqqVsmNFwY/Uoonm1S7BTisU7wtpLEQk1T2pO84+fgJr9/5Iq+9/AY//bNf4Rd+4Zd5fPIMb69oGyWXKl0yk6QkWEOaGNUShRi1FhQ3E1ILLEOmBMV7S1hYzAjLA480XU20RZjNC8NQkJRQLzgrbDY94SDQzDK+UYwazi4VnwRTFKiL1FohlpEMYCaeJRakVAcVgiU4Q7eNtEdHHB7c42q9Zt1ljA+4MCO4GZoKkhVnDca6yoZrxtiq4BAVjDgSieZojnUOZxvGiw692uKkFhNFRmQLXisTNvaZR5std+8GDle3mS8OcM2K7vyS3g1Y29N6ZTYHJ5nFrTmjKuudkkUIkuEqMHaGHEcu+46+TyyPGlZ3Anqc2G3GSipMjr5pDNHCrF3AqGyeXlCGkdJVZyBBibkWpH6SH+ZxpB/WhKOADQUzgo4Ob2E2M8QMw6DEWKWPbSgUU7uXRjxqlSKFnA0me5zNJDKFMrXyMyU3YGOVHaij60fEZqwzWCwGXxlHH6bEMGNFKGMihAadGYZBEM0TmyhTWMtY8TR+DmoY0kgpBYdDMZNkoyafzjmcs6hCSrF+XwtuCsS5ZLIU5t7SWMGIEsWxGSIxZbIqRuHycoOITpKeKskNbUNoHKqZt995B2c9KSfaZk4IM3IWuq6rATZlSs7M53MEy9n5CX0caeaWBy+9wQdPn5PSDlJhsx04Olpx0NZI0ncGaxU1gVFnlORIQ+Heiyvu3HnAZtPz/NklcYxYG24YxpQS4zDiJkkQpZIrr33hC5w+OuXJ40/p+x7vLTnXRKHKTaAJnu1VJiWDMYoxaUpyIjlbXPB88Ut/ht/6rV9FSBzcWvHaW1/m7sMHfPO9t7lzdIsf+L6v8CM/+n38H7/0awS7IvjAdrfh2dl7HB0uUEmkMhLLhtmqoZ3VTkAtMhzGrEgiSGuZrSx5FM43O1bLGYtZwBl4NJxgpPoBU3V9iDqUwma3ZYgjxhzSpUvCqmW1usXhwX2ennyV0NTiN6c8yTssIgHEULTQjTusa4jjSOqUMhS2F5muZBpbaJraaX128pwXXjiknfm6LMWgMoIxlGIn6bFOct+arORiuFoPOAloEQRLSXmSE06JcPAY78h9xIeA85a+6+j7iBpuiqvQtgzPT6cYVuOY97VwMaWSBarC4WJGnDn6OBDTjrmpBFMSEFMl3Ot1j3eBYjPOO27fOprk1YYLruj6RJ7YbqbEElFSijUhEzNJl5TGenIZQQUrFmtrMZ1SwojBWY91NYJYazDWEFMh5zTF3evE1xJCS0lTgqepFqAi7DY9ORvu3ruHtRDHhHEOH4TT02dsLmYYqUoWZ6jFwliwDtxMmB3MMBvLZbehWxtSH9h1Pbfv3mI+a4i7xPPTKzZXK4SASFWviBaMCVAS7cxxfLyglHVNvCYZd84QmoDqgMh1cQWPP1mThojxgl0klJroi0uoNeRSCAhNMyNnJcZMiomcLcvVITlHun6D84YxZnZdh4sFGIjRYs2SEBxtY1ksAoJns95hxeGtx3uFm2IKUirEEZpgq9hRMs5GTJlRcmGMiVQKVg3We6x1WGNx1uJtw8HRjLxZI9std5o5X3n9+/iDb/wBZ1dn+OBYNAtWs9vMDpeMGhmHntP+ZCKZaocmeAOmTNL3SjKozahkcvJIDljX1vmLnhwdoiO4ASMOZ1xdd0UJocGkSXqZE3GM+MmGRZjiVkY04K2ZZKeKN54iGch8tn1gkmLWf6kS/0lurYWSC9bV7g1Ma1KhaRqMNbWbnBLOWNKkkqm2YIlDpGhdZ8ZYEnGS3ineWUII9P0IWgusHCOIokUYOmHspRZ4BcayY2ZbvG8wzlJiTxojscRauMj1Nomp+8tnhSFT0RhCYBxHihYaH2rhrYVcUpWBipm2h1Q1SJk6185VBURKBWP0hqQc+0JJDfN5YL4wDPGScZxkvVqLY8mTCmgaN7QWKlIpAwyWooL6oW7VESEboRFwZUbO4ILj7sMjXjh6kW/uPuHwZc8rP3DMs5OOn/mxf42P3/2UP/j27/Ped87poyFg8E2LCZ5gdoy9kDcbogrJeYKH2SHENZCFtnFsuuFm/RYU1bpOnam2WnLtTCIZEYdxM7w7IPkNR3eEslb6jZKk+WcWQd+T4kpVk4j8h8DfByzw36vqN//4zxe67hJbGiQ0kBVn6iKDTCwJExrMSF20UhhKplA19TkrQ0y0jSOXBEVRKaiAdYIjYYPHWGEYRk4/Oa1PNQ3wwgqLu8pujFy+3/PBNzesFg45gqfpGeuTZ3zRf4PlYeDw6DbN8jbuwHJ12fHBO+f0QyEhvPfpQMqBzEguQomKjIHV8cB6Wxg/TTgjrE9GrkLdB2YopIvCCw89JS65uujZbQfszJBjrHtvXNWPOhsYd4XF4gBv4dGTM2bzGS++eISdKc82zxjKn+Fbbz/iW9/9p8RyyfLQ0ndjTVZjhlJw3rCYeS76UmWVGQoZ6+3UuaqdHw8kFxipHQcnng/eveT+yxY/85hGoXGodaSYkFlmebtwegKPLhW2teUazIh7CHrucN4xPxL0MJEG8BlMUbKkyuoDsybUJNpk+l3k1q07vHDvDW4fP+C9D75KxuGdRwTGYYeWHcE7BE8p1Qk2M1f3eRQzvd8ALuJzdWBXbFm+eUD3NKG7AZfhnpuxzgnpEwczR7tqeTIoTz9esw47ju7c4s6rrzE/WnK5OSegtAZc2XG+fcbJdocmj7ML5seW4weCRKVE6EfP1faC4+MqD7VLz85bUlzT2sq6ZWMxy5aHy0OiJkYii5cd7Dr6R9BfRNQl2qMDUsmM1KI4F4glEbc1oDlbnZxmxToLNqGkqpQeQazFiGJMZaycgLQ1UPW7xNG85arfVadoAQ8Wi3UZJZPLWGUfwTI7iLR2ZLZK9KOipSbKhogQ6bsBv7Ic3F6w8LcYtw3rp2uuTnr6bQQvhGAwoiAZ75TgGryZVZbNFsQkstaAcXh4CMDTp08maUaaAmMmNA27bsOB8ZUp1CpBaBqPykhKCWsMy+UBy2VNWMcxkUvkpZceosWw3UYuz7e08zmL1Yy2mTNrFjh/gBbhyeMnjOOOvt9x+vyExeKYpllSijBuB37vd36Pbr1BjOKcZzU/JqfIVd4yDCDScv/WbaIrNPMDcoRNWvP05Iy/8/P/M+urDeMYWS0W+KCM41j3fgLOCqLK8eERqRROzk75+td/F5uEcehuCjHvLTENUwfBEUfHcn4XYzuELSI7KAkfLGKUzfacf/JPfpOSC01r+f1v/D6b7uf5L/7Lv8l//eM/y+nzT9hcnHN2cg4o
IcCz51ecPDpnNTvm8aOnLI8is1XEBE8/GB6ffJujgxc4PnrA8eEDBp5xdv6M5fKIo9v3uDxVuv4jgplTiqNLmeXhAT/1Uz/Fh+99zMfvf8TF2Sm+tfhgyarEGPnw/ae0weI2Ky4uEt/51gm73Qbna5GsRSnZ4AN0u5FSdJI9z9G0INNX0mHuiVJYzG7hQ8JJZplGnj15yv17B4SmIbQNMVqwBbGVqcm54KynTB0YMQYrlrt3b3F6elrVE1R5phgllYz1c1588Bp//q/8Zf67v/nfcOfwgOXBimEY6cYdLlSxTCnKZrOlZGqiKFS5joGu6/jCF97k+37wB/mRf+Un+B/+q/+Wu3deRIOhzx3x7JL33/6Q1d3bvPWVt/iR7/8h/u7f/QV+4id+jLPzE548fcR6s8GI4/DwkNVSpuLFojkzxISWgrWWMdeurbFV/hZmLd4J2qO7MrMAACAASURBVNWk8+jogL/8b/wlfvHv/QPOz58TY08/rIkpYD4nObqWHzWhIefCOI4Y41jMF+y2HTH2pJKYN3Nizty6fcx8seTRo8cc3jri+fMzrFgWi0NSUq5Oe5zLdW8KQFmyXidycbQaagfBFo5vL5iFY+btXcaH59y53XJxseO025Kt5+TZJ1hXuHPvFtY6Tp6c4LzQ9Zl2tuTO3Zf47a/9NqH1fPrBDmuEo8Mj5vPC1WYH2RPaQ1558TXeef83aOc9St07vbplaLae4Ar4gc6MfPpeYN4kUhkYxhFrVjjreHZ6QsqRMSYk1X27m+1Q90cBCLhJxplSZLdNWGlZLmas1x0ffPCI1bzh/oMjhtSx3Y3EIeLdAuemLiQW1JNLJIRQVSPDgPOhkpqJKcm1nG7WuL4hdRmh4cW33uQv/pW/ymr2v/LRd36H3K1h2dKEBZvLK66uzkibDS/fP6bve7ou0w9K7JWsCWPdtAeosFw2uMawXQ902xEdWvoBUIemkVK25DEynx1grAcVSkmIQMqR58/PEBGWyxZHYbeLxFQ7OGHmiHGH6BJDg2CIaV33Nl93W286WKZ29K/32eVKzjtnwdUO1DiOVVZnHdY4RCzeBdpmBlIVBMH5KRHPxDESU6wEXywkCimWiawrlKL0/cg4RKzUbmElxwpZR+LYEdyStlkyn83YPHuKqmHX9ZQy0jYrrKkqoirvrVxsLvlm733ttIEPLSBsNptKzITAmBM5xaoaAZLGqbiYmg9F8T5gbcAaTxsavvz9b/L++++y3W340pde5wd+8It89dd+h1vH93jllZe5uHrKBx98wPPnJ2zWHeNYUFLdz5UrOea8Z6cdXkHHqrbwtwOrYNg+cyQEf6hYzXiX8RL5whff5N//D/5jTp5+yGAGhCOW5Q3e++QDzuOWs/ge6/guMT2niYHbB/c5uH+fg1vCcfM+z558zLMPPWPMbM43jF1Ln1rKeo6kQglbFvMV2UCWkZRGhpQJvqForF1pWwvvNETa5ZKHD17jzdff4Jd/439heWmRtVB6jzs8pB4n8Ufje7bnSlV/EfjF/1cfFtAW1AtYQx7ztOHN1sVgHThDY0HziBTDatayWfeE2ZLZfIULgY+efIy4DLFqfsekvHT/LrePbrHedFxcbtGU8VZQYzGmSrvWo5KfQnYWaWExz6gdCepRC8vjhh/+sS+wvA0nz9e8895Tfv+dgSOfWdqAK5aZjwzpvLLMGYZ+TvZLXnhlztPTT0k5c9gq9+4BB/D+p3BOJmKw0bJZbwnzI1bF0GCIcYuzglpHZmrpWoM1C1556cuEZgb2Xc6vTrj3xi1eeHWBtmv+x7/993n7ve8S9RLxO4ZYB9g6S46QtLbRd4NyuS3kNG2SripKCkLRukmycZkuTRUPBmuE2dwSGotvCrZRZjO4eH9AEawXbJC6yWocMLG2grMWQmOQVWI3JnYfGg4P5timq5InhZSU4B0lF+YrQ7sw5CLky5H7L95hsVoyjoUgHptGZJOwjdDOhW2/Zb3rCP6A4FbMmhZnTJWQYlBTnVm2jjJtUmysoZFMuROQEvDqscw4vLhgs/T01pAdHCnExRxc4uJyzfo33+OLP/gDPDi4xUV/zvnuDNcrKRVsyfSXmYvTRJcWvHL3EDMf0BDBw+ErkeHpFk1LXDxgERz+uK3taEAxuDCfCheAzNAPYIUhbxEs1jfgW1aH9SgTMQ2+PcSsHM/PziA5DAZ1CgaGPOKCMJsFlmJJsSeZKgEsTrAG3NggIaNtRlaJfhxgMJUlN0rKBVyPxpp8heApeWBkx2wJvjGonwqAZodxinGKxRAjoDuSjAxmRzr0HN4Xbu8Cu6eOj77Tg/ZcXlwiUgmEo9UxZxdbrK8JbSmZEFq63cBm8wgjQts0DENf3YcxdY/XmPB+zi5mskIbAtsu06WMs4bWN4Cy7XpKsczawHy25OH9h8Rx5IMPPmWzGbCmoX/yGOMys3bH4UHiwYMHjIPQtC3IQC49QxzIV5GmndGEA3IqXGzPK3NXLFnBUvfNCC1tEzhaBhqJPHjwMh+fnHB2fkG/7dlta9dxdRiwdo5GQ9GBw6MD+nFgu91yvfF+GIaadCuM2y3WR9RMh+aIJZexFthi6voW2OzOgIQR8H5epZemqVLHccfmcoMWS7OAcXfCN771K/y1v/Yf8eaX/yJNu+Xy/EPe/r1/irGJL37li3zJvsG7793hH/7KV1ku52w2W6wrGPHk3jGMlpkLrGVgt/mIpskchZc4+WTDexefsFodkobC46tHHBwe8cJLL9G0S+7decizRxcohm6MuNCQxwhSsMawWq2wQDdsOR92bOwJoXGItsS4JaWqbjCZGizVYpIhlSuKK8Q4kKOgNNjQMFwOlVEPQmoXbLuBR08v8U6ZrwzPH+1YLqR2wlGcUVKJjGOksTOsOErJPH36FGQSsU1y1Ta0IIbGtWw2HV/7za+TisE2LXfuv8DLr7zK//ZLf4/DZoaR672DinP1moZKKOQcaZqG84uRb33rlCQf88H7p+SPPwEtNKHljT/7Fj/3n/zb/ON/+Ovcvv0y//K/+Ve5yA2zxnLe92zGT/jkyROcGrabdWW9JaFqJznu5/b1FyWVglND3RqheNNy/5Ujdrue3bbn/XcfMYybKpUzM8YxUvd7m+k9ypRY1wMt6qb/2sHq+wEXHEs/JyZPN25xtiGlkV23pht6FqsZB0dzdv3A40ePmc9XmLbQzuqBFeMuc/H8CjVLUEhpZB4Lh3cCs8Hg4kCOl2wbi8+RXg2ElsWx4rZuOogjE8cB5xtee/MV3vry9zGOA+++9zbbtEVyg6jFmQZnW8bYczB/gayKMvL42fssbllee21Bvyl8+lHP3ftKepZYzRaIF86GgeM7A9vLelDWctGQsyNrph+7Gtepe4tmM6kdCexE0jXksiNnQC2j8RwcFJpWODxe8OW3Foh2DKMnniYkK20rpLRD8UBNoJ0VskLf7272T3rvcSqUnEgpEruOnBO2XNE0c5rZEc83a/763/gbHJrE8cxwtFzx3pOn3Hu45PWXX+Ol/CK/9dv/iIPFETlekfxYyW+1DMlhvSA
2gozszgOhqQWA84WUBlqrVWIvDpo5WTOaIGstqpyFH//JH+Xd775DCA3Wep6dnLIIjuVsnAgPy/Ozc0INmChjLSixNI3/Q53/YRiqRNe5m7+Buu8qT9tOct072IS2Ki5U2W6rX/He4Xzdl7Xb7W7IA7GGMkCKIz54nPdY6xEM3jc3st+29VVx4msnK4TAkLaMww4jluASzSxyfKc2C5zzzGZHqFq2m67uZ8x52iZSD9HQm4WrpJRpmirRFeOnznDtdCMGcQ4pirEykdOGrutges75fMVysSKEho8+fMTlxRZjGlJ/h2F7xMHqgOenn/DRx29zdnaF81KJAOdpZpZN2lai1hRyGStpYC39aLDBMT+2PHzL8OHX1mTfsLiz4M7rMw5Wwp0vjnzppbd48fAtduMFw5HlP/vP/1PW6yu+/f53iKvHfPOd3+AbX/s6z589Z7a8y5//4X+R7alhebhkTGt+73cvubpoOLxnyOdw8RhgyVfe/AIn5VM260tU58RtlT/iS1UkjJlUaj7pWk/TNIjAUAQngd1mzTvf/TrL4JnZBa++eQxF+d9/+9v/zLLmT+1Ai89DqRpkLQllgGxoZjIZe6SMI42ZYQqUVOrnpCBa+/VOhFnTsGxbunFEp5NQXMlsNjtQxzjmqj13lnbZ1JNNbKGIIEMhbes+GUMBO8mHqcXd5jLza7/+iNUhLO6ABrhz35IuM2lI2IXHBEO+MJiSMcUwjD1d3DI7eIWf+dm/QPFP6MandLtn3HvgOLua0/e1kxSMoMWRxoFF4zk4WHB6knnhQcOzrmMTMxkheMNI4d1PPq4bV40SZnC2ecLuW+d8OwW6q0DfP8W4hBODM5DFUkqqwV8gAduxkKZOspHaXK4ng5jrs9zI2dOagbgx5M4Qc2R5BNhcGU3nqM0PQ1HIY0FjZeC8dbUboYVUhHFUWmsRFVIUzp723Hq9tuR1WxhPM3qkFK8MQwIUcXVT62bd4c0F3kaMGemGNTluCLM5TXPAcn7AcJVIKSM6EpxH1NXi2VR22eDwCmlUiILrDU4K3s0gFUqnrDdbfNwRl45UHKZ33PILjI1gMmhGuw2Pv/02r/7gG4QAfjDElOl3BRkFXyxz69hse8a+oXRKArL3HM4KKontsKMvpyzaBqQhJkXE4UPAzwWrkIee0g3I5cCzD05xNPijBn/UQjNHNxHvd1UKO2R2u0iwgViqLhsnFE3EscpoyQVMPZkILYgB7w2ow6gnl0yxBdMauqtEjga0nogjpu5PohhiUoYxcbBoGXOPeoM4U6WFWreT5J1nGAwSYHE0VGkvGRjwTaF4xy5CN1qcs6S0w3tXpb6mHrJydLwipjSdsLRit9tVLfi0x+o6QajJdCHfiFotfUxkLRQLai1TuXkjU0ma6Euh343s/MjxwW363RZRizUOMHUjftwyjiObzZrnzz3bzUhKAxAxLuMMrK/WFGKVr9iAb2eUmGrxUwq2gBdPKhlvOw6WPYu55eNPvsv5OhJjIjSwaAMpTydgidAu5lxebvBhhZjaZdhudpVRKx2VrTRQ6kmMRuzNIR+5uHp/JgkhGdVIop7w1ybPop1xebXl8PY9XnzlNd56689y9uwZlxePeP78OSfPL/jkg39MmyM/8sM/xGwR+Mb2nPmsZXV4l/Vmw3pd6HeZpsnMwz3QRNYdd+8vOH2WGMctsyU8fPEB3RpOzz/l/v07vPHaLWK+wDS32e12rBaHvHD3AWPZ8su/9Ct86Utf4qd/+l/lnXe+w9e//nXauZ9OhSvE1LHegGkSxlIPJCpVAiVisWLJkhnHmpzUEwkT3tYCnGzQmEh5i2kUDULCUtRQduBwbC4jB0ee+XxG29bDNpjk5vUUsrqvUa9PaqM2SUVzlaVLPenvWgqWC/8Xc2/2Y1t6nvf9vmmNe9eu+Uw9sefmJJGiRIYibUuCYwSxERiJFUOAguQ2f0TAOLlwgNwEuQkEO/BlAiQ3AQJbNmnJEgxbUqQmxW5OPfcZa961hzV8Yy6+VdUSYOsiyAULOGic7j51qmrvtdb3vs/z/B66rufs7BQbHM+ePcPFwP17zyFVjXMBJfP2+SZb5b2nqWsODw8oioKTkycsdmqO9ktq03Pn7j5mUVAoSVs1vPLWV9BxTr/p+PO3/4x/9Dv/iG6z5OrihMuzEzbXS0pVQcz0RinVZC/Naq7SGiZ7kA+R+U7elkshcphfSlbXG8ZxZBxH3nn3bYb+xqYUMcVEQAs+Z3yUmrJceRlws2HP6klAFxIhM+Ap9DlL2vdbGPMS8ez0khde3qdpK67UhqF3HB7NGYaR4BNVXWGKJS449udz9nYL7PIJQpZcL1fgN+i2h4MDrh9buq5ns+m5Xm6o1ZyuWzGbF9RNydB7NmvL++89pJ1VHBzss3/QouXIegldJzHGc3S8y2o14O0GGzp6L5hpydNPHHYUjK5iedGixo56p0AUgb7bsNiD6AQplUhpsL5j0/VoXVAUGUBQ1xXBB5x3pBjQMhDTtCSZDs/GKLptz2ynQBmFqRNxrNDKIOiAQFGUlJXAlAZrPeMYiC67N4RQ6Ame4Z3FaDPdGzJBrpm1OOdomgXzdoGPK0R/hmoX1PNDdu4d4U6/j9Yl66Fj3G5YD4Gy99gk8Cnho6csFT7F7CAChKwgCJKTeWEmEy46osuRgTS9UUSKGfxSFNk+5wY+/OADhmFECIm1ntXqmlgX7O4cUs8alBb4aLm62hDilENSIeeNnOOGxBtjzIS3jHLMz41JDYebDJZgsVhQmOp2IHPuJreVcN5h3TjxALj9M5DjHMpUKCluYylxotpJKdDKIGWBNiZbMAtDVVbIvsKPAi0TxoAyW4oqkzZJkmDTtCSKFMaQdM6O33z9+SMPOULkJYPSisKUeD8ydB37BwcUhWF1fc12vUJFhRQapSQpSaSWjG4kblO2TQeXlw5aUVSSi9VP+d7vvYcdtyA8MXmsG5CyATJAKCWHUSWagURJlALEAGNk/3jO8Ssl8zuJjz64oN2raXdb9KxFljUvval49OwJ3c4xQ33M6fZnrMaW6B9z8uR9PvroBwgpePfH/w/Hhwd88dU3Odx/jk8fXfGlb36Fi8dPeO/dT3n0yZq6TqyuG7CKqowEtebpo09x/ZAjFAT6bcd81hCFxU7Z5pzR1zTtgvl8xsXTJ4gk8XFDP1q8T1Ra0NaK+awBBLMaxr9irvm5GK4EWZ6NMcuWylQI4REktIwYLVEpwJgIYw4sK5Hx1J6YvaxiQEZQAQJ/Icw+jMS4uQ1uSi0wtc5bNi2JUqCVp48RGZlw6+QD6YS5tDby4adL6gLuDCXtrGZRt2izT/dkBVIi8WgMjpAVoOAYxoHtEPjVX/9Vzpc/45OHgouLS4a5xFQGuZUwBkSZ82b4QFNXNKqkF5ZSFWjjUcKTYn6Y+xi5Wl0ilaSuG4SQbLsN635N3AqChbr2aKFQ6AlOIYjhMxxuTCKjboW4+WazLzpym7PJA5NGyYng1yuKWtDMPOiQoQIi4a3I0AQP3gucm6hS8rMXV8pMH4whv4mRCe88PtXImN
GuwufBR5cTYtlnjGjw4F2i3w6M5GD2ODrskHAelFE0u0Um6SR5uy3NN72IVlAUihQKCIHeWsKQKL2G0CN2MuXNbyzdyjKrI9EHQoQYBMFERJKEHqRPFFUCeUk/7hJlRn+GwjBsBDIptJEUM0HtR5K2xJApOYJM50ORbXWsqdwAYofxChQRNYNQe2IA322x/UDwHruxmKpAFwZTl3gL4zrS7ELyHrvK9snqQBGkJ6hMURIp4kfJot5hb3cGjJwvz8GIaci68etLogKhEknloOnNuyDjyfNrItUEl3CAqwCHLHKuR0iJCBoRDW4l2V4IKBM7ixKpHELkHJ9SHpcE2zV01xFQ+YGs61ukufeOWVtTUqBNRVnWVOWK6+Ul3nMLElBSkpT6SzStNNkcUgBcDuIrlaYMDEit0BO9KbhICpbNesvQj0ghMErnjKJSyJA9885alssrUlQYLVDaYErJ/sEuH48nxOTwPmJUylvnm1MlE24cQSJgSpjtCFQR8xbSTXhiIxEyZuBFmJDiRuJjXmIYrSiNJ1SC3YMD1LR11VrywXvvIYWZLFx5jRmjnmAK+auI0WMKg/O5FiIQSKHAu5Ax2VJTNbt869ufx/XXfP/ttzl99od4d8Xy7CecPZ0TosAOHVpp3nvvfVbrnmdPr9C6wPvEq6/8IsjAs/MPGfrLTIRTniQc1vcs14n1Zs0Lz73OG69+kevuEx4/W4HU1O0cU9aEMXB2fsHXfnmP1157nXZnwR/98R9TpYLoI0or7t45YlxolqsTvB/zISZEIg6jZhSmQheC6/U5ushe+pgiVdkAEJCkpPN2NXmkiJRKYUQiek8wmjA6bE/OFukJNCByRiDGHHzO0dEchJdCTdUH+Z6YSBPAIdsDow/EkIgXl0gp6bqeeHqGFJrCGGKwKKlz1mM6sIm/oGS98cabrNcrdhYt+wct7V7B8y+/xP5zx9OWveELv/ANvv/7/wpnB9abJX/+p3/Ezrzk2ZNHjP0AIeZlipQT5WxSksjbW6NNhodMqOa6KvOGMSUKNS2Qhn7KtAUuLs8nG9EU2pc583Y7UaV8+JQq5ybjhPTNKgFYmxBSEqdakRTzYTaKQN02GWagSooyP+c/vjxj7zDb+OKUq1N6snBpQd1UpL4AWTK4JclaSiOpxZzVRce229D1HcOmp5iVjLZnv5izWOxydWG5uliyvFqzuztj/6DB6JKjowOU6NlcW7quw7maYRgYXE9kRCjN2Bk2faQoWhY7+8Sg8MGy7iJYTz8EZKGhEEQL3kVGZ8nckwniIxNKCEgm2/bwORcZb2oH8kJJ6UQ3WKyFbivwIZFszQ3B0gcQPlG1JYXReBfwLt97MxRlwnILiN4hjJ6UsgzMKosyD0ExDzbOrWk1lIUkSUHvQYuSYdtx0m3ot2tGF1l1A967nHlM+f6S8yoTvCTl6yImUCmDZ0TMdmcpcxrnM1hMzmTHifh5cnJOUZhJ+ZS0bcOXvvBlpDRstmsulxcoOd3v0s0bj6wmufCXgRbypiblMzXrhiZ4AzerqholNeM43masMo3vBjyT33ta64z1JtdNCCkxxhBjyNfX7dIlTX8vFFJQFOUtiTDnrDOBNhN2M+xjNpsxdIpxSNjRcVM7Y6Y81A3sJk7giJv7vBA545hSzkInIi74qYpIT/nphJ7uYzHl61XIDLOINr+Ozo75+aprQrJcr7acn24oy4qqLDGFpq4bXnz+ZZ4+PWG7XZNSQItMtvZTapapHkZXhmpeUu+AVoYHn59hu0QQkrre5ZXXFnz63jmnJxtC/JSV/3Ps6gHXq6esVh/SuU+QqmV1+oznX3vA0eF99hZH/PGffciXyoLr7TVnZ09JJMoq4XqfycRa4AvHuD5DyPweCyFDKiJT1RL5HpViQheaZtYym+/w9MNPaOa77B/O0YVg3HYoCdvVwJncIKWmMoYsVfy7P34+hiuZKSIpRISMlIXC+g4RFW1VcjCv6Jc915vIMGS/tdaKEAU+CMaxg8stUtj84FNxgkDEacruKYtq6hgAWWVVRel8EY4pUlcCaRO4RHBgEESdL+gEyDIxjomrxxF2DHcOD3jr1Zf5yfX7bEWPCj2NcCwZUDIikIyD45NHT3jw0uus3hVYu2TdPWR8bNEyEjqJW4PajagyooWhUpodqem1YbMOmHrGXHucH7DdSBISXWXinHORZDWpdChZUZQaimXO2ogyD1UuQIokL3LfR4IUJDIktJAkkZn1IubQbiSBDPnwgcmebAsaw8HhHNFeEUXKA1J0+FGj24TvxHRjBiEVQboccpRQVJmQ1F9NXv4y0UgIY4WNIykFijlYFWlNhS6yyhJSYrTZkuL6RL/doGWDHRXjAP3g2I6nHKaWal6SRAFM1CAF0VkqKZmVhrEvsHFg1a0Ze0+bSvT2iqZsiYNjGC2DiMx3KthqtAWdAkO5QYWa/jyiVWLvxZL9VzVXy8fIVKNVQ7Ezw25KAp7CROrWctQGhNxgFAglUNoRkwWTSKUE6fBjj/CB8VOViY67kkF4cD5DLITH15Fmv0BsEzIqRNDYy2vW5yNNo/GDZ30+IoTP37vK6mvOukr6Jdx96UW++PnXiWnN//29f067MCgZCd5CHEAbTAHRSHwS6FLmjU7wSBRGa+xgUdU0dAwav6kQ1YjBTZRGg5EKXMF45Vg/ckQlefDiDtFswXiUFMRkUVHhrqG/TNmGetMRozKGN3iL9wO7+4fU7S6FmXPn+ICf/KSn22YFOmvTMVtJU8R5l+8lySGkIiGxPuSODcBPfUGlKdEq9zRFHRExcXZ6gtJ5+zctHgnOZvuFyPS97eaae3efQxuFlJGmNXzpC18guB9ycvYMNwwEl7Ah+/al0FkpkY7e+WzPqQrKnYJNN3J0fIDz12y3GW273nQQw+2gSxqzGjKpT4Wpme3s84WvfoV21tLWNUYKfvzuu9TFLB/sb8LOIg8PiUBKHucCi50jQr/BugEfA/0YqdqG7XbDj3/0I54+uuZ//p/+R956/VVKXfLd3/2nFFXDZuj5l//6D3EuEr2jKQX/5//xvyFERVnU7Cwy5v6b3/obmELzb/7tv+K73/u/uH//mKppiCny/gefcnHVoZOnbfd48ZU3Wdsd/vRP/inoxDhIxlHSGMlbn3+N3f1jdvbu8Qt376GK30HIxDh46nrGV7/yDfbv3OP3vvvPePL4CSkKtIlYP4IUlFXD3uGcPlyiiwA2IJ2gLmvG6BmDQJcVjalYrVdUYmRPa5pSEeaScihZ9RE72IxSFxG8BJUPKyFEFIoUpwNUErkHzcdcIzItwKQ0xDRg3Yj3gDRstwPlrMU7ix8szx4/pCkNfZ9VK601bhizI4Cs+JyenvLmm2/xs5+9x2w+p93foXnumM+Nihdff4WREpcq3vyVb/G//g//DWqn4M6DQ+LgGbeX6GhRRhCVnAYROR38MnVSTP2EpjDolDfigRxk9z7knIbOtqRMZZMTVl4SokNNQ1nOQAWKQhNCuLUwtW1LMWWunHNYOyJkYhiHiYaokFGTQs6mCKWoiorju8dZ7VEFs
9rQbT+k27ZZjZSwuV7nISFaxmGgty1ycUigQpYNSYwgQamB9bajX3V4aylQeAaEgPlsn/3FPT5wp1xvTlHas95InjzVKFHx4le/xt7uOU+fPOLhxw959Chi3RYhA6aQlEXFZjniRcli/4BXXnyF8+6M0/4pp6dDRlBrSb8tSClh/YDdRnwYaeodUpR4F8B7hpCzNVVZQiqIfsTGNSlKmKothAhEAtuuZ7VOkyshMps32EFleuvoqaoZeIW3A846ijJv9W56IFMMk0sAlDC5EyhADArtBd2w5PL6FC0NR9UMbRTrbsXFesvB/JDl2QnbfoV1I0IUbLYrYspVIkIohsEhRaCYQAq5diDmoWva8UoyaEzIKt/jYMpMOobeE0K2uUGRVc0wUjclr7/xGv/Ff/lf85Ofvc0f/MHv8977H3C4dwikiXKXXSCFqRhDf6vw3HYTyRs1/7McFnBrE0wRBjvgnJsGK4E2kps+Q6Uy7EFrTVXX2ZI65ZvENLT4CS6Rey6zCyiDdSRlXSPJSlq33eKdxbotWlek1OBtw/HBLtdhi+077DggZAau3NgZs2ujuO3Dg0w4zNeXza+FYIIXSS4v81LHaIWc4BrOZ4u00nL62U0DZ3Rok1WtEDzbjcN7MFX+PnwU6KTZ21vwjW98k+9977tcr5b57ws9UgtSsoQkUUJDGRhc5OxZpus9eP6Qr3+r4e1/vmTcKF7Yv88bb7zEmBy8gwAAIABJREFUD/7lQ07O3uFJ1yHaM/TVikMfmO1sqfcj5++vaENks/Z88uySJ6dLHp58xI///Cd8/NN3OF1+yuFzLTNjWZ6NGdiEQUtoDkeGwdN3ELYF7c6CfliRpEMYUFoTh5GyLmjairI02MFz/GDBq69/DlMKHn38KdvlOZ+8f8GnaUtRlujdEuj/vXON+s53vvP/fSr6/+njH/z33/mOWeTQsC5KvNKkQiNEgx/h+mzJ+nHPch2I02Gj244EC9HqW9k7pYgw4GUiyoQSCVlAlCGrYilhyhJ0gZQlMgrC4Dk/jxwt9mnnmro1mLJgFRzJO6J14BNtu0tVCw5mc3Z3G9q9Gt/tc/CiIZgV/XDN5nxgKwwiRKSX4KAftjy6+mPO1s/wwlHWLZerE44ahfSJlBwjIy46ZGvBWvxFB0+XDPOBg7rhQNUcqpZXDz+Xt/Fa0bQNx0dHdKsBFy19N9ANPbrIfl8fA1ILZvOaSh4zDte57NgLhk0gIHEpF2qWpaAuDdHnLWMmdyYCYIpArWfM2h3aRY1VZ6hC4oOgt+BKh3cCPwYIYFJBDA6hyBQomSgLaAtQRiPLDE8wTYXqI8PFiN1GUqE4fEFSNIFYJvqYOL+MzNoc+n/88TkP3z9lb9fg3RItS8rKUC0iVTMS4pauG9hsPcOYqJqS2mha3VLLOU25z9hfobylFImiVBwczzlbX+GlIFUFXQp4JzCzgqgtg+vwFwXKSNQMykWDqY7o5DEMG9aXFyyvThlSz0JVuM4xusAQoZGSqioQMmuBY0xE5XOJYtSoviSdGjYrkb33zuKHiBEKseOwJiKMYKYMaSMZrabbDlyfLFl9esnBnqZqod+OrC43HN2X9HgQBpVKpNU4pYnec//OS7zy/Jd49eWv8Qd/9F3KxYiuIkIqlGi5utrifUAJqGpJN0BvI0aXlKbBdYqq8ngZQcYMnfAJbRRpNCRfYqoKUw+szxLjIAgS3HbkcLeinieqymBMiZJw8CIcLGq0MHz6fsdir2AYLc5Zgg8YXTHYjsvlkvOLJcvlBqMj69US0kQwEomu21AYgzEFRhtGO7K3mGFMAUIQQyD4cVKsDKaqaaoGlSKqSJPdK1AYiXNTYagEIQV1W1EUAmMKhIBh3DKfzyiKCpKi7wJPHl/y1a98nadPT1mu1uiyROuWLg1AyGQzIRiiQNiC7VXk5LGjc7vUpmG9vs72Cq1JyYCXU/F5QhUOayOzZk63Hbm4WKKKig8eP+GtL36Ro7t3ePToET/64Tu88fkXkArGfgQyXS2mkRBzj0uKsLvYJ3Y9uICZzXnrV77Of/J3/zP2Fw1he4n2ju9+78/Zf/AiJ1fnfPjxj1kuR0woGIYlpgzcf/4ew3ZD0zQYHMltsdGBgnd+9BFvf/8dzi9OuHd/zu7uAy6vVvTumuPnEovKsd7Cah158uiCsydPeeGFO3zu+WNst+LhRx/y2//Vb/HXv/1Nzi8HrlYdr731Aj97/12uLlbY0WaCaLXgf/nH/xglIsvLC9772fuUlcG7fJgYfc/ZxQX79yT7dwJRBrZDZH0dee3l11hddgzrnmAtLga+/AtvEOyAc5a7z93jl37l6/ziL7/KYr9iDB2ka8atyFmFJFDkLbpSeoIYBIQUaF0So8UHmxH3osTFPlt/lKSzPbbv2NmfURlNqRVSS5KdKGYh4uxEytRZ9cpF1iMPHz2mbmaUOwfM7hzxxldf4mzzPiFpTp4+44OPfsLPTn/I6uSSNBWSlpWg6wa23TZn8Exif3/B+cWanZ2GutYIFej7EaVzMbbzjsFajDFst2tkipRKIlOg9/EzzH9wU/Yk16L46PHeUlcNMWbVIaY00dRUtrAnbnuBhBSAzopFDIDO5eNk/PTuYpcHzx/x0QdPePjRCRenS/YO50g5o+9G3OgoTEEcJSENOB/otoHeJZbXW1A1qmwRuuDy6opHT56gZUNZN6TKMg4WKVqINZvVyNXVQ1588YDXX3/A/u6CYAXj1nG+Gjk4OuT4zhFCClbXS8pKUegCkTTObyl0y85CIAlcnq9Y9U8pSo+oPMl4fIj4FUTlsc5m8JMo0Frg/ECIAZKeeho3OctsAt6OFGZB31tiUCjZUJgFq+VIYVrKylDUkX603Lk/p2klWudaB0iMwzVCBMqiwvYTAc5lxaMsSpqmZRx7ss4aGeyIdQP7R8dU+3Nka2BjOWp2SaYCU7Ozd8hv/fZv5kVE3xGGjlkjkDLdFqSHGAGDlnWGDQVLCpFNJ0Dne6ISoGRJPWuyhVQCKeQSaMQ0dCXqpkRpONg/oK4b6nrG4cEx/+L3/jU/+OE7PHnyhBAsdV0xThbVTBme4FbpM+XqZigBMMZQVRVa50WAUp91P92U6RpjKIp871dKUlVVJk2nrA7FGLH2ZphJ2VYf8tCV/9xNsbakbira2YzgI90wst5sWK1WrLdriioyb3eQosQNAmcDJ8+ecXXZ46yiUC2CrGx553NGDTFh0/M1ma31+WvSJtuf82AKiRwBUFOBsZSSsiwnR0WkLAuM0ZOSF/M5QBZIIXEud6xFMtlQqyqXTcuAs5G3//Qdtv0lWpM7IK2DRqOLgrIxNPuCO18uWBw5hL5msEtsJXn9zb/Lf/obf49f/tLX8MEzHJ9T7j5kcWfk6K7ijZeP+MYvf4n9ueTZo5F3f2Bp2wN0pRm2I1dPn3H+8cek+QZ/+TFXj58xrEe88WxtXn65PrJZWvxGoJXEtLk31PvEzs4CU0miDnjhcSmSZKJpGrpu5OTZBcl7VG14dn7F2fkVEBm7K4L11ItEvYAYC64fd0+/853v/M6/
a675uRiu/uE//Aff2X++IOiCYAxNo6iJWR0RjhEL0WLmgaIGbSREhZaedidQFB4xSdHBRoQCaRKigcLMEC5vL6Ik2wKTQWuPd5bNymJHiyDR9wMIzf7hMX03ZjtNKakrgxwV84Vmu9mwXq+5urji3Z9+zOXqiuAbkq/ZDmu2W4cqs8VKyuyTfv7+c/g+sF13bNZrXH9Bs5gxCE0XE8trm2EGZX6RfRERu4KgYcuAc1DaloUpkFpzPYyshpERh5ER0wjSjQQrA7UC6T3RevouYEOkWwb8VcJvEl4mrJaoMlEVUGqBImKjZvSZOKckuXdCKaIIBNER3IqqEFRzQTWLFG2CUJKUz4pXEogocDHfQGECNTiIscwdKEBpSvb2D1j1I8l4TCOpmpLZTiBGxbBU9JcZbBJUSXI9wWYSjb5jkJtEVWhMJRGFxLoSZwvcmPCjww6evrfs7y2Yz0oKLRi2jvvHhxwf7FGVhuv1CtPuMVscYooKJTWzqqEViiE4hpjw0tDsVAgxkGQCZVCqRIXA2nU5a6BBhDjZzCwi5Y4pH8AkJu80BOchSWCySaSEi4l+Exi7QAoZZ96FHt1Jiph//n1hGbcKdxKhUxSi5uh4j/ZVgfOJYZsITtAcgwsBFfLn9spRiEg9Kxk2Sx5+9FM++vg9Yv2MUqtMGfS5w+Krn/8yzz24Q9OWLFdXFEVJUXraNtHUGiMPkVFhk0WoSGkUlZJcLQPjJqAT7B8q/GLN9pMGMTYoCrbjBqkbju4ahm3ggx8PNM0u+7st67Xh/Nxz/mhNWZRoXcD0c3MTpjl3cQRSGFguT5Aq4t1IjJa6FsxmLfN5Q7tjKBtB1zmkyoOVFAIl80Y+2yg8yVusHei6nsEGggv59YqKqiiwzuGTQBUV7azJ147tCGFAa8MwWLZdz2a7ZrNdsdpc8d57P2W9WU9YX4VRhuTS1POSrQcyhUklB6E0TVUzDCN93+VtfTIE51HG5t4nYxjGESUlwzgwjiMxgncdbrjk8ceP+dHbP+L9H72PC1c0zSFDn6lVUsIw9DlbINRklVWMNqsGadL0T87OMeaIb/4H3+Y//82/zze+8XX+9t/5Wzz+4IzHH10QvOGjD77P/RfucXD/Hu3uDs5vePPlt0B4hDFgaiBnxZTZ0raS3Z09vN+w7Tb0/ZroM+5dzOFv/63f4m/8td/g5Vde5fGHzzBNy6OzM+49/zn+3t//bX7hF78OB3usl5ckO1LWB7z5uTf4k3/zJwRvKUrDw0ePmD33BuvzE8b1mn695Xy7oi4UJJcPeaIlrCXDGNmsEsKXfP7N11kcV3zulc/RNjXnz04IPeweHnK1Gln1gqgP+aVv/0dYZjw9veLk2VMKJXDW4CfXkRRi2kxHbnp8lJKUZYEdJN5l9V7KxNAntKoBSYyOoggYUzO6HFiPvgNRUpR5yHDOTgqFyrZ2ndWszfaa9eaaq+stZ6cbHn14zaMPL3jvnQ94+LP3ufzkQx7/8G2unp1wfnKK6wbmzZy7dxY8+vSSoqh54cUHfPvXvsxHH39KM19wfHePu/d3uTrLoBxJ7lBiKuMmpBy6r0t0ofDhprsqTran3MF0g6vWtyXW2R6WUv7zUgmCt4SQ7TNlmTfwacqnSanzzzRmMi4S+r7j8vKKcdjmQ6QTaDR2GOjXHdEGSlNQlwv67ZboIkZr7t7do9ssUUgKU6C05sOffcxuvcv+oqU0MGwGZNJ4Z+m6Jav1Oc5bNhtLWe9z995zvPWF14nCsVk95eTZGSenlwip+cpXvsjp6VO6ocf6QN9JClNT1XOsDVxdnVMZix0H7CDwY77PCp2zeSIqiCp3zqWpe3PSbRY7uzz34D6VqQke+i6fWXShEYpsuyok9x8co8sMDRAI9uYtlZnRdwHnPO1MsrPouHe3YlZrZEwMG4+LPmcSlUZIjfOe0QaUMkhl0FLx0vMvoYoKFSXKCjrnIUWkqSnrmvm84fl7L9Ps7HL69CFXp49otGGwgjE46qZisWgZ7DVRQAgxq0hVmzOsw5ALrqUmpozwzzbbnMW7e7zP4AIxqCyr6YgUBXG7gRSx3vHBx59ycfGQcbsk+REtBdZZDu8e0tQ5A9RvHTYOpOTy10+GRdmQM0nBOcLocDEwkq1qRhkKU7K7u3uLI48xMox9zv1P16dzLmO6YbLg5Y4n5xxKKpqmoWlatCmxdkQZQ1XXzGYzmqYhppyriylRFAY3eOqyzhZDGfBhnKokEkIGEJ6QfF6Y+6zuKqkJU2BeyOzhyFb53PeYff1pWqxFyqKknAY+a10mdwaPn2pR/Ogni3yAmMmC2TpJVqqNRpqK5MfcxCYlMWzp1h2DdViX+0t3jvY4eu0Oey8eUt+ZMZiIWQw4b4lEQpQ8e+zZPlYMcoafV8wfFAxnv8+jT9e0xQE7xR3ccsGXPv8f88G7H/Punz7l/XcvcZXg7tGb/Nov/4f8+rd+g69+/Wt88MGP2WyvOD/vWa49noS3Du0atChoasXuXY+owPkMmjNaUpaAcJktIA1V2ZDGm9L2HucHRCWp6gplakypKSrL2QdXRNVgvaDrPUNvGS/Cv3e4+rmwBWbSXEkpDEkYsOCcRpUFSmqkGvE6q1AyAn5yTQaJGxOImClwOuKshEGhBJgyYdeJnXnL4D2D99ixx2hDEh6hPUUF46gYhhGpcr/EBUuiDxAT0YP1gbTtwRTZYZkCDLmIb9OP+JTQStG0miADFBI3BtyYsx0Xy2sIGuct1m8wOiPjw1TuKqSkqhMpFTifiC6xvJRUc1BNwPqOVSfwHXnwEh6jIz70SF2ABK0FqVDTPjD3gwlhiKpiuR6nfiGRS2KNwHkoymxpqYxGasnGjhgTUUlRSZ27KWQiFUzbpmyvqooaWXlccljriTaRQu5pkhnCdBPvmqwCmbal9WS5ijGT1qJFFmA0mCJ3rgx9olslxq1A14rESEgJJRNNJalKuCaidMAUCm1KopAEP1LKkmQkg5W5R2azJRQV1bxBNopXX3ud5eqaKGr2NgnrLGlM6FQipcKmDb33VG2VbS9BoIMhKYP1Nn8dcURYjxtz/qssK9p5jdYlF/6crhvxMZC0QRQakbJ6YYxg8CMh5U2fEhE5U7QqI+1tB9YGhDcMK4WLNv/MS4OpA+ZQ028DiJGdhcQYgfWRYD0pBFIUmCKne3yA6LIloS1rpFMMbiRuH1HvqZxxcLnsw14HlueJdsdAKtDCsLzS7N2ZYQqHwFOpnuuTEk1BSBHnI4XTqOjwQRCCQvqStG3oVh7hR4wW1DX0q8DZJwrrJZsTwWM3YtcaksZ1gqKYHvhCZjCDJG8O9VR8m7L9LyWBs1nCVyLiPQg0296S+oBPAW3UNMCLKfMxWTLIkAwkFKbg7p0DHj99inO548dFn/MGQpFPQSGH64VF60RCEvzNgyznF/ImPmZwgjKT0uBxIQ/d8XZLf7PxV1MZciRFS0pygnVojNbZuptyF1kIiRjyAd3aIavyiGzPKCTd5pIQ85ZTyorl1fK2dyUrBh4h9GQRBB8zrSoqmVNgMTJeXTGuLtjZ3ee
eNg9w4hGJaXazabU+Y7NSlFtIaqFmmq9xL6nkboyXpzCUgQbV1PaDfDKDEaYxmiZnAbMRJbJDMpGJRxzOd3UBiGfsl6tcCHzM7BhFt3rvHZz77D5WnLt7/xXbJylJWhv/S8/fP3+Pv/xT/mV379b3Ln1tv84Jt/xu//3j/nN3/7b/HVr36JeWH5Z//0X7B/FDEleA8vnnuqegKqk+2+LSFPeP2znpwGNqvE+fOS4GH3QHHjVsF8X3Py8ZrTh5YUhGKli0jUnt/5+/+Ad9/9PG++/hq3rx3xB7/3Bzw/X+CCgyyT5hCybItToB/6sdgpmU6n7O3ucXx8nY8/+g6zHdnmuCGzuPA0zZTpfMrO7oS93QlnpyseP3mIUgmrzQjeiJhC5Itkzf3X7nJwnAhDSd8aNqvAkydPRh+ZQDG0gaQqUOMzhiJILjBlqYjRsVxuSFFTNoVsN7Smrmr6vpM73lqsKUlJMi1j6mEMS7e2pK7rETbgqapq3Hr1VwV9DAlrS9QIICoKg9KZvnOApq5rbtw8xIVTTp85UjAURYWE52qOjw9IKfLw4RNuHl9jsVoy+AhK08ymvPb2m/zir/4iySSenz3n3r37/Pk3/pjPvfUljnZv8/TRCf/6f/3f2D2YUU8UqMBy2fN3/vY/5OLilMcPH/Lo4weAYb6zT1FKDqJOsF4vmO4eS2TEMDAET2UVjZ2gEKUMpXhsun5D8B6SpW07oKCwBqs13mVsGSFJYZnYgDZYVcsWhoRzAyHA7sH+6FvsSFG2EfAqT2w7xIFXk3mtNaaSHJ+t5IssIcQpZRR69LqFUcbG1UbI+2GUgcveZdtgpZQEZlFIkwbyZ9yYOdQ0Ddu4DfH6cPUzCTY+gnklEzTWwtgUb4cPUhRLbMLWS7Nt3LSWoUU/DLz19lsSYTA+j23b0rbtuDmWAWoeN3xagy00TV2TwpgRFpJ45dKKWfWLfPnX/h77RxMePv9/+NM/+WOaYp8UHTEOkh2oFNraEdxSsV6tcH3H0dEBdV3RdR1D11LVxVXDudlsxEem9VVTKwTGV941M24mt6+ZwDHAu0hM/up1kfDun36vtdZC5h0bKOccu7t7lEVNjJG2bUmo0fssShJjNMMwjA2FNPdmzFbcwji20Aznxfsekyd48K7j1s3bNJOKrDKX545Ve8n0uMROClKG8/MFuhp48/5r3Lx2B6tmtPGcnYMJfTewWQ3sTQ85P/2Qi03LEEHbkomq2Kjn0ELcaNpNYrVaMZ2XaKuJWeEHxd7BPpdnZ/jBURYlMUYmk4k0uDGN+WAebSx2XDZ473AjOGU7kEgpMJtNrxqsGDOrp6u/NkT4Z2Nz9T/+4R82B5bJtOTm3Zov/toBf/m1BUNnKWzNO+8eEbXizcPfAbfP2bOB97/xjGl+k//8d/8uf/fv/Ef82i9/ns++9TofffQJj5+ecXG5pKwiTZXwK4TwYxKX54lJA/fvTahLxWoRqCczjm8ccXnp6PpAMzekrLhcJLou40NGpcB0PkXrkjAoho1jtluxd7OEMtJMK16/f4tikhieOdTgCK4j2IGXJ5aiygyu58XLDcuLyG5TcFAZXr9xnV/5ha9w87XPc/LgR3jXo7OhNJrdnQYVLJYJpd4jrNY0k5bFasHp00ue/WRBWAyEaaRNEa8tt97aQ8+WvFg8ZzP0oCzJJ3yr0RkJrDUJjdAJq4miagqa+hamKJBwO4fSPbs7FZ+cPKdbJYgNk+ku3/za+yzPHS+evuTJkydsug7SXOAfGpI3LJYKVSSWZwNuDcqDmXQQEmhFOdPs37boumd9kXCtYGWnu4reiZwQpZhMC6IKZEqUsZiypK4aVHQQPAYoq5qFW+P7QBUUlUswdOgdTawiIWdiGClCUY/bJRAFypgHokf/FaAVNI2hKgVkQlZoCsSMLQ2X1QalgoTRGZnkOifTKqXk8LLjB9S7KFIUMgeHNc1UJHaDT5hiQlU3aKskJDYXrC48JkcmpWJvPuPG4evUxQG9WxJTJnpDpeHl6cCwViI90Zn1ytPUBXWtqFJFPJ3yhS+8zuNnJ3gf6LrI6emSj378MYvFRrDIJqOtZzU4mnJCXU2wk4LVRg6hoQPXK27tT7g9K/G5QFuhmDXTgmHw7OzsMp3PCTFxenqBi4E4egTckNjb28f5wNOnz/nggw85P79ksXpJ1h1Hx7t89jPv8vTpk5GaBGVVopUURvfu3acsal68eMl8XqCJ9N3AatHjYsXt1+6yWSvIsoZcdy84X2xww0COA2XpMNrTt04Cdkfjate3DO4So3qasqA0e/zWf/xbPDh5zuXFiv39KUUB/+2/+J/4/Be+xKpL/L//9t+x8R+B9nz1K7/BP/qH/xVvfuY2z55f4oIj5/EyVwbne9AJWyrmc8nnAUsMltVi4PTFU6zWpBhJWaa7TV3J9DeNJnFrSDK/RSvIKrJatuTUk2Ji6AfOzi5oNy1aS86abGENigKtFc4H+kH8dMEHwhiqKxenR5Fk0i8EDHIObDNkFJkYNVCMEoiE0hHXJ7qup90E1qvEej2gKVlv1mw2S5zvMbqitA0hZGJQqFxQ1XaUu8nnzBhFYYW0pkfPUUgRrTPXrh0zm05ZLi75yle/zGuvv07T7JCSoW4060Wk6zd03QbvAnVZkpTic+++zbu/8A537h/z/fc+xA8DSqURJhLRpeN0cc53v/d9/vTf/hnf+rOv80tfvENVT/jkyRnvffABT8+fMN/bwcdM3TT8k3/yj7l58yZ7+zNu3LzFrduv8/HJT2gmVuITjBShq8XA7bt3KWpN23eofETCEJwEtVZliaotn/vFLzHb+wzTnTe5decm//sf/StW6w0x+Ff+iJAJ3lOWBUfHR0ynMzabtcilAG0y9+/foO06hi7jnca5wBBaZvOKorIMXrJ5+l4CzbXW43YhiDwL2RqtVx3z+ZSyrPFD4MWzs/GsFPmSLcRnEJP4CEHujsKUlNaQsyNnjy0MhS2JSVHXUybNhJzk90vTKAjtorCjX2T0aWRomgmz2Qwyo3/rlXIApBivq1pyh8qSsqoEUtAP41ZGJM91NcfHDcEnBJkunrO7r+2gNTgvEulWdwxOkP2VjZSFY2/vJtpYFosXPH3yE77xtW+xuezplz2PH5zw/e/9JVU9qiCyZOpMDjSLZWZoIyob5tMZn/vcG8xmO2zWKxaXS/reEWOmbXv61hGGiBrz5dq2F1/dMOBCxucxo9EYjJWcxGZSMZ3U1HVJyh05KrS2NE3Dznwm3rqilLvGD/K+Zol4iWHcHNlGjP3xVVMiEtTRLzJKugDxCEbZMNdFA2NhrrXcUYnIdF5zfO2Y3b19BucobMlsNrsq6GOKV0U8SvKyhPKYrhoGgC1C3BhDOW7tt03XK8+R/HzG6ivandVjDp56RTqsqwr1KVDHVnK63ZhqJflFFxfnbDYb+r6j66R5t1ZIeTHI+SubDYFcxZjou068Wsh5lZIH5Xj87D0++PAvePH8MXdv3aFre7SKFIXUG8poUW9YQ10XdN2GFNzo0Q50XUvTyCDRe88wvNpObf8O
xppx6CYbOZAtcYyB69ePr4r99Xojr40xV5RAdeVZfuVF06Ne/dMNV1XVbLOulFJ4N4zeI67e/6IoKUbZtyh6ImVZjs/bNgRZ40NPSGnclFqMmgFGrBEuUE0qfOpGH31CGcPBvOCz786ZziLr9RmPP37Kb/wnf4OHnzzl5ONHPH38nKLKaOOY7kwoakPXt5w+u+Rg/4Djaze4fusae8d70rAOEs+ks4YkUvQc5TxJWbZ74pGXpn7oB5RG/NExChUybfMit1u8JDaHYnzGxuHDsBp+toEWSkFZiPRssfL8yldvc+u1jsXpwPMHa17/7IwhtYTyGZPDwP61hscfd+SQeOvN17l95w7dOvPi3guqes6/+daf8PjlJ1STTAgaFyO2gXqiOSo1+3uGnbkh9JmcwA2ely9XOCdEoaLSxGXADaPvxUdMdpRrmbj6ITCEiFOR3Z0MzjOklpeX5xzdug5uwG2W6EnL3v3E4kWkmBhyNugONi7yxq5lfzahqjXf++BDNlhK7Wiygc6gc0JVJfQDzq9BRQ73IzPj8coxlAH2IqFXbPolbRxo84qYFvjQoXSP1iKLiEHINGmUKItFQFM1iaoJVNOWojijpiBmzTAoQjT0F4IsfZHP8RvL4XHB0f0DNk8dm1GXHWPCZI8tBeUcM6xPBy6rM/rLlhgGsjZYpYlaY8pEuZNoDgJuqOjWHr9JUDq6zoikbPRGKZ0hZJROoA1alWTlib5FeVC2QmtLmWRKFJKnw5OmgWLH4AEfIETZQIk3bsx/ziJD0eNqW8L+BKayTfjW27R2lUY1ch59OKC0yN9gPLyMbAQKqygKS1ISNEkevWMp0A2Ooi4p6goMJK/xLZTlDAqIk0C76Fi9yBBbYjgTeuZ8wvT6hGzArQJ4qOyUkMR7EHMiuIFFIzCSmSm4tj/lxbMLYsyEGGjbFet1S1ElMp4YC3KuyARUCqxWA84lTKnpFz1mItOrrCIu1RRlScwOlB5DYj1VXaOMTEyzSpRVQ79OAlowGWM1KceRdAcpaWxh6d0GZQw+lGgjKejWSgNLDlfTut29HVarFX3fspcFJbtet8TQMg2Ba9fmHF87ZrW+4OXZS6raslhfslllCiveHqNLptOSGLw0FmNmxTB49vZ22Ds+pN3AX73/fYaYuXv/be7du8/5xTkLv+a993/IX333O/jYEkNPCjVdn+i8oY8SdJyTyJNsATdv3uZisaB3vWCNh9Vo+M14ekjSMl27do31ekk3tKQY0LpCKwmhTTlLM5jSeEHKQzt0nrV2NPWY55c91iYUFltLXk9MAYX441IKEmlRGCYzRbsKBP9qehmCTPAk1Lm7QhdvM4xiDIARWAIKrSwxbkhJLnBjC6rK4nqhsSmFeGAzhChTz630IueEMVI0x1GqtA2YFuCLRyuhcw79ICGtSlNVM1xw+CiAlZ35nHXdUdZTZrNddmZzQljzwYePGFxis4lcXvbs7Bzz9htv8fDhx3z44QdgYb3xrNZPWQ9nhL7it7/096jKDd/+zo958PgZuvQErwjey99fWfou07Ybur7Hh0QmUjWwOM9sxtBi7xJKWxbLNa2XLMQ4RHIIr7KnUOSgeXrygLPHHT/69g/5/jevc7lay0RfyURYa8nr63sJr1Wkq4BSbSLKeLROuEGQzDE6UFBWBuczfe8xdqCsM+16A0ryp8rR03h5eX7lwVBkgo9cvEx0dSuve+iZTmd4Z0jZkXOU/K4oxn/ZjkJhx7ynBBqLKTRRgSmm2NH75pyg3l2SgM4cAylJ/pkePYOg8M6zWq2Jo/Qsp21BL4VMUtIYNnW9ddhQFiUdPRBomkroZSYTu1IaasaNia05urbLy+dLXB8o7Zz1cDZabUUu573i2bNnrPoNpgyE0DK0K7RtcM5jjTT+WEjZj94WTVUYYjxn1Rbip9WGdtOxuOzoNg7XSx7W3v4hPjoJRY4RU2tCHCS6ImX5WaPH+kAewVJRvfKDpLT1TxkMegyv1ZBLrK0k42eU1G3DgGXNMELZ84hGJ18V6FsK3VbatQ0qj3G85xA4QdxK3bMiIsWwvG6Clg8hoPDjn936oMSrHNQWgjH6oMyrxmf7dRUSn4RKJzLT7c0K28ynLSRBnqNXKHH5OT3BehmAKnXl8dqeX9poqrpkGIarJkP+neNWdNyoZQTfnkY5Y055zMSSZ4XRu1zYCu87vGtlM18Zgp9SV4VgwKNs7FGZMBIbIZKzR2mRIIagJUA4lxSFvWp27aga8kFiNay1kgEX40jBlL+jD/5qc7VtlApbXG0OldZU1l4RGbevi3w/rj5nCsaGVuqe7WuBGrM/R9wFQFGUVw3y1ket9at/f86OpDw5iy82pYjO0PdB8k4LUewYLXmyOmVmk8jRQUmhtFjYUyREx49/cMKLZ4/xfYdVBReLjulEsddU1KqktB7MhVgLssb5TCaQYiYM8jNnO4Yihw3GCLAmJCFWWqPHcOx0Jb3cNvQinxdJqbGv4B9XMTte1Ckik/7rv352mqsSnPOcnckk7jNf2Of80QI/dHS9ZggdS/t97N6E4/uJOxeZ2dGKbCMwpal2uXt7F1Ne4wePTnh2/gKlI91G4RNMCkWzq9m7UzBrIC7FqCtd7MCLhy0xGZqZQTxrMqmIPtHfA1OoAAAgAElEQVRFkXmp8xVlLcV/SLAaHOVORunIEDsevzhlfngLH2e4vsNq2J0pmj1BuhdFgSnh9LHAJybzmt4Fvvv+d3EMTHNPrQzBGXKCXme0C0TVkYo1B2/NuHGsaGYl05llveNZvVQsAgTf0vcrHi8T1UQQ28ZkXIh4D8pA1pCyIgVJ0FYolElQdGT7DKOOqRrIumDYGPrLQGUVm5cr+rWi8xU33rzGhVrQLSTvwYaMX/dYm8nKEBKkIbB6umDwLSkFIg1lksmRmUA5h2KaWZ/X+Dbje4ci0G4K7EQ2aqhMJFAoi9fbFHlNzh3JtShvUFmTqkRNQTIZHz2DHlCTTDNXuFYRg7xXOY3Th/GMTBkJ/VUarog64yUSJch5O+HJagwEzIwGU64uJXjVXElivYSmhgzWqvH5EqnhunU02lKWFY01hBQF613U6EpDDUVv6M4iwQ/0g2NgYH7tOsUOVFFuy9QbJtM9lu2awUdyUhgSXmVUhGpiuXNc8tGTJ/jREF+Ulq4NTOZyQIiGvCAPoKJl8I6Eo46gR8qfKQKKRJs1MVqGGFAYMa6GiDLlOA1O2CJT1TWsu1GnrSgKjfM9lbYURU1ZTPChp+1anPOs1mvOzk9H+UVJCJFhcMSciXmgrgu8L8eCwKKNoqpKnBroukva9T57B8csN5Hlesne7gSjelLo8FGhlfiY6mZC329I2WOUxgeFtVMms33m+/tcbp7y9W/9BUeH1zm6fptr11/HM+Hr3/v3vPfeX/Hg4xO0zqig6deek48fk9N3OF+8YNOtSFEKhkRiNp/LtnSR6boVm/X6ipSnVQYrHoH5fI4PAz72QmU0CbP1JaWx2U+S98Uoz1Eo+s6TUxSzfqFpJpnNxlNVgslvNwHvxZuiVKYoLGWpmcw0rks
El1BKM5vOabsN8/kOzaShe/xYNhXWjpr1cdswFl3iAypJugUKrBVMsbKavhswVhoDjTzrLg5ouHpdQhjz5IxkwsQAUIybNvEMWGsobcF6tWLoHbYoWS1bLlcXXF6e40NPVR3RTC+YTvc5Pr7JjWvHdP1LHn3ynHY98PTJJctlT13v8NZbb9N2Gz786COyUfRdQtOj8oacKt79whf49re/xsnJI04efMTBjZp5c4R3L8lRBjHf/asfcbF8xqpdiu8EzXRuWV9mYpICJadMWRacXVxQ9ZqytgztBuWiBLmiiDmTPTz40Q/pNu/hBktVHbLu1pC4MocbM27TnTQUfddiq0IABjZhCo+2jrOzDu8jykBpweoCpUtS1DgXwXiW6wWFrijKiqapKSysN2vxf2bxJGmluTwfKIo0Fn+wszunXScGlwhbopxKWC0UwC01LpMgS5aiyhqlElVRk0Zam/ORSVNhtJVBXPD44LCmGH0MXBW6gmrfesxeeUG0NigN3ndoo/E+QI5QaZEu5sRkOmE+m+L8QAwlOXlGXBTWlEyaCSktcU4KUOVBRUVVW4pK40Ki7V6w6i4oS0vTGJqyIJdm3JgYynpK7zw6B1JS6KxR3mCKjrbbkJzBmxI/rFkvnfycSe6QyWzOMGxoY8AH2eoE5zFZoAYxR0IYUCExjERAbEGMQjNWSZFMgdYVSgWi99LojwZ8aYiMBLPqjDGMOHPZQockZ4YeIRFKyyVYVtWVbC4OaRxwjYS5tB3Q5Cs0d0ri/fQ+jZuSEWCQlWxeGcm7apTWZ391dsnXp0ECo59n9P2EICAQsx0mSWXIdl0rcAwBfsUQrrZNW0/VMAw/5d0S4t7YsCB3rvP+p/xecoVL4C5KoZV8jvvOX9UEMSUU5fijCAmxqhpycmijKQtNWSjWq4VsGyOidNIKtKgOQnDE4NBK4CEhegijV/GqUeTq87BtDFV+RXJ0IWDgaoMlsur2ysKx3VSlNJJnx8bMuZG+/KnmSmI6BOqllMJ5R4oyHM/pVXO9JQ++asjNWPcojBFfox4BInk8T5TKVw1yyhGFx4WEyRZ0hfdubGwMVmWaIjKdQteCVRadLaYMvP+9H9PMB3YmDdHWvFxsRBJvA6DQyVJUEkJNUqM6Zz3aMPTVgHyb2Ygp5NeyIqRw1Rh9Gqt+BfjIW5bDq9ciJwGdyOdCBg5Gv5Kx/oe+fiZkgf/D//wv/3B+S0lHnSJ/+ecLvvKbx9x603J4r6SYiuG3bDpUvWJ6uOHtX8js3My8vFB879tPef8bD5lOa1ZM+OEHH/Dy/DmDX9IvFSrCfG7ZPSzZvVERJ2ue/9ixOZNpQF1Hnn8CyRdYpSjKgaaBsNHURaapBFCQXEEgkFJAx0huI7bMmMKAKmjbgsvTJaoOkvGx7pnrKaEtWL2M7BwY3v6FKevv9qyXcHaWuTj3ZNdzsLck+Eq8PFnTXmrWacVsxzKdaWYzxTu/sMcXf/Ntbn/2OpO9Oc+egkmW1/Zvsj+rKSqZgIUkDWVIEKJ8GK0SJLsZZUNeB2InhCwfEt6BeeaYHRVMdmpKU6Oo6V8syX0ix0CX1xzeOuKtL1zn1lvHHN/b4+Z9y2J9RhgiMSqyVlht6bsWrEEZQ7dusSYxn0MzNZTTCjuZ8vLHiSI6tIoMSVFQM2kSZZEwZcLpzK3ZTdosCeiN9US/QPeBsqrRkxmxmaLjhsEM+DTg0+h5OZgz5EFoV44xnynJBFBDMVIRYWvcVKRsxgNDsMo5yWX+KUgPKYMyiowdV8lyADuXmM4MkwkUZcCqTAqZbMSTZTWorEjeQ4LSFhxem6LLS/phRd93uC7h1hGrMtFpUm/QvcFWLxlY4Y0nN5pmZ0qRduj9QFVpDnZryHD33jV2K8XhHG7dUjw9f8blZuD69fvcv/dZbDXn/PIh82rOzrShaSL9ZcdmCBzdq9g7rgiDZu+NgePbNUZpojPUezV52ND3oJRBG2i7DavlhrZztBvHet0R84AQ3QqsKVFKiqK93Rn1uG3pup6hD1IEDonl5ZKirElZCqwQMpNZxWp9yf179ylsxaYNTHZuYKzi7bdf49qNIx5+8gSl4eXpguAGySjpDRohwimV6Dc9qsis+yVd2xNixpSG5VnLZz73GY4ODxn6ge+//x5NbVktLzj56CP+4ut/yYOTD3n27IeEtqPE4vpIu4bgMi+fv+DHP/gejx98gBoP4JAi7cbz5MlTzi7ORzywIispwre0s5QyKsPl5SVdv8HYwM4elJX4iUQWmCiMSIS0ESpmtoqsE0G19E4asqY2vPXZCeeXjnqimc0tRVnihp6itILvxeJDYn2ZCcmBCpAyd2/fp26mFFWJD47ziwtyFgN2joY0audBNiWF1eMGZIayinpWUTSWzWqBMbKZSWk8JxMS1DlSVDOKnAxkkf0Zq9BYgmeUIacxIDTjh2Hc5Emg42bTcXLyMcPgmEymzOZzuqFn6ASKEbzj+PCImDxFWdL2A48ePWW5XnHy6BnPnz6RYYzVuMGzO5syLedMyn1OLxV/9Ef/msXZM6aNZWdnj9/67a/y6JNPyBRo2/D++98h0OP8Bm0c9SRRVRZbJspCyQBl3L7G6CltSWlK2vWKkDRxLNKMymidWC8vKctMXQcG9wytpCiRLYUh4El4lC5IWdF2LcPQEoKhspmqdKAvOH2Zmc132ZntMmlmkCxlVbN7uIuxhtXlCu8yTTUZpS2Ksp4Qs0ykRaYn74cPa1JOaGWoqwk3b99msbygLAvqqmbTrdFGfk6hTSbabi2kvhxwwbFpW0zJuN3tyEmm9ZIPKAWtD4JzLsviausgnqhCMmakGvpUsZNIWaG0wRrBZqcoEqTBdWiTsKVmMpuitObxo8cyAIjiSylLgQvM5hM2a4Ea+LSQgPqoeO3tOW/93A7ed3z2XYtJE4ZNZHG+QqeG+2/uEYJEMvTrzPHBPjoZUm8IPWQfWJ55mmqKNZrgOzablrKsiGmgqODmrUOm84ph8LgYCVnye5SXDWXKmRxhZMcxhEhMmdJYUnTkFHFDxDuJTlit5HPqneT/hRyIWc7TGGVzrZWisOUogR7ISgGJqmgE/GDAR4c2peRhoVit13IeGEvOgtFXukBpQ1k02ELy1Nq2vWqGvN/K+xTO94ToZdBm5XuKfC1LU6bTqw1aVigl97EaGyetrBS8WbZrV/8Zf12Nz4o1BqMlZHwr2Wqahr7fjJu2MBL4RvnrWCvrUb7vvb/KN4oxCrSlsFgr+XtV1TCM+VESnWOwhaGs5BkOQTb0xurxM5RASVPbD3I2h5gwusAYiUswCgpt0LmSul2nUZ1jUGNOnHP+ahsVgkNbkT6G4MljULUdg36Tkj/bDQ7nJfBZa0XM8cpPd9Wwpldy0G32mBtlb9uGK4QgK85xgiwbtVGGmEXiKPmH8n5Za0YfmMgKt42skmBVaSKVtIBbSI0xhqKwAtvRDTvThsoaus6z2QSKosEFxXrwrHvPbG+HL//aL3G4d0y78jx58JC9HXj6yUtOT88Jsce5zGx6k3v3b3B42PDkk0fcvHlI27
ZUjWK+U2Js5tqtPSISiZOCHhVFwjRIMUucjDKoLN61bfNptEXrCDmOXvutVHJ8Zg24dfzZBlpUc5Xf+HI5GmgDR9cLfv2rN9jZt+gi4nAU1Q6TvRZbTIAJzsO8rPnoa5offX3D+SPNP//9f8S///qf8ef/H3Pv8WRZmp73/T53zDXps1z7melxDcxgYAhSNCAJkBQjQEjYSRRDCiq0YEj6IyBttNFGWy21ZUhkyIRAiUEQBAMgBmN6XAPty5v01x33OS3ec2/VIAitJyMqoqsrq/LmzXO+85rn+T3f/zZXm3OO7k64Wt8w+JLcOnSEetYyvQvXjxWxTViTqCtZHYekGXyiHwLGuXH6JMhuky3XXYYQsBqcM4Rsmc0y9dygS4VXYnrb9Ip+A0WGL7+t+Ae/8RX+5T9/QnE68NW/XfDRtyf84H97QjmLVMcKsyc3zOrc0F1ahpUiRs/f/0fHYBU3NxuuLte8/c4XePrYk3RP1w2cPfXkTnN6onjzS8fUBxX/+l/9iCEhah5tAEvXDliTBbcZFM3So2uwBqITefoMTfsZ7N0psfOKaB2xuSI1hnpicBV4kyjrW5zePgWr6PLAUHR89U7g4YcLnn3acv3Ic7h3wOXymp//xZ/j9NYpn/zZA558/hnWwvykZn5a0rLm/EeRSQHKKoYMs5liPj0kuZ7oOlIV4UJTnR6zXg0sL1eUUzg+BNw+wdQM2jFJFU1zg6PB5BafGlSuKSYDTZ9YrjOSz2pGQo/CGEdMorUlGXIcsxzGNXgIGe/zOC1jPDBgi7IW0IDo240xYyZLYDKV70ORGTYFffQy/YqCic3Zj4MRhXOGg4MK7wdCl0iNIawdy9CiXUlhoKYH4zh4Yx9bbohpw7IxPP14QtN4yknJ4ckhp3deYzIfKLRjrkuObMHT/iNePJxRmznZR77zne+QTeCNNyYc3bLYGv70TwYOXnPcvlejs+X5/UQ96zi9pYhDplsrQqiIIbBZZ3KSDUXTLkdpmTxkQ4js7U2ZzR0+tvR9D2mCURP6vhP5iFLM5gfkLMZtPwzkaAh5jVaOFBVkzWRSU5RT3vv6l8hEnjx5zv7BCZ99/mf8w//0H/KNn/smP3r/h/ybf/17nF8usaXFFprHz56DHjg5nWMirC8aFuueg+Mj+qFjCC3KJW7tv8lf+St/g8ePH/CDH/4J1ikyHWVZAJrgNWVVj7QlOx6knk3bEKKnKAzOGbouEEOBsZI5FQcvvoMYRukQu4fdblor+lSMqimcoprAZCZFYLsuBfqQEqbo6XqLsZLnl/F4r8ixwLmEKyO28qSoCY08+CTEs8QaqKo9+i6yWkvRG4aCLrTkHKi0obQOtKP1LUPqKCoojCYPmRAhZkXhNBGPYTQxK0ffJQpXyZRTZ9BBKJEjPUlrLRkhO0LVOAXNirq0VGMwZ/SB9aqnLCegNCFGYhzom4EQBDJTTyxD6CiKCaB2U8bTk3toIv3QsW42rDZrbt29h7YFCoMic/b4CaW1xDgwhJ7sIs5WTJxhWtWUbsJnDy45OJ3y3/zX/4RvfvM9/vg7v8/HHz7i80ff4fqqZbPU9MOamORBWpYFdVWhlOTA9J1kcZVlTdt2I60xjQ1A2hm9FYoUt362zOHhoWwL+5b1ei1SO21BGXwM2PGsySMIR2EAg1EZazTlpCZEx3xfkQbFsFFkAqZaoe2Ephm4vLiirkusthglkqotlWwbGNsPHTE04q+pxHcRQ6Jte+bzPe7du4fWhu9/730ODufAyy2D1nKfJhLWGupJzRA8z5+cU1UVVVUyDANN0+Cc28m/iqIYJafj9NeYnRlea0OKSTLAhmFHnnNOQBa7PCAtXsQtAjqP2ULOVcTUYqhHdHlPCpYvfvk2i8WG1XqNjwIwyHHG1772Dm9/8ZTr5UOurxp+/qt/k+fPz/n2d/4N996c0fgJt+4conTi8aNnJAqGTlOUmck8MD9acvFiIN0cgddk1RJCZLOSjY+1lqquWK9a9K53TETfyXYOJSGvSTEt5zTdeJZZi85K1BtBcXR0xOHhAV038Pz5s3FrIAV5SqKQsLagKBxF6Xjx4rlce0WB1pam6TEqScE7Si21sxJ0v6v/JDdK/VQYLxRFtSPcdV1H27bcvn3Ker1hs2lEPq/1jqq7BRzsJFaj90vpOErtJSdP/jzszoec5drahoxbY7FWMtvK0o3SagkM3soZt5terZVI+bLIwIZhGLMYq52/rGka0Hl3/SsljeIWoqFGufurmVkgQccC2dnGreRXrv8JdV1RFOKnGrz4qYzS+E1HkzcYJRTDlGEILWiRb5usKHOmU69sidhuUrZ+KNk0ZqWxWiSBQrfzEjaeJROLUXIpnjWpL4xRO4nh9uey3extyY8godsCT5PGzBqHNQY//HTW1lamK0HGcr8uFoudnFGojgIoefX+ds7tXoNzjtNTuXYk1NwwmdVgEk2zpqxLiqpEWY2PkXU/Li5zYugarF7TrcFoy3xvwt3XJkyrtzC1RTmFynBxdsn9T+4TQ6CqHfsHFZNZxfnVFW3TQdBIjmlJDKNsMAopcwsyA7BGU1V2fG9EPp3x1PUBxkJWgeAT62fNXwi0+NmQBQKoUaeaNO068PlPEl//xQmHdwyF6ggm8+Rzx3rZs7puuLw/YIZaEs8Xhtn0lK994XX05Jc5W35KvP8CbMtsT7F4Oqdf9yjdsXdHc34uK/usoRtgtcq8/gUZu/YJ2iGThjwiUSUUNowmOavk7w0qolNCx4JhLAactWwGjwqeykDlDG5a8tp7B/z2OyWrfs3lzYrPPhxgrybPBoYiEH1Edwa/8JiceP3tKb/+21/gr/y9t/m9f/Exjx63vLhQ2P3HuFlBdwWxUdw+tKzXC77xq2/iFTx7cYmtNE5X9CGMGugomuAQCCkzn9d86d17PHh4yWbRyDSohsIk7v2iRikDFoIKPL6y+GHATiNFORZkOTOsPUZnrPK4WvHpR4Gq3uPu21OmuuXqbEkKhmefvGD1dMmm7ZgcQ04WO0lo1xEXmcK+pFilmJnVxyxvGuw0Y+cSsofJLF6s2SwDsQvsHZb4ToKhUxHAFVAIqMAoMYdmFXE+0TeKtlP0YczA0omk8piKbnCV3mFTlVKUJUQvPjtXgHPyAO+7cQIz5oHElKmrCTl7UvagIyko+k7ybKxL1BOFdhmTR49LZej6sLvelUKu3U2itFa2jiWURwWb80wmEnSm0Y7QFDT3O05vWQ5O9piceMJG8eKRrPEH71kuljx9coZC8/UvfJXf/Lu/xeeXz/jdh7/Pi2cv6NdLyFBNHKrIbAZPHCLF3NJ3nsefRHI0+GzZPyxZXDX4oInB4FKLLWbEuML7FqO3YBA94tIdfR9YNw3VdEpVO6YzR4pTFpd+nFzKQ9P7QSZqIUojqzM6apwOJBRG1Rzv32U6nzKf77FuFjTtmqHXDD7yu7/7e/zwux9yPJ9z8ewFfU4c3brH/tEBDx5+TlFpLp83HB+c8I1f+Dqzg5I//KPvUFjFdDIjGQnb/PCjH7NeLcUYv4mSlzWXPI7rq5acC
oa+Z29vQllVNM0SY+1ughnjGPBYOOazfcqihBR5cXaGUXYsjOXhZ40YmOUBp1HZkKInK8UwKIYrqKuaopKf+zBE/JCIdIQhYY2SEFgCs7ll3XYMPlHMSg5P5qxeSEBtWZbMZzOurp/R9RuG8YHgfWYYJGvHFQKX0MoQOoVLFUZZdPJiws4W45T4erzn+OgUBfhBsN9ZZ7qwEc+VNmhEKmK1GSfDYWeU3mKetzCRbpBw1VQqisJQTwpiiMQQSRmqquL1e28SU6RpWi6vF/jBosijh0G8HImeu2+8SQauLi8YHj5gcX3J0dEJ06lMVGPsKWcld+69xf7RHhcXF3z60QOyHiM31A1uLlvkk9ltTmavsVoY/u0f/Ij/7D//LRbL57z//vv85IcrZtN9Ym7oWs9mFQBP4UqGfmw2fL8z7IPQ9YqyRGFGaZFspsqxSBu8p1+M2PwxD0qlgCsTByczNssNQxcFXoDBB5ECGmtRzhJyRx9avv7mV1jfdDxcnolUisimvYFsONw7pOs6hkFCPa2V7eD+/nwsDoW4N/QZbfROPjQ6sRiGjmfPngKKqi5/CobgnKPrepq2G8NqDTEp+t5T1yVagfc9bdvuGoFXyXTGmJ23Yws18N5jtEzTpcCudrSyTCInaSBS8qgxK8gPAWOzADAQOVtZlvSNZzKpmN/Z59njG1arZqSdxd0AQBnP2fk563XDar0kpczq6k/wscMWhrZV5BS4/2eP6dqO3g/sHVmC1yRf4HtD2xiinxD7SBoGEp4UgOywRorkrmvEe5r8KI9T5KiJwaFcwBrQ1qFN4uT4mKZriClQTSuGPmIKaTqHIXN1uaCqZBgn1MeETRVd1zCbzrlz5w53777Osye/C1bjXElRViibGDYi2TLWjO9jpihG6VOGnDVdv0bllyjvrXdOiHpSvBeFo2kauq4lhGGMC5FNqDQDatxujJv6KEATGfRuM7bG/MItjVKrUS0CYoZTpJxkezbm5kmBLj49rQ1VVY5Ff2Cz2eyaOvGeyZm0DbRFKVxZ7DxKL3O9xsgHY3/Ke/UqWa+qqrHx1yOQR67lcoyxSSnLpqQsqCe1hFl3PV0KRFWglBttCAmtC1QGnxUeRQSI/fi1xutSi91AaTU6zwTGEFMijhvuHTkwiwpCYk9ebqK2DS/8dNP26jbr1byzXRQCihQSdjIhjvJCyUlzOwjJFk6ilNp52Lab5q3n7eXHSwrl9vVszyOjHTkpujbLmR4cXR8ZbItxGmUhDw3BR6IH30f0tCT3nqPbt/mFX/0F3np3zk9+9JyL6+f0oaEuax49ekxCNo9Dr2mbzHq9RBlN6SoC4MwMP2TJwsoJozzGgYmKGBQpCvSj9+EV758m9NDrAZvETvNKL/bv/fiZaK5Q0p0yTqVyhMuzjmePWpSrOLjj6IeO5B1nT1ZcPt8wN4rHH0HnPY6aw3uWo3u38ef3qfY1k31FZzK5cXSXmTwo3FxT1pn+JlNkg1aZaEBbKQy6PtF2mWFMQ5ethFzgYpHd+m1Au0xdi0ZUayGcEaFwJVZplA04ndgsDT/8/g2nX3RkVdJfJ66fPseVkCtFNJoUtEAsVKScK2YnCjMx/NmnK55ebFj3PRSJXHTMjgLoKfSHnOzd4cM//YCmTTR+YLXpqKYWcAyruAviVFY86TllbKG58/qcm3ZNaHuGENFeM5sVvP3VKX68sDKZq8uajdJkHYlZCqiQg8irsqKwjrqaMkwOsG6g9w3GysrZaMt6uaRfr0lG8dpXapYbjy2laPBrjTNyQ2+lUN5n+jaiSkWJxmhFQtMtE7HJOK2Z1pl2ENiF1lIKZCKqsIJs1YmQHUXytCvFEBWY0VBrZRMl2vNI4cAoxzB6ooxSKGtEWmDAGAhefqlRspLT6LtKGmW0BC6TxLielGwgGiicGfN9Rl+WzeggOSXiw9Dk6NB5H5MGsh3ABEqnmE0L+j6SUVhXUO5NaK+XtOvMpNLMbzuOjhWhqRi8JNdvlgJRAMVy3XB+veTxZxdcX1ywWlySY8P8wFIfKJQWH55PislEfGlDGwgxkKtA28oDsvciD5mqSMoikfR+IG2HDjkR4ksN9hCCHFpIvhGjmTUD27BKozV9CKLHVqLh1xRMpoHoR8JV6jFqtnvIOWdYXl+gdebi/Ix+taY/mYLaEKKiaVvcpmRWASSiVkxnM27dfY1qTwsuP0FhKtzkEJ3mLBZXDL5hMnW8/trb7B8YLq6es9ysURRkEvfuvQ5kur6jayV81ZalTMV9RGlLRlLgZ9MZ02nFzXJB3/dj85XH6zvtdNyiK5VrIOVM8pkUFIWDciI+lkym77eB0YgfwFiKUlGWiqxKtDPszed84+d+mfv2Uy7PLxn6ABOLMSWh78lJjLkxiw/DALPphDtv3aLrB84eXmOjRimHjwJGsUZROIdxiuuuRysJlQ0h4Ec5TUoihUlZEQZpGjNxBNzEV6akaretU1rjhzQS5BTGig9QpDUZlSD4wHx/X4oHpbm8XrB/sM/QC5yDnDDWsl6tubq+QhlN27c7D0vfNOitIdmIF9YVFXV9SOE6rJHCJaeESomqzOQUeP8HH7BYel6cXeGcww9Cuow+k4Ncf0MHoCnKinYTSCOQQcAN8j0pJX5ErWUCDFKAvIQzyNk6DANxLO4k7mH0KmCYz2fkEAlDM/reGAd84yWklARZDpHlckOzbvBhA0kx9IGcFJNJxenRMZdXVyxXizEsVyhzBwcHXF5ejfefGYOXIz6Kr3S3CUiBplkDmroWVYlkk5kRaCOFU0wiY4sjXEdIaALz2Xov5Tx4Gdi63eRuG7WtREtldoWx1uAKNxbeIKwDDVn+nYTIE63VxKRIEVLy5FiSs2f/oOatd25xcb5gs253krUMODMhppbNZknX9ohSIbBpnmAc1FPHwUlLsoIAACAASURBVOEhFrhMnjzI0Ce1PTkFlDUoZemWGrIhDgM5iEQsJ/HZomSwGXPAWcc2GFprRVU7jC7pvGCuy7IkDOIXm09nWCd+Mt8vcUUmxJb1OpKyx7lMNwRCkHtEqzw2S9A0PYvFirJSFBWgJPdKa0NMXkLViwJjHOv1Bm2loMkj4Ult5U470p4dt2Nx/DnK97DZNIKLh3GQIVAOkeml0UM60ue0QhuDHiEKo75P3iNkSLclr2ltdtANheS7OefwQXy8OY05X+aV/MkYdnlOW8Lb1ssUg0Q4mPF72Z5d2yBg2aKO4bxZ8pu2YAOtZFNktMjxBKYRR9uA3j3PtGZHnQM5w2KK2LKgb1oC2+9P7q3Bj5CgDCEl3GgryylLW2lGz+2u8XnpAdp6z7ZySNg2T3pULrz0q8lfyX/u815pzMbfvzrkACERdr3Qb52zu9yxnU8rbYcZL39tsRfGjIh3M6LtxyDqXRhvynRdh1ZWNqhj8x0GkbLmLdXZR7TTGO3H53/J3Vu32DRr/BBFah8j5xc3vDg/Y93eoG1kUldklQSENtaUQx8IKVBNJAszxjyeWR5UQNsAKmPtyEId65mU0tjnp9ErJzAuGSrLdpKXP4Z/78fPRnOVRTYi
WGy5SJp2w8d/5un8jG8eHzI0gdm0oFsPrJctv/i39nj2oeL8wzV1GRluDyyy5vsffMomr6kODf1Ks3puWD7tmE4MxUGJspIoThKdrCsS+3uOMEQ2i0TTZJG4WJGSoUQS41zCIx5NjZjJZ3sV6UZTuQnaObq+Zza1YyGwIfmBy6eKf/6/fM5X/to+R4dzwmVJ3AyUR4JEB432Dt1rijJRH2ZyGfl3f3TGJy8+o643OBfYO1HsH1n2bgfc1ODSEXcOfo4PP7ngJz94gS4zutLUtUy5cspCbbNiitcGYs6gEq5M7N+1xLVjfa3JyXB0eMLbX3iN1mzQ2qNj4uzFhufPSnLqCVGQlt4Eeh8lu6GomByc8vZrMy6ePef8IuIHjUoWZxUhDmLgRXH39BbZXeFDJvSGYakot7CHDEpnFoslYdCoqNEJTNT4YImdACZmtWI28fLzKRXJADmRfE9RCNI8KU8OMmlt19I8uz3wPVgtBZAY7QeBApiSPAykIaOSyL0IEW0F0qC1wg+QYyanMYxRZwYfsKP2mpQoK3nAxwD9RuGdpZ71WDuSCnPCWoh+vCOVAVUysXdR/ooYV6gi48gc1IZVKImxoLY1h8eHXGjL0N1wfdVRHjpm84x+raJtSppWcX5+xey4JGXDzfqG/+ff/C6f/vgDLhaXOJOYTAz1fsnsQOP7gPcakwqKCnynsJNIINCZnuvrxHzP0YfM0EeKQrFeyEZFHrTSLGYkZyZnRVVbIccNsFl7vI/SSHWeqtZoI3Sq6XRO07TjoSuTeW0U87lh6DzNxtO0L3CFZb2qwGTm8xnPnz1iOp9TWUthBlbrM/aOFavzyNnZBcvrJbcPSpbXLfXxjOPTPVRZ8OLqCm0V2WdUKtmfvkWMilW7xLrM3t4+f+/v/DpFYfmn//Sfs7xec3xyC8ySL33pK7x4/oTNeolWmqENGMbQyT5TVILa7duevJeZzecURYEfvBSv49ZyK1NhnNYqnXfSmRTl7BNkr3h4pHjI5MGStPivSmeZTCtiDhwf7zGZ7jE/uMXXvvTrDIvMxdmCy4tLVC5w1QQYdlPcHDWuUsS+py4nvPPOFzm/ueDs4TkqZwyaNniCilSTismkoLCOoekZmpaTw0PsbEaMgZubBdaWpKgEQNJ1WGvxr2wnyjGcM45Fc4xx3HYmfB5pcw7ctMQV4kUKPrFeN/jgASMyXp2499oRz57e0DaBjBDjbq7XXC4+HAsWGfI4Y9islqyXC5RSFFYT4sDiekMYVixXnrI0kANEjTMFSfX0yfPP/s//i9n8gLtvTvnSl1/je9/7LsubG64uNiP9M5E34Mqao+MjzvwgmGkYZTphhC+8zOPJWQrNLVZaay2I4pBe3kPOSrGhM0obUrAoKspywFrJA0zZ46wmBEMOhuwMhSsIBj796CEx9qgcsKYgZ8V0NuHo8IDbt46J2dMPjUhytRiy9/b3uble0o+blKqqaZq1FE5kkYAaaZYzAhEoCkcapVUvDfhSDA9+YPCewQ+4wlIYMdWnFHbX+NZv+Gr+ztZcv22uVN56X6XIT+RX6JWZsJPUbgl60mhUlcF7wzDSAduNwTrN4fGUt9455v3vfcTiukfbhNKCiLZ6Ss4DxmSKMlPXjsWiFWy+thTljDfeuEeRPftTy3rV0zSOp0+eo2KLs4rSajZtFvnUDqM/Du50lPcgi0dNaJsZZTKu1Oztz6kqx/kZgGIyKbjuVyhf8/ZbrzPfm/PkyTWanqLqGPolfQ/TyQRlW2KEEOU8TWqgnk5ou4HV42c8fvyM2b7GFpmmaWk3PfWkIsVAUcwoq5IUkU121q/U32NDMeYmvWxyX8rLhmHYBfLK54wh6MGjjUYpKbTDuMW11o6NkjRXEMfCPGGtIiVLCFv59BYVbuQZrSzWlpSVw6+b0dcjOZMAm836lY3WFgIRd689RgE9qZQwmJ3Mb7t1KYqCsqzH/Kowvia78yDt8sDGa25LclUq77atMizQaC2h2zvSnNHU9ZTQeayt0FmhU8JWkWtkQ6KSSDS3jYIsxWRTl/NLWqL0o9thxLgZUuOQKL/M+kppuzlKu02xvOeKbbDwn/9II8Vzu5kD8ZQ1TUddu9370Q9+tEyMpcv4H8a+xJRnBJEvBNgRfJMibgSPbAmgQ9djTJaIB83ojRM7xlbu6X2QAN8puKpiOjnm57/1Lj/5yY+I2hNUy2effcTmg0sur1rqecHh8R51PeHgcM7N5SUxR3SWPD/rxPMnvUWib1dyrVipY+Qs1qSx3k9JpPgqOyCTtCwLqrqgHzxa2xH+9P+/uvrZAFr8D//97xzc2+YUiAzAqEzXZEgF+7M9JrOWq/WSF593XHwGDz+E7gb2tKdymXXT8gf/9ntcPviEWD8nVz3DRvHxH0acMSiXoYzsn2SaNqLimJngYO8w0m1kAqtH7aorFAoJsyTLFqOowaaEzoYYS1YLT5lb9o4ss70JlTshtIlirrBzjy4DobHce73l1hdKrq6W/Oi7D5kfVITCkJwnhoFhOVAnN3odAv3Qc33WYsuGqk7ympRmcgi2NPi0Ybl5wiefvo+2stnq+p6+E91/3/WoLF6xFEbTtZKmMcZE3zRkXXJye8bJrZqTWzNObr/GN9/7daL5IpE7xFyhwoqnf7ahqmZUBxNCvWFGyeOfXKN1xfzWCdfdhrMHC2w44GB+m9O7p9z/6CEqWTod6E0gKc35gwalp+g8IXuH36yobCZERcKibUXTdXJ4xkDoMs3SsV4t6LueyRRObjtuvwarRcInQ0pCiombFdP5PtaIkblUJcNFA9FT1Jl6ZtibTKlcAUmmFLM9jbOaHIx8zaxRWZpvVyZG6qZI3rpxBTxO9ozRJCUYzxhBKzcWHJ7SaeqiIvvEtJ5RFkJaWq8SxkaIGo3B6JK6mqOHE5YvLtncrAltoKonKLeiqGuKqsa4gjtvvMHdt79GnBhuworF9Yaq9JSHA9H0rFYD67Xm9K0jUirYrDouLh/xtXc7cF7w2BiGQXP7aMI7b0zZm1r6NhDbA66bhvmp4+h2zd684urpmhyF3uVKh600y+U5KlmMtiNONjCpD0Z8bmIyqZhO91ivV+PDSgAC1mqms5L9/Tknp7d44967XJyvqKsZZTGhbz3arNEKnJ0wn+1zeJpYLS22GGlaMXBxdoExidB65vUef/lXfpVVB21QRD/gUss7J5YQ4M6d9zB6zoOH9/ng4+9TqDlOT5nN93jzi3e4XjzHFYZ7917nl3/pr/Lf/pPf4V/+v3/ABx98wN7BlL/5t/4D9g8q/vAPv83+3hF/+Vf/Kv/xb/8Wf/In35YpexrIDOMDp6RtNlxdX/L02TPW681O8rjFEzsjpmyjJb9GWwsEMdEqQZ9nIpNJTV1VuMIRQiS2UNVwdFRxenuOtRPOzq+4desW77z1Lm/c+xr/0//4P/Pg4QcMQ4exluVqSUqBwXejV9CRsgQyh2Rp28zZ0yvOHp1xc7EYjfYJbSw+Br7xza9zdLSPVoq/8df+On/64w8pzYS96SHHR6d88tEnFNYS+wHfdajRZA2
iOnDOjcZpRO4nEyohs454/DyeqWXlUEgRmmLEuZpHTx7x9MljFotritJxfDLnxfNrQhywTgqIST2j0jWlKbBGNutJeUL2ZKJk0RlDjpnF5QUvnj5ks1zhSk0fI9lkXCUP9thHrN2g9JqmGbi4uODp0wd03YbJxHJ6t6Kuj3BujlIFbdcxdB337t0bC4FtPo28B9siz4+h0MDOs7D9822BJNKskhQyKcoQ5+J8wc21NPNVVVKVJTkqFJX4A/xWFmcoSy1DjYkFk/jqe++SEEO894Hnz5+xtzfHjGTStms5v7jAuoKsFD55prOJNEwAWczseevlGD9C9Fhb7KSPKSVmezO0FnrnfG/O0fEhbdsSg+C7GRUeIht7abKPMVKW5a5A3coRt/SuXTOm1O51pBSk6ExbCbehLB22bKkno3QqGTSW1cJz9+4ph8czUu54eP8CPyi0zWgjDaF1nsODe3z9va/w7pff5OnT8/G5WWBMzXQyoyr3efows94oYi6xdoJxlqgCIUUGH8aNRSdZgwlApuHGZNkKaUUIAirRNlPPSw5Oj3jrnXcJ8WD0U3qadsPNheYf/5f/Cd/61i+icPzev/oDTm8dM9t7WcSG2HFwVNP3UJYzTk9uEVInUlDtsQ6ctVgnQ6s4GJJXaCNeN2scfdezXN6gjWw6T09POTo6oO3WxBDH6zKNKgW5fuu63jXFKaVdyLs0yBqltxvPUf611b3DriGKUeoYPUasmBGtLrRQKb4z8vplgGkoixKtMyEO48ZgK2Htd34xae62ErbteSoFc1HU6PF8stYJaXbM0ZLLPYs3GPVTzeP2OtxuVFMSH6APAW3kvgTZwnRdR05p16MKvQ6a1Ya//Hf/Pr/5j/8RX/3WNymrgtfffIP9L9ylDT31bMov/dJf4uzpM3wcfW5qK3scz1MQSqiRvLAttXEYhp+S2m6DlLeN5stQaLP73l7dGo+3OowNzXYoFKPc29NZRVlK7uKrjaZ1opJ59d/b+tPMmK/10zLBlzJFWVAUY2i0AEdi9AJtITL0PdPphLqejJEXmi556ukh+wd32LQbmn7BN37hXd56+xaojrt3j5gUUyKa3ifJZ0s9Q9fKVttlyFrum4Oa6bTEGFHT1LUMAEGzXkX29/aYTKaQxWdnjZat7khUFIqlAPByioQwAIlh9RcDLX4mNlcyVRtEJpMgRYeyhmEDH313xQ/+oOHv/fYx/+F/8R5Tf4Z/9pQPf3/JvFAUxzLV2lwu+eCz97n7VubOvGZ6ex97t+fdX4bPf5iQyEzLajEhtJnKJmJO9H1ms6hRBIwJovseN54xCCFE2UyHIbfglCXqjFctEyd+nicvbtCLltKtuHVQogjotEdWU/TkjM5rfvS9JWEwWDul67JkggwVNlqy7Ui5gKol1Qk9g4M3BuppQQCGNhLbSOUtt+YOO3O0Hs4OEk8+6ShbRdSQgjywnYasE652lGVFs5GcmNImjImsmw137BHXi4J6nji9V/HuL36J+2ePubhs0c5weHLEu+99ieXnkWdnC5bPOlQZsc0Ktw48/9PPWL4446//jV/mPD/gJ//uxwydYnpwgJvWrC6ucDZTGksKid5GmuWGXDuM1hgFQ3T0Xig+Bk9RVBKy5xLWBELs6dYlSidiSixWPR9/YJlOJxiV8H5F33pCa5nf8yibSbkjmg3XN4oq71Hg0UPHarEBZdk7MpQTgMDNVWToBK0rdJwBq8QcKhNSKXik+UIkPwq0SbL5MXo0knq8d5KZlSLZNqicefh55N4bFdN5Ic29CuQh4kMiqQiFx5iO/cOSHE7RruToeE70B/ThGlVuqA7nfPTx5ww3z9Flh3MT+nZDv6zQE9AuMt3POHeLB/ef4VcZ1cPERKZfr6lLzepqYLNM7M0rKqOIG0W3gb6LDAg96/wqcbVSlGpCPe/IKbK/XzGZFlxdNdhuinISnGyMYW9+RAYODvaw1mJtxXLRcPvWa3StwArIHW+9+Q6/9mu/hvc97//gO/zgx/+WalrSNsPomVBoW0mR38nmbEgNk+k9bp3ew4eBi/Nzkf4kg8ESkuJqGXjy/IZN2zKbGW4fTDg6Dnz03FAPZ6jwjH55ydsnd/n8/gV/6Vd+hfd+/j1sVfD9H3x/zAtJvP/DP+Lv/4Nf4+nT51STxJe+/A3eeetbfP7JDSh4+4vv8iu/+mt85avv8q1f+X3++I/+mKEfsGUhMhU8GPEQyIOasZHSu4lqWZa7wkQUb07odk6PE1+F0QVnz1ums0RVW6ytqacNd+7dpprUxJR4+PQF1dTy+Nlj7j94ytD8PrODDdVkjtFCz9ozBT/33nu8eHHFxcUVy+USV4wPRD0QYkdzI6G1ujJYV1AVJbNywvX6hi9/85c4PDnl6mrJ0wcP2YQApcZOLa5wnNy9hVGGHBN+8Cw3K2AsikZ/UddJOKfebTgQ/xFh5xnoB89ysWZvv6YoNVoH4mihPDjcoygqYtB88JPP0FQYY1FIrh5KssFiDKQcUFbhikIm7GPswmLRURQObTW2BJsD/TrSD5lYZdyOvmWI2RD6SNicQap5972C9SLw/MmK5kHk4LgFNaXvPE27Zm+6z4MHjxhG+UxVCQUsxoDS4Aop4oPUprvrYFvYbIvQlBLRB5HTkMVHY0pCgOmkxOpM1/aEAMo2OO0wqhyhHYm3Xv8C2mhW6w0vnl+xP3uTzcpysTjn6uw5RT1huWlG0IZQT63VNN1KpKZaIAXOWoxWUBaQR+mO3s3OidGzWNxweHhEztC2AjaYTqfUdUVKgcuzF6QYR6/GKH1Vlq4bdluDlBJVVe2aLBgL4vyy6bRWvIP90I2eHIEG5RQoq2L0RCR88Lz22h6uyCyvPX0byclR1hJi+/zpkhfPAmWxx4prchaZmXWWnAzr9ZIfvr/AB89m3XL7zuvEdE6ILTdXmbMX3yMn2X7mLNKhwpaAQD+2uU2D11hlAZFJayXxKlkNUtdUlq7vuXNrwv7hCXV1wtUVbNYNzbojes3J/lv8d7/zX/Ebf/vX+Wf/6//OR598yHzPEVPHzVWPNorCVlhX0q4Tzmigo+vOKKctprL0TWboPH6IKO3IxjGfzzk6rLi4fMbgO2IAbQxFVZLJ+ABFUXJ6esLde7f443/3nTE3SDYY2+Z3sViQs5D2tvK6nX9UCZDkVZz4tjnYSlkzGj8ECuUwYy5iCAPWVFirSVrypLz36FiSkx5jYzyhj6SUx39LttxSsEvpur1mBPAhHrC6rinLirKs2Eq8JKy4YDqd0fc9wzCM26tSngXj1T4Mw0/5hrYNxBbMEENk2Swoy3I8P7ZDAHlPUhZ5X9KG7/7h+3z/hz8G36GahoPj2xT39umagdx4nj54Thc6fPSjLFFiGGSrYmCUVYfkZSCnXr6mV+V929f76n9vz52Xv1d/7u/msYlOzOfTVxpUvXsPZNGRd/EBO1kciTSk3RmxVaGIv/JlOLgaoTyiZxRpeFk5ysLS9Z6+7+k7+brOVYSYYfDIAyMzsyf0y577Vx+Qyfzab/x1vvTuu/joGdKnHN7xfPzouy
w3AVKBSYmcO2LIbCMihj5jrOPqot+9F0OXxutIS4yNVqxXHWTJhJXNfcZVZvQgypY95QGy4+BQfOXL1YoVA3/Rx89Ec5XHVT9GNMihc5QuM3hH6oAucv/HSz7/dqC91hzcKnjnF2acf9gQOgndUwbUVPP8sqU+K7ATjT0OfPlbMy4etZAL6mqKyXsolbE2MJt57DTQLhSLS4hRjLkKIIHCsl1lZ52wpQMvrGGdINsCVCQPcUTorth0nu464juDSZZb07vsz0raszNiJ7IiazzYkjRmMClX02tLnxOWhEY2drGPaGuxKLCgHdhKk4IlNonUdbQrI3hdC05lQoIYJGPIOZEadk0kh0xdG8qpQk8D6+Ul1fyAHB1Xl5H7D59wsn+Atg03VxvuP1hzerdl9k5iLzv6B56bpy2qtiiXKLQmdwM/+P4H3PuK5c6pZn3Rszx7gXEG58bJBgZtHUNusbnEdwpPZDYrwCn8hcL3GT1qtXM0aGNxBahBppaVK7FWCrdnT3sO9xXlieC2Y++BzNAsiEOGHKmOMnnIguIeDJODOQeV5vxiQZ4oslWEnPC9Aux4ACSKKu/CVdOI6fT+Vb05KDOmxo8m6xzFgC0yEAhRNoSFc6hCsd5EhpiJJpCjphg9DRlohoH5vCc3E5ou0rY3FBeOvvMEnXDTiKquCV7RdlfkdY82A6ZMhOGAy6cdGI9SGe9XTEszbpRq3rx1j9vH+3z24EegOlwZyUp8efvTu6Kntg23jvYheZaDyJCKqqeoYWgz2nhcldnbM+ibSC4VPiMNJ73g01uZGivVA5mutXSdZ/AdKXuWqyu++93v4b3n5mbJb/7mf4QPPX/0h3/C/c/vM504iEJ1S6EHAtlIWO2zZ8/ZP5jz9ttv8/GHD7n1zh182+PjwGePHnFzs8LnTGUdfe84vylZdxdM2mtKl7EmcnJ0xJOnZ1xcveCjj0swU/bnMy7Or7hKPX1fAgXeL3HB8ejRA/7F7/4f3H/wkJTg448/ZLVqsP93wePHz0avUD0GoWZylqk6KFR66Sth1JoXusAHv8snSVE8ZVoXBC9SBecg5p6+26KBS/b29nm+WrFYBjbtINPyIaBiwBWG/aM5R2/f4f6D+wx9IqUOpaCoEvfvP2LoNcGLPG3oJb9II1JorTSumpOi5/DwiNPTE04OjylmE1Z9YLi+QVsIJlDMSoqpAHKePj8jITADq42ADGLGWiFy5Z3eP4+UPDnc47jNUaPMbFvwhCB+JWO0FBcaCbuNmeiFQnfnzozFleSEbQv3TC3YYW2xpiDESLsOqJwpy4KDwzlvfvEuB8d7bJZr1jcbppM5k0NLNa1ZrzY8f/yCbrHBaov3irIueeuNfXJ2GPOIt95+na9++U2evzjngw8+GO9/RVVZQhRpS1VLZAMgHhgUWpndlH8b+Pnq1Hi7xXqVIDmpHYko8rrBozJMJiVVqXFOsVoGMoGUEL9A75makrMX58QUx5waePzwEYPvKZyBWS0hz4WjH3piiNiikADMESphtNmZiHOWoUBVlXLuhf6njOg5J3zYbrOUAH0SdG1LTOGVYu+lRWT7PW4lgda+DHB9tTg0r2z98pjNtN1U5NFjYq1B6TTGtYhk/4tf+Aoxr6jLNUZtuLhoKApDzpHNWmIfcs4Yp3feohRlc+JDI+9pDCgVCfGGuioZhkzTSIC2NWNQcAI/DGgHIfQjQRjxjmQJBpYcRivZUNpTTTRFKcCeO2/exhaJ6XRO7fa53Hg2mxt8N1C5iv29Uw73v8yPf/yY7//gT/n8wQPxKI5+qbqqqetSticxkJLH+8AwgKnk/JjOHNOJIQdL1yaBMBGJbmAYJBZBl4xgJivbVq1ZrFakHKlKt9tmiBTwZaDs9me1bZK32wlgxJlLYb29TqRQV2N2mzQJMWiUsuPniQw4RTA24ZyiqCx2EMCBbLISXbdBsrMyW0rgFpShtfkpv5W1dkTDy/niXIE2dkcR3DZJOcs21TlHVVVsNhsm9YTCifdwvV6/lGyP92lZlrst1vZrWWu3twJ6fE93flRGf3p3hQoaTUanzMWLJ0zCipN6TrSRp8/uU04tIQtYRyP3Qghxl3uXt1AIM+ImU0JbGW7J15J7pe9fbppfbQa3ABE13uOyyYq7+3EbQgziN0NJRM0uB2yUHw7D8HLrlbJQEf8cmCbG7ZYLtkAna91uIyaxNgpn7O59MsaO22jLMEgNVU8mLDcrJpMJKRcM0WBdxeHhEY8fPeP88pzn5094I83IUSAhMcogSqmIwpGzeO6s26oExvchM3IUMruAbq2IUY/yVU1ZQlZSV6WYsIVjPt9jb7/mxbMrJrMp872K3ntgyV/08TPRXI1OPdGzGkmbVyoSBkX2msrA6izyvX91gao6jIHbXylZXxmGmzUpJLRNuJlhcaFY3ySqBdRzhcNQVRqyw9qSvivJaUIIEesGDo48ziouXgTCoGGEUaSs0Hbs9MnoJDQ5Zw0pacLolcheoaMEneYiE2jIAZoboJvwxhff5q03vsLi7Nt0qzP6HND7Hq0LkpLuOqMFJanEfKmzJvWatfdMypHkoqENcH2T8EOi3SQ2y0jotEhhtGxVbFakPIY66kzOI9lPy7xTj9CQdtgwUQ4fpqyXhif3n5PurVE+0XU9i0VD55e89vaMg3s1fq25frSmI6ELMGhySDx99IzZ4THzUhMquHm+YT6ZYmYlbRsJQWQNOilU1gSviBkO9yZE24snbBi3hUQ0hWyMAhATzuUdgtgWCh8D65WHmYFCkfKY/9N5+msh4CljyR5yNxBUSSgdk0oTW2gXo7TEaXIUfTQmgcoYJ9O26KVYikG2VjkrMqJh1luaD8BWZ681CZHCxCw3ss4aXKbpA51PAiQJGlVpXKnGzIRMUqJBj2mg71fcXIq/IZmET4lULClrR5j0+Eb039W0YtMnNgsZKtgi0Sw3lJUlm8ykLnjtzmuoMGVofwQkilITCbTB04ZAHP/fwcEE33R0V542DGgVsCbjEelf2yZCMhR1JjmBzcSYGcIIEgiSWK6UaPA3m63ZOIyyi5b7Dz6l7+Rzjg5fZ7G8QiHTxpA0RsnhFlMcNeMzmrDBD2egMnfvHcnEtSiIwdP4lufnZwzeo7D0XeJGeQkMD4nlasPJ0Yw7915HG4ctFOfnz1gsNig9pyoywXekqDHaMp0a0+94cwAAIABJREFUZrMCpeH66pyzF+fSIJeap08f8vjxQzabjvlsKkZ7a0UiouRaVqRXzrDtg+Xl1DPlKPlyWh6gcuIJHEaeeBptMrP9EfmbFcaU+JhYbXoyYsavq5JV0zGbzTg+vs3rr73Bp589ph+6ERqRUDZzeXmDM1PZ8mTE/2YT2xDIpDPOGmyG+Xyf41u32TvZZ//4hJtlSz9EtIncXF9xcnLIpK7wfc/zZ2cYW9C2Pc4ajHpZeOU8Gp1H+Yp4zPIYxKhG0IV+KSFU8vDt2iBeHWfEA2sNYYjk5KmrktfeuMMHi3N8kK+TcpTiOgu5rJ7WpJzwXQs5UZUFk2nFdDbh9t1bxKNDmlVLzob62
HL39bssrlf0nefZzRpFpHCW6aTkYH/Cej2QY83e7Da3T9+i9+IZFBmSRFv0bUdZuV1hss372U2Osxo3lK+Ss376QytNWRS07eg/BLZhnVLYWMqywtmSZnMtA76cSQRikAbl6vpaijAjuUcvnj+hLMtxoy4yOmMshc1kk7GuoBs3AErLfZeI5B32SkEWytzgXw0flYJpGMJuC2edG2EdYbfRV7ArYMfjUd6TEWhhtNzj8AqpbCQsvtx8ZLwfXjajO5kZ4wZp3BREzfHRHRI1moKhh7PzJYUrd5lp0vBkdIYUxgGuknVKioNAXoymqBT9sKEq9kfewgDI92SMwigjyfYZyFJ0SZq7RhExTrJ90JZ6kjG1YuSZgM4cns5Yr3vJoiIwDCtC+P+Ye7Ney9Izz+v3TmutPZ4pTsyRc6bLrm7b1dVUFVVAm6KBpkHUFUPzCZD4BIgbg6CluuUaPgHd6stGtNRiKIFQQ7XtctrptHOIjIiMiDPus4c1vRMXz1r7RNoqrn2ko5RORJ7Ye+13eJ7nPzUs5jMKU+L7np9//Ixde8WLl2dD3lHEWjW47Ek2JQiNKsU4NA2ATRRuDBA2oDS7bUDpUvRPydMHvzdVGBvpnA2F07RtR9d1uKF4H3VP1t6iIG/ado+GNePXrWmJfHZjIa3QoEca8GDogkFrhxj9jCh2whWGclJguki9a8RJLmdi6KX2Ug41DC2EKaKGPuObDYWcb3K+aGP3Q5086L++aT0/Nh1iWiGar/hrdDa1dxiU9Sy5fbchyDJoVUb+/khxFTOMjM81RSpBawKZXbPFXitO3zomuszXz59zerqk8x0pZAzg1GDVrkZjFGl0jDV7fZxIPt50A8yDM3Te0/RGy/U30axvfnajccmtbT5KdGAjqiUItFQ7I01y/Dfdrzksyl4Wk6rxe/87GJEyQcWjscPvzMOdkIdnK2hYVVWst2tCjFinmRRzpvMlTbPjy6fPeH1+xq5Zo3SLbzUqGdQwkBGLILNHOs3gnaC1Fl8H8n6YJ+tvaDyHLEqjJcczEPcNt7UWY93gvKlAK4x1LJYL4Ow3D/fh67eiuVIKnNVEpNZYHsPmNew2Ad8ops4wdwf8xT/9jOP7ivvvFSzeKTn99iOe/eUX9NsdhogygUJNqLea9ZUmVxM+/XFPbCxuuCCvVpdkW9LkBIVlfhp48p7m6y8DTTZkIsp1hM5iJ0KlyFFDZ5mqzJ17M5TVNK2nvmhYrwOlO6BUE4oMy8MLpoeWlyGwvkyUh3f4G3/0H/HV08irr/9fVqsXzMtEVTYYU0CwxAYOjhX6wEEsyL0jNjsuFeQYKVwmG8X5Oby6aMhpFKIGxLM/yQLVYJTFYfB9oPcJHzJFYSi0FcOAVcYEgy4zTbMhG8hGU59F/vKrT7lzesjp6WO+/e3f5ad/+Qn9o8Dxk0MmVWK92nDx1YrptCRlsUOfac0XP7rm8EkBKRJRPLxb0fmKp89XrJsdxTRjUcTYkXIFzIED1utrYmrRKpO9wpQZW0LbRnZ1wuXMbF7gmx6jC2azCXfuGur1BU0rgXWpsPQpkvuK/sqzWzXUnSP2BVWZiATWm5bY9hAVV68D5U5zdL/CqB2ogHMJZSBlRb8LZKR5DkG44WPquUyELG3Xi7OSzkN+h0FyswbXoqypO49xCZclxDp4KZ0KHbAlFJVi6Ry7jcHELcZ65i5ws7lkMXPkoOi2mV5HHrzbM5vM6fsF6AmLw8CPf/wFKjqCT3RdgM6yPIBd6yltJGvNl7+8odt5rLYo49i1iVx4nl+8whpDNS0oTObwuON6I1NzqzxFqmi9YX2Vubr2+NhwOpuRo1Dpykmi3vZMyiOKIlAUGWMcNzctZI9zYrNd7yJVNcUVslavLmv+yT/6X9lsr9g1r7GFIQbIbDF6hrFGmgCj2W5avFa8evU1N5uvOTgynJ+/3lv09l2iLB0qlfS+Ydeuidkws4qblefhg7v87T/4Af/sn/+fZBw3N2t8f41SBQcLRabD2iUqV5y/uuLu3SOaZkfXebSBSTWnaTeQOpzRnBw7+q4lB9AknJHGGgwpBOFm6+HC8X64dOQgH7N9tNaDHbcb7HulGEpRsVxYHj1Zsln3bNaRzcYTFGgafC+FyLvvv8+nn3YsZg+ZTu5Rt4l1vcGgsC5jCxHNz+ZzSIbUigkEZAl11RUJ6EJHiJ5Sz5kUJbZ0nLUrXj1f8/0Pfx+dFc+/+oLPPv4Vf/y3v0sOiderc7rtBuNKohe6jtgp6eFbhgQ5sw+rZGgwzCBgvL2Hh0YJaBqPONJNSLnBOdFUkMCagocP3uanPzqTCWJphb4Xa1QGOylZHM6YHRRYm1FZ3Be7JvHzv3qKiks+/M7bvPPhnE8/+TlnX+84WEyZFEseP/Gcv/iKbtdy586c5YFjvbni009e8O1vf4/dzYzPb17z04+fcv9JRb3xbNewvcl0rR8m8PK+BIkxQ9E16opEE5pi2kdijJPeoiiYTibMpzPaumG3aUErtNEs5hPW6y3NLjEtJyyXC1JaIfalQQaQ2bKrW6ypODo8ZLGc8YtPfiHUqm4mU/2cCCFSFG5w/TKEPjCpStEx5UhOEasVTSfUxBATNzdrptMp1hpSisO6lSJP8ssYNCjtoMeQUNHCWbRh0NAoQKhd3neD1kbaxxACbsgyynmwdg4BZ4V2aoym77o9fXRszLwXbYZCD7Q1Q9/B/HDGfO6ZTLb0vmU6WZCSp5pU3Ll7iFKKL774kpiy0ClVJtDie3EhM+WE+dSy2WzoG8k9tCbgQ8R7RQgd2mRc4Wgbj7EJdB5eeyQDx8cLcjZ0feR3vnfM7KjnxYsbVjc9rphStzXbjaFPWywbrjdnlG7GRx+8zXa946cf/4z/4X/873nng/eZLxxvv/uIv/y/f0RRLAghsVnX7DaiA266HSmKg5nSCk2A5GhbyfUKPlHvIg8fnOAqQ+drEgmnxXmRnAgBymJGWemBmtXTtgHrzFBgqyE0VhBOodTJcGtEMMaCfDS2GLWGUmCroWFRgy65x5gKCZg1g8FCMTRHEVdoZrOSlFtCjCLN0FroooPJg8QaSIMsFuhh0B2We8RopCmOFuZNI1EzRmsKrbm8vNz/faEGdpRlRdM2BC8/M0YNiKHeN1fee8lAHKiRb9JatVY4JUYMMcYB1TLkEPEpMnXgU2Tdd2Qn9PmIIevMcn7AyekBm/WaQMAqhdMGpZy8b5AMxSzhyRlxsxtpg7eosmRmjlqsEblSqP2AZzT/0ANzQOpuMfao61oa4iE/zocwDMZGB+9bk5OUkphTqPQNuq8gXAz72+2pmoJmGZwTnWXTtLSxG5pcNTRDmb5PQ3NTUBViUHR1dU5RVCyXS4zRfPyzf8nVak0IEWctX315KXRxJZ4I1hp0KvCpJg9mLTHVgMUYyRvLMUrdGMXpcyA+AmJ0gvYkEl2IHBzewRjRna23O7b1DSFG1pst2moePLzHx3z21/Y1vx2GFn/+wx8ev5vRyqKTY3fV88XHmbQzmCBOX9WdJWWVKTWETWD1lWdXW/QEIpl6k5joOT07
SJr53PH4o4KLzQUvfxmo9JLFfEFna1Z1h+9rVi87nv2V5+xrePzWgoODOdXEkHKLtpHea7ICU8Jkqth+5thdGnIomE6mnH/RYHB0MWOKKe88+oh/9Qdv8fXzG24uDbvrzBef/ox/+j/9Y85ffU1WLeVM7FFLpzBKDC12reHP/tO/z3RR4mOm6QNFdUyZJjR1pm0t3hfU1y3qUgJDkxHYtqyG6YAMVbEuUViLTnGMniQmMIVHF4LOWAVOJXZ9ou1boq+ZuQl/8nd/D1W1hLgmbtcQZly9Oufq6Q02av7oB9/l2bOXlHWH7iUXp8Ews9B0kbaV4La/9+/8gE9+cc3VVYvOibdPLO+e9lzvFFnJZnn06D5N3uHbhujF6rtJAbw49rnC7GlGyhi6JrO56iktHDxS9IUcXjoOduJerDN1MnSXPUYHaRxlfoerSqZlS6EzzmissehlptKKpCR3IkRwpSb2GWszk6mi78vBYAW0gpgjQY1TapnEj2iWHQS7InwUtM2oAqc0ExPxE81hFZlMDUVV4Iwhe0XaykHp5oa2MQQLzhYUCezNlsIVqMUMVx5SugOM06zO1izmSxaLOctpRWECdQ+oktAGLp5/yVuPd7zz9j1McYe6mVJ4TzlXvHzu2N1YnAZ7tKJrGmYzy8lxwWJuKcsTQhab5eQVOk7ZNTVH94+ZHC5IxolV/GhWkBVNkANJJQbHLEVRimOc7/xw8STQW3xYiyA0C0UHxEhGJlhi/07O3Lt/wmTiqNuG2WJCWzdMiwmzaiqHvVMcnR6jDaTUcXwCdoqsnVhy9irx2a+e0vc73nn7Iz54/ztMZwtO7h6x3l2JPTOJEDybzY6b65amBqLj6npFUZWQpUjO2bKpW9qux8c0WE8P9rTwxkRYJuYyAdSDfTAcLA8x2hC85/Bkxq5eU1aGo6M5jx6fklLk1asNbasoyikHx3M0hspYsadH863v/h6rzQqVA5evXvPxj35GUTpBrpXQlKwuWa827HY7mqYd7IjFFr6wBVZZVBSL8KOTU6yzrFc3fPazn7O+bNlte67Prwhty7/+d76Hp+F6vWbb7jBFxreKRDc4G7qhkImEwWLd2gJrJxhdoK3GWNA2EfeuYPJtVOaDD96n71r63hNCZlIt0TpiHaAC16sVP/6XvyKrwBjmqLVmujDkWNA2nqurC9brFmWnLI+XHJ0esThY8PTTF7z44in1rsXNF/zO3/l9DpXis7/6gpdfXZJCyasvP2d2WHF4eMjjJ4/5g3/t9+j4gnuPDvCh59XrC55+9RlX5x2T4pQH99/ivQ/eYb1bsVm3BJ/QyrCcz2ibhnt3HjCfHGCUZbO5QQ2uWXsTD6TxzFnR9Z6ma7CFUAIzCaMltNUgzX1btzx//pRqkjC6FOF8kpwtQQQNRVFRVRMODhecnV9itMY6hXWw3ayxRuF9J7lnYUeMHe0Q4J1yoihKnBuLw0DbtUyqcmh+8kCPgq71UhQPGrqUoiCxWovpTMrM50uUEpqPaJLsEE6sCTHQde2gz9HD75fC+/DklGo+Q9lEokfrxGw6F+Qiw9jEkiq00lijKZwVmpkNnJ1d8fWLSzYbeU913dL1keDF7XS93u0LS2strU845wZdi+iQeu9v4z2UEhqlHSbuQ3BsxhPTiOxIVtykLCmnCyKJSMcHH32HV693+KAoJwXzheHrL1eErif6nr5t2d40HC7mvP76iuvLG6rCcnQ44fz8V3ThNb0Xo5ntNuDcDB8DTbcmpnbIfpKzJURFiJquVgRvUJSUxYzCTbDOUO92rFdrMjIkrMqJmCPkTNfWlFVJWRZUVYmxSprpFIgp0PueuhXtsFjmixkBIMYlSRwolRojOsYQ4QG5H4ZgwWfJSiwLnrw7ZXni0cWG9XpNzoGjg3tYW1LvWs7O1sPdLwiV6BbjYDIWyGnMoQp4P0Y/jKjrSNfPpBjpWs/6ZoXRmqosgMx6fTPkLIqUQBohNxxK0pgVRcVicYDWlhiFxROCJ8QepcC6grrZ7jVGxkoTWde7QS9YYKyhTwGSuN25omRiHTZlYgpsN2ua3UYGtetA6DIpSjbehx+9Tx+HLEiGaI6MOPy2nhwzVTkR5CcPVudGy12lRP9ntN1TK7VSw7mRB2R5pDFCJg0osWR6WivxN+R06wqK7AXRJwniXhQl0+mMpm3l+WuFsYKUz+dLRs2eGp6pgGLyeY5o1y21VM7FmDLaOiazGccnJ8QA9+8tCb7n4vwSWyjQaR9tYaxjMZ8R+0Rfi+YyeTU0hZKZa51C5QKVDX0nBjTGic42SL4OCkXoBR1NSVBOAK0y29ZL/UMhWtF+y7rZsV537DYRzQGXL77+aw0tfiuaq//2z//rH95532Iw5N6weqlITQ9Zk8hEFXBIZkHb9jRtRLkJm13DvftLqrLA95muA7Sm2ciDrOYllT3m9Zfi4IXOlPMSH5VYEJuMKRUhFNTbAkUhrjcq0uwapqVlMS2Z2pKwinDjCD6CCegyUlaBosy4mccUPW3fsL3OXL2+pl51xN6zXFpy8EwrqMoeYzt21xk3S7hCk5Jmc+Np2sjZ2TU3m2uyqnnw6AhrK1LbYRFnrcncMz2U0F+FInnRK5WulKDCrNitC0FKglBLJLW+g+ww2qKtJukIVjGdVRwtJxzPS1K/4+osoWxFtTRMTrYszYa+cdAV4C0+9gQ1Q22gAqZlxleGrIcg3ZiJ3nJ08phNvKDPW7T1VBOHq2acXyvu3n/Mh9/6CN91RP2Kpt3JRsyWNNCksgKltGSGmWLg0sqUxfsAZSAYyTpJMbGYTQm1I3eCKlQVZCxKOWIA38mUxRUz9MxgFok87ehzIvZyQFmbMTrjgNZnYrbkXBCTR5djZhHoqFBRCoOxUBQKx2DNqhkuBmnGUNCT2aSE1QV9r9HWUk0104OEmmpWzxPrS7i5UazOW4iKucscziN3jgNffh7BZJYHBxzfuYNzd5gXC5ptR/KaYjLn5N4BIckU0vsWU/Y8evx9Pvt8x029485hwX/8vR+wfHHNvW+9xeRwyvZ6S5cg9wXzpaYoYH0dub5opZgdHKOETsGQQWRodiIaX8xnqGwJTaK7aUm+H5wJxSWrbRvUIBxFKTSWw6MTQuhJqSfjUWqYmoeAtcKDVypzdHRA0zZs1hvqusX7jjvH9zk8WIjrUzcgkkEydQQl6UlZcrtC7Oj9isWhptlF3nvvQx4+vs+2OeeLz5/T7jz37z7iw/c/Yrmc8/2/9TeZLWbMFjMOTo747u//Pm2IFK4YRN6ZtpdIArjN7dJaiwOgHpGLvL/wpRi9tefuuh4fPEUh73PUnlhbsL7Z0gfJposhSZZMr2mbHVVVcHrvmGpm+PCjJ6xX19xcrwDJQdM6MplMmVRTQA/W4BYzNKwqg0ITsieknqwjVheQoGsaut0OExOprZnbgsplqipwcXbJJx8/ZXV9SdfdkPOOtnZo48SIQ0NKoj/QRgKlq0lBThE1UEtiyPh21AbcUsCsk8Z7tLUdJ9+2yIMFsBQIxgh/Pg/Ukhgjd45PObm7pCg1za4le4+JBbE
xTNyMb33rbY7vKrbNhtp7brYN/WbD6cExD966xzvvvcWTe485WCx5dvWU6WSO00vOXsHPf/YZl+cNz59ecP76iuOTIw4OlpC1hDF3ka7t6Xsp0FJMtLU4kFZz6HzHZtPgo2cymQ70mMwYIl0U5Tem/FVVkpPo1XKGvk8YE0D15Cwagq5vKIqKMQNt1DXFKM+j7Vp2uw05RsZ58BiMeksNEte/EAJ3795jOpuSUmK9WVMWJSHEva7E97J+tBKaoITHlnvnr5ECqpTQ4bQZs69urYlH2lRVVVKMDzbZb07bR1RvMT3Atz05eqG262JANAeqGgHnFG3bo3TAWijLgpvrlrrecfb6mtVqCwqJAIhhMDjwNE3D6EA47k+hrd1qh97UgAn9Sfa4NJxIBEeEnDTz+YFkmOWE1rLXN7sdMQcm0wqQLKm6WdF3O6yeELopyTtUrJhWSz748F3eeus9nj9/ybbeMJk5crZcX1+zW3esV55ml8kEKQKTxhqZ/LtiaF4HU4IYxI1UGFyyvp48eczVxQUxRtwQ+qtQjKYUXdfSdz0hSW7drVYoM51OKUsJzu26DqNu3R5DCDRNMyBgeU//HLUzefjZ6ekp89lSGpPY44pEjjNyyiyPOybzwPVZwXvvfggqUNc1602NNoqqKvYoUN/3pJgZM/PGeKc8FP+3jnnqDQOWUdMpuiCQqIRuMJ+RmIgC5xxFUcq3K9HKyF3X98znM6qq3NMgU4o4V2CtGxA8t18z4/d0OrmlUSZxEh6RnRilThGXRUFh+17cRHe7Dc5airKgLCu0MaxWqwGFuh3ejQixdYNuLcvQRoYfQpUuikLIggPdUmIAZJ1kBnfjFCRLNsnQYNwbIyo4UvWsFXfbfT4ft/l0t6HK45l++9wlFy3tzz3v/R7JAmjbVnwVwq3bIYrBhXikCmessaxXNX0v92NIojUeDUycNeQcaJuamAbULEaKwuLj4GqaAaXo+g4UOCtOtmKEFPfDhFG3dnvvKHIX0dqR8PgkkUKBiFYO50AR2Fxu8HX72+0WmDN0nZhEkKCsFE0sCVGaK9Tg4mINCYXPitAE+j5Sr2vIGlsW7OoWreSX9G3m5oz9NCWEnnqzxRBkM1UFMRX4mAh9pG4C1cQI15WK0+MDfJ/RuST2hv4mY4rA4QHoaaKPEVxmMs0Ug2bGhzXnlx7tEuVCaBGKxPQ4UxbDRNZoitZQFAyXS8QVmbPXX6NdRruGYhLYNRf4qMmqFiFzDcoZ7MFAUVPSbGrEvU4XGeUym6tEMuyn1RCwBXR1AmswQ8ab0pCjIsTMdJb4zndnPH+6odk2bLaJbFpcThgzJSlDm6AJmUf3prx4foUPMu1xKbHLYLJF50zKkV8+/xV9XpEIpKxpveFqmyknU6bzGaZwbHdr2r7BFgpbGJo6MZk6jJNMqlHUaW0xiHAzqEjo00AvEn1esprkDf1WkTthp1elJamEDwGFRisnl2Th0RNPsoGgE1ZBVRSkIIeOtRBDJik7aKciRmV8uJ36WAP0EnXIqDlRtxeNGg76nIUiE1OGDC5bcgpEBV0Pba2YLipckqC7ppEJsQW6rUcdR7CanZ+wOJ4wswW2FxMTPV0yWywpLi/woR6mpMcUhWEyUXijiRp+9fQ1k9JxUk1Z2MzTzz5l1kGLQ/WKzcozLzTtAJW7QhpD6zq6xpC82PlbY/aXj0pxyK8o0EVBbGX6/ujhCbu6YVVfif6HW4G6D+KGRDbEKFNAcUYb1WsjvcTs6SdN09A2vVBvshQ3ikxVSrhwiis++vAj1usN682Kvt8RvSPkRArCcY+xYWGmLJaHrNc13j/n6vKSGBOz+THT2RFFNScrw3vvfYeiOuTLL7/k2fPnHNY1Rht8SnSdWO7CrROWUt8Udo9fQp241SGNqNatrW+mbXsmkwkqW7o2cnV1Q4qZHMGnQNAZ5wpi7KU5GgM3A2xudmw3NV3fSRDmRFEWx4y2713X75+7rMc80LcMd4/usJhPUCbz/MUFMfZkJWJ8Uubw4ACnFLHz7GJkvVmRQ0ez3RFThy3UMKlMaCUZPpL7YrGmRKm8t1/P+tYUJg96Snk2cU9RuVlvcc7ui76+91STCRhBSKy1RBsBGVDFnCirgq71JJXofEdWmRQyIXpi8vjoab3n4M4p7/xOoq7FFMDlnuvLlrfee8iTJ+9y7+gBi1nBT77836nbNV+/hmZ7zh/9wZ/w2Wef0W7XWG05XNzFx5qr9Qrvd1hT0HuJGAgxEEOCrLhzb8FsqVmvPWoXcEqCPeX+yRgrayWmUUwuA5q+70ELMi50Fcg5EkK7bzxLO6UoLMrnQeDuh8GOnFF91xJijzbSRI96JdFfjYL1jCsMdVPviwgJKd4Mn0nYa2MAwlDYpZwJMVNVbv+5ZdivcaUkRFspht+h9wjuKIQfnQBvG6uBQqqkCGvbHSkGjE3iJGvESCoM+joFRC1I2HzuKCtDDJmr8zW9V/tmyhZ2rzfbnz0+/MY+1W9oiW7zim41YONkXbKDEBMjrcgM+UkD0liUCqdB9ZCJtE3D189fUc0trsw4XbC9DmRfcjA7JIqQmBQt5XTKt777hIuzV1ycX9NsPL4bIl9Kw8lpxeXlNZBJORAiZCX3juitJD9KEd9oGiNtU3OzupbzaVgH1ggKM2Y83SJMeTCEyMPZNqLv8m2M3TfpDJ9ljGGPdObBIGFsbowpRP/ng1jyxzjQyjSkhrbTXLz2GJexpqLre7bbmrrpiCmhTCQhBl0xxT16cpvRJHdtikPDkG8R4fFLDYNZoWyrN97juGYFcRcERT7rGEakBnzwNE29P5e0Vsxms2/SHt/I/hrdLUeN0bhnRst6+Xfl3FfxliI9Noj7sOUBIbo8v5Rhch5R4ds6Iw+d00hLtG5s3vLtuh6QpzRIRd58Lloz6CDlOgZpBHMSjeL4t/dGNvlWbzfu4bHJfrOpEqqg0M/3+WD59nl8w7zG3A6m958pSs7KlPF9z3a7wbmCrusETXaKlKNEKA3IW0Q0oCllxOlZ6oiu88MZlVE57l+71vI7QgqoIe5wfF3WWhk2RHGOVkhtbHKQxjVADJ5yKULKJPZreL/h/+/rt6K5IkEYnN60ysxPLPXZhNCIm09OSjipQ8gXWtF0PbnXXJ9vsUVBwhIJaBUxTjbgzWtPH7pBpBppm5bYdczvHFHNpmTlECfIFSl7+k6js6Zwjvc/eIdnT69oW0PsBWK1i2uWDxRBwbrJJA3zmUInRfCJkFrq0DCflhRLDU7oFEVpMDajrUz5JmnIT+paUsyUk0zfr9DZUBq5FFarC7EWtxC9pa9lQqMmUFjZKFjJjmFIpdY6kwd3Q1vK4ko+SXPVDALYrAano0zsI02MFHPDg4+OuWluqJ/W3FxkNlsFS8WTZUc0ipgtbYo8vjNlfVSwiR1NE8AnfBI0R6cMeJ69/pKqgr4TZ8Omz6x3HfcfnFJUBZtubhKXAAAgAElEQVTthpubS9rQUc2mmLkhNomq9BRTWEdP1wWZYjrJBlJKiBdxoFlp4RahMPhW0W8TxES2kJzBWMm8IV
UYPYGih6lHl4Jg5gRVAfPKUm89oQVjFT5mUAP1Eo9VhqPZEX1oSbkfOPeJ7M2eFiYHq6AEe9GpUhjtsEajDZgebuIOXSi8T+y2iqosMG2Pb7NwzVVmWhrWdU9C00bHblvw4J0jimSIXWK73lBVB/gcMQXoXvIi2u4A0BweVIRguL7p+fyrr/jD73+Lk4MZua750U8+5jvzOZvrHfW2peszZSOJ7F1tqSaGxYFmvoS2BrKR1z+YVfjeY6xCwKkCYxxJNRSV4+Hjt7i6vOHq6bUMDMwwTcxDDkkGpRJNs6XvJYdCpo2GEBLO3QaGFkXJ5eU1McgUTS55Q9d39H1EIVql9959ly++fEbdbOUgjxO8b8lJpq0xKg4PKx6+9TZXVxuev3hO014xmx1yfHqPyXxJFzNNl+g6gzEzUrKcn10MlKmCtm2o6x1t191ehOrNi+L2kh8Lgb097/BzY4xQLLMUR2JP7chJ1kLT7JhMpoNZSEIZRVUZ2m5L6eS9hD7T1Zmvn79gdb2VZ1ZmJlPDbLKg63ppMGOQTLs0CvcHUXeGO0d3eXDvlKQ8L19dE6KXM2WgcR6fnJBCZruuhcblGwonNBvfZ7SqRChMBBWHgYdkkmglNr3eS9ikyjJVz1lE0immoSBhQAak4HDB7lEuhWYymZOil2wlrehV2vP3E6Kzq3et6MlyQg+hnxGPp6Xutpxdrjg6OuTRkwW+3xJjTVVZbs4aonccHtzn3W99h1evv+BgYXh1cUOz26H7OX/2H/4X/ON/9E+I/XN2u4gzU7a7NU3dDuGpHpRF2726HGUjd+4dgulouoAtM4Uu6Ha3VKlxip5iYMz3SQl631OWQosFMRyS4ktQI60LitKKg5e2WJ2HQiYOk/w0FMNDo2s0YyDrGGy8dzkbmqsYR3OewdkwDEVEHgXxTtC2JAVgkDA/VNZkNRSQgxW5TMWThCAPwwMplkTnMcYT/Lo5Qh4m6kpldrs1ReEojBP7fGPRpif4QE4y1U59ZFJNWSymuBJ2ux0+9IRt2lOAtBmL/Nsm7tedGt9ssn7dHv8bxXoG0Gh7i3opa4lBDCUYNJblVGGspuukvtisPcXW8ODRgsJNuLpqWS4WHC0O2Wyv2Ww3rFYbTh/e4d2PHlLOIl+/fE1TN6RYYa2irAwndytWKwbkOxFTS/JypinxlUMpizVpoFhG4jAIev36JZPpdBgqh0GjdGsZrbUYaN0iCFIYj457o6Ol1gbjLG5gFsSu2xfF4zkoezMN681inWW329H3wnaoqor5ogK9IXrN5XkkBc10qtjWK/qwEzMoJUyUmCIhyn04np3SZIgBjuKNQebwYvRgwrDX8agxWPc2xHss+MeGUCkJgAdh+cjal7Ntt9sNqL98V9WMpmn2GXAjHdYYM7gOlvsQ4TedQN8MG04p4tPtGhxdL4uyHM6FSAi3SKvsgxFcUBJULU+BHPOQCyjnCEMjOWpcVWYYcI7aKvljNQxK3/z8bhuh20YsZUHaxqb2zfckzysMn3fB2FzHocEqijeGKyl/Y99rrYQK2ba/cR6kHFFZE5Eoj67rMFZRFGJk1vY99S5jBv2oUoq29aR8u5+VUvjODzWxrBNrhUUEQsn0kUFSMNzlwxCh6/phCDnoQ5OjtJCNhqjQ0UAAPfAHUFBODX4V+Ou+fitogf/wH/43P3zwgcVNM3aSKBeR+Z2CXdvS1IHcW6qZcD0TYaC5JZI3hCCi6F3dCl9ayyUTY2S7bqk3LVpJMZYzBJ/pmpbZdMp0NmO6nFGUmUxPsws0dcAYx9//D/59vvhiTdMHbKW48+AAvdjR5o4uZ0GBpqKnUVn+va4XCtR2HQi+x7jAcmnZ3czRNmKLiHWZTEdMihgMGYOxkWKayVmLKF5ZtIVinnBThTKZFL0svjIwcY5C2yF/YEqfDLsmU29BJUdbB6pKo5UihISxUBZ6ODyGxZEUlcsolajrzM9+vqW9KXl0f87h4ZQYHdsbaJsNxRRs4Tm/uODk0d/g8cMDTPSsL1ZcJiOBiY0n+yBzCDOjubaoUGC1JlGzWQd+72/9Pmh49tUXrFdnOKs5PrzPw3tPePvtxzz/6mtcoWjrQN8ldLYkeqFrDhafKRbgOibzKa6sAAO1prvZkqNMy5tGdE/LpZWJrzNMnxxglseUTuGKjJ4kmdLnQJcTwWiyEf1K7PNAG1UoVfCf/YM/w1hofEcbe8pFot1aUhoD+4AsNJo8bLxpVWHUlPcfv8dbR3eZdB1Xmw6SbPy+S5x/0bN6WhJDhy48pozYkLAzK43/Dtptu/881jlxowJZJc4uX5OTpywUhct89vkZlSl47927LBdznj3bMj8omas5pVowXdzh5Hff5f/49GM2QbJRFvcOubh8CRmaTabeZkJvODqeUrgZhaswqiBEj9YRky1lnjBVM3LnOVzOSQGUdpSTGevtBZvtDTnHoXhzmME2S2gN4H3NmAkycrJTEtG8FOeRvhP61ZuX6Gxe0raRl69es96sefDgEZvtlvPzM7q+GxrZkt4HisIxuhv9u3/v3+RP/uQP6Lqe16/OOT+75vj4lLc/eJfZckbT7PjJj3/K//bP/4Jf/OJTLi7P0CaxvpbmI8bfPDxHasRoUPBm0TYWnuPPxss9DFNcuXAjvu+FDovCWTcgTpJc74YJfFO3hBCZTGYsFksuLq548fyZoHu2hAxHB4es1w2rmxV1s6EoFJOyJCdxe+y74XlkT3XnDn1Rcrmpub5+TWkKdIRCae7fPeXw8B4f/+wXnJ2dA4rpZMnV1YVYt7uKjJOCqhjjBEbqX6TvRaRcDJk3IYr9vLHC5VeoQQyehsbU7S/rmCTP6PTuHf7oD/6QthWaW1EMzy+MxXEeKIcOqxKVK5iUc3zwZDzr1ZrdpqGwMx7ff4fddQc5Y1xm3a5ZHBxgciIkxabP/F9/+b+Qmk+4vukx5oQ/+Vf+lH/wn/zn/OKTz9nutvR+x/n5K169Pme+mHL/4R2evHWP2XTO2fn5UDxIAfTkyVtcnLWsb1pijBwdHXJ9uWGkQe6dA/O4TgYapJEGKmczUHl6XKmoijmFnaGRBr/ZdWRkAKSVovdirz6ux7IYmswoSM9olXybWTP8P0Nz13eepm6+scaNscIUGPQRYp/s8b1HGTOI6xPBSxEynU2QWBPFZFogQ/kxSFR/o9gcJ/9j05fRxKjwQZCDxcGCg8MDceAyhqubG/ouorWlsCVN23JycoeExHMUhaVtIrPZTBpdnXBOstb6rnvDxew3UarxTHmzsYph1KXIe5eiWmhc1hVUkwVHJwUn9zK2ioSQ2N6Itq1vO1RWFMWUFAOL2Qyipdlkml3kj//o+7TNjovzcy6vLnGTltnsgKvzhrOXGzbrhspOaLudFPDI4KTe9ZSlQ3KYMyloYlBU5RRnHajMZFLtrcHFpdPQda2g5TESojzDyaSkqqo94jI2JNKQymfmnKPvhQYuwxDRY7nCDXevDDrGZ6e1FrvsNFJKg4TKD+tvNpvx+NF9vvf9D3j322u6ztDsxHAjxsjjdx2P3/do7VldRorSY
U0xIF9JNERmyMYa1k/fe+xAQZTPcTiL3vh8xybrzebaWktRFPszeXQBjHEIOs9jsw/Be3rfEXwv+/sNdFMh6wsliJYxmuvr6z2zYWwI54v5HuW5bSBuab2jC3Ua0B7fyz6rykrYAYPmQBwgM0rZIXRe9pVWdt8sygBydJUUIEHWvxqCcgeESkFVTURaMTBr3rTXH5vFN01MxgZxvPPGs0RQO7dfO1oLRdj38rsZXpd1Zo/gheDfQABvA5BjDJRlIeYWSqJtvA/MFo7j0ymHJxU32xt2tdCxu66naTy+G1wpB9RqZCXvzZRQzKYTwpA/KD2Awfueu3dPKCsLWlwpfUgYpXG2wNmSlC2PnhywXC54cO8u77/9Nj//qxcsjxVJeayzvP34Hc6eXfx2a67+uz//4Q+P3x4mv0rhc6Y8yZTFHJ0K6quO+XQ6aHFE+Gu1QLg5gzNG6F1JQoi1thgtYbUhRpQdlnGOQ86KpignKKcQOKxkNltSFkuMcfjYcHm1IuTE8d0ph3cL2nTFut3S9wUpFOSo2NY7DJnKVmjlaDpP6jUXXycMcLDQHJ845jPFpHJiOR0iBkvWJUn1JHpC0HRtpusSTZOpa+hby3SqmR5XzI8UR0eRoyPNwQOZNrV9ItmCZue5PPdsVoqudkwmGnIU6h8GY4phn8Zh42n63hGiLCatweqMTobP/6Xnk3/Rsb1Z8vYHH3FwVNI3kWwc1lQc60N+8s8+5fxqx6oJdNpSbzYsFocUpkDrkphLYm5xA8dVa0fOBXdPTnnnvQ9Zry54+exTjk8nnL3asb2JXF9vuby+4GZdM6tmtJ1Mv++eHtH77bAx5LXH2HN8twJVotAULrI7r3EjhK0Uk1lJs+2IocSHTFYeN5vQ1Uf0r7b0ly2hhvqlYl5WQlNwMKOi7CyHBUwrMJMMc4/fzlivevquw6cbFjNpRlIUykPhSrQV6qIxkskSfSaFyDuPH3CyuEvYHXF9E9FdpN9Ewg5m0zk5NeSgIWqmVvPH35ly52TBzBVMLBxMO/q+oQuGl683fP31DakxPHhyTLPpWJ1vuXh5Te97lkcHNF3NerMjK4czcPblVzRNx2y+5A9/73f5n//iX7A8OCWlkuvzhuQ7jLb7EMem7VBZY+x4gWYOj8uB9z1DmwplDOhI29a03rPtOl5cvmbVXHN4OME5QaN870UPpYZL3AADlWK0P81ZrIST8KHISazdnbOUhcMYRYo9q1VDCB2Rnph7NtstX3z+jOtrodRZY+nCDhC6kzGag4Ml89mSaXWPX3z6C9r2ih/829/j5uaSr796ydPPPuXFs1+SU8t05nATj7IdiQ60IStLVVZMqmqPUo2XkLWWxWIhgaddt6eevDn9frM5vE2uH36mNdbJxdd1oin0vmc6mbBczCmrkqbpsYVwxlfX1zTNFs1AL4wZrQqurm5omwajM0VhYeCSW+s4PDzgzskdtps1s+mEdrvl6vVrrs7OqVzBzbZmenDAyb17FPMZEYMtFdolmm5Loqcsl+yamrrZEtNQnGvhpUtQo8MVMJ06ylKjdKDvW2me0EL16PwA6A4aNWP3RcuIXkCmaRo++fkvubq6QqnEZGqk0MgGa4phst7R9575YkZRGFLuyCFTIAYgKSRurla8fHHO+fklShVMZgdkdnzr+x+Qc8/16oyvnv+CLp/j2w2+XjIvHvDg/kP+q//yh/z4J/8Pr8+esq0vSSnhbEXb9vg+obXj+GES2nGU77K0fPTB97m+qNludnRtLfA6RhD2AY2S4kx/Yy24wuF7ybCytuDgcM7mpoFcoo3FukwIG9bbZk+JEa1AxNly36zHIbE4JyniILJcHNB1HW3X7O36jdXstvU+Q8kVQrHT2g7T/ITvxTnNh0BCDFoWi8V+UCDZQ5GytFgrDZ9YnEsI8p4qlMfh020xtUd/B81VCIGD00OCCiQD2YAyms16xXQ2GbRfmQcPDwcNR5DGsJFcOKEoCWNAhjZCF7q1pjb7/Tf+22+iy+OeztxGCOwbEDUB1eFDw3pTs1lp2qYnJ4WzBQqH7/RAeRbqckyJvvf0jWhWXeEpisyXX/yC7e4a62A+Lfjlx19x+WqDipk7x3POztZ0vqeaV7iyYnfDMITJBG8gVUxnM9GbFeWgexFjlNG6XmthxsiZ5PfPRt67/rWzKWKdE6RDjY6MHmMczpZY64YC2w7011uxv5iyDM0M0pjM5lO00XukTKlMCFIMN1vPdhP55CcrulbWvNaavm84f624WQkFc0QQuibiTMHJ0Qnz+ZKmafdrxzkLg9aRNxrhkQ6qBq3POBwYGQajfupNiuioCZzN51hn6LqO09M7xOg5PDzg+OQYVGa32zCdlBK2baShzClRN7XY2DvRYKWcGTMJxsDlsZkQTfFvNng5iUmDDJ3ktbmiIKRIGPa5RAjIXSkhvmIT7vt+f2+mkIYIgjGc+9Zi3RgjCH8UffJoBHKrt/om60KYFmGcS4h5jhqNMdL+TvN9GHIb09Cs6b1T7Pj+xgZspPSMn5HWt/b5tzEdg04ZMQq5c/c+k2pKiInVaofRU6aTGWV1S3cOXU+K8uz1cAwxviYyRkuzppAYpem0xBrFerXBdwGVxcFRmsxEWTlmsxJ8Yt1e0daGh4/f4U//vX+DX/7qM9bNjkxiUpTMqiVnL/765uq3ghaobUFWU2Ks0dpTTDTnn0F7uaPfQDVRNNuO6WEWbnrWg/uUULzEtQYpCpVMQeMQRuiGEEARRQNKMqFCvyX4As2cpmtR1qELh1MGesPLl2doG7m49EIPcwlXWcgdyWdyMhR2AezY7jqZKqWC0EeWJ5mDI0M5NzRtQtsGpUqMyQPNRtM2Na5UFGWB94aEZ7dLdF0m+kiOmZdfJu5nzd37lpO7TjK0phrjYHMNzS5weGzQKnGzEjTEWs3bT045P1+JsN3KZrTWkgdRW5k0pgCVhdKXkqaYwP13Ay8+8dTXa9Yvrnj4Ow+4ubxm221p2p46e4rTGjULHB5NcLrg6szR7AKFzahSDZNYS+wyDIi17xPaJJ49/4rtzQVGJUL0ZBx13bFrepTVg0uQ2BR750V0GDTaBVKy8sxLUEmxuaxRBRwcO6yekrUnJU+MftCuJeqmE5pIoelfXrMLLWW9xRLIjaHeBcqJpzzSmFLRbRrKWNLtFLE0uEONnfY8++JzlvMj5tUEXczJqQclFAEROgdclSSzS0GMGqMcPvRsmp5ZqZkdHrN8+QlbkaqQQiZvPLMSep2YFZk7c0XfJrZmyirXqFJxMp2zvhGOsrMWUyfOn75kNvckH0kh4NtMNXUYl8GUGF1xcBR4cN/QP7Bcv+z58rOfc68654NHBUpv6PuA0w2zuaINBdHJ2pnPZ9y5W1HXHb2PlGXBd/7mQ3724yuCl0vMFRbCkqPFkqbbsW23rNuGxlsYmsqcIZEkX21wZzQD2hFCx8i7dk4h9rcyhXJGY0tL23dYJ8ik1oUUPoVcCj4kmralqNRg5S5BqjHHATmxzGZTjo6O+OnHP+XZV1fc3FxTTSxGT7g88zx6eJ+2r7i8
9qzXG8laqhQog/eKNhqMsjLJHVCZX0eo5OKypCxFldCkbukgY0EtJjmKUWg85oFIoTBMNLM0RyEGtvUO6yzVVHFwdETX9qyuNqQ+fLM5wePcQBcemtWY5L9uMmbMiG1433ti7ABxdopB8fDBqejbdltePHvBv/V3/5TTOwsur1asVtecPgx88cuNDA2cRUIVIaWelNkXkynLxFUKf6HRdI2E/k4nE06fPODlq8v9hS7PTRPjSE+xjJoFpS0pBXof6HpDUVoxxfBBohFMQbKevu/pOugH16eYd1TTEqUMbdMRUo2hAh0knD5nnj17TXcTMUoxWzgOjxZ8/leR9WpD32zYba5QZoU1/XBLi3nAfGEJI20oB5bLu4R+Rc4ymdVW8dOPf8RyPuPO6YztNrBdd6DGybAM98Yg5bGASUkTgrjU5RQHLWnBW2+9hfeC/lYTRVEtUUpsz8W7QFGQh5Do8Rne0qDGgjmmN/O3Bs0FDMMNaaSUVlTVhJQyfe9p2w5ni70GY2yO5M8aQQKNwg771liHCMIDPidUjt/4nN8saN+kFQ2XMdWkZLqYUE7F0S3GiG9r3n7nEfVmR73t6TsZJHTdbmgeJSZDaYjRY4waHOXyfm29SR8bi8jxa3xNb9KetNagsiAvQZ5Z261BSZFZWIcrHKEH3w0OetYR9RrfRbQyOKelMDTCYnAuUU4DL189J9NRVRplBhq0LVjMlkyrKSoHlGqZLx2Pn7xNVc04e/6C9VoCyvU+t0iswm/ttTN1XaO1IgQJmtVGDG3YU77ygGLcNrtaK2nAfD9Q20eK3ZitJOi7NAWDhnNv5S3PcCxuR4pXNan2TUzf91irBTXJmYuLa/zLTlAbnfdn/24bUEaTk3w3dTfQ9UuKokJru1+/I0tkgCT2P79dW7f7AOQuG8CffdNSluX+bB4ZB6P7XM6C+N6sVjg7ojeR4DusBXIY6JnSNMYUyVnQodGsJSv2jdTYzI3PakTP2rb9BhJWDrRA76UZ9iFTGI11Fj1ooVKW/aUYTOa1ouvEYl+hSEgUjHVaAtgHDbkxI3V9YAekcU/c7sU3rdu/oUF8g273TYrj7dBC6zw0+WZ/J477aUSolQbve0bzp9HsYp8dNjRl3odh/Zh9s1oVc3zfs1q3kKthLf9/zL1Zr21rft71e7vRzGa1uzn7dFXlsst22Q7u4yRIOCLiApC4QMoV3OYrkI+AxBWKhITvEBIiICVcRFEEJEIJIIUkTlK2y+VyVfmcOvvsfjWzHWO8LRf/d8y1j+M4IIiUdXS0zz5r7zXnHHPMMf7N8/yeGSRWs0cNUH2mRilSeUDOG6Xx48T6fEVEkXLBj3leUopknnh6/RRZznifWF2csQsBazPPn3/G//jX7xmP9xgXJTxcNYT0cIz+pK9/I5orYzv6xQeM/g3F3NNYxXRbmO4iOShcYwmDrIqVkvVnKQqtPLlocpYCVz5c5QQRKEpkG+LArW4+XXAWSvbEcYSjF6R0kcwiIZU2DPuRblEqmjSgswKXaRtLLIngE0ZbJl8gRFLQhGBorGKxkOnd8VBoG03TJbk52iy42aRxMdWAQDC50LiaMF5EJlGS4nCfGbtC7EA9UrQXCrdwlJBJUyLsM1ZnlmuD94UUCrEGgGo9T8lTbT6L6Nx1kedRp6kooVsUFKtrRX8G+Thy8/w1aEtOLY3JxDIJ6WUdmExiMgrddCyuG/ZvJJMHreUxsvjL0EI8KsjF6M3rVyS/x2gjksgSsK3BOAfWEscD0+RPxcdwFMqhUrHei+XiPh0CYSy4bFCTpcSCazspTnwNOHay4UNZNAZ8Bn+AIitiScMoTPuCaQwqQNjKhynGFrfo6HrDyGsOcc9xL8hibA8m07aFkjI+PFxQqm2inkuAyrLlsY7sLMaP5FqcuFqcamNolARW+6LYeMvrNHAoQZLrabifRhqjSEq2Y2E6sn13z3LVSUYYAp5IwVOKTIOsURiXaa4t3A0c7va8eRM5Wyv2h0Eml03h4tLx7kYag4LCWAipSDhwipikiLHKuFBoY3DG0rqGdb/A6oyu57YNie39SJwhBorqC5kv2pJdEkOWPBkrk0cfUvUSSKS27S06TnUaLpPAvlMULYGiKcs0rVuIzj96hfeSCZJzQEz14v+5ubnlsJOJtjY9L1+8lUnhfB5YmVzFWGhqY2Qq0tYZgw8TIYWvFGdz0TKOI841COJZChuKXMRFmlDxwHWj+nBTQIrtHKsMqaKrraEgkIMQfQ1Tp1KecsVQ10E5pU7rMhLIWOWKxsn1KSRKHk5T67btmcaJvu/5+JOPaLsF6/OOkiV0+I9+9JxPP/6Y169ecXdzT4gjbW+ISd4HU+mA0af6XtYmwUpDGYI0DjmDNSKz0lqxWHQ8ffKYu3sJyx2G4bSpmwNI5xtvKZJJk5N4WMchcXbeoq1HJTmWWlmMicTq6QCwTUOME+uzBVprjvsDRsP5hcU2GT+NKKt58+ItrnRYo8UfGxr29wHvR6YpcDwecY0ASXLdRIiBvXB5doG1HQrH7n7iePBQMs6BdYn98Q3nl09pbSZEzXDQeJ8wrZj8Zz+aroWreM5q2G09r3KdYn/rp7/FZnOD0pmLizV3byb221u0CmgtHjeR/MnGS4FAm1OmaVsa53CNwXvJM3vfd6SNkeD1Iv4+ANO62piJtGYuFGffC0AMMqWW1k6hjVwfSj1/JewUIR8WTuf6H98WzQXnbDona8jQd4sqQTzip8Dq0TX+MAGCY/denl+ujdsD7EAaK2s11rYCUDGKVJur+RyZHxNkOj4/Sa0UpRa+SqkTNRFgsejIxdZmMeFsJgSIUdDRqiQpd0slRFUvlGR6yRStcZZhOApKushwSZtIt1hgGkXMnjAMWKM5u2roFxqVBZHtbEvKCa3FUz1Onob2JCf+414iyR+qBSvSYFKHzLzntRE5mIXqIZp9cg90uwep3Eyie/CWcgLzlFI3hjOYBfFTlorhnzeA0zgRgmzr5c/LOUS2cl8vyLHJ4iZzrqGpUsTZyxXj7J0S18tpU/fHvXK8f+49XLPnBvr913hSG+T52lIhDoiagNrUWKMop/tK9SeV/JVB20PDV3Hm9THmY/7QTPwxCEdBIgFmSWKqGXRzfSYm1+qBAuMMbevwg8cqVT9zpdZGilkNW3JBWZHn5jIPO3jvfZYm1/tQG6S6EcyBOXxeiVHrdCxlcDw3xwrXOKyVaIWSC9M0or/i6VLMG6uHY5Xr+/Egj5w/x/Pmqus6jsej/LmkSEmgZKhJ3nutsUoy64quMAv1XnD7e0CpFOdhj6hict2+lQzaGhSWnAAl9x65sxZsJwsCpTQ5JvbbDddXS15taySFlWvAn/b1b0ZzpVsuLr7B9lgY/REbIy7CFKW+Mi14LaF9aI2xGqrGsxQr687iaa0E2YrxZzYtiqyErCArlE60rYS0+eOBKSj65RJiBlMqCraFkmlsi+sCSk+kaWTyA+vlGmMCsezJemQ8SGZLTonhWFg/djRmzeE+4ifPk6cWpxWqnTBGpiVZZxa9wodS8yHAmp7WJZxVkBT
TQRFHRXynGLrM7jqwurSsdU9sA94FjiXIBqLv6JcFf0wMe8/NuxtKsbVgTkLVI5Hr1K/YRA5gdJ0+akUK0K4di6vCIQTefPmaH37vNT/xSz/D9cfX6HbibXgFunAIkWk70beK9bOe/ct7YtACekiKHCPtohe6UY5oJVuQcf8OQ6KxjpyWlHLg7ErN05MAACAASURBVOqC1eUZtnH84T/dsi1b+r4DFLvdgcViJdkqRqQuJWWGY8aqjkY1cLT46cjy8gyKhBfOF4KSpQFuG4fTFrJHtdDYRGvkA5cmRd47SqNJh4TvMnqxZnV1zfqy48XNLf3KsHs3ECdH07Usn0wsewPZA4lcBFhQsmzrtBYvm1aKru2wbcu+eIbtyNQ6nNU4a3DeMhRNqyyHSXEzaB5fOV7t3pCcwZWWKWnu95m1ykLw0kUQ1NuBZe/Eg+E0JcGwO1KUwBL6vuHdm0BcBLyLuLVlnztyE6QAyhrlOpZXipu7XZ18F47jjum1RxsJecyj54ffv5VtnG0x2mIxrLuGHCdIpR5jQ3KZ2/tEKmJQ1rVpjzGfvCCq/l4M/hawHA47jLanzU5fEdxitq5I5yYToqnrfoVWDc4NPHrcsLvX+EnROMfhcI8xinEq5PuKXDYRaxtSivz48+egJ97dvgaVSRn6/oLdbofzGmsVJYLB4qwSWWm9GT4ER8pNe78/0PYirzHWQJ2ayUZmLiaq7r7e2LWWYm4upLWajdOV1qUg5UQYA1ot2GwPjMNE8JFV22F1wqdU7x+GWPX584S2aTooE8NxJMUISrFer/ngg2cMw8Djx4/583/+z3P5+BFv333JNHn6xWv++W9/j4+efcKLz55z++YNL19/weWjJ/hRYAfyGIaCkB+lWVXYxlGSYRwnSblXlqwiOWW6rqPvFrRNR9O0dF17et1yo861sQLhnoISRygxwPGQWZ8pke1m8aTmJB6L6ANKiQzFuZ4xJc5XS5y1vHv1jsZanjxbg4Lj/kizMGze3XF9cYWPmre3A8ddwO+k4bENEPJpOCGTemmApjHy4UePWK8uCCHzhz/4fY77Ha5BaKc20ywsqRxEftIWFquW/cut+FyMJRZB7GtnTj6MnMUnVHL1DOjCYX/k0eNrtBuwDp4+eUY4jsR4j3XQ2LnIqUVvlnYnpgi5cHmxYrlcoA3c3Nzg/XQqREvJWGOY4gN4Qjas06kJmT0XRYtnZw4RzjHgrHmYzGtpeGAOEW5JUdWMmYdYgqZpTs3K3FxZaxHqYSb6yHiY4LxQPOQRygRxzERfKFk+H8eDR2lNyr56t+T+bo2RYZVznJ2dsd3s6/dmtPRX8fAAjbVSWOdMqfLc2RcC4g0JIXB9/YwYM4fDgc32jlJEBgkyIJrGUbbqOle8tcB/5BrgQUMpC9petgjBC1rdtRKhEOPAYRwZhz3OdVxdWuJ4z/Y+cn+74er6MWPYgkpkwMeaW1RilVUJMELeo/mYgzOuvu40O85O/pmHole9B6OQTfosY4tRGmlnLGUGG8z+pCxNSa5bEGMsKSfG4WEDJKjzBq1rs6UKi2XH8RhJFbSDtjjXnJp/reR5z7L6efiilKJt5q1mrFaPiDblBGw4bbSYm5evAhhmoESMD9dukAaq6zppvGqDvVz2HI9HDscDOQUWy0Y2ItnLfUcLXKbogimqjmjL6Vybt14xzTCbh+d4Ovffk8v6ydOoBmOtDNe8DP5KnlUOpSq1QDmNs47VasXAjpwVwSfiaasnYxbgK43lQ0MnzYJsqWa6p0QezM2V0YUUpvq86/ZPz+HDFa6SQKkHGmnOhZhT3SLl2ihVuV9WFayj6nsoz2WOiJhlg87JuWe0oeu6So8dKErTtQtiiRhSzdQCax12VERVhxZFyeDaSh5XqQOYVOB4HEEJBE2rutlLCmcaURj5QC6RptWnLXhSickHeme5uLjkG58+gjLy+v/8fdAe7SIxJ/60r38jmis/TbTNIy6LZbOzbJ//EctVzzjtGA+e4D1Gd4S9k82VKxQzokKiJC0EFQ1T8rR1xW21RduOY9gJYUlpkbJkLVP5EWIYiSkwdkd2d46LJ+e43gkZrMn4yVGMx/UT3Rkc71te3dyhKDhnQCvWFyv8MFII9JeFNLXcbTRGdbS65e5NoF/t6VtD0wsBZnOv0cpDMmgUTV+4vxsJU8v6zLJYRUYdOF91jNvE/pBxLwo3X8JunTGPFEErYlflCijoM+oys72V7Z2moAwoC1BRuVm6dNtM0EJCVuiKiFUGMnTrQjgo/L1h5RKvf/AZj69+gUdPPiK6FePhOWcmE0PDcet49b0tTWogq5o7JYGnmYKhwSqHs5kpHLi4juw3mbt7eNKd0ehEu+zpzgzaKGLwXHzQ0TaBEjJdX0Bv0c6xPIOuT2zeFJ59fM5unzjuBvZ3G7p+yYuXt6iiMCpjjKdfXqGNEryvyyRl0G3kWGBKFoKipMAUE59cP+bx4yu+fP6KNLzh0bcW2NUZw9SwP/a41QXBBcZ9YH8fUTwmqbeVZCX5MYpGjN9NwbUacmYcNDdvPCp6/AR/sDmyvLigzWeMaJJo59ju96AKdtlBv+Snr7/Jl69u2WwGwU0Phe34Bm0blG7Itsdoxf3Gk42iW61osmN3cyPUQWOIiwVdPOfLL+9YLwzLheU7v/Oc3/jNX8BMrznsN7x4fYc/dhwPlchnM0WNTCmRRujaBmcNr194zs9WRA9tn7leF7p4w/agGKJmSJl9zNwPOyEB6lKx9hqlmjqZzHWLKUbZnDTDsXA4BHLWYBV91+Fcw2YjGvtQJVNNYxh2I8oZMcF7xTQm1udeIC+xMA6JEAXxu1wl+mWhbTNtv+Lll/f83Lf/Lb797W/z9INr/tp/+V9hVaJftCxcS6CpW+iJVDTWdozjyG63x+oGrYWW1TTNqVibp8aLxYLgIyFI0eecI2q5OaJUhQvUG2oFNwDVl6DqVl1+ZioZwStLozaNgBXgQdsqnHH4cZJpnTYUZOrf1jwlqs9BjPnzdl9zeXnJkyfXPPn4A7rlirv9wCefLvit/+Fv8/LFa7SyPP3gI56/vGOXPFEn4pj44e8dCDGSimxIhNjvadxCyhoNFM1wHHCuQzu5nqTgWZ8t6Nqezf2ev/e//h/Yxp0KplIynkLX25PkJ4RA00iunLUOimyvDvuBxbpuWmKmFIfTDVORkNhxjDThDhUDz3/0gqvrR/z6n/11Prv5Q3yzYNWd00XF/d1b/KbwZniHagxZW8bNRPIFZxcYXTgEaaiWy5XIKOOAM5b9fs/nn33O06eBx48/YHN3YLmwAiUKEsB5cfaEF89vePrBIz7++Jo//IPnaHtAGxn25KxrUfEAQJnPC6NELlYoHDaRH3zvlq/91DPaDt69OfDy9Q3PvtZAdhjEIP78+QvZvOjqO9EidQoxUiisVktSSpViJlPvtu0Zx7EWR6ZulcTnslytQSkpLMfhVEjnXEgRbGtw1uKcyHQLddCgTJWneUDknbPXx9r38O1zY1ObGG0E7+5sy2674V5Lno1TlqZdcHdzS/RR5Fh4UnKkcE
QZ8RPnaDG6Y5omrG1pnMiGhmFAIAf2ZMx/v6AupTBNDzJfoxVt606+nvd9lfv9RuSONY/tsAsC8UBy3KReFFWGyg60q5K6KDK3fWZ7v+VX/tw1x+OR4Vg47hzWnfHtX/w6Wd/w4vnAD37Xc3W2IAyG3f3IcZdZdFdMw0QumhAFiNP2jsXS0rkOyX+DN29ecX19zTT5GngrTXbbtjVHU8431/WyicuZ/X7P4XBAWVOzm+yDVBKFdZpsHsADM3VSa81+N9QttgUeSJTa6Foom9o0hFpsi1Ij5h0oKCTmkOGcFzgn25cYVD0/OobhiFIDq9Wq+uA0KWm5HgxHXNvUcyoxe8rm7ZC8r9IE9P3i1EDudhJcbK2l73v6vme5XLJcLpnGiVgx9RcXV9KIKvD+iCqZmL142KWYIkQBWihtUCiK0ie/2exn0uZBQj5vyVJKEsFRh2spif1DW9kuaatYrRbsdrvTJs0gHkJnF0AmRsU0RrR2MnQLuWYjFhpnoOSqorD1MeNpQDlvAZvKJwBF0xSsaZlBNM7Zep2txEOjcE7VBvyhyQL573EcmUYPGP79/+Df4zvf+Q5v3rxlHCca1xKiNN1dJ/lic96fRFE8fN5mdL1zDYfDEaU02/0t2rRAlR4jsuRUDM50PH12RgiZ3W5kmir6voDVBj9NDMMASmIlBLKUSZMXAnXjmKZJrh/Ve1gyMvgEjvcD5/0KbQtnlyt+8Rd/iS/efIZSv4ufoixuuPlT+xr1lRXl/8svpdRnwA5IQCyl/KpS6gr468DXgc+Av1xKufvTfo7pXfnmn/1pNA3+6HH+Cw6HI3pykKh4zoyPirY3NJ0iFymsgs9VJiTraUFH1wBEDYfjRFaFcDSs+oa/8O8s+Y//k59kjJ4cCwbD8uySv/ZffMb3f7RlTIXzJyuMy2i1QtsjyuxIHACRA5aKBA0RjNWEUSgui4Vi/7ZF+4grlsZo2vXE8sM9Nlm6lWX5WNG2Iy9fwHEH0cu5alpL9Eu+8bVPubo85x/+X99l/cE9OUqiutWRxmbslRWPRlbk6Hj7I8ef+bMtIY68fnHk+9/LfPxhg2sMxSSK9TQNhCPcvtXEAFfX9YaTO5EOaIF+ZB1oLRxvDW9+qDm8zigKX/uZn+DZT33M6sML7m/vub95DsVjgOG+ML07ctgMpJCwSlMcRLJ8YJVsGoMqOKOIPuKHjCoLYsjoJmKdaIyH/YBrEo0DVyWFRp2Jf8Jm0flrxyc/sWC3ydy/i9y9C9i24EewusM1DtfIlGu9vqZpWkpJ3Nx+yac/+RFvb7dstgfC5Hn80SU5a2kCs0b5kbv9lrPHT4hZMRxHsJHNZkQXkV9F7bh85Ag3d2gd0W0hNoXJG3RWNC7TtBJYVYJl3V1xvnzKov+Qf/RP/z5PPvyUtnWk5Lm5u2E8Bi5WK9zSUJpE3g4sz56yPW7Y73b4ned8ccX6asnxsOF4kGbz2aMnZOvwYWA47sTrNDqmMBALaLvG+B2hVTxzmg8XSxbPfopf+Qu/wN/7e3+LH/3gC/bbwtX5kqyPhFBwbcPV4xV3Lw+UuEC5gmklfyyu1mg90eJY5iXeF9onF6Q0ctxvuHv9hpKODIPD0KKV5TiNFEP1ECbIGYsCo8mV8KNJonPTsmFWxaJ14mrd8431hA+KLzaOsfNYZZiGAAWW646UWzRO/G9qJOU9x21Dv3TkBIddxPvI9aMLPnz2NZbLNXf3t3z/D/+AxTrS9R3WLDkeNMfxnrZzVT0TyMmQ04TRbYXCKIbhcMrIyVVW9OzDDzkcDgzDQPBCPSoUUgiyOSoJkTEX5n+UMSLxMLqyDoS4Z2zd3qFAW1JULJc9RQmmVyuLMw2r9ZoYAu/evUTh6ZsruYGnQK4+U62gxIzB8MlHHzPs9mwnzxQLpRi0zYQ8SFEBrNcrFos1b99sGKYDxXjWix4fB/qFDLXGoyKGhHUahUPWtJkwiSToQZuf+PjDD0/F7v3dvXjqlCLmSMrpRDZDVclKTmijKUWu+QrxaSqdefToEUplQhwYp5HoDV3bkzICNpjEJ/Ktn/46jx5fMBwP/M7v/gFXF9d06wWmMfhxggjb+6OEfjvNctFitTThBc9+u2M8dDz54ClnZ2fkUvjO7/wzlueGmCyUFk0H1mOsFBbeB5Q2/NQ3f1byt1IkxcDxsKMw1VBNak7cLDkriJAv1SLKYZxFG8W0P/LJJ7/Az/zKRywuC7vhDfloWCyvRZYXQKeOf/wP/z4hHCk4SmmZfAQCuliskYBUSiKnkRA9uW4UfJABQIpS8BnbMo7jV3xRy+WKnBO73UE8WE7X0GuPFMsa11SZU5YYipRKLajVaQAxgw3m38+SNBDVhNYGo6X4tU5XiXMmpCBTZSMSuMmPgKpNokiVcsoUpSmInEfOu7olzgWtRMWQkxxjELR8DBFthaKma36iUjBMkZINWju0kgDdjz96zBQmDseB/W5guViSczhpDcWL1GCtSHZzNgzDjm/8xCeEkMlZPNXDdMtuu+HyyROefvwh63XLj5//kMOmMOxhOiRyOdC4jrZp0doQfcEa8GmDUi2NXdItNEaLV/Fw2HO/uYNS+OVf/lV2uz13d/ccDgda1zCj+efXfhyHSv2TWASf4ntN78Mmy+kZwhBPviRXwTsxxq9kRs1Fci6yfWpcg2uEOBhjYLVaiox/HPEhnyh4swzMiAfjdA4ZM29YqflRXfXZSWREziJL7rpWApBzrqTRQu8ajDaiFKhkVoqcY4XCcMopNPT9gvV6Td/3GOMYhi3jOBAmcLbDuknQ6CGRQkCZ8dS0KWU4DFMFhKiqpLIiu56mUwbULAOcByjzkKFtuvcAL4XVas12t5Emo0I5qPLL+b0bg0fPuPVZ5aAk4qTrJDxea81utz1tD0s9Ns65E6VVa9maLxYrSs7E4PEh0LUtbduLpK8oxkmk4NIIQds32CJgpYzYS+zg6a4vOI6eJ5cf8Avf+jmuPur4W3/r77Lf1y2fBa1GtKrDPyLeewGipEBJiZQL2chgXmXxqrfNEmWLyB9jwqeAW2ZCBErCWsmG3W0z/bInVLvQsl+y3x447mU4JJ/TTNeKXHiOQoHyQG7USojS5kFGmnMmlkjXLcnVV2WNwnXINq2IPLqUwvaz9E9KKb/6J/U1/39srv5iKeXde7//q8DfLaX850qpv1p//5/9aT/ANZonH4gUYIci20v0MVKIJDJRFXQjm5VSxNdgtZi/RRYqmEk9azxnH0IQ6YAxiaDEo/HtX9b87K/u2Rwk58lpxfXja5596vj8heO4jxQSxtRtTDKU7CgoMeejyNQ8HhOZBlkJO2cwVuMWkRCloEwqUIwnI6b3cUqEO8XZaoFWaz54tiaFwPMvvxQ5YhPZbDbsN4lpv8DsBpbrXH1gWn7uMeGc6Ex9KLz9cuLzP0g0fUuYrmj6LZtNYn0GTV+Nh2S6RqGLJnuFiopuaZmmIFJIV1DFgncoCm2nWD8zHN5pmqjZv73jttcsLjtUUZyvrjgc3nEcb2muC8NGyYYsQohJJjnOoJRQWmJKKKexTQPKC
HXMH9EWFDKR8qPHakWOtfh0UFRE64lFzYVRShOi5/XLA34w+EnTNg7rpHAvKcgk33WsL8+5udkx6SNNa1G68Pr1jrF63dpFi6+hnBmZRPSdpi8dm7sdRrf0TQ+28O64Z7FuaRYd2TpC2FC0ISZgqEQaG8hBEXNBZUXnIB4N6+tzFv2K4TigTcNxv2ccICWPHyY0knWTB4OOUiQe/T0pyUTJ9IpuUeg7jXEtuusZhshhOhB30sQmI5MlbQq6aGxRoAsTgSZ0hGLxqzU//VPf5ubFHcd9xrqeq8eOrjOgG0rykshe1kTnyUaaGJ2hMDENUbLZcmbnA74o2kNGl0SaBkLyNGqJwhNikIkpRSSBOZ5CEVPV7OuaX6V1T0wTSnsx0WZFzgafM67v0A7cPuGVwfskN2EtMBvvDywWC4yLFAL5aMnZ1EwmRbdouLq+oG07hvHAZrvl3c1bLq/O+No3zthsj7x6taFpGtZdwY+BGFQNY40iD8bU0M7CcrHglJ1SBEt8PB7qNkCyuoR4NZ6KD6WsyEVrQCdlzheaSxq56CutaFwjG6L6GLNRN2eqlBJ+5dd+ndevXvH69cs6uHCSR/QejS3mhCoFq8UcPIyjeMti4jiO7PYbupWj7R1aF0pKjOPE4TDig6fvG5ZnF/SdZrMFUOJlEac/1nTEGIlxPE1hgdOvKSXevn0ncrJqCjLWVGR6nX6qjBwWuamVilOW1Z5ImlAFMPgp1YBri7OWHDW5GreFmgipJKbJc9gPHA4HUgxMwyD5SdagcsZHT8geZQxt36AMnK0vSWFfM6xaULDf7yUDTBfa3pKLNJMZiGXCUEQFMctfteHt23dCRSy5bjUSMRZSUPW9mfOGpKgqRTZ2TdOSY6TkRFZCenv77ks+3J3RX59xcX3BbThwd7jDojDJkIdA9ODcgpTAR9k6TEPk8vyctunIJTEcB04yHR62rbMxffZhzeZ7kSsquk6ea9s65liF2e+Sc8IHMff3i0UFR83hwtJQzF+perREpj3jstOD5yOLpKnreprWchwkX+3Royv8lBjHWuCjK/SF0zlSauFktPimhF4p3zOz/E0r1PvSwFJprq5Ba5EQaa3JRXyJpjFQ5NpsbGEYvOTH1fc1RI+z4uecpYlCvxD1TC6Jrm958sEjbm82bDZ7/NFDccRk2G6OZF7zzmlub+9JvpVtpJUBHwiwxDVgXeS4D7QLkdbHMLDbZqxVTJOEhZcMTeP48Y9/TKzN8sNmToYs1j3ERYhXK59kWXJIHvxWSpVKmnzYJsxNwcOfhXlrIe9xxDX21AiE4ClFNhWSCyV+tZwyjW2q6Fd89Apk+DCDbbRGKcHcg0R35DR7qpJIRLWB2vCV+L40S9WNd5FtexZA0tw7zrJI4DRImKVy1jgaBxq57voQsFoyu2J8yCtURbzrTdMQk/glNdIkniAt9d+Z0jc/ngBJBFcvrhW5H8ykQGbzWUmn5zZLHKmLA611hRZVCqI2NXJjOjV0xtTPcgm1OconCef83km+V22SrERkUGXlpWiaVgjT3kuDFacozpoSSTXU3qxl8PHR02ecnV9xN+747B//EO+Huk23WKvI0VDyiDIzpTHL+12fkFaKmIF526xE+m+1ZohCQ1Q6onPi6rplP0RCSqQCptGM+0jGoIxixGMtaCeZqBSFUWIbEi9VPcxKcl5LfS265vWlHOu2TpPGh6GhyNQzU+QrGzx5f//l0sB/HbLA/wj4zfrf/w3wv/GvaK6sUzx6ajncJsa9pzRr0n7PELfkHKGI5nc+UVJUmFYkKLNWn1KkKFK60pfUyfsgtB3xqnz8yRXnV1CcBFU2TnF+7WnXkmdlJinkrTaSTl80uYisjiAfJgUUrXE6Mk7iZVVO/CSu0aRGk2MgZk+xkZSlJQuhEHeaEjW6XXF++QExeHjxhpw9zkS22zv84UgYWvLYYtYTxkYSYu7LI5A0ymg0sOgb7r5MNEuD6loWC8u49yLhURpnNUoVrG6wGlQuxEHRXmgpdkzFZCfRruYoJ+3ZI8sbHVh2S67X56ybju2bGzb7iUeXFwS7YDAb6PcULYVHLuoUQNdYfZoyKhK5SGGdAWU03VJwvCkUYpAbpYSLC7TCaEVRBqUCfXcBpeZ6qch+64meque1WCtFVipJAvSSpm8vCeNRNgylRTvFze2Rpne41mFMJgaNtjXPhYg1kqV2c7elawzdssP0FsWrKjUxKK2JoWCblvEQSDHTd9VAHoXMmHImJ4U/Gi7PP2C9vuaL/Q0kzX6zQ1VqY0qFrrPEID4oEwQIMsU9KUr2jWsbFktHyh7TKvq2w6eB4ziQjpliDPQNuWT6TqGjJUXZ9mIUOsEYYBMKQ4xsf/CH6FC4OLug7VuCv8eXnqyl2DscM7lvKcWjswwnipHk9BINVmm0ygRd8MctrmjUJDc+VI8uQg2LRbZUukCaPZBaC/0oSSKHRgvyPEtxdAIbpMLRJ25GI8JVS9Wde0qWzDqZxAVEe5Llxldack5cXT6l71tC9CgExnE4Duz3e6bpwPnFI7p+wW4/EWOgWxTaXhGmSEgKWwxWA0WmnqWID6HvRWMeY5RzzzrGQTa24jtrxC8VhOxktIQ2zoUMpdR7qPrKzQ5EAvLgEYAinyhiDPV8klFT27b44BnHsaotNb4a0+dCVmtLDqE2AIXBTzTasjpb0yx6tC0UU04FTUFC16dpxNhM1y1YLpYoHVDKkKNQlB48DvPzlAbi/bDM2Uu0ud9gnT0VuoWKRtYiIZr9GHNAtORgRQEFkFBaigmFZRwnUFbC2E2D0fJ8cxKqlTWQY2I8juy0YRgHjBFangYa44h6YowerRLWGdrGklKgbxcMycumMhu0yYzTkZAGtIVu4Zi8NMnWaCEPGoX346nxsFYzTkcOW9lsNk4KC+/f2w6UGj5qVM1jUTUupEHbugnNBWsNm909m82e9WHByjn2u8AQBhbO4bAM+z05g1ENWUUoXnJvMjjb0LUtPnpiDBjzsDEtWYYTMoHXp62S5AQpSpHtz4NPS4kCotT7a6VB5lTwOdH3fKVwe19+J+fHTO+rU3fER4K8/Fp459P/d87RL3qePHnCyxevpSgvsu0o+V/0T8ntRc5KUStpEhXnXAlx89948J0JOEaputFSDzAYrSUqJaaANpphGDHG0LiGkDzGFJzTdeBa8FG8VA/HU+GcxjhFzIFhOhJTYrFY45qOMHluXr0FdPUDp5MPE+XEt6sEXmF1JISRZQNhikw+EnzGusw4lNN2x1rLu3fvTjlXpYihX+yepXqoBHU9H7fZVzXL1uT4yPUuRn/yb82N+NyUK/XQ1M4/L2eRTMo5lB8IebVoL6dGQCShch5Wv9Tp4fPp/EtFrrOu6bDGMA5C5JNrjhTt2upTg6JqY62o2WqyWzn5qHKFX8y+nxlOEoIUzvMx08rQNLqGBUdM0+BMh9KFlA+n81YVIeTFJBCJUubPwAMwAjXnQNk/dvzEG6mUVJHiO6xwklo3zZI7ePBxzfcIOf3n1yY1oLyegHjmHKUOCmRDrEURoS1aSyMs71Ok
Mt+wrWMavGylK9ysVU7iZJIg8ktKJC3NiIR/Q3GO4gtd21MovLh5xZs/ek7RIlk0WmG1IWKIeSRXX1eZ+0glgCphgyh0lUfbSrwsOhNzRLtC6xRto7i41PhcmA6ZGI1kt6aGVO/T4zjSddC2pjZ2VF9alMa9XmfER0aNthDWS67Pa/Y+qyKfUaXqsMboSoisXmrmf//lX/9fm6sC/M9Kgkr+61LKbwFPSykv6/dfAU//pL+olPorwF8BaJYa22RyHhmPGy4/+BRzvSTlIypnTHAckjROwSdSjAK1YKYgVUmgFnNtzrrmc2QMAj+IKtLQsyh/hrI74sqXuHakXVrQI5vDLcloXNfjzAKrPMVJvkPKmUZnOCZ53EajGonIsskQU0PRhjRpbFnRLoJkEPgEjeCCQ4aiLUlZps2Ox19bSXhtNOhGKGElJ1QZ0GVE41h1K5yNZB1I7afBMAAAIABJREFURFyrUKPD38nK9exDzV/4za/zg390y243keOO5UWh0RrXFKwttJ3Ge4X3S5SRDcHuBj74esEmObl1lhtLbgMhF1rXcNYbrNrz+KNP+Xf/0l+iW7X893/jv+WLz2/4+V/8Nc6fPaL7oOPN/rtoG5m86KZb11CIEibnwLWKtoPNFg7TgZIKzhgeP7uidZZ3b+5ReJZ9Kzk+WqGVIGiNksA7qx4JzWu/4fza4bJhKKXi+DOUiFYB21hKTmzvN7TG0Nh50unRTQMK+kWDa8B7j7ML+kXD9v6e++0ekwyXj89JQCiRCc/j9TmuafD7AX8cKdZxcdVjWsOwH/A+0OcOqy4pZiCzp+SBw8YwbQ3Xlx/z+OlTbt954pQIcaJtJa9DUehbh5/qGj8Ghm0GA1HJtIe+ZX11zuc/fkm/VixWjrbtGAePcgqDRXnLWBTnFz1xzAwHz34/0tmGGBM+Joa3r9n/3b/Np+UNH3/8dczimjFpbr74nM0E+5jwKZLTwMWnj9F6Qaeg00gDtD3iN1G2QU/O2ZfIZrdDGIKWSMs0Kmxu0C1EA3E30LqGeaislaYokcOZDJpIKZN4jJJCN0hUQYDNYeR3fzzSdJZu0UJ4v0CTC/LFRc92Ixc8Z9t6oX/Hb/zGn+Pq8hE//OEP+O3f/m2efvAIpSLKTJxfdmwP9/yzf7bBGE2/aMllpGQpclXIeK9xfcfkJ2aKUSmi218sFsB7W5oQaVxD07RY47i5v5Wkd1Ond86RT7IQuYgHLwG71CYDjGjD4xycrMXsPU1st7taRDmm0fN3/s7fkdtyLUzH4ClqBm0onIbri2uG455pGNkejtim5e2bV/z8z/88j588Ypie8Nvf+T38JNtFazTLZcs4HmS7mBOH/ZHDcUcMHm2s0L9ML4GLYcQ5h7ULttsdTdMIXYsHUzy5VGqaSMam4FGVnta0Dmv1yVtgqzwsTJFY8bfG1GMQdPXRNBjToVWD1lIcmCoBLzpSqkxmGCbGMdD3vagHShHpjjYQI00BnRJpPDL5yHQeKcpStGUYttX3pTBO41qFyYUQNBL2rml7Td/3vHixo5QsgBejubw4xw9D3XJGvK8bHQR2IAAXLzdyuQOitSMlRH6Sg0QUIETA27d7Co62h7ev7rh63NP2DkNhE7a4VhQaWguMw/tI47oaGgzKFGIKGOtk8JQyaDHAn50toBRynkgxEKN4dLpOMmq22y3eT1WqKDIlVaiFspxnMRaOx5GZGqm1SAdh9lw9bMbm5kvVbXUp+ZT/lXNiGA7s9p6vf+NrfPq1T2iahu9//4enRnAuNGOMdQDzIC2T70UERPIQTI0SAdDsg5ylbXKeCgwlVflbrFv7YRAVRUqFtrUMPvC1rz/FOcPnn3/G1VUn+PvUY4Dj/g2ukea0aRa0zYLDcMuL11/w7nbL8ehpu4bCyMXFGeNxz3G/h+Jomu6U3wiF4AshFhkYAdYkUhaponh2Mlo3xDhWyZg9gR8Wi74e18I4elrXVNBFJISpyj/Fa1NKEaXEe9I1qpoghcg0+bq17N7b7kjROW8eYIYhyMCglFhlfbI58V6uEbpK5hRziK/DGpGBbnc78URXKVaKmRw1k/d885s/weXVGYf9jpvbL9GssKbK/lTCtpbj8UhICaUMTs+oepEcnzZDzPAjQXwbYzgeR6ZpxPuJrlvgnGMaj+QkYeuTF/uHUgZjO9mq40lpEoofBR/m660M4I/HI03ToFXdjmXZas/N/CynnD8fuW7bjDas12t2ux3ee+a8p1LmwZK8N9L8z5vfqpRq7WkwYoxltVpxPB6Zxom27bk4vySlzLt3bwUQpgr397cYoyp9VrxT1nU1I+thUDT5I0q3GFuXFbEQURRrUSmjQmZKhdY4vngpkLNcItp4Uq5RAVoQ8coZcnKUOIcPi/wza0vKmSlltHEQPbpkLNAuWm6nDa7TtJ1hsbCs1oblSmFvPAyaNDnKZeD602vylBn2BzabHd4rliuRIYYQub/bV1m2lb5XT7X/rdl3VskgDxmEFiCniLMPZMl5CGB0oXPiwc5ZMYV/vUCLf7uU8qVS6gnwvyilvvf+N0sppTZe/8JXbcR+C+Dig6tC/00uvx5YPY68u0lk3xPvIpFb1IloYshqnmAYSpEbulxk5UIbYyJ4IVqJpt8QMvhSeLM58j/97e+gPtLY1Z5SIlZpvvYTkeN+idIRdGB/uCfuDcUGWOzRzch07Lig5bDdE01G9Q0Ls2J5MRFKIhdFnCwpH8A6bOfQTSbZCRlcKymCcqRF0xyXvH65ZTiMQGZxlSnJUpTGNIrV04jXO0zqsEGBA6udBDsWzTQpbp5nXtsvWH6iUCEwRo3W3+LTrxfevPmjOvHt+Oy7B/avtqyWhqZxjKPh3YvI5bMV6IlxGLl9bTm7umKxnDDOk9KOpz93zZ/51jd58eUXvHp5x/624BR875/8c559/RlPv3FN9ues+5a78pbgR3SRDBWMp5QGEKlN38Oy6fFTYBoCr19tuVif07gVVntSONJ1PSY7xmNivyloU1ifa25v3zCOgWkaiKVgdC9ygSySvuzqhMFIoV+KYfAB27WSlVXxnf2ycHd3gw8Z6FgsJ27v7ympiIfEJcZpIqSMzxNlv6VbLSEHdEmUqIgeDjhimFfxisNwJE17muzou471xYoPPr5k/fOfsj5bMw1eJkFx4my1EqJNSXg/EVJBW0cOiTIUnjSOn/3lX6OsX/D2/iXf/51bnn9uuHpySe8UJSZ248SzTxuevzyQgmZpG47eo+M5zo3kVSSbgNEwLjvBdU+BFz9+x1/+T/9DfuM3f43f/93v8zf/u7/Bz30zcbW/5UehZ+cWrM6XvPzuSxZn5zRnLanRbN8cabvCviu8jltu3+354MNP2L/zGCJOW/TZBcPthstmRdaJUkYiufo0qIoHkYemlDACyIesEG6gowSYYqZDc9kW3OqCgiEOHgyiybcaoxuMdvhpYhqELNe0ig9/8pLlWcMPf/gDfm/4A16+fMXbt7eSu9IGCp5xCKAsISiiEjqQ6xTDAZzpcAtH9o3Ic2qY4iy5897XXBmZ6u52O0AxTQe
0Hui6jrbmZaSciCFwjImuW+DjSKFK4FKWYrU8TCNFtpOr1Cgx42ZE6lxvoMuGXFJtrgopSfBnM5OWjOby/JwwSV6M0galC29v3jGNie3+gLaG+91GYg6Q59I0hqvrNR8vnrHdRo7DyH5/Q9c7lsszjseJ0XuUlq3jcnHGxeUZxii22z3TNHF2doZSMq1eLgWmsNtsTkVFqZr7mCI6wpOnT9jt9wyDJ5iE0wY/Bayz2HnrXeSarnTGB48aYLGQ67QqAr5Ga1zbksnc3N6wXp/x87/ws5SS+O1//Du8fXvDcBzRKnN5fc3N7YZhnDj4ieQD3/297/Lock3XaayrlLmk8aN4RJrOcr5eM0yST5MmxcDAcrk6ea6GYUCXe5pGfHK5wgFysnUILdt5obzpKkdTpOQpZEJsJNfMdvjjkc4Z3vz4OW9+/JK20zz74IrxzcTer2m7Hmc0i4X4n0IQoIJRhaQyu/0du53IULu2ERWGtYASafqwlbgLtEyMa1Eao+TMtG3LYrHAewGjxBJPEqfJV6mwMgg9+iEkuZRyKhCBEzgg+Mg4DaftwZz1o5VI2haLJZvNhk8+/ZQPP/wIUPyDf/C/U7ImpULKs7+R+vcfQmCFdCvBuWRp9pqa1xNjxL9HZZx9J8Mw0RZH02hilmL5bH3NZrNjuexZrRe0bcPnn33OdDR88aMvcE5BiTz/o3sWqzVXj844u+wYi0jOj9sj3kd8O5BV4tWLO7SBxarFqI7DfqTrErYOG1MotUDzVfqaTxLEUjqgIdmWog+8eRNROLRqiVGy07QWSZh+b9g05zWpSiucG9Fm9nDlcCoobbas+gWjn8RFgTRPPkg9lVJkmob680qVB3KSfMoGKlKqLNY6aZBiDMRacLZdx7xlyVF88UpP+OBZLJb80i/9Er//ve9yPB5RClZnDR9+vOT3v3sg5SUpXZCLoe0viFNGmYwyiYInRmn8bJ1U2Oq12u8PQKFfdoSqFhC4gyFEzzA80F7FM5V4+fJLlPLSHJlzShYLgTYBZQ6gB6wJpClSqhQQdPUuCTBDmqdEzOErNe5ms6Ft2+ofE9XD+zLLucifIS8ylKDeA+o1EC3kydPzVuQs6HSR8Wa0zszRICeoCYrVas16fV7zFDPlvDAMB1arpchFyRwOe7IOrJdrShKynlKWaZDnkbMiRY/Sjn7dopXHmMjiSrHb3DN4RGGDRZmEj5rGRHJJTOMBRWR9cU3WgWIHnjxb8+5zRTgMtLmw0A0xZ0wjE9hcPOP9hk5DOnpC37LPhqFJvPjyQBrqZz8NxD08P/yYs3WHq01SiIr9djqpI5bLBZP1BP8QSSHnrFhgKIUUZVg5jp4Hmay8RTEkoRA66PsGnzwlBrR29H3Pgf2f1N7Iufcv/c7/g69Sypf11zdKqb8J/DrwWin1rJTyUin1DHjzr/o5KcGLzzx+GpkGz+3LI8PxHpcmjIVYMm22+JyZ2yhBdDpU0VTQK6XIxd9UXqrIBVoKI1YL5rzr7vna1xVZG3zUErqpEyF6rCksl5q2hZsvd6wuK08vNxQ3UaZzfuqbP0c0hXeHDcEfaGzA1QitPBlMf86Xrza0eM6WAddahoNHGzHeOqtpM2xfHZmOE+PoGWOi6c/QTvTKJUEoFrrCMR1RuiDmzER36dj+38y9Wa9t6X7e9XvbMcacc3W7r13NOadOY8cc2SCBkGwZOQoiBIUbJLjlgi9y8FUk+ARcIS64IBYRoEhwgWQFiCAm2MY5Pj5NVZ2q2lW7W91sxxhvy8X/nXOXo9hRgkBe0laV9t61aq45x3jHv3me33NTOOwLYymUOnF3n9FKJlGVl+w+HUlhRunCbjNSYsZViNtCQgzWX/9UU7Pn4fOOizPL+gvNL/6PNe9/vOLquaN7EPnw11cs3YpPP/2M9du3fPfbz/j5VzAeRu7Xa6afzOQpoxcblJ64fGA5P1sxHmYOcyWTmUJgDBFjgWjJMaMy9N2S+9t7+sEJ3tYYTJEbwquKNpkUFWFSxDiKzBFHDIpQ0km6oJUTklLnyVVw/Ycx4ouSZllBVYq6raQCWvWSUaU1U7yGbKA4nLWsLjusqZhNJYXEvB15W+/JSpH1cZ9tGMeAMXOToBoMhsWgGZwhp8y0U1wO3+PB8mORK817Wc87mWTHKHKXbumZZ5nEFUR6ZS5W/OzVJzw5M/jLM66eFc4venYxgV5ge4/q78i6Y3A9zg0s+57p/g3r7S1o6M8UH37rnPu3ivRmIqZKQXP+vuaP3rziD/7L/wnWNzw9qzx4ZLGD481Xns2dYp8m3nu/5+zRc+apEA4jjx52XM9rdEromkhz5eb1V/RVyTauZJRVYCOLZcccRuYx4p0hHXftBWoszNMsmTIIKLhWj04zWUFqWV45Z374g+e8uY9sthNhnlFGtPjDoPCdwrkFh3HHsFANJzzx+tU1d5st1hy4OL/gw2894emzSz777HNSy7WKAZargfNLmZLOU6DzA5oF07illojVPfM8Qg1NQqRONDuZvov/L4SD4IFbeKa3jnGe2uT0nWxgv983KYH4qTorJuPS7lnbziuJZTgGD7cke1mbkVKUBr3I5v4dHMDgnTxojyb+cX/AeS8y31wZBgc68OrNK+7u704Ft6agvYAlwjTz/P0PWa9fii/GyuvZ79dSgCnJoFkul5SSuL6+Ppmklaonv8dxa3GUQhrvxBfR/LC+NU/X19d0nRHgSZZswg/ef5+b+1sUYkaOUT4z3wnhdZ4Ty6VhsXTMcxJUf07MUTxnZxcLrKv89Oc/QSuP7SzGSIZezpH79Z5aLM5qkZtnkUmfny9ZnXVwn1hv9igWdK7Hd7K5WQxnKBL7w4HDbiLvEs4ZvHG4wZOieAyPIAVtLc5ZDuOOiky4nXWkJBvPMIvnwXeGrvNCrw0zVYPvBkKY8LWCynijuXoILz675s3re1K1qJqwJqBUbs2oBzI5T23oKM/VflgS5iAZZEbhO5FblQIlR0opnF+sGEcxgIv/I58yi4wxKKNPn+cR4VxyaQ1kk3pyzOrhJA9NKTNPElrtrFA2Y5Cg1HfFpmYOicurhyil+fTTX3IYx2Yil5wm1TZQx5/pKH8Vb5uWaIYkUivvu1ZPxFMBe/QNSSEK3gvlzlrPqj9DacNhnJjGyLe//W2evfcY7y1v3rzmb/7tf4tPf/GCL375NeN9YLFcEcLI29dyLhRJhqHzjhhgOsDVkwW73VYGvBSctVg7cndzzG/UuM4Q49wosxIEq1TmveePuLg4J+XMF19+hXEaozxCYTR0vVzHNBmbyHEFi370TR2Dd6cwnTZ803jA90IyFElWRRlF1/ciFW3IfeccqhEAj/ezgEwSx2yx4walHzqsRfxNDTFujW0+J3nvU8oYa1ier9jutuRaCEGUFF989SUfffvbvH7ziu1uy36aePmyYK3js09+xi+VoaqCdZCZqUXLJqRqdocNve8wygjuvslH+8Gd5Ka1FJGxnWR2IgMzzbNVkTMVCt4LlXCcR4wz5GyY50qKM3PYcXnlKdlTcqCUiLWaq6uLdl3JvV9KPg
0h5Fx+J0M9nunzPJ/O++NZuV6vmxfrGDjs6LruNAxQSrNcnmGtBI6nlNFapNjHAGKA7XZHzlU8yNpQamK/3+KcP228aoG+X5Bz4ezikssHD7BG8emnP5f3q9Z2lrQIkZxQOjNciTzTmMDy3OAXS/ZdRoUenzUqW5R2THnH0HusrVAjuiRyXlJroVsa7GrgoEY2B4Uukd4oXGcgJ3KOks2mBMWvOw3aUqwm18i83bG6eMg4HihuwiwyJXlKjMwmUr3BGs+cktT0TbbnPAzNVjGPif02U3JlyhHv7YlaOE/z6ewspYg0kYz1gtsvpTLNshV1XmNtpar9X9rX/Es3V0qpJaBrrdv27/8O8LvAfw/8x8Dfaf/87/553yvHRAkTpiZUDKTNHusyvbfSRYcJ6jGnSRS2KWV8L8ZwjsuxevTFKCkIihjqq1EoIxusL19syOqKUgcg40yE0hPDPSDBlNOsefD0EmVvJUMgATpTdeXDj3vGXLj7xLDZFXormUHeOC6vLpjmAZMarEHnpn9WIk+pFQso5SQYeYrkHJlHy/215fxBllDWCnkyWJsorpBTpWRFv7IUrZhnw+G+sJ8yqw8GlIvQMNchb1oKdcYYOXR9p4jo9vCX1xB3cPvljMVxfmnJ+0TZK+6+jKRJcXaorFaJN/U1N+sNhwRXq0u++y3NF5+/4LCPbMeEVrBwBY1kfFC14D+T6J1pBWTnHLmlfmuVm6clysHetpG1iBnRaNEZ5wwpQyoyidFat7yiSAySOm+dxRtPTEWuj6Ixdmg0OtFrg5Zm04g2vCjJE+qHgXSobRrIyathrD4RDGOc22ElN56RipdmlICUiQ2v/OC9BSknDofAF1+85HBW+fj7H9H3g2xrnGBdrenonEM5SLOIF7QWM/P+EBnjnuQdizPH5cML+q5w82Jk3ksxVU3HbgNKRZ6cKz66ykx3M/u6ROuAsgWMJ5SJiuiztZJsma/u3jAfChdq5uzS8ure8vKmsN8WmDNzSvgrS4iRqhTGa6iJgsObisqKkCXEtdMdWg8CqSiFJ88/IB6iBFkDxWihAx5DDFXBmpYpbwTDnZVIyAoFowxOK5ZUnhrNrAq5mUdDu79rlXykKUuB45xcbzEVATkYC6rKxrIUFssFxhrJUylVPETHIEQadln1rBaXMtwJAfSMc5YY2hSwFW6CjTUNXGFOxegJctF+HfOljNYorQWCczIoN4mgAmGO8i50mG8EDVeROx114jlFtDr6U45SHdXyQQRgUWtlu9lSSuH9x0+Y55m319di7GduoBGRylQkTNwayf7Y7wovv35LqZGuU1Rl2W0jy+WCrnOkWNhuRpzzxDh9Y8JaT9uPYyH7TTqcTCPlHDpiseVnQ3wSLdBVKcODBw/YHXbEOANVEMwliN+nSUrG8cCwlPtFfASZmEQWarSSZmsOUJPkRVlHiIm7my0XyyU5JgnErfE0pIslkItsbRQj1sj2aZ4Kzg0cyoEYJFRUaG0SZivFvwzWYivohbwlU3vMvl1vNCqj+E1oJvuUCkPvRZKVZnJKVK8l5Fc1H1+/4Lu/8j3evv4T9vOeeRY4wKIXFUKt4ruwVkKeS5JrSvwEBue6RjHLKJNPW9hjE3T6TJq/I4b47vMrx+BVSwixyawAFM53LBZLpukgg4KQ5TprWyXJtDoWbPYbn/u78GSjVPMKWXa7Q9sECj63lqOvp7b7TmHEzsLR2yM2q3ebLJGgSp1gjNDmFouB+/v7U/Gq2j0phauh8168hgbW6w3iZRKISr+44uxyx7C8Y32zIebCxeUFxla0KXgLYZqZD4mqDLZzGKdYXZwRZqGH1mMYb04CakJRdRLJXxV6mVaqXc+VaZyYg+TEdf2CcgxSRzYESs1tUCG/bY3jmzhyyfaUPEjZhDcqWpPNtgOmnU/q+K3/nPxPGwlRr63I9NqdvFvHzYtzFudl2FPK0UMqwAljtDRwWswtBVoeX0E1Wdp6s2axXOC8x/cdqWSmsQ2VKNQqNMCa9LsGofnsjs+ASruXkJiP44Y2JWn2CuV0zR29NkeZdynvAAZHkEeKR/BHkP+PMeQkNaTIlIUq1/eC/T/i1U9NcwPDiGRSn9Dwx81pCPH0TJCzR7e/f9xmyYfR933bwLYfrwrRTp4dDXLBEYwkQ7sQIsfQ3lILquHXrXXtbJZhiDGaw7hnt53QemSx6Lm8eMx+tyXWGapAa6zTuGLR1tCdaa4eVYwRjxbeEPJE97BDOcg7TRoVHoOp+qhjxHSaNGeUU+1skugV5885Pz/DeSg1kCeoh0ydm5eyt9gLS4ipkbELeVcZQyTOUJKmosm1YJxutaIMv2tBagoFRoE2Fm0EQKSMwjp3aiRrbVwGJDC+lioBxa2GUPp4c8iN4XxPbIqlnI/Apb/46//N5uop8PfawWaB/7rW+j8qpf4A+G+UUv8J8DnwH/3zvlHNidWiiNwga/a+0D1ZYIJh3mTGzY6CYBtrlQJeJkMW1Dssomqm8OMDmyqr1qq06M1T5fPPJ+a5EymGmfFDpaSeeSy4lokUkuL9jx+xXs9s70dqiPiVRneFxcOROgFaqCW2rRWdtpyfXzDsF+yWBWMVvjeUIgd3KqWR0mCOhqGzoGYxV86a7T10ncYMBqMrKYIpkKqSGzwX6qCJsZCCJh0U811FP+q4vNRkNHMMZDVTjeQYaAOu07gzw4hG64LVFV0rJSo2rydMqeRNT9wVvLGs306MG8V074jnB4L/kvUuot2Ks6tnvNedc7jd8HJ/x2aa8cuCoUMVQwqKURXmMZGjoip5CFnjWZgFSWVCe7+MSnhfMUaAJSVJGJ11QmNRWrS7OYu3ACVm8GHRMc/1HWVHKfphYA5HQpul6xbEPDczq4YGxjC6UBuUQmlYLC+Y4gSIT2K3mzAtEHZYebqF53CXcRhqFWiERaO8ImUrHr92reWUWK1WaF8x2z0vXn3O4e6Wjz5+xnJxQaXgO0MKYJ1k1WQdW/hsg3hYxW5zIJnM2xy5eLjg/Q8XUHeEzZ5p3mO8Y/HgnHgX6YYDj1Yzv3Jl+Hma2fce00WUhWkS0AEd2GJRyTCOifV+TbdS0CtGa3j72vLZq4guhVojMVS2xpP1ln7wOINk5UQj0j0FMneXB6rRPboqlJp5+Og5L375C3KOGG1QRkh7NkMyQhPrlBXPpGlgA5UATUwVoyoLDQ+N5iyMXKhEsJVsFblakpIGep4TOe65uFItgFXkEkrDYjWgi6NS2Wz2LBZSCOQsWxyt22Q2IFIVHDVZOi8ByaXMhDwxLM5IWczkKNHG7/cyqTqSmiRDpZHS2nQ/ZylgjZEJ6THnaGzZOlSEcmr0qbE6osnVMW0eKSCPmnpq81jlAtryLgi04r0lxVlIixW22y1d13F2ds7FhXge7u7v2e53zYdQmcPUPGFy3ZVc2e/h9vYlV0+WdIPI23KcePTwAf3g2e1G1veHNrwS2ZdWilJkIv9PN1bfDMus9WiwbwGUWuG1JZbSppWyJbHONfJWRKmK85Yyy6ZdtUn9OB5Q2jWUdjPV58zQO3KJpCx+rJIyi
8USbS15t2ecJi6WS1SVDZxShjnKdRrCzBQ4TZ21UcQYCGOi7xT5EOTnbERH6wemcSLXJA/mFsqrlGxw+r4jhMBy6TDWEUNht50oUbYwun32KQqdT7XBkjTdQa5JMQhgfc+zD76FX36KO4zkMhOpaO3wfqCUTIoB77u2HZUmXRtpboy16CiypXkKVISsi1KoViDW9tqPHiW5yBtuu4B2tuVZvfOaGOfoOk8IUyu847sMqybTPxaWx3tGpJL5VKg7J9KaGDP7/b4VmNLoCewEOWna9WHU8XqS5rC073lsrk5FD9JcLRYLnreohG/CG6qSuiCEIMM0o7BW8/bNW26ub7HWEmPm9cstca703uGsYg6BB48+oFuAMhPOzYw7Rb6OlCowjsXSUJVlPMC4n5j2UIvlGE4q0XRza1QF7S3Tek2YA9vNSIwZawzW9PI8r/KiS1GgI0bZ9nkpum7RJu3HQrsyHkaUqqfzyDg5W2KSAtw41zzqFpp5/9T8UtoQ8x3Z0nnX7uF8gp3I/SjQBOfMCXWtlBKfXwko0wBNIQiMyohErxaY5pmb21uRzRknQ9NJyaZk2aENzGFmPAT6bklOiVQTaBlwSUCsjKeMUWArRUm+YKHQuZ5cj1tLoSQKNIgmhSyUnJq3rHAM7K0lgy4YW/FOUapr76/CKYMtsnUVn5Q8Ca0VsmbXSayBbP1oA0Sap002lKVUKMeGzJDJf24gId/PYLy/wG8DAAAgAElEQVSXc6HK81e2m0ffkmz7tW6eKCWqpuNAQ+vaFBYiffwmHt45T9nt2dzv2O8Sy9WCp4+vCDZQUkFbGRY7J82hsQrjFVePJVi3WE9QDnYz3QOLcRBroewDCyuk2iTaYexg0SaivZz9aUqErWJYLbl4skS5xHa8R5sCyRFnObvpNGqliZsqMS4pk0bNdCcKEG0VxhuMzzgvQ27xsZkmhy/fePYoUqyEUKlF0/WOnFJbWGRyas/qKs8w05QpVSXZwkoLj1KqBU7H5o8VONNf9vUv3VzVWj8FfuOf8fs3wN/4F/lew8Lx4Q+eE8ZK3+8ZVh2rby24+WzL9S80Jd2hlpoyixFct44/pSzyDt7JcFKKVKVRaHTVGBdJRUnGjlKsTM+TheL162u6s8TVI8v9XWS3Lnz48WPe+/aAPcvs04x98T2m9Rum6ZpnHy7Jc+CPf/IluTi0dbz3geWwHYhlIuaR+83If/Dv/Q6f/XRDKDOhrnn58o/4/PZrYekrKLPixSeZt/Mt6EgsM4c58ejhis1GUYpntahUt0WbBS7IZLnoyPr1lqIci2Wm+0jhzx2v3mx5fp7pB8Wi15S5Qy8UYZLC0bsFqwH2/kANIs+aS0W5gs2V/W1guitUtSDrPcYV6mw5vHCMlwd+5YmnmwoP37vkr//Ov83//ZN/xNP3A7vDp6x3X6KqZT4kYorkopmnitVHelNE64ghs+wK0SAH9CyYzMXC4nxHTobDthAOB2zLuShFPs/KniapB625vLzk+uY155cdKRVCKK15MuScgIJ3yFasJjQiybm4eEjIO4q22MHTrQb2+yTbqXkml0TfPSBViOXAolesnvR4q9i+3qCRBwUlERFK4HuPn/HR8+c8ffqUz17/Ke9/6wlXj68wXcdP/vQzwtsVd/czu/EG7QwXVw9QpWOz2XO3vmdYdHRWSwZI28qa/kDvzjl7vMIMia9ffoGKK9IcUVEmNON2w8CCqhVxrMzX4kuY0g3enjd/xMT51ZL8nmFaz4zXMnHp+p7sMvc2sneR/rHm6dUjrl/u2K9nyJYyzmAc4yGxVYK4z/sDh85RjQUrUtx5nNAx0puOi8slrz/7mmlzwFKwvWc4P+PN518yDB2d9cyqQKqovWj5ja4sfI8a4H5/oMyJMle6y4FD3ZMXlbkU7g6ZngGlJ6Y5UovGasfN25lpguW54ezKkAnUcs4cD9RJs906xi+/ZnXmT4bvUhJKWazqqbkSpswhHfj61aekJA+mUgLjvGnTTSk2nHP03cDd3YYtB7pOCsPqazMzK3KKkuMxDK1Z0lBp0qzQJGIa1QiV6STv0ydpoEzVxGjvXIdS8l6JdaeQU8CYFkDcPH+l6chrFenXbj/xs5//gt/6rd/ib/6tv8Xv/be/x3q7Pn0PVG5FvSeERIoZrS2rsx7fySR8PhSchcuzK8Y5ME9bcg3M84GchTilFCwWC3a73clbkJrPpT0NUAqclxBI2rlcMhwarcsZuabnKfDZZ58Joc2K3zIX8bGNe5n4GqsxBjb3kQcPJXC6FiNynRRkwpgt5ExOkc8++QRnHEYbVr7jzevXfOe73+fhw0dorfjf/sE/5Oqip/cdzlqmMMk5o2qDDWSmcIe1lhBkg9n3lvfff4+f//wTjBEJelWwWi0oQAyBV9tXpJR48PACawWm8PjhQ26vb07XVG5wiM3mXgpGKs5otFlIQGsSP9Zuv+GP//Bz3rzdEPYZTcdqMGjVoRioZaLW0LwEBm1Vk915bm7uWC5Xbbuv2O62rM6XOCuqiXkSlP5h3ItcioZcb9RUbSy0onsYhjZll4L1cNgRp/H4zD9tdGOcSA3eYq09gSSOxc5xi3Xcqp2fX/LixQuhYM6ROcwnSVopGecdvfPttaXTxu3YjNdSxB9WwRq5Bo+b33kODMNCBnQIAKOUwjwlrFXc399zewtDv4Lc4UxiWFqW5z05O/7g9/9nNNB3hg+enfHi9RrlFbWrRFXZJsXf/Pf/XbY3a+ZRzpXVmecf/Z//kO1hpqDwncOZJWGsOJ+xTs4W33lSKI2cp/HdinneUauoM5QWqp3RTjLAjCFFxRwrq/OFRNEEidQopeBOEqfKZrehqIqyskGao9gTchUyHZlGkpRhiwAp0ulzNMad8pFCCBxDla0zVBxGW8aD5Mt1Xc/5+Tnr3VY2mKWRcIt4slKuxAhWa0pKQlCu+tSIr9drUhZZX80ZVQwheJy3ONeRu8Ic9+QkjbeERacjYwClFcP5gOsV2+2ekOSsSDXiXd+aaIk8Wa1WqEbNi+3+sk7Rd327PneU5Hj8+Dmr845uqGA3hLjn5ZeBFC1WW3abPTQJGwilb7U6bx5ckfHVing9j8+OvmcYBrbbbYvueCcjPHmB2v23vr9DYRpkpheC6GbbBgkayCeP3YlE276O4fTS3IliI2XxVJa58OTJUyRIes88b7m/3VDT7vQcUtpScmS/DQJ7Kpr1dub6Vebi+Yqr50tROuU1MwW78JjFhLZbfL8CHWT46gx61fHgIrO720lMgdM8f/yMTSxc376ilplaEgqPQoM11AwpBvJ2Jk+RcYxMUwblMCaAzvSd4vzCcHYlS5H9LhBmMFY8ndoYCok5zBzGkaHryUk2j8PKcX97IMyZFldF0u05bBSlJKYx4XotjVqjRBsjkUZGtSWOMuhigPEv7GvMj370o7+08fn/4+vv/Of/2Y/OP3jM/f2BKYF+/JTtTrG9zhxud6TNW7wtxGAQ+zvkEjk76zCmPVOTyFRyORYsgujNSVbgggU2pDnxt//DM773gxVPni/QuuPNm57f+6/u
uXu7YLX8Dv/Gb/42f/hn/5g//fFnqDLy4NyRpoBezexnzWZTmEfFD3/1hwz2DMVMLnvGcOAwdZydPcavHpJqx1efvCSqNcYk6qES3yi6OVGUYEddX3n0geOD711wfV3ovOP8rIExcs8H3x3oLzT3h0yqnvU+knLF9JqL556rp6BcoMZEOWT2a0PvRfNrnacbFpAc1y93Ys7Xhag0kHAetHNU60VSVSJWKZms2orWCV8yMRzYbO748Y8/I+eOz1/8lMO8xlnIe8Fwl6qpGlxf+OEPn1PTGcPguLxQPH2c+dd/4zFTyYxjYTqI34sa2e8C4z6Rk2p+l0pVR829k7DAqKE6nPUYnwkhC9WpVIquGGUppZJiIgUJkRwWlppmai7kWBnnWSatCTCG4bIjp8zdqw05zVhXGVZOcnBSIYwj97drxvuC6y0zgaIj1ikyFVUS3/3OD/j+936NbnnJk6cf89nPb9neW1R9wGef3vCzT37CerNluz0Q4sibN1+htBVZUiqoolB1pibR7S9XA2Ec0auB7DTTXAl3BVctRp/T+TN612NyQC0saSwkPLthyUunKeVA1w2kYri72TO+vuNul+lWnofPehZuJrvMdrenzoVBDSwuL3jv2UdCWduNsK9cXF5IKO4+UfcyzGBwdIOmsxWvKl7J5Np2gaL3bA97YEbHQsqZfZi4W9/jqHTO4Jcd/nJFqoWV6xuJR8JbnbH43lMq7ObA6ylwlwyvtoX9aLB1IZNGZNNgrEVpoY6tznqefah5/i3FsDrj1Rfw0bfe5+GjS2qNGJ1xXgmevvmNla6EkMllQlsJTLy46HBdwPcV33nC5DEuYbSFqjkcRsZx5OOPv8OzZ09YLpcYY9hs1sQkoIZcMspqkf8Y3SaVLe1etfexVQXHguWYgXKcWh7Rwke07zB4fGcls8aKX+QojxQaXWsq2sYiA88/+ID/9Hd/l9/41/5Vfvqzn/H3/4e/z7DoG+AHSm6ymGCJQUHW9AsNujCOiumQoBaev/ecr756xc3tLdN0wHlNypEwhxMEIcbYMMff3CDkU9Fw/NlBtjuN70CtoLVtklWZyI7TSC0i4+g6xdn5gPdL1ndTyzQUCU2ae1IM1FzxthcPVc5CD8XRdQNGZ46qDmqlpsjZ4CEXVM44I57MzXaP9RbrDcZW7m5mvB9Ai6y7W0YqiTAbOr/i8uqC25trnD1O7mVTY20z3oLEZBhBsU/TiPeex48ecHd3LckBDbd9lE8tViu88xitQVdC2OKUQVcI48iLL9/y3mNNngspFi4felIamaaR84ueD7/1lIuLS25utrhOgyocDmPbiogCYLFc8P0ffJcPPnjG0dRdamG33dF3Pcvl8iRr6vpB8rdKkaFZzpxfnLc/T8zzTN/3qCbVlrdYCsWzs2WTPMbTtB7eba6cdyffSYyR6+u3jXqnG7kvt9d8RPYftyr51LjLJkuK1nqUq6VyUq4cf+Wc+eSTT5o8tcl+mgRPKcUw9JydrxgGCWMHTYqZaT9BAXcGpk9UnZjnSlEdm/0dc9oTwsSLX97xZ//XPbe3G968ueazX/6SP/nTn6C7kdQIbMYJQn2e1gL8iEo2m1PCtA2Rcx1gxXpQE95rnjx5xK/+tffx/oxStcCP0iyb9GopWaNVx6NHj7m6ukIrTUyJGAPeO2IUaqRzBu+PEtLjhqbQdY5jqGypFd916NaA1uZ3PA5Jjp61o7cKpZoU0XF+dkGMiXGam5JB/GSu0xREoqmLpetck9CJ7NJaR4q5ZRCJtzTFzINHZ3SDgFhyMoyHmdTk7UpBya4pWRTKyK9UJx48ueT8csmw8OSUoRpKPuZ0yenYCT4YhTT+fddTqTg1YKqHWkg5Uojc3Nxz/XZHKSu+/4O/xpwq0zizXR/IuRBCFEKss8RGWVytzkgpM00TRls++ugjjjEVIQg045uRFf+0hPo4ZPPek1JkDhOHw57dbkcI6SRDPObl/fk8LAGjKCXkVets2+K+k0MC7A97iShREox7bNS898QQWd+tacs2DvsDu/2eTGRhL9i83bLfzcSQ8boSpxnGiNXQnXfc7DPPfjXhV1IjHQ4jnT+DCmdXF1w8fMQ+VKa0JuXAxcUDvvPxr9F3Z2jANDx6URXl4OHjJxjdM0+S8YiKiMrWodQCjeTSKuXRWmi61jhpKKN4LBfDgAJ8Z+kHR9dbDuPI1eUly4WcefMsBEGtwXmD7zy1uvbeJbFFlESYE8Ln0s3vWZm3+eWPfvSj/+Kf1df8f5Fz9S/8lUrFdAN1nllfj7C/ZVrvmLd7UgyYfoHSB7QvpFmCybT21KKhcceMURIcpjVGizEztgtX10xFkTDsQ+J/+Qf3LB9c8ngpxJTpkOgWlu1u4rPPX+F/v+c7z38NfvVnqGxx2pPmHbev3pD0IFKekHj58jWKAWs6+s5zyBNfv/w5dy/fsN9UNvcj0+6aqyeadHCkCUJQaDJKTXS9xnqHzgvywXGx8nhXSaUyLJ9yd3PPfjL0l/DkQ8frL2fsKMTAeiigZhh65miwE7gZemtJVbZFxMo4F7bXVczbyEPG1QLYk5YbJA/FFUs9mp9XAz/84a9z+OqfkAfNISle3l5zv7thKhvxySXQ1pBromiF7TTD0rHfBu7vJs4ees4ePuDxk44/+ckbbraFaeIbD8ujdCFCla1kVU4KU90KMiO4YmNEijLPEoJXEUkCuTClUQ4XVdEWcg2Mk0FrMfEe0ac5V2oqTJuR+TBSq7zmUixFa6xfwLgh10qNGqKiEtluZcqulCIZzaMHj7jb3PLV268JpfD42XM++/mXKBPgUPnq5pdsb254dmW46GYGG9CLK/z5GSoqlouO5eA4hEoNCm+kLJ6nkaQ9vnekEHHacvXec4bOcf96w/ZwIObEarkgxYzrO0YqX95P9MvK7m5gczOjqhyej77/HV5/fcd8k9nswJol07Yyb2bMQsGZ5m47Uec7wpwwzqEWQt2KWfKklJHNi6tSwCZViVUxaIXtKykqYrCUZMgWrGk1ZlGcdQM2RVSFNM6kOYqcqsgkU5sKuhDSjPU9zoJzch4cpoi2VrTvapatkpVMtoomhgKl4/zBORfnjqul4+LDR/zkD/+Er796gTGaMAu0IB1m0VSLvganORWeUvtlrp6O3F0Xpo0izGLEVjjQGWzLT0uZvh8YhhXOJ6b5LbEUjG4NQ84oY5rHDKiSz6ON5HccixRtDCghQxkrU81KYL+fGpZcPCESwCyY+mOhI8rCo1RBQ7VkotzbLW9nvdnyd//u3yOlwheff9G2uoFctMAIqmS6xTlwdXnBcrlgt9tw2O3atkGojEVpMAlLpRQl3iavkewkmixCjM+1hSl+EyQgf0cJUCa3KdjxZ6jHCTcn+VjKGetmtPXUajnsC0OPBI5qi2kSVaUDuVZiBlsUvgNLRSnZ+OWY0TTvLRJAmkjEbOhUIas9iRnvHX1voUZy0tiuUbtyQGkpXmKCnCzaFJQOxBA4TInvffcD7m/XrNcb8Q0kx+psyRwieR8wyrXMF8s8B776+jUhFIwz4gNo8tB
SKnEMWNMe7qZAtXCksylLzhPD6kNi3KD0CMVx+fCSu/sNh5R5dbNmPBzwnaGWSC7SWBptUEVTY2U6THz98iXTPJHmBEkobjFWjCm447PSmNN2UT4i2WwctltKlkbGagmG0U0OqkDIbVU+g84blJJNlJy97Tmjjr4SkfFCO3tLPdG5lKJlD4J3LT9JJXJKJ4BGrRJzQJOLeovktmnT/DSpqR9EYiVZNtLIphRJpeDsglo981yZolAtnfGt8JctdM4CUqFqOucwRjFPM/u7hOsN58sOo/fsNxs6D97Cm5uRed8xDJrOQ+8KgURMCoqoUFIuxJh4+uwBzlvmKUgunFVkJZj4zlvevt7z5Mn7KGC3u2UcNzhXME5keSlrbt4eePKeYRr3bNb37Pd7rNPkPEkTgqYWeyqotRafXqlRMoeqNKXOLkgcKFmky0cV5XFIcnz+AS3QWZNSYb1ek0uQSBetqcri3YDSGWsqqm2vSrJYk5HolEoMLWoEeW6jlMBc5opKszTNUQZMSlkZJKmC9VCTY5oPQGkNguHm1YFuIVQ8lCIEsL5STaEdMsyzNK7KgPis5AyLaYPBY/UCpWbmMRNLoirYHwLr9Y73n3/I1dkDrl+95uWrt6TYtRpGMioXywtyEu/oYnnGo6snXF9fk3KQMykXbm9vT9LsY46hQp8AbFrBMBjmKdH1jpQsMcj29rd/899kmna8ef2Wzz97ifMWrR3GeMneLBs5W9umKpejRPAIYFItuyrRRP1y7lbxgM15pF8a3nt4zrSHnAzaHBAEvSKnDZ3TdHjsNGD8gssHCusUJgfKtKdYDbFH1ciwiAznHhUCpjNopRmnxPpuQ7cwnD209AsY4x0xil1k0hA78Z5XHCEqYi6URtnW2uIbgGK/mQijJqUMSnLpvPOkPNF1RuTyqRJa4yjHm8Z4j19Yrh49IMWJcUxo3zONk5AiFdSSKFGGpNBCk42T57UTWXLKUjf8ZV9/JZqrQmW/D+zuJza3B8xiJI2BME3UUjDdAOWAtZDj0ZMgxbIxFiMbRaGWNYOoUopSDeiKqYqsLKnKw/LHf5T4zb+eePBMdMxhrnLDucL2sONnP33Bbz//FR4/eMJ2MzGP4IfHxP0dtpcJT1KB2zdbkh2R6EJLZmaT3jLGNbv7xG4tG4rzWVaMNcj0LLcCyWhBh897GO8zHkeJlXFf0NZirGa/jeArdiGyxn6QoiPMCZ0yhEw0UCPorPC9ZyY1Uo5gpFOyYqRta2yt5aaqVbUpUDkZfUsV6pUx8J2PnvPJ7S8I84TOEOaJGiN+oZnRxJyaZEETlUxsFYr9LpBLImXDOFfu1pX7jaMUI5MAlVpx1QIAlfiqtHLiq8uSO6C0TBlzeZeknUohxdoIirUF73Ga5MiPKFQikR60nCIU0xzbwxlKlELCWAvFUNGSSJ5k2tZqP7SppPzOE5ORxlpVzf12zRQCoRa+fPMVl1eOoiJzGDFZMXjLA7tjsI7JPsZqJ1RHq6laAROGHhl6J3KMeNtTYoacsZ1n0S0Zx7FhjmObBnpiyKzOHP3SoG0hxomaKjlMaFXpOo/SBoslj5FpzAyLnou+59mDC7RLJDUzziM36xvmQ5AfWlcOu4D1A9aJNy3OhRigZqH5RRTDUHDeMc9KvCRVgWrNSPtstDYoR5tkZ+Ywi36aiutFa5+UIgQhl4i0xyDPXMlDAwglkSr4eoRDlKYht9QM+03BkOiGhLWF8SB41Fo0GtuM3EjwopYgY6NloteetFxePmJ3Lw+TnArDojJPlVrfadqtU+x2Iv1Qbep7LE4kl0QGOrZ5ak5+EadpNYUUelq26aU1e0eP+dE7SpUA4lrFDyVegHLyIB7/f8YIXIGim2ldDOjTtOcP/+gfk1LmcNhj3LEgKhKxqZRIz9TR4A4ojWvQDvFN2JYvVVGIWVgrjbMOVdIJbXA0jdcCx/wt1WhT8v4CrdA9Ftny80qeS0X+THwNlrOzJcZImO04RryXAsg6h3cOpUzLx2pbP6MwTjVjfUUVGaQdYRliWVOopEk5E5Nk8pkp4f2K1WoBKjWK3dHPkCAXci1U5aAatJEMqhBmcq6kFqxciuT7fe8738f1lbdvr9mub7DuXbNbcmIMkWNjIZIihdGVakQKWlR7v1CtuZa/bowRfPXqot0bis0m0A29eHOnwPowM447Hl5eNhlfQdtmjszSjMQ5M6eJ/XjAIoRTXRVDP1AphJib79PS975BQQpaSSGWUzhFBei2CVJGsehFljZNU2ueZCPnqgTtCjwGjubvUgS+cdxAfTPc9uhDOf77u+Da0v67o99QvEqliA/xqFSRbcA7aEqt9bQtONYMFdmcOK+wTiS3cwuztk4Q80pXnLUoVShVzO5CcxXQRA4GowzDogNdKLHKcLE3PHn8jPX9BLFSVWFu/pvj8ySrSgkFYx3D0KNNZRwDkr1lml8kst/vmCfFYrUgskX5GdMVlAHjK/U0SVeMB8M47Znng2wMixYfixLaorGgTRF1iNyBMnRsm0Xx6n0DQnM8vepRTvnu/XyHs5YN9TTNLTupUdZqpfOeksSLZLVFU5nHhLHHa1yjaZI4XcT3pRWmAUxS29CULFJArQSGpZDA15N/qSrxzarKfjsSg0Wo7NLQkQWp7QbJnsqpUBTodi2qUiEpUhsOez/gvGUKhYps2VKcubm9YbksWBSXl+cUFK9f7cllPAFqvLNUXTDOgdKEMLHfb+StVpqKYp5HLq9WLaYDIhpv/Z9TLdQimy5nvADJnOby8iEPHzwhhIF5mql8hXXgncOaHpRmSmNTJKQ/d70f5YfflB3KPSTXxtG/eL7oubjqWSw912kiG49SxwE4lJpw1qCyIo+FurQsVgMoRU0GVTPDZSLPBus7vJfB9eHmQC4QQ6HUCDWTR8usWhHW7ekXitv7A7FUGQCmRJ0yKonE2DkjxGAMqrbMu5hbxJG0qsYIiKKU5mvVCqwo2nLOOC/3VpgTRjlCiOQcKCWjtGF5tmQaCzkK+RGVMKbDt4FbjO/Ik1o7OdcRBdhf9PVXorlSVF5/dcv25sB8CJwbg/I9HKQoxnUwy/S3WE3NcmPHKPpkYyCXBEluqKrFCN5pR8iSg6CKIlXBgH/1C0fY15bC3BGjJuVC11uUqdze3vP29Yy152ze7Li9OfDehz+gTD3nQ4/rPEFlXt3e8yp+jTEdg7ec9UtwE11fWD22mJXj9jpySBVHbI9WTagKobJ4StCMhxFnKtZH5lRItVCK5vLJgnm/I9eIPwcTe5YLCAmmoIhrKPsZzmvTWDsWq54UC7Fm0Anj4Ox8YHs9CuWqPd9rkge6/sZDPSlFaqviVAKrRSHZBfNBSC9nnePs/AxtDHebNQfWDN6xPPNMsRBSYR4jOlX6hSOExMuv7vnys8R3P/gOSwfb3Y7NZi2fuzZoI0UhSuROMUaI0uxZ25/8IccDXz53cK5IGF6WJuqd2VoKTaEJpqbxt+QCh4PQAJ13DIOnAiG/W6/vNlsJz1QacmvyjDvpbJVS1Fx48+YN3liSqoSUSC++IA+FTYxYHT
GmgL9g3t1z3iXOtOHrGiijyJFCNYQCUWfOlgtIFuYZWyqLrud2s8X1RhqJMfDlL15SVZBAZqeI8544azpfefKkZ3m24Md//BbKFm0mrJEC/v7L16jUoYq8bjUqPv7+U37lN95jvb/ln/zkp7gxcHO/hwy6irE+hojvvBQzVqGzJswFimtT44q7qAyrnulwQJco75nqKDpRtXjAxpLpe/Fc5EloTK7JiJzrsN7J1CofxINSNFp7+Ts14q2ENKakyFWhq0HHihwAGTXIFm57G/hCTby+v2fwRjJHciFTWrihFAmSjSIZZbVI9pixFmrP0v8Aq16gucHYEedhv5tRDM2YPKO7yqtXLzg7W3N2fkaMe7mnlcJZh9OesN1iekultINbyUQ6RpTOGCfNk9Ia1WQr0zRhndDxjoGvzjmUVq2AlwNfqHwFa32TCRpyCehqKTVRlRiSIbPb38g0r29bpAxFF6pGpKnFYrxhN46MYcQ6w9WjB9zdrSlVGtP17v6EjYdK5zq8kYJSfENQq8ZohdVKkMjNB6YQeUdFmoecYwvSPQbAmvZ93227uq7n8eOnjOPIbrdjDrFRIAuuGYqXyyXr9UYGB84LuVFXtC4olVG6ts2DgHG0MYDDJpjjnmmylAoxOa7Oe5bLzDxP5Dwzx70U8lmTkzRi2vQY34Z1JRPCiNHw4os3UnwXTYoLfvM3/wav3nzKbjuS8yuc18jTXYzPzioJskTDMS/IukYmFH9KzgJ1kv66SdiMIe0qQ79guezJFb744hd4P2OSeC7DWDCmUkLFdlakyypSswQJUxVkkXJ1reCtWYKBL68u2G0OTNMsQJhF1+IUDqAyWtW2MZIC/J2fTgqYy6sLSsm8erWntiFBbTJVOT90Iw3KduIoLTsWgEcJ6bHoO8oFxa5YW9He6oR23RzP4pJFtmhak52ibIyPiOpa5T4KMRGTXJtaO/rBY12iX8jAxJieabStjhCz+tWDM2oXmKdEmApxjsQ0yyfwrZAAACAASURBVJZO9TgWlKCpNWFZUV0FW/lXfvhr/OzHnzAdRqZNYD9mFiuII1AruV2nl5eXaG2J4SDPJeupSN2SUuL1m5cotWT8/AY3ZOyqsug84VDAFko8EPKEI7PZwBRGii50i54QUwM3NU/SCko2ciYWgTs4pQlZpLlGK2I6yLWqDKh3jZQ0V3JN5pSgydZyTe26LXTDinGeyDGSU8QZaVg6v8A5T+8rh8MrGfRXg9EiU7S+A5XkLDQapSeU0mw3Iv9VVciBWndy37TXRZklYqFCbkH0JEghoLUAGIqOpOAYVprlQl7/9iCZlxUwFUypqNlQnJMtmol0vWMue3SjUVJmrq/f8rNPv+Rydcl3nr/PRx99i7fXP4Eqz8aYZYPjBmmipiny4utPsE6JzFl5kZeryqMnK6ZpZL+HOqoGdIlYI4OY/SYL6qQ1gN47Pvrg27x5eY91Qk323uB8ZbGQJr9US2HBnCqKUYZmyL0ooeLvGisJpLfvoG9KCKFXl+9z+WBBrQHqhLOWWixUeZ9L7Si1EKdIyRV3brHKMo1KgrXpqX1gTpnl8oJ+WGKUZ7ZfEQ4HQhvMXFz23L/I3L+Z6BeJp+87PvhWx+cvtpTYQ5K6sU6ZNGa6wbHsO+K0FwJmyW1IKYsAbd+pOUIMaBwxyjBBaxpkZKTrLdokdmsZYrx59fq0Sa/Z8vS9K3Jek7NQqrXNOA+rs4GUMrvdBus6YTf0Dt8N1JrYc/gL+5q/Es1VGTNLqynnmRi36N2K7bilsyL720yBp7rjUCXh3ntBKFrjEcVXkQKd0kgymVqOOk0Y41a2HMI74Mn7hYvzQu8SU4JpKgzdQyHZqUypE//r7//vPH7cEfOWUvZs12uWT1b8+u/8gGl0/NkfXVOuX/LUXWF7y8Pzc7735D2+HH/G9f4l21AZY0cxCosFA0oVHBpTNLv/h7k3abIsy67zvtPee1/jz5voI/usvgoFCCQowCSANIImDUgTZxpoIjP9A8000G+RzGSaSCOZkTJSECGgABUNqj6rClVZyD4jwiPCw5vX3+Z0Guz7PHMCjCtGmWmZGR7u795z9tprfavdU1zGaIVWgRKmXF1t2fcRtMWrM9QJ3HkYsFPLUDSrsiP0mWbqqaxhiD0Kx3YINEeW+4+m/P4fPOTf/ZsnqNLhnRAI18sbQl+jjAKT6YMUGRsyJsvlMKfCdggklZlNaxYnM371/k/47OOXvP21b/O73/gmrz18javrF/z5937Kvt9SqMilJeeGoRvo+yhKX8gScHcZQ0ErSxccm+WrEbNsmU8nXF9fk4tQfKzxt2HMGONonegAePD4DtYZ2rbj+flKtF/tQTGGlUGbLzCrhw1BGrtYtFaEvhsD8SNVqcC+3aO0dIiQlVymBOeEUULskZlOC1GjIDQZV7HLAbTFaYNKO+Kw48G9R5ydnuFqzQ8/fUEqijC/R5wfsenXbLcJN4XQ7tj1PX2foNnh1VzKl4ujzS21qym5sN/t2G2uOT62LLc9fUgkDEeLKbpKbPc7hicJ7wJ1M+FSvSAnxRA127ZnoizBrlHe0Cxq7r3lmb9xlw/OOy6vr7lcXtOvYH7HyPc0G0yacHZ/xtPPnrLZrzFGAt0P7laE3LHbRYZtYaHfZW4n3JQ1fc5UrrC6OGc+m1EQRbHPEecb9ts9xETlPSYD1rAPA3EYSNlSUBhrMGhcUrgcUSTSsCWjsMpQOYM1mhzFEla5mpgy//gP3uLi1Zpff/SM80/WVHbcaGClQWbcTB1a733lcEWPh47D2QZVPD/54SfkspNuNAqvLrZUNcznraCiV1rQ2HMNamC9vmQYYNpUlGwoURFUQCNKakqROFpxUhx3FqNFIydRDa0RiEkphbbt8FZEgiEl2SyNFxtjRKmX/0uQQHbIaO1pmopoA0OvGQbo9pHjkzmM/VE5FSZ1zW63pkoOVeRzHuOWajJeQmOhD5HTk4pJM6NrO9ptN/5eDmcrtJGhZd93og6PKjOpUDsrmzEUXsm2sRQhqBYgxCBEsXJQVNXo3pEthDFfhMNXq7UMVr0Uml5eXnHo2BmC9CRloA8DcRvYtZr5tGGxmOOd2PaGIWKsGMZzERFGm0JlRNntBxhCYr97Ste1nJzMmE4riio4m6h9Q69aQupIeUdJGlUMZI0qipP70LctWmcqLPOjBfU88oN/9wMuX71AmyjQHgQPr8ccgXOOGMSmrJSIPMZZYkK6WYrQTp2xqJJIUcAcvlL88ue/xjSWUEBPT1k8OGKzXFFCT2ohB8XV1ZLFosFW8jxjGTcAQBHCZegCptLjsAOTiWXSLLi6WtH3AzEmXr58iQxSCqUsWlX0/f4QKbv9OWpT6Lo93lvO7hyzXC5HUIIM/jEepiJQiMNBslnqdjspYtmAUhLgl2qBCBwAG7K9qKrmlsIGmZwlg6IQQEgcz4G6qQWqEeP4346U3iiOBWMU3VBozITtOpKivIeVimAGEQ2ixu7gzuIuR3PZUG+WW0K/xBlLXRuqKmKsYrft2aUlNmropnz0yTOGnBhyB37gZGHJp
eWosnQ7TeigriyLxYLtdsMwtGg1Pj8lihUU+f7Ekhh6SywaEwRv7X1Fu++lmsFC5QrtsAGlsXWF0gO1t6ToKCWhbWZx1vD88xZtHU4XcsxQKqzZj6KEZKwoDqVEyLnNVyGuA6Xk3Wm1oeSEImJdQWMJYRCniZGBzjlDbWqUsng3YT6fcXPzAq2sCL/DgDGZkApHJ5W8I4YIpcbXSQQHYwWupLak3EvNC4b9LlHVnhwymoI3CpSntSPF0oDRkYpMNdOkUFjvIt4U/GiJNEYcDAOZ7/7BI2b35qRSuHmx5MNfPRsv7iMdswtoXTGtG7abjh//+AOqpsP6ismkhuLpdeH111/nybOnLFdr+r5nOq2AQsqKHAvDEAiDYnUzcHJ6TF0lnr98xc31Eq0qHj9+nfnRnF/dvCegKC3Z0anxPH3yCc+ev8BVIyTGNsRBxFyjJFs/mddsNhfSEjOKxV3X3VoCGc8fYHxu8rgpFsrls8/POX+aKCoSB413Uj0hok9iIDPxBqM8Oiv2y8yrz5eYKqFcR3F7bG3YX8Dq5gZFTVNPqNSKOyfHbHeOtrU8vj/n3bdbPvnoJV1n8dU9FpOvMOxfEfoBy4Auga9887s8f/mS/W7HfhgkdpBFDFByfJBTwlf+9vOahoLxihh68iCW2EnTUFWFrs0YM+C9Gl1NRc5eQFWF69UF9aSiWEPaZkLnxR2mpXvMuwld1wnxOQ4UkmyH/4FfvxXDlfaaR6/dp9/NWVcVyg+YfUd7HQibAszo81J85KqIiHHAq6coCqUCaxWxZwy0ApQRQzojxwAp4KziJz/a8PzzOQ9fs1gDu71COyAUrHEczaZMJhlfb5g1Na6ZM62mvP+LS37003P2m8Lzz1dUSnN6dMpquaO6+5j/4l/9d/zgZz/mr3/4P+PyClUVQqsJJVOPgblMZrNLGJR0EZDIJXFydoe33vouF1dPef7ic55/doXRFbPTRL0woDy22XB2bNjuYDcY8AuU3XM8t1it2O8HLq4veOv1M9rVwM3llpcfBSrbUE2LtFSngtcGB7e2GaU0Kilq70lpoIqJat+z3tb8zoMFXX/Fq4tzHr71n/DgjTvk+DeY3DGtLDlO6NqBkhxaWUm2+UjMO+iFvuas5aNP3sfawwCkKOOmKUaxnmgTxbZXRqR1zlhn6NrIbrMZfeSZSV1TCFjnxOaFJcYB677oUkkpj3aQg70EZjPPZrPHOyd5iKjxZiot9Qj5rLKGlMZVuoDcR9z22KFQMrEUrFJM65qsxILQ58jMLyhqYB9bTKg5mx3T1Ik7X/+nNNWM9N577FevOK4fUDuLKoEyFI78jD4lQpJNQLfdcvfkIbF0xCjQgatlz+vfvkdOkf22Y73teeOtBcd3NNtdz8X5S2KqMGZx62l3yEv96LgGrfBOM3GGX/3i+yxvxApaTypmc4v2Ca2dqIyNY7Vecdct6NeJbj3QrXbsSoWdaI6OFGd34OLVJZ9dBEwN0+kMFQyLqWfQEUJGB8lH3VwuZXhGbGV+UhNVEvIiBUPBNiOOPol9i16jsqjtRjsqP2WmDNtuDV6jMJTBo+KOi6UBO+HhgyNeXidUsqMNJ4nVsgxoJ4hsoyUDJeQ3JRuusEPplt2mo57YMfxdcXQ05f5j6brabXusi2hVCbFrGAgxUvkjKBI8TimOAxSokqST52AlDWkkgOnR8pcp6vDvAgpqP6PrBubzOd45ur4lhCCUwfLFQajVodQ1o3VAj15w7x1Hi4azs7s8f/6CmAbIAa1gMT+j27f0Q0BrRT2tOao07X4/XqAVJRSefPr5CL3QFKWwTsAcGfl6VZEy4KKNvDfIpNyTlJT+aiPQDhmMNIzET41hSCPCWQx7QKGkRFb5dhORUubyUjquvvzrYPvSI2pcOseGsZS9IRfP8/NLfAXea4wVkJGokIlMwfmKmCR/hRJSWgwKimUYMtYWvD2mcE4/7DE2szjxWG9oO9ivBcwxmWkqZ1lf7ckkZvOaR2+c8bO//X+E1tmAVV7qKOKIAk7S1SJuBcZhUtH3HTYLhltb2QqXouh7uTQpJVY0ax3LmzV67VBYUgSGmroeCNOEc55/8o/+S9abF3z26Qe07Y66bmi7dsxhHQZaIV9Rxh44ldnttpJLMoI+LxmsEXtQ3w90nViKF6cL+j4QhzBapwVdfXNzI24A55lN5/RdeysIpNSPm25GR6igvZVSMiiP3xcZuBjP7C9w8GIRlDJriKNgdgjoC47fHM6TUkhpADKTqQTSDx1EMRXwDqXFxbBar+T3Q0TalKXLsKnmYAq5DPhGs98MbGImDJFuHwBHXdVQoG0FPqGUxuSGuMlc3XSk9BFoaGpH7Q1mkM9BNfWELhNV5vjkiN1+S9/txwyMPA9yFlagNUPq8E5RooWowRRck8lEhla63bSyOAaayRF9TGQCvg4o3+KoCH1i6COvXkUyigcPJlQVdPs9zz/b4rQhJzkfnTMjmCrhrMErR9t2TKcNINvnkqNkKLMgsKVcXeA3KQu6uvYNr7/+Fk+enbN6tSSGV1hnmMzcbW0GSoAQ09qybzc4r/CNRttI7BmtaAVVInU9IQzpVijKZaDbDGhbS+RayybfpogxBT1a0rsAdRjEuRErigp0+57Tkxl+4lGNw54e8/hPH7C82bB/tYJmTQmKvgNlHMZW1N5QQkcJGW8K1Zlsz6YTR90YnJmgj474xS9/w3a7QVkpPh6GTEnjQIBc0CeTmv0ucXMj72dlMrPmmK5veXb+CeUZxDjwx3/yn7PdL7l75y6PH77B//Vnfyakxi9BXeTOEgk5QL+n6yUiIxbuw9bLf2kDKSTBGCMHWqcIE/HWYlmSGpcVhRAHidcg76tJY+m3HYmCN8DGcnPTU08crm5wzZTqbGAxVaRpDzpSOcPqhSOyJ1IoDdyUNawHZmeKui/sti/49//nOfXEiBuoLXS9Z69eUc16hhTohkEI4BqUi1IxZOTPNAxB+jFNRSnQdh2+0hjjMNrhvRcac5Sz19lM37VMpg2qKDJFoDO9pURH7Sx+UbjJLXEIbHc7sc5KfwUpChyjqmqUKcDm751rfiuGq5wyl1c36CwWnlQZ4roQ+270QCo6U1GbATV2bxhtpOtKMYbexcOb9HhbGdVRtPQnpSTXZesVq6Xn/PnA11aWxXHFsDMsb1aYppJivBAhVxi9oJiWbddx/sGGuNvz8pPI0CnSJtDblrW6wDPFMPDRi5/x64/fR6kpTdNRVMLrE8y+hbyTNabWYttSnkOw23vLi5cv0eqI+WyGevSA9//2c65fFe5eVeRsSKrw5tszMJEQCsEa6rMZbaeovSgMXVv4xU9a7N5w59hx1NQs88Dlk8J0IlSuHEWp0k5gHnkcQlGiaNlUWDQVrx3fYfH4dW5+8XPaoWWwN3zwmw/5+je+wr37b6P9nG7Y8fLJZ/haWqylfd6SGOhbsWuQFX0Qm1nJUXokULc4Umv1aIUIlJJH1VOhiyid9US2DP0gari1Wg4WbdDKoFQem+C/aIbXWiFUSTnMUxJ7izFCeAl9IOVCNWmY1pOR
bBXRBbSzY5ZLVFURX8t4qEim5I0332S2mPPy8orLm2uKlSDtdt/S5YzzEhBv6mP87AhrG6yxTCdTuiGRCIQsXUc59eTkSDGTcmDaeGbVMdtwSTOBB6enfPjrl5zdn2NUx2YZaZ90qOzw+gjHjpIkbzWtZvTDjpwD1kgvlLMesqL0cP2sJxeHTYFYMv0mk9OAUhHjElWjMK6i4Ml6wM0Mvp5QTwyTyrPtArHPpKBQXixrJjPmOjMpSY7IHMLNRZS7ogoJRSCz7VqU1WQtXn9nPYZMP/SoknBovNWUmOWyp+RA3cdCriKyfM0EUzC9J6SMMxpvKlQPyYrVzDpLPWkIMTEI4kc+C7kV9PjYQyUC/oD1GYoaA+zhthNlv0usllE+D6knpYqUZNNirHze7sxP5fsdI9c3l2PuQ439GaDsgQh46OCLt50z8iV8kRuMSQ6PIQ6SJXWKnGTwELuUfN1CoIqyIciFb3ztq7z15tvEWPjkw6cYJ0NNKYF2v8YZOD6ZMZ1MmU7nKKP48IOPqJuKuqnZ7rbsdh3GOBJis9YxYt0Y/illHBBA42QzogvFW9n2qZGapxXaWhROqJ4xyHsmgxs3F3l8zuu6wlhLTImu76UHiCQgBiXZuoNIUrJ02x16jpQW8mLKka4bS6qzgI5m04bVcjvaXjSqjJfyUlNyj8BExLs/DAU3CKbXqIi1nhiKpJK0BMCnc42zCMBHSxF4yg6tPbk4NrsVA3uM66mVx5SGHDuUitKHoqQINaUA6jCEjkWoWQqYldYoZW83eIfvp7GalGUora3DW0cqie16w+m9Kffv32UynVPPLPXxPdqy4eriFd1yh3dWMj9I6WZOATMS+OQDB/tdQOmBYRhGmEHCWCeD1GittL4whF4KhtMXuRsZisvtAHSbg/oSyUwdemOKZFdlUxXGkvcvxC/J4B5yIPk2h3fI/EjWQd1+/daKrVQqFsReG0Lm9PQI5w0hDNKJpvRtoXghgdJU1UQQ7XEQYqgWYEvOBV8VXK1oprDfduNnPqN1wqg0llrLsxpSlKGEgjJKNrWhJaMZUkBnQ+U8GsO+k9+7aTyoxDB0t3ZbrfX4/ZN36sGOl2Kgrt2tA6ePA8Yp3OiSO3RLZaCqDQlNFxKTuqBUxHqLomLoEtYWNuuBbckMXSBFhXaSp5TTG1Jy4xB8GLgOWHuhWyoFuQxoc1CFkKLvJD/zSTNhfnRCCJJLaSY1MVrIiTBIpk8rg7NmFLcCZKk9sFXBOtitZbgquYCKhGCgHK6pI/3OSEYuk8T2Shj7NBlFGk1dz0lEhgIhB9SQ0KZi32a60FO2A2kDP/rzlsXDOdPaM5tbrDZYo0ilEEOmC1mcRViqpmK68LLlzA1dW9jHgMprQtzL+6tACuNnWfRDisqUEToUYxzPHIELdf1evq8pY5zn7be+xX/1r/81T55+LhtuFDfL1Uhp5BZZj5Ii7lySXHOUojCKXopb8uCXqzEORM/DXwuGX5FywI6WZcVIY0Q6CUspI4DNoJQf7ZMFRaC2lhwH4h7oLfuQmdVOsH+6EFTHZhmYn1a4icLUcm8r2bFbDcQ+kgDvEov5nBgcHYHcRioPR49qmmnG+EC3L5Qk7y2JgxQ0BqvdeKaIOKONQGjqxlDVjiG2hBhomgmUwmbdMgygTZAOQC1CrCqeHDUGg7eGkxPF5eX1mAU8DFdaBPVxc54PnYB/z6/fjuGKyKZ9iVfgUSTlSZ0mDmUs/jQkXaMQJTaPCfAwjCWsWiANhw+WzAplvISU2+LCXEBbRdsaPvl04DsvPPcWU2Kn6drMZGJRVlDZu32Qi6aNtKFjdd0xn2T6XmhN86khGcBsmfsZ5B2/+PAHPL38nNpZrPUUOky0dBuF2CzEalE7S1byclAUrLEsVzdc2eec3V0wmUyoGs1uG1ldTYm9xtjIwz/0bIbCZh2JncLg6YcBW8Qy0Ce4eGJhteHu7zuOT6bsd5aXn/Q0FRzyDvJg3Vr/5YV6uOQVAW3UVcVifsLTPtKmTClXfPL+z/GVxvkJ0+mRBBRzBuXQGuqmYXEy59X1K3LajZADRQgZX1vSIBaVfCi2K+CNHRXJQ7i/YKxGj4pLVQsZTnouwDXicz78WYyRDgLFaCEZ/2yKgwWpjKttJS/kNK66S6FEj68qUdDL4QGCA2RDLgqM1LEvKD+z+YyzhaiPy40mJNAqSh9DAOcKtU7gTmTyt4WiBHGsjaUPiiEpCaj7jNoVSkygEtNmTlNN6dINkwYevbng+mZLXXvqBnwduVm1QCHHCm81J8eRzSZR2QpVOlIec19Kjxd3RY6a9XXHpKklg1gCMXUM+x5lEjYjdqYu40yN9YmiNdoanLcsThz9i0S7HshtwU0qXNGoJDAD9Ph8MW6OlUKnjFHyswG5VHchYJUTcpaRTIm6HdDk+TXWkJIaBwfNECJJW4wueAVGZZIdmEyPKCWQY6QG7p/cZWt7NusN2mimsyltOzCELC/CknEmU1WaIRQOxYyUhPN6PAzF0jcMmr6TwuIYEsYoUgqkZGXAMAZrpQD4/v37nJ7cQVH4/374N+OWUw5QNW6oxns+Rh/a4DPGmnGgUEI9QgbsPkZiChjvRF02AicY+jjaY+QdJ5sZuRhOZwuOj+/w8uWlZGu09HqFFNhv90wmUx6enXIyP8JXM4oyXEwusZXB145u2ONqQbXrAg6olZfL7kEMLYIzjnk8yJSQAI1WY5BYLmC1d5TsCKMFLpNHIJjgysv4LvS+pq4rur6nH1rsSImTeiV9axksBfo+iJ1lhERoxoM2J1IKzJpa4BamomkWrFcdRUX0GCaX8k6PIt2KWsZkjI23225ULxeSIY4/Q01MhUoX6olYjGJIGOtpJg3OOYzLbPfXFBupG0PsHbETCym+kHIR600zJcWBftginkixMh8yabL1FCvLAc4Ah2yBDJPWaOra4WrPvh9w/ozTu3dZnB5xcbFkOpvQHB9R7XZsXi2ZVBO0NQwhEYI4JIxxEvYeV6dDL3mvXISs5ytH1w6i9CrZ1jl3wJ3H2+6XA3DiCxR7pu/DODR+EaK/LQ8+BKeQAt8yDmd6HCJLkSHm8Mx8gfE/CBPj9nncQCutb3viDoNYKZp6BHSUIuXTWpcvnXEiIk6mNUop6qZiMnX0fcf15f72a9QaigpyiS1qHEBkM1LGHKXSQC6kIsh1rcavTRlKkpqVVBTaGlTSDG2P04a6MoTQEcJwaw+VTbtYo1OWc0xpTQwR48FXClRhvwmoYplMKskVh0hoIWWx0hUFocuo4lAojPYoWzHkHq0EsBWHRE6ZylfUjSYOPTEkSoSS/Nj1Iw+79GaN2VElw20ZLc1yzqrxMhtHsIzFWtlQNU2NFBkP5BjZ7aTGRI93N2M1fT9QksY4PQoAhn5IcqlH3pelMHawjQCsbMBIqa8qCnIhxzD+u+M5nS3NZEoXdoQUiCWN26OKLgTKSLHsUuDmZeHt33e41ybMVQNs5c+HRACaqsZXFVhDM6+YLTw5DWyuC90+0Lc9pWwxLuJwxCA2cHTBKiPgLcTyKSW38pm11qN
MReNlmCRE6rrmrbe/wutvvM31csvl5QWr5RVt23M0l/6mUhSRLzDu4jyQD3jOGaW/nGvVX3o+vxBFvgy8EMvgQRAR66UIHUnyjmn8fBdQ1oEyoIV6WVSmrgxGZVQMbDcG3RaUleej0BP2iTLRYIWFEAvkpFhdRiiKo7OG5k7Ge4PWAgszJpJDwS8s9dTRtFZs9gE5OyPEKPK5tZpUxkyyUThnQckSxbg8OkAKTSPvuL6XZzWmNL4T5A6jKLJUSQqSZrZoWC21uKqUfLOUGumFORJG0vU/9Ou3YrjStmdx/zm69+StI8VTVPDEMqUYxcTXmHSFSlu5oBg59EQhNqO9rZCy5CgUQsopWktR34gJUjhyMYTU8uMfJL75bs13v1bIsVDPj6mnU2xt6Iaely/OGYYpkzilOXrAt3/vLrtuxz4rvGmYVzWmfo4vEbup6NuODz7ecPK4Y7eSLEUMhc3lOdtzw+m9hNaZIcDETtkh61g15jWOjwyb1RPa9oZqMuXu4wUvPl+xXmrKYDiaRppJxzB1zPdSoLx6LgHSvjNUU8dkVnHn+IRf//pv6X/3LvfenrN4fMSrZxdsrndi17DSVZJHlK5U7yiy1hSrCaVwGfZ8sLwgPT+jTxDCnhh2bFaX/NtPfsnx8T1U6gndhmZWkZPF15Z7jx7w9jtvc/VXP0DTkWMR2KjyDG13ixsuSZQ+q7UMxSPqEiRQLFsPRQg9JluMthgvB6xSmarWow1LMZ02eG8ZhmFsR4+3ys3BVgII4l0z5mAUk8qz366JoblVw7V2dH0/Hi4HOIaQ3wpy0Sha8eTJE9bLGwYT8VNHe94xnRkKRug0Q+Bm2JHKDV3qUBR24YYhRh6/do/t2rFerbFac/y4IX60ImZRUpSbYaqC6zSFxN7sOH3Lsl31uGbK/N6Ux91AN/Tshz0nJ8e89tZdPvrgFZtVR62nlKIprEEfsU8btLIY7zApcb3q8I181ieNI8eAnSucq8bhs+d4MeFocsbFiz03Nx3To5qz147IprB2hf2yRRFwqoC1FKXIKuBJKO0pjGpmSlTGCf2sZOKhALfIBlGVRLE9JSmaqhaiXlYkDLFUKNUQUqbtBk7feIS6/JTaKSqrqEzkjQeWX/Vbht2GM5X5J//sn/PpesePf/Qj+rbFag+5J0a5iFhXODqtWRzNePZ0Rc5KClVDIqVIVSW0VqSkWN8EJtOByUQxmTj6HjarTBwiMQkZ0FhFux24e+c+3/nWd7lz54T33nuPUrJQ5XKkajyFdHu4632KgQAAIABJREFUKukKEPCDHbvCrOXqaie4aKR3xnhICKzFK433mZx2kkcph8PzoIo43n//Yz766AlXr16wOPZ4Z+n2if0OKJrHDx/y6PSUo6qiD4l6Oue73/wGL65fcXFzya7rMbVnt++YaMuD6Zzj5pjL7oq2iEqqFZzdOeHZqyXGaLzXlByYzAzdPhL7LGAg3UgvUJKOMJC+MXX7TCl5fscLgDEiZsgAm8lZBuu6qdhudmMWM49CgQzFpZjx/1dIOXB25wF9JwqrpsG5ioGAVnLhSFERegl0lyLvhclRZHZciL0Zt/m9WDW1bMRUEorszVViOq3RJmL8wMnpQ06OpattiBteLZ9T1wuOZqdcPN+xvFkyqR2m0jg8tZ9wsjhmNpvyyafvE2JHTmIVkkW+KIK5JFTJGKMJQ7wl4TnnCV3PfugxteXe/buUzZ6uWF4u91ztd1xfXVDZI0qKxFDAymX3jcev8+rikqvLa5RltHLJO1CG4kQpmso7Fsdz7t475pe/+DU5VjIsOEBFmqqhpJZDIS/lYMU7UP4s7b6jafTtxc/aL9mSRiHsy8WnSimUtRhjCMNwe/kzWqPcqPwVGT5BUVV+FCMONqfDViqhlWY6rdjvBzbrFSEOHDZjYouuMNaB1kxmifOnN3ztq1/jnXff4fr6mv948UNSLvSdpu81abnDKY/RAuHou46mno8DnigQOgheWvVQVCTYSMGiAxircUqPW709HiOVGzqx3+0oWd8OVjEkqqoZhYxCGQFKqQRiGbd1BvCw7xKP7txlNnNkOj789RUh75G6u4TOCpMXIwhHy3kURQPNQTKD3hvuPphwfDrl5fk116+2pE7R1E4+h0kGMOsMIWUoI9yBsZ8qRnGHaEMhE1JgMp2Q0sByecODBw/xteHFRUvqBxQJoy0KxnqJLG6GAimBzw6laobWE4Y9qUSUymglJeUK2dqrKNvRLkqVReUzRmnWm0DXwZDkbmgpVFazupZuvInXTCc1fbAkI+IquWDbgk0Vq48HzMahThZ0w0u6bqCqK85Oj3j3nXfxU4+a70EPpGHg8uWWtnsll/2cSHRYU6AYFBZVPFql8b0mxbQpRtRIMhyGTFU13L/3mK9+bcYHf/eMm+sdznnmizl/+b3v85d/8T1eXlygSNS1BzVgjMeailJZrq+Xt84f5wxKJ+LQU6ig6Fv73+E5O9yHRKAaRUWgZI2rpeup5ILWdrTTStbx1t1RRITI2aFUg3M1y+0l33nrTY4msF9d0l1WhLYXMWhkI0yUI64M3TISGLCuJ+ZAuxpYnN7h9O5XsLbl8vpjQklElSkpcvG0Y72VPq++s4SQCX0ihVHMRYTjFFtpFFLikplMGo5OGlIeaNvd7TtqvdmglaGqK6rKEcuWgtShkDXGDKTQ0/YasqeZPmBaTRmGnpwElGSsIqRCjD23ROl/4NdvxXDlvOftd95g2Ey5fuFYvspcbs8xWQ4W7St2acLxUKHLQMoS9tV4VDFYrfCVou1kolcIUrqYLKvvfgZOUVQi5IFZo/jljzt++G7gd39fM5SebinKkjcVswmkZkHuYLcMhKTwZycY7tJEiLsdO73nd/7od/jO775FadecPznnhz/7gPVFwaFRRqGd4t7XFf2mEFuL0xqnDFftEusncklIhRwSVZ2pG0uIgZgCX/vKW/zRP73P+fXHnH/6gk+fbPjWs0fc7KGqeyaTgWfbG46a16iPPG27ZbPe4t0Nk4nnN++1/N2v5GJ5WjeYs8R2HQhDHlV6KyjSMV+URhzv7GTByYMzTh7dZdp5zh4/BNeRQkt7fsWdqWWprzCxZ15a6mJIR4/5+nf/MUend2n3e4Z9xJBQxqONHxW9PfvdQFVVTGfH1PWEi8tXlCI0QGstxmj6IY/UJ7FLdHtwvmCtqLdtG7BO7CApF3a7eNv2fkD1AreXNllGiCXEWlGUD0WSTsOw32Kdx/saby1V5dntWw7o6D4GXOXH9XGmJPnajBYryjBECgN2WtM0p/TdwHa9pJTCkV0w0XN0gGEVaHdXPH8KJism2nLv/inJJd75g4rNas2L8yXbuOTuccfje0ekMuHp8zXr3TPK8pR9MNjG8vJpz2RmQK3xkyknVUMkcbV+gsFhlEaVwnov+FtvwOgi5Yq5w+spqS8s9wOLu1Nu2kyOGq/BmMhnH7+gXRqccxxNDc2s570fPuFPfu9f8NU/ep3quON/+l/+V/LNwKRxFDTXy4jNBTs1gsjtCn0cSBj8+HMx2tzaSZSscei1wm
Ko60bsn23HsnSUnLCDfH7Rhbcmc/LbJzSpxw6RflvYXN1QNxWhmbHD8HeffMSvPr4htBFNYLP8jMncMoREqyElxXIZiUNA6Yw+9CIlUDTk1KOVpvZT5rWBfDMG2C3zKZATXTcQg4PiUdnTbxw/+Ju/4oc/+B79mPcURV3C8weiFiQwCoWADUAJQU4jB2jl2Ox2TKeipg8x0u0Su90W5xx1VTGZ1my3e6mfMEJ96vueqi7ADTFmnN+z30d6LUQ26wu+UnRhzUcXga985Sv86b/8U4Y08PDRQ/78L/6S//s//DntPlJjMQUe3n3A7331W9ydn3CTWt742ttcXV/zb/+Pf0NCE1VP3wbavcLYmmnTYEsi58gwDLTs6fuEr6bM5hXO17x8cTU+NwJbyAW6di8H4KiSV1WF9zVd19H3vSimRorhURnvNVqLkFLKmDlxmpjg8vJSxLOkuL66pqpgOvMjWGR8X9hutMlIB56TTmG0SZSS6ENL19b4WmG9JyfF+fmKN9494/TUs9sMXJwX3nnzHV7dfMjVq0uGYcvJXcu3v/mHfOcbf8z7v/413//+X/LsyTNcFoz7ZghsVmumswnWWFLUxBjEPgOyBSiSv7XGoXTGe7EKh7Gs2VeG0Ea2yy1XFxWLOw+4uFgSznuMgWk1YXZkmE0b7h0vSF95g7DpuHl+hc6KO4tjhtCx3G6ovJNtXpEy9Dj0pDQQLjvWm2vquhZiYspC2SuZYdhgjB/Fh1G1TVBXUn6cc6GpKxmCxwLsw6+DTfAQrk9jtcXt5Y9DTlrfXgJj3wtBVMmGv2lqKEmgVHxRWVByHreXhVQUMYk455wjl4SvqhEgI9kzTSGETFUr9t2Kp88+5eLlJUoLJAYqvK9oGhFlvZ8QY6HvA7m0hC4h20WL9RVaRYqtmJ06Tl+DgQ3Ll5k81ITecP5yz6SecHpUk9PA0LdobTGmurVIxii9ZDELrl+P35uqhs0mstsXXGWYzBesL69YXw9Y7ZnMJxSuqBtD13XEmPC+YrNq2e8GjNV4L71lJo/bOKtwlUX7wum9mj44QjKkvibHFq0cOcvPKBXZBknJsWIIkd22w3uHUgHogYy3fiQMZIaw4+c//wlD6BDAmFibjfWEMEi9wfj+N15hDZLbsQ2b7ZZcOpSyGO3EEhuh3XegouQi64IdFK7WKCeDqA2e2IEqEeMUxhk+f3FOXVXoYghD5nK7oqqPRqiXk98Dg6Zn/fIZYTPFPHqDN958h7OzY7yXUtoPP/yYoVhOXssU3bK52fP014pmWghDP1JJFZPmjKSW1FMZJIdBs1m2t6RL5w1vv/kAay3X1xvAc3J8hzfffZf/+r/5b/nrv/6P/Oynv+TxG2f85Z//v5w//5B2vx1tipbTk7tS1ty2rLeHd71lPq+pKs9msxszc/k2n7rb7aiqitsie61vSZo5S+VOKBB7EZUK4jTwvibnQbb5aLSeoY2jNlAYKDnQHM34H/77/5Ht5pqPf/M+Tz5dMwRIeWB6NMFYy27dUkLE6g0ZoQ+GQWGqwHx+RC7w0UefMasy3h8RQ2Toe1IJhF3PettinEb6DQNuLJQUJ8AEpROhbzm5O2M6mzD0mvXuCmOjdNa5mpI6FGOUocj7pesGiYXUFmu19FqqgrMVD+4+5Gh6xo9/8kMm82rsuoIYeyBhqGSqShmB7rR/71zzWzFclVSxenGXoVNsN4n+4hoTsnhqtXTXzO0UHR1JDeIDtxqip8RMMpGUI9ZCjo48enZzAqe+6KuIpUCJEkheGf72F4F//x82mKMjUveM9Ysl7doTrKXSR1Reo5xHUbHdRkK3Yu7m1DZjXObq1ZL//X97j6N5i6Ul7zUvf7plfj9y9DgwOYbQTrl3N1FpSxgC210ra/0cpKNJK3zj8U3FehXpQyT3G9774QeksmN+95S3v7JgvVjx0+9dk9sjmqnGVIZpZXHGMWmO2dxELs9v0K7FFOivOomdKcWl6TBVAGQQzUUGKqtF/b3NfmnD3dmcs6MFE9/wwUcXDNlgzBzNFOqadnvNss9UBRbO8+DhjM+7HU+efYa+uGS/F3tF5STLoZxhGPbkvdiIYoxsNxs2mx1d28v6fCS8dV37haJSYGjF9x1jJI6e9Mm0oR86UfBVoiQzZjHg4G8UNVYyFdYZqtrTHNWkIdHve+KQiEOiJHDWCbmqBDbblqqaSTdMFtuDGw+OUmSdnmJiPp3z+PVHPLm44Gq1xU007QB9WkFWVK7m6PgIg0KONk1JVpC/20BOAV0KsS+8+fUa/2bF/Yf3ufPaG6yfXhGS5XqjUKnH65Z3H7/O0tasrlfsr/a8+fqCfXjFZtPz6mVFt50wtIaT+YLNdUsYMk09YdIkvFZQBlARNZ3hmRJjYlbV3L9zwvX+OSb06PF5irkmpcgb7yx4/Y0HTGc1P/3Zrzg9ucsf/dE/49Gdt/j0k8+4fBr56rtzjubH9INhlwxT5wlhRddH0gDOGPSI19day6FsoOgvelHKfkBZz77fCt63JLx3RJO/2K46zYcvP6e1WxyGha54zS0w+gXXNzvaaLEKNqvn2Kowd4GmhqOjiq4PbDYFZzxaC9o9ZU0YxH9eNxV37iuur1q8M7Ipcg6jt7R7RYme3jpi7FlvIs1EUbSouyenjkf33+XFi8/Z7DZYL3bazUYgFLkY5BUrfXrEjCLhrB4zIGIH7GPE2gnOKUoeCDHjnKcfFexSCkF4t0ynkxHXnsSijBpFElDaSE9aljxPSkWQISqz665YbmD/m5ZBFb71rd/lk88veH5+xcN7j/jDP/hP+fnPf8r5+TlXqxXvnz9l/jv3+M7v/B7T0wmmqfn6N77BZ08/ZdY4YimUoYCCrtvjdINzMpCEsMVoT+ghhUBOmnfefYv9rmW93tDte6bThmbSsNvt6EMgxdFWNmKftXY0dS1iUxQAhbYW5/1oI5POG6UMdTVl6APeG4zVoDPd0GN6sVxb4wCDNsMYJ8goMiUrnD4llo6SB5yZjhh6R+glK3jvwZzHj97A+QgsmR63XK2fjuF5TQyG1ZXivR8/4frFD2i7Ld5NAAGreGPl2UIw5+t1i/f1mPmRqoh+GCRXRmYIHd5Xt9tJow2ZIih/Je+37aplUiUWjSVkGGJh11uc6sEXtBVQw/07d7h88or15YrY91SNbOy7kChZqi8oDmtHwEhRhF5R1ZZvfutNVusV11c3tPuI0uOZmmXLXzkvOawwUKweN4Lp1s54GKIOxLkv25IOGawDuIIiDTI5RiH2jv+8aRqxkeUyug7irTgz/ocj8EWs24UerT1lzDZXxkumj0QMclksRTGzJ9y9X8h5y4uXe/pOM1s44iDuBK00ebAsdwPWtvL3xbPb95IjpqAYRiuvAiegnqP5hF1IXOuAqT3zo4rFpCLuHfvukhAH2fyN2c6C5PGsVXS95Ghkey2bnapSmDAAYoWPW4XTkFNkvdqzWicwid0uS5ZZldHKGJnNDc46tDa0bYtXjpOTOcqKtanvCu/96Eo2UnlGKQLeyGVEmyJ2tbqux7+OhCFjtGfoM1UtA
0PJGq0M3rkRMx/gkNHRdrTqFrpOCGvWiQ1QaXE7VI0hl5blKqJoWJxlun1PComYakIv9t6SM8Mgg/2dh5l+B3EvXaZee1ITxLGhNDlr5k1NCuKMwWoyFfu+RdnDtl9hdEDZSCmSkbtanmOZsN/u8V7scjEOYv9eT+hToesCb35twsMHEz7+uwu6feDOvWOefb4mBrhzZ8qdu8dMZnNKdLz22iPu3T9lNmv47LMnfP7pSyavHeMrRzNT/Pzn5zx79n222yXGTvnxz37Grr3CWiW9TSFjMby6uJHzU48uj4k8s8PQ0e73hFAgG/IobqQkz99hS1yKCCKH3rcwPg+VnwkUQ8l90Fbybrq8vBitoJlc9ljfQGcgy3utCw5fP+AHf/FX/OZv32NzfUPRMPENcdcyoIi9xVHhjQzqqoidMw7S4VpSYNjt2F3DrjioFUVljLSyorQec+6Fo8mMzW439h16jFHsdj1xEGFYa4txhRwNyxuwXrSrEMJ4Dso8EDqwleL46Iz5osZ7Q9v2vHzxkrPTBVBYrl9CzhwvZuzbjgxM6kaE9KTp+0hKCWf/4fHpt2K4UlhQM2KOhNRjrGM+aehCh7aKqvFoHKX3mCTFlkUZMIUYhEsf06iMIxfzlMdvqC5fqGgKsTMgdoJnTwZ+8P0t3/jPGooKhKwgiDJVXCQXjS5CaUmxCMJWabTVOC/IyZvzyDAP1DaT1pCHge11wNSFnAyby8y333kTVRKXV9estjvcRBHbMloDDH7iePzWEZufX0FJaKUYdj3tcsXsbIGf1DR3FNvrG3a7PcubAa0jbgr7akOfDfttQCVDGsSY+kWZmpC/YtSjkVkIWWJxO6iMBZ3V7aU3bFp2257V5ZK8WJB3A7O65u1vfpuL88+5+OhzSobG1XSuIhbFxeolyhics0xPKyEcOSvnd7aoxmB6NVo8ClJQeSgRzfSDwC7sQdFUmpwtR4sjYgz0vfjUrTWEwaKUbAeKHnMQxo5bqvHnPlpBdTpYzRRKOUqRIGLKaczqHcLdsl0opYhvuSRyStJ3USylaIwyLCaGf/TNR5j5HTbbgV21pzQdV6uOupbgeQL6PjNthJWmlKFoS+oyD16rqWYzjNcY7SmVYbcWO1jaJbp9pNtbUgxQOrztqOdTodOwo28zu3XCVTNMGgh9x6a9YjZTGNOwUwNJBbSHe/dhf20lOJqFDKSdvLiyqolRvj6tBD8f+0TIMqhWM08x0EfpTokh8psnH/P5+RVPPvoUoyCpzLYf6AeH0hVVdYQqHbpyOK1Yr3c4pwlRrFwoiH3E1zVGJ6nfQUtJtVZjXYFiaAO+9lTOkmOkbXteDT06FIxSZAfewqPGkTZptPxmhqFjMp3iXWLaaCaTmt06g1JYozBFkdGU1Em+Qzmcn3DnwYLl6jMaKwHZkKVYOkdFNYW6yex3ouBP6mPSAGGAxdFd/uSP/znf+96f8fEnH0jnWUx4V8sFUY2f9YO3W+tx6yRDgFgL8nhJi1LQqzI5ClXUOoexwBiOLlmjtJNeJ2VuL/hGycFhtOXRgzv03cDV9Q3/P3Vv1qPrlZ7nXWt6p2+sql1VeyC5yWY3u9mt1mxHEmI47gAZIOc3GPkDAfwHDB3EyK/IYY4dB06cIIKijmRJdivdUpNms5tkc9hzTV99wzutMQfrrU2dWAdBDuQCCBAEsWt/Ve+7nvU8z31fd0rZo5RCll35ENntrvns039PUdUkWZIknD+4x4M3H/DXP/lrlAGfRm4OW17uOhZXe06rEq8ripNj1vsdarenrwQuq4iQJIxWKLLEWoSCECVa56GNHbKhO+Ox7+RE+efSLArUmCeTwcUcOp0Sxmjmizltt81QlOkeK/AU2mC9m85nPRXQAM5iCkNhyiw7FRVCZl8PSFQUeSMTM5xmHKCuNKaoyIEZMYOHfJH9lSJiqoL9rUWantGNSC2www1KVNk3CHgbuLq4YDj4vAHxmcCnxORZIQ8JtJ4AJxPEIQcX16TkyBOcfH7EEBFTnRJKTSaO7G+BTMRq2y3lzJAmZHndFAxDi+kkJiginhu3J0iJqooMJChgtppBP9K1Dj8ECimxaCKTJC0mkpcslw31DIrC8+zJFSEofMr1805ObnSWAAafslcqa75zqRV3x+qdFDN7GUOME4Y9T87vCvNk6YNpeq513pyTIj76143Z65wtIXHOvpaTx3Rn8J+2XUZTFhU+BkKMebFCns6348DZm8tpc9rjnUEXZB8YkGJgdC7n6pEN7aoweGdfS1PzZ0sorTBa4IfA5bOBMSXmi4IwQowOayG4TI8LU6ByiApiQIiQPy8SqTTmzmAvIj5Zos/ZSMbIvKGNCak1w9BjvUVqmM+zR04q0EZRVwWH3TgBDnKqk1QB6cuskkmZSmiaKX8s5Etv9rPrDEhIcvKr2yy38rlOF6VGpkhjFuhCAJ6+G6Ysufx5hMyQFqFySZVCYCpFUonYxUxuNXdyXkXdNFlyNgi2O0utHIVRBCEI3iPLlIdc08UuWYHUBqFzeG0M+ZnTVX6Oos9AiePlmv1tj4sjkYgqNFIrnLdZihvFtKnJMk+lUkZ4p0jXxtfyYOezT3EcIqaWLOdzHjyc03cHVkcNp2cVDx8dUZg5TVOzPl6wXC2QckYKkuViRhKKzabjq6+ecnFxiykURVlwOGiubwaefvFV9vARiPSUKg9kS1NO8tiEizmEV0yALSU1zo14nzMdY7yzTXx9r66bCqX0a78kZErjnY9WpHzvyF7PuwsyKKPQRr4eUidSHniG6VkQCTv0/Okf/yGff/LJ5HHWVFWFTBLvLD5k+Xok5lgKkSm/QggK2aCVAJOl+s4nxjYPnIWe3tKkclaVUCgpKWtF1+W7SwiOECNSalZHDaaucCnRdyNlUdIPAz56osq5kRl2lmXSxaxEm3wOxSQx1ZzF0Skvn10yupGIJ0VH1RhCGCd/6EQ6rjTdPmKKglJBCv8xoNhVJp2Mo2MYIqaZ06QEbUBoRVPNcAbSUCP8AUkiCElUDtyU0u0VRaVfm3MzOjTiFaSkpwt7ptXEJBAycXNl+egngtXbQ0a1UiKKMoeX4vFBIYJDRYmkopjXhFGRNMiioG13NAoKD2mE4TZgqkgMiv4KxltBf4i891+/x26zoW1HUgDZ5P8/CY0QBmkUDx4f88tPWryz+RGXAddaxqFDGImrJLNzyW5/oD04fJ9QW4lY7Ni1DpWglArbZ3+LVBkqIISYiFyTaVikPK2UEY+bjMSgosIqQe9G9JWjPAzEPiHP10ibWM1n/Mpv/To/lQWfffkVIUQGrRmdZDQVbXdFpQWr9RHWVLRbjQOESIi6QTaCdBiIExWw1CbT+Kbfl/OBQgtkmozOSqFVwfroCOcsu902F1km03NSOQBZZ4Ok1lma4mP+jGJqGJiM/6NLaKuIXhLjlG+gczN2h6vQSoCM6MzpBRERwaKVYfSJstC8cbbk937rHT65NNxedthZS9CSy8uWctbkDB4/0t0eOG6OuUNkoCSVhrceL1k+WKJmhs5ann85Mn5pse2BsW2zyx2ya1ON
yMIhVM/R6oiqbhis5/bGcrRaUGIJyZPcDU25wFuJLgRJJ1TjOXmg8G2JGyUhOJKymdRWaFwa2XddlleoghRyiKmzA0IKXEq8utoRI9SzhuvLK/7oL/4QPwSG7SWzec7S8l0ihAolagQGpSqqMmN927bFVBKVVMaxS0V763JIsAgkmZBK0nZ2wirnLc84WOpG08yqjEIfW0ZhmAdFEoJeRp4zsihKUpkoNFRovC+Z1yeU5QGjPN4q2jZDGrTOgdlIxdDuM4lvesaq8hgRn1AKg8DQegGhRAvHcuFZrDxGObZ9oqkWhJQvX4Ve8u63f4UPPvornr54gu96+tZTlXOE8Bn4Qpg8oTJL4qbMKiHE5IWQhCDxPjCbZ1JT8gEfA6osJjx2JFiIXpO8zEG56uswyIRn6EZ0Epyfn9EfBg67HSSHMpLgDDE6ZlOu1c3VC549/yXr0zdZLWqW85JI4LDvESp7q+ww8NXLK4bhI948PCYUiYNInN07QbZPSEW2+QubQ9q1kHjhkSrQVDO6zlGWObz6sPPstrcc9nmiKqXG+cBoRxbLjMd2NjK6CDqTPY1RzGYVg93mzQHT0iJmycdoMxBHRAmTh8pN1L15syRFxWJ2gpSeGHucC3lrLxVJZNTz0CWU9FRNgdGG5Pr8e5uooBEPg+LFs2t01SJN9hFJ4Ri67CExRuKdY+y2dNsOqQOqCJiygKgnHPnXQblS5qytNNGn7vxfkD10Gv36++cQYZmHIFKDkIRp6rzvNojiBFSBMorTs4bnL64JbY2JGqkTl9srklGU6wUi1sQUqI8WxHIgpBY/dmid8DEP3iI5nDv6DMs/OpphCs/m+ordNk0EpHw+IwJFqRiHOJFVE6Up8OHr5uNuO5Xl2rmx8cFl0zkTICiCQOZh1zSAgEQ5yQ1DiJNnMUt67pZdAoW1gdVqluV13pGCwIdAWRqMMZhS0+3zUCEhECoT18Y4YMpztDaEIBg7ByETIhMQgsO6kbpa55B7CUxepRAn2sI0NJNKUiqwrWVzPeBF4FvfL+i8p2sH+l2Pig1+yhojJbyXKEaUDLgILsosSS8MQiZctJmU7AO6FPnzNAV+cAgdGYYBYbOcbnVU4t2IEFCWinpWstvaqZEK5PxPSEHQtzbDEJKgXpU0M8F+63CDoyocSsyIIvurEmLagDh8IEOuKoN3PcvlAiQMbiAyAgIfHUIEpEpEDDE5rI8IBdoYUAkb8wZUmuxfCaPEqJqqaJhVGhduEKLHqJJoBNZbqplmsJlnKBJoFHYssh3IqCn0fqSaZfiNGyJjn+lw/cEyDImApy7z0KVt3RSkLCAKYtQURX6PtTKImIOSM0cxD1ici8QwMFsseOPBMW88XvPhh7/kwaOHHJ+sWB+VlNUJp2fH6LIgREV7q/DWc3F1y+3NhsuXFzx58gtCyM1yVkdUdN0+ZytnQyRCjjw4fQAxD5jKosRFi0nkZmraxIDM0ToTKEhKQSTkzeo0mDk9P0Prgs31hvawR0qBtXaKxDGICWyVx8GalLLnNURHWRrGMTfiYmr0iJYom8xxAAAgAElEQVQk89Y5uo4/+t//BVJmiERVV9TVkq7b432+cynl8cnhh4yJ10qRpKSqGmSZ0I1DN5GDGQlCEuxIdB4mKIwSIROoVYEsIlWhcCG/6z465vWc49M1mMToLIfDwHJRMXSB4DxBZFp1WZVEJKWpWc4XpLTnxYtbZKsomzmrewsEirZrUUVCaWhmhrbfM9qpPrlEM9fsY8tstqCu8/3/b/v6O9FckRSff/SKV08v6Q+et779mOeXOyoRmNU1WM3ybIlzLd24Zew6itrQtY6QAiKSzemmQoiOOwZmmhBXX+Nn80MZYg7ETCGx2wa++LmkmZ2QgssTPiMR1iBkAWhE0FRSYFvwtsPMarQ5p64LzAzumxp7feDDy5/jtOH73z3j8smW7U3Hf/KDc77xe/f44f98ze2NxXhIe4GgRKtE8JbbjeXlxS2r+wXOj+yuWkoD2+2CxiZ8u2HT3rBeJuqq4eG3F4QevvjkillZUxWSoRvp2oHopsR6csEMKRKszYSjO4wkHSnkrIM8xQVdlgzhQPIHCqM5WWiq9X3W33iTb77xJu88fMzpg7f483/9E7RICGkRPrLZwTb1vPX2Mffvr5jP5/z4R5+za0cW64rlumK2XLHfBlLdMbQHvHWsT48ZnzSEYYtkoDABowyxUzgXkEawWs6oqhzmFoMg+ES7308FNktDq1qzWlX0fZ8nNyr7ybKBNjdOWidmquJ2d5sv2iqvthEBL/P0XyaBt5Yo9yxmNbqsSXJOHBSiBLvfUK0KvvG9b3P09nucypHnz25ICoRWVE3JYnkPaxNdfwtcoBGE0BNHUAfLD/6LtwlFw8FHNs93PP/lhs1n2xyKW2rqpsAHzzBcEpxFG8XiaEEoCnpx4Oxsxtvvzrk5bPjogyd885vvYIpI32/oRU+oeuZVljmklNhcFMyaBcP+ls72nJzPuN1lH6IyFq1bjEmkvkCUFgOgJNYObG+6TOZJsJyvGA6S4H9GWcC944Z20PSj4/hI0pQS3wdub58xyp65mtFUNavThn4YWM/nHJ8cszo+4osXr4h2x+EGxjYiZKBpKsq6ytPnmFgeLen6Nhu0paQqF6i+Z0wakTxa5anVpzuBqyTrasFcz/EXI8cP3me7fcnl5QWbi2vEOiCCpSwVRS1QleDiU8XRTLBUkaLf88mff4xsB+KDGlmCkT2NLCmXDTPdUbqRqoH75xJ7eIbrI36UfPGLD/in//S/w0hBYQRVVTF0A113yOeIzp4AM0E9YvC0e0tKBcMwUFcNcvKCrFYFIWV0eSJQFiU2Za9N8vms0k3J2LVYa5FeIcQMLSSzumLoR/Zdy4urZ5RiTrSCRlWsVzVRJG67xPd/7Zu4kPjRjz7lncfnCJlo5mukWPLlx58z2i1RFChRgi/46mf/nvG44sd/9keMEVb31vzO9+6hkiN0MFqNkJEgNb6X+BAIeN58/JDLiw1lFfE2cNj1bDdjhlDIDDdIsWBzs2e/+9rYDwJrPWVRo2RB1/W0e4+koqxAGyh1niLroBh7ixWO7373V/jyyy8Z7B6ZIkpmycZ3v/ttZBq4vXrBl188wZBIMhBUnCaXCQ4WGRbI0hBkoGpmeKvyMCJ1ON8hpEG4SFksWM7PWd874he/+AX1PFHQcPlc0tSBlHRWRfg8kd/v2uwBlgkhItfXG6SAYRipqppvfOPbvHh+jdIVRmfy1X6/y3VK5GyxFJloqDIPjhJoKenagfWxYnU0Y3Uy4+HjU643XzJfK5q6IvoClRS3mwNeKJQpmc9nhBRZLRas6jX9aseT559TmCYj4ZGMPqIKwziMlFWJ0Q0np/e5vX1JEip7ZaTMocylyZd4nSjKMufotPY1hSyllBUfKoNiBJJCm0wmVJokE3b0X+OiU/6sdV0zn885HA6TfzGDITIERb6GBECkqjLRcrSS7d5n+bMJYCwuBbb7HdXkMxFSUBSa49Waixdb3nn7bR6/8R1+Gn7CxYtDloXGgA89i3V
NVdR0/SFvikbPYrFiGCd5I3lIEJMjRE1RGIpKcb3dc/l0pClrGlPQuT5TyJInxIyyl0IjlAOtwCniKBjiAWSRpXZCkHxNjCPOaXyYEWJN210TUx6WihSJznHYCYTMpDdroXs5YMdEWU3WkCCJwVBon2FNQmDKCmtHdCEpyjAREGHsOyKJotAUhcAPCjvmnDUhMjm134NMFqkdQluW65Ju3BFihdGZhljr+7x49gIjBrSQRKe4ud5TzCefX1LM6yWH/TWb7ZbFTHJ+/oAf/Dd/n59//Cc8/eKS/d5h0EgF81XADQJFydFiwZPne+aLctrge0gSjcEYT/LZj//81TNUMlnaFiUmqUkRoDJMSwkcGbYRQkVp5qyX99Da8oMf/B6zuaEfW56/fMlwAO8si/WC+2+ccf+NR/zmb/2XeAc317d89tmnPHn6nJ/97Au6bk976Nhe9pNSKGPsjRasVovcrKgho8CloBKaYibxyWUq7gib7S0woFWBipqqWhN8T3DkBh3PMPRTJmGm+SWRSBMkJ/iIkorf+Z3f48GDB/zp//1DfvbRz/A2omSWpSqdMv0yQfKTfF0KQvJcXV2ymi/v4PEUhZj8h0WGXsUcpVEUBeM4AJmae7l5MUVQZN/g3VZdmbyxd8FjhMbLxNHxkmoVkY2kOR8p/Jpnv9xy87JFJYnRPe2QGHWgqDxFL5ktC0aXGKwn2hEX9rRtQQRs8HgReHp5lVVZLoGDypQUaoEqExjJQYx0t0OOxNnd0nUbvvz5RyAqpM+gmkjIEuUho+lTkgyHyNHRhIbXjhhU3vD+LV9/J5qrdn/gy49/CTFRlwWbr77keK6Yr2rm8wXzxYqj0zl29Q1eSs+lHTnsDxCLaSWd5Si2H4DcSUcVXxNTsictT9Qk4EOenFif2O8izz+PLI/nGAJSBagiV08lqqypygqpJVfdgaI/4GIJ1qPxlDXQK754+Yrh9gZZ9szLxKPvvcvjXxe07pLd/Gf8T//nnyI3mmSAAlKlEX1HqfKE/zBk5Pfv//7v8uN/8yl/8ccfojSYMnHx2ZbZUcH9oyWf/sUzmtmMk+/f4+TbJzx+/zEn5w1/9C8/pNu2pBBIkwTrTmLx9dfX/y4QSKEJr7UYOcfE1DUOzW0XsdZy9t4J5w++xc2rW375V/8XL66+wBx2nJqeNhq6VFA2kaWQtNuBJweHEtdUePrQIgeJ6CtUoVBhh0t7TDkgdWC7vWa0njcfvoVWcLN9RWc7ujRCkMgQ2YQbNrevIGWz+9npETEN3FwfEMlQasO8KdFGYIeeZj5H6YKL6x0hTfCAQqGMJnZ7ijLgJvmLCDnAtjKZeBQQqKKmISC8o+s8ne1QzYrlCdy/nxu9r4aX/Mknn7JvYahH5mcNvo0I6/jis08p6gX1bEXXGqLvaG3LbD7j/e+f85s/WPDDH/6cT3/6iusXB+ZLw9FxTSok2uRN4+WrjnohSVRZKlIpVA17a3nr6C3eefchp/tbPv/8K15dXaB0iVAGKSP3HkJVzEihYmhLrl61hPESFxLKlGy3Az4o+g6ikUSjud1ss3GZRFkUHK9X2HiN8y4XciXRc8uD+REvno607cDYBVbrFdvDBl1WiMpwdbmhsz2n91esjqAqe1StoX3Eza7l5cWWzaElDT1DSHgCqfAQE0pXubBM8AdjoGkqhMwBoKOLBCWIwWF8IOwitzvL+uw+Ol0yLyTrouClKvjVX/sNfvrBX3L56pq2tSxreP+9E642LZebgX4zcjY/YlULiuBIoaeaWf7Bb7/BT154Xu0SpJpYdigcxkuKVDJET7vXzPR93vvuu6zXa/7l//ovMLMSGRSSDENpGrjdHtBSZqmcFqgp58gmSwgWqRPKBJw/oKTB6IoQB0gaJWsSgnGM+LJEqxk+ePqxo91d841vLOk6R3ew7PeRcYjUjUEqKIqavp3zjffehLhlNVvwjbe/xcO35zx/eeDegxOcF6ybt/jt3/j7/OzjD7ndXLPdPOWrD39BkA2FUgQb6IY9UjvO7r+FNJrN7Y5hu+HJk0A7aLw3zKoV73/3fT788GdYu0eIiDEVz549QYjE7jKHK5czxTAogs/SD6ZMFm1AoiaZ5BQeqxXdJDV6a30PmSQvnu5YLBruv7GkKBR/+WcvOD054a13Fjx8eJ9/8k/+W/7ZP/vvubjMcJnd4cDmuufV5YbSGA69oQ0FUoYcSJmgUiXOeoRO7LuOblSTtC7ghpHVumF9fEzC8erihqpUlAaS6/nJv7niO9875+TU0PU916+e8fd+91f5/JMX3Nzc4n3i/OyYrj1MaoEsj7OuoywK8mBIMZs1HB8HInOUFsRgudlcUlXl5JlzxOApiwbnEzE4SB5tBPudpT+01DNF11k+/OAZKWalgk+CIUiGceRw2FIYQzOfsTwStIeegCaqAHPB/cf3uH7SZUR0iiiVZTRCZpCA8466mWG0wbqsCPExIvFIWeYmKsYpvmAgG+DzpSM3VgpnJ1S3kkCWsea5Z34vxtFy79490uRxIeU7gRQCoyZJob8LBPavaYTn52eYQlJWhsJrLjcvaWY1SlaMQ8I6z3JxxHw+p+0Pmd5ZVQhh8T6yuRwZd5bdJjKba4axy5I9WdG3nrE/4MOAlHB0vMT7kWaWh3/OZgqZ1hW7bY8ULWWtmFc1JjYIn/2TyXpsbzNuPGa0NYws7tVIpei7xDBmktziTE55ipHyyNG3Djt4htbS7bIEs5kt0bMygxkIRAumkISUDfdFUWFdRwo5S0qIiJQdu0HnLDcpiCJT0rq9xw5AyjUyRZ+3i95jDwGpi+yPix4fA1ImFmuJKTeIkMALYjLMFwWjt8xmCxazY/qNQRtwXcXgIs5tKfUMbw/U84bV0Qnn52/g/AfE6BncgSfPn+D+fOT4pKZolphxYOwjfR9o5tlTFdxI5wrKOiBlzgOUMiGShmiwwwgicu9BwWyuePm8RytDXRbE4OjtgI8ZIITIkslZMSeESD9sefJihxssIa44f/AGp+dnPHzrLfrhwGB3XLy64qM//hlu+Bhntxx2N9ghkz+7zvHur97j7CxgR0fqc6RN+htavbZtUSqxWhUICZvrASUVbWtBSExRcu9hye1NR3SSEBJt37Ldbya5rUaikFJQ1Zl66l97ikBQEgJEEjGO/Pmf/VvKQnLx6pKxt5SmJNCjdZltFwnUlEdmbSBMyqCmadjcbhFJvfZHXl9fonWBMbnB6vuesiypqpoQPP3QZ4mmmEOKBJ9tFsaUE0QsD2pj8nT9Jfb5gL7R6MYzOljNHC4kTGMQqcbHFiMNcVLRmDgShcQ6cDYgAoytZEtPPZPUlWLRrCgktIfIYjXj9HTG6mjkr350iVHFBI9L/KN//A9YLwxlrShKhUma/+OPfsjTX/asFoYHb5bs2g1ffBzyHT95gk289963+OTTz9jctIyup5nPgMN/sK/5O9FcpZiYr9f4MOJtD07wvW+/Q7GcI4oKIRS3N1vKpkAXDaaawe6AlGYy0QIxP8gx2YzuVjL7K+KdwZZpOirJ+uC8ThUoutZTmoAqCh
YrWD0YePHVDqN7hKkRlDAGYtLoQnB8mnj8bkJXkpm+z3bbcPWs4PlHkTDXfPXVgUWZ9epFmrN/+Rw2hmgtxiiwPRHDkCJRRJSR3L7w3OwFzdkZ3/zN76DSwOrNe+zbA4Pvueg8yzfPWK4FTjtkUfDb3/sNnj2/oKq+pFMtYSpMwFTUxWvsppDTxC3BFC6SM5/4+ucjtEY2c7rBc9PuuX7xBRsRUUPEdS37YeCBGpg3De0g2I+JmRzxQZOswAZJ9IHoB0xpiClyaFvGMHB0b87xakXfH9jtN+z6keVZjarzRImocQdPqSf5RsrY0LLMjWLwjr7P9Dsl9HRAZFOj0tnYijAkNDHlg2m2mKGNylKnIlL2lsoDSRFR7NoDSuXpaogSIzXlosR6hyngSBj0SUXPltQJxtZzc2W5ufoTfCx4641zHr77AKLlcj/iv9oSbcCKG4J3hLImxgpjGhZvHHOxi7zx4G12LyVj94rqSLC/HtFOMLqII6JLg9Z1lluIhA8J6z3LY8lh2PDxx5bPf/EE2wlSnYtyqQ111ZDGgrYVOWwvRmJwFGaBJCCtZYyOqjSkkDOT+nZHTDBrBMMw0rkRpTxBDIyuZ7FaM1vUHIZbdFxTL9cIeWDY7xj7W9arGbfbAXvVMnRZiz/cgAoF6rjm/Lzi+csNoh+JfaQfBfUiMW4GhJJUZYWSkqHNniEh1ETbc1NByvr62lRZThs1i6VmuZixmJ1x1e6g3zLsel5eJOTyEZ998gnPn3xGu7/C1IoqKbZdpPUCFyViEDx6lOi949Ymeq85XmpepSLLo/zA6PYQPJ2vOCwirvBsBoHzgiF2bNstqqp4/O4j2nhLGLPcdOxthrmUeaKY/Rk5J8x7l3XoZF+j0RVCxGkQ4qYLY4Bp6ldWZYZvhDDlBmX9+6tXB5RQBFcQYmK5WhBFRv0O3cBF/4ymcHz/V99hPlsgU0EMJZU27LcBISUP79/j+ZNXrJb3KcuOND7lZvOC8uQEb3tGO+CD5513F1CULE9OSabg6vKS6/1I11mq6ojlck3XHxDmhkJlCts4hCynSVDXS5RUWDtkepmKCKEBQxIxA5dkyP4Hl31oRitEBcFHvvr8FfUMju6B1o7DrmcxXxHiyNvvfodf/bVf4/HjR/wv/9u/4lvfeUyk5dXFC5zNEl8/Rsb2wOb2msHvaOoFy/WC4CzDYaQuKhxtxpbrvC1y3jJfHjFfzqnrktFanL1lvphRlUu0bCjjBfuXHe11pLc9uIDwS6Roib6j63qurq8oCs2sqajqirIs+eLzLzOYQ2piDFxcvuT68jZP0nUeAi6XS5wbpywqlWfHUwi20ZKUDHEyU+93O6qZYbaoKeSMQklEMhncc8h/r7oqWd9bM1/NsG4gTkCMTIErmM8fMMxv6A4bQrRImS9vUmdvVMKAEBhjcN7nENOUJo9qzIqIiS53RyXTWk/ZVGkiqmaQQVVleFAeokx5i5HXnqw0PTcheNQkPwIQEwwjhCwhh4x+H8aR0UdCLEAKTJGhOZnCl9+J2azChxGjFTppUpQcNp6iUFxcPCf454Q00B8ks2aFmQuEGuj7QHfoyVdVwTjm2upcfodzwHTC2366cILWsFzUkDTO9QQ/5vB6DdELYsz+1qop8VExuoEYEk2j0VVNucwbCBEkMpYMbWI2qwk+4VxA0DCrlwxjl/1SyRNdRFlHWc+RUtIPBxbzhrISpChxY6LvszwPKVDGoI1iHPf4OJ1QWlHNG0bp8GMgyoAUEaUDwWc/j9aaooqUZSLYxKwAmSKb3cgYI14IUrKEsMl+JQ3CZB919ImijFky5wM31xu2246kxxzCHBPWWzbbW6Q2XL5KiLjg0cNTrq6v2LcXCKEgFfS9xOgSJUvUFMhu4w4XEiEIkAZTKDY3AaESMYINEczIcgndPvvzE4bkE3ZMSJHz2aQwSKN58eIzrm6eUX5W0fy0zOevDNhhYGi7vCEyDiGgqoEEoxsnymXBsNf40aEalUFGgFA50uP4dI6pAm03Yu1IUSSiSNP7INjvRlKUWO/RUlBXmpKavm3zN2J6z6b8ViGynDGkyfdIQiKJRK4vL9Ba4J3LHqqUJerWO0CjVUG9qPPQsgDhBT7mAYrWCuLX2XviLi4ifg2t+bo2ZRCMVokUFXd5o1InpMoeqeyDzCHa15ueJA6Ysqak5urVSKgO+Z6aFNa3uBjQMueiSgSLI4MqzWvCaQiC+t4x984lNnYMo2V/GPEuRznUTU2zWNINt9x/8xitBH07ctg7jt58h7/3/jf5f/7q3/GTH/81du/YvLJQRrrBsbsq+E9//3eR8udcvLzFjpFqWfDxz79if2hxHiIa6/8jCBEGaGYNo0t43yNRnJweM8iC1nqGYc/2xTXVTGG7gURGEeOz8CtNh3pKGqnka4lBTmePrwl0+esuWT5OEpVMXKE3JCeoZoKmibzzvuRwcCSXCN5jRMXgI7PGMl9VnJ3PWTQ1PhQUyzWmksQu8eTmir73rJs5R8uGWGu+3D/BHixYNxHzEkmqHGQrQReS26ueTz5+iqoM64dLpKrwxjHsBnZtT+89bz46x9SRzjk2u5ZtO/Dqcj8Fmip8ny2J8fWW6i4oNcs0pvFGJvW8lk3mjJlEQiuFF2Al9EbS93vGJ59PRttENIF9nyikwSqJlzHj7Z1HFdk8SZSMfkAZhYsB5xMpldiYaMoaoxJKDAg6RAGd6xDTC5FsTv8uq4zu9vtIWc5z8zMVaRXuwgvF682G99kM3nUuJ6CKXESSEHk7JwRqnrGcMmZgSCwKRNSoqaAnQMZEEOCFQBlJVWr0HPoD2M7jbWKMlt3lFkzFbFGyPJ4xXywIZcn6bAHBEvzA7XXIWFvyn5lkyfNn16xnJavTGatDQ+9HXMpFKqWc+wFMl+z837yLzGYzpOq5udnQHza8evkyh4oW+RATU0hhu1MQ5LQpARcGjJnlCZ9OyJgQ5APLuYizAYRBmwITNSEGrItYnwhYgg8EFxkODhVGUDnmQGty8ZWSoct45PWsRifo95FB5YDO4sGSIbxE6Bwa6cc4GUzzkymFxBSaoXMZwHIn6xQC522mXSmZn22nM9paC1KlqVZLCrXPjcbtgN33VLPAi+cvub2+YXQdus6ZWnsr6JLOjBUCRid2LtIl6BDMkGxpWB+XCLnnxcWeEC3DUHI9XY5aZzJAQkPbt4Sba4qqodvfUNYVdoT2doCk0CoXtyldOdPt8BNBcPIJSY3RCSEyXCVFhZkwwmKSZ2gMCo2PgejzQCj4O+9NAhE4OjlCmcTN1YZdvydKz37fIU2DLmuchdEnrMvZWkUlmc0KbjZbympBdziw225JKkIYiSnlS/es5o03z5HiiPFmkzMFlWC5PmP0YaL2Jba7K7RxxKjxIYM4lE5ZuqcMxlQ5iDL2aJXDvyGhDKhkkCmQAysFpdJI4TOu3Ana/chsrilnmarYtwOSCikTbdfTdY5xgA8++JhHD08yIU6X1FXJohbstxu87XH9nkpBbQyr1ZJh6LBdvniMQ0LdhdCrkM3xJvsfrAtYF6iqh
vX6jPlsDVFxemJIqWW3CYQkOD89pTuM9O2Ad5lstt+1GCURxqBkJrdprV4bzr133NxcsW9bjNHTP9mUfge2yOd2JkUqJbM/F40QCmMC4zhw2LXUs4bV0Yqmyv4tbzPoRsbA0fGSxXqBrgps8JSlItRygjFolJgxX0ZCaolY4jCFpxPyZTwjHaa/u/+bVXR6ZzPgIoMTZDa8p6zEuEM75yyqNF3S5QS0YAI83MEhcv7QXbCuEJl4Of04XjdXQqkpGBr60VJVGhciYvJWSCmmBijmMPopjFeI3KiGkHBjNspbe8C5MWPuLdBIZnPNbA1Pv+zyHULeZRVF7gz/d7eJ/Lll9odpmbd+UuCsIzg7DYkmz1YAY8r8O5a5JgoRUCZgtEBoOZn5M8UyOIEbJLJSGR5iJDHkwZN3juAtKXlEzAM4oyVCa1IUzOczisozDh5rIzEa5kuNt5G7iAifLW5UjUGb7PvWTQ5nlQkQCV3cgaEUCJk3mmokpKwDumNB2TGSlMKSSN7lrZI2SJM3BaT87KgpP85ZR7A9s5XMf1YShCjo+h6zSwyjQ6EZRyiLGZvNFLESFXaMzBeSojREPzXuhUR4gR2Z/Ir5vVU6+/p0pTh/9xSTdlx+4eh6gRcSNYUV5aiWDNopSsUwHhjtnsMBbm8yfVCa/HuXMYO4hBCYMr/bKSSqWcnRyRFVY+hnPd2rFxy6PUyh2JAwlaSsNT75/GwrTfobt7UYIl3r0cqgTW5WhMxALSlyXUgp5QBcZB7ATX7HqizR2nDYt/ndVhLnxuxxFdnPGVMkCZHzXGWBFPn7I5iIyZEUmKIScg1Lr2tYtpHcvat370TOzcrngFbgU+Tr4O7sffche+1UUmijUIXClAKtEyJFfB8oGkMiYX2Wxgup8tkRE0YX3D97xIN3HnF1cc3l82uun29QhWSxbggJDh2M1qIrOeV1BrbXLX03YkqNrhUmeNSYPXUheA77jv22oykk/W6kOi8QIWFdQIglq5OKzUZjbQQJ19c3KGMp6gxKCncXtv/A19+J5koIQamBIPFKo00BSnFzseXiapMlZDtLiHsqKaiJVLpktPkiHWJG1XqvMGWB7TNm8k6r/RrvTc5XkVIRI2TinMO7PXSKQxyJWnB+kPzD37/HX/6w59WTkWHwzGvNrmtZN4G6WdOU93k8P+Mvv/oFalFy9OiEVb3g03/1S5rFA+4/fsijRycc7AXNjwF3jfN7fPTYVJFEQIuJ8CJhc7Xnr/7sx5y9c4+zd+9RHmu++Ohjdi/BjiArjXzgabeakBy7/XOevbqmbbO2N4k0EWGm5ZWYrjETZQlyoZh+4AQfcq8lJsJdCigF3XAgRDCLghg1h8MO5glZ5eyQp9dQyIBDoU1BCgrb37KoS5p6dmd3yxri5FBGs5g3XO0OeK+RLiFiwbyR9P1IO15QiYJF0SBlgw09s1JQGkHfC6pyhhO5qXF+wDubAxHxecoqCuwwkiIcugMRQb2oaBYVu7YFIahEyUIvsEhUCkitELMSo+eUvcWogPeJMAa6viVgKHXGdjvXI5LBDQ5vIyiFTAWqUDx7+YrbfuStx9/icm9575szZnpBdz1w9XTHQmqCsBzcwOEm8PLmhvFdg157Ts4Mn31wyJu2iYpTRk3X7hk5ZHx00iQ8Z/dOeXn1hO3tlrG3LNfQtRopDSDxPjKMt9iuYlY2zBqVi7zrcDKSIiQRUBrGsSdYASGTzpz3oGsW9ZIYBUM/4UqLRL+z2F0HoSKlAeezR2vVNGhdcOg8tSiZr0rO75VcvLrB9WKSWTjaQdLvI2WR0alKBWzf0xyX9Ps8WbCdRGQAACAASURBVE5lzjCzY5hw1JqqrGkHSyl0RlHHyJgkIngur1ou9y23XU29allIKCtJqsAOHXs5MIx5AiybQOoVo1AMSuOMpyp6bobE3kmCyDLgTQv35YpvvvcG+82G/e6SAwOjH3hxCKQoKExDXSaa9QznHLcvXxFS4LAZOXq4ROocBqqNQQFiQmtncqd/PchJCUTM2ytVZ6JeiJHkFyyW2bsXYuDly1dU1YxCNLgQCdajleBofczhcMD6AakTi9Was9M5bvTcbvbM5wWqWPL551uOjyNHR0uC9OzHlsVijikMQTiqxnN59QWf/PwJz55esDw5pr+9pFicMz+ac7JoODu5h7XHfPH5V1xeXFJqwfvf/i5SwtB1DMMtnTtQ1TX9AImALhLNDOw4TLJsj9QJcAiKPOySnnpWovUMNx6I0SNlojAzbOizHEVqtJTTFjCnoHrr2LpbCiP56U9+xIsnz3jn8TcZD5FffPQ53ntO1uc8fHAfISOffvIJKjqOKkPVnFPNFjSLU27Flht9iygCbmcQMV8IdJ3xx94FDvuOtvNEAuePzjh/8BCj54y9583HKw5DS7iIKL3iu99/ny+ffMrm9jk+OOoyZ1mNbsT7iHOBobc0TZMv3OQLzOGw564JmPJtOewPFEWJmOAHMXhiDHmzljSgKIoKfI8Lju3tlhhgvTxlvaro+gN2HEjRE0Li/OyMoLJPQVCzXJScnCS8H+j6gcMm0ayXJHMAFbBDjx08LkS0mSiHIkv6csP3dYC19/HrupLITSnTe53SBAWIE/EygzhyE2lyYzXJzV77s4Inxim3cLpExgk4FCZioFEapMS5wDhaFss5CPAhUJc1ITiYgpiVLkhB4r24q/4kEkJF7JgJe1Wtsq9LKUa7QxczTs8bvvzcoVQzyery4CdET1lqgosEl5DSUJiasjIoLRApMvQD42CnTXNgqrJInZjPK6qy4OrmmrI+YrEoIA24ocsXYWcIyeOGwLiRdIfIOA5UdUFhJOPQ0fp2ilBJiJTQUuKCwFmbM4rMnGY2I9Hjg6UfRpybce/eipvLLd5lMmVhJInEarXAlCW7/Q4zM9ghK4CUyn4shMQNd4HleagTZaD3oIXCVAUh9Fkl4zXOGuwoWK01UVtSlCRvQOTBjVSJspCUdYnQiq4fCU6R0AzDiEgSXTlsN/KLT3refHSfFBp8sNkH4wKmKKkb2O9HDt2e03srZBD0vc2IcQTI7CUSKrE4rvjt/+y7XH3+c7qXW7wNoEClgEJiPbjcQ1CXmlqViMmrX5r8LiMiIeSsRCVGfFJ4H7ExECzMTo55+O4jHr7xALxEuw/4i7/4txRliVCRKDxJKQ79nmEYsGOkrkt8GBGJCTySsC5DbxbLTCweu5HkBAmNUWKSznqMLjk7O2O729O1PaenpxSloJuw7KYwSOVJcSJsirxN9F7y8K03aOoKNww8/eppzsWUOUid5KaR/FSXUkKmLO+92zDHifiZB95f58olAiSH1vmMyI+aIVOY4zS8TSzvzSkLDUSG8YAuJPfvn7LdbhjHFhkkdW0YxlxTCl3y1qPv8YP/6j/nyZOf8vGHH/DXtuVyt8GnZSY0HtcI5fGyIxw0+6uWy69ekbyhWhQsTisiOd5le33Bj25+weFmx7tvvsGv/9YD/seP/zXL4wYpAmnw/Ls//RJd2ex7E+CspawiZR2YL7OtaHP1tzdX6g/+4A/+v3VE
/z9+/fP/4Z//wfFZRRoH4hhIdPmhud3jDh2xH9HTgZ58JDpPNU14kshYXWIuUgGXZQcwme+mTKw7yUHMGy1guvBE+kPFbDajOZ4j9IzrZxVjf5/gB5RSGK0p4sA//McPuP+OYH1vTlncw6l3+eGf/iHPf/YKZQt+7Xd/A2Tgiy+e8cmXT/npLz/j46ef871f/xXOzx/ig+Dps0vKuUaOHRqBFgoZE/NKslwozFxA5ZkdR0SfeHR0wr2jFboo+OrLa/a7Pb7rqVLgfFmy23/J7mJg2PdE78jjNl7TAjMxUILIU6j8OOT4vDR5XITKQItDbxk7j+8CvnUM3QFdNKzvLVmuG6SMbK4thzbDDhSaw65HENCUDHvL7mbL+99+h+QDyUEcwfeeZWNw7hZF4GS14rd/6z2ePLlCEilNTaPXONdSNSNCuByu6ARCKNq2xXn3tW43JmKymbpFNnLGlND/L3Vv1mtplp95/dbwjns6Y8SJITMjp8qasqrS7hbGLtlWt7u76KYxQogLJL5LyYIrBB+Ae7g0SG6pr2haYGjZ2OVyZeVYlTEPZ9zzO62Ri/VGlCXoe3OkuIoTETv2fs9a/+F5nl+ePEpBemIWU/CeiAzGcPNsya51HN99i9mdu9i6pG323JscUqgsHSZ4MumYFQu0rrBRcrPcs98NqFwgiwjBgpzh9h6z6+lWK7ZXT5lPMw7u5vREOif58R99zMeffJ9pdoRpBtb2OSrzvHi2pKw0RyczfBe5vlijc4PznraPZGUaEKisQqiMIKDplsyOBOUkkBWRGCqmhxn33j4myzO2m4623aFVhlKCvu+5vLyimsw4u1MSfKRtPFJJ1kuDjmoMPBH4GHBGUNc5WSZotl0C8xpBoSfMqjlVNqNQNb/1vR/y0bvf5mh+xsV5y+mts/RvRkWhK+bTBxTVERFodjs2j695650Dun5LFIZqodnuNfQuMXL8gHcmAZwLjZBJzip1jtYF7b6l3Xd448nwWHqwQBfpthu2yyX7/RGynjG5VSGI7NvAvt/ggidH8+Ep2GZgFwOtjCm9Tks6r3AhXSDtPvD2yYJ70ymmMzw6X1NnNd40+OhQUrDIM7resdkaOuOIKoJsmKg0cXQuonQ1bgSSd0USiS6iczFO9xIThjigdfIFTGY5i8Mjvv+dP+Ty+oahFwRXoOWE2cGabt+z2zaY3jGZTjHG0XYN1hqC91xdbnj8q0fs9x06y1O0vc148vgZzW6gruecvbWgj0tk5ojB0Lcdbd8w+I6Xz16yut5wdv9dalnyB3/0L5hMZ3z22Wd8/tlj/uZnf0OuI/O6wPUD1hv27Y68UhRVjhkE0Vdsth3GJB7VwWKGFCnlLDLQdjfM51OclZR15PhWyYP33qXZt/TdgFKC6TRPxvpxzO/swG7fs12nTbNUaYvTNZEYB+bzCikc19evuLq84fjoNh99+3t8/MNP+Og7P+D+2+/yk3/5R/z49/4BH3/vu7z34Q/4Z//iP+Xxk0seP33Kfn/F8VFG30mkEkgtEnvOKaxNQF+RfNh8//vf5etvPufzL/+WR4+/pjcl3/34x0yPZlSzEikX7IeHTBeOPNdkasL7755xc7OmyCdMJwuOj0/54IP32Ww2zOcLFotDGEWhRSEp8hSKIKVOE+Iox9AEkQqlMXhHSMYG1ZIXkuACXZNCQD757R8lGaO3yFzSd466mLHfDFy92vDs8SXHJ6ccn5zQ9o5X5zcEOyAKz7137jKZzLi+3BCM4v0P77yRbCkUF5cr+t6RGGQypR5Gj4hpg+XGYZ16M8hM91DwaXMjZUqcU0q9gWgnCeDr6PTXd3Ty/Lgx+RHGrRZJvqZyTVlVzA4OWG+3dH3PYGxKFQxpQ9X1Hmsj3gmchd6a1KTJFAbhnSVGwzAM9J0n+oJ6Iulaz2bdc3mxHXlfBS7Y5D91KcZfC00aS6WN9GJRExjwYcC7gX5vk/dNCRiZksYEVOZxLp3Tfe+RecZ0OkWKtLFJb9aUoRvouwHroZooMp0k3t44sgzKXCNVCh9IC6YBREGIFpVF5sdTrN3Q9UminLx8AtN17LYdEU9Vy7SVVBWz+QEq09xs1hzdmlOWCiE9PliyImO2yAkypbMZExNnqPJErTExx8WMaipRTBLSY5QqBu/H0IQkjQvSoDKNzjU6U+n1B00/2LRhGwyOjt1SIkKVJHp6oNlucHbA2R7nDFIIlEyFsrEG6xztXtJ3LcEFlNJkuiYS8cZS5FPycs42eK5e3LB83id/8USx3xps75AqAYsRHY4u1UopODPViFJhnMPEkAKwiKMFIUkvRcx47/sn9PWOmB1S5He4c1rwN59+OjZVHqEjxMRbNb0keklWgAjlyDwFIRWLo5zODhTFmHAdI9GPctc3HMiCD7/1PWbzWYJb+8i7777H+cULurZLUkGZmp1UE6jkdxQB6+HHf/D73L49RcqG/bZjuxkgZqmeBm7dvs12t0fAiDeS3L//Fl3XvZH9ZllG3/dord8koZo+JtmzYJS3VwSnKWpFVgmEDux2PZ/88HfQSrBvWpbLHe98+DZXyy373hEklNOBs7M5t+7PKcoMY2C7Fbx4Bae3b3F8coTOAutmS2s9h2c1p/cOmB0e8f6H7zCdKQ5PA7fuCo6OjvnWx7e4vFpyc9kQXEWwAwf3WmYHM45O7jC/dZf6WPP88ppt02N84PLZE5ZXO6IPzOaKO/cK8tKyOi/RskAJWF622Ma9+ulPf/o//H/1NX8vmqv/9r//73568uAWu7Zju+vY7XdYm5qD4CJd67hz9xbtkLTnWghyAl3XMfjUXEkiIQjqmR4Pc4mSGu9HMJ9IqxzvGQ/HOG5tJG7oqRc1xbxMPAsH65s9SipOz+ac3puxHgyF0dSzBTdXHX/97z7jf/83/5abl5c8+PAux7ePefxoy8/+3Vd0mw3etkTbodvIew8yhjayudqyu7xhNp0QYoqJ9NEzYNH1Ef/0X/6E+vCYVdsysCMfjjg8vsPgNOevNujoGVrDMEBW1Nx96x63ju9z8fAa2w5v5H2k1Pnx/zcmN5G2diGOWn4RIaaCW2kFMkeIDKUysrygKEvyTCJyQeg9bhuwOwhBU2aRLBMpdjVGnJAY7xKMjsBms6ZrB+LIRPE2HZKTUpGrDGSOrEtWXUc5n6GKHGMGyrjj3SNBUU5xsabrHDG+loxFpBwlZfE3UkcpBDF4slyBSKk1gZReVdXVG0NmlpUUWUUQkn3Tsl6vGPqexfEhgRThSwxYBrwI9NHQRUP0iml9nJp4H9FBoEOJ7VpmBxMObx1RVhV3bh0xqyRmP9Btek5vz3G5gnzGzbrlZ3/5tzz56ivc0LNfW26uDPvBErSjmkvyiUTl0DaS2ZEiqBa0pZiWqGIgr1IaSowZPjiadksMOW5QOCNo2g2LeY0xA/2QqOEHB6ccLm7R7klabhHBTciUIFORLIvoLCfPRIqfD2nDUFYDbdsRXMD0jvVNw3/+x/8Vb7/9Iaubjr/6i8/Y7ToODhacHN9ByQlfffESbzXr9YZu3eJbRz7LqSuBsxqtc/J
SYsM2pcF5yTSf8/bbH6RJcDAUVUZVF+ybZgxdScBRISSdc3hRoFWBznNCrjF+jyohup5227LeGe7fOSIOW3Lfs5CBs1qhVZL/DQL0PCUlCuPIKCjzKcWhY7a+5OXTFzx9dcHOO3R2RPArJqVgWpVoOUPlitbsKauSw8MDpO6JeqDrNcZpolIgFDF0SBHQQiCioMoTkyoGCC6iBOhsgpBpMu+CJoSCfbvBWf+m4OzajmZrsd6hsiTT6YeByXTGZFojlWAwHUVeIMf0NSdafHSIaGmbPS9fnHN53qPylAxaZAW3jk8o6wOsCNy9c48H7zxgv+nY9T1Xl1c8ffqE9XpJWefozKOUwDqPj5633jrjZnnFarNltdkTBs+t+5qu75NUMQqazYDpPM73aCVYzO+wXbcJA6DTGbS8dswPK/IsPb+blQUMCIhBcXJyh9/93d/Fh8h239J1Cdr6WprStgYfAlmp6cweT8+r6+d8+fXn/Oyvfsb04BaRnJ//4kv+1//tz/k///rnfP34hrzKOLt9yO3jQ3Y3Le2wIUoNUiOVS36tPPmFjHGslw2z6YJ37j9gWtcsby5RReDp02vOLy5Yri9Zbx8ym8zBHGE6GLodXdOgsyQZC8Gz3W2xbhgLkUjXdbT73Rjz7UcUgifTiqLIExMsJpBmalo1CE+MyZ8XxZiuJyRCSJY3K4qyZLm8wTvHtJowmx2RVTUuOtCWegG37te8fPWYZr+mKnLu3DnFR9i1Hbu2AynxxvHugxO0jDhnaXvLetXQD+kshrHojGFs+FKj5Lwdk1jjm4GDEK9l+Okucs5TV9UbaZ0g4l0apoWQ4PavvRyQpIB2DKbSWifmjFaUVcngLGVdonSSzikNXdfjCSA8KEcxicwPy8S6AZwP5EVLXmh0JshzST0pCHGgyGukkJjepQAaE8hGaKkYX3FRKBgn4PK1XBJSqIdzRJ+4mNalTYeUAq0KpFDjL42QJQTLzeWaIq/56NvfQYqc8xc3yUMlMjQZi4MjdDHFC40B8joDCcFZgrVEZ3EIonIIHchrmB95Xr5oiF5DzFFCk5V7rA04m8C+Qgm2uyEpBYTF2J5mbyjyfLQMeAKeKCxK+STlkqlJKqsjsmwOUuGJDEME2eFJPk2bLJScvdtzfFaRF5pma+itJy9zlC4IUdPsHYMJCCJ5Ng4Woh4bjRR0lI9BTd4mXhFBIKLGupbFwXSEhXdYF3G9Q8sysQKlRWlJXWUoIfDWMPiW733yPv/Ff/Zfcnb7Li+eXlDPb/OjT47JcQjjUbFEiwJvfyO3D2OQhNAxcZeISA9eCfrQMThDjILD+wJfrthsL3n6+Ff84i8/ZbsxSXqr0sa5bQJ5JVM6JpGyynCh4PBYobK0AV4cVeRFQT8YIo4iFwl3EAyz+ZwPP/gOP/nJf8J8fsDzVy+4vLxiu9vSdQ2r5YZ+SOciIoxDDN4geYR47eWSdO2O/W7Lo4evUFTj98aUVr3b4V0EocZhiE5qCWP+znAkhdgolbZWZZn8qUIqhEyyYuMGEBnHZzPuvXPK3bdOsKHn1798iIqK6KHvO+pJyWq94eCo4vT2hKIKNG3H4rAmysjgWoy74unjZ9ysl3Rdw6zWrF+9Yno2p217VlcDwS7wwtAai5OBXnievrrGa8HkUHF6N+fkFEwnuP/gAWd3z6hnBY8eveDs8IyHnz5kc7HFNo66nqIzweJwwuFRzXRWcnMlgIysSPKsYEu6bfvvba7+XsgCnbM0jcHa0SwrSkxn2BlLCGAtNPtdOgxkmhqJqIhi9DCMDA3vAoIkiYI0pRYijtpPYNSxhxDGaVj6w7kQaVUcR+aWjti2Z7OuqKcCFAQ9MLhDbHPIsHeYvuFQKa7QLPcd5vkF+xcG05/Thy5BjV3GrK65fXbKE39NKJqRW9KCjwSh8EAIhoigOl4gjGVoMooYKfOck6O7mP6Gof2GRRXwsqB3hs16y6NfX3D39hlxSFKjqEYehBh9VzGihEwaWSXfvAcygvNuZH+l6YvWmiAD+URTzEqKacn66Ya2bzEhQHTJWIzGyxGIigAlwKRGK/11kbZt0CpHyDBuzBw6U3gHbbS0do9/HnB9RFYZKM9QNYRGsjERE8epkUivM0aVXveYkmNtR4ghQTzHdZZIg5lx2pvkgt7GpE+3A0JqsILdzqC0Is8kWkiutzswFgaDIFJlFU6kWGKiIApJPS3Ydl3S/AsBMfkIPBEXItYYbm42dG3BbDLjux8s+PCtd/mbL865feyospzbZ0ec3s65vrqm6T1B9kTlmExzsjImIr1x5LnHO0FRJt11lkUWizOsHdjve8xgU8PoC/abNsk1gqWQOcHZN0BXKTUyG1iuNjRNSjRyg0AIk0CVAZyDvJbktU4yCpFiXzeba8psQq4KtMioVWBWCZ4++TUPH/2azXZFVgRW12tM43DGIWxPJlZUKuKFx8vkX9wOPX30RCNRXrOYLuh3Hq1LFCV9k7AExixRMZIVGVWdYXqPVul1eufIgsCYHq8UMtPkKqOQE7KiJDeQR4+awrTMqA4rmAxkuWF2mrNeR2QTyXwENKGPyJDStBwB4TUhCobe4oC6mrKYSFZdmv7ZGOiGPVVZUGQg/YDZ7RiCZK8KnAyEaBAuI9dpk6diGvhkWc7hfELT77C9RUbJyektYM6uu8DadAk23QofDcb2SV6o0rN3eHzEYAZ27WaUWpE8m0GkaHMRiSqMXp1AtFDNcrzUCQgr4PLynOPTGTiBDpGqdBRVxA4908ktZtMpu2UPes/TJy9ptntkSK9jNpuMfgTBrCopizkqZPi+wXhDMUneCGvsGAEs0uTbCMQAPR7CniQXI8lavMf5HUJa3nvvLrt5z6+/fIk1FucgRo9Umjt373F5s+ZqeUPEARFjBjI9ZTpN0fX9MFCVeWITDSZ5fgJ89fnPsN2e68sXbDZrpC7o2guG6Yx8UnJ0+xiVRfafL2lbS/SBqDKIEBwj7yuSKcF2vaYuS0znkT5DBOj7FXkhKAuYzSPdfk+zHXA2eSZS1LInYogkr1XbbcnznKLIgcDSWiZFltQYvPb2+OSnYdzKh9fpeyP/7o3vJyN6ByKhJryPPPzmIfOjmslihi4qnHds9yuc7RAYJhOBZ6CaFYhJhRYaZEhJYWN3kk0kk0PFwbxiMB1t42mHVFRJmaS0r/1PRP9m06akSODOEMfhVxooOOdHv9iIAonJuyRIuUqvvRuvvyfJz5IELY7JklpnqFyChKIqkFKy22+pqgLE6B91Hu+Tb1VrjdJq3EgqpB6luprkV4sK0wd0nuLvtdK8ermnrl9L6MXITkyFZaYVohC0bfcmuGPU92JtCgcJIRD9KOWMDiHydLcKw8nxEZms2O027PdrUJLgDDKEMQxEs9m0yAjBRMqy5N47d1itDGZwRGSSSr5mPKqA1KOcUgSKMgXECCWwRqEU9J1HCUGmJVmR0kSFSt7XvJTIIcmWjemRSiTZ4bBH6lRfZXmFUg6ph+QrItJ2FpTBo4kBZJRo7X6DthnfO6EUMl/QDJZ+CMnPFWsEAmMs1q
cBio8OqSHLMoq8TD+/VqAU5LmgyKBvcpxt8DZtScsqBbjUk5I6k8xPBGVRsLtwLC8GbG/QymFtxqSqkrwWQdbnZMUdXlwuuby+guCYHpxw/94Ru5ue1apBZKAyTWs8SiqUTtwx5xxKpWcHIHqPVIkNGpwAH9AI6uIQISYYC7uiG1MNQ7p7LShdUpQmseVsOstULpkcQG4EXeexocP4LKlXZILLq8zjbWQ2O2I+P8UMkhAlbduBdJS1IIguPcu6fuNfzHWBc0NSJyFQQpNLw4tnz8l0whk4K9E6Uk9ynIdu8An+LNVv6urxmZcy/Uy+9lm93jKnvAOP1J7gNXlWUU9zJrOah7++SM92LpjHgsl8ysGHtxm6nugixwdzoulxe0t+UDPNZhhp8Z1HhMQDqycVtmvJ5Ybr6+fAjvxWzWldsCUQtgYxDMSDG3Z0DF5i7EDbDHSt5eayZ3FUcHhYcuuo4uzkt5Gqp+8MAU9RGJzfUk0kk1meBgRIXHDs2h6PwIuCb333B7x48Zi+32GsgezvZjn8v7/+XjRX1lr2uy41OAiUzAnW0fV2NENLVsslURdAQISACxGlI9L+BlroQyo6hEwNRIA3JsA0EmVsMBLAJ6UTJX2x6S2us6OJXpFXCh8l240bwbSGXdOjVwNuSCk+tw4OWD+84Ppqy/Jqh7kaEPUeFxyD1WiZUx7WZPUcVzwjFB0qFxjbE70kKkUUEqIkRs+m37FtmhR1Xc5QmSIGTXCSmDI20SKHYGibhufPzsHI0cD7my8pRNLLir8TyT4GO6THISTQsMjG3xUIrYgxoCtFMdfkc428zKBL1HsQKBFRMl1gIojx8ojIkEyyaR84JtmI1MAmiSLovHgTJxxlwK88fkhxnSLzhCzQiIwdRRItisRCGYxL0eljY1jkii4LGBNIPCfegCdfB5Im6ZXADMk7EACpQmrIcQgBpcpwXrDZNAjvyWIg04pZNsH6sUjwvLksg7fpdShNsBahE9Cz7w3dMGD3e+rskKNvnfHg7Q+INmd13nNcOvJKc3A6Y35ym81fdXRtiyWxT2Y6BQM4kYrKqho9iFlBXoBSgbqYs25X2M7TtwN5VlGVM9pmi5CGvEiG1uDdWOhoQONdR9Ma2s4l2U7wSCCMl4J3UExS0WGdQ3pJmdUMfcbhvKLQGZlUFDU062uePPkVl5cvkDJwfDRju2tpNi0iBMpckOcD3qa1adQgtaePkT4MEHXiLk2PeLW8QAhNQLFZ70Emn0YQHqECWVYzdD1SJTlrDIFMl5hhixMeGaDwGVlRIYSmEIJaZ/gJmN4wyyQ6ywiZx5Q5q+Cx3iF9SOlQrUYLjQdcMCgLQ6bJyhGCXE6YZJa9HM33ItKahklVUGUaHS2x32FjSVdVoNr0HFuFwzMpK7QfUN6joiIbZafEiEQwnx3jY8Guu0xpaXmOcTuCsERpidITlSYExcnt27RtQz90uGjSJsNaBDJNUUfYtZQCGSUezaScYTOP85bgDX3fsLxqELGiKhOU04WWvrcoHaBUHJ4dkJs911c3uD4ggqRzA1LMEpxSQ1FrttuBQlVMSkPmQcqOdicI1pMRKZUELXFeQABvAq3fo6RDkGCM+LQN22x2TKdT5rMDgtNYY7m5WadIbG9Zb7ZvvLNSpuLS2IEsy5jNj/DBsm9WYzKmRmuVtnfe8PTR1xzOFkyqko++9SGzxSHzoxLHgFSaPK/JpwXz51/j3T41GD5tgeJo2JYieS76ruH66oqu7QlO4ExqvPIC6olmMSvZ9yLJSAMpSCHLkMJg7EAIGiHTxjnE9H+RMhnIi6JEl2LkKzmMcVS1SsBjH7AhFXLpHB+bFJmk7anUDsk7VCouL86p52+T5wW6KFkvX+LNgIgBrdOGwNqAzjO0TJuCft+xbzryqkpDp1ogjGY6n8AmELyg7+0oaZWMUUhjg8GYwPtaJfHa5D1K8JBYE0cczGu1QZI6vW5iXksBpRRvhn2jCYVIRCqN1lkCjotIXiQeXrvfMz9YYEc5IoCzie9T5Hny1YqUwOZcUi9IBVmmyPyMoWuQKiV5xhCwFpwLI49L4V0gfz3difJNIIl37k2AVmquHAT1qfLVCgAAIABJREFUJu0t+MRQEzLdtyFGiqLgaH6Gs4HVaokUERnlCKB2XF0sWS23FEoxWIuMitlsweXFFQRQCOL4uog2ecByMXoTA0pnIPzILRLkhaRpx3TgEAlWUx28Hlqkz0br3wwm43h/x0QER8QK4fJ0jgtDDCIxoiJUc0W379MgWqQgmBDHyHEpEVqRot3n7LbLFDhRFESh8cGmwIIQUSofpTRpiClierbzPKOqU8EfnCfYLA05Qxp8V3XGwcGMEJKH7ejujIOjgmeft+zXl/StRcU4bp80OoNMC6qipNs6Pn30KdvLV0Dyd0lmRJkTZLIa6FwSZUDILDXlasTUkKU7SwpitBAimhH6TiT6jLo4hVghsoHZYc7NVYOSARMizkvKWqOKnrxWhCAxXaCsUpiPUKBz6HqDNZo8S+E0xnmU0njrkTKj6yzffPOIICzNPuEvyip5s4WCsiiIPgHalVCj528MNhvDNfquo/UhoRJERoxu/J7kS9QqT+/5+LP5Wpb7mkfoXWL3JWRDsuQYk+o6pWSSvNdz7ty9xaOHL2l2PVK/9uRq7t66zcXVC3SIzGcH7HYbFtMDZsUxs2JOUJKimJDnGXhHWUI0Dj2x7Mya5WpgqmYcTSZsnUM6j3KOYLeYfUvQZYIIG0+hFN6GlCKoCo5Ojnj/o/s8+fVnbDYtXnjmE0nfbFmcTCCHbrBslg4ZZJLVBmiaSFVOEzjdSAIKnf//oLkiJqaGs54YJPi0JdFK4klRjqurLZNZTfCB3ltCZpiXHpzHS40XefLDBDWmucQ08dEqGWODAFQqLknFiYhp8uOqQOx2NJeashfMjufcefuMAcduvWd53jJdaJ49/4Lu/hPmi2PO7rzF5LBAPlnTvLgiCouYVfj9FBE6tIciV0wmC758fMnNvscLT1FHmq3CRkepLBqJDBlRDnz2i1+w2m7xwXLv/m9xvn7J3376ObttS16XrPeJJJ94OBEXG776+pfUokw6V5fkc2kKmGQQiHRpGWPfmJF9CAStxhSamIyOU4E1GZkXbG46+usduhTIIicbBFpIqjrHR8d+a/DW42VK95NajmlQcdTkFoT4uuETCKmJQdE7Q1FK6lKSFTlX2wZj1pRlTl0d08eGg3t3yW2H2bZ4l9ENNunJY0R4hQiS+YFit4o4m2QluiiwwTG4FMtf5RJjtpiYI8ucoqqQWlLlsxE66AjW4zYK2nQpySzJNRpZ0A4tph/w1qJUxuWTV0TSViWbFrTsKOd1MnUPBu89bbcnK0qUqLHxmP/xf/pX3LtzjJQw2J7z5Q3TOzOY5GjRI5zDN4GmEeSLgI0Bo+CwnKHnkehI8EnnOH91g8ZTZjkyF2yvDO/+8B0etTuMN2ilKKYZdrAQJMELnLH0naCaFQRrMc1AWRvQp3S9QHiD1un9avY9QzeAV9SFYzGpkEITvCBEgc4qvvj8I
cH2LKY1pZ7wyQ+/w89/8XOulyuigPnBAVFn3NzsMCIgp7BtOo7vHrO/6JhMBO++M6cY7vC42ULuEapnGDYMJpKXSYrlrMc7hzGgSjnKDhTFZMruJkVJ+5jhWsmQB2h21EpT5wWtz/j1k2eUpQElcFQUK8WTpUMCqEAnW2o3I+iUyhh8ajovvOW7d9+nro7YDBG3+jVaeY7nC4RQrDcvkVnGRFdUoiMTA04opvUcmrQld1rTuh2nh3eZhgHVNyke/WKDUybtIrRm6DW2aIgioLWmnOa0u2tCLqjynFxrRDXj4uEV8+MT8qJgfX3NsulBp+EMPpKHcVIcI7XOyVTGXhrq6pgoWlbbG9arDRMx5auvfsUnswmz+VscH57y9edf4MoJzy+uQS5573t3OQyausq5eHrDi6crzpcrbq6XVLkkrySrsOfzJy/55ONvcXrvEGNann7zOftWMNGSeRWZ5YIrUyBn4GwaUpBBdIai1EQX8SEyKWtWuy2vXu34/sff4cd/+IcclXf4sz/7M54++xWXV1f8L//zn1JXc6ROSXnOe3zUeGFROuNwvuD2nRlffP4zyuwei/kCoufq6pztfs/B4RF/+I//EZ/89o+YVHB0codPv/wlXz1+wqOXr+jWLzk+OSOKNZvtjt1+Q5ZrcHqMFFYoleODZbU5p+0MnR1w+0ieHROFIwpF6I/56P4JD3nK+XrJru+pxQm37lturi29Sf6Xg/oQiOy2LX0/oDPBfHHGg7dO2OzWPH3+ktVqx+27hwy7PfvQYZxDqYA1aeAkJehM4r1FqTRYFALyQuIbR0ptz8irgotXr3j77gkyliOWBHxfsNw/R8spRXbKsDdcLVccHDpmC81slnF+LcmrCWoPwbb0zRZrRDL2R4EUgRjVWLJJYgw4b9IgKpAUC4RUZKss8ZOEJ473TxABKUbfkEh5aUJJopDEkNJ0bbDkeU5elWRlTtdaVC6JQievl9apFPQCLbP0nkSfIsNzjQ+Ooe9BFHiTFBRCeZzuCXqKEx2C9Fnvtg1FlSXA61jo+zBgvMGQGj+da4oyR2YSExgljGlAJccJf4gB4y3Ba4QyaT8XNRfn19w9fpci1yOg1FNVJfjAZrnm5bNzsipncnJIjAHbWB5/fsF203D77gH9ENnve+pJycANUjrUGCKgy4yrK02uBXnlmR+37PeSIi+SpFrC+tJxciaYHwS6PrBeinTuFBl6hDrvu4bFomJ6nNNvFJuLwLZrmE4UcRswnSToGQ8+fMCv/vbn+EwQo8Z0AqULutYhY4r3jtIwneU0Q0HwgrzMiJ1ns7RIqVEanG+QIjGQTGdpNj1RC05Oc05OJzjjePV0iTORqphjdY9QjnKu0UHx+Nklb01uc/rOA9TU0f3yFajk6QsWRPB0Q09WCepjzekpPP30z+m6BgSosmB18Zivv+7pe0OW5di+S9D6qkjsrFEBlWlBLgqi1BgZ8MqnQChVkJcpkGrdKE78gu2mZX2zIcummKFlMR1AQSsgzDtMECyOCiaLgrJU5D7yV//HDikk9SRP916s2O43BOeRMaeuDwjsOL+65Pzyiv1+T/Q2DQp0SlLcNy0mQFXkKSTJgXOpaVKjoinEyOA973/wNsF49qs9fdtjreHy1XU6S5RE5RnB2NT0isRKi0rgQ/qMlchSKNE8Y36gcZ1n+Wqgc4rFIi0CbGdxg2A+zzF9TiEzhIMXzzfsXj3n4D7cvnPEnZNbrNdnvPvhFO0LiiynPnkPMVeszp+xvL6k7yM+dMRcg21o7JbHq4B6+wN0d0M7gW4qyXON8jOmc4XQgcVxxuGswIuMqEFXERsjP//s37JeXSa/a57TyQXOHHD3rXe4FXY03ZJHv1qRyUPu3TsmBsGzRyv+9Z/+aw5PK+ppSVVMkKrgkpt/b1vz98Jz9dM/+ZOfltM5Wmi0yhBS4Y1BCYWWGq0UlVDYoccMychdZJJpDT4oApqIIjhHriKRZPyLSRZO1w845/DBj7KDZEKXIplkreVNo2X8QNu1FKqmns6YTNIK2hvFJz+c8e0fvcd73/4hx7d/xBc/vySYhsU8pywU3bZBFTm60AgV8d6wubwhZte8ff9dJvGI/csB17dkWTIo+phCKESQPH7+itn8gPfefx/vBZvVhiffPGR5dUUYLJkSSNEyqUvqrCY2kixCJpNUI4zbuNe6+MQaSTHG3vs3RuRI2hJoIQjBEoCD27fJs4pJqZhOM+aHBYXWFHmBtRZjDJIkgZMqTzI7JCKQfk+KN8ZJpeUozdBvonCns1liLAVB13iWN3tm05QGl8mCTFY0+wvWyxXtdqA3ESMl7WbH3eMTFosFqirYmpbjwyPkaxW8AKED3iVpQVUV6FxRFFOOTg7Jy5zgA4Xw6KixXWIPVZOa3XDOOx+cIVTiZe3WPbv9Hm8tklGOIrO0FiemtBkZWBxPQUUODuYcHs0RueewyPnePzxgdtux2r9ErTZk0nF06x69DXz52Wf84v/6C4aLPWbdYjYd/brF9Ia8khwfTLl3csz6UnDr5DZlNkEGRbd3dF1E5gNdP9C0BgIIkVHkAkWG6RRFJqlmjsEIXBAUtcS2NaY3EA1ZFok+T5eBV0QvUiiJEhAEwYVR016w2a65feeQw+Mjirrm5atzbOiQhSDKZBB/+vwGIy6YHlpk5nn8TYM1gYMjRQyWdjPg+5rlM8dBccZ33vktvv/h7/Dzn/+Km1XPMBgEnqPjnL7xZFnOfD5lMZ8kgGhMBeRrH0bvdszqGfXUUU0MZZW2I8Z0lDlMphnFTHBzuUzJXkNgWFs6a6kXC4JK+vlFLDk5PmDIOtCOMhMcTicspWZQFbWEDzPDo/OXLLsBP068TxYl3z4VXJjAlkCjAtcm0NoyGc5FwApPHHrOFg+Y6Qw9blbuP/gBzbrFDxYtAzIH119S6AlaLbC24gff/RHn52uqak5dH9CuHF2/xXQN6/WStmvAOmqtEM6jiGSFxlmJjZFBOJzyzKqa3/roPV48v2S934K2RG/QKvLi6XNurtYcLm5x9uAe0mkyoYnW8vyzL/lXf/p/k9UTOqdZbgP1yTG+67h14plWjn7veXB6n7YfsAuFvJWzyAZoPJ60USrKGRujKd9aoOqSoq6Y1BU3Lwdu310wn2kyIhcvLP/h7/8H/JOf/HO+9d1/QDV7i3x6xFunb3FxfsHFxSUxeGbzkhB7nG1xtkPJyNAars6vWa9usKZjOsnohxVtd42xK+pJRMkFz54940ef/Db//D/+Y7L5KVd7+OabCzbLDcp2sJFsL3tuViv2XUtZTpFxQcAl36oQhCCwYUeeJ3+N1oqj4xpjLLkoqIopB8clXzx6ybbfp/vJeIIx5LJGKcnJ8YK33r7Ds4fnmK7BmwEZPWWR0fdw9v7HHB7fZ5pPefLVN7zz3o9AF8hco0pJs+rJ8yRREyL5T2QaBY+ep8QEzMuSZr/HGkNdlojg+fDDt5nPjtG6oN9tWV2uUEHgesN+s2S5XjGpp5SZplCKUuWsrw0fvH+Xy8srnrx4wbJpGJoG57okzxQh+edU8mRZ
64kxqQcS0DS8abSs9aM0P9WqRVFQVkVqyJzD2hSCorXER4dzBu8dugCpHMjXMsKKZtMlRlyWs1gsaNuG2fSQGNN2ISkKA5v9Fhc8ZVWOw6MdIRiUjFRlTogtSkiUVCkNUSoyWaFGv4gQDq0zyqqkrCvyIkcIkEK+4fqAfBPkYYLB+QBOUdUaXZf8sz/+Mf/oP/otTm/DN1+94MnTJ2yHDfksRxYS6yVda8iLnLO7R4jMs2+akeGkWO+vEQpsWKJ0SzV1BLYcHE4Z+kjfRro20A2OEHpmC83RacXRScGzryXTheTwRDA/DHSmZzLNiLGkmhSc3oXlqgNXJc+YDAymp6oLtpsO10cEGesleG/QSjOZTzg+O0Bkmm7XkOuKPNMIucEbhRYlSigIHjMMPHuUUvDKCWRzg2TObt2iFGQqR8Y5Qgo643DRIQvBfH7KybFO3C+tUDJnt9yCjxRFxnSSU+QDD7++YGgGMpnjW8EXf/klT/96l85FLTAxqU4++oHh4MRS5A6wvHhm2G8jItbU1YLJQcSLLcPg0XrCweEZ3kXKWuKjZTCGGPPE48scSroUrDRYikka6IbgyYVic7Pi618+4ublNWa7p72+4ehezr0P5syPSkolORlyMmp2a8N6adntNE8/XdFtFW4IOONRmWKzbtCqIM9zsiIgdY/MxajmgWlVEEeotnGOrjdJZeMlXT+kxOR6Stu3ODeMz6rCWsGts1u8/eAOZV5hjeTyqiXGlPyoFCAjvetR0uF8SIFwKnklXWAcqmjIFI0bOL1/xPRwCplgv94gpaaaCLQ2XF9cYILk6OwUVGS33WA3O9q+gyjpG8PV+TlfffEF0Uy5vrjg6voZvT1ntct4dfUpXbhC1QZRQJVPOJgqFlVFLqfsOoUpU9CK7wVRRU6OirR4sMk/W5VTFvMZRTZA7Gm7gWEIzA9SLLz1kZvljosXPdNDgcj3RNXw/nvvsL6UnL/Y8vzJkpurHc45Ts40Ibbs91uurwyhNX+/Ay3+5L/5r39an91ClyU6z8gznUBnMX24wQsMEh0TtEtIRV0W4DuMk/iQpnLSCyJmZE6kw96FkRYtQI454Vpnb+QMMSZ+AgG8CESRjvL11ZZ9sycKRznVHJ5O+N63vs12r3j4zQWffvpLorFomfT6fW/ZNg3KSzSWYCMiKBYzzTsf3eLileHq2YZh22LFgO18kjoogUKDV+gyBw1N37HdbtmsdxAyqnxCXUyp64q8KHFW4HqH8gEtXPIlMcoqxq8kbyBN1J0bU3aSqVaJtM3TMvETkAqLZLfrUSJxpqpJSbs3DEPA2fFzsA5nLXKcGsboUTEQ5WtfQCREP4Jg3Ru5RuKeOIIb5SQkyKd3gqFzDF3P0O/QSmE6x8nxbQ4WRwzrNao3fPj+B0znc6z3vHp1gxYZ3qYYYKmSbl6K/I32faTf0Q0dvR3wMaC1YDJVBGGROpLXGZPDgs1mT793MAikSTHtSYbwGmwr8DFST2vmB3Pmh3N2zRY/WMpSkJcQo6WqKu58UDI/PeT49AE//oN/SlAVyILNdsflxTnOdEynM6rJlGIyIa9yRIxj3HTEBIeTgZvlS/a7DSEM1AvFvt3jvMFZRo5VYLAb7ty5w2RyTHAFgxmYzQ/wrsAayTB4irJHeJmM70qxOKyRApzrEMqSlxIlIHqJtyJJBQO42IMMuOjx0UFmkTlkRQ4KPD27dst0UYFQ+CBRWcHtu8W4fQLvEsdis9px/+6Cdx/c4e69u3z9+Gts3GJMS4yecjZhs28Sy4bELbM2pbbJrEBlGqHA+fjm5z/GxEeJ0YIvEKLABsVu49CN4aTwzGWgDFAVUygzyAWq0BRZwXoYsCYgjUJ5TZd7pIt0bU/f7/GuwRnJrbMT6sxTiJ7ZQeTaTHm57Oi8IsiKoRMQd0gkigJFiVZQqBztOwgdvfLMbn9EtILBOjrTcVCX5Mozm92nqE5wHq6vn7NdX6KiIZcwyyv2uxtinwYbTkbmhxPyWlMWOUpreh/IoiCvA7fv3OP2nbfRWU5rDeeXr7DtgLY5mRIEBdEl2KPI4ODsDt2mAQL1VHLvXsFUSW6utqzOL3G7K3RsiJ2nUiWKHBUE9996h6cvLnE2cjCf8g//8Qe40jM5rqmPakSpePHkmuqgQs/S+953HYcnU7KJRgbNJJvx0bcf8PGPfoTdwa9++SV/+ef/hpdPvmGz2fHWO/c4PDrib3/xS8qywNrkA1BKMp0WeBeZTmqUFnRtj+nhO9/+mIQlsEgV6bvAH/z+j/m93/kdbp+c8uWXj/jlX/8F2tuUzFdP+O0f/x5Pzx/SNVv6tmO53KfG3Jvk+hn9P2HkNnmXGoYQUwJtcGD6gaa/4XK1J9cRSWLFNINks7V4I8lFTqU1q+uealqyOD6inE1pdpaDiWK33xBcYDY74e333iNWOUPo0bXi8NYB0SmGzhB9GNkyyecihErncEhyttfKARc8Xd9z684Juphxs1qyb5Ycn2TsN0l+aA14K8m0YLvr6NqO3hhsDAgpOT0u2CxXrG827HcOSQpqeJ16mVg6o79VKpRMEqkkDidtB8akuNe+ZyUF8/lslOqLMdrav0GjxJhUF9WkAjEGKWiFEAo3KLKsIBKw3uBCT13XCJHRdR1d1+L8/0Pdm8XatqX3Xb/RzW51uzvt7c65t9pb5SrbsSuxXCYhwdiJTGIJ8eRH5AeElAeEhHggAiEhhHgJjxAUgZBFEC+OrAQlKFGw44IyLl9Xd6tule+9pz+732ut2Y6WhzHPCWnMc7GOzsPS0d5n7T3nHGN83/f///4B5x3GFEipcTYwTdM85cvQHqMNhVFEJ3BTwE4+N1ZTLqrqWrHaVMTkUSo31rx3+DlnLoac1RdiyoRCoNQKmcCFSNGUaJ2ozH2Mvs+t+w8xzYrnz08JNqKTwXuP7SdE9MTgGQbLMHh8ckSfgTaZvKeoF8UsP5xwPmLHnIUUQqZINptMHX3/qw9459036PaRFy/PUZWlKDPW++oKDo9KhtHTDZZ+sAh0DpfVWTIqVGIcZ/+dlXirEMrw5nsldQVSC6LUrDYrzp9ez/JEj51kPnPJEqMl2kSUAm8lSkNdlxwfHUJsUTLhrGccPUJIXOrQwuS/6OzhEopltaIyKyQV19ctMgncODENHiEWxNixWByQkuT6astw0yKjYLKWcXKkIDk8qNGqoG8r8Me89+Cr/OgHz/AuEUP2f2eZbkcI+Yzi00BII3VVo02JFJp+mNBGMwz5+5amQqmCac4EBIGIUCiNiKBTQqdIkTRHmyNMkixkw+3mCG0D3mnCPhHahO09ySakCJhCUFWKxbLAO0vV1FSVoSglyCFP/KTMA4GYcJOd98w8Py5MyaKoyMEFYm5eZBm5MSValyhdIgqo1xPeZbp0tU4U0lOIAkX252uTdchaVAj07K2tKEuN1tkTp5WGZKi0RidFJQumfsD5iNKaRKZgDl2k21u6fYubRoyoSCkQnUSRY3ru31vy+NEzttc7gveUtSBWA5oRhUYITdUEhK8pVYOkJERJ0AZfWXRwYAPbPRw
fNpACIWikqDg6XmJ9jyLLaJ3PQIumyY1zIyvq8gitBQfHAYiMnWB/teXjj85wIbLcVLz14JDjewuurjq6NsNhyiox7f50WuBPjCxQQDZCKplpMUrOC55AKoW1gVIqyrJEGp116EkR53EnMc2TDDFPaMSsJc2LfNaBM+u9VTYaxrzIi1laIWf/k0wSby39vgXlcWkicMgnj1u2+z392KNlXqSG9powOYZxxKdErUELk+VylWB9WxOSwrYTrhuIbsoQiHy5859ZwqeUxE4TNjoKU9H1eYJmTIUSuXtm/UhKPge9RU9MkexUe2UTnvXvM7UgxhzEqHU2JEuRtdtax5mkqFAy/79j6zBkQk9Mkfa6J2Ey7cYI7NQjlcrTr5T9UohEYYpcSM1hfK+CIQXM5uc4I3yz5j4cRuyvj8i/nSeLzAcCHyV28ljnSWVgISOqzAuK9bmwq00mpIk5+TyDNDTaSOzXLHbjKX9HE3A4ETGLgmphgAmhs948kf1ii3VJ9IEyKXyCYZKMWPyvO3ghUN/Ih9IswSiQKmeixAhlrYkqMQWbsbB4Ls8tfbtDq1PsfsXoBIWeDxfzfVZVJUiFjyGThEwuRKcRfBII4bmzqjMtSFqSchRSMPUSmRRGSFQlsWFH2+3zYSNCiIJxKEjRoUQkJkVME1pWSK1nj7gm+gGjC4zMxC/bzQGbKeutY7QoJen6kSigqnNexq5zWOezjjsEAi1tX82wGMH6WBBlRzdofFSoosD6gdtvblgeVFlO41zOomDPYp1QUjPZCVNotFC4KTINLjcBpM2daCQkjdEQXe5IZuCloFAgkyCgsHl7RpYVhfYsZGRdKXxT8WSYM7S0QBRi3qQ9CDnfr6BrRXSBPgRejop1s2a9OMTvLXZsaa/hempJ0QMaZ8V8qCyRSaESaAmjVIzB0ihJjAXbLrJ2jnJRUYwFe5dwxlOUa7wocNYytOdM9gpC9p1OVmLWJc3hAVPXZu+UzIc3JRUyR4SADDmNft1Q1gWJxG7bsfVbbh2sCXVF1w4MKee7ocEHy/X1Ja4fKFWgOdAsjgvqTUF3ZmlHjY6QqoANLSwKmuWKUicKdUbwNwgGGBSy9yw2B5RHK+68eUJ0kauzPWX5kuAsBI3WmtVqwfZ8z1KWyChnI77g0eNHjJ3Hdj127Hja9dRfWLNc3+X23bvcunsHZweULnL0QgoIZXDO8e67b6O14vLygrOzC6aR1+ht7+GXvv6v8Zd/9df54vtfplrmsG9jEiFZVKFZFRtOz5/RDiNVveTNNx7w9oOSZWP47ne+zThmJLOQMcsEQ5o9q4JxcpSFeg1QKosVxuwgCQqtUXXipvVzESKxLtH3nqLS1E1N3WwIAay/wifHdHWOjAYtlmwONlzvz1isVyArbBxYHa3Zb3fZgxLnoN05FPTVup9F8LmIcdPI9dU177x7n3Eaud7eMAw76uYAqSVuyquzUmpuOE4IJYgIxkmgpGEaI0ZJVrXhSuxBVEiZ9wIpBV038ipHMs0Fp9K5ycm8vs7b8WtSIAikknMG1LxnCUmYfUryFeVXKyKvYEuvzgg5WyaIGYriA1AyjgMxBrRWM9lNsFov83UaLTF56qaYKZOJYRwITuFtpkEGHzFGUxaCZpHjKFKKhOBI81oXYy52Ysh7WZzDj5MQFGVFWWZfBjZhg+dgvaQdrnj2XLHdL7g87Uhk9UjwnuM7CpGW3Jy3jIPPEJS6RJXgp5CJhiZfm+ACwXqcz3trPjiSzzIqgYwYI+k6S0wdV5cTujA0q4Q2CpEq7txd4MOeJD0ZLCOJLp+vQgqImIsEowxEg7WR4EfKRYVUBi8CPnr8NDCMW8DNe4ZGCUW5qElRZ/knmbY5Di57okODjivWC0WtHUbv2W0HYphYVEvWzYaqaDC6wCfHyZ0Vy7rGucDObnEhQ3iCy4WsS/DFr72Nn0pSjHg3cf58T4gC05QZRKMFGsn1pSMEqCvP8ycdpDw5TQnGccKFBCoj/FOM4LMXfxymfDYMESVTLmikIgmFT4qilNhJvUpNy40fkeXrMkXwAhc1o1dUIqGRmKCICozWyKTAe5JIaKXQSiNn1U/0EmM0wWWPt9bz8EDMZM4kczyCkrPPVhBUPv/G4POZz4ONYQbF6Dk/MhAEKCK6Nph6QVUWNLHlWnhalzO8irJmdbyhG/aMrSW5QFMXaKOIswedlAjOkzAYVbGoG5QQVPWCaTsxjqB05iJIkYOlBZkB4GJukHo70e8jUnq0LAjeAZqUInZ0FDrTCWPMcDKjIEiQUmcriPQ0TWTbBYooEGUJZU1AI4RHmJTBKFXB9T5iZD6jhmli7BJtrajrEqMLpCipmzyRnEaBHWB7OczKBbCjZ78b0WWeiZpVAAAgAElEQVR+ZqScuQ6vcmP/lNdPRnFFJFlHUoqoJVPKFMCci6Fz2nccAUO5bNBlQRg6iDlMjzSbN8l0IKEyWYgkZrka87g/EcKMhp3NtPmk9sqwq0EYFApTQkiR3c2eXd8zjnDxbKDQkZPbNXdvH/O9sxfcbLcQHD4EkhTISiJTSV0kymWiPhFs9yMaSSEC1ltklcEI8zEfEESRDZsh5vGwFIrJT1RNhTb5szkb8TEXPolAiCG/l2LOF3klBxRIclDpq26g1pm4J+afvDQl/T5rjJWRpOTx1jL02YMy9BM32z2LxQZjSoSEMUFhzLxhzsUqAmPU3MXJNBlr7Ux+kq8NkVKoOZ1eke5F+v94pPktkOOcPo7B2pxRttttkcGy0glZK2LK2P2ha1mvJaMbcbc1RIG5mJ10KuF+zTF9xqN/2xCSI2pFXWgW65JhyHjymL2o+DnL5uBwiVcZkjEdO8SfOMJvesQ3Jer386FKaUWKYEfH5EZUUVCsCoJMs9EU9mPHxVNN8jd0/SW/1/+Ar77/M7z19hsYI4khZPCAnqU0kyXgc75GjASX/QdKDHzh7c9ysd1yNV7T+ZZaH+OmgJoDc3VdIELP+fUFIbagK8S7ke405sVTg0QzTJqF1vlgQGIcA24MlPUKWQhisrQXlnIRkTqiYjbXmsLgXGDoBoIbMUWi3Ulas8WoAiMrEiPX2wmtNc3CsF4L9lc7ttsFWtaZDpkcn/mpByyqNVbA1W6PVA7rWo4OVxitOD3dUZUliuwZiz6iComWPmu9kyElk5PtrSSFLH11HupG5MmaSAijqIuCzmZ5nCwSixrcqiB+2hF8IERBLOYDn8keiRAjJQbRGIrgiGPgupMcLCpMKrJ8wcL1FoZ4Q7kqcSisC8hCYtSCZH0mHYpAD1gCk6gQvmB7HTjeXdGUAlUKhJF0RYJmw9g6pt2ebvcULT26yIv56CRaVxRHhwxxIE6WGCJtlzuaSmYio5AeqQ31aoGLjv3VBefPzzESfvrPfQWB5cnz53x6ZVE+gU4kkRi6AeVH1oeJo/uG1d0SVzm0khwfHbJZVCRbcXrxCApYHC1ZlIKpvabtL1htFEZlJUG7c3hRcevubUSKRCQnd1f0IhA81GXJ0cGST/74MfLWIYsy59A9eXbO9Y8/oiwUd49OuH98j4+fXu
G85/rmGuc9n//C5/n2tz+gqspM5nQRFxKTS9y5c4/lssYYyfXNBS9fnhPikIlxQfDL/8Zf4cvvf43NwQlJae7cv8XF+V0+fvwYQmSpKr75u/+I64sddb3irTff4eGDzyDlxKNHn2LdlOuDeVLzmjgrs0RGeUEMCV1X3L37gNF+gh8t2kBpoBAWUWiUARcD225CVwJdKIh58p5EDmOVFrrtDSmajCVn4OTkDh7Ls/M99bKkaAxTlwFP2Uch5yl9bqSlmXRLzJMW6wJDOxCSY+g72m7g/LJAi5rEK0S4xEeFKXNmmZQa5xQxwdQn6qLi+Kjh6uaCtktIoSlLRVUrun6Y1/u8DwUH2uQiT0gxm99zCHHeJfJzJ2XKVOCY930JTC5j/k2hZil7xq2HGHJRmxJCREKcch6ZVkitcN7SdbkJUxQ6T78QLJo6N9z0iJCOZpGPOdMU2O8GJi8RZOpmCIngA2WhaRZLUkzsdi2TdSiR969XkRAxhKy+mJuI2etWIet8VpFOcH19hSoqRn/O6fk5zx4ZHn28RZUWqSOmSnz2Sycsyvt88M0fY19uIQnqRUmzNOy3A3ZwIHPj004uNx4DIPLvRimBVGKm8UZCMjz65BwfJCSHNiXLZcaWR1dx5/4JTx7vKBYBLcB2im6MGdARHckFUtSsDwyTU3g3YbGUTWDoFbaP2CngY2B741DaQTIEX6KUYL1cYV2LtR7nM+hFFRZT1EihCJNmvb6NXgWKoqCpr2n3A8eb+9w9Pma9PKAsF3TjlttvHjL1PZeXN/iZHorP2UrKCKQJvP+197g67ZAxovAE0dNtJatVMRe6E2dPJ66vR4h5b++7H6GLrMiZrKXrOqZBUNQZrZ9ieE1nsUPO8UwpF0OJQFUZIooxJkoZs1fNvwqJlngBqpAor0heYJ3mZvKsTc6sc12ij4l6UaNbl/GBeibKqgbmprQdAlIY+mGEFDC6BKlBpPx8zdN0KSVCiRxNECN28kw+n7MydGImKJObFylMeCkwoqRoNI1ZEcolcYjoao8VmYq3KCoOT24TO4H1l0gFt+4cMg6e/fWYkfgJXHAoo6nrBYvVCikCxbJB7vO9go0ZnGIEOmWyrYvQ+YlKGVK09P1EN/S4sUbLElllyqftfb73UyBFRSE1ihKpbTZnxKxCOyg9w4tEWWuqTclmdZDJjoXLtFYd0LLCO4jaZIVxDNhB0F5rZCgQtSEGR7WQ7G9gGiPD6Gm7RLNY0HeW/Xbg4tSyWFY5aLqRIBTT9P+H4iqRkZGdg22kvdrjESzXK8qyQFcCtSkYrgJCZePqMPY0acIolXGyRIQuSFLl0eRMIDJGk5J/VXDnm3KWemTQBSgJow9IWSKTzF189SrjwCB8gd0O+CKyc5a+t9zsALPkcz/1HsP+it1Nx+VVoBtakpl44/YJh3eXnO2eM04jt8oDTJWIQiBJjG6kLIosCUQQxcTh8QlJSvp+4vLiBrFUrFaGokiMbs/Z+QVH1TEyBkJyiEISx7nAnH+RmbiU5slSPhAYo2fi3/xeC0LKYZSq0FTLiuAtqoiURZnzRCK4yeHlQBinLIcRMPmEiD5DFIqcG+X9OGc5iNzp84GizN9HzJurmyasMTRNSVMbLjlntCOxg1LXbDYHvP3ghE//5JSu6+i7jkJrbi9XfOkrP8XjZ495ev4Yo6EsBO1/7fFbqH8TkJIkBSH4eQxeAY4oYw4/nEDFJfiCfneWaYCsuN5P6KLA9Y7pyxPD3x659VN3mNRAJH8t1qGFYbqZAFCFZr04pm0DIlqaRnDn4ZLv/+Ejvvb++8io+PSTM84evUC6G/AnBJ9llavDDdv+mmAzcneaet58+21Wqw12HNjvLjG1JMVAYxq60XN2M7BcSOTakiZBRFKuJNEt0TLRd5bteov7u5HVX1tQbjPlLPrIOFVM00DEZ4lfCJSiQZUDVZ09c892keWtCCLrsomGooh45xnaSHsjqaqK0ni0TIgUiDbgAhixIMYea23GyNoanyR2bClwvPNZzdvvW9y0xE6Kl9cXLJeS9XJJ2wYGO7LvOg5WJYV2vPngHuvlIX/0rQ9RZkFZZ0nmNAY0gvqoYFGVJBJX+45lDSN6jjMI1GvF41NNMIZdmihtT88GVyiKJQQi7d6yWBcEVogYkNETkCSvsNsdcbIoNKrtcHFPXEaKzQG3OsPkbjgdwEaJVxqVIs63oOtcBPoEscSXgZftNXIcubNQmO3HnKWCcRLIsGFnF4hSIIcrsC21VpRovBmJIdMLdRh4ebpnsBMiCpQoiNoQEeiyQnpgGjCHOkdZtAPD3lJpwTvvHHD/QZZAls09Pv4nj4l6QKgChMTjuffZhqO7liChdRPjRcv5j77B9RYCBwhzhOCIq/MnbI4izZ03qe5+Fne55UtHNbLuiabnk0fPQBzwne/eYKpIs9J8+S99lu998pLj5pjSF2yfXnOkImfnLQ+Oala15MPnpwiVOFjdRljNadvzg+99yIff/w6rTcNnPvMZvv6L/zofffgDtBAsmooiSPbtFfWi5MeffheJYBwHPvv+CS+eDCyakpUssNbzwbf+hL/zW/+At++/x5e/8BXu3j6gWFquzq5pxxGTDMWgePDOA2gCSkaeP3/KB3/4fcZxoijy1MJbiTaZQJjz9iSmiOhYEmSgrBZsNveo6ksGEQlhRMaB905qvv+szcGyTqB0LjzGaSCFK7SW3LtfM/QBrxVT2BJurqB7whf/7C8hy5IxCm4d3qK9uOLWWyecPz3DXTnqpiJPh1T2Qjif11rI3W4p0Erwgz/+IffeuM1qXbC5fcRgHdf7nkIljM5EV49GaIUSIk8wnKXrd1xdSW4f1Rzc2vBQv8cPP3yM3QWaRcWtOwuuri7zBFjO8SdK03cdxpjXlLFXe88rmJIxGmM00zTl0GARaPs+x6lIMEVB1ZQUtcbaXES7IIg2QupxbqAoFxwcnnD3jfs8fvoRk+2JMaAQqGRytk68plksKcuKhObqes9isUDrkqYpGPc7pITaGEgSOw3EmNje7HA20nWWwixJcYZ0kLDeZkVGdhbMHmfY7XaIQVJWhrLUOBd48fyMzWbD/Xt3+KmffYd++KeMY8PyRPL252/xK3/111D+Lo+f/c+cXnXY3uG3Aye3b2FETac7umFLFANCCSqdwSA+OoQMaAMZwWi4dafh4mJgHMacoVdKYpCMuwN0ofBx4uzHP8Ray523JT5EHp0NGVkvFTFIUihyg3B/Q32w4HCxBE5Qdc/ueke7Tdgh59OJmKhRXG73OD9ydPIGQkp8DIw2YqdsQ6jrijt336IoS/ppT2w9x4dL7pyccO8kH9bxd7lz99br84lY5IzD6/0jnNhz+0HBixcF7fUN9x8ccnK74Wq/4zv/1w39TbZ/LA4N7/zcZ/FiS52geznx6R93XL3oskKnjCg94nzi9p03mNyElB5TCPwYubmQ1Esoy9yc8JOlKpdMQ8A6j0yCyES9yECuftiTfKAwNSIl5HyudB6EKSiqEo1GDRbbB8JuhSqOKc0Ku098/uHnSOERU3jK2F/hZ
CbhOpcb7zlu0lEUeWqmVcL7bB0RMU/8ZaHp91v07HOPAlzwLA43bBZr3Oi52e4p6wIlFcMwZLmsUGgM2/OJF90LLk8jNxdbDg4Ussyot6HvePrJS8aq5XDTcHJ0yJ13H2KHlm994wOG/YBAkUTBZnHINER22xZdWcRRTx12jNeCMCoUeW+KKl/fmDxNXaCEIDpISSNFSd8NIAWmyGoyNwiaqyWrg8M8JQ6RuiooKs/VzQ3daPGdIHURWs16LVnUiv3k8UNDWSeCA9FLDqojWv2Y85sOU264e+ddgnvBclVTLjw+bbm42vLO6j26bscwDMQQqVYF22FE0FGXkdpoTp/tKJYVh7cXlEvFlHb/n2XNT0RxJfScqm0jKUYObx+xOSq5+849yuWCdrB8/OFjzCrN1B+4df8eB8Ux1y9eIJJFN4Zpyh25FCMhJYTWeXydU59eL45JhTmcNJclMipsCKTgEVJQVg2tdwSX5slXHmcaPxC853rruNr3ECPKH/P2e1/m3S8siNOIipGb7gnDZNnfgJoecrToOL/Y43tQ6w399gxjEkq9CqoDJGy3WxIaZ4FgUGjWzV2+9P5nePi5N/gH/9s/5oPf+yGFUJS6xIURLRNZ8JBXfSVVllnMBdcrYpB3bsbdilmioAkoAgU+lrl7VkpCtKRpQkrJarGmKUumyeK8xYZArRQpBnzKBCjV1GATSUSMNtSLJbvAnHkDUmeCTwiBvg0426N2ueC6dXybbugRgNKe3e6G4X/v8Xfz6HgCWtHx35u/w+JvaZpva6SNOVslWSQRIyRSBLw3GcyQIqMfkarE/o9bxp/2bGfyhYC52wutmEBA8W8XrO2S8qBgYKTt25wurzRFVYAU9L/Xov5GltbZvznwXLS8QtxvBZxKgfcR/9t7Dn58yJv1bayy1EcrhDS4MdBue1QzvEYU8zOR8D/AE/UUIZ7N92a+hsNfec6iL9BGsKqOGK3Po/qbnuu3W17+T3a+3szylPwc7W5OKdsim8armnox4bqKFCxCWMoKxp3i/XffQKrEk0+vQGgUK5DjTKESEAS2g5PNEQdvH2XUdZTs+gxWsK5DKRiGLgNhvOGiNdR1SaEsippaHlEF+MG3n1FvIk29wkhJ2w7srwJTzEKmRhWEwbHrBU1ymFueW4cN221LJTYUpSLVE+1wzvpEgwU/GdbNEbvdC0w6QABhcnRDzzu3KtoOelfjuUXvB+p7FdpL/BiBPc4Hut2IKaBqInIcIRkkJVFm/LI6vo90nifP9uxd4J03FpzXx+xiTwgaJUtWJJztGXwEDdWyQLYW6WCKIAtNvdpQCUs3TPRKkAqNu2pZjYmV8KQishOOUpeMvuKgOeDBvWO+9vO3+cYHH/PdR5Z2ACEKfBFBBoRYotWS0iwYGFnWFT//mTscrxvOu1NSXdIcBsbrlu3NJRN7Njo3OyDCuOe7337K+3qFUgX9pefF/33K2bOOrinp445ut6Pfb/lzP/szvDy74keXn3L84CHlqsQXS6ZpottNDN5hzR4pBsJeYp9mWfFiVSALRTt5nnYdB2+9x2K4YpAarSo+84UD1E7wufc+R71UtOGa9Gfeoyom+mGH7Z/wj//h3+X+rds8f/kM6x2JiMZQVRXe5vDTtm25uWlZLAsODk9QwvDJx884O/8RH/7w93n8+Dtc3/yQX/vLX+f73/ouoqgJ3nO5u+b04pK9c9TLChECF0/O+ejDj7l39xBiwo4jUBJjhr/kKZEAL0FlANLp+Qv+3u/8PXSKCJPlvqXR/PRnHvDz75QsF4cMw5ZnLz/i6tLStY6mKTg42PDwzc/yB9/4FqoeqZua5WbFuq6RpuH6dM++67F4DpsHvP9n3uXxvR/z0Q+/z48++ojjww2Ts0AGrviQs6iKopiLgQzlefrJOXfeWnLyxppbtw5ZrRdsty8ZBksYPMTsYYoiB6/KUrA4CDRHkmRgP3jOLibWhzkjabGCso40C4FzZH9qys0NIdRrpYQQghAcCIeUebJUljmPTAg5T5IjRVHgveXw+JiiKpBakohUTUktNcEmxs5yc30BCG6fvMmdu2+zOTzm5ekzJDsWTUldlTlOpZ3ouz3DkDMlk/SU1auAeUeIDikU4+AQjJSFZHPQ0Hfj6yKLCCT5Wm6ZO7Mif+aUJe5KGw6ODji7uGDFEhGgbXtKbThaHnFzseeHFy/Y79Z87ufuc3H+mMszweMfljz/WPCdP/oDHn98RQyB5UEO7T09fUFdFJhSslANwxgIVjFNASEt9UJQVlle71zCTYG+s5zcK9lfCEQseOPhPa4vRy5fbEkiILSg2ztMGfnxd/6Z/WCzafBhADJKPviGRRPptxJVTJhmzE1SU3B8O3vsnHVcPjV4kSFi1VKyPPLsho66qnHB0fY7ulZzdFxwcXZKtag4uL0Ctjy7ekHwkbJY8Mb9d0hpyw+enkPME5zdxciyENSHhmp9QtVU/NmvrTnfn3Pv4R2aTYP78CPWi4HU9tk31RvG7Rq1bPjwu884+/ia9oVDNhpv8+dvFgVFoRAmItJIXcLmaEkSlmdPeqYBpiFbDN566zbd1jKOln4INAuFNBWjDyiTOKoLpC9mOX+WzQkfmSy4STGmiaryrG7nnDlxpWhVz172lF2gCoLaGLTR+JBo6oa27zIERrxS+RhA5yxRkz3I+TnKpMy27SgKTcrjTFKQjH3g3/v3f5N2v+M7f/wdnn7jmxRNQdtuEeRMz+AiZSx4+dHAMFimMVAESRMMqdaICtTSc3N2wTQmpkJzer3nB9//JoqW4GKGqpmEkIrObjk0DbFWWCPwLdx+t6GId5DjmunG8r1vfxdTGoROBCKuC6SkWSxLClMgk6YwFTEJfNgTYk+hGw5Xa269UbPbTpw933E6nFKvoGoCpgyMi8T5E836qMGvJK7OMmIXPJ1LmBKWjUcUOx6+/WVa+0P2/TXXreHBFxc8e3rG0AoEEcfIjz9+xNHJIUlbbq4uSbuK5AWHx4dUjUAbz9EbS3b7lqp0aCVwcsWO8U+ta34iiisS6FqzutuwOVxSLSuEybSYth04e3qOdJHN4QHOjxA9VbNCyRGpNFLmiYWuNJLAK0+t92nG1YrXRtvcVYtzSFqWVoSYR6gpBITQmKpAown7AUGWT5iywbp9Rr1qgzQF0Tn224nnL/e0Y6DSHqUjUW8oRC56wg6urm4QWlMV+YDvYh7zphgJMs16XolIEILPGQ0xIYPkxfMz3Oh4+uSU7dlADC5nMkQFSSNFQMmsr43MxWWSOTRw/vVKKSiKfKmlAJEk3ka0rolJMgyWfhowpnqdXSFFLhiSBFUoDBrXe0SMaDMHEsuEn0bM7OVKMWKnCSnkjGePRBsJMZAzFjI4xLsZTOAFZVmQYmAsJrb/1Uh4N6B/W6D/D4GREaE03X9g2f5bDr803P8va6y1gEBJQW1UlnhEmYEWnwuM/92AEJb4VU86nMuQlBD/oSb9RoKfnduPgPtPHPZ/sehBvboVAbKm9k3B8DcG0puR8NdzYCdHaS7VX9+6r99/8BcfUfz8S/wE7b7n
pvlDCv19ptHifuMarwP1f1bifsHi/h0Px5Cv2j//LFz/5y32v11Q/VGJ2giu/os92gw4G3ALRzz4F75m/rruP7XI3xKobwvGKRe2+/9oj7vlstl81vB/ePKY8p+WxG9LykVge9NhSihfy4MCalGwXh5yfHDCweESrRa4CE+fPeaHH30PIRN1lRh+w2EbKP6bDZ3dYVRFWRbUTYFQ8Cd/fYdcjihtkEIwtpauHfNP/T2B+ptQFg5VlOzaLdb36CIQo2K/H2kWgsUmElvB6bMJyQgIQioJVhDpQQqUkEjW3LmzRp1bpjEipMJYSQgj+BE/TrQ3lqoQlCo/v9ONpZ7z3pQSkCTKQ+9r7h0dcTg9QrXnJPZELzi4K/Aj2F1g6jTjKGkWGqXA2glZ6JxDkxI+BW6QjFEyqoioDWVdsRwNttszqIDSCa0XdNbRR0UaI4deUx8+ZH2cONgWBNFmIpRNhDQyuWw8CyFm2UsfeHbT0aZEIQxfvXcnd6MXhqOH7/DG+fe4fP6SYNzsXzQ8e3nOwUvJ8bFG6kTSZ9RHBdtOYTuLifALv/gz/NKf/zq/+0++xYvTLYuiBGCylmEamKa8/vq9RwmZfSyjQxtwtuRl2DM5CFXDzjpYLomFItaGxbLgL3ztl3n8/AlPXjxm251z6+TziLSjHzzjuCeGAW0ayjqgQiBFGC340NO2OZQ4+gKpch7T6YvTnFlkO/7oW9/iK195l0VdUuieb37wf+L0ROMXKCkpSolZKU6aEtsJpjGwWAi++MW3aKol1zdXTNOIMoIQDFLMNNYoskQ7OlA5pJ3gOWrWHN4+4O6bD7j35kNOjk5omoaLiyt8GPnST/8Mf/DNP6JuSoLz9G3PBx98iDKKdaEgFUxW4zZrZGkoqwJTlNTrBd11C0ny4OHnKasl5+fXWNvPWVzi/5VnyDwxyqRTGyZUIbi62jHYwPGdisXaM9qJIELOdbOJ9WaR86iEQJWS3TCQUs00BcZhBGF58OAB11d7Qpxo990srQ+v851g9iKI7MfNMSCvdlzmbKXZ5xPy+h+jnKeBBqULhMzy8ojATRFjxGtIRZz9J9c3l1gXudlf4d00y89VDn1VCSUNCYEqNKbQxOhZNgviDHaKISKERknQGkwh58+TPcpizq+KcQLyZ00x++0iiSRkVntUNYtlg7wRDHFCppw/JqWgdy0eS3CW06c/oO08DkuYAppT/tHf/4c8e3zFuN9TakWpDf3kETpnTQVSnsannEmo58P4NA6UZZZcu8nTd5ZinxHz+20k+cRqI0lTxTRe5ClX0hRGM3TZt2RKSbM2gMCOGQilVEDqHIY89RNpUminMvBISqpSI2REKoc0I1KWkCwRQdv2nNxe0zTVnOkmaRaRcegyLXiM7C4CPiRsHNkcN6xuL6lP8nlqUyqMzEHYyzcgTAYpF0xWcvOiQw0Tqqw4fbqDpx1qvEWIiYPVirLUFAvFpGC1cbSHC8bFRMdNnrAtNEUtkEUgAJuDDe0+sttv2d20HJxo1psSOyaCz1LWYXCoxnNrlaWlzsPgelbrAqUV3iW2pxPr1RrhE0mBsBHrfSbc2oBNAlGUrA5K1CDQk6DxFZ9fv4vqNGECITTN6gCFpyhEztIkkISGpLPsN0VG66lXNTFM+T6NEanmDE4lETLnzxWFZn+z59GTR5yenRGBybocL0CeGq/WFeuDitOzHpBoI4nast3vuH3rPW7dv8VmU/Doux/x+PSUMUwsF4aH797l+uUTdhdu9pmCqTV9O9JedCQLslR4e4tLv+POrSM2t49YLR2rRx9jnc2+ZmVYHlQsbwmcTUydY2otWpe0nZ/PkRWjFWyvzygOb4MqWB8f4lwBXDIOEe8lyUO90GA8g4M0CtRKUDR7QlLE4Bn9xNPLj3nrja/yxhuHbLuBfZd49HFLP26zjDpJhl5QmcT1Zc5XJBgWhaSPCmcFQieSSkSRuHP3BD9Z+l3Etwvg/F8+i82vn4jiSkk4ubdhcbCgWtYgM83l5rple3bD5YsrCp0P4SLGnHXlAlVtULoieU8kIrVEBgeCmUaU4DXu4RVJL742+gmy5SomTxJZv8y8gJaLBWFKSO+RZB6+SxqJpjAlpipwSjMNlt1Ni3WOVSMwyxIjQVpF6GHaD7jBU69zIGwgm2M1giDC/P/lUEERBVLkRT960Ai6oWN8NnL6/AIRsuk3ioCLCZHEnHKffzyRQKpMStRG5cIx5NBcrfP7FEEkRfATqlJEMoY1BIcUmqTjLKdUpCQxlUYEiAT0lOlyWuVOYEyJ5DMt0P+cBysQ38qFVkyvCtk0A0vmzARmBzRkGYNQBAROevpfsSBg/eMVy9/NCGbnJcO/u8d9JWBV4uBv3eP0xRngUDJS6kRI+ZAhBHCSCH8tG3f/pVcE9S0DMhJ+Ov97/AsR9/uO+L38mYQQ+f55L+J/zeP/qgMgfe1fUdD8C6+X7+wQH4P6VFItlkhpiERCdFBEwq96wjcV7hc9/uvhn38GPhHo7ymIkelXHOP/OhFXCfFLkmkIjL9gYQXiWmB+x1BUAefBv5WIX8rucPcXI/4fRcK3IlFawl+C8VcHxB7Mp9k4G8rAxee3aF9Qf79B/32BnVxuIkhNLDL2VpqclzX0nrIMmDLSNCvKYleyepkAACAASURBVDEX9Rr3yy3+1zxhL7OpVCSMrBFSYdcTV3/egWrodi0woKSh7Qb4NxPUIBuJiiUp5kLZBUvoLGUwxCAJzmGKvABKVTBsJ5QJSC0IKWJUke8nFRFaoHSNC5pFVaKlZww9sY3QQQwBZ1+FQ+cQTW8T3kdimbt6wuQptQiKfnTsUwKTMCbgvEUuKpqlIUiF6gVOikxKVFlWZZ0nqVxUSZlpoL1IjEJiNhUIhQiSQmh6En30GaKjChwBKxQpJi56y4+eT1y3IGUxSz/CbKqHGCZSSrgYMa7BysDlbqCNkYMSaqlo7YrBG6xwvPvubXbnF4ToCSmRQuLm/JqhP2RaOkwKmCPN0h6zvhIZfUvi8P5t3v3i5/n0yTWWZ5hSE0RApkxwdUGhlMJ3EYwkugQ2Yl2GklgfcQlQhjG6HLFhJL6UmHrB5999hxenj+h2N2yvLtiU9xjCSDd4RhcI1qH6npAsiZyTpE3BOPZ4C2luqGglcJMneU/GMniGfscXPvd11ouSfr/j/PISdbLEihERBCmA1iWb1YLOj8hxolwW3D66xe6mx2hNVZaEBEGkHGg9Z/llP2tAkiW4aIGuNWVVslisOT6+x8MvfDl7eYuGvtvj/cTD997n+PaGm+stl2cXKFMytJekrsPHiCkV9+49pGyWdNuBojAcHd2ikIaqWXF4dESz2PD400d854NvUlUGqfPPImb/YKYczt4lGQCNnTze9Shxg7cZkW2MwRiFUpF6WRK3LvstlYFYELzCTxNDPyF1Yhg8RZlDTLvOZQqghBRzc+uVp5lZJcAsC3y19r8GVaQwS9ZVBkT4iJwl5TElQpzpt0kSk8gm87mIRwRimhjtDf6mY7JdBib5mXKXEqaokFpSlIaiMtjJYlRJFBGSwAu
LUCqT9JQASQ4ChnkKlwvVEB0kXgeoSiFplkt8Shk+YDQ2uAzgmmFZepbb++hQOu9zdrqkfw6iMGghKc3E9vqc7c0pWmiMNshZe+JtRKSI0gkxB7RKFVFaoHQGDjhL9pCLHHniXFYZCAwxSS5f7nPWYczfK9rsUSFJlPFZdhZlzntE5tJXRJTO9wrJk3ym/cWQ0JIcS2IkQmh0lbM2RYIUA31rsZtMWgwziGO9FtxcWWRMOBsJ3jFZg8ezWOWW7+g6kArdROpaUpaC7myCytDvR4bW0V10FDiW1YKxc3grOKgPcTaglSYmhRsFXe9ZbjQGQ6EVSgJSUdYVB8eGqhJ025w75mxk6D1dlyea2hi8iHODVNJ2E4tDRbMuqBqD8x41FkiT0d0GwW7bE2UgyUhSZEy6FcgYZyJyou9GRK3QKlEXisbXHCyPOZ+u5kaBohSG5EKWBaYRFzLUSwqFUIkUUwZ1VJJISZgcyXlkyA1ez6xamcOXH33yKU+ePmK33c5N69xyiSlnjS7WDYe3Ky73WcZIkiRtcPsBZRo2t+7x1sPbdOdbnp2f4oNDLUre+sx7HG5qvrv7Mck6itLkgUDwdDctfvKouiAtDhj3nnC0QpcbtJlYHm/YXl3gp4iIiqoyPPzMXewE5892PLl+yThG7t57i1VVYoeJTx69pNt1HFjPYtlQHxT4qeL6esfYeryPaANFOT8XEcYhIpRDpxGZ6qxSEoI2dJxeX2TYmld4m+9ZHz1KC0iGNDUcHB/TtXuC9SSfvWxCJKKDMCkwgrqEqtC0k8e6yGT/GZ37X/X6iSiuTCF5+Nl7DEFwfrXn6mKLGSNXL87pdh0hJBarAnt9jRL5sL7vHfXdA4qiITmHtR1BRcoYSJJMEZyT1EVG68CcUB1j7kZliIUCHBCyCCB6psmyuHsLP1hS1yNjRMuErmqEkyBy3gjGUKRIchbbRvpYsKyOiO4Uv41M14l2u2VxGNHKE2LMfh+TUKFASEmcp04yKnCJotKYQuasDSFQjWYaXO5eWk+90NjgscGhU+6URfLGI2Xu7hljKEyB845h8EBOcQ82ElSEpcHrSLmKBOHx3mF6w/9D3ZsGb5ae5X2/Zznru/733rtnpmekWTRCaCRhRiJgVAQwUCFQJnFVCGAnhiSOg504Acpx7NhJxVUOThFX2bGj2BjbYLykYhYTNglJaEfraBbN9DLd0/3f3/1sz5YPz9s9M9KMsnwip6qru999Oe85z33f1/W7mrpCGIcgpZcnlJsZYk9RNQ6zCmSlQgWJrMXa2BhQ6y5B9eM1opIUP1MAYLp44pFKIYXEOodYhMgZUbFQcUOHJxK1wjC5vz9sn99h54ltTts50+MV6ApwKJ0xPvsIt9sGn1TRuLhp6ZyOxVX+VQbDuQAdIPpFCT9rSf5CH66B+4bl/ZvZ3OHLezAQCQjM+w3m/eZrd1YPYiYIwxB3qa/a1Ieh+OuKvcFZHn3sXRRZzv4rd/nEsx+lfX5K/ZfbN/wNFB9KGP03OV3Wcfx0g9MN9TsbzF8K6EdK7EdqGATkdUn/x0vGuw3zJVT/tqP5KUsYrV+eldggYNNx8t8uQcLgX2UMf7GgSHNaag7/uyXt+1rMFUPv1zbJtAEMxrTUladfbtC5lul0zslxjXvxFVSqOXN2h7peghTIzZLZXz3F7zjUhyxhc8XQ56RhSBNqppunLH76hCd/5Bs4ufkyeE+W91neOIAPGyhApIJ0O4FJ/D0lOkFKxWraramUgeAkpknIC0FTNchMIRKJ9JJCxkw8Ly1SgUwU128tubx9lrKwLKdTVosZuSjphMQKTTESuCpOqUSqECqlLTT1pKEnY6fWS8mqPuH5gxmJX6JVQFYKdb6PdAnCCbIkoAtDaxO8iOhaGSRN01GLlqIfKHJJawMm1ZzZ62EmlsUrHVY6SApWLsZEBAJojU6i9PRkseQ3PvxlRDsnT0DikIkny3PqDuTaP+c7A7UlJJGm2q1aqqbhxvGULNng8GTJ3ZNj3vvuDQ5u9Tk68tRNNMtXx6dk4irBGFoc6YWLDPobpEtD5+Nld9uWrLfDAw89jA2K/dkUOk9PF9S6h7Xxdt3KwSAiszWSWT0j60E/ybBOMK1XMezdQystSSKQbkguFgwSwyjx1K7h7nMf50gWyCwicdva0dpAUE0kBYqEsjeMKwPjAIvSkVZaLTs2d0YUecJyVjEajjm3e56dYR+/MWenFCzKnGk3ZzGvqKYGxQb9tk+eO0okdZWR6TF3bu/jnKEsSk5nK5AdiVY4D8b4+LwiNsOiasyzDBXuVDNbvsBsFnjyXd/CraMb9LaGnEyO+fTHP8W73vM0F66e5ZX9A7bOX+DRhy7yL3/hAzz7hQlSGXbO7PL0U09za3rIvj9ABUeS9Hjg6jajnXP0ih693pj3vPt9fP4zn1xLvuPkyq0pW2Ht01BBkkhN1yakEoTwLE5fYXKkufTwFdIiw0mLTzxOe1prwAryTJPLjK62tMuGpmnQueZzn3meyw/uQYCmikAgpST4CMCQIoBw63PseoJFlAxGoJKOhbjr4oFqDToy1sYswuDw1mN8R9s15MWApq1pVg1t1TIcDui6JeONHmmesqhm1M0SnKStO0xrkakkL3tkeUaWJySJpJYSZz1FmpNpDa4lSIXHRpWI9dx/Se7eFC5eHwJrOIdEas327h5N29A5Q2c6jienKK3RSqITgUrBrlzM60wkQds47WojZMUGTZH3uPzQBZbthHYR5YAWiU5gMTUkWlD0NEUhsYkhpB6EweEpegnLRU0gFo+bWwWNtXiTMBpH387dm3dwVjHYlEjANIa6cZRlRloEnLPUtaUoBWmaRwWBBJ1oItKbOAlVkqo21I2gaxxZKUmyjDS3uGq9bPSOpra8cmuCUpokMQwGmvFGSbWcrTH2MWy5qwNt61gVhlm6oG2X9MdX8KJFZpGgd+O5KYPLgtnhgva0QdUeORb4VJLplNwrRLKkqwXzqqI7tpgq4s2l3WUxicAmnUlElpFnPc6cGzAeS268eMxkMuHoeELbtAgSZqeSwUjS1A5jPFIGrLDofoHqBCGxFAPo90Yc7dfkPcHmjmZQCer5Ctd5ZBARuJamSOORLsFbOJ0umNaBvTMpRakIKuMoX3JYzfEppF4iVw0oQZaVKB8InaGtICkUQdQg4rTHy4a8HBNSi28Nrm7xwmPcmhwqBFmS8uwzX6KqVvjgGfRKgvd4aQhBoZKEfFAw3s3oTxO885iVQJV96BRCaNJixOaZh+iVL8Y5w3rNO9p8mIcfuMrzX7iN1JLR5pBq0YAI1PWCul0gG8nGeAOqAtENCLaH1Z7B3i5N24KrED5O0K4++CRlb8SLvevcfvGAxbzih/7db2WQS16+fp3nX7oGZpM8SRmUmkxnuCRncnhAs4jZk3oUEDpG4GA1XWNZVQtGZUfRS1EqiwwGrXn+xgtQlSiRkeSK0VCyXEafoQgZQm1y6eyD3N1/lulkQWU8K+GQmUP6lMRn5EGzvae5c3BEXXs6GyWgX2/7Q1FcOQHPPHuTet5SLyqqes
YgL7DNilxD0S/pfM32eBchBdYH2gDzxQq1ssiG6KlwhkppQqcRwqPTZg3LCHGkHVf2SJnFzpSLmSSWgFZp9JYYy/L4ANHTaJ1gVAz7q49PQVg2egOUd9iqQecleMhEIBEWGQKru6cUeYESjsHYc2bvCgeTu+g0o8wco9GSV+ZHiIL7HSCtFEFGg6xpPNZZ6sYw6Of4zqO8Ii/SmFAdm+loHD44nA9IFWV3UmoSnbOYzUjTNBKFhKIz8WTgvaf7Vk/1SysAGhYAyOck6XdnDDcl1oBpPcfHNeeuOq7/rxPs7npqs9ae7P3QNumXYvCsyvqEtqYVHe33dZjvfYOCZL1l350gnskQa83i5EMnr9HhcV/H+MKPvMQLP/LS19y/qlZ89KO/w+KFJWEcaIDFd0OcUi1e1UGut8G/P8A+Zal/prp/WfM/LPnqrfvJ9WvuwMtIn3uzTUwF/bePWX1ohn/wa6dZ9odh8c2GxWPP8+DOw5TnLyCylE6/+ecCsDsqeOjpMb/5D2/GKdTPrV/DLMY0VWJtBQhgjWBxmuOtIP0Fh/5Ix+Ij8Yfug6f55o7V//yqFnjxJ1sWf/KNDwQRr2JAgteSWhtOmgOGvW2CBFd3FDqjMzX7d65HCMp5wf7v37z/ebtvMaw+ecL43ZcRqkWuOtQqgIQv/PznXvNs0/h937vfNzuWH5uz99SQLAyQIsNZWJoaoT1dF+hcDOkcbwuKsUIoAWi6LuFoMkdpGI9H9MsRq7rDG3jh5rM43wICkUqE6BgUCpFkdLRMTY3rYnZPIjNEp8ilI0GCdXRNBWnC5LRFJxlpUqK0ZONUcjBZUC86fAPDpE+ZJ3SmQkpBlmYkA1g1sbMqgiSYio2NMd3CxanGY31Wd+fcnR6h0hShDMYsyMImiTbgO8ZFn3c+8RhtOsNNA229omqnVM2SG/sTuvUkZdwrWeqGTMNmLlBp4MZsyW987ibnd2r2xht845PbvOVdW4xKySc/eosXntvncP+QZWUp8jFXHr5M3tfcfOEGF7J38Fuf/de4QnDhyasMSs+iqtBpwqAomByeMltJ/LBDK0PiLa6qsJkk+IR+oemNwJ/0+KPf8QDLowW3Xppz+pzj8hM7vHztiPm8wXaOt2yD1Gd4+MITbPaHHF45y29/5PNksqNzAhUU27nHe8PCadI8ISsz8rxH7Vp2dckwzykHkpOja3zDU2/l8UceYms4YnI043/757/Gc198DvfgJXa2BpzbvUCVNvSqPjO9YJWfULiMjbKlGFxBKk0zu82HP3eNtz5ykReev8WXvnQDk8HeliPXJXUjWbYClWYkukIFotpAabrKAA0iOWWZ3eDZT36azbc8yPzkiGtfuc5nfv/DTE5P+aPf9T0xSFVojvZXfPzDz5D3OjYHm2xtnOMb3vN+Pv+P/haZ82yMeuxtjcm3C1Ta42AyZTGd0AXJu979Xr7wzB9guzaGs1tPnkTvrLWexhqyLEMnr0qglUix3nH4yiGjjQ16owGn3Yy8PiWRA3SaY7sOGVZce+lWVBsEj+9aqtoxOe3ixHcdSi9EuibnRURyWIfY3z+oCwUyoET0CXkvsEasfU0WQiDROjbsbIQ8IR2Xzp2nqR3H8ynBOLTymLZhY+Msvf5OnGZrjzWBalHTtXHyNCgynFshREJTW+YzT6+foVMZTfDWoFSOklCviLJSz/0m7D36Ymy4rv1jRCptvei4c+s2XhmC8AglKIsSJXMaF6fKWmsGfQs2vl5nPUFJkp5iMCzplpa2XnL91ilPvuu9vHzjZZaLJVIIxolAqykqj1lCtAaNQKqUrnV0nYUsR4qOvCiQOqVpoFou2Ly0Q9taqqWJuPwk0C3XVMPgyXIQSUvbpGiZMeinWD+NVDc0UmY0NiEES9Wsp2IKlI9eGdsabHsPra8oitgE8gHSIKhWDYP+JhBl0qtFhvMapXvIRBCSFllUpE3CaFCy0RtTjD2vHN+hyBKm+w3VfIkMmjufO2K4kXPuUp/RdjznLU8lvtUkMiVLU4QRiNSQBo/BMj1Zcfv6HVTSUg41w80hrTfYumO5mFI1noOjlnp6SFkq0lzQNC3BKqqTVcy7cgHnUgbnRwgpOLyxoK1qxhcyVmZCnwKzEky6huHVEek1Q/VyfIx0s4/wS0yaYINH4BmQslgtObhlWfRrDsYVhT3kwpkt0ipDnMQpsB4loFYEGVAip8AyHPRpWo0LHVJbkgJsXpEMUkSjsfsdol6D1wLIADrUTFYmqoyCJfgl48EIs86vykuNV5qPf+Q6b33sUR5+YIPgHPt3n2GVbrC1s0VZ5DSzOc9/9ks468A5pi/f5pO//kGe/DfeirUtg/4GW1sXaOrrWNmilUShUF2Gny0IxiMT8EnCssnobVxib+sSEkeaay4/8gg669HvK7bPLThzZYeTlx3vfcd38Hsf+T/5zB88S5oK3vdHnkZfiHLmbpVxdvNxRiN47oWPM5u3KD3C65quE2RlwUArcj+gNTWN6xEWEj816KRjXI7IxjpKu33HslFsjzeZThrq2rIxbnjui19C9VtUpimI9pa3PPIAxje01uJEwtaDO9w5OQGXo1yC4P8HQAtnAq6dkyWOdChJ5Q7NqsH6FO8ctnOcv3KJs5fOsVqtmMxmdKuaZjWHaokyHWqd9yNZy+NEQFhBmqmIK3UeY+KOH0NnBV6uQ2dDiCNA/BqHLaBtcTL6ooQSBBfwJnBSzSn6BYPNHsFBmfdJgiJRKmrKbTQEplrRL0pIJBujMzgR8K6KniTkOmAxSsOkFEiV0BhHa1pcEIzHGyzmK9IkIeoTAsHFYgrvCD4aa+8XFCJKIKMMUKwnARlKa9pmgg/Q/LSn+3cCooW9Pz6isZr6T9SYpxqQUNWOIh0gM083qrj5CyvK/zinbwYkSUorPPu/+ArHf3OCaO+tk2fgA+6sf12BBLD7gQ3Sfc3tn4661O7vGkxrIYsd3+G/tUGYecz7O5q/WL96xzchXPornuXvr+LUSHyd27ZQvL+g/pkK99Tr5XdvePvXXKaFpHvjpweip6GpmzV6/o0fS2hJvtVHlRm+a9DOsjve5ECs3vRxtU7ZnG/wnT8U+J2/d5tutC7cBlB9qMZfWo/4H3esPjKnkvfJsbHavvdPGYEvr3ufAsSXNdmPjpDKRhT997fYn2xYfeSE3g+kyLlEaoHQGoi+DCULkiyjWjjOnhvireb026ec/OAhZ7/7QQ7+0U38lotTQWA+bSlST56mDEZDpuIUgO/+le9h+MU+d+/cZNqccOMrp6z+/AL7vS0IgdYb1JVEJYasL7iwucX+3SPyTJMkkkR77h6dkKWj+/hhKT2bO32apkMnAZ158rzP/PZdTGPQScpg1GNze8ylB85weHKHw+MDfB0otMA60Cp20rwXYHq4bh1GToEQgaLIAUuWSXb29qjtlL3LfaQQuNYyvVtxeqxJ8kCWK3yaIdGUmQdpcc7gnGF6NKdpPds7KeOLZ7n8ll16hwk3Xj7EdIHxYIuuCmxvnuObvukprj54hdODE5568ru4futlJqe3aKu7pInlF
3/9Y7Q1dF3AS0NZ9lmuOrbHgstnSp58THDyMmwMFHkKojZ88VM1vVGP/HyfB/oXeVvxKC9de5Fnn/88B8f7FOWIadWw5wy9O7cReUsYrpiffTtSlKwqw6SqWGmP6FnmK1gZQKaMN0s0JywPWxaziiqF0c4GX7k+IU16pGc2udQ7Ad1QFn3S3oi8X9AIzbw9JBkqcrlBKg0PPSZ58egm9dExuRA8cm6PV0738VLTtIFuIRjnA9zpPu/9N9/L3tY2R4en/NMvv8i3fd/TvHTzNn9w+CzDPOGRS2NWs0NWs202yi0m1ZxDP2flAl4o0uQs2bDj2st36W5XSF2SFx0m65jjGZ7p82hyJmbloUjtChtgvNVjdWSYdhojZPTuaokWgnpec/6Ryzz59NM88O63kugNJse3kFScKRPkao4qC9L+ENM13D095Ad/4Hv59Cc/zGrVcOf2K3z4k7+L3OzRKy6jVMFs4Vgtpwy3C9rpBFdPKYYJT3/Ht3LjlReYHK0QZi161wFro2Qoem/dfVrra/9umhpzaJjNp2QbBVnWI8iUznq8NRRZyenJLIaiInAmQiC89ZgQMNYSFff+1QMM4f5f3of1MSEWLWW/j9Yaa6LcjyCxzkbpmhAEYeicQumUnb09vvO7votf+qe/RJJppMyxTkWQhHLM5qcY07JYnSBF9FRleZStJYkgyYqItDYNTWNimL336wBWiQuSpp7HQnBtDEuSmJ0GrCXxUb0gtMMZj7MiYqd7JYYmhv0GT9dZtI5yPqUEWggWs5rgLP2yoFf0MOsgduFUDHDHcPTyPtvj8zR1zNry1lCLlEZISpmTZBm6FPi2oWmXuNSjE0GWOVJX0DQtnWkwrWQw2CFNU2azBcvFYk1gsyDWDR6RY40nyzvazmBCIARHVsZ9oTMxVPpevqSSEaHvvYtN3xDjIaSMEmBjOpqmQaqwDlWODdzlckmWpSRJwWxuyMsc66t1HF8W5YjCcufOPkdHU/YubfP2951h/84KkzqGw5Ju2bC7t0lWeFQaIhHRF5w5W2AaT7PqmExmtNOEUd5DJXGxHIqO1QLOXR4x3E7wScO1Z+9CaDk6ABUydnoXmbobJJmi6xRdq3CdwSiBkyAyTaILfJhTyJJ8I6PJFdOTmvG5IZeujgjSsKiW0KY0sxDz00LANqvoKzdVnGgnEmssWSFBdLT1KfvVKYlOOTm4fT/rTeiMqlrSL3vRwyg0ZdGn7PWRicA6iQ8W08Aw7dMf9RG9wMKecDifIbKUYGOUQAgBmcTc0+Ah+ARTBbyF3bPnuHj1Kg+8/e0s5lPSVCACeGvp7/QRckiqN0mznKpa8Oj7rtB7WbA4acnTnCuPCD71q7/DqmnoOKK+OcUtanpJibSa4XiDnSsXGF0c89ClJ8mzHkIEjKsj4t6L6LlOEpyyaLnOR0syBr2SY3+Hn/2bf5XjyRFeBt77/u9ADIbs7zf0+1vs7p1B53OWM8/uhQfJJhNOT2dsD66wrCaUZY7zisObK0ZbJVoW5OWQIusj8DTNIct5RRANac8gE8fBScre1mUefWyX7V3Jxz7+aaxL6EykYu9ePINIIBUJwjsaO+X5Z1ZcurjNNPXcsS0nB19nocgfkuJKKUmiC0xtMK3Ddm0MrI1BTZi6YTmZ0myOmE9nzE4mdMaS6YDBRfZ9CDFsLbh1mKLAGoVOQOkYchjc2p+EI6xlglJDIgTWuPvFlRBgq4ZskCHWGU7CR9+VUJIgJIuFpT/QFGWKDrF6l2iEbyjyLHbktMZ4h5cyyh6cwtuMrrXR9BtipyyEmH0kpYpjTqUjrW/Qx3RdDGzzkaR4b6jy6sny9XCFzkQ5nljTkWzXgXA0Pxkw3+kRNeR/TVHcloRO0k0F3W6g+S9bxF+PQI0izRjsjLl16S5b0wx9pEBIdBblT+68I/v9nPS5lMWPvXn1Xr2tob3yqi6196Ee9rKleV+szNrvb6AJuIffwB/1RlsG/qqn+BsF3fd2uLe4N75dgOR6Av9KgDbYN5L3vdGmoP2vWvyFgPq4Rn1O0f34V018SrB/qYOtN59uQfyBlv0yInQfsax+cAbAA//7OaYPL5k8/vrP7eixFV/4kbv4qsVlr3lsBf7qa/6fg384LmqKf5zjtz3td7xaDnYrCcuvrSBF60ludTT/hSEk4J+wkIB/yNH+eUPyywn6WYn2IY7LffQ9BR/Qqcb6BiVLxNjjzhm6Z+qY6PjaLcDyx2YktzYZ3Bhz4W9d5JX/4DbXHnqJrExZzKfUtqJ9Z41/KH7nIQ3M/vyU5G/34VgQXMLm5jbW1xRFgVQS6zqEm+M6jWk9xhqca0l0HxkCbe2ZuCUCy9aZnNlpinUeKwxJGTg+nTKddHhTsDPe4HR2k6ozBJfgQoKxHQINcp1FJqPJfjBMaLvoo1hWNV4CxiG1IyQWPZaMxRjhVtEP4uJv2HSBbJCTZiVBKeaTip3tMxRpwXKxJN0ZMh7u0StXeGcRTvLWqw8yX80Y9Da4+uDb2Hv3kDOji5SjEZNJSj0LHB2egJMkUiG8i96D1JMqRbUMzCeSh85fZPNSs/ZENXRVx2q1pJ5FbHRnAmXWY2N7A93XGNEhbE1ZJCwO9+ltejpnmRzPkL0JtmtjRp/O0FlC0y0ZJD1mS8diuSLd7bOzMaI9PMYYCF5gWzg8rumVCWmqyMqCurZIGcNZ28ZhzZBkHdI7yFPY2Kbdhel0wvmrW0g8xydTFnWHFWA7IAS61Qm2iRkpzaAlYMidYnHnBt3xMe3JCRNpeejxtzDa2KEYZdRUTJxlvz6lwZHogp4aYpc1SzQrU2PrFdmyoVU1VVcjk4StnT3y/gY3rl2j7iS9fsn587sctYfMjppYPKynGISWRAcm0xNeeOkrPPjUN4CqaGg4e/kiW9/5x0iygkw5TD1ntVpx6PMTxwAAIABJREFUfHLE1tlL9DZ3sOYO9eyAT37wN7n4trdwcHpEEzKGZ/v0xz3aribLi4gytw1ndy/ynvd8C1/6/Ge4deMl0izDy3XArYwd7ThdEmupc9zkuhESvMM0LX7i6WdjlE4QaQwsRQmMtYTOYoidcYSks9HzbK1fgyxCBEUJ8bpG371zU0BQFDlpGpHQIUS/dJwSRaWGUgIRUyEQKmFVGT7xic/StSYu4KWK00EfCYISRaDFB0OWxYJJqYiCtw6USwjOE9bHMQI4axBK3A8FlTIGt9+bsFnjcNajkwSlo5fJOYdOJMY4bIiBvDJVqJAiRJzcKBm9aVrFz8FZFzMeiX4PoRWp0CidY51HJpKkp7Fty51XriNTyNIUKwWtcaRlgVCaICRJnkUI1rChWsVYjNY4ZPD3fcxSSAhEqmXwMcTZeaSK7z9O4hxpmqC0Jc/FelrXkWaaTkCmFMqD6e6BmsQ6myxGmyB0JGTeyxeSEaQT7vucA2BBRGmyNYb+IEcmERpmjaWrOpAFUhmCDgTRUVdL9m8tQErSXIFT2Fown9Rs6pI0TbAWppMFg35K0ldoncQ1zRLqrqFrOqqqjsIVb1lMl1HGaFrqqQTlUUGS6oRepsnOjJjM
5nhn0EIiU0nRS6ibuA8ONiEveuRZQrqhKJ1jdbOmnghwKaNRyaifc3KwZDFdQt5jPMzZGeW03lMfLlmtDHVrY8aUj5P3EAQiCJzxLEyN1DKe14PHS03b+DVETCG1x3QdwQfyvCAvEk5npzinyLMzDPp9xsk2J9c/ExsZIpKaffAIH8OTHQIrYnZqoiVgaOoZR8e3CSrBtJI0ydFpD52oGO1hogxfyZTR7mW2kWzueYosY2ezT1bcYagHmK6lXq6wtWFjZ48L5y5gQ+Do6Ajd32L0xC5SeoxtEbIPwjJbnqC9p9Q5/WIYp5pKkqUbjPob9HrH7B/epDYtm3tnefDhR9k7t4M5iuXJcjXl1sHzlPkm2zuPMBytSPOXqeY1baUYDcYUeUqipkiRk6U5RZaT5yld29LrDVgtWtoOnJJUC4dMB7RYjqanHM8CRXmOZlbj2hneGZbTJSaJ1OisyBj191iII6yztMbTuYAXxZus/uL2h6K4EhLwGlM76mVL26zAGRQS4T2mM8yOTijyjMVixXK2REjBxpbEZBoTolldBBFhCUKsDbEKZx1JGjssToQ1sCBE74wUSKlRUmBNpMHFvMiArVvKfsCraFgOnaMsS/JeStU6pktDUSi0VmgpIpDCxvT6fq8XEfBCYILFOUUQsWsXfBYJLtKvw3jBuwiHkIkiSVJUktI0lrIscF2HcY7gfJzDc88zLNbv5R6IIX6Wzju0ioG0xnbrEyyYH3WEFtLfkPQ+kKAvSnIl6E4l7TF0f6pj/LcHqEpS9jLGgz6r6yvkk4L2uI0I3bUcACD7bEb5r8s3La6yL6fUD7S4bQ8exi/26f3zHt0TFnFOUj9U0/5o/Yb3/bpbgPQDGfYJ++bFFfEEmv7LjHAm/D8vriSYdzpCPyA/r9D/JH19cXUK3BSE/8S96XQNIGQB9w7D8tEZgRWnD52y/P74OQ2v9Wk2v3Y2NnmoYvJQ9TWXv+7l3ZGIpcQ9EgsT/bzCVa9/Ia6T0L3+suJUk88U5ptqzE9YyF9zpYDuhy1hIhC1JrsjIyjCJxgb8J0h0RlbOxucbiwxqUE9r1i9bYpPXiOLVBCeMtT/3pLRb4/oP9Nj8E9S7vzoK9wd3yHN8xhEXRnaqxXh3rQtgcWPztj45RS/n2DW/oThRo+8l4EC6wNlUjI70ixclAMRakwTkELSNY6makkzycWLu0DHfF7Tmoq2W7G/f0pTBVKdUW4NWWlNKyzO3pMpCRINyHB/4QeCvEhwwVE1HZPFgnKQYeoGlAXpSIuSzTzHzFu6xmGDQ4R1cUWCSlKUDTjbMCrH6FQjnWdjY5tZJRkNh6QJ5GVC/9t7iE9Y0qSgV27yzqfehVl6TuoFZW+O2VwyW1SYFrI0QStN18YFcpql1K3ldOqppj12tlPqqsbULabt8CZQh4TgHN4LqqomyXKSvsYDHS2FFCzdjI2LPbqFYrpw6PkJwVlSJSlSTZ4oJp0l30gRDtq6pTMZI5cjg0R5hRaKbhlwSSCYhjxLyHoZwgjy3FMvW7q2JRGOVCYQoNQpeVLQbThmm3vsXNxi2Vb8zvVXSGTGUEsGiULphO2BYl6mHB6dkucpo37Kle1tRL1gmEnkIGGxmLF3/iLjrQ2EFKy6BRNfM+3m6EIjtaB2gclkBSrHJh3Ot7GpR4fFUSYFRVmSl2dYLZ+lquPidzjYZZZOgJj5pghIqXDOkCYpk9MjVl+2XH3uCcrNMd42bJ07x9mH306iJPuLCccnU+Ynx5wcHyP7mySDMdnsmGZ6wrVnPs+VtzxMdTrBqYzsEmRlwcq09IseMpRURx2JKHns0W9kejrl9q1bSCmRArwK60zHEEl3YR1Uvw50lwBS3C+w2oWl6fUphkSIDZq2MQTrMKaL0yup8ELQGrv2Qfr7CHZEjKX34Z4C5NVjj5KaoigjDGRN7I0To3AvdeL+OTcASiUxo+yzX0Bpi079GtCUIpxHq4Q8y1Eami6PyPcgkCJBoLHOYTtBZMMFlJRwT82Cjc/rRLw83PucwBgXzxdZFgsUYjCy1CpCC+Ra6iQCQqXodbyI1ilt0yGJpFzvI3xCiAjKCAKUFCRKI7whJJKQCpIkMFvcpT8ao5MMpRLwJtJ6if44ncTjUTnQeC+p5tGz7WxcuBMUSmvA0dQ1hOi3dtag07X31gYCLiLuZYJO4jQxFk0RBiOlIpEx40isqZsRYQGdixM57wVhLZ/UKk4dg1OxUpcRUCIQWOMJGPb6mzS2XnvaBeBwNqM/1JDH/ST4jtvXF+xdHJIXihAU3gkmxw15XiJRWG+oljVt06JUirOCNElJUke1aFkuK5ply6AsaULL9KTFWodpDCrJIVFIJVCJoN9LufLWt/OJT36W1XwKIZBmkt4oxTuBF5YktZRZD5GAKDWJkgyXOZNbltmBoZdmjIcDTqYT2mpFliaossfepTGd9sxJCAcLuq5a74HxvSohkCoW7NZ7rF2v+QhondPVIRY2CvCGVYgTP6UStCpiEatzsnyX0WiPpLdFnnyJxkY4DSJGBkgvkevfeaReQpJrTKg4mb3C/NqS3d1LbAx2EErgpMBaSVJkMadYSoRUKDmgGG3RK3sMiwGlKHng8Rmz6ZKTg0NmpxMGZ7bo9UY89OSjLBYL7nzyM5zcrqhXlqwvCVoRbIKQFhM6vBdk3sXYo5DgScjKDXbPXaarO27duIZKFGWvz3CwxebOGWbNgtn8iJPJEbcOXuLcXo+trR79LKduFkxPpzirIeQoqdGpRZCilSZgaNo5xjT0yhSdSKRLsCahXQUG2ylVM2M2P2I5S3ns0ccRq2MILcFUVKsKowNZ2kOrnGSwSa9c0SwFdQOdEQj1KifgjbY/FMWVbQ3TwxO6ZUu3agjeMShHmLbBe0+WKaqm5taL10hkSio0eE9qAhvjEYv5kqNqglI5wZr7B7bgYyp7OUhQGrQX69wrovxPKbTWdF2HlmpNSAs4Y/FtR7OqcGmCA4xzlD3J9tmMZd3R0dBUC+rlBmUvQ2gR8dRZDyElzoeIeBexeySkjx6MkJLqFJ16uq7DWYdcI//atqHpHMjYHXPWYtuWYB3BuVhcvUZ6J0T8QQQdCBKEjEnmIbg1nUWi0LGDxxL9s4HslxXlbg/RyxkPNPr3AuHLhtk/aLh46UFkY0mVoh9KLv/s2/mNv/IJ6o03EMolHnIXz4qWe7yQ+9v5P7vHyZ+aMvuhBdJI3vNX3srLx6eI39PsHu7wzN95bv3lr+/wRntiILJG1p3Ne/83mK9FmH/VJtJ4Qvh6RdDXbAayHyhof7nGB4f1XzVR+4hA/JQmfNZA8nUe+1zA/lrNb/B/fM1Vn/9zL7z58wcQFsI6jgh4HTSj+MWS5NMJ0388AWDxl79WZlgOor/h9DWXPfihEbvPlvzuv7j1pk9t/qxBfyPk/9mQ/mCDk6OKzrQ442mN5U/88H/Iz33L32P+m0tGPz3m+ONfhSAtA4tfOQZA/54gKwRexgLqnf/sPVy6dhmVKa59+Tq//4EP0m6/fiIoxZrWVDu+8OznuXT1DEeHS5J+y8aeZVj
04JkvychyUUwqvSGKtw5Mgs/UEHbTLGGm2UFaR5gjIeXhnhm42861feyxXP3EmWxzy0cD+vvfpFWCw7PrGL+g0VjszvZnFvjK98oKQ0OUp5+ErQ/TuNmpVMX1LBC5pMbLqU2V/fS+fFJ5j/1hLr317HQ9GeXMuO8Eo2fXwNn/71T1CEBZ2XdxlcNeSKqy/gse8cpFhTjorjrtM2Is7g1JUtxmq3MIRFYChLw8rdy5QbNPX7Irb8xiRPfHcWPCisQUhojjXZvDUgSVKWX9Rn4Zfn2fJjz6CzOE/84WWKNw4hgmTvEue/az31gxVMaVl4asi5iLKirbnjxqd+oJhuQ8sTd58Vfjn989Dtx9xbR5T4h+964kctORAwuDhh7vUuKTswEi+buLnKBR+YIvMarA4K8qJACv3Df+NfjP9scvV14A3AR0c/v3bO6+8SQvwjDuzV/Y/wrbAWm+eUFvINhuLzA2zbsOHTF1B5oE0/HDL3p3vQeUE60JTDjCRJnNzmH3YIehWmXnUeofTYeXGVx197lBPbVrnpLZqf/gefhaOutZ4dllx0h8/Tf1lgA0hfqSm3GyZeEtD5Uu6qcFKc8Vuy534j1qCE5rQkqNACP1RkhcPsCu1BHpHTI/lchv1TTfhZxxPSWjt8rxzFzJScFgew1gX1eZ6f4dTAaTy7qxZa4ZQSs9LJns7PrtA6AGvLCdQaD9/3gBC/Ep653KWFBfSRAmGNky2V7nSUSMdvU05i1ZMK3/Px/QDrSdLc+SZl3YSZmQnqExNIqQh6AwbDI/Q7y1QqgTPCLVLmZx8nUhXEyOldSAWyzsT0+dRaUwRhDWvBk6NF6HwpMXbo/KPw3TMlJ81cgOQUcDRC9VlZHGJsNsIJe1ib0UkTrNBIoVHSIktDPMhRoY8WAdpoWqqBLErytGSYurmSZS5xOM29yrWTw/c8DzyPLM+dYuAZ4QkPKxRaQ5rl9AYDljtuwf35z36bYSUjQfONb38ZrV1QYAAtLTISFPWcQ2/bzfHX7UGNOKfauC7UGRzqaOTjGTJV6NLZCIiRf4UYyfCX2sEswsBn87pp2humkUdP0u2sEveWGawrmGpXsNUM6ftEzSZh7pQnL7thFxd94WLmT5ygM5cQBQGcKLni7qt45HP3Y63j3gUVj4nmOK/7pddz2+99l95ED6zA8z10dJrQ7q5L4SwE5L/w/5I4qGltvs0Vf3k9SirUnIfV+gxZHQllXlDmOWKUmD330y+gN9Plhg98DikFeZpzT3gnee3sKTXoDrADBxmR/jkbbRwzXOyADPmtOz7E55/1Dzxy4T4Ovzdh8Q0lesZw47U9PP2DzzyjgZi7hJe8+3e55Q8+yu7fvBfvPQ+fMc09dwhgetyD3GD8EKsEulKi7Sov/+xPsX9uG9985Y0Un3kIpjLKNz3E8FV7SQIf11KA7mqV2RMbWL9mkpf+geX+193BwZ3H+cz/+DJFXjDMtIPWnhPMPLj7AHv3HGJ2cS/z6YBv3f5tRJDTiCpsaKyBSsSgP0cy7KK1plJtcunOC/nOTRG9wZBNwwn+4bEv8o6db6bzzh6rbzgdzFkWlKB1S5VNfzhGd7IkX805bRpeTJcc/rsTfGzsQ+iyoCgKCuMKUw/9yr3Y1GD2GNSvCtIvFeit7h7tmpL45pNU3xgQhjUqY3WUqXC0oylsA0TJIBly356HWVkZkhY9bF5Sm6jQJ+WT/+UbWDMqgGCw0lIf9ymP5qwugh9O0l85wvHfXSS5NkWUgmrDo1oJsEKS+8CIgn/qD05B6vbxjhpiqobNfzNF0dLMPW+FePkEjbExNn5hhvhkypH/tsDuP9uHFSMxB6VQEspAOzVaIYjCgLJSYNszpLHPardLfHKZY/EQTlTw2xH1wJCrDrf9rw6q6uA02i6jPEgLw/Z7pmk8PYb2GgzmTyKVoD7ms/H6Gl95dd/B0KTjH5lSU0pN+3UhnHRGqdJqxiLF/PwhFuYF0vMIGpMM1lzImq3nMzHdpNK06P4pnv+cXZycP8VCP6ZQQ8bXb6M3GOOEWuDY/BK//eEPU/7tPSxOncC/RVH9HyFoiZKWUlj05Zrk9ozougDKUWhlcep3BqSVLlheEVSfG5L8Y85pQySBI70Hnk+uzok2FUS/6+HdoqBpGXzDJV3mvRbe5ozMX//6N7D7vj0s8CjIBXcU6goiEMwcmOK6D13HLR+8hdmVOYaxe78VlqR7FK3d9x8sBVz+axcRD1Iu+/QOmk80uO/tTrz4vP8+Tu+alMEFmkoo2LJlK8ejQ/TpIiyoYcnanbu44NS1zD76NF++9C8xDwHb3P5ldmqSOwdQA/XuEG6SlOKs8EX2/tg9r6rh6X98GiUk+XiBrmlu/eRtWGuJ15zlf5w7yueU9L8+QL2wTrtWZbo9TrjT59EPPg5AmmcM0wQrLZPtKcC4DubOLt/75H0Oy2EM2ViOl/g8uniKUgnaU+NEgU8+nvHAb+8lHctZ6WTY45JhUqK8BFO6e0imC+750CnKqjkTqPoDxTP+eDOPvucYJ16/SHxRn/P/vMn4yzTdTwzR2w12i6a4uTNSrQNaZ+/LSoNXcfzpsjAEsuVMZtGoEx6T71hDWFYY/4Nx4if69N62QnViPSdXevRijfI9qtPj1AeKIIgoAougjyfrFLlThJbKw/drJHHsCuQSrBBoHaKCcdJkSKZzpNejXjEMigF5PGDQ76JrBY/91d14r1N4vRClKggvAn262Hn6PhwsT0gYPDPh8F/MMfOaFqGIyGMDWlOp2DNCINLL8aIupe2itaHZbILWKOGRFAkahRIC5Uua7TaPPb3I00d2M3fFQzz5E19yidXHnot3b0y/e5JAufXV+WRK9S88os+GlKZwqtPCIX3KQmBESmusjf/Vy6gdbzP783vorMT4tkazZgmDAUeXHhnx4qH5nQrrPzHG2FTA5e+f4fBbO2jPkJcFRVlyOiLWxlAaTVSpIFUKQqCnYe4fFinXaCb/vkb9K1VW5s+eY+aNJeKKnLV/7OHVCzITk6kMg2Zl6STSlAhxLrYc4jiBVUPgKdbsqLCoxJnz6XSce+5QueKn3/EKbv3NW1nZ2mGqM827PvdrfOStHySNzimYKtj2yovwZkdoLpzA2aG/PkByaez2NgsbXn8By++cY/V5Ax674SRSLtJ820bEccfX+/fGf0SK/fM48YpJIcQs8EFcUvUFIcRbgKPAa0a//s84GfYDOCn2N/27V8CIIKsk+pk5xc+kmO0Flb8ZR91RRcy69iHA4GcX8b9SRdwtMbokrNUotsTovRaxL6RZa9FuGy56QHLUwoltrkuj/JERYQqNZXsmARYHQd0I/lOK6v/2SF9zjlGYZNRZsCPImB1B+UZqM1I52J8eQeEwuKQJ7DaLnRr5DUiw5enKrj2DVxej+y7PN/TfnRL8oUAIg+8ppPSxsoBRdmxGxMIoqjIgJn1JTI0K7dua5GmO51Ww1sOWZ0U4tl91HlvCTWANZVGQW43REmNct8SQ4nmC4amE3uyAwcoQlCQKFT6CZj10sEXl2rGh79FqNojCgLEL
2lRnHIFz4/rtBEqN1J80pbVYfKxIEWT4qooQvmv7ytFNW4HEtbeRJXaE5zMISu28KITDK46gdp6rWmmJJXAbt7TO2dsYVwQVIUJ4OCNYSawlC8sDOitD0rjgDKkagRTS8Z6sGSUyjAzwcKpSQo8UrpwFcYmDJBS5pjsXc/2tV1AWOQe2n2KwpcPqjpV/dW5n0zHZ9H9kFYC/6rP3N584syjOQC1xXISlq5fQwnIiHOK3QvpVjZGGVuTRnJLUKiFBJEmNpcw0/TxHG0Pn8CoHHzhCrz+kldcoC4VIJf4wYu2HNlEuaSicU2eO4cjz9tN6eIIwrmGtptmu8+TWHlbaUXDlcPLWOs8Pc25RYPTTKzwmTk46SVg9RKvC8SWBC777TFonxyj9BDnCqp+66ASNboWtj6zHCsnyYodGpUFvW494R8yOb2xHLw/RFvJEM3v8LMG01+sxt3CKyZn1VPdHbDy1mWExoNfv0PAadN/UoX+vj+hI5KtzXvTNl3Dv8+4iGwzpHVvGn5Ns/MTlWL+gVguor/O5b9v3ufLrz2F2xzHiVsLl37uStesmEbrAKouRljLRTLbqiFlD9K0mtaULMGpI+nMZ3t4m3uNrkGGV8s2nCL5UR//zIksH7qSI1zG9ECKiafzpEpNlVCp11oxVGRYF3faQ/tqUC29cz+z3nybsV8hDKD3NIO2hipKJesjW81sspXPkNkP4AVG1jpCKNdPT1GsVlpct3ZWcyfkZ1j68juV0gbx0sBXl+6SxpnhI4e+TeAisH6G+EsCDoJBUPI9rfuwqOr1VOiur9PsJa5UkqFTo9ZboH+wQVEKCu0p4UJ4hUtfCiNWkz1AnnJLLxL2Ek515cjLyVFPkJbNH5rClR1amNCpVZqozXHDXRawuLVOrtQHF4tISprPE/ntPIeYH6KRGGEaUmaG4TcNBQZj6KCxRJaAoDMFJj9qfNSgzt7blCKqc5TmVag17X4h+VuaEN9aPk2tBeRzy/S4Qi89LaX2hhplzQkpeUIG3g/6ZmGJDxOBIm0cWYaEr6OODV5J2YxpRhagSkkmPtKHpvSNjcL5h4rYalVkfRiIR3df0eeyqVRJPMvWdaZq1GlEt4PiWHg/sGBBvt7z07kmeeHKVucUMQoH+VUv8noLoixbvIWfbobSH0RYtLKYoMP1VHrj/Po4cOECz2SCqh+h8hV07tzA2NsHk1CQqmiKsTKK8CnG2jqXuEoeP9fls6xj+twyNr0SYpz2EZ1ECglskog7Fa7SDAuqR+l8Hgg97FO/QZwIicVrwonZaeMegvAAhDPVGhK5YIAEL4f8WeN8Ctc8i64rqHwUkwwJ7nYXnuq16mFdJyoDaU2vY/DXL0ZfvofPKI1ixQCSV6+4DM2tnWGiu0mMZBLRmKni+Owc9o1jf20iqJGLRZ/LBJruizSgBlac9yks7JKqg1ZhGGsHW27fjJQFLO+d48KX3suHmKwn2nUcrnuGS1R/nieffdmaDE12B901F/nUfcbePWgbVlLQ+NcGwLNAWjDaUJid9z+AHOnW9887hWAFqqNjw+XUsdTqkL0kxNYO4QSAe1fgtD38ioBJXmPqzMZbevkIpIBcuGAlkRGlL2reP452KKMscieOmV7wqQRowWMiJezlKQkpCMYjRXzWMVZqYR2G1F5Nlmlq9ytTNLYK9HkzDoWvmOf876xksDNCZplIGyN2GjV+qM//8IelahzLapCPSOyJW7zOs2oL0DT94f6eH9AUiBBUKIs9Hx/6ZIFWWksZsk9qEwuYweBTs3+dkrKJ8xy3LtWbQ6zEeraPZqBNXnJqslYokS52YFhJb5hTF2UKREJJQRpQ6xcoCXcYkyYA4SSjKBENGeFwxc8M0869ewPtlD6+bI2WGlT7GaMyGUWyHRZclYVXRvidi2CjpPSfBf19CanN07qgswrdno2sl0L6ln/QZ9oco6RSyy8KSlQVKBER+SOgH5GXK/OxxfHuKU1v3sSoX2frp7YwdrrK6tEQy7KGQNP6oyvBNCXbS3Z8STrgp+qJCnwfD9xcIVXJswx40huF0F6ylWE1JNJTJAlk8QPTiMzxHb6gIF0LKLRn6lGXi5jqy4uE1UuzQUVK0tpQaZKgI6hLpCWc63sjJd5TMfLlJ5dYQecgb8bJGYwq4QONFMb1hgS5Lqgc91E2K7q+uOJj7rnMsDoDez8Qk12YoKQmqChv8MPto8zc3sbJ9FdmQ7Pre5Rx53mGyhkOiJWHCAxffy+ZPnc+w22Owo8fqq11XqvviVcY/P079fodcMtYg4x9spw+u61LO5OiGJW4UQEE9UKOITPHvjf+IWuDP/Sv/dd2P+F0LvPPf/dR/MYQQSF9RPttgXp8hH/Oo/s04pbbgp67y+5giftkylb2a4HbXEg6iCHk4QGeW7OIU06qzurFPo1cweWqUXAlotCBNQCmwUp9GOSAfAu+TYOtQ+5TCjlvwHUnvdBJkjXVQgFFF4PT1CqEw2mK0cB2Ykdz4ufckzgTIZ14dPSec8dusgGdZ+u/MaX8nBAzSVwilMBRnNuSR4B7teoNlmaBfmlOcl1LKgvkjc9RbJTrXdNee3dQ2XrqBi8a2uwVVZOSlpiydYnKpNaXuE4aKxb0d8jhnsb+MN/So+g1q9QpBoFjtJlAa57UlYGzDJI1Gk/FLx6mtOYbnL7J2w2UoBQiDwaKtxRjNcNgjiEL8wEPJkKLQCG9EcjZOUdDYHJTACoG1PhIFuhg9MwlWEVUFQngjxTyDtQFS5meSNACpDEFknCm0cjDTUvqkRqL8Cq1Wi87qwqgj6b4HKeUZtR1wCbOSyk2MUSIolEQK5RJzoTGlpeh5vOLx6xj0etwTPE3QPgrWGUxbGJlRWzwpOFQ/jpE/3AX510bRLtj3G0//m7/jH6jx9EXHYWubuWie0EuZnvSpRAptM0zL4A0kjUpAYkuEgBO1k5yaWYK1iu1scA73VoBVNG8eo7t5FWstvq9IZ2IefP19XP6uK6nurWNMyfTaCfa+ZS/adzAAOzLcdurM5kzly81tPeKIgeeN1kBhkMbgjxbDlpsvRs6WrPinECORloM/to+Lv7adHV+7GOmHHN5/mOmJGU69eI7FLYvs+OoOUDF5CN3VIQee2nem0xunKSvdLmPTEzwt96D2S8aeaFM+HFOtTdB/9Sry2z7ikIKfKbn0H5/D7ot3M6wtMD+zGzxJ88411A5XmfZaTOyqcN/Lv8/FX7mCVOasbO5w2ReuQlX9kUeHQKPQnqXe8jhk9zOfLBHcNo04mpG9bBn/zjVU/vESVL3F8I330L5pivC+GDO8n5P7t9J9QUZ+TFHfPc7EwZKwXmVyTZ3ltKA8f4XBCzps/fpmhqc0SRSiq+MQzCE9QyhCxhuTbNm2hcHB45RYskIzGKZo2wHpIS6yJLWYE80FdnuPUH2oSrJaReZQaIsXBeQrJakw6E25I8abAP+gjzok8aVH0w+4fMtlzC6c4vjJUywvdvF9j0qziVzUFJ0h3hUR/oMafOWKJaakXauzvKVDz+vT9xOO2AJbyxFWohsa4xlW1nSp1Zr
YwiBbisqmGtsObeHkUY/xsbVYPOxRwWB2yN7eSaJajeb0BO1Wi6yeY/cJ1H4PFSjy7TnFZIke5pBogjsiROK60grXTs7ikqAdMAhL8maBCAXV59aJF1borRQMx89CRhs3RPC4oCgtYVCn+6ZVzMtjknqFpb+Eo8d80lji+wHCV3jSZyzwCasBsYFu1TB4q0E+LRn7ao3mngoigKQoyS8r2XdhwrELOuz4zBiXXlJha6XJsW0pd+3KmDwa8uJ719D/Skl6EGRLsvyimPQXNWq/QD0CCIHVHp4UKGkx0lCYlCeeeBypC6RQCFWhJKPXv5YLz59i3ZoxJkKPdNBleqbJzNQaLjxvkks2Z/yTr2h/L8K71WfJ5oRKYgHvVoVdlJQ7LMKOioJCIBKB+rYk/0iJrZ5TbbZAjVGFylm/Kymp1QOyytmzMfgLgVhye6zKPaqfCMkXS2TsIzcq8vUZh44u0unHyCJi/PE1HH35HtJdSywZDwHoEf9vZnotSd0gsxVqszUaY7UzyZWQEr/RIC4VOo/xjysu+cJmhuf3ifMCTyqk0ER+lbS3yuRNkwxsn5PPPMYjP/Eg6e0eQ9GhOmyz5sA2l1yNhuiBd4siLxVMWsQE+ELSuqeJzlJKKygLDVpQvOdHQf/ODplJxu4Zo3diQH5Jjh03qFsk5pKcopEzbLmua/37FZbeskq8KUOtxu47Kpw4lr8U0lxQ5HnimopCIAOFihTDqZg+Q5AxeVmQpDG1O2tUGxWyMiE7L3Gc6GaF5pEKrV6E2Co4dM08E0+1ie6WmIWSUAYMygHrbqpThpb+WE5WGtaeV0U8GHFiWBJXhvDsGD/wRoqAjm4w2JrjhwoRCEQEYV2QGXtG3l0qQWsiol43GCvxliP0P1XJvC61ICQOLKkuieMezcoa/NBxoiyOO52VKTUvQiHIigyrCwe1P6nwDgWEXkBerGDJMKZPmnZJ45haQyJ9Qzgf4H+pRmfXMuK5FmNyjHUS/hYBYxqxJJB7XWE/qgdUHgzx0WQbcvLrM0xh0CVYI1CBAAXBCYVaUuRArx+TxClh6GE9Rak1xmgqvkcURHiezyAewuIsNWUwySJq6LH+b7egtuWU+ZA8zfCFpPlnFcodBXJBnCkYa2PxHxXYSUP2So3vK+blcZSn0XXHd7SpJclT8jQnHihUdJYeZz2BqVnSWkJ/UNB4oEJN+oh2jhw4v1jlBYSVOn7TErS0M672DIzW99TXGhRPC4rCEvgugA2PhXi5R7TggTAkQ6BQhAsKby90fqFwLJvRAAAgAElEQVTnvFyVT3ggAAHx1phslyFPRv6z/4+7946y7arvPD9775Nuqlz1Xr0cld4TCjwFBEiIIJLANNBugm0MqMHGxjAOjMM4MHgwHrtN2wY8DN0aTLBxIhoabFkSioASynp6OVaum+9JO8wf+1Y9ycDqnjWzllmz16pVq27dc8M5++z9C9+gLE79YHJ1/rcu4lD0NPnOlF2P7uTTH/pLGmcnqC5LelNd/vG6L/KyX3416bEuc687Q/sn2tSerLP89gWqT8eM3T/iO6nW/UAnLL2mg5nWyLYkmA8pzsuxVoAV/vd/Z/y/FrT4/2IIpXCBwgoQpxSVG6ag4dBxgS0MrucQLx3F3dNGFzmyFERBRFEYxK810G/v0f7r47Q5zlPDrpATkBRQqwVUG4Z+x6Fzx/mbBLfKdXVZALIso1ZLGPmjofeRGHaa1rIgIb3CoDiXHLkhb8dZjRKKda1J8E2SNWE056VnpfQJxTmXe0H9j0OCBwWtm3Na/21tc8+HP/j2pAG0QLqAiemIY1JggOaFTe766J0/8pze//hhTj9ZEAQhKqz6IFY5lBIkSqGNoBrH2I0z1K6NkE/3GTkimRyfpFatgZAsdo7RLVKKsoTRiPHn7eCiiy9jkKYE1ScpteDksoRIIFREFFaoVOpIVTK6UYEzlMZQaIEIK0ihiKLQqzDpnGrYwOEVZpwGKSxCFGgnsCgvpBGHFIVBKkegHFY7gijCWo2zCoGXN6W3ihABUigQmk2zG5nYOMPEpkk2bB/lM5/6/BCeM0ym1jyXhl5XUkmCIKI0hYf4WUvgvJCHEBH9bhMrxpjaeD4vuP41nJlb4IL+Dbg7CmzZpVoZQ1tLZnNKlzNRSXjTte+lHXU9bOaZ3jD/T+6Nf3WM2Z0x95mHmOOhH/o6Ath/ZDf/7lUH6K62OVI7TPpzLcy7CxSKgzz1rCcbDGpoO+BYk4mHRz/+wJqEhv/fWtsWzwoTIkDbYdfRnuv4lq7EYHDSoWMNg5xsZRlrBEXi36d98gRJU2B1FyXOMZ7n55vw8FEmaqN0uimoVVppx9PdnUbZiJX5FsuLqwx654K1SlKjUZ9gMOjz6bd9GlM3qM9LKt+OqQ19jYq0B33ASj770N00+33Kt5xl5a3nktnz3vJc5G3baR4LwQiOn16g3e3T7w144rGnUc76AgAKMTxn+UzCLR+5gxPXHkesBkzeeDng57JyPVTZBBwXXLCVrcE0oqv55h1PsfjRp3HjlunbJpj80B5SnXHk0aMMZIPBpOdi9otxrn/rz/P02Yhb7nmKIGhis6fJsogTx7vccu8inZVRbBnw2GOP8w/iK8S1UVxUcug3DnNy5gg4wa+7B+Da9dKOX9bED4ckPXOcdfB78kOsSen/sCHWKlXPGAtrxHf+NXfN4oTDSUfxzYwCX2Hs2B6n7FnuFvf+wOs76Te9ghYdWpx2IJ6xseVAz/WZV8vPvlm8l+azHmvbITRtCCn5zm89+Ywvcu64Qg8YTRqMhhVW08Fa05vuoMeR0ydJ+wHj4zPkuSEMA2Y2bWF+7hTLywPK0jKopmCh8dMVBsbhapqKEJSUXPzH2zhy0wLLjZSTiy3s04pN1XEGRtFYCviJ39+MPa9CpT7C+buq7JmtcMtLj3D2AU2GwChBEgt0GuKKAcL5wEqiaYyOE4oCXIG2fYyRfPMbt/IN65AyoJI00FKwY8+F7Nqzm517djG9ZQ/GwtiGScQMzM2foCbHMKX3q3EPC+IbKjhnPOdTgtluSW8vqO9OEM1z5AbnHP37M2+bISVpmjI+ViUMQ89rXb8U1nfmrYU0Ryc1lFDM/t1WKs0GT33iYVbPPkZztc/ca0+w/MuHwcGujzyXTd1JQiNoBk2+85lbWV0pGAwMtVM1Lvng5XS6TXThK+ClNhw6u0AyugFbplRNQTQW8aWPPsQFb9tBX1u0zlhafhIdBcwv9FjZ31+fD0/+b9/hSfcdPzfWFOXXlIZ3OQZf9/NXA1pCJqBtnlEcG8KbgPU59KwxfK1ypOT7n3zEH7A23+/ynYIVVlgZcj/Wjjn+SyfXk93/7vgR921b9Gg/g+/lcAxYYn7t+cN75Hu/9Dh7BzPU766QpZpc9hipNdjw96OM9gyFUhg1iSgL0tc1yZ67wu63TLJlyzSVoIZLDW1a3PGlI0SVhLAa4KSh1csIq95TCSAMJdv2JLS7KUo4KoljNIIomKLT6+ACSxhLJjfFdAenCQYZ3aKHc5Y06xN67D7WScjdGoiI+FN1qp8bx+
0waLvKYLBIOkjRuSUgYKzeoNCG3BUMei1m3jdCaHwBQWYCconWIfOfXsJ+H+Jfr2IUJCMhRZITPSHY8iujVGxCZy6ltwQ6CxjZknDm7tNs+Vgde7ei203prkIShejCK5uCIFKKWq2BjCQl2qtSZovIikQwwDjDfLHAZBljXIkVGlAEAUy8N0EXeogAcqhY0fqDAo4Kxl6WMLN1nCA5n9pUm97zF3j8PWeQ1XES0fWCXlagC70+SYwwZBQEXYNxAWm1xEYGlCLZkiAiwczGGfZcsodB1KOozBFUFFY5VOz5uNoIglii6hKr/d6+48M7GT8zTVJLKEXKplHJ6spZlq5YZvk9HSavnmFkok5ltEpcjZAJPPDXDzD5uxsJHlc4kRPXU058rYWtP3tC7xbPZdl2eXDn3dz84U8D8MI/ei1zlx7ngbffhtACKTVj06P0J3oErYBLX3mA+++5FxVGBFGMw1Kr1Z+1RgHsePtuzn7wNNbA9B/McvzWp+h1msh+iCmf3WX7YePHIrnCWGRhUQaclFSrVcanp2j12/TyPgjB+MYpRt+9i6zTo19p0W91iCJB+blVgiMh8eUb0auOSy+pE4+ssHxNl6d+1nD/nQXVuversFqwMGex7xjq2UuJUpKi0MSx72jhfMAdqAoITRCGVCqxVzbL8/XkyBgv6R0EwbpSXRLG5AxovCImekqhQumVXdxa3uWGcK8hNA1JfJtgYrdFqtLziobS6E5JFh/MkG+NmD44woUvmeaK657DY5WvUD5DTe5Hje++9599ECJ4VpD87LEWUQyNdIc+XGtPd3bNm8Q/VUiBHOrYG6kJjwZ8+s9/k0w7AlVBqhCkwlpBv5cSSEEYBshA4YQkDuwQaimRKkAFDmccSVxlbGyKKKkQJxGEEdo5BmkHhCWJRokChVICpwPvbSY8L0lKSSUZRShPSFXKPy8IjtNvw/hkxIaZC9izZw/Hjp/0Hatn+FxZ67kNUjosjsJo4lD6oN8F5GmXWnWMIAoxStJKSw6e7DLXslihiII6tWiKdK3DGQiCOERWGrz8t25iYqrKBefv5srLD3jJZ10hy/vkZYaRCa6w3qg4DAikotPpEYSWQAnCMCSKEoQUdLtdQBGHIY26JMsgLzKsK0FZpAhQQRUlFSEByY0xtSjh5vG7WKTFfvMc/q7zZcIgIgoDglAyF5zmue5yHsrvYsZu5J/kbfxM8g4Avtb5Ghe489CmZGlxhW9/+w5ufvXNaByZc0RSImM/p90zuulJWCNUfdqbzvD1j3yaq256FUWzi1AhgioA93/mTjZ+dC+NL48RR/H6sZumpjiw7yJmN87y9LFj3P0r9zJ/+TxiRXH/t59g0+ZJtm6fJZqt8fTpczo5/d6AbqvLBXv285D8PgBXX3k5b/rQa/jUpz7t7+s/ASyEgeT9b7uIkXuu4p+Ku/nKFf+y/jp7t06ib2px+7vu46pL38l3v/z3ZNvb1L87TbcrqIguD//DfYx/d4wd//tO+j2DyCpkg2ES4cC1C7CO8dBRfd4yT//pPRA4yjJEhOPUxmPGRuZZFRKHRSlJrZGwcXw3z7+ywZ2v/i4PPO8RAqF45YHreN4Ve0juepKDqw8xMVpl387nEUchhRHMHVvin79+B9N7J4kDS3f1FMsLp9l36WUEUrHhS1vZ+om9lNpSmxxhZsSRDVY4ePIgozPThFVLXmYUuUbKCk4W5K5ECU1oLb25hPe8+3+m2V5iaf4srYVVAhWz1OyBUoxPTXD5lZcR3JdRSyqEcYQKvTG5lYLCFBirvf+ddDij+NrLv8It13/zWavQri/t4cAfXc3kxAYGtkRr3/UwjZK/+Ye/eFYQKYzgp954E0kWeSgMjpXWMv/41a+QTfvrIAeCPS/ZyYnPnibfO0yoLFzw5vMYNAvaL+/Q/ZkWEz8xTZq2MUWBvsZi/hQ2XDGGsZAFEp2VLC83qb94nPTPuggZEsYNgiJjde4MWWqJooTNW2YoOgVFlnmRoGxo8J4JeqUm14J84H2bFuSAQVZSXJ+x8C8nsTck3CJCFi7qU+6EUy3Lwl1Nzp5Imao6ttZDbnrDZj5aO03rfy2x15XY14AbSanGksCGSC2QzuGwpMJ7dgUyJI4U0aRAW/95osCSZV1OPn0vxw/dh1AJYW0z6Zszui2N6irCJEJTIlRAKB3SWLLCy14jJeW7DMXvDiPXZxSM1njKa/cBDKGFUcSgn5Gm5zqDa0q5AsAJKpGj7wzbdm5j06VbOCge4fJLd/Pk732R5e0n14978j/fSeO/XMXmBzYixv1j1bohjCxYjeyvQDTBmqqJc5o0O87YeE7bLLKqS0rnDzz4qROgHOHhhFRVmdkYsWO6SrRtlS4DcHDeT19A73RG+4o2/Q+2qL90lsHnF0m+WaFycx0XWWpxgBMB/Rt7tN/aYur1m4mlpChyBs/r0/1TjybZ/c5duCckeVkiraQ7SOn911Umzo6z5wN7ERZUGPD0Rw6SzxY85+2XDfm3nhaAcwSJ5J7v38XVv/V8tt63jUqlgqomWOOLtUJIVKDIC02pHcZ6g1mjS5RYO++e8qBUhKSkUoE86XA4eJJef5nlftevY7sECzd7YZ1ed4AcQBQljNb2cuSRRRoTEY3JCiM1yVhtjNt+7gFWdnQYWYm4/IUbGG1MkC9XUVoyu3GUOziClBorfNfn/J3nIWxGu5KS00MKwUgc8f2HHseJCk5ItC6RcuCRPMZzxceiGkVDI8MVXNLHbDcsPzjP7tdtoWHGKbVjudVidqZGP2ozPiPYsFMRVWOy3NDu9DHaEoYVgqqhX2Zk/ZLO1SmrH+wAjo3vnECddBAaZKyohSOoyHcICy1RgWT5ZEbzD9oMXt5HdgVTr0xQukFU01THSqa2J5wR0OvEVF3C5GSIM5bxkWnKMqPU3messAFxEnnEjayQjO1kz/XXMtEoeHz/bZyNz3DZ817MQvt7SOVthKz2KBCJ581b4XAhLD+Y4cahckIRhI5Wc8D+y8YoJbTLDjhH0WxRZiV57rAyYOK8UcAXAEWokI0YF2TkzRRTDSgrAc5BZCOsEywszpE9OUCM1JncIrAaOq9I6b4kBQGhGCWolBg0RccnK1OjG6ksjjHoGFbnNPWRiLO/36F1TQvVDhgdmUKrFqfeNU/vtQOPGFOO1KxSlQmVKKEmp4H22uKyPo49dZQtt28jvBq++8m7AZg7dojm7DKVpys850WXc9uDt2NGNC5w2Mhyz9N3YCsGZzVWa6wBE5lzLbzhkM4gnMMUgvyYYvMle8k7znOy/wcKGz8WyZV3h7Y44Vv5yViDzBZElYggUt412jrydk6eFoBkdGqC5l8sYZ9TIo7GBGlMVQh2z0wSjncQ446nJWzYHBAEjm7LkA0cYXLuBEoliOIAhyHPNFEUIaWHHQwbGkM9foG1ay70Av0Kg6s5kvcE649LKdcvjsgclA4nfQKwprSyluTYoRv24OdK8hdq6FtGbgIcqMCbJTvlP8d4Q7Bla8T28+usthbXOy0Tp2vsv2UzQQCBiAFFezzje694ipd/7TJk6
ZM0iVeZU9IndEL6xMZ71XgInlKeb6SLgjgOh4IPUBQlUq118/CJkpQURQE45EBSvTHBmQBHgDaWQhcoGTE+solABR7SZxKKLETJAdZatJU4EWDR1JMqurAsLTc5fPgYnXTASFUyOZYwPTNGXM0pdRffCnQIEyBFsP43CIyLyQb5upKdz2YlurBolyBXJXmZe1+0IADWDApj0jQlCIKhV5H0MrHOe4f4pcGidYlSIVmW8dhTD9IcrNDv9nHW8+MioRBSI1xEYaF0jtEwIagKFo8tsXR0mQdueZQoSSj6KUXepyxztA1QhYHA4BCUWtDrpMRxiBCWKFbURyqUZYEuLWFQIU5ioromlDXS/gCsRgUgnMASgpSo0FsaBEoyrTbTeVmXEy84xs/V3+mv/xCuapuOiz58DR8vv0ZMwLF9B+E/+nn/e9Xfo4a3CCjigtVXrjI3NcfS2BJ/+fq/RKD4ya/+Ox68+Ps8fNGj6/fUofc+xYbHZrn405fz6Nsf5PEP3IvNSu+TU1Ug4NJPXkH4/Zi0HNDLz3U3jr/gJM3zWsRxRH+QsrqriYscbsKw/NnT9JIFdj65nQ39cfL3tddhs8UbU7JdA8a/NbveIXxi8yH+j9d9jpPPnUdPGBjmcNoaPv2mLxHIgOXRZ3PlvveOB3DKUWzoc/Dj3yLf0sWFjsG+Fgf/6tsoSnr7OuSbB7QvaWG041ig6Oz3AZRrGDqfPIKdLll64zHUIIDQsu3XtiFPOfpiiUot5vU3XsafhQ+RoWnub3Pf7z5KEB4iiQJWZ1rMZht54wP/nvHRSU4cPsrc0cfIu0c4LQPeuO8NSBdjStijC0Ld5+nlFaamNrBn93mosM7xUyfJ8wxlBXEJJu8yVdnLlpkJ+ukcpxdOYtIug65BW280GSiHCkKqYZUNk1PsnN3OxivPp3IkoWK3MG1nyKp90jRnyvYpiwJ12tFaOU3zZEZUGSWu1gmT2Iu6uAAhA4RSCOmIhObW9/wVR/Y9ReVInfN++xLq9TqP/U/3o2xAmMd059tIZVCELO9f5dF3Psi1v/BiFAohoLu9zX0f+B63/i/f5Ll/eiWTT08jVcConOLlH76RB9/6PU4dOIGNHWf+0zwHvnANc1ef5ejLngagfzbHtC10fJQZpxJVTDLI+5higBAlm+p1jq906OucWAU0ajU6SxaTOwZXpCz85iITHwgpiozSSqyG1VYL7Sx5WWBsgRkmV4GQ6HJo+2AElIq8p9Gl9V2LiqOSjJKXhqL0pp9LK4brr97P6aX76duCtL6B2ljXGzfGhqlNiue/tM7BsyGnT7XJjSOJQYZVjO57bqqVaAMYsBic9Ot3bkqUiokFaByWAtk/hXCWhbcvEIwFuI94wjp4eLW9QKN/2xD+VOi7L4GDKmt1wmd15NfGGiQeYciyjKRIMObcvpvdDOojwHcELgnIB4bJ2QmOvfQQh17jOz8d3USHOUSOSifm2r+7nNvecp/3JcTD2gEeecchRh+sse1TM7S6TaojgVcxA/SE4fQfzzN78yw1VwVniEd8Nf38v9xOc3+H5kQfJTSljjFCoJ/xddoLfURHEeoIEkf6n1awGw35qwe4ccfoH08hDTTf0yS9doBwEOSCLMvX7Q26+LVha7qTsBuRZhlCCb73sXsx+0qiswmjcoogkAgpiETMyr5lnvjYI1z+niuwWoBQqEARV0KEEMTUUUWVwcBQLvaw1ntfrfGHYcguXivoOkspfTFH4Dk6Qhms1mShQkUJs8lebj91AhMYkiRATZ+rmCVJHSVDOu0erfmMUGuarxywsh3GPl5Fd2Dgclzg0FqwtBTSbPWwaYbQPkkCaMxGiDwgECFkDhuW67woaw3tbpvpmQZlqUAEKNmg1ylxsqArQAtBpALSIkeqKo07JXbVsPD+lHbf4WSfoOKo7io5+Vstdn9hC+aBnEHeJakOsE6QDgyDa/u4l/VJfqeKqqbEsUTWLK7iP4samrdpV2KMZvUPz5LtzJHfDQiVT0KSEYWsgquALfFKqoFGEWG2O47+9hIudOSpRQ5yVD2nPj5Nv5eDKAlCRRSNIYqUNO36PVlGmMFZHr/tGwQCFswR8qtS7vj5L7HxwxWcwVv6CIs2Jau/nxLeDfGXPYfcVQHlC/nKQZQ4ziw/BZWQXunviSAU9HLnb2WhCGx0bsI7sIVjpa8p0hiXC/IVS3eQUa3l6NKSSMno6DhXvugGjp64l4k/G2Cvh97bPRLi2AdOMPO3CcEqnP33GVvft4VwIWDh+aeY27NA47dmUWXVUwwiX5Dp9zJsWVCisZVzFIMkiUjiCDY5jv/qGWzygxSL1soZGs2Q8QfhJe/bx61/8jixEIRCkG/OOfiZJ7j4ty8iGKpUFiMlD/75QyAgUiGVMKKICx777CO8fO7lnH7sFPfu90rH9XpMEEp6l/ZofeIUm391hjQ3IBVKhj/wWf71+PFIrnCEocAGAtMwpG9sEX1zhMCG3rdAG/I0x7y2izgoCZ+KqNYTll94BmIfSCsVUh8JqY5YKhOO2oiv0EzPCsASxoI8heq4fUbBzW8CURRRFgYpQ4IgxDnI8mxY6bFYZ7xBmxAE/yzRNzrM833XxDn/m21QvFQTfV4i2/5b+XfwCVp2vUZ2JdF951qP5QWG4nqD6IIMvGGdkOCmYHCDwQWw56Ip9kxOMzpRozc4t0FV0oidpzYQBoqAiIWZDvM1T/DcfnwSlfvkSsnQJ1mSdRENIYb8oKER7FpSosuCMIp8MiglZeZNZNeSqyAKwYHWeij4IJCbJEIrtPWu3dpKnEuYmdiBLh1ZbiiLAClCIPJJi5M4qaiGkiQMWVlc5fSZBeYXF+nlA/qhIe9HCFmw77JJSlusb5gSi7P5+rWTEoTsQaY99FIK7FDVLLKaOCwwZehnmTOsGW/4pPhccOA3JjGUO1eeByYtzgw7d04SBAH1uiKIm8RlF2cVOG9SK6UDFBiDNQZDBRVMYNOMMs/Q6Tx0FTZP0WXuhThUjDIWZ0qMczgriJXFlWCc8UmkrVOWA4wBRAw6xGQWE1bQrvDfX/jE3jgvpy6cwwh/DfcUE4yc3MnywQW67gQMxTEclqAdsm/pEuazgxhT0K+ssu/BzRR5ThqfhbhKGPpAfkM0xobTE8MCgcAYTT1KmHQT7GrtYUdzL612i4XFOaLHK4yemWJiZpZKEBDFIwDkeUYbGHtiCtURqAr0bYOJf5whLwss0CJF2JRKkjA5v4GkWqFWq2GrBVoXbBzdyIbJjbx4z/V0n+zRanYJAsX4+BhIx6Z/2kEyERJEAUjFvskreVB9h62Lu9nQ3oxx2sMaCRhf2cWFLW9PoJ32htilZechgR4ryL8/DsJ3NHOVY4wjuLuGMcYrJmUZsxsq7D45RbyUYIOQk9kS+5efiws0tqqpnInJjjrqlRE2TY+yc9MYWzdu5KrDl/HoycMEgeCi87ZhbUYtabB0vEF0psHskc0UdcnKmSVKbdi0ZYZDnTOMjIISYAqHKCUXXriVQ3ctE8WCWl3Saq4SiDZgGGnE7Nw+SoGgZ5YpRIKLBJWR
GlamDNoFDuWN29GYMqReGWdUbGay3MGmeBv5XOlNgUWMEDHaZtSjGqUocbrA9QpSl5PmBdL2CLLUW1fYcJ30a6Ocudc+zWP7Hiaej9h161623b+dJEo49I5Hae9sc/yGo0x/ZZY4BIGhW20xd+kprvz5q5EmRApBfcsYu87rcOw1B9lbvZBaMYoKHNVKhQ0PbKTyMt8ZFU6ycWkrM9/ZRNSJMXHJiecfo0hLZOk8jNVBmeUYm3juoPUFh6kNNU53ehSlxThDoASBC7BCYKY16aUp4L16vDOCYTAY+CqoszgnwPrFtnh9ibo1gvlhIcs5itxh9RpPSVCvjZHlhqIw6Lpl/oYekZxAxjWkrFOd3oUVT+KG61S9EXLpZeNQq+EKR7uVkZeWMgfhNN4Q1SuZCeebOE5IkN7cXeDnuxLCd/5dRvR3AcUrcoqXFQRnAqw0fqOwFluzBAu+qPYsLD1DoYv+eqPK81dHHPpyC28FU1jKLxe0rrH095VrWy7mWnBHgL0OQostHGJMU93jiFL/WlnZX4dwO+noj6Y4AYsXLFLEA/LEG9Q1z+8gVyE8IomkYOFFixTjvlvpEkf/qpT0Zk0ofXCVDzTT32pglUFXDHbM0LuxDfUcIR2dC87BZTs3tAnTEHOBhgDM8333zdQMRZSRHulTCkd67YByV4maUwgnGCvHCG1Cv+ytA/pWXrxCsDekLAuEFJTX5zTuGKV6dwM9NAp2zlG5s0ZlvMbKi5c5+5YzXjBLSJRShFHA7Fe3EZ5NKHRJlqUUuSYMY6y1w6TYeWPVdTyiV3d1Zs0EesgptwrrBDZ3hDagKsaYcVtYTM/ihCXqRGy5dZYzL5ynf3lOXi8Z9AaUA0cSKsQGB8uwstzFConRQwuHquHY1SsgvME9RqAj/7/Oi3qIJUWwEBI9KOi9vkPRGEI4K4Yjz1tg6q4x1DDDdVYQDgvunVjRcSVZacFJ0AHxXETNxUBK71U9jEwJKw5GNL0DGfLjCn0SSue5TVEo0C8yZC8rsPtBdmNCZ4iDGCHOxWWD6zPcMmirsWj6V2TEd9UIHgwRzt83aTpAm2EHN3QMXleCK5HGYDYJOpdkjP63Ou6sxVlBFEZIYSl1ibEahMDfjWCcT4Ztqen3TlMs5+g8Im8MiPdWmX/ZMcR10+jzMvTAIIzFicx7svaGcH57LjbUexzpmzVlHbrRWUScUO7REMLgDQVFquEhgZor6b2yt54FFJtL2i/skv1tTmCrFIWhzA1Fz6AIPJJJeY7nINcYDPFTIfH2cB1g2rukR7hYEnQl/Rdk1O+qM7/nLNXz6+yOd9GNHasvn6OYyYb3qKX3hhYuyCh3nIP6A+TXFrjzHW7K0buizw8b6aBLUlSotAST3x0BKzBaY43B1i2tlzQZ/aVLibveRimfOme71Lmyg8oUpSxYffEKlf+zRkXVYL///9XPO0B7qsvqhhbF8/s+6baZn9v/A+ixH4vkSkqo1kPQIak1LP/mCTY8dT70vVle1svoig7uTT0aX5kgOdzwPKe1UROwSRDWFe2JFS+FPOq8aFOXdScAACAASURBVMQOi3Oa0W6EyBXFVLre0rMVh5lyxM0EXEmgIsIgBCHoD/zFtNZgTAkuABzxxwJEBvkvGcQWv3ggBeZFlvR3Skb3hig5dArHJ152Fnq/WBIelMQPBEi35qY9/CAC5NYAU5ZYJTD7BK0P5yQtxb7929g0PkE/s8TxOe6OiAVyYwAqxhg4cdkSj15ynGonIdAgh4mTxAyLV15owlrtfX+M87A4vAiDDHwSkQ5KGMpp27I4xz0TnhtXFnoIp/QQvCAIPIHX+tcIwoBe36IHgmY7o9PpkmUaJyTCWZ+QKYlSMDXeIO/3OXH4GA/d/yBCWYIEBkXOyqJlaXXAJddsIk7UemYoVYA1BilCD1FUlrLsE+oqQeClk50zBCQYk5EkFXQeoNQJHMPOFnKdM7eGs11LkgMhvAoeFic1xkivYqh9EHfRhdvZslfS71UxdgjGN4owlDhKSqcpsYxFozz8RE6joWjMVJmZSWi2u1SCBFwVKUKq1SpV4WEfAkugIEpi+v2UQmvCJGJ0Yox+2iPTeh0GqVSElQalvMu5UMoni06hdUYh+6RJBxFEFLlkk5nFPrABYzQ45T28nK9dq+tKut0+eZ4x5iw7b99Nt91ibHScqfEZ6rVx0p5jYmQTXkNO+SAlT0mmYy5fvoxLFq5A24DDRw9x34P3og30k5yN/9cOpipVRsbHAEcrWeXsC07THR/QMBXCmYRaPkV0c412p4tzoIRCFY7JyUni0QqTG6aY3TSL7nYYZD0mL5pg45ZZ3nTorZw8cZpDB09QqSTUGwknKgfZ8dfnsXnbNNWRGi6I2Lp1O09ueYTnnLmSK45cR+4G/t6QCUkYUokDAiXJbJ/ucp88M1gpyLIuzaUFkBbjNK1OhyzTLK90ycqCQhf0lld57hVb2LtlltGpCcpKjX+55WHe9tZXYc2AIh0wWRvhG5fejYor7Nm1gYv2bEYPMq5/6IUsfmVAraJ443+4nkI32TSxhYceOcnhY8scnjzKWMPSLB3BtpAds+dx5HsnWYqfJElCdOHIOw62BnQaTTqjC8xHEQ+ffooL929EJpqR2To7n99A1at87dZ7Kd0yQRIhtkZEIciVfCguIzC6oMgFYqpGKWKazQzHCaKRmpf2NQatNAMBZsSvX5KYWhghu96U0ugeTpc4YxAuosxLTFlQVrrc+otfptKqsuvr+9n7NxcRjEGeGaJmzPw1p3ngvd/lur+9Eaf9/CzbhngxoTPZRZQRQihUFnLxJw9w4pWHKa2m0CUBYHRAWWZ+fgMqV1z5oRegM82muS2MzY2ytHcRkxmEMyAMzlr63QFaFmDXHhdUx2PiOMA4jRUWnefUq3XaqudFhpBEqk4cRjiboa2mLAqcsR4nICOsDSkWSnq/kzJSxoh7BIXOEUKjIrFegRUI6iMjLJ5pkq96eM/crzc5+/E++XREZWSE8LxZluYfwQ75vHESsGnHJDS2U0tijh9d5tSZLq0eOK2HRRtHqKQ3JBcCJyS+nGJww8TOK9kKTBBQ+U2JS1LKl2rKXy8o3bkESj2kqLyt4vcRBXJgEWc8nDz/hWea+fj1WWhBeYWluMIgCtBfNrRenaEvdsizQ3CHELgboLzBUQpNrZZQypI9d21i6p5Rju2awzpDpR/R6FSx0nHXGz3c98wVJ5l/jsRaTbCk0BOGled1SCcK9j64nbNvOIqNHNGKt9twFnrdnGqhwDmy9oDNnx/n0AcWSXfnuNjRuqlFEzAj5lmKZOV/7FNqsM94TCwLKMEKQ/ddq9gZL/YkUoFaDlFFxFa9FTeA5SVBeDZCAE/d+MQzTxPBSsDGj25l5O4xOqJNqUuctdRubrDx8CaO70s5+v7DwxjEI3ukk7zw5a+kYhIKUozJMEVJrVrBOumTUQHCeQsP53cxLxwyzJcddn3tD8OYc/6KAQemD3DX4l2UostIt8b2m3czf9USi89rIp/rIYfOGVJbsOmvGsT/qDjab1Gp1xDLgnD
RI15OvP7MEGrvKQbWWdSSYv7FLZSSJI8GiFtSjr1sFYlALSlKZ3nixgVecN8sY1UJlPS6KWPjAaFKmE/arJoBrX7KaK2KshLrJGEZEixL8ne1KdZiIyBckawut6FnCaIEZxSVqkO+2SJ2Q3hSIjKHCgKIQugK1IJv+7RubK3TN5xzyBXByMcncPcIMuMLKf1uRtSJCNoh1jk6bx8Mz2+KAII5yewfTbB4tks4ImnUGxRZSVF4SXNLSaG7VMMKQTCClAqTZzRXz+B0RpYmJLfVmOxMU1w6oPn6DlYbj/ByDtBMvbmKOGTQVvt4bs530vR2S/uXLQgNokDILlJA2A3o/FyGNY7wUwJ3j6X1jgzVkWAdxUzBymtLGp+r0GhI0lRT5prASYTxqtFWaLpFi+889F3GJlcQsSZyknBZYUqLA1r7S09TXBG0fqFPky7XPf4SDtx1DbdV7+bI2+5HxwXyrMQJR//9K0PkkCJa9nG2c4b+KwZ07TBxPAPM+nCLFGRHYGecT3CdwEWC3syA8GxI1s4pinOcqDw1uEwjJJS5JjwTUm4sWb2hSe9a759VaSecPnqWpdo5fuNrX/caHtvyNEeKk1RaNayuIGWJIcW6c0najxrC/Suc4b/FqDQSd8FV59NsdZmvL5Hf3vuhRMyRF28lPl3DGsNKZw5O9yAZ/vNHsfuHjx/4pmDDCfj6u5+tCiKPw+iBiDAMfUXfraHBHa07c5K/jah+PEJrPTT6lRQ3GdI/LH/wPTswskcRh7H3tRKgYsnZR7q4KtQ/HzHx2766mqcp7f9ckv2HNeWLf/XRLbz2/XsoXZ3CKrR2xEHM7R++n3ys/MFjBIzPNXjVp64iCH3So6Tv9AiriYMEXIRAIQLffRoqc/ggWxqM0UiphomrInMlUsmhI5fngzljfZXHuXVOVm5WkSIhCiOCUHH6WI/7bukgccTxWrVMoiqB7wgNCemNepWzp87SXu1Q5oYkilCRxGkwpaG0BT/7i1dAXGARGCvR1hBIhdHKLzLKQxvzTCCFQUovIDLZ2E02yJFBRp71+NYXD7K60iJUsa9alyVhqOj1eqwZ5cVRjEIyyAqM0IjAgA0IREDaL9m0ZYzrXraX7Rdauv3EG0NLRSxHKYsuSmmMcCAVM/UNPPRwQhgVTEyWbN5i6BWWUEiQFRyBD7hsSRiMECBQtiAMPXTDlOCsRIoQpx1F0R/yygRlWUeToSkweBw7gSEMKmAFT207yRduuPVH3W7/tuMZ9+T/r9/z33qsrXE/Lt9bwJvf8dMkT9bpDjI6qYeDKiE59abDHLrpEa668lrioLrOiywbln95+Es/iG8XcNlbrmbzfdupV0co85JCl3z/I/dy6o1HCXohr7vurYQyoMxKbGlojFf5+8e/4NERP1NSfCAlvnyEWBqCaoR+saH70TYzV09SZh4+LKRktd1iemKG1U+skr0gI1wKuexN+2iupnR6bbIsxVpBo14nzzJ0YTHWoeKS9uM9qP/wcwFeJe6ad17K0uE+ab9PurPH0ldbP3ovA3YcHeVtn72ct970Me66/et0mqsIYgZFnS/c/AlWmgtkWUFZCPK8pDRgbQBCEUWSOEwIQh8kGafRQoAFhfYiLFLiTEJpSoTygbk2lkiqZ3T53To8fU0t1FlHHMdEQ9VIM/QQVEoNC3n+pyjkuY6ms8go4G03/QTT42NoYzk6c4bP/czXee8fvQmVGcIgZDCj+fN3fQGAK2++kuk7GqwuH2OlKzj+mZMU20sqT0bsfv8s1bAGlMSJIqnEdJs+MLSiD4FFTEZ8578+zK63bKL9ij6D5+ec92s7sUZx9EPH6F86rJA72P+27ejjivaBPnN/sgDAyGvGsE8IrBOMjtSYv+c0ruqofXGU6d/fzPbFrehMo03pVVQVvnhR6HXbkKASr506nDVo7buenvPt95SkMcLFF+9Hl7C8vEqz2aJWq1CtVInjkCgKURJOHzuDihKCKB6ifLxK3hoRwQ3tT6wVOIz38VQBWhsUwsuXC4dDE8UJp8ZPIEd7jE0odu3cxl+872/J322ZfGCCHbs30S/mWFroIqMQB/RWe1RczERlhPHGGI1GnYw2qyuGsbEaLjK0yw6DDsxu3MDslhFqDVheWuCBO88yuXEUIyFNNTLzlhAjE75o2s8M0zMxumc4+a5Vmtv7TP1UjX0X70YQ0Ov36fTauMDQyjNkFBEEglg6GqHkxIllOk1DFI5z8cXXkJszpNkcuBJjBMcPd6kkNcIIwiggjqoIqWkPlomSmCiKyVON7jsiapjUknVzCgtvfdurmRqbRglFu93lc1/4AjLySbdU0BhJCKnSHRjqEwHTswnFfAVRVCltiXYp2vYIianUpgilIhv0eOzJp9BIpC0Zm6gws3UcW4vZu2UzS6fPsjq/TDYokSHownruli0RQnn+1TCJlkqgVYJKxhmZdtQbkqqK0WmflTM5VEOiqYRgKkAPMujkFC2DLCv89E++gaJzhtVWi6MnF7nn/kPIcUX3az3i2xNG/6LB6EyFei3HZjFOSVxYsvBEh8xIX8S2goqNufK6nyHNV1hpzdFqLiKKAbIi6S51yPI+ZZQiZMD0yEYmpraQ1BoUpo82c7T7CzSXBrSWSnTm4HQBY5B8SVH/cMTy91Iu33+ADZ0a/Ze0ufMTD3PJxGVEQjD3q/Oc+tAZsHDl1gPYRTu0VAICxeNPPcp1//wiDvzjc3FSczQ6TFb0OfQTR3nyHV5A50v3fpU/2fvHnD42zzUfewnfvvV2CjvACkt5VU77i4sgeMA5d+CHbXk/Fp0roRSFCogaI2zUVbIbMrqtrudaecgweZaTDfqkRR+rLZGKkNdvoFFpDLsyms3jFS69ok2hWvSzgkFHUm2UTEwHRJmj6Bpe/KuAC5ja4DhzxPHoXZYgBIcmjELfsTHe2NcHZRZrNNaUBCpEa438rCX58rBdLz3BUQiBM25YL3oGubeETS9t+L8HrFceVAAjH4yp/aHACY0UBlNKqtVRtm3fzqtedwO1A4r+ECKG0wz6JXv/4Qqsst5XSpc4N0APF2hXWCoqpN1so3WBNQZHgZQZgQpRIkYSIIEwDpDSqwdKJTFK4JyvTvluVkmEN460zmKcQyg8BwtfdXVKUFpLHGxEl56b5GRBpZawuHwCXWhkALowVKsNsnQFo73KH9aB0IxNjKACSW/QxxmL7RmE9fC8uBqQNGJKtO9qIAhQWOcIIoufvhFCGKTKkTL2nSinSe0iWkGSCKLAGwdrbVHSe135auC56pS1Fm00KoxQoUOKAKFi8mzgEztdMkhTVlsdzot30ux1UGFAGEYURY6MQOK5b5aYbDBGYzyk02uy2tEEKxYZClKtEfSGpE1D4AxlXg4BAg7dzVAyIpAegmBdF2sgCCKflBpNEHQIgwBKhUIhZYyVfazNcHi507gM+aUvvgHWVC1xBKFCKp9Qa+0oSx8UGWuHSTbg/LmJIi+J6oRb50Nalw2hkwF5kQ4VBH3ApeOCz/zUHbz77texqTuOUgFBOMXiqQEnjnRodQe0R9vc97vf5E
2//BYanTFUWCGORhAKEuH91aSTEIQMrPVeMYBIIqZecTlP3383d3/ry/RPzXPDBS/isisO0MtK0qzgzOQZ/vJXP8ZN738frBRYXSCdIIpDPvPRz3DxFy9k360XUKlUCIIqdsjZC6QgCmOa/QHLSx0GgxysZeNUnSHacugLB1KEhFJgjCHLS1Kdc+bUPLOTU4yNjRJXKxS6oN/vY23p56W0FKUXiymtprAlGEuAQirfPRBCop2iuS2jOjPJRRdexE1v+Ulu+dq3+Px/+SStNGd08w7GZjYwVZmm19HIMGd8NiPQOfXaBHnfkKclsuJAW77xs//E9PImDvzzNRw6cgITLFK4krwckKdNTGkweZ1t2y5g65bdRFGN9u1nqVcrjE5OUR+fod1OCWsxaVqSpQU6L3DCB5bCCYQF67SvmDtftbRCeJhQ1sP2u5T9Lj23zHfvuINmN6U6kGitqYYxRlucg53f2MPue7eTJzmRqhHFAUvXzHP/b9zDDS95HQo57NqCCOEbX/qqn8NlQZb1CcLQQxTXJFpxDDoZY/URKpUKNjF0WzmvPv813HniO8wXZ5AIaiqAJPF7TOHtBQbdACVzwookjCJkF8LIIqQj/Kqi+vsRc8UC6UCjncMoiTYlaZEzGAx8F1o5KAqSF8WEgYfUiUCQJDFFWtD/lRS3QzD9m9PoUJGZHjkDOO4Ye9EU1QlD5GqIUmDSnH6noPmVDjtvnWbnLds4ordy5z1H+PzfPMjE5DQXPWc3J5dPM7ppNypusH12khddfTlGZZw4fZpWc0Brtc2ps0c4eKhJloeYd1nsb3i1z3VuDgz3vGd0pITfy/Jncqv+b+bePMiy677v+5ztLm/tfXo2AIPBACQIggRAilpI7VJRke1EkWRZNkuJIqscKw4rKbvKccqlVDkuJY6YSFZZluzIlZTtiiOVJcVJLJFliaRBSWRRXAUBGGwDDGbp6b3fepez5Y9zu2fEKMufvCgUBkC/7n7vnnvO7/f7bg9+/QNXLaoHevrY6bLc2Vd2vcMphJK+h2j5J/3fPHNvdTJl9/2Tn/rN7vWCIO//rC//yJdR/15CrkJIuiqA+rGW6//89n1mR0f5jmfS3O57dGjKzV+8RywiMY+89CuvEUnI1YPX9Z+/nc6/B5ArCQwGA7TJqN19veiaW+WJ6hqTdkrAI1RIP0pGQhvSe+n2ytB4Ul5gJHb1gBCC4Dz9/oDhygrjzXNUlefevT288/QHA9bXR+R5nogkQhCiJy8NrU9mXEpKgpCE6PEhMSKii6mpihD9aexIgxABKwW+Qwwkiul0wvWPfZXpu46RErT+Ik1u4efguD1kqk9S3REiiDS5DyHSxJqpbHhbHCKFOKOpPYhcxSi4J/f4I5Uoq8H7lBsp5yk4Pcazz6dbCoQIt1Sq//wgEFRk99NzjsyLaY2dNvmn7I8HkCshwLtAiMlp9Avmd4l4tv9ORv9GTpYZrgxGHN2OCJmMfU525yilWFnfoGlaTmYWLRTDrMR4Q0Wgjop+b8h7n/se9g5nnJwsiL0xDkW0SzJTkMk+wuZEAaMVgczatJfGPqujnLaFxgqcHaDDknY5oXItSkY+9O1P8el/+AV85jmWDRM1BSG4qW98DXLF2WeQrj+FpiYSb/BQdsv+tNbxCdEWMiHYsTvkYrdGP9b7H4kh/SzvA03rUjLORqR6uKL+wZp9leiRp+c/gjPkio6TNQF+O/vFJK0JPuWTnqLp3fuI3XM5l3Nuyje7e3iapZm8CUJIewlJXUD9YU/7zWn9vfC7X+WlKAh5JBaRF19P+u8w9Ge/15f/8Ktn7t2nl922PP+X/i2f+8HPopziz/7Mj/D7P/5b7D6+c/Y1f/m5H2emZ1y+/QjZIBJli3ekQ+j/Byj1ddFctVstd//zm8mS20e89bRN+yfsn3EeZ9pOuAlRSoRWzFXH3QyBu4VOoV/SYl3AtRGdQVF2AlcH9RKIntt9mD8XqT8ErXIQU17S6QEQYiRsRZo/43FX0nROinSjE7qVQHcnIlKE+9NxEZGyM1+AzkDgAV766ZuOnhhld9h0AH6IWDPnreEtfuvq7yB0xIWOyy4Ctg2k9i2hQTF4Io6L/+I9lIdDRPAcNDPKfkEUC3QGZdkjz0fp90nDrDPBsew2OkF6v0bnCWKN6eEIIiDlnzygQrfgIT3YPgSiTjz/GCXRa4zOCUETCWmTNhIfJUhJiJ1BBAKhYkr+FhKkxoeIFBFIoc/WObwHT6JqKqlTo2ctUYKSSScVfEBEhTyzYlddMygRUuNcZD5fdCLrU61WKvLS53I/78r5ltBND5VUSARaGpSqsK7h4LCmtmWaECKJQeKdQylBCJogJK0P7O0sWUrF9MiSKSgz6K22SKdTISpievpiCqqOIpx9vs5bXOjw0+iRIiWJh5gyxwQRA3iXssCUkJh8SHQNMSqiTTqw8mT4wJoFY/IO3UvZXd57QmxpbbgfFUDEu0ARyvTn4PDBEYUjuo6epbJERYin0Hug8ek5lLsr2HtbLKYC91bFfNGy0o5ZkRssY8UfAqvLi/Rn6wipUVIjFcS6oa0bQtMgTMD3BnhpCIsWuajR8TbDGzM+YN9Jvv0unhy/i3Kvj7SBQQi4YdJKPOLOJ82gihit6fUKjMy4UDzCk/33paIrBdOdmZ/MJkte/MOvUM+mrK+MufbENdbX19MaEqdHBSDun/reOWaTI+69dYe1YoUrmw+zubKJ9QE3TFz9FCggOtqGxweLCy3Wp0YloeGOEFoa61m/2zJ7w7H35Zt84qVPczKzPDJ9N3vtAbNblsObe4TymL0DR3+lZNtscGmzwPhI1JLQK4kmw/mKTAjcbE57d5dzmebV16dYm4Yw1mrObT7G9rmrFIcl8pZAF3CZRxjqVYZ+SG9R0qtblssIlUdWFtvUxKjx2I56REdXEmcRFijIjOBkd87s0CKcZOPcNgBHx3PsRKOUROuI0QYpA3koyOcZk+KA4CM3fuA17n7/TdqtmrX5CtInGmKMkagtRIFWGiLUTYPqBiOnz3DaHSUnszlZkdwxo1CYUPDk+hP0VhRvitcROmlFpOryCLNI9d9Oyf+BYLnfoqzHqAykBgSZKxgtV1OejMnxdQvBYTQ4WycESArERaj/ekAKh/ifJOp6GsQ1JuBsJMwjsgV2HdPRCdYH6g85mu9siLFFfqwkI1KYjEKVjHPPVMwRU8iWkvElw+9//gscHB4gCNy+8RovvPIqvdYQhGDhPDsnLZW1OLHF1rbh4iXPY+86x+bFffb25tx9dI9dtYf8r0BnaT8Mz0L7H5xu9umSb0iKny8Iwp2ZNsWuQKp/pk25VmdXpPgZjdhL+41SMhVnH/WEq/fPwG/+l9/Ma+9/jf1H9oDInIr3/PL7yQ9yZhcnvPxjf8z5X30PyiuyZSSqmhc++iWe+elnKOsCrRRKSwiCGOGND9/g5MoJz/7Se5PeNUSs9jRFy/HKISIK2qZKOtaRYven9tn+NyvMr9XUlx2X/7dzGCmompbJk3OOvyMpSDZ+bcD0QxXLd7WIRrD5C9uEQ4e3g
dB6ai949G9d4+5P3ca3jmZaEYJHaZ0GM8ETrO0K1/uMmTSs6s5QKRBItJSoLCcvS6TS2NohQ01bV5RlycpoiNEZbeNwPpzVHzrTzE5mSWNlIt6dNsbiTJ4g5f3g0/TvAt3Z5SutUUpjtMbZAjYCay+tsfY7G7zmbsDfhuGnCswrGcFJmqbBB0mRa5SJtH5OYXpcCg/Rl0O0LsjykqLISSxFkdx7ZTK38jhcDDgvwHskNjFjhMBFSQgpqNZ7j3MtznqkNtz4zleYbZ/w7v/lWaIPxGhZ6Bkn+ojWH2NrSzUD70CqSFSGtXMZ0VtE0IzGF3n9J64zqQLLvYDRkfObJVvrgaN7jmYekTHprKq6QUoocoVREWcb5ouUueRDRPvA537/C0wWLcu6IoYabxvyQjPoD+n3xxgl0WaJHlqWrWWxCKz2x4z7qxwd7+JdQ4yaNgqkVmRk9EvNw1c2kBfh6m9fYfNgTL8nqU4WNApOTo6Zz5eJqoqFoPHBE2IaWKsYCQpCSNE9D52/zIXiEtGnRtPZQNM2IDU6M5jcUGqFayM2uE73DW1TYTtXRGMkRRFpKssf/cRXGN7pc/GLKWfMRc9r3/0m/iiw8VurKCe5+dG7vOfOe7j81kPMZkt2J5Hj6gQXQmqSG8/aTHbsKAlRIIVB6YDRhizLKLMSZMC52OVJCbQRLJZzYkzriXHD5//j57nyyfcyvhNRsyV1XbFcGt7+G29jx5bsXs5j/901BmtrRBTWWaq2RkVNc6lAF5p+WbCxNqZc3+a5N76P5b0lrW1pJjMWVcR+95dp2oajkxNam+r5gH+g9vl/vr4umis/cqy8Y0RZl2fFr4jybDNKAbLdBnVmhR7PqHrBBzye0HccyuX9jreALBO4Iqbw3FbQyojWkVhCm4HIOidAJD6Nl4B0U6MGMQZxWSB8muLHcL83F+IBXjOnv67A4++bJYiAj6edVzxl4qXhXXww/CI1FFkfwqDmlngFrRTHxxVN5dFKcuHhPpPjlslRi28D5cgwfX+FmJyjvDEgOA9UXHtqFWkEplCUA0NhSoKPRGGha0h8OM16Eh3FLxXY3pNE2QiCSEYeMXVk6c+nk4RThC5EvPCE0/7Sp4YDEVPgaKkJAdrWJ1QsJCv3XlHgg+1QFZBCJcG16qZdIW2Uto0ElYJbY5SEIPAuJDucEBGim6RFRfSJhy2UIASfXHWcoGkki0Wd3u/plEyA9+G+YUd3D1zwp+8MYkBJkxwjM4FznoO9JXUFRIl3XRYMrgt6Tg6RrQ/sHiyIPc1y4fA6NfX5wHauWaJbLN1hcooFyqSMiMEju/yP9DtIREjTnNPmtvWB6BWEgBSSXCiCV8QokzMZYG1q/FPNmRpFF5L5RuR0iBDOkIjTlZgaeNnRZFMDR0yokkQiyNBC37f6lxJMDxBUO5rZzZx6AvbVBVEbBv0BRb/PSow8cf3dDIsNsv4wZTVHj9QK7xxCS2QQSAX1bMkyClTrGVmPvrXLegMPXXiM0WjMYLzKYlmTaYGShn6RXI9WxwOKoux8sSTGaJ5860keiVdZ3zyP922HmqbCw1rL/vEJs8mEDMd4WHL58kV0f5QGKvLM2RlEut8yVU7kuWJjc53x2pjB6ojhyhgbTnV9XYkfJRGPj76bxjlsaGibirZJJh0hNNjWYuvA3eMDDu8c8dm3fp/xuU3WzDZCGWKzw9H+hOm64+jI0oQB83nJ8PENmnpCVhi0zrERam2RKlK1M44Xtyn6fdZXV1lObdJUqsDa+hXW9DnisiUeOPKxZmt7i/5wBaM10XusjdgQCK5zIwtpjaTnPzX5MSYDOSlAeAP7GgAAIABJREFUKlAaDJZ2cgxtTZnnjFeHAOy+e4c4Fwx3VnDRkeU5SkF9sWL68AnZpzKapuXeM3e49013MHPTFalpP4inz37sHFiFpHUOH5PDXXhwmiigadpual4gTc5i6TnX22bZn3AjvkbrWtI70um1BtxfqND/tMTeCQQJmc6pv6GF2wLzlsHoguVkhs50KvLGHt7jUc/rhJqLSFwJxL+UMuTcxyPxRTpL3wb/zR5/PkAQNFXDSYiEqPDvENQ/1Ka4gL9XYn1LYSRykBO+wUIJtm7xTUt/pNjZOUaqQAwti+kRx3u7DMYjIDBfVty8c8BkMUPEnLXxgI21ktWNi2xtGfJiQbXSsD/dZ/SrPaT22Hd72iseWpDPp/MtPB4RewLzzzRRxLMYjlBG/DcF9KcllGn0EE3EfTAg90D9W4F4G5SWuBbavxDg6v1705v02fjyJnZmOXl3cuy89K8vMbgx5ODpPa5/5EUu//oVjNVkM3DDGS989Etc+Y0rjKZDjNZoowk+NSpHD58wuzRleHfA1vPnCA6sdlS9hv21ITIIFu2UVtf4i5bdv7rP+ufGxEzQXJiTHxoyJfB1QM3va7nHnxxQX3GpuXKC0b8eMZtO0jlrBYUtufJ/PMbxDx+xPLdk77ldRp9dQSrZoQGC6LqJ6qk9POmcEOnpQQqBVAqjDYPhEJ3lKJ2GafVigRKQ54aiyPDOUdc2xcB0w1KpFNa2KJHccomxa+46h+BuuCtkZ0XUDVWVVEiR/qmURBtFXvSRSjK+vsrlf/kwt8M+9m9WmD/Q5J80OKug9VhvKPqK0kikLVlbvcDj6knGeo3M9CjKAWVZILmPjoQ24rzERoeLnrbThfcLQa4Sb2OyWLJYVjiXtPbeNTSNR+c5hw/vYU3Do//7EwmNCy3zbMqkt89Cv8Xu3X3EDIIXKA2tyBheysArNCXnLl7m9f/oFYIHW3dn7liTmwYcxFaSFZJWRKy1lEVGniuILdZGyuEIKSyVm4GK3Nu5x3RRUzUVgZpcCjKVYbQgM548y9CFwimbBlqNpr85JC966dz3abDbOMu4N0iZmnmKYwEY/d6Ayze32druM7l3ROwLbr4Z2N8LNBZc0EQvccHiA8SgUT4QjCB4gQw5W++8ypOrT4OFprFUtaVtGqTU6MJQlDmjIsc1nkW1wHrfnf2Wat6CUOSFZryiWU6XvPLD11l9e4NHnn8cEWuaesnbT98h3hNsf3ybzBlu/+Qum6+f59HPvYPD4zlhryY7GeOth6AILZybGQTJoVlLTSZyTBHJMkOeF/TzPl5YrPUcb06YXJigM0mYC4gCpQCXWpf8pKS3H9GTgFh63CJD2A4BdYJ8p6B0A0DR2JbYCGQ0xLKHKQ1lv6Cfj9l58g4ZJYNZRtO2qBOBXwRkq1kM5rz1+JtUfkn2pR5xeb/+/X+7vi6aK4Af+Fc/zKM3r4IUaaIZs1TQi4SONK3FtpbgHd45qsoxncxZLlPHWrOkvTpHDT6D9we0dUu9VGycg40LjmouOTkUHB14RmM4/zDcfVPy1T9Q+DaiKEg+lgGkpa00R88vGf5GxvCXsy6nQ9Ja1x3iCqV0B8ueTri7QhOL0gKlO81MTI1UgkLTBiBVag5D8EgRESKnbeDqO7d5/KlNVs4Fhmt9nv/ETXbfnjMscj7yn76DL3z2iC98aof5Ycujz63z+U+8jqpfZv9Wy9Gx5eqVc5y/pFE6
xxKpaGisQ3uF7yhhugvtDUKfQemIiO0MLNLWLxFeYl1I1u0dXVIJRfC2y7ySKCLOa0JwyI433tQNyAV5PqTf7+O9par3wAeUVOT9nK3NTY6ODpNLUvBoGXDOE+OpeUVEeENTRVQvoSnWOVAqFca+Q3zwZ9biru3mcwZ8iEQWeBupZpqmcYkiFHyiH56K35Xuil4ACUEihSMGi48Oo4cEMccUGmsFd29OaY4XiF5L8F1jLhucNcRYI7VERomTgmh7SBlSQrpIELu1E6TKkSrvCtiOLw2cGiTH6JAqHb7OeZRqEUEhRKIYRQR1cGeHaYieuRVJoCwEdWyAiPUtwXY6OpkCr733WNuASOYizreYrMB2RidCSmJUuI4CCBIpM6LP0CI9IlEIvHVEG1DCoM2IzKwB0L6qEbc1g0yRP/owuRrjpQMV0V7zk7/8NzB98DoQbEDEiMr75GqVvhKUMjV6X/3UHzGbnzDaWuEdTz9KOSq5JC4m2m0MOB8Z5BlZntbmtEg6huFoSL8umS8a9g8W1Nbxk7/2kx0CkfLDRNC4mHSHbVMzPdjlyoUxo9VVNs9dQmQDjo6Wie6rEqoYgkOLNByIQiaapCz45m//dmKM1F5y62CKjwkZE0hETCYFiG5PExqBRgiDVkNM35OJgIqgRYZQgtHKHge7O+zfu4ed7FKurXNBb7DZ9rll38SeKwhqgo+W/XtHUD7CfDmhUJpcZNSuYZk3eBEJzjJdTDk8XvKj3/8Xuf3mEbfuHnJULVjb3Gbvt99gtVzh/KVLXLj8MKtZiTKaeePYn7XcfHvByqbBu1MHTIh0JjdBIAhoYpeXo8iMRgqYHy05ubfHxQvn2T63iVlNq/vmz32V7Oct43+cMw+CwTCjjo63v/E13vjzL/PBT3yYybSmbdNgIRA5cLNkzhRkFzeQHNtcEXGDkOh8S6gzi5P3uR9NsaRHTnQe21QU5ZCjwz2MGQKGGCPz+Yyy1IhY4O0DVBuddFh5kSMM7P69fXp/rUB/RrOUNScnJykYvoTwrKf9pZZzz11AiobGtthwf6oZcwk9iSDSLJf4XwiwCXxaMTtoUblldbSGjvePYj3QtPOKeXDUReCtn76DbATusKHZqwhLwxNPXOF4f4dhf8B4Y53N7RGFblnUgth6ltMjZpN7vP7qLt4FRqMx164+ydH0mIuXVinygiwzXHv0CgfNEbt/64j2uxxmV6F/0CMwtP+NJ74vwsgmV7LoE43oMix/rWF4rUCfJDQkrHomX6pZ/A+O4qch+yXR2fzzf6PQfPJHf4dv/E++hXW3zmf/0e/jex67N8PdAX++ghgZfqVG1SkFun04fZ6m08EFRIq88D4NJbzj5OqE3/qVT/AXn/4xskaTo8mXBYNZHxEjVTFnsXLEUXE3HXciR1pNM255+T97K6Giebwf+guEqDi1ewCI3qMCgCcTmvPVJpceeYSX8he4+8FbVE8u+cYPfGtyzW0s0TpkEHgJSiu6yQTIiCS5tp42O8Wgz8aFi8QuWVjJyM6NPXqjRAW0zrKYpny5VACL5MYnMwKB1lukN5RF0Q0tYsrN7CjwQghcsqUjBklrE/oTQ9PlaAWGw3E3qJRIClayNWbsU00czWHEi0BhUg2AdeRhyKPuPTw0fIa1jYtkWZ4MKkQaMuR52j+d8+zcOeRwWuFtYv0E5dEm5/LKBoMiR9ma2e4ud+7cw0fZZVaCtQHdWOwiICrFsm1QQROtZhi32Cg2qLcyjl4/oTeIqEKRScHxItLOW3Q0qLyHMmn4tjIoMKsZ3muaE7DM8UIiC4nOJDL2iTiM0iihmM1byqzH+559junelJe+9AJ+aHj8HY9x8+Yt9g4dQRm2xmOsAuGmuOWUYf4wXmUcHFqqRcCEnDLPE20ajRAZSnrcfE6/WINoaH3k9huHxBjZO9hleFTQ3xqwzCXn1zR37oCLoatTipRxGVODLpTEk5hGokNJJ6xjy02ihoqaRawwfYU+HS4rRSgLZN5glzNaa9FZxvbD25zs7lDNIlIZzGCFUkoymyFZJehzTG/dYLF7RLuwFHpIOVhNjrJScmtnH/fa28yDYHm8w+pBQdYaVFSU+Ri9UuBCOi+GPUNfaLLCIHRCckNQ4BuCbLn5vV/l8x/5PXRtOk0i3WBaYJqMl3/8s/cZYTLtNT53KQJgw/HC//zVsxedDhdiJ4EQHUMrPmDgkzYH+Jr/wkv/xZexecvgQ1fRN3IEjv+v6+umuWpcj0U1TFOJuKQwjtpGGhtpWs/B4TEnkznVfEJbzwi+JRMBrXWyIzeC4QuB8D7P4LzElLKjiUjGq5osCyjl6Q3h3Pk+iIqN84FrT0fe+GpGoQc0bZ3oJS5RxSC5kbQuoTytTTxQJTXaZFjrads2mUcoedZgRQRCJnt4JTOstckesgsU1iqnqW3qE4SgdYLzFx7GhYqr7zrPk+8/j40Nx0eG+XSXarkkzz27hxlX3vl+rr/wPFGd8My3v5M/FK/z4X//W9kbz3jphbtsDbYYr4zZ3T+hcgGR52mDwmJijpCaVjqCoAsWTE5Eumuykr6hQ648ZEIlIWAMaCEhgomK6CKB9PlXbZMMIVSG0dBqwFVMlksW88Ozw1V2Do9t23J3ZwdBPBOvn/4dQnhg0wCjknGFtwHfBHQm0VogfcqG8koRYsDaBikkWqXJC8GSqZJ2GZkcTACJcyHZE0tFnmuWywoldZqSeU+epbvnLEgl0Z07krUCITKCtVT1lLffOuGxp8/jFdRtS0QjDIiQprsxZGQyp+wbjquKGCyZKTAmx/lIjJoQZDLfkAKjEz3UB3Nm7y9kKiy0SK0uUqCURkmBtxVR6gSPA2A61DFx63U3WEl2850QXZxy0iPGZGdOjyEEtIS6DbgAQihCXCJ9SMOA4GnFnKgE3vQJ1uPbObkWDAYPU79YUu0WhJUMPgL9jQ361VYaOnhHIyq0lBRS0ZeSUS8jyw1CB1yI1AGEb8mE5uD1Pfbf3EMZxR/ffZWqXrAMW9Bf4XiuOzwt6ROVTDo6rRSZ1hw/mmIIdnYXZNOK22/v8MrLb/Ft3/0+CpN3TleCGDQueBCBqCR6MOKd7/sWpIq03tO0jnvTRZo6Nw3RJWdNGyTeW0Kocd5iQwAUwjYY5boBQqK4Bn+aOwNCJ2Q0+IROSilQ2kPUiG5okcJWDSoTZJmmt77Nk5evYkROkE1aI1LyVHiGg8khB80ed6d3uXvvLpyMaE8SwjQNDZPlAdvvXEHIhOo0PnL12ge4uPldfPGPP86N2W36Y8Xer3+Z9zz1ATa2HmI4WmOlr4kq8ObOlN29BSdHNdoYvPa01tJGjydSN0uG2jDqF5RFSRCespczLAy+aTnaO+Kll16kHBdcfPQ8Vx69Qn65TIcakdc++hKv/bWXgYTeA1z4F4/xDd/7/TT2lNqX9gvfd/ybz//Gnzjo0iEa+cI//AxEQX675Jmnv4svvfRJ2gv12es++eV/xb/7kT/H2mtrSCTGeJ549zVeev11Dg8P0Sbj6lPPcuuVLyFle5/6CTS/2dLE9v4JK0HUEXu8oHb
TFN5qFNXPLGh/tEE0ApUV2JM62Yrb+xlB7peXuAd7CwX6bxrMP03mP0pGJvMTmrqjcwk4+d29B15wDAKe/iubfMPKt/DIDzxB7RyffP73ON6dI8/lDIc1ImqqCmxbsr65xjPPvpfzF7ZZ1HMmizdo7B55XrAx+jDzqWSx1bKzcpef+Ct/jr/+kZ9j2au5+pVtPvQr7+RX3e+lwSCe9v2B6cs1w0fLNBGOglMJ1INFiTrWrD8+5Pj3Fkgh0FIRRcqV+9rruQ98E/1bfXKf8T1/8H18/PX/E20UQpH0QXSv8zFpmlNHg84yrPM4nwLjtREptPmB7+3xqQEJqaj3HghQVGN6zQqj+jyf5RUmk0N6v6h44h+cZ9AfM2s8tz52m+Vz963YM5HQ/tNFMDaPEtUu5VSz0oxY29xiPm1wnQNjCIHpZEqWdRk4SuLxxBBRMgNS4Hp0PuVBKt19jrCxdY6iLPEhYBtLNV1Q156VzT5RaKrWIbIUr+FsotVFNOdGI2a9QbIt15oiy2mbFhuTvbt3HY1fnJqLOJqmizI5SwRIZ+/i5IS2bckyw0a5yjdV72aH15Ft5xZXKKRtWc6mPLL6LO/Yfj8XL7+X1iomswqYEQn41uNaT+s72UBMrnNRdLaF0oNX2KXlqwfHiOAwoUXUx4z6qyAUUUZQHqUk1bzhuX/0jSijCVqgjUQ7x3xec3TLcV48w7evb/DH8y+zt9yj7kvi1MJqyqELTmEWBSKC8xY/8zRLCWsClVv653vIueJwr0LIJQJJ3TSIoIhWsHnxIbZGDxGmt3EukqmSP/jSF1OkiveJZjkseezKRR5/1yNcOL9Jc3fJF176EsXqkAkty+OC5fGEomzp9wyZGeJtjZbbzKc11k+RylKMIBK58vgK44Xl7u0XGWjF3r6lqhoEJUpKMC2+cac7YmoYosS3ac0WaPov7XOnegtnU+LpcFiw3i/Q0ibJhNZo49i7c8B0ckIMip4uKXpjhuOaycEuy+MaURQMB5rv/eifYXrc0lRLrmw/xdEzY94c7cI8EhaBA3eCD4GDwwlx74Dh5piHDjbIinXUcISQJU4sUMB6r89wkNMbSE4mDVUb8I3tap/IeEWzd3tCO28Z31zn+3/8x1kZeHwrQOWY/gAzkBzfO2JeedpMIB/O8Mu3+czf+Tjb17d45jefZbi+SWNmmKDp5yOGK+ucTPeYTAtWpwNGyyIhqi5p/ZQ0XSRTh7gSIDRU5R4f+6W/ixAxGYupr3FK/VOur5vmajY5YTY9JNOK4cCAyTja22f37gFHewdEW5GVOaNcka31kfkobTS+o61FR7vcQ/Vq+mstJvdMDuH2jcBw1NFLSBST44Oa1U3BufOGfqm482qgXizOshqUMsQo2PyrQzjyEJNDXlGYjtaVJkfONRhjOlrd6dQrpkYrCrz1uFhDpGsiTimOrkNXPEVR8tCFy1y98jRkx5w7NyI2qwQbef4TX+Kpdz+MevoSR/sz7lwHLRY8duVxllsTvvL5t4kRrB/yyLXzZOUKn/7NP+Kp73gX2+fW8MFiW0WhSyb1Edalwo9YdeneKe9KiBSae3aQRCAK0CqZLXS8ba01MZAms51Gx/qIkSrRnxz4mA5WYwzOJ/vhGGXSS51SKoXAOUeWGYSS6SDsDuFkmGC6yiuhfhJBL88h03iRmi+jkzWoC4HlokXrHkZrlE4luNF9Ntf63Hh1lxuv7DAajYjR09QNyXY9I4SUOxE6rYb3nixPk5NkeS7w3iJFSimXwiFV4Iufe4VL1zaQmcK5QJTJjUvJxFtuGktwOn2GUeK9oq49Yp74vBGFVIYiy/A2EmyiWCESShhDCq6WMhXf3vvU+Is2UTw6WpTvRMNSKVzr8CEgkKcxPliXgiSDTeig0pLMZBDANoGwrCmKHja2CCQZGhEkIQwSDVNEovBEX6OlJy49mozCXqD5oqEZbGDkgGI1x/a6nJPW4lyLUlAUml6e0TOGTCuMVPgoWTbJgcw6T2Ph3DDjjS+8xQKFeOQivTwSJ2+zcfkyRTlgWtd4pZK7VkzDX0dkUS9REnp5dqb52r13wOzmPrPpEf1RJC8DL1+/y3DQZ2Wlx+pKgZaKo5lNJhuzOVXToqVgOp2zXNa0jU2UvXaJcy7ZkAePtzbZW8dkoGPbhkG/4No7ruG9583rr7OoKrQyifYTE/3kFGGkI6HFGJBCJOpiTOHeeZnz2BOPY43msKrY3dul0AYnBVlWUJZ9irJHf1iytnWZ9XMXuGonXP/1V3hx/gJPPHOBx548T29rwFdeepPFd9bYZcvRzoRM3OPv/+EvkJ1UjNsGaeCxR97F9vZDFEUPfMPJUcWdoyUHJxWlEjy+XTJrK0JT4+pIdFAoyfmNFfLSoGUazkiTU+oM17Ts3NnjjddvkBcZ45U+e0cVC7dPXq8RI3zwb/9Zto+30UJw6+YdThZLlIrkJyWuSbpX7QNXfuYaD//jR5PzV5ZjMoXSydEz03nSb2TpGTFBM3x2lcf+6x9BFSbpI0najmE9RG3ppDWRhiY4il5BUWQYNM/JJ7Fre5xUhyzjEioY/9Aavq0RJIMAnWmuXLvEmy/usFQ1pshpmhqtM7Kfk6hf0YQ2Uu8dI1xyMtW3JOI70jDjlCqOIAUMh4jczRDCEGXS4xFbiv8Vik+VuOAxucGIDCEVZT/nh37sW3DvPWTF5NhwwM7tCcvlEqkczldMpiecTGZsjxRCNHg9oxrcY1/tYouAMzUylnhVcCDuIlYFcTDFRcst7uKV59s+9yxP37rC1jM9Vj+xwmR3Sv4Lhuy6Yf6zddIpdu9FvA3FhwzVP2+oHtBcCQH+Ykg0Yh9wwSNlMth5kEZT5j2KosC0En0c+LYPfivqRoZQkqxMCIOIkSzL0gCoM5SwbUsuimTm0+kfjU46t9NLkqjtAoHWBqkDTdVSty6dyWX6uifD+7Gx4sDtsDycInoFo787Qny/YPGXO8dAl55PSJEsN956lSv2AmOzwnA8Ym1zjb29vTP9hZKSjZUVbNvSOosjJj2N1ImFIyVRpEHcKB8jlKYoewxGYwbDUdKlCIHzjpOjE3qDHnVdE2uBtZ66aqjnMzY21hiMB2S9DNssCC5ZfHvnKUxG0zTd2R5BpmI1ZUACSqYhqDJA0uvKLlje6NSg2dbh6pat/ji5AVcV/Uay2u8zmQW+9eoPcH7tcXr5OseTE4II6R6EmNgZzuK8J5Ac7FT0IB2IjOASayQGz0q/5NyKoq3mLOYNcpwcC2L0+BgIzmNtcg7GR6IO6Zlpkz5H54oejrs3brO+uc5F8zSiepPrd19m0bZEO8S7ikqcsFba1OxbhWoVohZUs4a86SNaCXWg50UyLfOq26sj3gRWtobsnOxw9/A2i2ZKnEn8es5AlrS15WBvB9331PPIrVuKSZUxzqEYXiDrbdIrG+ajFlnkBG+YzQ6pbUXez3FxTtN6VteHPHxlnQ984yU+rV7jyXde49FiG9s4RNOQr/WJ7nWW87tMZ/NksGNSPeo6jZqUmtBp5mMU6WwsM/rrfQb9krWVHpODo1QbNO
keqyYj14roRCr5MgEEhFL0ehm+rZncO0ZfXOHcuYtsr1rqacXe/pxhfhHdFhgTOb+5yUPveJRXiz/inNjiSnyMou7TjFoChhAEOljG/R7rKz1KYwBPvayItJhSk8WMYB1VPWe2SEheALRSnF8bYbKctkkDA9pIP2aUKyOOw5x5ZZF3IpOVHIFgY/tRnnjP96Aaz067gGWLkhoVS+rP3WN7vU+pS7IsByFROjXJShqEMDjfUjURrUSXg5hqRCXS0N//iZHOn3593TRX66OSc6s9IoJZZbn1xk2ODw5olgu08RTDAqkMSsaUP2IjPpBEeDESg0MGRya7Q00m7YY2iuU8YnKBySRKRarKs4JAatC5YLiqaatEexEkIwMbPfK66pqhdDhkSqesqAc+1xBC0l+RNAlprCfu63hidyiJAJyaQShiENgGtBIYZaiXlthE7txYsHfbI4SmOXaJ6qYMbaPZu91gq12kalnWC+7O07T+zp0l7UFJVRnmC8sb12dsbOZIpWhcQEtBZQWubVAy0B8qhBRooVP9LxJCcYq7ncKsguS2xllTRMoOUw/kTkmBjobOcI4Y6GgBomvSEq87adRO9U6io7d119dgsEkHBa5NiJ/KurI0JM1KxBGlTi/zARkjIkS868J4EeQmY1lZDo/m7O1O0DKjbbusqNP8CtltJCTXmvth0fJM1ydVQoCkSPodhGL3zj57d47ZurhGLzOczCYU5bDTRQUkCq1UCgsNaQJCBBUEmUzWxNHatN5CQHSJn1EEgkhuPj761Nx0ayl6n2zxhey+J52WqnM89HR6qNRQCUC5gIgCJR6w0Sem+xEjrYNoPZ5kyy9IgbFKZAkFS/AX3ifhtGaMnOeIQ8NQjtEMCRh8kLRtgsmNEZS5ojCKMs/pKYMPqdGzMuK8xyvXUc0iOMmdV/c5biIuF6jgaStPb+0cptcjCkNwySQmdk8oQiAFaAlGRgaFQg1SUba1OWDQOlZXC5RKZiaz5ZzFcknbDBgPLiKkY7lcMJvNWC4W+ODZ39/n6OCQalknM5W2AWw65GNyO8IHhNRn1ALbVhg1wodkNNI0C5bzGb1e/76ejbQZxxCTM1tH1YkyTXWTiQ80jWQ532Y4GBKDZzY7pjUaH2SnCRUUvYI8z+mVA8pemf7drHF5dJVeVVDtQD1q2X97iq09w94ql1cfYzwZUmAY9EtMTyCUYGP1HCpqQmMJ3tLWLaFuyUNDzxh6xlDkOdrkNP1A3Trq1iJQROsQmUYbg1KCZllx7+4OO3fvMptO2Fgb0dYtt77jJaqnHXrcAxHZffYWzWKKRHD02DHLpulCsEW3XwiwTVrDUiCVSPQcLZEq7RnJ9VSCTgHiUkrktyZ0UEmdTG1IouP4/nimFwWJ847FbML+43u4vuPV//BFptWUqm1wl1OOU/tdDcE7BOCFxKnAwfYx1UM1bWsJKmCtBRM7enEyIlqqRTKkEqlISZrPP7mxJf0XeGmJMnRulAGB686QRHVGJ2aBEJJQeF688Bb1+gQt9ghOcfJ4xb0nHdEHpkVOlmUcHh1Tl4p6WTEdGKorU0zeeWLGDv0WJlG+hODW9j2WRcOnnv0iTnvubR4iykjpFFvnhiz2a8SOR3wmkv33MukrAREh9sB/X8B8Rt2n0AiAiL90aioiurV+H/s529+VSlRzmdCTlT9eJRIRPYkyKey3KHJkI5Mh1GkWIaGjgEsQ6R4LkZgP5U7J1X92jULm2M5JUNzvB9E6nUFepH0q92NW1QYDNWTuDrnHXYodRX1wvyTyQZ2hcxLBel3So0duCgSK6cmMdtkQfWDj+haPfvwa48EK0+kEITQ6pmBfESNaaWJasJjMUJYFbWtR0pCbsqtnAta1VMuKpm0p84KmqrDWJvp4jGQqomXHClgGovNobbozKuKCRSqSOVF3BCPS2ZDuSkSqJCQNvnOe6+C9uvX44KnrmslkylgMIUbOXXmEjTc3GNY92gsFF7fehVbjZEWhUvixi4k2GXzKcUIlTZc4XX8+Ufi1FmiTkxvF6jAn2gXW20QxL3JyI3HWEVxL8AGTafDp3imp0kAzuqRpUgWxV6DlnGpWYbI+m/lD+CiO8iAkAAAgAElEQVRoshq9CvtH+8n+v1tD7QJMIxBaJddiIfCdJq7sG2zrUEJjNEjlcSFQz2acCMlyNkcKAc4SncUrT9lzPPZYnza3rKwEDg92uL235LFr55C9HL9UmEzQG0A1a9LPjSDRGFFidUWofHp/ShFEGlJqpen1C2I/sJx6imIDqW6neynFGX00NVJdHdA5IUcfaUPLpNfyaK9g0O+Ra0WzrIkErAvUrcU6jwngqoYQIllh6Jcl1UmDbyKZkvQKgXUe6SBYkXIujSYrFVSAh3xljdXHnqLo5wghkV4jlgrbWmLQnaugTUYQrWA2rVjKpqP3t3gCJu+qaJ+MN1y7PNP/O+/Y3z/At57gUtZpURZIEfGxISLomZxC5JjBGK0U8/kRt9++zmL3hBkZohUMdUHsjTg/2mLYGyNVllBSBD42NG1I9HcfsKFhuqgS8ye01MWi20qS07W8k9H/+xssuJ+L9bXX101ztTLsMe6XzOY1O3ePeeP6mwhqyp5mvDYg7xe0jadtatq2obU+WT+KZMYgYmCQeTSS6GXK1tAwGmvmU4tSgiyTKB1YzlPh6IPHORiMc052LU7oswld8IkbfGpAAf6s0QoPFLXB+U7InBytUsV7au/dHWpEOuEAp+HB3kUyk5GZjGrRcBgPyYpA23pae0KvKBn3NEe7M3yMLGYt1aRiMamQusUFx4I0MXvzxi4nd5IDmTbw8lfe4sL5DbLS0IqksxkNAV+R55HxoA+n71QIghD4IM6spyPx7LDMM0PojEOIKVNcK9kZWSdam4q6K3yTxWyitkGMCUUSMvHNTzfa0wYrho4m0TmPnfGESJN+Z1NWikSmIt8m3ZA0iVlwSj3LtTprjoL3iTKiBPu7M+7dnXByvGB1VWDbFtVN6WIMKCVxzifKHKkgcj5RImNnBW6MwBFAJA2dkJrD4zk3ru/QLwu2zo8weLIQUnNG2hC01ARvO6OQiBGKns5Jcn+Xfq4PKHHamqeHXIYO5Qj37UiJoEmIoJaJpik7dFGEVKyrmCaQAsGpgquIyelRK52Ez84hPAlBIiFT0ocUgh08gdToSWE6N8RTRFMT5zkmriFnBeJIMFxbxyvNsu7WbMdBLkvNSj+nNCblhnnJSdVSe08QKdrSB59iC7xAOXj7xgFubQRK0C4qKusoRxsE6wg+IqJEhmT2kZr6FBmQISm1YmNYUq0kmP7yhVVif0zdWJo6cjxdIJVnMZsjgsf7i3jfMp/NqJYzrG0wSrK/8zYnh0e0TQtC4oNDG3Fm3XxqZSslIDvE14E0ktD9JTvKisoE3geEj5zOIYJLGjehwWiFVMkNyduAJ2LbhsX0hH6vIC80Ra4S7VZKmmWVmkCnmdhADBmm6DPe3OCRK5d4tPcUoa5ZvDVnsTqntANkVGwMtnnq/LNktzIuP7RODOm5lEriZJaKKxeI3mFdy7AQ9
I1Ca4kykkGeo3WByx3LqsbVS2bzBiE9oVdCCYjI9PCYW2/fYnJ8jIieXllwb/ceb37wRY7/nUOGB2kivfv+mxw0sntG0mgl6T3OML20j4ZuCNXpWekGM6e8+WTV3A12tCIbZmhtkruoS0yDtkm0Rq10ym7qthVipC4rQuHY/fCbFERM7FEXhhM9Y/TnTXI6jV18gdbMwoQiCDKvcdahYqJX3TdYgka0Z+/lbA87K+5Pn/E0iIvCJ7pYTBqJ4OP9rxMSXUKkBSlwyvGl7Pp9y2SREJLFtCEEycQnJ9ler2AGLJcNMwmz4byjtktOfykh0p9jjCzLmtZY3njkFl4GdrYO2Nk6oKbhey68h1svHROiR92J5D+rus8/EtbBf0PA/pee/rUCcdzpjWX6/80P+PuaBqmSMdXpGdhd0qfPI93I5NKW9mOVQl2BXlnS2tSgqs6b++SJE/RLhmLW6xrYgCI5OZZ3ezz5s0+h1w2Ns90kPzUYAplYEgKsTPbpR5cmDJeXuTRbo3ZbNGpBzGb8X9S9Waxl2Xnf91vTns5wx6pbXVOzurrZZLNJkTQHkZIoyaEsK7HsJIgTJzYQG04UBY715Cc7gDI4CZLAMRDAgDO8WEBsWTFii5bDwKSUaKDIkCLFFpvsgV3dVd01153OsM/ee415WPveYvKgvMqncdGo7rpV5+6z91rr+77///ffmKfhoC7wtJEqYfulmvK2QSZJsJ71YjOuw7Dz3T1e/KWXCNohZYuRGi3AewchoI3Jf4iUVEWJ0Ya+c7jB0W8sSTikhK7b0LUbYgrYvs/TFu+I0TOpC6q6oShllvf1+Wc0psxy9JRtDEqJp5dbxLGx9oNApAzYCf4pDj5En+l8PlsdVqsl69EmdunKDS5fvkb5rqI6uIySMzadxQdPXWegRoyJ6D3Re5RUmZMqxwgPchSFUjnLsq4M00pTmcTJuqXbdNjBI6tAqeUoZ8wWiqoqcMmSgkCIrIgRCQplMIVGqpJKK+7eeYIpJ+zWB+w3+6RgiZc6bg23OVyskKN0M7QgrECVInu30tjIMBKjJc4HtFBMpoqyjGw2HXiQPiJjVvBIIcEDRc/2BcX7b17kyK2ZacXjV55w794hs90JzxwUeJdjN5TSLA97NANSZPhFKSusKZHkIFznPEeLYyDRDwMbOyBKWIpAWgrskM9HZzCXEML5EpmlnRHJOHwgclSsEERwga63tOs1psxFtHWOmBJKG4ZuGCNyIskGTh6coGSGdJWlxhhJsp718SbvdNGjjMJ2m+yXL2dUW9dYHt7PTXGX6DaO6DuKYopW+SwdhKBNicXCnzfYtc4bpLEhN9pIyKjBWmRSSKHwznP/3mNC3yExVHXDbHuLGCJOOEplaMqSybTEzPdQyhBdi23vs1icsDhMYAuSnrO/Jbj8oQ+AULgQsT5HpbgIi4Wl7wLOJ0JyLNs1QigUnjA7y78bG+XvlZR/e+tfjOLqvaM1h6++y91bd7n31vd5/uUr7OxdJknNyWrD4/cOicJmQIDMhxqlJME9vcGkAEVGlOeOosBUPZfmegz1TUgdKEoyBTLqUbbgzrN+Qop4axEYlEojGjRPL9q2RZsid62dy54KkTsOZzkdenxf3ntS9HkikeLIYVDZ0xIkMVo++ZmbNE3NN752n529ihdfPiBESz9YZtMpbb/hD75+B9tHJpOKO7cOUVJxcHnO7nbJ/kzybeD+re9x/IaiMIlnbxi+/a1vEA8b5vMSUQnW3Qkf/ukfodqaEKUcN42RTDdutnlzlGP3YwwIhvPONjGNHpFsNJdnE8MQCUWeugjhQcZRVy2JSETKG1wOuEucZVOcXa9ctKXzyda5JDFJlBBoUVBoAzEH5BVKoAuN9Bl/HJ1jNq/RBsQopUE6tre2+Odfu8+jN4+J6w5bj10VJCQ5Qkkk1vaj/yh3lNwgUCpPKKTMj0eMHZBzS6SUzOZbfOO3XiP1jk995kU++OINHtx7kIv1JHEOvMsyPJJHpkSjJuxWE7SQhDLLJ3xyBEapEBqFIUYPGtSYI5FCnpSeeXgIEJUhhIAeDyKERKNzmrYQgkrl0fi0muKtH08+GXevVL7uSQZKA0VZomVBQo2d05bBLrJOWxoKM2VvfpXHX2yRTcnW7haT6zuctIHDo9y60iZRNXnh36pLdiYTSqXxybPsPU5ZIKGipC41JytFOU7Flq3DHexl+WiIRA9agx8GVBo7n2c5Z0qfF++F1mgiu3tzdnZnqOmogU6JppoxDEueHN5ntfbs7O9R6ZbKKLSJnJ5G1usOOzhihE3fc3h0nMlcTYGPEZGKUUYc0VJTTSe5YwXE4HHOEV3OCbI+S02LsmG3mXB6fEL0+T2f+V7VeBhM0aOLQEgC7yPBcx6qfXpyyHw2ZWt3h4sXLvLOe/e5dG2fOVu4rmdru0GIgfak5fRozbvfu8+7r7/Ktede4MrVZ9maX0XenvKn/8znuL37HruP9vmA+TA8nyMKqio3HSCythZvE0IWCG2opKCWIEtJjAE7eLq243t/8DrL0zZj8n2H0gNb011WqsRHaNueYehwrsOUBdrUmcLWLQjBcu07z/In/7s/xd/55b/Nv/TXf5b5W3N8dNjgCV5hvcvXRDhUimAmtOsW23VEn0332ZemMFJiJNR1PqhO6pr9gy1e+BPX2b5whUePDlkcHxP7DW+9+RpXrl5mf2+f+WRK8AFRZgrZVz76Tb74E/8Xf/O//sv0YcUSy3c+eIdf/vd+nf/wP/sZuo2lcy1ROvYOLrJePcK2isXxmicPH49F+5KNtXTes3EBJ6Bz4SnEQSoociErlUapAhdWuUGiK6SQxNBS1xXrpSWSUEah5ZSPfGwPqVuKUrO1vc/7bj6DaQw2DtmPMJnylS/eYnla54gJ1fPpT7+IEZLf+9prlBPNZ37qJarGk2KNH8lkVVWTUi48vvrx7/HtD93i5//Bv8x/83P/iD/+258kysiXPvc13n/zfXz1128Rk8doQQpPEez233L0f9Od1cF5D0mZinkWJpM7wQlrPY3+AULE+CpPJdqOUsExzG4yaajqht6ME/CyxCufyYCVgAhf+odf5sf+6k9y41efI5KvdZA+F+Ie7OA4PV2cdUPzHicTMkqcdSgpMNO8Zv7mf/FFPvY/fJof+vufZDI/4I+pn+KWfQ2rX+cxRwD0w5oQ8s8aq8g3v/QtPvev/AT19wsQntmsojAaozSnp6e8fusNpmWDMPIcWlFXhunWBfrOYrue4DxB2BxU7wJDa1mdtkxncwSJEBw+9EQsJ4enNPMZs60tJtM5VVERRl8yKVEaQ0JmyXfK0rC+yxOB6C0kTxI50iZ0WfmQAB883vvR85cL3BD9iGxPebpUShZh/N43HfbNQEoG+2DF8cM3UEWBqkqSBO82aCWIPsfFiKhIIaGMRBUKXRVszy+xN9E0kwKpNX3vOL7/hOXJI/ohE/lWxyc8wuO8xxQFO7u7lFsZ2OFjngIG35NS4sRvSDIgTWJaTLny7BU82YdthEGVmhsXrvNsvMed8i3efHQLUkKnktgpui4w
q1o2XaAoahCSwUb2Ls6wfeLCMzs004Lbd+7z6Z/4aWbzxO//3jd47Y3XmO89g2bK9ecqnn3ecPmqgsOC/UnFYq3o7MC9hye8fPMFdOOwrmPdtXRdz3DyhORslt0bSzlMsbKjrgqaeYOa5r1g4yyPFy1dhFTu8OjtxwydpK52CGFBlHnKmVVaOZYgBI8k+3NFpehWD3jvre9RyQkkTQgCFza4lKiKkvl0RjMvkMZw2D7h8YMHvPP9N5lN5pRNxdbOLlXd4FKkPV4R3UAKuQgTEUyhCM4z3Dnm+Fe/i3UD8a8FgofgBUIphujxMVtgfAgoHCF6jCnQpkQnkyV3lhwLUBp0bZhMt7CbNYVRiCQw3nDt/QcIoSBmC0q/GShSJKYBOx3w10oOm+t4WfD5z/0kv/jDv8hCGn7pr/xdTu4fMt2a8eyLN1mtE5u+Z71pWSxbnhy27B/MeXJ4MipwFArwyVCUEikifhxkJDpIBpE0hPIPrWn+yBRXX//qd9j71g6zMvLjP/0RTtaRO/ePadsVxJ5i0iB0xkwGF8F61DgCFaPUIcUcdlY3BYMPvPe2Z72E/Yue4AztSnFy7Ln+guLqdgDtsSHQdwVlsQPR4n2m59R1RUxdhjkIEEFRliXWWkgxy76AJHN3M09r4ujTyZW5MQVCCPqhRYncNXY2sl71fOxTz/PSyy8yDBI7LHn0eOBzV6+we7EhEuh6j6kO+OQf/yj92nP31imH7W9T1wWf/NSHeeEDB+hLx7wib/Ozf+Hj7B/NiSTqqmBd/B77kxnXnrvI1Q8dcLpYglUsh0jw6XxB1VoTQs7NIEF0HqVkzmoRkigDLuUgYFJiCD1SlPTOnsMmykqyti0RD8IhUkBHgSkVhcsSjxBiNiuP1CJjTJZy1HXGlFqL9y4vFCl3bUWKSKlZr9ckU+ZiLQgwCaMjxkj0pKSWl2g7i/SSzcqzXq1ZrU+YT1bcvnNE5wLNrGZo8wFYVBVqlPqRBKWpRtldvg+VKvGugxQIMhD9WBDHQCCSZESpEllIXnn1Le4/OuRHPvsZLl6YcuHynCAifjHQ4zBK4WREK0iFpA09yfo8apYSj8fGIQfTEXJhSm4eKBIiRfz4xpQ2Wc4nBMmR81HGzyWEgD2T/pDzJxKJdcjNAIgjWTFlqjq5A1cYQ4qBbrMh+p7g84G7bGaYt0v8SrFsI2/e/S772zvsbG/TO8HR3UNcgElRZFmrUTD6IrwznKzyJojwOaDa50JRSM87764wpuLij3+QVBre/JVv5NF+yMbvRPZbyJQz3OKZ7SEqUvBoAYKAdQGnSh4tPV1oWW9liayPgpPTDSeLDb1N2OC4d/ceVSFp9rYRQnL0ZMGmb/PBPiXazSpfn5hy8Zpnt0DK5CUJsjJQFvm5t5aw2iDWHUXSyKRxITCEQFGWFNMGu+kJw0iNEwJTlxhjUFrQuzUx5A5fDBleIrRk021YbdaUsynlZIvp1pIQEmXdUE12sD6yM9ljvi25dCPxPue49/a7PLz7Fo8f3mVrd58bz7/A5psQP08+/aoMO4lCc9IFBu8zFMAKCNAPS3o75EBp55C4vIE6jxI9WyZw8UqD0lsgM2Hx7sMVXbcmRosQgSg8uszNiRQC3YmlUDOUNCxP13z3228BcOfd96jemWUJWMzZPiEEJrMp84OLTKsJmgG13TDdLXjmowc084aj31+grMq5T1qi0ajKII3O8IIHgcNX72FC5GIsiF4QVlfRb2h6ucByiqLApxyR0Tct8UcSj77rwDRoNafZX0ESNHeeYUeUpLHDH5/AbnEzT/MmCvl+OY7N8yEoxUw59SGHjPd2oLeWdduy3KxZbdbc39znneE2z738PspZze137pNC5Md+9NMcPX6EtxKUxKfIu+8d8Sd/8rNsXygwRaaXrlY94XzSA2KI/Nl/7cf4wj/5Ovu7M557/oP8yGef48tffI0waKpmh8vqBns7CWezXDqOsRJnAKE3ywcYobleXEYi+bU/8dsgoHIF779xne3tmpOFpf9ooP2lgfmHBNj8TJy9zlQI8DQIF7IMWmqdfbEx+1d+8HVw8SJGF3jvcIOlmUwpqmpUdawAWJys0LYkJtC3av715/8sv/Z7X8D2PZv1GilhMmv4whe+wI3//QZ/6t/9GcRUoAuDUhopBOsra375136FP/9Tf47mqIYkGIpc7P0bv/DvMPnuNkMf6NslSgh2quvs+QWQ79fuqMf3gYOvXOYT/8ln+Wf//B+RUkk/KKzzoAe+/q1/ys1feJGd39mjnBiMgo0dCD4RfS4eexvp+y6fCxJop2nqhqIsMVWW/XrfY4cud/LHzMuyqNjduUjZNCA163X2ccUUIYQRjDFkiXX0QMiZUipTSaXK00rvHdZtEEFiTMFkUlNWFe1mfU4SrEyDHXIjwwhJiWThOhKwOT7h9L4+E66iioQcQDmVD8imQKsiX/t5gTECIf25h0WJgk14l5NHmoXKMCVJZDIvqYs9Ht59yMnRYy5fv0hRNtlrLDWkguPD06cNYATG5EmoUhIpNKTEqlvx5NSipUGPsuAQAo/fPaUpI82s5MXrl/iGEDgNZS0ppSAFz6YV2KGnKDWTScPu3jZPHh1z/+5jzrzbr/7+Kzx58pijk4eoSclydcjqySEf+MgPMZ/NaNslMkpULdjZj+xs97zz1mPefPcqu9tTtCjpN4I7t7+HFBklLonYw8coZfB4elexag9YtQck/jFvvz3w4BsL+lXgY+Vlnt2+yI2XXubB5jG/8ztfwnE63isFImRwikhnPnKJEBVVUVAZjQwWHzpigtmkoJlsgQIXHO/cuUchVggc+9s1W7Mps60dhEp0fU+3WDC0A9tbNSE6VKUQSrBYLIhWk6JH+R6zecLJyhNDRJUaXZUk71GiwGiZB+ijT7cwGhklKQqigHIm2J1N6XtL12/ol57ThQM9sOkHlJLs7UwJ7YZusWbYeLwFUw4MCJ65cIELly7SvHiTgys3+fW65kHf8xvfe5t/+jf+N1R7wsUrz+L0hK995y4pKiIdmzY3Dy5enrJcrFGiYNIYtIr0y4HKCGazKTIlTjg7H+afP41UzT/s9UemuJqVnoMLJZN5yTv3DlkuemKyCOlJEkKfAynPxlRJCWwIOJvNtqUqmJYSJTKeNcnI7kXB9q6kmQi8hWYa2bkIzVZESoMgIYSnXYEdMplGKo21PhOJRvOKHIP+zvJrzkaaUmbZgPMeIcgGwxgzmSymEXvt0Tr7NELIcpcrl6/x8sc/zp27C+7dfYjZWmFFx2/+9tfYuzBjMm/wQbC1W7K1M+fx3RVvfPsez79wgbKouHv7IevVio/89AEkODqJlEuoak0yJZ/67Af45pfeBX/K5We3aXRBHwKisigCCpUDTeWIipYJKTSm1pCylC2mhChKOAssTFl37qKjrAtEihAdNgZMWYyTngIpIzI0VGVBdLnj6H3AaIWLLh+ex004j4XHUN0QMnwhhhH9nnDREcWaekujjcIHSec8i/XAsB6wXcR1gsXxEj9ourZn6C3egTGJxekGoTzVROPt2aH
AZ2y8HDutKpLjW8ZYxuhQ+sxSFjGFwTmfZY8igsp0v9rkzK1Fa/mtr/wBTSP40EdvsL2/hTAFRVXiY0bkChlp3QZhe5TIOunocwcxEUZdckSkLGlKPk8PxXjIzxRK/zRwKWWPRj/k+zHfax6pcvHVulXeFPsVWprxEJTlgUKWDN2Ac4EUQQpBU20hHs8p2oJZPaH2U9S0wZuInTou7FyiqguKoiaJ/FmENOaojJ+nj3mhcbYH16PTmPckckhiLvsi13YbDn78o9x954h7bz3EhwGZwuiVy8UlfpSVPrUAZjlCEkSRZUQEjwy5AA224/QwF1eLzUC/sKyWKwaXIwM27Zr5ZJfpdEKIsF4vGJw790Fu1gtEDNnLM0pC8xQ1S5yChe54CUqNmvkxyJqzoOc4Sm4CLnpUoVBWEhwkH0BLhr6HFFGiQGEYhgGj1LjJeZRWOGvZbFq6rmMymzMpG1BjlEOMlKVh7UH6hEiZcLl37Qp1U3P6+JjV0SGvtxs2p0u6dkM3BB4d93gbR8msGO9JT3C58Hs6wSZTOEePV5IZ12/ljPXGYX2LswND39O1/RginWW7OiUEioggIBlsT9AFSYwkzzP7aRD5a/SgRQKg6TaOJ/cfEwrD/KrCUJKWmqNXFizLnuHEoZD4MqLmUyyJtOqRKvu+Eh6pS7QRKCEgRK6+sEvZFEgRxmIx4V3Cu8An7mxz7X/8AOXuLt5aCIIUDSTw0ZB8HGW5WSIcIvg4ZI8cmZL6A0JGzqR2MQkEFbqomes5O7v5vrgZXuDj/pNs789Y9y0X62cotjU/+ukf4u6D91guPRtraYeBcnKZ1UIwn+8hkIS+5WIxJQiHUIoQJUOfeGZ3h0lVI1XD/uwiNVMEBqREG0nVFITBEYZ8T0uyZDLJrOjwLn/mNir+8pf+NL/2ia/w9jP36MTA3/mZf8LD9x/jnEc8StQ/L4j+B6Hk+bX+n/qnBZfIos14KTH8hYD7XAQvkH+xPG9VnL2+/Le+xEt/60Nc+OoFIOVD1cYjhWTY5Am0VCBS9h/FdWQyqREJXv+F17jzb95GkPHmi5uLLFXvK1CJGPOE+O7n3+O7P/cd/NTRWU/qciPFFfnP/8rP/Saq1aOaIr93qRLtpdX5+/yhlz/BG7uv0XcDp3fy8erV//SbyFUOs0fAsNsTN4G4DFgJQ8p+2ZgypEgrSb9ZobSmaWZUVY0x2dsTYpZIxzhO/7Q6z30sTEO1PcU5R3dygg9ZoxiJpDDKOGKeDsUweg+lBAJ911OUFUmIvMYJwXS+RfblSIRWDDbgQz7xSqFIydA0DUIqhhhovUNXBgFcvfYsNz58Mz9fpcySPKEwyqBNQRQJMaLmY0xZaUGe8OdpVmB54ogiS8WbqqAoS5IS9KtD6mnNM89eYba3z9ANIPPuFyKgNTEMDP1ADDBppiTh8znFR6IL9HaDS5l6q2JC+ojRFeW0YbCWfhOQqR7f3wBBYlKBD4JGbxHoiS7St567b69wziNVoqokW9OK925/m81K4/1AJUCGAVUa6mZOXR8QxZQd4TFSszuvuXLxiDduP+bBe9/l0V2Z5W1Jsr01Y3V8QvKBJBVKTCibfK0W7Qknr/0B379TEv5q4Lp5H9e3nkNuaa5OLrI9rTnddBweCyaTCZthhXPi6dozkn6TUESZbRQubqhmNToZwqiSqLSmXWXJsdSSqhDUxZxaSbxzeB+xNlKUWeGTkCSp6XwixIRwCeEFIRiqssgScwKbZM+pmaWp2JrOOBtauyHhgyMmj0yafkgE21JXhv29ObvbNaFdM60FGwS3757Q9YJ6qxw9zgmtBzargU3b4X0+m6pJxeX5nJc+8xFu/vBLXPr4DZrZLn9vVvLod+/w+j/+XVK3ZvvaNZZeY1ubZcIq0rUWIaHQhpQi6/VAVRbUdUFdafanJX0vMCYRBksao3uSyATyfFr5w4OE/8gUV9tbFZOpIRBZr1qcswgVR8m4IHhPPt7H8UCjiB6aJieBJz9ueSZ3tX2CGCUkQfRZclYUkULC0EHfJYoi35CSCuc9RZGBAUIovM9d9zM5xNMu3VP9uJSjb4jRXxUCKofb5MX6DKs9mm4ZbQRNM6OxFzhdKOrO89yORhAQTwL9JpHmDj/zxLhhcbrhyb0Vp4fH7OxtYeqC5XLFycJx92428R8dnpIeR4xRlEoiTcHgBk6OB9699RDRFAwMMOYgVFqN+T2jCi9BCJ6qrMcsrvyQSDfKDc6IaSHhURgTYQSITOoiX6uU/WpCZqPoWTZOzvvJ0pIzzT/kDTBnPD3VhwtAncEuhCApMu6+j9jOs9kEThcbTo5b7Nrh+4Czgb7tCDYbKH0IpKQZpB2LN4mQoEfsfQY1ZAJNSjJnmQCU7VMAACAASURBVKQss8j/X4/d3bwpaSkwpsHHgYhHyjy1VLHIQBUvWLdLuiFy65Zh+3TBZGvGbOsmG+tQUpOCY7lYs1icUBTZIxAjxBTQUiIYPQ3n3q+n3g0pZLagEcaKD+T43s7esxDkjtFYXPXDACnRbhwCl4mCUqKlp207RJDI3qC6kqZumLCP8BUqlhS+RvUlUhlEFdFlYKrm2SA9pqUXQvA0USi/kjzzdYz5ZwKSkNkAOx68CqPYf/kZTjvL8sEx/eMTjEiZkpWy4Tml9AOBxk89LdnHOMICRltaioEhdbgkaLucf/TgaEFceNwwnF9HSaKpS+q6ypvt0BNC7vKmlOjaDeMJKzv2x6aKyIaNXLwOOecuqbE4GCE3+ZaO5/fQGZ1aKYkZiY4+5rXJW8sQE4ydrzM0fhZS5ebLMAwMQ8dsPqfUBZsQzosQT6KLFp0EWmS/UdVUGKGYFA2b0yVPDo+49+47DH3HYAdWq+UooxiwNmF9ZPCOFFyezozbhEygxBlgw5LSMK50jsH1WGcJfizKQswQnPFn1VoyDCnLtMZJqhT5gBRJuLOxcIrI5Mcp6oiCFoLgHb1zDIXh6ImnTRN0KuGhREmTYS7jxHsym+Xv9y7fZzoXUFLmmAIhBVIIqmZKE+sc0yEkKimkd8QhMLlTcPXtCwyqZ1isSD6xWS5JwPrklGQjPkR8yp7eIHWmv6VATAkXMljlrLjKXqj8KykVWmmKosSMUDuBwqRt/EohbWTHXKIpS+RpQ9Pt8u7hbY67NV3wpKD5/W+/yfGp4/LFPS7t1mzNp7i0QQpFCIKN9szKhkJplDZMqglNMaE0BlIOT6/rgrKQOVojaQQ+ex1TJDJCbhAU0vDRezdZ7i6pMbx58S774YAT13Fyeoq7NVD+hsHFvI/J1yXmf/n/Sv3ydQcofnX8qBGkH4iCEa+D+pbEyAIjDGIzqk5kzszLBYI6zx80piBJR4wO5zzGzLnxq8/hpqM8Z/RpXfvCdaavzrE2e0TE6OENy0jxdsn1W+/DnUYYUffejW/qERSdQamc+WS0ylK7w5LZrW3e/fzbXJte4755j03X4daBC3//IO/5KTd4hJQc/MpliofluUc3L1Vjrp3M4J2UBGXVUFY1pigAgXM2fx4xjaCTp7AOJTIBVoaADx7nLN45tNJAhl
ikGLLcPqrsAU9ndNowWhlC/u8injc+zjzK+IAPkeBHoAUOLy2hKMb9P59p6liND3gF5TTHoBQKJYrs4xIqk3JlLoZDDITgsTaidYFUhpQciZ6t6Q5+8FgfWG8GtFcUhaDfdAQfUNIQQn57LuZJaxbUCKLvcIMjxkQvJBGPSNm3G30guGH0hJxBOwJCBgSWEBzBReSqyD2AsVHlkichcv7jCIXwLrC0Lbpg9MtFnPX0wwYfchMHUs4pLRRPniyo3ilAOSoN4VBT64q9/QPet9/xZPEIZWTm8KaSrUmWkYaYm/WorNQxqiD4Dc4u2LT5zLk7ucjB5Ep+72RQT3u44vTRE7p+lWmMkfP3ThJP16KUG9VS1NTNFIJCukhZSkwKPHl8SkSgS4OSgTQYSp0Lbx99VnVYQWcHeuvwNuKCHZuf49l2XAfT+O+2H1A2TxKHdo1dr5lc2Mf2S7rlKkOApKKebme5cCmZTTWTiUYLQQpZ4hidZb3uiKIgLQZs15FSYnCWbjNgfSQKiZaKQgg+/MHneP5jL3L1Iy9w+cYFbOvRUnD64IS7r95jMp/hVcmm9/iYKApFTJ7gI1VlKIykazMteD4tmc6qXDR6RwiO0kAsS5pmms+tYyMRoRDyX5Diar67hdCaxdEJRkE5L+isx7uYddkiEgjIs+IqaiSaSwdbeO84PVkSkidVgcEnulZweigINtJMBKZISJVwDg4PBUIEZlsCUkldb+HDMcorhBbntMAU1HnXOBtF1XiYHalwMi92UubDsrWWUqpxUmPyIVwpYgg53V6Mp2MnWb4aOSivcXn7OqQB71vAEQaHOx0Ydpa49pQnJ6d0m4GtnYJlu6JSBdMLhpQCr715mwScnDxi9d6CYUjEtqeeHyBriw2WV7/zDkwrqHt0qil1xXyiqCfVUxpdgraz1E3etKQUaCURrqMsFHYY6LqeTW8RusoPcPSURvDslUsslos8+dKKotAkq9lsNtjBZyx10kQbEHqcQaQ8lbFDPxY5ESESSuQsp5xWmzfidgHR9XSbyOK45/jxgsWipUjZMCxNoqg0RgiayoAwRCHxIhFjeb64p0Jl/HXMha5WGfee/Ux+lHTq3ME+C/MlEgNUxQwbNC72pGjRShNtnQueGqTOHeOjx084PnrCfHuHT3zyg1gfadQE71oOHx5zdHqXuq45y55SCiojs0ZdeJJ0KJXNqsTsD1Mqgw2S8KNxPFPU8qKaKYZKq5HImGWdmzb7EE9XnuQdhTGYwiBCYHPcsbd9kanbZ2u1x47eQ6ynUEpSKfLeG8l4djGSIdGklHHYZ2CHlFIGO4gxE67K5VZRlJgx0DfEvBCJmItEUxdc+omX+J3//IuIvmfeSDadYOgTUfh8yDir2hJIsh8vxTOk9egXSyI3V0TOlxJC4kLu+N55cESzVpQKSqOwQ6CpCmbThrIyLNuWwbk81peSFGCz6YGcaZFIuVAcD0/5ic2mb5kSaWzUIMg/ux7fbMxUQD2CQ5RUKGNwCdyQfX3eedxgMZXJnpuxMXO2MQoypXAYjcJKSfzQ52toFEPncclilMgkxmaS31/ZcOP5K5Qp8cp3v8P333g9S4GGltXyITuzLUiOje/ZdJa+c6iYZW825OlTkaBfL2lDJlYKQfZL+iFjlEeJj5ESIbL3UklJpQ2qErTDwGBdpuTJCEGPJvqIC7nwlXHIhb1IJKmRPhGkRYqIEZGqMNy9c0wQC5B5fKwROaogZYmT0gXBO0h+HJjkjTaF3E0UMqGNopnOaCYTmmrKpJowrSsmm444bLDeMcSANIb+8IgYI6fPPiLFyJP37uJ8og+ePnm8jAwxT/jTWDCGkH2MZ89gnrYLrHdjqLikaaZ4P04whETJgrJSTJuaSTWBNbz+5buImPjOu2/w2J4SlKCaFNy/e58bNw75xMc+zLOf+xhVVSCCQ/qM1456kzfumMbpCJQmgwKUCkgZKbRhOtUEIxAxnB++nXNEBGVh8qRnayBGz4/eeYkSzZ3dx/zb3/lZ/sE//Ge8/uabnD52KF3ghhy/oX9doL/8Axg+8rYm9fhsRs9Z4ZlL0NGE/39Kqv/YMCnnfEp8GpPKLFEeZYVKK6RIuakFlKbEyiyZVQaS1HzyFz+Vn8+YMxhjdAgghEQXOkJMmFJBiuz9xg67v/7JnLMUI07kZlscm2vP/dL7uXr7Ck2YsFo7plWVg+NDZHmw4t3Pv83spMYMOQ7DuJKb/9ELee2PORtSaYOWGhc8SQZEigitkEpRl824lkcKU1A1VQYHhUDXtnRtB+P9lJtIEWQGb2idw7iHvkMogRK5iVMYRULhEcQkifJsap7OfXVCBYQoxyIyrypSCFzoRhpuzD5wcvPIOZ/v55ThRd5ZlBSUUjP1DQLBk8UG83hJbRQqCpLQkM4iaiRVVWLMGE8RPMFGtuYz6sYQk2foe57Z32PYHPPO/QUPjzsQNZd2CvxmhR0cziusj4ggsHaDj54oshVAJsdZE2rjLSllut4ZZFTLhBRZoqikRONBmUwBFCpPBOXYVJWGSGITOwqlULHNjcOMgiAkS6FqQky0ncPaFlNuEWJLcA4fyI1iFG+++Q4PHr2HLhLb8wmnveKDH3iRG89c5qPPJX7rW0+Yb5kMoerB9T1lU2F9vg9i6gl9AShmE8mOEfSd5jagi5ogDMt2Se9aovc8eHifew9u83i4R10WuTiOjBIPQUoeRumpEprS7KP1BBcCEU9pCoRdsNmc4kJEdZpoe0RSbG/PKWuN0IG2P8F3gc5arA8jhTifBVLM0Ixq2jC0GYAibWC96NgZthBJsD5+wumjh2xt7bNZLjh5eBfvwTRz9HQXbwN7l6Zsb1cIKTg+XmO8xVrHyWpg1UfKCpYP79GeHhNiYrV2bHpLkipLRoOkiYqPffT9TG9cQdUNYeG4c+sew8sW3yWO145nn7/KvScLkkzZsuMT1jmIYLRAq8TxYc/Wds2FvRmTJt9Dq0UkxgGtSuSOor6xA/m0jxQGgUL+/2Rd/ZEprgab2ClnXLw8pes7fO/ResjjxDjKpXSmixiTvwAm2xdxLhGZkDZ3MKEiJU/Qkqpq6NIKqRwZXiA4XcHiNLG3L5nWGpkMfZCIoBBaozToJuF6RUp+7F5LiqLkBwEWYvRQ5c567mIrJc88vTl407pRXpYPwzEBQXDxZJ+H7iFKnWJMTVXXIAqKaou6KpnXBeVCEbzlit3gr1n653uO3nmdB3dfJ1ZThC7ofT60RJ8QokeXgWgbjtsl21sTSlWCXTOsNkhXkApP79bYNqJ1SRoGbG/prcWKAC2s27wd1mXm/21t14gi/5zaJvougxS8TDgid79/DxkrSpEXQJ8MxCnzaUDMss8mxohuA6ZSWCQuCuZl/n0htFmLaypIngGB8gGVItJIXvnma8SFRRYGM21oykk2EouITrmD1ZcaG1uKwjKbGC7sXeHRvSOGfgwj1gWqLChVhlV4H+k3nt5HfCxpakGhoN+AkjWkDX2M9CGCH/CpxysFIiKJpGihysBniBmxi2drW4M06KqhdwLtbA7i9I7Fc
sXJ4ZrObEbvg0CojsGCEHqUhgSquhwhGzpn6ZgC7yNNVeSJmgygegRbY0Bk/go+MwKVEiwurYgx8s7b99Ao/ADJababHT43/WGu7d6g3pvBBUXvLNGHsUgS1EZnupGpc0YKnMOU0w98xfG+HlvI6DE4szKSicnLih+xxwaFGGmK33vzhIMLc/wSunbD0tu8CcYcEyeIhDRe2ZR58+fz4jR2h1MmX0EO9BIiYWP+uzZHKypZo4uCopAs1pbd6ZymmSKQHB0djZLA8TDgBoLtQaXc1UxPh1iJ7EXMjcGsD88dwRykGpNEqoKU8gGmLisaXTEoQxvXdEOP9xatZAa6iDyRddYy257lCW2MeC+w3qMLxdAPrBYrwoEjiMDB/gUmW1tUswmrdUulCrpgQWvqyZwUIuvDU05tRGmN2H+WC9Zxq7rD0aOHvPHKt7j50vvZmezR70Vc5zD3NnT9hloq5vOS3Qs7vPDcC7z+zd/ltbvHrDoHiNwsmZSQirOWfF7cRG4YpRRJtWIYPMF1EDw6gYgJUo/AI5OjCllqFUOXM0oESAyexNB37OgZz21f5+Ufe4nvvn2Lh48e0/c9VV0TEXStR0tJUShUqWiXS4KLowRJkkxNVBLvLSk4fHR0wzGHm2PiEIg2EEVgK0UumJpGZ8R8vT3L6GOV8iQfeEiPE4lUSdAVSVXE0KPEKF9Onu1qB0qN1CYX4CKxd/ECfW9ZnZ6yOj0lBIEWEj8sEcmjVGJw0B4dE3pH8gmjGw4OnuHTe59FysCRO+RLp1/m8jMX+MDNy7z43EUOLtQ8eHgbQUmKZaa5DWti8x6HJysK7bl79y7r/iEudXz45cuUheat269TVxO0LimMQqssGSurApLAxsDD2TH//p/7b//fG7GA/+Bn/wb8bP6l/roi/hlBUZj8mY//cLYiCHKT4kzZoSWkbELX0Y/d7twW0SR8Cgw+EGLG3avxnlJlxun7cXK1Wm1ILmUIj1AcHz7J/ha1wQWLWZ9959nKlBs4oXfn2pIEaCVHOXoi+ITrcrf5K7/0G3z4v/9jfOB//iESkc1gib6n36w4IYc4f+/Vtzg5XhC3PNauMGS5v9YCYQykIheu+RSfO/8JpvMpwXm0SBxcOGB7Z0JZ5O91g6VTgZXoeXDcsXGBpAWTukaKRDe0xD6hKdjZ3Wa1PMWNKhDXbTifmKaxICM3HRJZXpgzmPtzgqMcZYDVrIKo8c4xDDk02/o+NzolWRo4Fp5VWTKfzdCbvKarfonujlCiYLlesTha5c9TASKiZIEupqA0pip49sazzGYzpBD0/YANlnfefY92uaJdD5gQqZpEt9rkaYUQRDEwtFlRIEdAmBIRqSRlVWOtxVk3grdyczIrOrKyhuCwNuJcoOt6Kl1z+WB3pGwGzIh91CLTcsXZ2F0WOYMzJVpnQSR8P2RJsNCYicxyYB8QXqCDxoqWStdcOqjY3jEMfU+hHVM2TKNhq3yG8gM1Lw0riqLGO8ly0XPr1vdpmiZP5/yA82t0s8/SHfPMxR329y/z5CgX3b//jTfYvGG4fnWfmzd3WK06EDmLqa7r8ZYf9/8UkUoQg0LKQETipaGXlnfffgcZyjzpTD3dekFpNCIF3NAjRA6GX6/B2RKpBL3tcjNU6SybE2dPsBibjZFg+zxFSpFgPcOiIxTbABxcfx/PPPcCrY20vmH72ougK4IuGUIiKcfhSnLa+TyR6gZs3+f723uasqTfrCjKgC6ycqV3DlnovF6kfM658aHneGW15vhrr7D4Pwb49hPEFE7+y1MuNs8yn23x+OESpKMkELrIah3YIGkqTbcesCQqLbh6sIcuDe2qo12sSSMw5/GTNUcPj/n+/Vvw12DSlPgi4HsHruYPe/2RKa7WXceTo1P63mJ7iwuOoe+zJ6SpUWXBsHEYDd4IujSa1R/dykQv4dmqOzZfv8ruZ95jfmXJ7qUVTSUYVrmwCsCzU3LAa/REPzBsFO9/8UVO3/0OSlmkikg96u2lRKh8oPJ+lCwJcT69klKc09fO6HNp1ELIfIIYqYWRPjhiFBSyoSprqqYmG+IczkakUJyuOxbI8wDbwiiU0eilQd1p2DKf4HJ5gRvtQ/btKXai+fkEn1vBpccTrFccDpabFyfIwTIEySrsY5VkObVcraZM0DxZtxzhaPcaJqpm7g3l/SPcp7ZZTnJQrw4eqgIjCiojaLSiKUpORGJoO4LNiPTpTHF8ovCqRYpAjWJX1BRcwBuJSxLbRty0QJQV675nebygv3eMLBWuvkA0CqMEN0rFpJqz0Z4VA6vesqtn1GmgDZ5FTGgn2SyOWQ0R7w2kCYvgeSbNGRS0Ah7ZBbZ2RFWTEkQCRaEZ/IAHgkxEA75NtJMVUhs0BlkaVmKFEo5SJSZSMLiSEyJVFJQpB+22AqQJ5LwyCbEjijlKWwyJUkdEKei6lDsdUqO1YXsvG25FVFm6ERVVA2XRUBaaulQ09ZyjkwUxpZGIZZEyMJAILlMGVXJUxXqcoAIj3l+oHHbrQ8aGRtsBE65cuM6NvZt8pP4Qs2qCGqcCCU9ZaQSZxki+hfOBaUTzIzKWVAr9dEPKFc25VyEbDvPmValEqSEKQSkKWhsJUVBe3aO8scN3/t5vURs4Wbas+2GUjoWxa5j9YzE4fPQjMVHmYGOyfEaMfz0hNyxQiZD8mEsFooQkFEFLLJHFyQnXLu5QFAofAsvVME6lMnXLWY9zWb56hgeXY4jY+fN+9jPDKHEcr9H4Cx8cUQj0fIKezyi1Bp017MO6RSpN8P6c7JRQ9C7TJ41W1JMa7QPL0xXRBwZrGayl0JqhXdN1HeKkoqkbVFmgo8pktMVDgsjdOLRGusjQDVx68QMUzdeQ0pOc5c1vfpsXXv4Ic7nDbrVHvLnPyWXFpSON77Ok+ejohMPDBcHJjLdOgeQ9XW/HjTkCWcakxoDSlAKrYygKQwh+zABMaG2yLGyUkQTUD1xBDaQsQ2oDz19/jvc9e40r16+Sti8wHRoOtk6IfkMtEkoV51JKrWVuqoXAetUhUmJSKCal4uh0wXq5ou86bPCslj2OnlAGgspSnoSmntRUUmepprPEcQobxoraVyWFzvuNVAofHKdHp0y396jrCq0gBIluGmIcw7kFDMuO6D01ElPVLE83DCJSlpNMOwserSXz3V2IHtt1LE8X3L1/h+Pjhr3dfbZ3nuHz5U/xleGb3L53yGT2iN0LB1y/fJXl4pSUPGVKaNMwrfYpVcHO1hY3rl1nb19y/ZkB/PPEKPDRkTiTf9pR/SGwbfYeBTuwt5jz8//rv8rFvRd590uP+da1P+Crf+Wr/KU//5fom5b/+y9+ne/vvYXBsLFthh2N6g1rbZaDa3Uu5T7PL4wJYpYfCp1lzTEmgofJxLDfTHBdYvBhbNKQZUOk88m1KQXDYoPzHkbps9iH2DriOShmlJmfFf5nMSjjs5kl1VkN8cp/9Qpu4nj5r39o/F7YtGsWJ4dMZlOkksxnM+aTCr19psnegPAoKagKTdhY5vvPMNuaIAjcevs2O9tz5Bj9QkrM6gLXDsx3
d7h87RovfujDvPfeEUp4jBpQoWcoFH1/ihQdWklUNWFwPe1mDf8PdW8WI1mW3vf9znbvjS0jcqula+nqrt7Zzdk4MxQ5pEmDoqARKRkmKdM0IAuS7QfbgiAI9oMlWqDkheCDZenBgCAJMEQ92KZtmLJoWRYlkqY41EwPOdMzw57eq6trz8rMyNjuclY/nBtZPRQgv1IBFLKWqMyIG/ec833ff4uClLJle7NZMh4PkeTQ9Rh6WpaUGT1HZNc+KTJ7QeXh83iSo1a2Bke5Zwqs12u8tfgYepOmfE985w4HTWOZn65wNp8vLnjqpqGzLfVqiXIdSSW8EEg0hU64bs5wMmW2s4tEcfTomJACXduwOTujXi4geIpSow2s18fUG4tRFVVZUpSaEAODwag/W3qjJpsdXVN/OMWQzs+mlHJQiJSayY5mtV7h7AbbdYSuZrkumO1OGAxKNtQABG+IbUZwEx6t2qxhExK0RqjEqBqwXje0XUOlhwgRkKE/e5Sga2BQjNkZ7bA709hOoqTm2qU9JiPN/PQep6eP0NLSNSvaNmFbzeHeVYRwLBYLXJczoBKOabVDFUrcWcP64T1IidP77/PR25Fm/hSP711gNhXUdUNI2U1yUI6xnTtHfnt+Rdbhban5oebs9A4FQ7TSef9UEtt1xCQoyyEHB7ucnMxztmPIKBVC5zDrvoETKeFD7L0H8p3iXEDpIaBAgjDk4RaJQREYVx1N07B7SXHv9inLOtJGneUGWJYnZ32EkkeKlhjanF1mFCl1mCJw5cJVmoMFx5z1VDxIyaOVZLIzYO/7rvH2r32D03mNi5LZcI/h5BJJSKrhmOn+IauTOVUMTGdTCmPwztH2HgkZ1QVTSIKNNM0KIQLVUFKvW779xrd5L9xhU7RUV7IER29LINnnnv4rHn9wmqvFEnEqsZ3Fdw6f/LlIv1OR0iSUBJF8zgeSisoIKlOhpEBrwaiqSHVgeXKMKNbEFOkKQX2WiEEjC8FQB+I7l5EpYquGegBCDtG6IKaOEEPPeZb5wCX2fPr8EB87SIA+8Db1tBX6Iq23pI30lMA84ZdCoXu3HO+21ezWSW0bkgohCmIShCiRrUBKlSkIlcY/exGzdgxXHmKGJS+1u8xWFYsucCkpLm8GjKLgrPa42nJhWmLiEOkiyTY8FRzP7hc0qsB3nrBoEI86JpdLOiUQKqGd4XghUKXnQJWMoqTtalYXK6ol7GwC46YjSokRhjTymBgoN4FqlSiLlmpcEo3gtBOQhkhhqYJEM2FOTXupIvohJsFQWIaPFHbj8Rcg7A1wZkz5/gIxDGwqwUQLnPe8uJywSYk1niZZxk3guh7xlnXcih1j53hGDAgIVibSDARXR4pwX7JUkblMHIvE1d2SGCPHQrFEMdSC7xooTmqYx46VaBlqwafFHpsEGyKNTJjYsqMNPoBN4Iohg5iQhSFJQzQl0udsq21opZSSkBxGleT8r4TWQ0yliEFlswEnaFKXLajJOhyFQQpN8KnPukrEKEhRIrVEq4ykKlmQ6BiNC4oLmo/ECdPJgHGxzzPTazw3vsFOOT4PwYWESDmc0UUQMSJFQkmRLfRlz2Ilh0ILZG8wsdUfgnURQaZ0WpULnrY3HXFdy/LxnHWT+PSPfgaGBe//7oco5+liQEgoC42PCWsDUuS1JgToQlNWJa3t6FzExUQSNmuz+5H0OaUu5M1/G3KqpUJHwEecz2SPCwe7aG1Y1Q2busm5K/Tc++DPDSW2ttKJhBLy3Kkxu1ie24v0BUm+FqYo+gYehDagNNIU2SULhSIfKBmF2z5REFzIVLPetdIUeV9IIRK8p21qxjs7tM2aKCSSPFntwhmiKLLVddcw2t3ldHFK50cYXdBszhjYnNM0vXCBa6+9yoffeIN2c0ZZGmTILqHPy0OWj4+wSeCl4O7pnJPFGhd03xgFgndokd2RtvhlHpiLPvx3a9yzzaXqUc2Y+sw1yDvik8/G6JwR5nzi1Rs3uPHccwz3D1jpISeP17RRocspopqglGRQFJTJ9fiHQCJZtw3lcMR0ZDjcHeCTpxYDxGCXUQwoDWOjqVdn2K6jbVsen855ejhl0HmidXQuW5LHEHAJbK+Bm17cJQWFDVmfJtGQsjZnMCgZDitOThaMR4MeNc5U9RByASiEBllgnKdrNj3diF5InRuNbFmvEEqyXCzofMvJ4pjWd+zuTHhVv8TqpObhu0e8PnmT080hs9EQJTMVztuGHQQesCnQRksXsr11HvDlIkTIbU6T4mh3zj/7xOuZhpYSdw4fo6Picn1A/SVL+cGQ4c4YEQXDjwYM9irK5slkdjtE3K4HIBs1SXFOE84U+Jg1eTERBedolxAyo/FS92htwrqQqZZxS2cSmX5I/n85OiPbukshkLpHK1LCDwK/999+kyTymhx9OOT5v/48sB04ASLkwPYE7V6Ln/jzAQpknXHbbOjsBoLG7e2w+r4zbv3J7Bb49l96l8WnlvgycOu/+oAXf/YFdi9cQqlIvVmcG11FEbOcQAhCl7PrKjNieXPF//rTv8hyUSOIWRtIIDaO/f90ihcq26MHT7QdCsHjnzlh9YfW55tvWWZDInOv4Nm/+TyFKZ6sLUGfbxXP0azoE+ulzTls5IYvxJwb5XsKsFQaGhIpUQAAIABJREFU73P21rZchn5ALHKQdQihr0cSvlvRbMpsONWH2QuZ9dValxSDEeOqoByO0WXJql73Gk2HbRuc6zKdXkqUhBSzZXyKDuuzOUeiwhSaruu1aP3nFGMkhO8M5Bap17qnvuUSknoVadtASorRaES7rqk3DaYwmQVR5B1kdycxuFiCqBBCMiwiUjqUVOhihNaevdmItrPUbQA148MP1zhvSN6TenmIVInReMDe3pAYS4gKo0uM8WjdMpsNmM2uUg0M9++d8Oa37rCYZyYQbEO9NW3TMZ1MeO7Z53n+xcs8OjviDfUuUiesW3F0dIvjx/c5ONyB5BBJ4xwYnc3ekDkvU0qdddx9KG4KCaFc1riGDhccMeS8rRAjUhl0UVAOSsrK0DQtQqp8JgfJx/XAxIQ2/Z5Prw12LeNxgVLZ2KooFKnO52VKedcrC4GSHdGf4TtP7NlheZiVv7XsTaW0yE6UMQS8t4yHE0xpUDprtgsdiV4ShMYpyzrc5tY/iNgwpCxnFNUYOZrQtFlrHaUkFiXlbJdkh7hUEIMkKZWzWXXWvkMiyYBNEp8SWpcIEaiXj7ndPcAOVjk6JeR9qas92HyWe9b8qx5/YJor17YkbykV6CLzepMwSKXQhaEsNJmBn6lKqoe7tcp6kyxuLunqNfVHe4RFFsgPSkW78iQUuhDYk0D45iFGJty0oZ45lp1DapmpJiHkRkzK3slnW/Tl6atST4wZtjTBLZK1ba7oFz0pHzJCZf1MImP13gdCbJ9MjYTMGRMqnfOISTIf7qE3xFCSwmriZkLtS5pQssp5iNhac7IMHLU1swpuPRYc+o6VtZxYz/4koU8l86bDOstMwygq5KamDh2h7QgOxK0NaujBJASSYSvRZaIKAdkJ2nqJf3Z
EsbSUG0tZe06tpBpLzI5C2Eg48WycJI0dQrd4oTlJgskisb/bMXaKYiUQPrGqNMWxoloHBs7T3A3cP14xWmt26pIqVdh3a+IVjRxCoRLJR0a3DeNCs1dKvPFcbmBHaM5SxAnFRS253BZE56hHAqsll6Xm8SqRpgPqAuaq5Wo5Qq0V7yXFQ6WYacVndeIDH7glJY9KwVMx8hmxw70YuCscD5Rlryy5EAs2SrCUgloZnrUOV8FSKNZCIWLASIUTDq0Sg8rgk6aQhiAigYiQMgcu+uz243zA2g30h2JKnOuuXHRIIbPeCpO56P06yEGpeSlXZUVZZNqH0QUztce+PGSaZljnCUS0yjbYWkpI0HQBJRNmi9KG7Ly0HWWGmDLHXgqsj7S9ONT5/DwhU6aqAaerluHpkriZc3L7DpNnnmM0K6nXHct371OWgnXrMVIglMT32RlC5uGJVgKjBIUkUxZdJJE1hiKpc/pRFL2JRN9wbWumbXEXUqZ/jocV050JPgnWdUfnuvNhSaIP1IzxPHiZvrlKYltMpu3HkR8fA++A86yxEALOeowLCOEJLtOFtNR5gp/9/8/z48imQ0QfaUN3bh8tgOg9bV0znk57dDD1+X2Oum0xYpDBO28pTBZ8ewskD8HimjUxBKrJDrtXr/Hw9i28bfFtRxegjZ4bT13nZLWmsZEuJuz6lFVriSJrP/LliP1eJ3qw8lyBli+FyJ9hDBlZ2BZ4iTyAOu/Oz5+fm3QtYDqc8eLN5xhfvEytSk7ryHzVYLTByFxAJqOoRhWF8rlIDBCDpO4co4FmtjNktjvkZL1GVxMqM8o0JQVXL+zh6hVt3bBaZXT62ekF5nfvU1uP1IbOthATHdBuaUNlhbeRFHL2oUwKhUEIhTaGoqyIcQ4pngeS07tdojX050AxkUhbk7KNVw6alzm4W2mNUgapJDY6uk1DbRu6hcUYyVOjSxz7U1bHNe+/eZvj+RHf9fzzQMCFFqk9w+UutfMUIbAOHbrrsrWwyNQpJXJuUYp5qLdSno+mJwghuHfhMV1pOTzapb2XOPr6ghhN/zln51E61VPM8q2qlMrhrlci/rkc6SGEQL0l4bEgxhyYnWL6WOGb8nrcDh5lLtw763rdSR5mBN9BcPksPI7s/doeofUIIemuW85uLpBCMD3e7RegICpY3dxw+vlTwiiw+5Vdnvvrz/Xnbo90pDxAYXtfptQzTLZ7hacp1px9ak7ysNyZMP/BUz78Y7cgwfpqgxt77CXLg595wEs/93JGUu2G9TprdYLPga3uisW/2OLrTM/TVwfUn9/wpc/9KgDTr+0j1wo/dKx+eM6L3/sSO6/vEktP8/wCbzu0lLSfabEvWZCJ5Q8sGf/OGLfvkFJx8fcuM/7qJUSU5/zlrVNcplQGvAu5vghZR5xEZs6EEHu9btaBp+SzK+/5hhbPTUkg51ZGtTV/6s/eXACBKkjInneos3GRGRBRNG1HZ1dAptJ52+J9ixBZQxrDlhKqGI0M9arN5mVSogudzTt8bupEvwc/6av6WqtHKbcUVSEV9cYTUqIoB0x2hsx9xDpH3XQIrSiqvE8///wlLop9pB6gVcmoSGjlctalGlOYxGSiEVLSeXh04nnvvdfZUphScsQYmUxGXL9+gxvPHNC1GwQVEnKDpUqEUFTVkOl0BPEtvvH1W9R1y2jc65akQGtD0ywp9gY8/fSzfM9nX+Vo9Qip/jbaaISKOLfGNmdAx2Bg8Cr0GuOt9uxJBAKQ95i++YwxYIwmO/dvmVfZZEkZRVFWmLJEGQlt3rdUn/eZqaZPTJqkfGJWHBEEHygL07OscqwLMWukm3XN4vSsp3DWdO2S6EKOZUgahOqHqblR3g7jYsiu0c5Z5HDKlgJLigTXEb1kSSCqlolp2Lz/mNGLr1Ls7JOqMRQG7xdProWQFIMh0ZQQEr7XNer+vt1S/33MA4pIvq8jjnq14YwFhQkYlehiLrZd51E+S0Sc/9dEc1VKwf7uDjv7MzJXOk/ss3A4C8ptytbBMQSc97iU6HyeRAYPMXpiXSCObxK5QUowOJii3CnWedZOsEiGQIMSwL0ZHYK3Bg+phEPqDDXbJpOVlBC4kC0ojRLneivg3JFH9hD9NhQ3pa0L38edwxQhCkIUmbKy3hBFtjrOB4BE6SFKy15XIxBSEaJH6d7yOgj8umb9rTWnh55yBGe9A9Ib68dsli2Pw5obs4LXbzmuhcBgqNCXBuw/LXnznSXLSlDNNDdNxdtvz1l2S8prgvGNilJNufNP7lB3NU3hCNPEp3ZnSDfhzrKm7gJtWVP4Oe605UFMdEJxNxa8ttqgHh+y3ggeLZa0r8D+YUF9Z8nJkeNe0jxz9zY/MJ5y7yxxe20xn4J6Ldi/11Lc8bijmg2eL/s1L4wqXlwa9t70bC5fYPzymCJtMMdnXFxX/OqbR+wOB1zar3j6cIhxgjtLy7BUfG6yw/dORvyTd49RynDNlRxSsLpvef32hteevcLn9kY8Vcx5Z6OxzQ6HOoERXJpU7J2s2F0lnp7uYaeG66szrvgJ95qO95Pn/cryfbuHTBeRx/tDHk0lq/aEP7bZpS0i77eOb3WRpQ6ooFA2MSwEs50hu/6QrgvYrsN2LiMztUSrEqUNwkhs1zAZj3DO4mwgeqiKIdpIBClz0Msh61WXi50QSclS2xatSup6zemDNSklFgvHlW4MvqSWgc63eLIOYVgadoYVRSHZrBvKYYFQmuQTdt1gihw4HIG2yTbUutScrQL3HrXUXU1ZKQaDIVpKNjJvPh9+eJ/w3pKRPUWsH/Gj/8mf5fHXHjO/dcysUsxTh/ABjQDraM6W+N75rjAFRaFxtmF+b85m3SBMQTmdIr1GaE2UKVNvU0D4QIoZYfb9xr5abRhqiUyRru144cZVlDGZcls3pOAwQmSWVOr1UyI7lm01ZCk+aX63msqYUjZb6a2St8bUCoGICde2LE9PicGjtKZZLrJ2zkg6Z3OYrOybkJj3N6UVIQa6rqXe1E+mtb3oPaSAMtn8otQFxc4oBxbLjBQVtqTUJfs7Owids2HMZEwKLSJFXBdo68DsqavExRIpJG294d7Du1y48SxxEuluHbG6vyZR4LSG5BFRoNGUpsJ1Du8dKQaE7IvmKPppeUIolS2cY+6/kw8kZI+QZi3i9uGSwneRcVnw+Rde4OL1a9zpYL60NE1gVJTZ7CKlnHuWPDtVxcWdAaov1F0SjKSkqAqqUpOcZ71YUQowGprO8vhoQVWMONjfJ6UlrnHs7x8yLiru1RvWbY0eDYkbD0pRE6mJpASn8xod8hoZKUXyEUV+Lz4J2gAuwvHxCUUxRPWB3kkIlMn280kVVBNBuappmw4pBWWZHVuVNhhTIGU2L6nGU5JSqNoSGstHH31EvHKViweXmNqG29++zVc/eIPoPavNkta3HF47oI6Wk0VDMZmxSZG2OcP7bO0vyLEO0QukyMPH6sGAf+9/+yJaC/7OT/4ydy8fYVeRt//3BYNqCiIHtQIgEs2jDW6dozOiSBhjiDHS/lFL+1c89KDW6M9WmF
+W/XnYx0z07A0lU49bBiASg2PTNJy5Dh1NPxzJAwH6OIDhmwWf/eKnsUWHKgwP/sQjvv7zX0c1ius/8SzpOJsP6JXh+37sD/FPf/efsX5pDaQnaHZ+E/2XHj/9eAG6vR+lZ/HSknd/5R1EI0hldiqVjSRWkZf+g1f48Gc/4OinH5GAuV8iHjykrdfU6zlGSdpNSywFj//NB3zwC+8g21wffLP4GoiErCVxEHnxr36W4e+MWdw85mu/9uu8/T+/xfM//Sr+0PLhL7xHGuRrf+MvPsu1//pp4tDz1Xe/yjN//ibzP3rK3b98h6/8/d/mC8/8G4ilyE1KCDgXzt+m6Ivg3OBn12MpFFFopAnnww/vw/nA6DzdI318FhKIWOoqUyz39i9x9enrCJnY1Bsam/AuOy+23QZrHYtFk6+5AGTeQ0kg8AhaZEqISB9rApPJLhcOZ9y9e4e6bvC+JYSKlOhrLHHeEIv+97BFxvsGq3cHVUrThhXVYMBsb8bhhQO0SBwdnWKDo0Iw3hmDgC9+8Ud55exFoigxZkiFApldUoNXVGWBay2zvRlCCb76ta/xt//W/8nO+BJaFbSxpes6nnnmOX7g+3+Mmzdv8NFHDxiaPWbTAik8MVistxRmjFKSOx9uIBkGA09rN/hgMUaxMx0yXx0xmk6Y7V5gZ3yR5XoDZJSoqipKU5BGgc7BcrmkKc4YHyrq2mO7eC4hyU6NmT4tROqjJBJVOcDHbHYRU8x1sw8MBobhYMxwMMprVEhiAutyTSHEdhjRn3chH1yCfpAYE1IWmUsgFaaocLEF4OG9u7hvWbJTZabekTRJ6DyEKUSmDfeoJBECPlOIZTYKatoNzpU51zZEFmdrkoD3/CMKM+CTl1/kuU9/hs7sUq9zXp5Kicn+IVprVIyUPiP5ulLIFCGprNn2Hu/WTyQVSRMTyD4sPaSOrq5ROuC6PLCIw1zfCJkb/zzE/Ze2lO94/IFprjbtkuP5KSsbcEHge+epHOboIfbTQXoKXsyWzEKkc3ZxMtm+lJAt2LVWzHaucbZUdN0a2zo2TQGyJMZs3WpFZDUTSG0xIqeqR99TYILvJ/y524/b/BuewNUhbDeBHqIHwrmtaqaC+BCIeSyAkhUxgGtqYsxZO1JKom+xpB7FMig1RKoCZEBJhRGaOEgYbbi3guWsovr0AYjbXPjTz/KJf/cGT+1dYrZ3mfmjU4QYMxxNGO5UfDC/RXznA2KaofUULTXNh3d4uqoY7A9Qk5J6nqhvrJmoJRfGMJ6WNM2S45MCnSIDI7k4HlI+dcCH73/E5rRG2MjLO4FhKbl1/4zGBSZ7F7nxhUvcP244eXPF8vaKvfWa177/Km+813D76IxVSHz2e6aoNOHu5JjT2YoHm47jo8CVLxygbk543Fpe/+g9qv199q4H1q3kaGE4fndON2549WXNMRv++fuPmIs9Hs5XPD2ecDiv+H++seZLZx2vXB4zOpLE2zFzt9OKh7e/yRsnIy4NC/7mm+/xCTXixmjA3qDiK7HkoatRXcO1yZibZzPONpa/29xh1sELowF/4sou9lbDu6cwLS/z6sVDZuKQ+qt3uDV/SHr6kM9+7yukH/k8v/nlb3Dv906plxuGY8dy/Zi2XVLqIaPhmL2DAbYTkGRPUfKMiiHBSWLIuqioPF3q0LrM7pRCEoLDFJHMOFRIYVBVdh/SqqIs8xQ6BsXu3oyiNLSdxYZIh0MBwWpi56hGhmQSZ+sV/jQig2BUeMIq52AIKeisx6dIfVbQdJE2tDRsiLFic7pBeE8a1ZDg+kHBle4yO5df5dM/8wN84pXLfOXdL9MMDcPBPleSZNU14C3JO160OzRe07Ydi+UZZ4tjuuWGWVmwPxgTEXjp0BPDgwf3cTZCkkhZ5I1f+Oxg6PPmV6WOSu0wmUyYjC/w8vPPE6RiuVyyPjsF62mNyIG5AqpCc/3pK9SrnG+y1VltqSnx3K5YELLn8fmhL5XAO4dSktC0NMdn2OWaJAX4kBuEEHrtZd4DhMxxD97bHBWB6KlSvSOhyDqztm6RMVGags5a5mdndOs1UhuGowGFMhASH7x/i/HegEoVlEXFbPeQwjjKqsTIyLSSHD5znaM797HO4WVi99Jl3nr7DS7OLnH1het0N2re/PZ7HBzvsDRrRCkojabtGkLIlueitzmmL9piDL3mKNE1NUVRYPrslKIwRB9yI5oS3vfUWNfxyosv88p3vcT+tUt8/f3H1GEAGEwpcd4ijKYwMK0UV2ZDDvaqfPj3zXBJYvT0VdZty3y5Zn62RivD/pUZMkZs2zAaaEaVxDZrTk8eMD8+5rnDpzi98xDf2dw8W4tGsYwBPyxgVCKE4MKVC8TaIUP+ec63nJ4uMEajRMK2Wc/YNBtiTGhV9s1VQjrbN9DZa7TQhi5ZQoi4GBns7ACSTdflEFhywz7ZmZEGntB2DOdn3Lt3h+Viw+H+AZ+6+UlGtwTvf+MbBBWRpmRTW45u32FxbBlWkQf3YPe6p3Ma4TJd0cYarUq8C328CPiYqYKdf2IhHL2gqVsiYLsOISXT6RQtVhQ9JUeXidjmdVH9XYP5DcXqt7PGcWsAsz0TXQwQRW9VTqaQbSt/JdiEnJGmksqI3/n0txfLZ3eFvNZ8to8f3h7yw9/9wxQ/NWDxaIk97jBRfkfD9DHi/r9UW5wX6aLP3usfD/7SPR78F/fAw3df/27e/6X3GX844drfuM7rX/tydrvs+804CfzO/a/wye8NjN4eMihVX3dITJToICnvD/iez/4ISsHb/93X6XZrnv+PXuP1936d13/pH3P1v3ma3X+8d/7z3/373+LwFy/y3Z/+NG+8+VUQ8OEvfMDtn7+Vn9CH3X+cyti0DcpmnF1KSTkw5zKEJ59HH5WREiTbN7HbmJknjcrOeMygqhhUFdWw4s1vv3n+PWIUILIVu6gO0dMblIVgXb/J6cN754ggPXXaGHMugQshm4ckIlKr3n7e41wLQlNVQ3Z399GmZDKZEiO0TZtRBG97c6cnDfH29ebbKN9LW7opQtD1etlqNEKZisWqpaimIJYZKVeKYTVBICjVTYbmNYKUBDzOuuykJBVog1WG2rRsVon1esF7d5YcXLpCVVaIomRcXublgwN2rrzI//IrX+fs7Le4e/cB9+7c5d/68R/kxReucrg3RLgVi7MVq9WGt956l7PlitF4wKbZZLJlTKzqFZFAFx3v3HqHKFcI2ZFSpKst67MNG9USSVRmy44yeGcylbHwRJ/wXgB5iBJdDQRQCqMN9VkNriCG1Gd4JlSKRNcR2obJ4CKTYcnZ8TFN06GVzvdP3K6qHJcQyUivVtkNU/T1byJhneV4/Zjf5Wu0dMhra8wnCqSqECKHYY+ainI5YHnWsFicQsqOpeOdIZeu7lONFMdHc44fL1gtPeVkDC6QQmR8MOBH/rPPsDiZ8+r6KpWdcZieodF7VBLE2CDGhkmpsMGjZKI0kskwM918dL0UQKCTIIhEMciSCxElMbR0yYGXWQ4UfcbMJMiiyK7FITeOPgZkr3Ev+P2RFN/5+APTX
LVdYLPaYLutlet2BpV6jmamyNGjQjEmguAcihQI6CSqd85JSeB8gVBdb5UucC7i/AalepqRyDlIVakRQfQFQ97QC1FgipzhlFKiqiqstX0was+f7zMvtrqJbXMVY86QIWW0K8aI0IZqOGIy3Ge0mCCHDucczgWcz6a11cD0Qk0IqWU0yvCyEJIoQKJIHlxVshkb1gdZZBfbZ3l8+hSLpUHeXyPYY3Vaszi7zenpCXdvv8369Aw5LFGDCrTGNzVVWZznnchgMKMKkdpcrCqFi56d4R5JZf62wlLcdygHpDFSK6wS3E8T/GGHkgFbJj76liX4KXu7+xzuK0pgPZwweCXy4sseIT2ijMz0gMkuXHwx8KK1NK5DV5pxMUZHyfWf/AzrdYexht3KMHpVcOXZNSJGDvd20DExfnXF3qblaempkkYGSF3NDyrPtdkBoRUsz2pae8rISKbTGUoaFk3HH375MhMU08GEQhbUixVxXvPMhSETI3loHe/eeYSZvsZoUJA03BOOD1YdbtdzpufcvTenOV4Ro6VTBYPRkMkwsvnd3+DlwwNuvHZAsypp7ZJle5nh+EXq2rBcOZbtEaNhSUgSfMyDAUCpos/jSkgt+0wwj0i92YnRlGWe8EgMQhi6zjIcTvHBsjIblFR84sVP8/L4JZQ1bDYd1nWooUYmSEJgEX0B1qGEROmEUIAqKE3KwbMp5kI9FSzOWtq2I4WGqzsDdIrIQYkpxsiDGQgY7V+k2lygGE/Zv3SBL/3ib7N4tCT1mqMIDMoKURT9nxPTAO+88x7dpmZYVLz82ZeoqhJEwtqWxdmcf/HlN5jt7jG5tMNgOEBrTbfeMDCa6XTK8pUVvyH+D374Bz/P2I/QRmEKlQOPXeyt+guYjXteeuZ6Q2I8myL64cmWwhRTOncp3dJ8cyZN6guY3oK7bzyKQcl0Z4fgHXWXc6G8dzmHxfs+Ky7TFVMvTvc+nRemeTr7MWfE4Kg3NUpmCkboOk7v3kUITTg8QGlN1zRcf/EmyiiIEoHM4cblBFBIrVGDEi88tXfYusk5d8MSt+xYLxd0zqBKxQuv3OSjtz5AbnLop20swXeZOnQeFp33Ju/duSV/jj6T+edJhVJ5WFBWGqk0g4Hh6rXrfJXX+eE//AWem98EUfL612+TyhlS5IGVVIphNaIcacpCMiwVsupDThP9VYmE4EgBonfE4PHOsV5ZuhBzbllIOJ9pM/VqjYieg9mEiwf7fO31N7AhAYrYelSp2ThPkwQuZWr5tedfQNQty+MTlidnVFWFMpKQIo0NhAhKG7qmIcpIErkLkzLfa4nsipliwAxKaDd4mzP5oiTrCKQkSYlzkcHA5PWgcuBy2p+i7j1is+k4OnlI1624ev0mnGpOh0csBws24YhCT+mUZ+XXHK8eU59YimHOw5FKY7QB2UdKCI2UmlJqIn02G7C+suaXf/mX2SIO3TTrFeZLSzbByzoInRyWbCLgfyJQ/7knzVn7sx3hNU3517I2yP49R7yScd3Ww+iPZ42F/bcj/vM5CPxL/L+88lde5uBX94Ec1F4UWdMTvKf2Nb/9q18hVYnuQkd3oeO3fu23+OJf/THSMq8RhDh/H9tHNIkv/aMvEau8jssHJZ/7qc99x3M+1qeAgtmHe/zQz/8I//Qf/iPqV2p2bk3RKZdFb/7SN+me6rYLFEzi6jPX2ZP7hOg4eviA+fGcO3/jIxZfPCOlSHt2zAf/4F3Wn14yfH1ErPPQJ+lEwKPfMrzyQ5/krf/rG1z52Wvs/coBKjwpw67/99fZ+4d7tLrjrf/72wAc/o8XKT+o+ODvvNfrH1O2AIp9JmCvX9m+tRwT06NI5GEI4gk9kphZOU3XMhhWKKNw3vVmJVmL5mybkQQS69URJ0cDtILl2WOirfP1F9nIQyhB51uMKSiKApkkKvh+vxC9wUne38R2v5CKmCRFUWFMTdt2hJDZA1KoXk+WKaDb3nzbSwupEEnmayGha1pcyAZjplAUg5JuA1UhGU8qDp7aZf97L4MUnM6POTq5T1QKHx2ViVgrSUJSFJJHR0f85j9/k9XaI6WkGgR2L1yiaWusi4Sm497qDo/vnrCuO6z3SB3pmg1dc8RmoTFxiOscQlrO1o8JeHb3DrFdR91aSAbZDzu0GiFDw/HDB3jbkMqM7njXYLtshpRUYNVK1jtzumqDkC6zC/SIgCP4rZYwIqQBdNZMK8EtfZdde8AoDnPjEAK5a9A4IZmv10SluPpDNxFjgRSRQmeWhVTkuiAZ9EDT1jUPvnHEg2+cANnGP8WEDwEXA8996jO8XXyZC0+/wLOffBHnJEm0kBIFEukj3bFE/4sG1/j8OivDvQPLYDigPcvfK8Wa1fqY+cGGI7Gk7SLfemvDxkuu2EOGxR7FbIIwsF7VWJ9jdAo1pImbHm2LgEPErPVNPpJiIMRMjQ3B433O1wveEcksIakS3jc0XQOiBNVkun3MjZQQij5Dgv+/xx+Y5grAB0/q+pZqe5L33XNIERnT+WDqPND3HC7uv0rVB/2J8/yinqUNeAShnw7nzVnogJYRk3TvKJV1VVpoIlsnwJBFjPKJEGW72PPLyC8qZ7zIc4vl7QYR+oBErQ3GVPnwUwqpCqQOSLedwnBeuAlSznPpXY9yeK3G2TxVWUjP8oMVCXjz925z/8MVEkmUnqqcsFzNqTdr2nXNen7CctWgS0VRlSgzQMvEStbIADIIhNGUbYVKGSWMKRCEZF1ZkszFRXABXZYM+ol7VKAHhrIoCCkXFaTAIAmiLDItsl+kMsOMKAlSRmpr2TFDvFYoo6i0ovMdrrbI1NujSk+9WCPnEqk1FIqkbc4J22TdTrCBpo2ogcw20EniBhNsWPLQRYQwuNEIXwl8cLRoUpK0oqAcXqJVEacUEgW7B5hBy3rHYHXm04e30P0YAAAgAElEQVRYsb9/kcnYgEkcx456kTUfSghEiOhpTXi6YtpFzKUJ8nCKXiy4fuEC5mJeYkJGdJFpR++/fxfFIy5drjIcrjLSmimiualKMRevRuXN1pjsyOZ8wCbFYCT7+ztT1FICoytCtPgLHVJJXn3lFWaLKXbhaRqHsx6hBCIEHAKrAi0QbEQTUSJTvRovqYpeM9Fn+cznS9bzNUpF9mcFB/tTYsjaJKE0rtzSvzSjy/vsP3cJow0P78+JNva5vHkVZs5zL/QHlExM92Y4Eq31CFWS1BBdSKqiyBTJruHy5QvsHuxRDQZoKUjWYmSmpclZXoN7+zOGdvBkyokgJp/v250dUqnBihwQS498yxyKuN1ccg54plYI8fFNVJw3WnkPIqMtPbVQpYxQ2ZDNEkIM2ea+/+pDyMGLMYuQcxBkphyyDSMnD2tkSlSmyPQqo/q8uExRLKpsSlJSUAiVs7dS3uMSHd0q89dt07B89Jiua6jnC4J1yD43rYgC2QuIcVAazeXrlzmwrs9OyQY2dd0wHmdzibbNRZAbBepuTd1saIJlvJngvO33qUxP1OfsrC1XCfZ3DyjOSs6WLXXjMMri2hrvso5BICgrQ1FqqrKgHZVwOGWg
c4aeVBKXIIRIbSMugNSKYpCyEPpcMxvwTnN2uqDSktloB9s0rNdLtFRZKyMhaqhtpAuC4CUxJR7eP2MgFE0TaW0gRIeLgdYGfPL4kBts5wERCSIilCB6T9VTPaWE1jqSF0RU1qeECD4gC5MDloVAm5xp5xy5gZQKUQwYH+6RxJy2bpmvFwzqEQMxZdIltKmYHXS0zQbhE6HzdMsGl2rGu5mrp4TB6OF587+lv6SksubC50Jb1YqLv3ZIiIl7P36P1dMrdK358p/5bSDw6LVH4ICQm8f2pzritUTxuqZ9pTeeeCbh/oiHdT63wvdH9Dcl4iG4n4x0fz7kZusuyF8XGK1Z/odnfPSnbjP/1CmkhNKKl/+nVxguhmwurHnvj7/P2efOSObJ2jv7zBnJRYzVLG92vPvj9xAC7H5GvtrLLe/9xfeYf25+/v/M3PDuf/4uz/4Pz56jIL+fHag2kuHXJ9SfaIiDjzUgAjavbfj9DzMcU41n+GDRxQkxeqqvVbTPljQvNRAs6+9a4PYd3bOSB3/u7nnNsvrCEuEFKEVSierdAcVHJf7qE37R6P6Yg28f4CrHWyk3V90zLevPrvvXFjPtdovs9CWRyNOOvibJpjUZ5ZHn+nGtNcSEs47O2jw40ZooBM52GX0SIgdya8O21rGbOZvTPAzrmvpJZme/70ktMcbkRqkoySi/71Esj+9ytIhzmZqIBKV1RkCk7DVUiRRS1tD2e20isSg2fZO1rackENmxY6TvBxIxEc/dSnsdX/AMhhWz2ZTJbIdG5vNpvjrjaP6YhMb7SIwNrc2DykIHTuZH3Lv/IZs6YXTJZGxoG4cPCe8ctq0JwQJzWp+NmSqtUTqyWi45eVzSrWqaBnThOF1saNuI1gPaNiDIrrVJpFzk+1wbudqyiGcsQkdKiY2piTrrTEPyCKmQZUCUGSigP2u2iGzaNhRCndM/E4J1XOMDtGHMHjt5L4gSay31pmG52HC7PmbczHrUKqC1zLrQPtdSIigVICVnquFUnbATZqzXK7z3dK/W3P8LdxFXhsRR4NEnHxF2wXtBEq5nZGSpTbtpqb+wJtiA0oZqdEx3IWu2wnGNnzc4a1HK4PY168+c4SYN3/ziV3EJjuOQiRlSDYe5qW8soddsDYoClzqW0wVsZTm9+Ys2GWkOMVDXbe/RIEgags7AipeZ9he39y+CLDtL2RW6vysFGUxB/muCXIleQO37GybblCf6k76fCG6JwXkxSvL05JwJkCJRBCSZC7pFlURKkELP/41I8nREkO2ESx/Qsg/n6zm8UqjekS1bUAuRHQRj2rr8bX/1P5o8+VYymwxE6Bs7QaS3xkRjgiaEhDEaaQRG5eA/JHRtR/Ah5wkqQVu36CIXFkpolITW1djasd60PJRzSPDO229TvTlCJQ0qMFCKVZgjVWBoSiARpEPLnF49VBWmNNS2gZTddrxwOVwUgUiOmFoikma1JomKmBLeLlEDg+o0IYTspqYU0xG0XtDazAG/OJthhaLetLStJalIlVyPGGaapw2OHTPGmuyyNywHdJ3HrdscnKizs1jwHhuBnrsrC0XwklJn6LcwBU0YoMpEobLBgBqMWS9PUaqmKAaU/VS0rVuirBFCYoSmSkPCSODjBplgOtjNPP865M9Gl+hxPjC8kSwKOBUGHwLF7ogYC1QUVBcdnZowCA6KhE2R6Eq6TqImY4bjEdPplEuHV3jja29RFAvG4xOef+kS69UKbfJCDf3dYm2XkVAhUUIjkexMJzjbUdcty06wMzOE0BH6AYIpCmIwJCKrwyy2vrL/FHqtCCpSGI0kN1KEzHuPMrB2noHMG2nqc0uszeLj7ZDCec/J8QnSduzsDrm0P6GcTLFREmLCh0TrstWts5Fif8T4xj5tYzlrO0QXUUKdi2V1SsQ+Ei4mMBEmB4dYKfDzM04XK3QrGI4HGK0Resh0d8qFy5eZ7u5SFCaLh+WTAVKSp/k3SoF6svZDCFgfchOoDcIX6CJvmtuVG4i9Ha84L1ggT2SlEOd/vw2uzBtW/rciZOG9kJknrxA9772ntISQ65AIPsZeGxZQMdPlQsxoFiGSBDlTJUZEyA2pDYEkBVJpLlw4JEaL7SmLJikIWdCuZEJKsjZ1vSaFwEqfcnvwbWq3Jlxsn1yfQjOthqSJIwpFBM4iXL50yFBXGG2QSlPXax4/PmZ3f4LRhrNFIi084rJgtXCs5gs2vkEfC3wXyEYWGmNKWmNwOw4GcHz1MQDH+yfUpy2nccXqZiTqM5rFGW2d7ZN9CGhl0BSUlOzIESeDS0wGhkFZYIzGylzkrGuHtZYYLMoIXJvoOtvrwzyr0Q5H7h77OxPUbuR4dY+jm6cMTInUCq8EGMl8U+NVpLnckGLkvTc/ZKeaILB4Z2nrJc4H2tYhrMSHzJiIHnyKqOgRWmc6E4ZqUKC0IYSWpvH4IDKqmCIqCgbFAKFF7uCR2DbifECErKeURlLNdogu0+IXyw0PHx2zMxlR2RkDdhhdizxa3WbDhgGGQTTEFShpCNETJQSTaZZFIXttjse7hFQF0fVBvScl3/3XXiMQWT+3YvX8Cj/0fPkv/CbT21PaaQtvZ2vw9DK4L0bUVySDv6Vp//2PUQtfSXQvOOS7Aiag3hCoNyXuT0bsTyTSAehfEZS/oJAvGbo/Zbn/U/fRf0QzeDhg9fyKww8usfsgsnh1wTs/9x6jb4+QSOyepbuU0aPF0wvM+4p42XPv37kHJPxOLprbay1v/ZdvMXl7Qn2tJowCbtfx1s+9xdO/+PTHK43vqDtCjKybzflZnp3y4nc8R60V5f2K+vkNNkDneRJ2Dhz+vUP0RnP0Zx5t5wj5Nb3Qcv8v38vX+oMSf8lx/KeP6J7tYyO2ryUIRm+PqG/WuajdN/jnHMO3hqhOsvjinKP/+CGErBHVfc7m1mpe9G8rMyrz35k+gy0jfAIfAmVR5npIttkYpyyR2hCFoHO+b65kNhHTJduMMtes6JY6F5gKRJUR0NTrcjACM6xQusyIVJ8/F/qBkSCgpMZm+D+jpyo70m7NT6TIFuLW+H57FSSZ2IwbtMnmCkJsDYQcYlMRakdTW5QzxOCzI6KLKBPpui6zSPanmEnF8fwMUuJ0ueTh42NSVHgHq9UCF/NwUuJxaUlQLaKCQGCxCqxXK5SR+GCxfp31pwpMqVGFRA8CYFgta+blGlt51hswRWKzcbRNJHjwLg/wYnSI/ho7ZzGqpNAlCkG72UCC1XNr1scNtvO4aNGVZm9/SlkZrLWk4PNgr7+PQugDhYUmJpFpcAOBm3ds2pba1xRbC+AkENLSFB1xr+Wd5kMmg120MPgUEFGgYokIWfsvRIX5/6h782Dbsvu+67OmvfcZ7vjmHqSWLGssS3LLErJsbMsDwnbAVEEY4piicIgrKWNIFUW5KAhFYghUTELAFQSJKwEqVBzPDmDHTjTYwbIla5asoaVutbr7vX73vnenM+xhTfzxW/uc+163nKSKP5Rd9brfu/fcfc/Ze+3f+g3fwStmk8jRoy9yPx3hOkOnPcOkZ/3kmvWT682
6v//EEaubi5Kmj/ZFpQlQhOFQEBgY8JChV9vmJhjhPNOjmkisAhd/7DasI1/xoXArtSiYKrGCgMQwDAJVPM7k25rFcolWFfjI7u6EutaYHFkteqJ3KKfAQtKZ0gtADLbFv0vlLLyqcToMqCKsBZqk/jkprmLwxDCUhogUROOjiRK1PZkSFx8L+Y4omSGqalYZ/MjRyglFtSGQC3c24fs1aZDuXs6KSOLKueH0egdWMN9DCIQMORmUFhlYcWS36JxKgSDdJumkqC05r3SNUgzEmIrSmpx32jUcLuZ0ocO6Gq2cfDabxZAvK9ahx/uOCkvXDtyYXmV3PqG2NX0caHOgspnduUFfrXgB+IZXPMIkzEBr5vMd+qOWRd4nO482kaPn7nKwozh4ZcPhjSlX5g0qNyzuZ9ZDpguK1HV4nxl8hVYN1jr6/gwVjYxFVaKqIrnRaFVRZSmUuv6cnK9jbWSmE8bJg+KITJ2m1g1aOXaswjUHtGHNqjtnZgzaKhqj8aFncX4PnSdMdxqapkIpxWrZE4ZIPZVgHpKiYoKtVrShZx0VsYqkHlbna7RZoZQnDQ5yRFdN6UJ4VFyCOqBuxLtsCNAuL7h29SbzymJy5JnVs1BpZmZKRtEFz/n9JThIlchz196iwkBLxhuBE1ifiSmJTC6JmHpWHny2gKauLfu7M25efQ1nRxc8/urEa97gUNXAZH5ATuI9QgLrJli7I0l2TqASicSLZ+cCEdUGa5csLjrINYoaqPHrQNMYUrSkXhKoL/3Gs7zi0VfQ1BX7Vj53zGCz8K7IcHfRcmXfSkFSOjX7OxGtGmncpEi37HjFtZprjzyGriesesOdIwVGjHwViqGWrKJHcXR8Rve5L9Mt9nju2SPSKlJVNc2u+GKE5MumCqBQyWCdxpopu7uZo9svUM08Q9qnnjQ09S7f8S/9K3RdRzuIoIA04xPWyHS4LclOmxDOB6BD5Nnn7oB2nNy+y9nxCW0/cOU117DUIowTPZGEKVM1AbgpArkE7a1QzZi4PHjI1GRTvKUkqAseLMhyyjKNH+W40Shdi02IEhnnRMZqaRiZ0kcK5R6oFKl0Je/HlIQqQcxDSdJSeS+KndrgtOXZH3yaZ3/w6f8fo/TLHy/wIvBQ2qq2aIJ//Pc+AAr+yo/+NR6+euNxeWt94CRszzN+5Z8MyLj80+qhM3ztw6ws/uIux/fuFdn0zNAuqOpE23tS1oQoSoomQd95EgHtLJnIcnlO0zRMphOaekrv76BjB2GQxGq6z+OPP4HWBd5YoKjnyzNSAqUd1kDbLaGqMZM51TqxPDvDty2T2Ywm14SPKd7zjvfwPj7E9Sf2efM3vQrDPZZrzaqPKOWYNXNyMnStF5h5TLRtK5M+83m5rjnT9z3NTl3gZtvjB3/sB/nMn/gMn3/951DzCUe/fszBH59SfdSRXv0yF+8cJu+sWX+0p/+PisF3D/W7K4ZfHDBa416pOP3Adhp08wM3efuf+xZ+5Yu/ygf/9vs3N1f3mm97xztp0pRn/uxX+PRPfxoU/D/v/b9527/3JI/+/GO8+1uuA5n3fez9ImhRfu67vu27+NAvfYh733Fvs6jUAyvroVWmJIEcjxgCXdc+8JKD3z3km/7MW3j/U/+Q+/dvw91BrBGW0twkZg5+/oC9X9wnXa6uxv6rgtf+26+l+fSE5VtXfO5Dn918X2VNfVTxju/4Vn73E79Dipnjt97jE//LR3nb42/HZMOJOdk0gw4OD6kmNVoZmTA5w1Dky4WflaSZWBpMMcrE3FVOiPkhbtA22pW4rxTaCpxUKbkGfb8W2kUGnxJRaZrpjMnUcbu5S5fEi3RILbausE1DDJ6+bRm6QCodiJmfcr29ilJLlLJo5QQ6vV4TXWLwa8gZZxtmM8fy2imBgNGapml4/d4t5jtzMWLXMoFo3AwfOu7cvcP5M0fYL84gTYmDIQVpRC+XC+bXrhIONRf7S85P7pLJ3L19Tvji8wzDWvz7mGBNROsMVjPUnnzYsF4uWC5Pid0Ks6dIwZNp0U0vhuJa0FVZK3yK9EOFzwJJ3D1ocJM1O7Mb3HmxZ7m6YLk+ZrFcslicUNcTmmpCU9WktOTgkSs8+qpXsbM357Gw5Lf0r3PxVxcPrMHIwO380knqH3Wcs13/ay74Ahdf87Wn/0xnhs/z1PYf+cFn6ts/+B7e+sl3oG2SfdQ6rEk4DRNzSLKgrRTQBo0JWcSSjHhApghOZ4Yc+a03/1989DUf4sd/4b/G/P4pR199nsVyIDBj52AHY6+wP9XksOKZp7/C6f0T1t7j48AH0gdwVPRD4pve9CZu3rpGNTXsH8757Mef5uj+irUfME3ERYG0GyuKkG3nsU0QGgAJU8mEXMVCPdKZPj4YJx4+vm6KK61GiU02UKeHE5msRodoNv81m0azuDLoPHalM9okjBUYTE4JlTIGsFj5GmByRmcwuiLoNZmAVY10NmKUBmOJvUabh4oq+T3y6yXBbqpasOk+CRZUa0ytib6n7ResujO0NpyvDdppTCWdXjvZw5iauoqomEiDbEinp0vOzpegRrNTw2Q6QTMRGB5w73TN/pmhsZbz1YKTszPWfkFSLdq1zK5F3vRNj/P4EzfZ3Z3je4E5OvuI7A0pEYPHWAPlWkFmNj3k3vEJKWTZHCtD17W0bSvGfhm6bp+UIaa4KSJTQLDXyEK01nB2/4zl+j6g0XqO91A5Qz1xoCJDaIg50DQNsTXEThGzZTq9wrpfknJEG4HS+GFOSjvShHcQQkaxix+m+CESfaaqapaLFcEHUtIMrSOlTDOpBLqgDadOUTeWqBRD1KjKUdczQoZh6Om6lqR7rh9eLRNJgzOVdJiQicjQD6zXS7RJVFUuHkUdTvXcPLxCjJ6YetJ6yZe/8CzNRJHSo6TwKOenHW13n6aZCFwJReoHQs7UTY2xIuPdDxFjI9pA5Sqa+iYXi7MC/zA4a/G+wnuPsQnbDGRgHeDuIuIqmQLuHOxwerTg2TtLBh+xlcPUmbtPHbG/v8PBlV0OruzjYyQpQwgZHzJxf5fp4Q3uXETW9wd6vyKT0GXzTjEzTCTQnB3f4+Dsc4QXPbffN8GkClWD9z390YqmdswaS+UKXCVnQgziYVU6iDeuXacLHuXP8L3BJ8OprdA2k5KoKTk7oaotg+/ISXGkpXP29J1THjGW0LXcPbrD87ePed31Q8zxVzAvfIVwtuL523u4R28wOTykms/pug6bRsPgUmSWwlYpvZmCC8Rq27JWWeA4MYp5btIyzRLoRVFbihKDtDWMaoSKh6Zgl+PIeG617fKNstY5g7WuPGeXY5DZnkNpYgh8/3/+x9GVwmhN267p+44UAtZaZvNZ+RnhiSptNhAebeTvKRcl0yywSUXxwSlf2/iBOcNkNqWpKqq6wlWWKsEj33mT937Lz/DcF77Md/0Xb+d/++1f5T/8sR/m1p0bGGekeRYUfek455BQGIxJ5LrC7s6Y3tqnH3pWx2fkwaOLn2BMER8HMQLOSlQmjYKYySGShp6sBvYnVzHTPdYx8ZlPfxi9XuCsw1
QOKosOGRsyyWaSEdgcCnIjyojKKHwzAyueMFkL91VpTY5WTK8VGOdISu61qHd2OF2ze22HFGf0/cB63dH1F9y9c5vJzhzX1CjnMM5xY2cmz5BPLIae+XSXnQLJYej50kc/zhAT1XyCsY72YskjN9/Ea15YcbA84PrJa2AYePe/+QaiTQwhEoeI00p4BVkgSJNmRiTywauf5C730Uoxm034pd//+6xvbTvPAGHdk3xgeNJz+yP3wMHZ31lLJ/chrtPmMPoltYtTCa8gZOgeUteyznElXeeH3/rv8mu/9stcvOqCK5+8wvf8W9/D2q82MuKXjwu1ZIcLdvIOl/0n9z62x7t+4J2wknX5ir/1Cl713if44O//tkg8j2v3ofNpo5nuzLa5hs4omx94TQie8wsppM7vvMDwtEzJczG3zGhOWXBbHZMBX2TE9Psc7sdn9J85k7Uak3S3xkNBzpEUEhSe1+f/4mdRSZGmiY995Q8AePxvvJInf+jtfOxXPsKn9Vew2pWJgiZ3kWEYiqlz4ewqoRRoJXDf6aTh+Oguj3TX2UlzjLbUpuL07gndsmNnPmdvPmPR3yOnzBH3OWFJVplI5I45YthP3HrkEc78EqUrGm2pyeS0I80h7eTz7CViiigt3lRG1ejccHR0j4PbFheRZz0MRCw+aOpXX2X3G3bR1vPq+nGcsZjy+ZRSxRZDFl0k463BqMQbbz7Bm94c+JW/90vMnkmcn0fW6zPq44advT1u/sBrSU2m7Vom9iqgmBzsceWVj2Nd4XUZK1P/0UBRKS4+9/usVj1Oe9xkgs8dQ854rUjKSA8gQcwCC3RKg14QTWJycJWDR2+RzpfsHu7yzPNfpVsPWDSHV6/R9YHkA74PMFdU1T6T+QHKTum8I8ZdfuK9f1GmdEaoJDnDwbUdPvF7v8PnPvVRnn/xq9S2IvniMZgDOUZRwhs90FJmCJl//8/8BNduPIqralIoRt1skRXbtThm1gWiWboCIlUuU2qVVBEbGfnJueyHZSKX5T7ZwaJmwhV3iF1FTpngM+dxTSr+aZuHABDFQrWhxaAVpEy76vHnPS/+3c9Qu12u3nyCm04zrNac3ztieXyXpxZnrNYXrNYrYh4gJ3QCYsYnGHzm7t0dos7sXLuGXy346tOf4WyxwqeMUiU+JFGqzA0cX1+gUJhoRQW35P1Zqc0+6B4mfT50fN0UV6WJDQXqt8ERbzF/XI7cKlOUmR4ut+TnZMx7aWwn01BZGIx+F4WcXjwiVKWE6yP6EhuoD8jPDcNQfK62/VMhdcZizLaVaRcYm9p8sE1CkkX2M+WIzpoYA3EIWN3I73cVNmU6HzAFKpnSqEaoEYnMiHWKqzdFUe6RVyb2UxQMfh6YHVqSmoOeYKrE7kHNK155i8pZ+hZCdJhk6H0SGJ2tMFWNAEyTbDCAz+AmE+mkG40xEKvM7IpsRilnQkgMoSswMiG1O+0IIRQfGOGf7d1qsK4S4FsRaEhZNlilVAlUUUREhkQO4CqHtYblWnwztBEjW2cachbFoBR6UJrK1ZIUjglYMWNNRYVt6EMh+paENWWsvYYfPKY8JF3b49fiFB+CJYSGEHapqkqugbZYY2nbDt+X+5gMPsxKUp4I3uAHy9BldnfngjmPiRgVMe9zfnGfo/v3GD6/5PDahLe+7U3iYxUjXQigFU3TsBg6UkxYZ4kuohtNIBFyR+sHouvR2skTbDK5iuiyrmJVzB/DktX5PbSuQGlOjeFisaCNSYqnpLDeEFLk7vE9LpYt7eBYtR6rBdaRsshtkwP9usN7EXzJJFRKhZOUiUWK3Xcrhq7CuUjuDClpQvQoBZNpxfVHrsKQ6HxkiKKW5lzEGgsI2d5VUsTogsNPQQkvIPWEpPAB2nbNyaIjpoRCsz4Qf4v16YKTbPBDx3KxZGc25fT5Z4gXd1Fmzfwgk7hgfSewPL9A7++xc+M60XtGb49RaUspEb2h8M42G9IIYci5FISyxrIvnnbFniHGQN+umUwayE6gmCmPldMD8U+G8uOkpnwhq02s2cSPEIpAjyhg+mFgZ3dvE/OyEtU2faaEK6kSKiiq6CCLf18eMqOHVVZpGzijTN2UVlilyrTObN/rNkiXjiiYDvQ64XPPoAbQikobMmesH/NMbMNjs5sA1MyZ2kOMc/TrJW4ZcKbeCHYQE6gKHwP5oiXECeuTMzhdbSZ5KmVizrgYsalAYRjIVuR0c8rolKh0Rdwz9KdLVu2S2amnHoocsDEkp8W4MqaN/QVaYkqIW/PoGkXCY0xfplkyYRSYuJWv9R7jTIGCy8XRg/AkMobkHHlvDkpRaYcZEip6lEkkeqIT3kkF7GTIPonVQvboFGh0xXJ5RhgG6qpiWjX84ac+TTWAXiVOXzhBKfjsbzyNqy3KGLKCMAy87ru/EdeIjLqPkSGPYK9yS5XC73he+zOvIZvMF3/iS5tbnTNgIe/KXnfwP+9ivqjprw5c/CUpxqr/0eE+bFBBoO+T/6Ri+NOe8P3yW0ZeiAbMsWb/T804/6sr8g4cPXmX3/pff5OcM+vrcj4dNZPVhFavN3vs+Ia+8+98N+GLA23oiCR2mW/34aAwpyJm9bo//43YhcEsVdmPwyZhTPnB4ooMuUiTy50r+/alY/m2JV/+378kz4ZSLOyKlVvjalEsVSkz4GnKjw1l29cRmrWiB577779K/bMT+Eea6k/OGf7mcvscKcXOzi5Kaa7/Hzc4+MABzhi6riOnRPtkz9M/KfdlUgVMBIXI5Hcx0MwMu/tTqqqmqquNaEVVOSZNzWTScHP5KPGkI9/zpLuy1xor0uNV7RjCwBB8aSIJnSQVEQxbWwKR49N7ArEuqKGSnpTYpTfT6pQjSqfC3zJo7RiGlue5J0J2nePZ+8+hlOwN5rbDLRtSFCNqM+Z/CKRR2wqtbWkEKdxULDGayuGMppnvMDy6JlqPN5bB1jQ3Zjx3fJu9wwOMsZyfy9Rn3Q6cnq0kdpXcQ5XzaiXNopgrElbMcrsB368Quzax5xjjQ86jsbrEgmEIrLuBdScy6Ytly8XFgvV6jfeBYfBYZ7HF5qTt1sznu1hbEWMWS6GUqUKzgdLlDN5HBpVR5wa7rDDnGu0U+CIJnnKhcwgjSCHw0BxAnSmCSSQjyoiSLhee27ivlPs3erbKMzMq45ZBxxgUEKqAFs8HhH8k913rUTZf0LUJurQAACAASURBVDZjyp7UpT1DI/6abMWcyrc2W4tMVsteOk5PUubiXJoU9XRG1UzYv3WLa485gh/wvscHT4pexI6C+KwNQ08/RJybYawiLc+5152wf2XK7pWZ7IExEWMQRUWg055juyQNImg1qlLKJRhznn8yhuLrprjSunRRgS3HQY68+c/LHWMHWMmNvlyAPVydZ0kWtczjN11YlHRqndIonTeY9JEcKpW6VOtj4rTZRLVU6Hrseme2gQFNShFjCkl0FMVQQnDedgEybbsSCJ2xuHpC13WIs29irAyVUsXhHJSH2VADcOPWjGtpB4IiZiFYY3LpvkLdOJpJUzqriKR3LgIiWd6tMUbI9
SnKw6k1/TAQi1lmUtIxTkqhKoHpxRBJBnQ2aOuwxm7+GCfJsvT9Fa6yuKoq3YlICJ7BB0A6VOgakICvnRDchVipaKwQ+5WRddDUtUwKovCvtJaiWJWNyhpD33cotXXhDnE7YRzHbpO6xvc9TVVLh3/d0i4zIYdyLUqHRjsUUlwZa1mtFHgKrEBauUY7QhiKDC2EPlM3VYFmJLxPGFdzcmJJ9NgqYSYKVRkSwgRMSsvm5FR50BNJy+6ljKzvjWS3cRhVSwKnJcgpoyBcmtKX4jMh3kM5i7x2Ulb4PRkIcq9Xqw4fEtOdFcv1gNFq83worYlhIPp+M7lJWaS2SyjGa+HvqSsOggWdaIzlzAdSClhrsLVlMmm4ffc2pxdreh9FYcpmjHbyHOtM5XTpXo4cAofRipB7clHPtFa8O0Z/qlRMjHMMrNoLhqHFDy2zumFxehc1XKBNxFiF04F+tWYYAkPwTHbmYKwUSBQuGBmdU4kpPIAdH/cNNluZJPU5RDRGkgsELkgKG0NaVWCFuRRmD+SOl+Kd/Fv+s4mBeZR/TwSfxdfER5KXzV+ZEcsu7zhTlPxQm2m7JAIRP8iaVWOXs6AGUuGIai2StNJA1JuYqhBItqZ48ZX4GkOS5KBs1L2C4ZlEt2jRk4r62hUU0J6fszrdxUxq+naNXfZEbSQB7weUkSlazgm6RD5Z4M9XuH6QtahUiUNZJo1Rns+kM+7anvBlQiANPf1JS5qKD8/q4j4mDsJryqBTUVura/HKyaAiMBreoouHjhZ4Z1FXIwok3LkiUpMzqiQ3OsQH+oA59XIfpGZDmyLiMgyEopY2CgPEosaolUJZSFmhfSCWe9v0AzNELKR2FfPJhJOje8yrCWHVc/bifZTWLM/WuMbJRNpqur5ncmVGPXWknOj9QHWrInjhtZjacPDmPZRR7P/BPtltCw8/+JJgbY/mIxX2wxoe3y5c9wmD+w07fmzsBzXh+zQjjm27giVRzvO8vU4e9LmYqar00r16202X4+anHuW5s2e5qBY458gEopbfE01gMb/AOcf08w0pJ9aPS0K93FninSeRafPDUJ7MZQKVnijM4YNcinAjcP790rxZmBV6ZjA7jmraEIKXOPiYJ786cPjBqyz0KRGRJz/cOaT+hw0n7zpGfdQw+dgc5vnS786keeTOv3GbOImkJmGWhoPfOaRtWy5Ysn7LOWffdgoRprOZwNLK9TaVx1pLVTfUVYOrKpGxzxTFVAfOsWunrPUFy+GUxfE5OSkO0iHe91ysAwu1pr7RgNPkNyXSnwjkHKGC+O2Bdm/FWq1lUoIUVrpU4GMMkdtWph7FTFsVQY2UIqtVJ83PwuuSz5A3jdkUto112DactBW1S1OKINOM5tiy1zubGYYl2shUWSlLv59Q1RF7+/tUruLe6T2ySgQfpdEKqDHP07Y861mKFjRGidhGyeyl4NalYVX8TTVG9mFrqLVj8JHFsuX8fMl6PZBCL6IZOeODpw8BEGU7rRXeD1g7xfsgaCAUKYMxRUsAacYNg0fXIt6QQto24sqAAEQpMUXJDcaC0ShN1/W06w6lpbgSFcltYTQCplUhRW2eV3UpnGXZAy7NFATifjnekVHjc1Ry4zHXeyAPV+PrHypO1KVnouQ54xAkkxmip+sHkoIhRCofYW5F/bg2OFdhUiZHTwpBONQpoVTCx0AIo0VAz0Bm//o1jLaAIcVMiL6ISyVWaQ3L25emctv1vV2Xwhf/o46vm+LKGIu1ZnvzKPeuENNzHh3EN5VNGSFewhqUgJySJD3amU0XSl4rf1FayUifUgSVIC4PUySEgB3xxzEViXYYPSJSURCRKh5J7JEuglJ6s6BkqhUwFpyzuOwwg5WFyehhATkl1qsLtMk08z1cU2O6jrz2iI8Xl7DcmhQhLS3u+QaV4XD/Ko9ySOUkKRhCS9bbLpL3gaHv0MpiraOqLDGM6m5evHiUlWDgZeJkXIVPBbKTMyRdFFQybStJadf3MvqNDkUoMD1JesffoVFMJhNSzPSrHqUleGQyQ+/xvpfCxdb4IA//kAZ8kgKnqqWAzEUJyBhNa4sMraJAGRUhdBvoVlVZBt8z+poorUimkfF0TGilsc6wCgFra+x0Rl1VZFuhZwF0JBNkChQzzs0QbyWDc4a94KitIwSBxBmrqaqK4KUQVgqiH8hZYY0lxUTfd1RVjfe7MsbPEbKhG3pSEO+Iyor0f2g9lXLC4UGKWh1HnxaNrhw5OnKWc+fgUaq4jHtIXta2zk6CrmhX41TG4ovMeJGMJmOTTARyGohhibjQs5H5jT4TfL9pRGiALFPBFBMxJXotJO3qtYfYPEXdu6DBEmMvG21Jxn3n+cLnPse9e2cMIQgp2GoUdQm8CUKgqae4psLWE1w9Y1I7tEvUdUMzaZhMGubzCcvFeqNGBGBqhV+s8P0KYg99T7c+QdFhbcbETDVT4qvUBsLFgsXRfXYfu0n0JShrJXo6KW27dowdteITs5lkqdKFETiMRRN8KIW38C9DEENcMZB1oEThantSyobz4KGULq8r8S5GUA7fixeMMRpXW8KwLiql0rzRWqCN6eGTFnjG2CTKSW2+PU7ZY06iTBVL/Cu9iLHoSjlhiiKr1oZkrOzVpoh5ACEMdBenBD+gdx39/j4ZxfmXn6H66hoznYEC33d0qXTZs8LMdjC1oqodxif80RnkgNICp4oJkjMIs1/MZFMGPWvYff1ryASGdkV7dsq926fsxUho14Tzc7SPDENikgLGBpJqWNVTtBbrDKJMy7UxUBtU5dDOlqJK4JIpeHzXMZ1PCEliEFGSnaGXYioVZcjkZbIrtiBFBjhHKSDH5MdWVFVDDkEKPyDXmWgddTXFdj2rk2Ma33Jr54DqYBc3m6CVJnWRPES6sGK9WKOzFu5XLTycymp8Snz4F+5jnBaDege3vvMa/u3yrLody6t+6FFMmRRfhryu25bBe/BgLjTxcBSWenCVamOEON94zKnZ7rPj97VFkWCaia9LLP6HfvO9Kx+/wrv+7LuY7cz5ud/8Oc53zog2sjpYEY9EBOdyyXXmFpzNFyyurJju1Az5gmhlHwtV4P6jJ8znc0ZfudUNKa5OHr2gn4lq2QknvPTIuGNHuhnJ1yG95WVekkDdU7yY73Fj9wbXr1+jmTR0Oy1guPcv36X91485/KEbvHDyHHE3UtUN13Yf5eZPaZZ/64L5fM7+k/scvfd5AHwa8Hkg7kc+/d99EhQc/Wt36Sc9zfsm+BS4405Z6pWoNh4rbLWHyVUZ40JVbGdkiqXo2ri1fMADPSlF9iY7GGVpK7gzOcEPPZM8IS4jq3XPc8193vnkt1LFKcO7BniXGEAzgeGH/UZhckwyx5XyUEn8Msc2P3O4r/Ea6fLph842Rr5E/zKvf/h48GePOdk03ccJTb2eYrMtzRHFMOZAOUojMguCyWlNUzeEMBPODtIgDT4Ski/GvQatK4xxmMqglaghLhZL6vqM4KFd9pgC7YulcR3CsOHV5pTphyBwNpTElJwvKVPL/hJjRFtYrVuGwW8afeOzOJrT
p8K7Qyuhx2RF17Y0kwFrlcBPtaB/BPJeJPu1wP/G4QNIPq6NNN1iZNO4fukdGi9xKZfyONfcDi+2PmXFmiiOMUJyOD0KULGFGqaciJsBQGLVrYkkdFLEdmC5POPivqOezIT/xjaXHheONZqDwynZZpLOkCLGZqbNHlXl8AP4oUC7ETE3ciD4M/JiO2BB8ZK4909zfN0UV7lMCi53dDcXrNyczcP30ETqssneNrJLd0RrU7r9AArrqtL5KbhotgpBqnhC5BzxfiDnLcfCWktKiRDCJoEBRdd1jA7XIEyGcRolPydJbwihcDMSISQqbVEWYhYZUY2ivbhAKUuz69jZ2+N8JR0m+djSLzLGEkNE5SSCGMDQQb+yZGPxIWDtbhnHijynIVFZkdkMfWS5Fgytq2r52pBQVpGiw+Kw2uGyIyxX7Fa7CDZE8LSubgqOP6Eaxc7uLmu/JsRR2lpRWUfbttQTScKiH4AKRcQqh9Xiz6Wrnsp4tMk4Z/AxYYwj+F4gcUZeV7ualOSehBBwxqGdLhAmmbxoJQqGPgQG7znYeUQe5GJ2SuHN+CgSm5YKax2r1Yp7F5GcBpQy2EmAVJNiQwxI0LWaFGUCoTPU9YzzvCYmDVm67SEM4tVCIqWOIUh3NQThyVVVQ9dHcpb1qChTyDzI6L+o0tRNTdf10qUjE4Kn6zp2pjuifOc9MYPWMISWmGRiM61v0raerl9wL92Hb4NhiFgduHt8yr37F1T1lJQyfbcgxEHgpbXGt566dkymU46P7xGSPGsywTC4ypJUL1POJP5OPgwkVIFbAnV5/Izi3tP3aD/8BaZ6wuHbvoWuFU+3YfC88MLzxLjiylWHq6Yo5ZjOpzjriH6g79a060zGk9VA6s9ZLyPrZFFOkbUV3LtxvPKJb0CbSlScBtkoc5+IfsBYxdzNOHnq88z6SMhiHYDN2LVGxYRLoEKmPTrG3LpCViLcYtBF7nYsri6T1LfZY2nbAKrwWlR55qXwjVEaNUrJ5HaEteb4UENoPBSbhEC6YyLZO25qPkZUVjgjSnVh6AghUFcVxlkMWZ7TOJ5u3AzVpjAWHoRMQVOKl5pYJd7GUriMUslKoMgpJaKo/CCud/K8x6wFV28k4bCmwrmKoORN9F3P0Z1jIBPDBZwlzIVF2Yp70XOcInY6YW9vzjCccna3Ze2XJAacseUZk+LPWcusmTBVNV5HrNbMbc1+fYWnP/IpUTJD+Bzn/TnXm+s4L0lCvFiz0+zRpYE1kXXqGGINKWFREvdrh/cD9AnlvXwmZeiDQLBijAQ/cHZxLpBmazf8EGMMaezW1wZVGYFUCnYYUqRCvG3GGx2VYh17IGON8ON8zviLJX1aCvvPKaKtIWbao1PaLFOqyWRG1LrwtioqV4mKWRRosdeJpJLAvasG3dQ0zZy7H1zhfyDCLcS+5ShDgqpy5EtcoxgDOSUmn6p5/E9f56kPPUdha5QmpxzGGPIfy6x+qmP3dbOSGIbt97VFKY//kYj/kQdJVzFGVqtVKfplnz158wm/+Ae/yHuufi+qK+JU5dn4Bz/1K3z7Y9/FN//eW/BqILQrXqiPGPDUzYwnXvdWAKqmlmn8tSWf5Qs88do3sN7ruL14nqd2n33gPaSY6M8G3vaWd/KZX/44d7/vLne/7y4vOZaK6RsPmTUzqBRHx0cs2yVnXzqGqbxEv6B5+ktfYPKuGflnMsurF/zhZz4JQNe2DHd7Tp463pzyOfUCX023id32ermfbOj/tudT+eOYyolIVgrwhwr9zZZP8XtYbQuCRmErK/vOhkJRhHUKUsZai3WWO2en1I2laRyvf/Pry1Re42zFbnWFR5tXkU3gLf/Vd4hfndZgFe/72Z/jyZ99N4984tUoaxjNxUaBMaPZeGWWfJ6sxsmK3sSglBNGuS3FYRTcKIbWstfYB3O58biUAyoFGl+S8LEQcKQ48mWRqZbRNPMKazQxJpaLFc45UuRS/jaqLSbIEZUyyjVc23+EfXOVQELnmuOjFzm7f8bF/WNW945kepQyMQ6icroMoCxW7aCA6XRC1wWG1YrFxTkpR/YOd3nh9m2U0uKBGHXZJ0pDVusie14+8ubzjVSVgO87vO833N/LU6pRqMQ5ue8KhfeZ9XrNZNaR8siBQlQeR47ZJQ7gyHkC8MiU7fL9GNfcA3/0w9zh8YZxaQjyYHEFZpRF2HCTha4jDZzxNbV1OO1QGaYp0EYl0EadMEqTvGFIK7HCUAXNET0pSmPAVTXTfsq68yhdpp/KYrQiD5mz4/ucnJ6xjIMgFAKQewa1JFWheDZS4OZjvaA2a+9h+PDDx9dRccXLzNnURiEQHiyiXgL5u/R1pWRka5R00tJYQZdOQMq5BACguC0/tnyMdnpMWy9KAQOqGGNyqciTBVYgM1rjbL2B9wElmRI4jTaITG/seUK9mlsHr+DwieusVitOT85Yr9ek7HGVJXtJXIZujTaG/cN9WuekW5uTkAA3Y8pIwpOyBKamqbG1IqtYMNeaIWRSEv8GV1eElMg5ErMnZsFjCyzIosSlU/wCjCVGIUKbZo+2GKBSCs+wWtDUVSlcAheL23gGuWZR5DDrumG1XG8SD4FGyt/Bk3Nb3lsgxJ4Qe1LyGCdu6kpIaVK4ZLXB/1JkqsdCd+xyxEJYHsUHlNK0s57FciG49Zy5ev0Wp2cnUmipDAZspem7NRToXc6ZaloTy3TLGct8UnN/ucJiIGpib2hswqszQhwAzWx6wMm9JVVtAZkITac1KV+Ar9BZYe3AdD7BxzVZBZSWiVdKCmuqAovT9F1gNjvcdKcmyjCbs0ngjLbU2mGrgHaGqqqomwZr5/StdG3mj3ya358+y5/763+KD/zN3+b0g7c5P3qaxWIJusFseoICyckMrJXiTGlSqVhzEk+k+d4er33DG1n3IpIRQyQMwk+KOVFXAgcNVoo8kzPVdId84zHq2tF3LSF4alPhnMGljB8i2laYakYz36HZ3SOEgSH1DLam2neC2Y8JFSIEj9uxTKf7pKwZfGS5bllcLISMLE+1xAg0OrLh6FxcrNGpgmBRfQA1sEILPrzwo1IOpGGNM2WjTQhMbySsbuJOKVZKcjnC6GT6XUY8MaKNhFVtwGlXxHMSMRRjaGVLSJPzylou739TYI2DePlkuaz3lILwDiqDqxrI266i/EzZoDaNKFnumpGIKzEwM74XvfkdJTWTKJkTQzeIXw6ILK2sDjasnZypyIAtkxxF9B1+qPEYaSYFj18LpEpP56hmSogZnz0a2KvmTPYO2bt1hcXFBddf+wS9F1NKrMYmWJ8t6X0vfxYt+/MGckINgdCuOfnyOUplPOKHCJD3dtCPP8Jq0XA6nDFd3KeNkRNgrRQhARcXmOgJZfQmVgTIxClLDMpavLWcq9DGYrVh8B6LFo5r8ETlS8OE7TXXworRSjGmmBsUBWNnNuNL99wpjVOiPkqMIg+sFVEbMDVqEF5AjAGVEr5tsdaUzV7uuJvUxTvIgnHUswnKDmS1Qplz7HTK/uHBpnl5//CE/+Y//iu0TcdH/vLHH9hH/9/
f+BBhJxCbwDO/dpvs4Oh/OoOBUXsBgIufWuB+y7L7vXMAlr/cEt5SiqgKzn93TbqeX3bEcff77vL+330/7/727yHlxGv+8jey/7ED/uD//DAf/PQ/hsxGap0M7/zPvpvT31tw+/ij5EPF3Z//Eu0VgfldvOqU9/21v8/V9zzOxU8f49/WSbca+J2/8A8Y5gPm/ZaGGT1bJbaL153xm7/+a1z9/jfw+F/6ZnafepHn/oPPvfTNzjPrj57SqrMHn7UJm8+WbibOP3Yi0fWK8BdXnxKFtvxIJv14IvzotpAKf2OAlhKs5Gv+L/SE/5RSyAp8jb0MU0hflv1+wF9aa19LnKBEK0UpfPImuR1frcafH7s6m0nP9q9h4mmMY2KsJOIKtDKkYoOhCj+dUctkzLcQmFgmQ/EJNVpg1pkMRVm5LqqcOSfIw0aaffvZ5K8jL1JrTfYKY+2mxSNUALWxyVEqE1OPMzUjH9ZaTV07QXqUHKqxhkwmZkvKBl1yi6Oj57l9/Dz3T+4T1oGwXqKLCXLMkehHeJ3Y7iijiuiBlyZt9jibWQ8d3g+Fq6+4deMmF4tlGSLIpF+biqZpqCpHKIiCcdq2QTYpQ1VpoUloUCqRU9hMr1KZojs3IcSBlIXDrIwgaqqmkkmN9yUPHuXuy6M1TqUKR360HxkLibHBsb0v8sNSG6VN3m4uQcgfXI96s+7k2vtLzYCyQWWBbI/rOaaMUR1KCU+scnDnuTv0SeDVpjJoU+O7dUF3jOJM8rOT6ZSDK1eYz2t88oQoapO1i7TriKmNoEuSx+Jx2oAzpGSIyjKdNbSrddl300s/khIqzR91fN0UVy93T2CzD5Qn+dLXL1XDL/mZNBZP+oFgUlaqPPgFBjNyp2xyENXGvPVhoQylsihGse0qSLFVDIuzcCyMtWwIbyWHTSljlKVxDbPZjLbr2dk/oKodbbuibdcoJYZ4KUVyHNDyRsS3ZHQ2L1dEj0IRpUHQrQdWqwGtM9oE0Jqu78hZFGe6IUoCWQxqjTa0fSvCBKMJX4qQBcNMlmtY1TO6XjolowGyQuPbRPRBOjDBE3Iv8L2MfL1P5CGgK4sxiuQzzczRuEkRwYhEoJnsiyhFjjKiNok4ynQXV/ednV00W9jKaACNUuIerg1t2xX/Dplthpiw1rE72d+MlSeTCTNXy73UCm1GsRLhKiklwTmrKSH3kAeUilSVYcftM6l3qcwEQ83uziHaFDUka2maHYY+lf1Jgq4UgB6tHUqJoWPVOIEgIcIN1lnAoVSBCikpzidNU0qf4semDDkXcq9S2CzGm7nw1URGuaxVBS/u9uQMJ+ueRMX+4VUeefSCO7e/SvRRUBBJYGx938qaL6srlQBHjuzu7TGbNBL4lEZbQ9OIDLA1hlA2I2sNYSack6qyVJOKNJ2ipxNyyGglZrtnJyf4dSufzICyYG3FpJmyWAZC1PjoSFgmddlEVALjsJMKV89IMaK1p7YzdnfmLFcX9IPfENWVTgx9y/nZCe35GX61JikRObApYbMijgGdTFAZXya8qmQI4qenS0FRum9I7BihdqqEE0ocASlIUooYvS3GxuRF1kXB0ce07f6Pk7Dx/Y834pK8rSqdzRA8oB+AKCi9VfHbbF65TBjU2Hkbu2zbzy3vt3yyTZdxhDwqUVCVi7GJO4otNPXBgDvCCiWRSmrAIFBpaTYUw1lt8NqIYbPRqG7AGi2qodrQY3DTPWojFhXZZLS2zPYGuvaC1fl9jhdrJs0MhyfRktbCu0u6PH8pMaSAio4QehrXsDM/5F56jqg9LZCzwipkmo/AnrOKqCyEVpVl2pSTQFPGPSMn8fUzOpUGBZt7oY1cqxE4lwCXx7Wx7aqrooJFTkXpNhY1O+FdZlJpX0tjLGVVVBLl+uq0vW++4Gu0Utimph06dJaubdaW4HvmOzu4piIRWJwe056e8sa//mrcv6q4+/YT3vYLb+Pw9Yf87hs/xHNXnt/c0u4x8UWrn6+49XevslytGPxA/y5P/wPbAiFdzaRJRp3A+r/tCW+OsLtd/ukVDzdMt0ecRRavW/DJn/44/bWe+99+n9VrVqCgfeKlMsfPfd+XWX9zS98PMEFEMMbCxiXWN5cc/+QL9G9Zka5tp2TtVRHLyG/MdP/l4oE8IlWJ1SuW8Oe/wspMGL7xpXLXV4+u8+TH3oFzjt/+F/4Ri/kFe4sDvvOj34t9VmS5n775JT7xDR8hvfLBRCw/IZ//3R97D3urfSlOntEbPptSiq5p+fXHfxUUfPPp23ni7qsf7LA8L/8bofzjPqOUcBA3PCekORJHr9DyyOecN5Bdefrl7yGkbfxRAivTlxTQtBEz8ke6VzO7ultymTLNH/OqAsMSqkW556UppJUuk5jxs27PLfCwWLhXUlyVCLMpCDccVy6dT8vEyxgjOVcWRM9m4q8kV/NhYDbdYb1uSSmwM58w35nhfWDLpRlFToSDTghUdcVJ57BOoWxCOchWhHNUVthkBEKfU7nuscS5iFaZSVOxtzMjpEx3bmRKgihJ933PMAy4qsZoQx4idV0xmU5wdQXjtHwU3Sq5YwKmE4e1EuEZBwWqNMuyXFsJNXoT42MpOi6j10cuXM5FlGozhZJqOedI1roIGZVp2lg0jUtyXNvjShrXwkOS7BuEx6UcPedMzKH4zI5Lb4Q5bpsCWifMiPYABhxXH7nG6f0zoaN4sBpSbBl6T4oiVpYRXYEUPJVrWK86YhhkP8maEBTKOrSDkDra1TnrrsMaS0xKJP+nsLe7S7duRdALCmVI9lJT9sCvNeAZj6+b4mqkYb/cd17+63I8PEaWB04esM3IcgxAZVNSakNTH9OMzW/fnk0eXL1JSJAOfTFgvTy23VTl49e0KgtVzp1SKpARScZ7Lw7U09kMqxVD1xOU+E2INKsn9ILX1SOGlPLmxsmctoXrBSlqctCC3FMCLzGlvnNGk4LHCmO6kKJnRaxCiq2sxQVcTASRJEGBzhadPTorVEQmY1WNxuBsRa0n6Iki5oGmaYSDkTIiIa2o6wlGG7z3zKcz6rrZQCtTgulslxxlgRqjhHQZtjLvJMXu7h62kNxTEdfIWbhWWls0ppjusunkDMMgha+9FLhTLJLSxdBZG8jCidHFbFFgg45ET8oDMfeSLEbLpNmldhOsqdjb28WoGmsd1jm5JsZsoKdqhElkJYahpfgZO9eqdIek02LldQAqFS6NudQ8kAlsKmuJnMixx9gJIajihyVFsjYJZSKz5oCYEk899Tx3755iTM2tRx9jMq0I3ZowiFrj4AeWbYv3RSWFXIQVpEu3f3DI7u6+QGaNxTmHq4Q3ZIwhxKIIpQxUsvEPnWfotUA8tcEGWXcxBNq+Zb0Ug2NjNdoK5MEZU/IALTyvJGs4j0WjNTg3RWtL8AMkT+MMlSndtcSGw5SR7n7XrVkszpg5SzYZFSNEBdmisqiKpSzQLFUVYQEFjI24e2w65wAAIABJREFUUnBIbzRvaqgxZxk3ggem6YisvBlNhJWsAco0dfyTokCOx3Rni57fFknbeMZ2ep+3gSAzvr
8xaI0b7jZ+XT627LHSXCqvzZsCsvybQkzOo6z8WHnLph5i3J65rGdNQltXiscsTQUSKsfSdJLiKlpL1JocY2kOZYzTOKNwGSwGVc1xVY0zGusUSVvUnmJ1pgndEpMzdeVwKIa+oy/QxqBFbU8r+axN7fCrFTpk1JBos6InUSR0sFmgjDGbUoxKTRNH/zWlQItYji2xgiLiYvRYGG0LKa2NNALG66mQ8jKzSYTG25fLPZcYkKWAKwnb2NXNiOS+6CsnUKOymUYXjk1QYjydjWKyP6M7vUD1QeK1gdB1xKZh/bqO5SMdZ4sLhuXAztEe03sTrDdc++o1bly7QTM0vNxhOs3kyw3Doif3Ef+N4SWvyVcz/l8MDD/qH1y8/xRHahLP/NgzAJx+68vxocqh4IXvfvaPPpmB9k9+bS+f9MpI/yMv4xWkYfXvHPO1XITqoeba8Q2qqsJEy8HFFV773Bt416e/U1ROjWJvcchFc8bmWbp85Mw7/vDbuHJxTYorI6IIIE2J1XTF5299lpzhrV98O2987pu2xdWGmwney/UdPfRQAofaFGrlfCNsH6Wk+ZKSIC8kVG7ykhDSZiKOGps+5ZchvJuUknDED0tBFrdQYpUpFjfIa4r3E1lyL1P2PFXOPW5hY7MopUtiB3mchz8IPdv8rpJjSVNqm2Ns8jpVRCfK6WJqqOupTI20QPXms0lRKRbJIlFuzajs5MLEQNXUTCfitVk1NUZncuwgioAOXgJyjGMjS1ALOWdyTDjrmM9npJQ40bKPCcon0bUdfT+gnSvCRxnnLNNpg3GV+I0VePWYT46KvXVTSSOfVBANpYi6lIOGFOUcpVBJKW24uMZochSIN1pvfSbH119qEkqDSTFefeH9F5Efxn2wwDvVVtxowyMu9/PSRGBcwiXPkc9IVqJ2OOZul16ugawMWUmTaYiOg+s7kgNeXDB4jzPQqwRlcCBIKI9IAyiMcRzduU3IA9oI8kphUKai7yKLizPWqwu6dY+2lhAzdVUzqabMpzOOlWYsw7dKvuUzlnzxjzq+boqrr/0+xw/zz3Y+pcDowrmJUZTALkFmNtAWpYpYgixYozVByaQnRgVFdUXkwweBcVzq3sriKF2joiRjjSFmMRCurIymR/W5GCPn5+f4wXPjxnVme3us+4HFxbrIKSeGoWd5ccFkNmdCUZrpRWrSGPEGMsYy29kBFPt717jWX8OhaGYVxmi8F9k4mZwY6rqmqhzaWrIyBB+kOLAOYyykTF1XRO9JKRBTYj0sWawv6LuOtutYrltm1QH7+1e4eniVg/1DptOpTGWqSqZ2WkPWWFttRtwpJUiKKFjL8jCKxHUIklAKjLLo5hVFuJwVfds90HMbiY7WOmJItG3P7P9j701irluz+67f0+29T/M2X3fburfKVXZRZbvixATFsS0UESsgy0KKLCGBxCBKEANGTJCYMIB5aCYIRQyIECIZQLAQSMAAK43TOCnjQOxqbjW3bvP13/c255y999MsBut59jnvrXIhJAZliXN17/2+9z3NPns/ez1r/dd//f+btXaKKpqEkaroUxMgAWNVUUYq8rt0wLALrdNaC3IghA6MoxTDHBPWmboeVPLTWCHOjjw7UmrXesLQOOWF9XpAciCNE1gIPihXu3Z9rLHqgZUT3gYdwtYog6Ec0UeAYmiythgh5pEurEl1kzDOEtyWOI5g9xy4pqwKH3z72/zeP/5HnPVnfO799/jZr/1JKDMyTeR5Zpxnnl/t2E8TOc+oIqMeW5xmTQHFMM4zoQsIjmlW3xBF8tVjLGdh7LXA/c63PubigxXhyZ4HImzPLpAkSLHq43Yh5P2NUiHdimw8KRW8MayDUwlwHMFaMoliFBHcrhU5vb0ZOYw3BG/5/oef0vcb7eLVUxXjjBsC995+xMO37pPGiXkakXmipMRkgDqc3cCR877TeZeilgpGDCFnrcWq+IgW4FSacN1QMbX7C83FPZfMHCH0AecsJVV0UzQeaWpRjoVRXdc/CgTTDbYc86tWADe0+TQ81iTmNIzK8j765qVWhydNeaiEwPYcZbtorGyqWO2Rc+b25pZxjDVZMnjv6INjdXZO6LSrGXc34Kr/Vy7MOx1IL5s10llkVGXKkkRpKz4QCnSrDr+9r517WzjfrhhTpFv37F8L026kLwbnwc+WFGvC6eqZLNqRjd7wuZ/6Amm358VHn/Dq40/YWMO5tdxSiEYoJpPiXncD6xRhT0UlfK1Rjn7wdH1gs90y7fekuoZk2T1aedVIllLXiKr/pdQ6ATrjYLyryWq9BrY+NzhK7Yrq6jdQHILFGqHYTKnHabzR+BUTxlkSgukCq4f32M8RyeBwDOsNIoX9YeQf/uYf8I2//H3MXPctEYoTJAj/zX/03/7wwjt57H965A/+8+/82OekX82kX80/9jl/XB82O5689Zi/8a//dQCSi/wr//Bf5c//k18Hb5jjjBPHVz/4Ob7ywc/RusanqL3GGU3Uiqkm5RWYygjdzYp/62/9u+Ss9Dkd3FfxgVJaR93ivNLxs5wI0UhkAXhrXmKtgnOtAMtFxUHa3LFUMGEYPIsUNwZvHLOZKlinx+nqmm2gT8nKuFBlupouS41i5hSw0RhnRbu2ADhb98fWAb+boEpqa+hupwNOIxRkmWqhELDGU2QGOwGpHpPF0tV9RwHKEHpM8QRbGUJIpdiZ2moTnNHZ6M52WHGYDKZYjDhs9ToqEuvsfMLYTvcDa8gpMx5GJAud78Bm0jwvIGpOGWc0Jk7TRHYZlwEs62FDGHqsGys1r8qZi1RVZKELOn/ehIygwnItltfi21rt9iAaJ4a+ZzUMdN2KbJ0W25haPB3Ps+YeNY4KyJIzt71JFkE/A0e2SwXja6V959450mdleR3Aqh/wTnOlSCSlXEHz4+dlCnEWFVIRQxotxq64ePAm/fqM3e2OHGf8YUcYOqVyZ5Cp4LxQ0o7XL2559eL79fMr5xsDJmBNQiRhEZ3tNRHtIFgMonPMRsHXH3qYRon8Y1Jc/X/1WBCOigIIwtEuq168is4oSmvr66gJU1kSc+Ob2qAGhObdRPV00KIi6M1SA6M1hjlFlNKlcubDsEJG4fX1FRnHgwePuH3xXA1gVysevPkWN68/qDQ1pSYepgnxoQp95JMLqcHs3vtv8Mt/5df4L+1f5xd/4df4yvhlnauoqno+hNpebghPVJW65l8ANBn2hiCICN5VdIM6w2QEMRGpkyPTGNntD1AK85x4/foJho6pqmU55xjHGWsNXTfQ9wOr1QoRoetCHXjWrtvrq5ecn+sQdIwzFINzHWKLdnGSMPS9mptaU4s3SCJMaQIRwsqQmJjjoVIDDV3XgwiHaVL1vZQwwHq9rkWu/jvPM2dnZ6ScKaXQ973OUOxuUblvT9cFDnOsVEyHD2tev76m72C1DvjgmcakAggu18TKMI4zxhxAPHkq3MZb+j6AMby+fs08z7z19ltgHbs8QrZY19GFQMqF0GtHpwlLILIgQ51dkfOBznpN8KUQywjOINJTpCd0nl//i3+OP/tLX+Xv/
Pbf53d/++v8g7/xDxDTY32PdQFjA9YKwWaCVwUllUcPOJOIlTNegOlwIMeZOUb1Q0E3Q1ORuXkbEYQP//Dr/Bn/p/jaL/4005x4/vJA11ms1/O9f/2azcMHnJ9fsF1vGPqBft1zfn5Po3oppDlxGAuHCXK26lyAUMTg+0s6Oqbpiv68o6Spong685Vn9Urza5UmttaQpxukggbJZEqeluJGBEgFiToDJLbREwzYgqUHscwxctjvyWVcOO6gxZYOjVtCMGy3a8ZxVFDAqOqUDu+mZXjbWKWjnYBgy+P0Nq8/4ZSijEhzkTh50V0lrxqcjm/2o6quWqAtGySFkvVnUjfMUgp91/Hy1StevXrNq1dXmlSJbt76yGAc3j9W+nAI3HvwNucPHiK2Z463vLqttMC338f87Dnd7pry3e9zPjtycoRdQpjpOsPNJ9/jsL8hp8jHwwZvOx4+OOP2+UviYeTy4pzL1QU34wumOLJLIxsbyBi8BbE6r1XWF8wFsrd4mTm3HVMuWCt4UynBRYsZctQOQNc6VYHV+ox+2DCOMykaus0FbojM+x3T7jUmNLlxvR4lRS1+MCrpDvqe9TpLEUwzomxFcqUK0kBeI4ixjFiszVhiLePqvIvNiKsCS0FpXNq7Lzz9xgdY41lvNhhrGeeJp9Mtt3HiVbzl7He2fPU3vsKf/vKf4PbJE/7pX/4mX/8PvsX///jxj7/4t/8N/vlv/lItTuCv/uZ/WPdF/bvv+tqRUaEYSqWslXLcs0UtSXztXOeSsbZbcgsqq0Vct4w0EKyawoeu5ia6hiwC0kxYDRKhc90C4FJqfCmFNl1kTCCBimABrsIDOUaM0zm/XKqNS7UdsFWuO4/HefIm151jk3sTDI7mA9hAhBZPFsNQnAIHuXVVagesikuIHG0c6un6IbC9zYJiDAUHPiMc6je0GLqlUMQaSpkZOg8mkfOEt0KRWIEpMNglhGUDVNsasYJ4Tfr3cU86ZKb9lRZCtfXmgkWyFkySC9YZNpszJDnmmJnmSB8KXY0HzlrsMCApslr3+L5DsnC43TFMkTulx3IeGsPGLLOD2mDUWdtp0vm0Kt1LTokQnC6NqhgITVCm5rii1hIN8DEiTdOjXl9NbUPwdN16YfVYqxRS7ZAqMN4KvNN8qmR7l8lhzLIupYrVGWMY1oGu12IxzXERfYo51Rm1kZgmHt7fcLF1lJK4vX5BjNeIGJy39MPA5vyCR2+8w/ymYTrP5GmP+6eHxRpEbQMyh/FWrSWyFlRlFuJscd0W4xxljqQUK0utUGRmnl297+q6qzHdnpzP/6fHT1RxdZoL1D+135w84dj6/ZHP4VgpO2cqzSfTVFmcpQ49m7uvFVeZIAbvO+Z5pgWL5qeQsgozAKSSscYtnSprrfranCACghYJuSJGJeow5IN33uT+o3sYEwDLKkS64LWdr1rlGCDOcy0UIbjA4DvmeaILgfv313zuvUcYDHEaOewPtMF6AfVGqKhJMy9up64FGGut0uuMIZd4grLVrd9U7xvRgW+LYbvasFnd086PhZZglNLUWvRGUWqe5n0pJ/a7fU3KACxd1/PNb/4z3n33Xe7du8dqWKkjfJlUiahK5+z3O6QGjxj1RgxdYBhWGGOYYkJECF1PnGd2+z2vXr7k/OLeySUWpCSePHmy3OwpRXa7vcrE1+BgjcrNp8ptt9bS9wNgmCaVJ7dWzR03256+VxGKOersSEGFJ7xTWVbn1NAzpsw47nUeKfQqczxPPH78hND5OkujNIy+73U4tdNCy/tOxUKqupPublXYoLX/pegcgijC98nbj8mPCt/8px9Aybz3/ns8+s1HhOz5rf/kt3j25BNVMTQqlx86+MpXv8x2u+Wjj57yySc3fO6dzzHt9rx8+Yxp2rHqB774xfdYrXoSCfcluLl5pcOuxlC2A79r4EvvXdLdCh98+2M++d7HvPvTP4UNK5zzDL2DdcRZT5phdEIXMqbAut9UXyswWy2sm0eIFu1BB/zPO+a4Zjee8+LpU6Z4A2Ip1fvFmsLNq5EYVVjCApL36KRVRiQuAAO1+2Trul+ocoBkBVRiPDCOkd3tLdM81Z/XmLFsSjrcGoJnGFbcu3+B82rJsD/cMgxrKE2Z6tgxY0lIPhPORJ+z/LzUDlbJQFC1p4Y4NxpnTVra5mzuxMNc14qtKHNLjOpHyhFlPMZPIfjA8+eveP78FbvdARFfTR3bRqmIoJg6GJ7UwPHls485jHsO+x2cJaSogMDrT75L/v6GIXR86Rd+gfNhxe7mmhITUQxzFsanH5NmNXSc5ZYkcPWJIccJmxJdCFw/fcq0u8XHwvmwpRihKwIp41zPg4sN6dULZDfDbocNnoMxHDLEen1VGQqyBSr/3ySDjRqzIyNpzlzPEztJOG/xVme1PG4RPVFsSs/1QnMyRwcZU9eJMZrcLr8TBbdKK5aXi9FMKo8ULv2YtHTYHYZUkW0xgtiiA9kZJBUOJJ5PN+yLeutZ6yGPlF3km9/8Do82Z1jXs/qw4xf/zS/z+Z//k4T1wPd+8bv8w9/8Hf6d//3fI73e8U/+8d/hG9/+P7G+R/JEilkTFlFz6ZR13qWt2baXniq7GXQQP8aIDz1/4S/8Jj/z1S8zxlEptL7NlxgoKkjknGPOI0Ki7we8G/jgWx9w79Uz1jUp2+92Si2yOldsW1ekKsUBSCnElLDW8b/++j9iHs75jf/5L3H+1Yn/4s/8p3z5w6/yL3zrz1IkYm1HEyvWLnPgr/3LfxUvnq50y/3RxB/a31XTtbVu2jr4YQS/3ukLEGIq5ah++ZNMpJ6/rNdZvaRYOk7FVKpxpXMZL2Ry3dvr50jWrmgFZUrJWDh2LUUWlk5lBOKsJrftexXkjjS60eSoFnCVMVFfl8pxtdcvoUVTC3H12EpVt4O7qnhw3KoXeli9P36oykIqNVBnka1pxYddjlctW3TftCZg6MA6lSMvDlOph6XlhFKBDqPfOjDwxuVbBN8xzRP71/dIM8yHA+PtNeNhxrsVXZV0V2/RDkomlZkYZ856R8yTzg4ZIc2J/e1ea55qM2NEWK03dIPF+owxGRd6LXrEocwRMGbGJQ+lzvwWUUEQaxZ2TrJCSVkVRo0Da8k4vOvZDIF+CEzeVpB8t7BnpukWbNSCIk7Mh5nLy4f0/pLr24l5jvS9Z5omEEPX9/T9gBNXFbZNvVaFKMLQrwi+A4FxnBREtKpNoCMZDskHdte7pYMqojPkq86zWfXk5NgdCmcXG/qhY5SJ/yv9Po+Ge+QSiTERrzPltSOOmfwDAwGsLchuxhBwtsO5nq4fcPc1rlrr8XZN8I6Sdpig4yE2rwhSgCpG1QV8EIZVIOVZZxMX66aMEV1DDXD5ox4/OcWVqVGtogN3iqV645r6vBZEfugtzGkwoM7YtPe785Ynf9TPMhisc3WWp9RiCVrcNGhifVqAYI4Gne09jSj1S6t1pRSJUZ1q5yx91zHNM6uzLXnWG8UaQ+h6UpqW7xF8UK+Kej6M1dmo1arnn/vTX+ZP/Is/z9l2pe9rVVRAgbC2Qxwz
MwWx5fhd6rlBhGkaFQF1ob5U/XmkCLEkuqCS0g21gKrAmDMiGd85Sta5n5RUcnsYeqUlYsipsN/v2e/3DINbUP/1WjtWr1+/ZhwnhmGgiYIYWw1ODczzTMnqP5FzAoFhtaLv9xggxVhVG4M6qk8zu8OI2FswotfUGnKK+l51k5KitM0pVmRZwFANmCsVSmoXsqF4VLlP4ywplVqQ1mLNKf0hUhDR81C1oVU8IiXGvWDdnhS1gDvsJqxloZYZY+hCR846d2VrALRVYp9aCBjR2R4dLj52VaUyVD8tn1B+IfPNP/ymficL3htWQ8/Dr9zj1fVzxpdajJesFMpnT55w2O2I80SKB169fs5hrwlyjAce3NuyebdnOOuYEnQPPPbsQtEgCmmt98Hl5RnOwNXr17y6esWj8U3CytcNVvAhUERUUr7sifPE7S7ivK+UNMF5qtv8sUPirMP26nFUckGizu4JmkSVaowqJXJxvtXNUqhFkRDnmZym2j2qHPIlDshx02pFU1WpnKaJedINM1Up9PaaxqXXfDoxz4lp0uuyPVvRDwFrbaWH1WTlLiePzwYyU9EUI7Wz3gq+JclopdHJW5hjlJPlvv8sMNX+syx2jlixHldePGn05zlnXr18zWGv3bqmnGoWYEVjdqH6cRm9r+I8ka9fk+KMFyHUuJanW+ZRE8aUBfEBu1phQ8Ji6Y3hvDvOKWnxYBXskIIDemfxRbDbgX6eWceZOc6YacKOCe8c/dk5vfOY3nC4uM8udIy3O/LudlHXa3OlLU5K/ZkphuyEQ5oY88iYMzMFyYKz2n3YYOkx2CqAcYrT3T3/WjUdf6TJkfoSmZqeGo5PqCBY2wbr76s8ST0v1OS4qj/aghSDr+vyEGf2kjjkrLNgx6EBrLVc7W7Z9gNJCu5gefg7Ky5e9WzffsTVW1fY4nj/459i//wV979xyfk/Gyi+Ix4yJoIrllLAu0DMZVE0oxaMbf5kKd7F4oMlR4P1lov373PPP+QwTcTqH2Yq+Eip0i/G4oLOZA4idNNI+fCMi73hYrvFWMPtzY3u0aVg3VHooIgOthuoVP5E13Vsd2sO2fDeNzzns+dfuvzzvPXsfb7w5KeXO0o7zBrHnQ244liEYU4fn0345aRIOM0P/ojHctct72uXc3YEYNp7HV+xFHina/b0ubSfH7vdx5mmtpZQkZS29k6OuxU3LUTIcj50LZoTgZY6FXMnFh5Pj9QwepQQPIqEsfz5s69px30Ep1ni5Z3ufps70t/UfYN6PMJStkpZgC9X5yelXl9rLUt1W79WNl49SV2gswPBrsnG4M1McbMCI1XSXFxl14jOrsUyQ4YsWdcjlt1OlezU53JSX8cQEKMfXaxh6NYE32O9IYSED52W60X3PjEG4wyDU1XnInmRBTc0kE2qGmzRDpzV4t1ay253xeurTBjhcFA6d5YDpSqPxjQROhWcinEmxsiLF58S/Io450WdOca5zm752tHSwrZeXooUdtPMer1l6HoFnacJ72qXzBxPtDHqH1pXFSnnygBR2mKzLXp+veLpm4/JX408/bc/5nB+peyTnIkxU7IhxUybNdbRWM2/nQ04G/ChwwdbCzy9r723lKK5lzFK+6xcg+W8eZu5vr5hHCdyyuThRHCqdvv+2AhawBHl04fc+e/Sujx9/knS0YKQrUl54/SahvDQWpSyeCx89pNsG0BEW/taQOgTpJRFYvmYlJQlKBwFLgRjAgYN9EUK3mhS4p2jHzr2hwPZOsqoghnDxuNDr8ELnTvy1ukcTNEvWkRItvDO++/xJ//8L/CVX/qyUulo9ZQiyaWq/zlrKSLqiVUT/PadVSlPOya3t7eM48j52QUpl0r70ffZH/ZstzoUqsWCJc6KXo6jei4Mq14LjZy0uEqJzXqtaITzSIHdbkeMkfU6VAS+sD9MnJ9fcnV1xdXVTRVx0Juj1IvbnN2bhKjB4ENgmJQKJqVQcqELoX5/LX4LlpvdTjdapxLmOktWFlRRb+YewS9CFyB0vT0WYKKFh1hL8F4DcC44PDlK9ZbQQijNheAtKWfmKdZicKzy28pdlxTvzFKJiPpNQUWxwbdksv4+56yBap7rcWv3NcZErHNS7VGyqgs92XxKKYXvfOebpBgpojTX1dBx72fvcf6DM9KcSHulW6Zc+Oijx3Rdz9n5OdYJz188rsaRwupyxRs/dY/wrievIY3gxNOvzhHRACzdDBg6t8abgDMT4SzoGs4ZqfecDR5J6kcxx5nbnWDsXmkiRde+cxrBTG3BG2MIzhF6j7Wq5mYyTNOIcUDW4krQLulbD+7RDwNxTjwlk+LEeDhwGBMxVqsEjG5eUnQIVvJybaxR5DXGSJxHSkXYtdC1S6w5Jgl6D5ai98Vh3BPTJffunbNaD6QUCd7ThNttXWtL4tTQm5a4tLxniXEt3qk58AIH/1BEPP1bO67Tn5wmvjU5r5+nIIECK20Ie78fubq60fVr9T5pqHebWdRpI3tHRRGBeR7JOdMV6OoshfgIriAps//4MWE3gdOk24WA7zznl5dqXukMWL3fY8pLVyIn7VT7YjAxUcaJtD/AfkeYMp2xDKs1wQ84B4fQc9WteDoVkAO+ntAiGanofZPCNyljcRwMXMnMTUl6tazV+JmUllVch8UTxOCaCuARel8EGeVk3xJQutLicaaJs6sUs6VklmOcbglxm7kxCNT5YOeqB2OBYnW2pAC3cWRPoRhL5z2uHOdLnXeMU2E/HbQgRRhsZP/8CcP9rR6DCMTMfjxgLKyHnoPAvCTVBufUwgErxDnWvaV9rZNEilKBt2PyPE4jr1+9ZpwjMbUBfF/Vamu5aSxrf07ooOxeMb98zHo84IcB55WS1A/d0kUrWdeuNmaq8m27gdB7+eJmQ7efyOM/Y/9Bz69t/hzJrpgi4IUioyoM1+9Y5EcId5zgFP9viqn2vHbNT1/7Q3MpNXtr96kKRDW8r4nRHI9Hluti6/sfAZOFDlbnl6kxT19bc6Ja4utarBS/+ubLJ9X8qZYltTsrtHnNlnO1IFaKYAqL0MGC+pvaW6ox80ivOu63CwVaOIl7rShbnl5p2RpzlKHDAjYa22KrqhA7Zwi+5oUcc0JZzr/mHBZL8NAHj2SY9jOHw8jhZiKWW/W0qgqLKp6QKQVigpQjlKOSYU7C9dUNKaqv6ThNasihvhiam1nB2w5rdObXu6xdZlSyvu0L1lulCJd5URu21lbwXS12TI33bZ7dYPHO8PLFxzqX6UbGMUJS65H9eMth2uOcY71eLyC2IMQYyamOB3iPQXMSH1TgJNdiaNX19b7Va/fqRlkaQz8QnAMprFb93RylFIyXeu31fpjnpGBT1SSIMdL1K3JJPHv0lO3PDMhfmdmZJnijC6Fzgf6kXmiFpvegXnsZa6ZaVCtoVbDM6H4Xaz7Y6oLmgyJiiDEvTYVSwXj/gcdkBRb13z8uxZWoVOJn0Yy7N+FJMbUkJywohtQNTU+KJqUtYSrlyOstOR9RnvpPTJF5GpndRMkGi0NOuiilorqlKLXDe/V1+uzD+7B4XXn
/8Vgx8JKXDdF96uV84PM+fpCV0rHy8nPA2qpQXFgz7xj/+zv2bPkT0lUskobTHGEmNEO8f4+EAqVx7Gno6uOmJ5UAIFWTOtKpx74unxG478E60182D45tO3PH95YVlWHj9IYvxy29DaMY4zzily2ahZusIpK5bbzm5vdy+fVpp9WxnHCRBCXquRYRDqkFIWaz3btjPO4vmqtdFKYfYzphlUy4wGxvmEd4ZluWKy4nJ6YDhPchM0xb7v/Jv/7X/hr//6v+I8+F6MG9ZtxWpPqjDPA3/553/B7377B2zRDP6EnS2neeY3f/afsG3bXTJ5/otfcx5HWYB73sf07cTLy8tdKtCyiEFP0yO73kjpmdPsqA22LVBLYxxnvvv+d6CSBKL6E9sPP0i0gdXytFZw3vL8/COan3qQ5kCIm0wlDprgPLBuV4b+89FgsIZxGME2SoO37crz87MY5F8irRY+fvxISJoQRVoYYuDL6xuhk7NkpCiLsWkNowaMGfj4zRPgIC5QM8ZKps18stzWLBK6yxOtVc6nAdWhJKUGvv/hD5xPX2EMZG4YHDlXYghoZXDDxHr9wjhNTNOZUhBPWjjMwZXaIsYWtvRCSGuX9CyElJnnB7Qe+iR2l4F+rdAsqnlSXHF+uFMeY4ycz5cuY6nkPWCV43x6uE/5lYFhNGzbjdQxuMMgm7obZ2Kq3K47tcA8X2gqUYnkpEFZ3t42QdXqirWNt7cvWKfRtRLzlVgSIUVev8+o5hiHR37/+y80CvM845zlp88/MIxPvL59QWuZStQWiFlyuRSafS9Y7QUf3X0EShm2bWFdEtv6TEqZj189sMdVDpDaUpqmhIxWcqAdz45WBQhjzdDhI5BSYN8K2ihaK8QQ7iHwuhNCWyu0qtk3wSeLtM2ybBu5RgEZ5MrpdJFJV82A5HQ1XWVKoC1sjT2sAMRl47a94JwjpgVVxCeplCHcpGGSXp/vtLenhw88/5C7CkNjtKcmgcjUVsg10lrj8nBiTzsxZ5puWAvL9tKLwMJ12ci7J+fI/jPSlVJ0uR4iz24wn+RAJAoLabLsYe9rp0jvjobmPWOqtb5z9ck7dCDJ+3ToKGWOIu7Yx+7E3T7dEmWGFCxG6TuQ4pg0LctCTJIn5rzIOsUT16mCvfGZUuqwgMMzJpMZ0yV1IPuxUQJ6koF9JuzrvVmbS8O5gbe3t/5zKUppvL6+doKaTI2Oaa/AIPr1KRU7SMB1SJmYRKo/T2dyrR3lLxOL/XqVYoiK9hVMxAwj2mt8O6Zmck1Llswf7+Zu3pdpVywFp4QI6rxFW03ujYhxmGgothihJh7PZ87nWbxNOfBw+UhDcbstvL1dSSkyjjPUTMmJHAN7FRJt2DOlCEJe8Ou2k2gNqlVyjqKc0AY0eK9IQK2JmEWW7v1ZijBEAdJCA5VoReiIVNsjMoI0k4xjOl1I5Y3Xt/V+/+SYyV6C6gVuELFOM5/OIt/uk/y8COhA6JMK7x3jOKJ2sSxovdDUiwCifvhRvNJUHh4vhLBRQ8TVxqdPH9n2N7lXMahqmeeZmH4ip0Sujaw8t9srzs5czmceH06k+D2nQUAOtEatGtuOvFODXiTz8OHhzLLeuN6uLMvC+TLTXOX19ca67oDi66/+EddrpJaNRsMPAAFjRubxkcfzTC4RZ99x60rD5Sx5YTVHXN0x2jP4mWmYBVKkZkY/4/Ugz3KNtBzkmVKtg18a9jxinBMpIBU/emb1CGSccdAU99wpLR79Wqs0rrptxRgNeiChKbGwt9LPn5lxHDGTqANSkbzBsN+IPabDGMPbF4GMWeuxTprkAlzp5/PzzOnhJPfnZe7ZAAAgAElEQVSX7oMSHnlcJTj5gFpY54lxo5QgUzftue4rD+cTpecP0gqXr59EktxR7+M/DM/VsUjr7j+RLtch6JORrKRxtz550j2w8OikNQO550WNWAksW3ZuX96wGIwy5AZVabRVhJLQreH6uLTlIoihChRJfG71QOhKNyL1nIR7Z473jePQgr8f3jvW1TrpLB8dvCbJ1wKlqJQMMcjm1ZRklwietVIO/LzWsrl1Mtu9C9dvRqU7BdGI52Hynlpt78ge0oUuhVCFNb7x5S3wxx//gFa65wSprqv1+H6jDtOJ03nk8Unz4cMD8zTx9vaZv/3b/4MYX7Au8u2vG2P9RNhWnNp5uEz85T//z1EFtj/8lmQ8v/6LP+Of/flXXEYrB6994fOP33PbV243y2ma+Hi5wDRJl6/LJpd95S2ubGmX7qsWM+L5dCJH6d5j6MhXTevAk5Qjp/PAV5++JobKtkbmacIaTUrgjOc0nyh5Z31L1OTw5kxLFurG08MF50aclQXm+paZvRcKUIh8/fErQr6SUmBbI+M4ysOaK9YIZOX17ZkPHx5pBUJILLdXtHI8Xh5FAlMyLffMjC1Ra8SawsP5iWn0PI0iB5y8Y3x6FGJZTuweSF+Rw4b3g0zenGd0mq8+fmKPOzEGGo1fff2JcRh7MQ7zPDFOM0Z1tLeC23LFDY7lthJjxvqBohrXsAB9UYoi/Qz7H2ktow28rJamIvu+9AaDZjpp4t647Tdgo7aMG6SzWxu0ZghZ8/Dxkev1ytv2GbsqxmnEd59CKfL+KAo5b32qAW2YuG6r/Mx+pFGZ51lkiz2v5cvbG48PDWvbvZNcSmAcFbkcuXiKdd3ABEozkD376phmx/npA7U1Stt4OJ+JsTGdMsZYnL2Q440Y36QodBN73jmfK7U8yyFSF7Sd+PNf/QpjNbVllvVGzK8QMkaPUC25NOKeu5a7kmth1JqSI8pmmlZieLcfqcVTuzzX2EaMgdtNDhbjIEZtlBKUc8sMk+d6+4J3QlJUSkFtXJcvTIPcp/uQ+e//23/Jf/c//DcMf5RQ0sfLIyFIFkopEciU4igVUgdtOOcZ/IX9be9rm4VacUbjRvE6pRyZpge02vE+sy4Lv//9/4PWM+MJ1j3CqnB2ZFsTJRe2tPP29iqFjWu0dacVRc2GwRtoWchq2hDjyuVhQhnNOI+MvShvVaQjNIngMNZ0GUkRIZgayCmQcrzL2qaTJe4FPww4aylzIYSA7uHetUjhGUvrIBA58Kd663JhmVhYZ1i3VzmkayMS5mKwykoWC4I1n91DPwyAUo0UI7pK57w2RYqQk4CMzpcHvJf98PnLldP0AIj/K4TEvgXWIJ16bWS9/P3vbjjn0cailFC35nlCK8Etb0ugxEzYd7Yt9J0XORAj/ivJzjJYa2TinaUpIeRX0ZSI3wJEgifXyBiRtIWw9z/p8rxDatg/q7W0REU2zX2i9R+eB0qVySIFct83Gx0P3Tuu+7ZxB2F0UIR3vpMsD6WJuksDj9faqmRB0Zu0rYoUUFQnErR6u90Qz9XGsrzhvO/eSIEpxbRiLYSwkmvqEwtHy5UQlvv3q63RrhJNoRD5VM6RFK6yZ/VB4OAHQgxYZ+5qk5gDb9c3mexZgWZYKxTA2uWEewoySGxSMJ4uM9sW2MLGlsFaIx5a78mICkdpJaHpWfIEU5IidA9vGKuJMRKzyGitkyyzGAvrFjFJ
MZ8eOJ8cNdPluIaUo/jcqxB9dTO0JLK9QhYfzWD7ZFGw2yD+JPH/ZAYzS6aYUr2hYmgkUFk8qi2jzIrGyHQuJWoP+jVGJPjOGskAHAVIVkpDq8q+beLLKsf9K2tpq50qm6RYNNbfG9L0Yvt2W+7nutYUP67fMY1yJlPNQHGcimEYDcpLvlLaNkrdyKFQWmBLr51+KEofea0zOUEtGd1ff8mZjx8+EHMilyxS5tcraX+l1u4lHke+++EzjczhHjBWfJnzeKa1Z2IIQKUhAeLGWgbvSUU8uc45Bu9RuhHCjcEdKieD5rlPf8Tl4KxhGgV8YY2VqXpqqBaxRjD0KVUMG9L84a6aiDGy7eV+r2/7G0oPeD+RaGxLQmtN2OK9qeL8wOfritpzn6Jq5nmgmJGSG61kuaeclRxFo8lKdQvMIfN2OJRIy/swouRCjoV5fuhXRiT0NSdS3nvoMdScOc2edX3F4PqQvkKNGCPNo9Y043z+e6uaP4niSiuF41iAoaraM5vkBte94zwgGODDbFtS7pWxFDuto8hNLUxu4y/+fOBf/LNvcTbQmpiwG3LQOsTc7XjYTE93LzLidIMcLGjlF9OOoyN2hCHKgtxT35WEndb6rr8+8Ony0Xp91QRmkBNiQhUPgig4um5Zmy7P636E3rJ617pLbpN4Jw6flGRpiEG4ByMemni4d+5qKVjrOwEKWlZ9OthkPH9MCifF7373wrq+0JqMyrVuLMsPaF2xRpN3WKLumQWaGCoUREurFdjG6+2F08MjuUgexrqtpBRlcSsJqxqbM+xxomoj2vJa2Fok7JFUSz/gigRvud16oGgmt0RMW/cxDGhlSVlRqycmxR4zWxDZ4zBY6foaTS076+0V1bHGRltSalzfbkxTkQXXRil+QpauptJYO5JS+UU+lnOjTDz6e1ZigiqdzdN4QiFktNP5gmanVYPXnsenDygFwWzkJBOv0Rhm5/GnUy8yK95Z0r7jnWEYzjivuZwf+r0l3UJbRHJpNAzeUWplqxspB0ouHFlVMYu2/Oi0Km14ebvKxmEUMUe+vAbJSSnitXF+pDbFntYOTzB31HnRUK1o6R0zenBQqgQd1korFq09VimahpQj67KK5ErJxrPsG7EMpFhJuWJiRJmMsaZ3ZDOh+35CjMS0cNsNgxulqyQnNBqFGFeOCIRa650MpatM/iQ0dpSiD5CgRkvYk1Cr9BHKKB0058WjtYcXahbpVmtFqIAtiCQW0YIrFNbBtl3RViQW19tntI40HClXCSCsBZRM1I/Dp1YDRst7Ihl+AlOhddhBq2hr+++LSAWt5PNYA0o7jryXFDO0nWmemMaJkiopFkLayMUQbeO7jy8UoxmHC9U3ctJ4fwJlME6jzY4zXqa13auo0LS696B1kayFfScXULYR4s66LWgtMtjWqsiMi4Qvl1ZF+qEd2jS23oVVTdNK7WAVKYqrgpihFtVjGBJJVVozbFsjFSmoJNA0k1PFOd3lixlbi0wGlOTnGa2xtk8UjmYeko2Sc5dx9462NK6EJmUPqErfjhvSzBIJjUxDrOmHO13v4e+qqa6aa3egknW+Z9iJx9jpDhbIGRo4IxIZZw1GKSg910c7yehpmVRkumudSKykzmgo5aBlSlF3abMC4lYpqcOelKEiodpaWWiJI3D98OaBEkN9bd2DXHqTIvdm5+Gx6tfomG73aZTk/HSPac/K+fnfP4R4h89Z1tF3HxdIQXTQxY7PSyzH+985fBmHT6pVAS2FmnsxdwCxjqZiV8YUgbSIj6n1nbHjnp2oRyTWRNYHa61Q/FrBGHmt4mtLkj/X1zFZc+S61yJiQqWa7JGpSEZh7UH2QGkFQ4UmRWgqcpiWg5t8LWPkGtrumUwp9kZS93T1BnSt8nqbbBgUmuy9SnrFa9iJWaJkVDd7CgFSMrt0l9fFtAv0CET6bCH1s01TWhQJzmCdQ1UJgtfGoZVi0nOfdFRoRbyJrXbQRcFaiQSQZ1CaAZDu77NWArWZhwu5lntjWpojfV1QihSSrJsKjLPyywgIgW7Z0Gi0t9RNgpyV0ngrcluRlCFVA5W9T/1K6eAaLbS4VuT+N0b1ZyFTFXJ2qECUZ73f9KRbZkhQeyhyK7JWKSRIXUVFqwf1UHxL43Cinspd/aSAWjWxht5Yl9daSqUizfKmFblmwvKFGDcELCQxHK1qqDdabcQUZd2hyJpkNZuzhBT7mcURk1B1r2+vjF68/HKvy5MuAbwyqT6yV521dz+o4jivytpp+np2SM+t9Icxx7rb4S3KRryXZy3G2Bu9/X7qirBtXzBmuvvWr0vsEzApFq02FGUwg9AKrdF3gFtthZgqpSa8d+xb4gh2d6NDOUVTMkyptVBKwo09ukCJhFcbaWTQs/mM0RQKqabOK9BY/Q8BaEHDsnbZg1Sm2nStPWIOVIDXQl1pnfYkh70u+bPyZgmvP6BN4ds/M/yX9ddYdxVwAx7VpDrVrpvuqjwQqmNDS5GbUrqX9Vh7paq2rptpa++uGA7a0UHUE4Nu6x0z7ptPKfX9p22NGDdqCSgKVsO+LwxaiFUKgXFo1agtU3O7fx0FUmB2jbyi3g8ZCsgp9o1IWmIVeQ2Cwu0EvZq4nCbZiBsSookURFpLunyMG7Hd+PGHv+Pl9Udy2jmdZz48PjFNE9N4ZnAjKRYSmnk+4fuDmatAE6ZpxDrNliLD+RP7mtAGxrHy1QfDOK9Yo/HOMg4Dxs+SbaBAt8rgLMRdPHQdcTy4gbfXV6ptmJogbyK/8hbvJ4z2FKcYvehoFQ1jEilUvAdtHApL2CvbKoGw3pn7phhjBhXF5G8l12IPQpUxzjDPk3i1ZtM9LqD64m+NJ+RICBI8/PLyyvDJMzjDOBjmWWOotCIZKE/nR0Lc8FaRkxVqTUloNTGPM0prluUGiI5aO8cwSqDd+Xxm3/dOMpMCO8W9HzZk8wxxYw9iVq9FgiW18e9SHiOL7vV25XQ+45xj33fW7cbhK0TJs1KrmHlzy7SUmdwoEwLn0a6HEKMFa20SukjKvBTuYnpFVRKN2/V6zwhDN5b1hqvh7o/Y4840niR7jiZFWBHde4qJlLNkzpwuxD2hEJiI977TJ/O9wWGVo5bK5/OV27SjtUh5/9H6wFBsP9wYliVRlaDanRvJOXWCo2ye+75ircMq0zXeHZnbKk3GZIAg2W/r2jt1mT28MM+yNpUq2VdQ6HYTiXrQGq0c1vVGiQJjBmLcUV2yVmslZUvFoUwlzJnbr1+JKTB4x9PtzHQbhRRXKrksWCcm6tyqQA62TdYGd0iMFON46t6knXH0OD/gsqWYwg9//srp//Nyjaom7BWldmrOaCsHu23bSLERUmSPO9u+U9siuOJvI+mrQv1Wse0LqCLSjdKY/06TQsQPGmcGVLVY7dj+yY4eNK0q9i0T2ck14n7UDM8e5wZiasSOTxYvKJSmMB2qU4o6yiAOQAedVKqU/tlhSgEyPROJmwAqShGJVa0HvOiQYncZiRlRSt77WgtVS7hzq/2wod6bgCiRvNdWyEWoYEeJcRwMQ86yDmvxERi
taUUQ7LUWnLLUXMg1kUuiIeQt5939tTlnGEcl2XRNozEY40ix3VULznp0CxgzYI0DtfX9iHvD0mihSdYuB0TJz1TU4dmVvUUbRSs9hP1QkCB/t5/N7oesY03i+K9Ur31dA2n0HCJ/KaCkmXnQbA9PkelFjEybtNEYBb+cSmVKj0YRU329F6Gtv+9oaPWQGL6DsKBR2s7W9g4nopv4Hbmke8NU7p8CWvUiqv/bWlHWigG/1zcSLi/7v5xVFcpYBLzUr1EnsNHvjdwa1CwEs9bIVVQs+x6w9vBz92tjZMpttCO3hjqmWh1BXltjDyKbFqqr6VNFOcPko4hFdbKdNKhFrijRJ/K+GtwwYK2l1ELr6h7rHIqK9x6lWodmpd58EGJjLQalCrSEsYK7TiGITKvKNdWmga6M48geA41+7fq7pIyDhgSItyQAjrsfXHMPtZZjGLlJQ7RUadQbI4dnxHRHr9iJRyyMdK3vk+hMo6XDP9xvyN7IMFoa8CW33nxvhFTZozQ4axVpnLUzWlVqKpSs8M5iTMHqAW/FX4WVSfahnFVNzhvQmyZ9eqbNJA+LqpSWCSkS4oLWFmcGStUYPRLr1gvvRi5BGiZGIA6xxH5eVuQsTTqTHSkIDr+RyEn3hkeh9MkfVff7v3VflL1HFLUem0QDa0cOYmhHb6ONZhxP92mgHzwomYxD6827zDhKoP1BrQ5hw9nc91FZGZzXDKPFDxbfPAIacwzKipS+w1hkD+xxSfZC6a/FGINxQh3Uut6L/5QL1ju2JMH03iEeMO8pqQ9XFOQmNO5cExVNIf9Hqpn3jz+J4gp2SvrXYlosUsm7Hq6nVENwixD6hMVawzCMlHYTbW5RmNwR09mQlUhERuf5+h9NhPyZXL7QqsYpi9OKXDKpKHKVB9P4r2lrpNQvlHrrN4xB4WTMOeq7nwrkgTuCy6z1945ezqkXZrKYv2dIHV040frGcKPWHa0TRmeeP3/PuUz9a2lqiUyTENG2bSPnzNdff01KRQJJW+N0OmGdRSmDVQjeNe8M44D3rns3KiEEnLOApVZLq4bp/6fuXYNtW/Ozrt97H2Ne1tqXc+vT3Sek0+lc6NBcjYgiRqpEywAKaAkoRWFpgVjqN7VULPMl3gopykuVJSiWHyggJpiIVgAjudghCZB7d9Ih3X26z+lz23uvNeccl/fqh/875t4pgc/t7Er1ye6z1l5rzjHG+788z++xiqIVwQ8MYUTVRoozw+ikqcsRP97zsddfo4PAUAp2ux3jcIvG03oelx6QIEsMqqhOBERMs1Um7jFGzEsSNGm7wdS4F7TxUbKDaNIgb1OhLa1kK3iUaGrkQdgSui04u++SstynrWJYL61QmkzRYzS4kMmpyCo+a+xHCkrL+jzFlWd3d3zo9a8lRrmptVY8vXtGrBPaCOr6Mp3QVuPcjSCtU+Hu7o7j7hah8snDIaUkG5oHhfF4y2H3gPUyc3NzS+3ZZdPlXZZVQm/9uCO7hbff+Tw2dIVqlaL0wcPH+N0NrTWmObHOJy7nGR/EaC/mfsNLr77Cu++8x7Nnktdjd41pnvq0TkEWSs7mg1BI4Rf8kS0cNOd6Pah8EJmNtYZpmlHKonUm1Yl1NeS6sh9u0exJk0eVlXk9S/acMRSzULKggCVSpaGtpqhEVVmCDvWRGfDWge84fms5Hh5xPhVybFi1Q7szSkMIlhDkBK2lobFSHBaFbg7sQorzdWM8rzLN+j/+4b/Fj/3avwtA1Y1/+8//M3ztex/G6EBpM4cbQ4pBhhlciFPBMBDXO3JLmPYyx8MDnt29h9GB4I+yVRyc+Feaw9mRDz74CsfjkZINOcE4HBncA6ZpxSjwQ2POH2BUn7RWubabgmH0lCrS4t1hTzmLn8oaCTde4lMaCeMKX/y69/i+3/NZakdV/5Pf/8186499DOuFGHl/dyGulRQhLSvzLCGi3nlC2AHQmDGm4v3Qt2EVZx5Riufp7in/3R/4y/yx7/zneOxeIefGBx+cZDvjAoMfMEazxshutDx59j7ERCMxTRkbAr/4e9/j/X/8HtWLXWg0DfvPBb7l3/rIdYuolMWaRjELP/3vfZl0I83GJgtvBj76vzzko9/1iBA6nU1vzYwmeAkATUkKHoeVjX2T7DylNLkHEuu+KakValZCGbs2yZVStpwbaXrP5+7D6gHloNB+ZFmeXoO5c5E8xJLq9X1speKiIYQRpx1WefLSJXPdQ2SMptpETrFL4WUK3Yym9MweyWFMQjZUHcVuPMssw4hGQenGOAYOB08pjZwSaY0cjw/YD4EhvE9UK4N12NFQsvhWARliqg4Z6PI6aTCFamp6RIJzRpDGKZFSwZgeG3BVcsjnFUIA6nUjYcxzwJNS5rrB2nxb8Lw56u0WSoPthS7I/1agy2jlet/IhcY+J3bJGbU1bfIZiwyz+8EamAZQrsVsa3JOU6VJepq/yC9OfxdFgIacS4sQIy+TDIBraaTYUEZRi0zk6RL8HHvYdJ90z/PSfUtGOi4LRrfrearE9CDBqD3/SxtNVY21bym3VykC9dp80VtUB7WhiNfzRwPGmS4rk4FGZWGJC/SvtcZLa9mvO+8C65ox60m+rhbxI6l2BY7EJPLrUhpaW5FwOUcpiXbKOGfx3vUMztIzwKTemtcVdBXpIBpjRfMpGV4rTUeUaay5sXaZvwkDu92ItztyFjKhcpm6lN5syzAuxh750qWjlUKrCa1F2q+NJRVBvmsrQ+ZURfEgXtUtT0ma5TmLjwkHVXUacM/dNNADxT1plm0jOmOsgjqgrQyoLIZlqsQyk3OhNcPt7ZGYJlqde7ORSfPUt6PmCnQAeO5DlGZ8nhOqgjEKFxTGjjw6HBAZtGyA1iWhlVgTtNZcLjPDMEhzVSDmzG43iJQuQ8oVRWE8PMAHGS6vKbMfPKV2uanROOMIeuByPpGolFbke1SBhUgId8JSsEakyg3EJ3/J+PXUrSsGdbGotkpt1xcURgfuz2dCGPFewCMpFVK5CO6/yKB3txuZFhkIm46MX5ZEsLJx2545axSPqTGK09S4Oe7lGZIS+XzPdK7d41u6VcBwmRemacGaxhAM1qzcHA+0/vy9LLMoo4yVDaasRf7e7Ux/fVU0V60WnE0MTrDRxu0Ay7pGGo3gLbeHgTk6piVRW2X/YI/xHo00PSVL6KJzAy33yVYDrxX7YIilENMsmnAF3jT21mKaZc0NnT+gZEXK75Lqwlo8zmhqlPTo3a4bfrcivtEfcq2nukve0Dgc+tZItm6bua6PTPp/V3I849TbjLqwlDs++fFbMPfEZSXOK4qZYI7sj5G8q7RqOB5kujUvF5Ykie43wyO82WP1gEJzHGZaeyphmMoS/B68Q1vJ21AUrGq0pBh3t9zcPGDcj1yWC3PU1GYwNhDCiK4KPwhSuhZFKgXagtJGLvgUKW1hvS+o2vqjB3Kacc5SmqxQKxW0Z9gNWGNBJUq6F98cMnkSFKwn5YyuIpW0waJsx8z2t68sGW11V+RpStoxp0Rr2zR3ZAwWhZjHBXXdOIyOEESSUEul5iKa4J
yuh/UbHzWQ6xVCArI5VFaa5lK6UVsrSp5Ylol5ntgNZ95//x2UvuC9ZXccOV8yH/7IRzFYLncTxmm8M1SVqLpQSKxL5LAfefLkS2jt2O1uefWVr+Xp+0/QVqRXTSn8OLPbHYirZPos85nqDK4FSq0s80qcF+5OE2taWePKWlbqfObm5iXA0JrCu4FUCnVdaE3kLLUuLHOjlhVrzRYJ0rXrMuXMMaGBNZ9BN8IwcL48w9pbWhOyzlruGewNuX5A7sAFZwcwtdObJJOq1srXv/FxLucL03ym1Ik3Xn+VeY44G3BuYF1X5nNmvpylELCChrVaijKqIydLrTMPbo80KvN84a13Ps/NceyT70aOYvQPRvH7fuQ38Xt+9DdwqTPf8Ue/F610904sNFaU2UnjnWXjcHM8cjrf480t+7DDDwMpJbQK1Coyt91OTMOh0yaDO6J0IuVJkPHGYNUj8hpxWqGNTOwOuw9xPiUGL9u5XAqxRLzWcm3USqmK4/g6Mc0oNeLsnoxmyfc4M/CpL+z5xJ95hf/yj/wNuedyIi4TpmZUSzy6eY1SGpf7mRAM+/FVOWBq4nQv29Bf/vxbzHetN1sz51PBe8fP/ZbP88Pf/lMAfPYX3sQ9exOZbY9Yp5gmIWNZq8ntKWs8sxtextpH3LjKzcPG9/+pH2d+HHnpJ458y3/xBk7dkrnnl/7Ft/ngm2ecumFen7HklVpXLo9mfv7PvUkNjY/96Zd5/NePUgybkZ/6k58TelgbuJyX7kux/TOurIvIqkutMtmODeMcrWW0TWijSEkJOEZDrZFcBKN/mTMpgtWem8MDgQHFHpJuNF438hoRxZDDWs/dsy9gzYAxHq0UtSUu8QPGcY9ShlJkY+b92DMHL9DAOwEbSDNVcN4yDiMpVbRpOAeqDVAM3tkuE4JpmXEh4MeAsVV8K1k2Uy7ssE4T00xOilodpcoG5O40Y+xMzCu5VJ6cL2gNOWYuszwft+0LTTDe1gRiKhIqquvVs9toKNuN/62RU8X6DaEvk3AQ6ppWvaxQAl4wtm+ftCaXglLuBUme/ItXn0vbmiqumyJpXqR700rkt7WKLF7VTR5Y+3ZBmrkUKzk1wuAxRj2X7muDMpprQouCujZalsYmZnh6J2b72uDufKG8X/sGSSAxa47cT/ciCdWGkrsUUxdUlxPKj64AiQ2QWWGvE1TF2NYbMUHT2x4wnGvp/m+N0pJTdS0bGix9+/DcmhBQ2gGqD+S65LVuDa0AfpQKXbLXN5FNdbmvDATWlLvHSdO0SKnP03JtrmQzJ9ugWoRQqY3qDS9ITsc2uKjopvA+oLWjYrBGrnmalmlJFSCQczKIs2bAeE8Iht0w4qxhGD0CiMmkTRGiDLnNaLNjy+MqOQnVsctOlbI0PK1a4gorGW0WjA60LWusLy7NQL/mDKCJqXYZmJX3KYpfzFrft869PqhQrUFb2TzmHDlPF1z3LkUVUVaQ8qFVJM9wYQjjVf6pbWM/3jBNs6gLUiVd5L4chqFvgWsHuUiuU86wRssQ9iyzwHMaIt22RhQSqch9evPyY6bTWSwnWt7nL37pTY7HI8GPglu3jek8sUyaXDLLuvL+exUXHM5bnPFkrdjtHYebBzIIT5npfuLVV14SlU+ZSLmi20BtsgnUujGO9KiSdt2Ay2Y/SCaVAu0MzigUltYy85Q4FQnYnuaLbKycI+wD9/MzQGOUwxnxVuY6MfeNeslJliphD2hyrpQaef/9d3EuoLSoTlprPDs9eUGG3AghEMYB4xpNCwX2zbffZr8f8YNF+cp5fZfj/jWpWZv4w/5Br6+K5kprzeAG0dVucg6l0U4Or+ANeVpJZcIaWVGn+cx6nrnd76i1ENfM6ByWRlHdxlwqrlpSLdgGSjls0EynM8NuwGkvckCVccFBK5SsxRPjFDnNHTChSCnwK8MP/78eLIAYl87Z190HoTpFql0/TBTEllEOhr3nNu4JYWAtM243sAuey1xR3mCVwTaFxrIfG5fzCasKOy9NifcV2oUQIBjP3f2ZIVhSlCnHOAietZQZTU+kdg43VGqZuLt/h/s7h3cjl/Mdxog0cUGxxsLtg4dsQXkhjCxLJi3TfkIAACAASURBVPih63Atmh1qH/sKu6CbwdqHki+ANHQ5NWLuxXv/jHe7oev4pQmlZpSuGCOFEcqw1ESZxUivkcyH1k2IpWSxdHmHt3JYtJZZS2a5VJxRfRtTez5SY20dS6oUSjem6dKfsgpVFEWLa3ubcsphqFFZEMLGquuETLcBv3/EcVfJJfLqyx9nC14VLfGKMyKrWFfZpmDhsoipshZFsHviqrB6T8qZu2d3OGdFg12yEHCGgfP5XjxQWZo7fzjw9P4p7919gFLgncNYTdgZBn2g1IElreSzRRcrE3elsKowjo5ku5xRQ8oXUoHb2xtCCKSUKbkSRvFdpJRJMQlCVT8ARFoVdWUMQ9emNw7jEd0a+/CgT9wVu92R8+mMrjLdVbWxD4GaErpBMAPW7Xm4f4CtJ7ag0aVkBuvYPRZCX6uNm9uXu25fJqetVmrbkZuQDwmahze33N7spEHqW2Mhu52pqaBzY5D+mlxErmN9I+aB02UWKUtPZn/32bvUktkNFlLmMj/pmzvx/WklKPyGFMVy3VaM9qwlC33TWiEVxsZ+dwN9ENRqZgiCNRZJyUzw8tyz2mOUZo4RHwaBgliFcwW1Nl5+/CrWybT1mXbXZ6g1jhD2hLERLGgtz6vSdC/kHCVHQOGPEs1gzMo0v8/5onFDZJorP/2730Hlwm/8n17n03/kTWK+oGqflueJulSMLtQWaATGYcBqi1KZuCTi2hgOA8U33viLr/ChH37Erg4YGiXfYssz5o885We+43N8w3/0Gjp2j9FhTw2Nb/zON3j02VtCs+S6EOMFWuUr3/aU5WHkV/23r7LOMyV1eYoylJiZLkuXPGlKlhwvdMUUeZ7ltILxlJxRSgiBJTasChgn1/Dp/u5KVCxNKFHGiqRIEMkCPHB6wKiRVqC0KgRT5YhLvErivDcdYy9tqXhmkkjSoXvWKssyi4ejD3W8FXmMNAoyiBv2BtUi07R2qa5jXSJNzbSpe8GqIvUcOpGyNoYhMC1RaHKtEqOAA7YgXylE9fVQ2jDhMHdJvvz51S/V40iMUaQsZ6UxWxSKHIq11i7JlGao9ubn7jJwdxEJ6nbmb94sySKUr99CjCWTSbEbJo7jied+LdnyKq3QbTtQ6RIl+VlyaoAABqzjqjYBAUYZ3f/9TZ6oYKNrbHK42imazmqCF2qZU0Ga6dpEUtospZquchSyoDGKuKarXO2a86Web9icsaS0XgmNtVYKMuTrOy2MMrQq544U5gVa37QgwCvJtasoEvVqX9DXglzkf2JbKDl2uRcCBWgVYwXssJGMCxI+vdU3YbDdd1uvm12AMPjnTZwWWX9KqkMahNx4OZ8xXjZb1tmOPde9uVJQNc6b61leasb5ym4ciSkJFCLH3jg3XNC4MOB94KYNxFjkPi9gWxV8fYeuaL2F0i8ySG0FVStaZbbLqC9FKX3Dv
Z1rz1VG4ktOcZXNopFNiTaauCYJOLZdYlmlOFdASpllXkT21odZEpQsW8qcKtoYadi0kfzSdWVZVsnjQ3XCcLte1+LtlxiMWiGuhWU9d2mn3Gu5iFIHpWg9z9S5pcOfZBBRc+Lm5kbuIZKEvOcNJueuNa1YWhTrmoiq4G0jlR730eMW/LBjmiLOK5SWbKt16bJb2vUa2racrUsxTZf4NiQQPF8y2SlEHvVcFmytFbWVErnospyJKUJTQnz1jZYKzjZyVeRcWaNE9sR4Ihfx/42jI+eMXpLch0bsPeJza9e/L6VKTLVTRw3Wap48vRAzjMnjbAMVePLk3JsWi1Zn/kGvr4rmSqHwKqA7wKIpRW3SrcuFISTA0rX+SvUHRJFwxZaz3ETGSIqykqyopmWlmHOlKd0nRI5cLqxLpbqKdQ7rAt7pnkQdcGZEq4pSwuJXbZt81et0RnXNM3BdR1orN6dB5BxartYuUfqVB0FKibyukCKehm+C5lbKgLUQdijnKKl0XxT4jm7fWckuSaWQl0kkjqqiw0jKM9YOoCVpeomna/ZHU3IwxaIJzmKU3OytKEJ4TE73OCtF9TKvaOPIKXfSmiYtAzFn8jrg7Ng1/TPKIgWLVjjjMO7AGgvGBpELOtdx+IJG1krhnaMW+zzktFbQGk1P3FYaqsgaN49ba6J91mhqU53sJhKVWrYDVraJ4oTR8h+70aPaNTNBdVnB9nm2Ig/qzYS9TRm7bkKun94YWgOqWjbcvsOLR6c9T/CuQ8UozZqzrOzXSSAKJZGi+PoG72klEXZykMQkYX2D3tOQYMlhDMS0ssRE7sZX5z0pFeZpxlqNt2KKb7n1kFSFbQ7rD2hlRXrRCk5bnHLddNtoGQZ7xHvYDQeMdeQ4U/LaTaAdy58zxhhqlkNY6Qql4qyWbK/S8NYS1wvjOAoQQYwwBDf0wiNB5RrWrI1mMAHvvBwSWoqgDWdtrcM5KXhqbb3OtFjdUC2LT0VJBoZWjSE47O1jjFFY3Se7YjbBWU+u0si37nWy2vKVV5/x+Q+9TS6KmFZ+9Zsf5eH9jqobpWq8CyjVqD2PqZTEz33ibeZjRCEELaUUv+adj/HasifqyI988ueIMeK958G852s/85i/+alfIIw7BMFf0LryrZ/5hmthpXsTaF1AKcPTcOFHv/6zOG9JZcFozT6PfP3feYxCCrINrrK9xmEHrwZ+4mOfo1Up6F5/8ohf9c5raN2IViajtTWak+vocAiEQZFy5bDf4UzipdUxPj3w8O0Dn+ZNDnvPiAcquSyAYTfssX2LMw47pjmilFDHzmQ+9w99heILr3zwgI8+fRl/I2CSeerZXLvKs0+d8OENaoL5axJ3v1XolC//4i278wiuorKhNilg5tcip48rVKvyrFcNo8Tzoa1IMI1+7guiifl7u81107SSUS1jtCZ0EqjWhliiADVaL6xL7YRLKT5NpxDWXIhrlGiOJclnZi3WyOeaYhJJszGUnGjI4EMeIx1Koi3matxW3YiuhI5HAdVEltUKG9DJWU3JIseTsN/Sn5uF2vK1OEppIyGq7l3oz/0O6giDI2fVQQRbzImcS0ZrgTg0ifbQ5gXMcOPafDQ2qIRAcozZiLS1NzfyM+esOS+Bp+WLKA1zDJzWcwdJgHOue+CeEwd7uiWKvpVSENLKIUUe2Q9DUwx+wpksHuPuO24v/KCtylmlu6dUqS3nqvuumjSiymytWj8LVG/06E2tdNlyn2nTvST12izL7ywDRmM6IKJvdOhWBtXPmY1IKM2j/P7yF3c/TQOl2/M/R8n2qIfalyJgF2ssJdfuc9sAAZ0A2NdwIhOV61HOtsoGXnohNhR067lJpTdjHQbT87y00Sgt95jWmtoEoiX+dpGW08A6zbgbWGb5xYw1hOCYF7m2jRU/ldJgvchRhfgomXM59ziDmshlIdUovvoO5zLakFKhNdnwpWTE+5uL3A9JJKi5lJ77ZVBGYGdayyRDo2mqf3Y94FrBr9gcCnBBrsFaS29upI6g+ydrK5AVOVVpuIoM2TS1/17yfY2WgVGjb0W7ZrbUAi1z/Zip6CAwNe8lSmCrLl+sFVttNC1Nt9zflab6ILcPIzR9+LsNhftmd6NDbkOMwY+dLFugFbmWtXzWGoX3VkLeN/hL3/SkIpYOpUTGqpUhxQWXDaaDe9bYu9vtd1CKXCQnTfUYDZQjpuV6/zRq93+1fl3IJqn263bLsKo19fvLyQClN+Qlt14LAtoSU7nmXEKjzZ0C2sEm1svXl22KgBA8Y0q4ReIDtufSskZShfNlxlppvHKc+7Xoum/s7//6qmmuVJUHQFMN1ZuU1C+OtYqBtlZDTRsuVi7KOa0yPUch+Q+N4CSbCjRrisTcbywjht3UNHlNuNwIRTG6Pa0JFMIZxxB2tDqJrli3rhOGa7hiey7zk5W5aLh1h2KoFx6w25P/+URAZIJ5WljvT8T7e/TaUNkSVC8AVePoRpKq5ASqNIyq6LpyNCI3aUpxShN39ydKiaytkVthSSsYJXKFkonrImY+56RAbkL1mpXn9nAj0ygDdhA9sw1i5qvLzO1xB/YsgYWxMZ0K2iRy8wS3x5jA+fw+3u3EXG7AB02JI+epEYYHDOMtzu9AFaoS/a41lgoY71G9kEEJdp7rO9tw2qJVP5jq85tW1vuaUjOpk4VaEeCH6ZPHlFdAXUlCorVOPQxYDi1rzXUTCaJpb7XSthDhrSAyilq2rVTFeIM27urv0loxzxfZahi5hlxfGY9hz+APlN2RcRfQWpFS6Q8KRYlRNnxIanlcVmqWf9ZaNM16OXE+30mDR6NkjTMjg894a9i5HfMyUVZNjD2XxjoBFATDuizE2NAt0IpM8ddlJefK7fExzhlqsuQkaNRpWVjX2Kf2mtwyzjrSKltDpQsxrmgqqgnxkqZY5gVnNH5/wFrH3d09+/3xut2VY0h3OItI5MKguVxOYkAulVLFQ6JN/2cFSmtO98+wZsAPMu08n84Yqyg1473GD47BHTifz5JPp4pII0uSaaqSEPGln1vrg5UvP/qAv/HNP4tWmg92d+z+6o79L+3YLQNKjRirhEpHZbc78iXe5m9+8jO89/j+OmG+O5750F9/hdc//xr3beKv/Ya/BcAUFl55/4bf+9Y/wg9+68+I5UIpqoG78cyv/dwnMLMc2tYEatbsDkfOfuKLr77LX/5t/w8PLwdabSw+sp8GvvGn/2mmaUEpgavgX3h8P7TcfV3iB379ZwE471d+0y99E6//+NdwczqgwzOKgZQVc53lSx7csjs7UoYHtw/JZeGVn7qhVcuzx3JfvPLKDcfliNaVpi44Zxj9IzZK5RiOPLu/w3T2yAfHM9/zr38agMcv3fCRj7xMa0icgrnD+Oel8LgfWdfCs0/e8bk/8GVoMAZL8CLFrkUzuMML2xWgFYzSqD61tVqjvEYx9HG0PPuV0lKkFU0tGmsDOS8Yb/DOSdOPyHViabQiUpzggxQfRSEJPLLpSbFnJsWECYZ5XtiNe6wJBC+bCEXFGodzlmlaCKMX4lltFCWFcAi+
y9vEJyZRBYpUFlJZUTRijkIQpPamXsJLBTPYC2CrMYIU6M85L8bs7t3Q2+BH2R52DvvDIBjynK9epY3qJt4H3YeRiWDctTGQJsN04FQv/1TraowOdFKtT9K7NLw5Pjg/4jPx0wJ40NLQTHGBpgg6CJBAibKiIUPKbSNSa5Vn3QzmtOebd1+Dawdevk3YsUu5VQ9r3i6NVqlVij/pmArP5+gd2Y5ICo3uZ7nayIVdgkiHkahNjidDXqsVqa39LAdr1XWTJP4huV5BtpYijZLvKVlZ5brJilHODKljZBOolOD4axG1hAwJM7lv6RVgvGVdJ7zvktFSr/CqzYNWa8VayeWsdft9BXhSa7k2SMYILa8Uac6dN3gvxNytXpFoAYvtElylxFNXigSsaqVxzrLbDbSWZYBGpikLuqI6pKLUQl7lvNXGyLVrDcoYalEopzHNMJ8bl/mEd0aiaVqlKctlXqlZtnCV9XruboS/zf/emsRVaGNZ1rmTNyWkuSqwKlxjbZSGVmOXsQp+Xfxtql/vAnNx2vUBnu4qGFEYxLhCVBijCc5cB4beC7nPB9vnsoVStvdavr9YFRIxZRQHlNaM+z3OyPeZp7l34PJKKWNUbwiaRBXp7iOUIUOXqXUQQ9WSO6aaIpaEpXvxlAAjZHkhOWnWWpSVxln3uAlqo8TY74YifrVm2WAygreX0Gi10ptKBVoaJNtBb6U0StMyVNVGMqSMJc2iILJGY50RMIhGmlwFFUGtl7pFE2VZDFSDdaI2y7VQUmHJmaYUxll248A8Td1bq7sCZCY4sZugFNU0CgV69IJCEWPmcrkIFK/LXGstDLsd50kAJdpknB7RRgbvWnvG4bly5O/1+qporlLNPE1Pu0lTchOsd6Qs6/BG5XjcS1MU5WY1vmKsQBOMNrjRscZ7lB25n06SwG3EqFtr7bkBMmHAVfb7EasqJd7x7pMntCcrp1Og1Au7g+V0WimdNgf0G6RPYdRGLGo9bXwjStVrN/uiXHALWXyxuZrmhWkWupZaMvOcefh4B00oUesC7969i0czhoH9fmQxDUzjlCe0MQwPdpT5GeNeZIWmaarSHIzpBMSC3lmEMeiwJoBSLNYzTQtlvuCDTJBauuBoXO7eJwTL668eQQWmS+Q47tB7zeV8km2Et9Aira0cXxlpq0xb53Xi2Qf3pHcaD1/6MCleaOottAtMc+V8bgxhzzjumScx+ubcsMbLP8cCymAGg/Ueaw9QdkL9KZFUohTdORP8iNWKVqNM87YHLDJNRTdSXpnWRC2V4HagG8MoWuZpkdR3byWxXrZqFoylpHKVt4CnNpnwbQbIeI4MIXMN58Nw2EsWVasyFLDWYrTrs145BNdFsKO1veAxUIZlTagG3o482L8kGw3k/4pqhD3oIOGGQ/AEMrlPqGSw1siLbA+m6Rm5LDivSalwXu4o1VJQPHvnxMuvPJbZgMtUVt67f4vTLz+RdHgfxCsy7FBZ9PG1FJb5wlQrh4cvk1vidD6xLoaiRkxw1JiYFqHrPLm7Q1vLbZC0dElIF0nF/hBY1ygSjpQwGh4cdtSa2e0DiZVYCjc3t8xTIidwLhCGHef1KQxnahQfXOFEKTeUMpOKYppXUrpjTZXDfsRbgzGKw2FPSp301YDmAcV//U99H7/j7/w6vvPP/yF2x4f8a7/7P+Mv/PYf4gu/6pv4w//X7+jNeaVpizJgD4o/+S/9b/zxv/S7+OhXHvdG3PMf/7E/CyrTmBnmzJ/4n/8VwPKX/tEf4Ae/5Sf503/o+/j3/6vfRVAj+90Np9cT/+Hv/B9Ide7aeDn4nt2/w7Ke+b5v+wl+6Nf/PLYa/sRf+IMspxP/96d+hh/95C+xroX9bscSz8RpZXmBVvS9n/w0H/nSQ/7N/+afQCvL//gHfohPf8PP8guvfIF/57//dsIIrSq8HXl4I7JAb17m8cM9qEQpIr+9PbwsQ6bbZwB85I0Du3PAuwO3x2+htZnLZYFm8W7g9sGe/VFCf2NaWMPl+jO9/qGX+MT5DZY18979l3n1ta/hzcd30L4MSvGRj3yYs/+Ap8cn16+ZpwWTfG8ILK0EBO8GV2S9maF5avbEKvf9MIwd/FMYtkwWa2UQojc61VGmj2VmXp5itSW4A2DIuaKVFbN0EaR2Qws+2mjMKERSIT8uHG5uupSm0YgYpzmORyl6W2F/OLDbB+Z5obaK8/7q+aEPkZaYuTs/k7/TCVwip5WYWg8Q7kCIJtRSrTo4SSfQFWeDDCFTphI53IwY0/+dJnlKTstQbSmRu/s7QDIRBcak+kangrI0NLUljG1cphWj1dXfVou6SsSUFo9YzlU2i6pLEeE69bXVYqxsypc4yZBdazAVazy5JdK6yLBGPT8vUQKpmJdJ/jdnsH7hs/F7+IT5ndCL3tqqbHJSX6H0oYlEk9hOG259kK6vmwjovq7KdQjatm2VUh0IUMFJf5ZzZu1ku81DvW3MSl0Jg6MhdDznxV9inOny6tQ38SJllsAZgUKEEKS41o0QjLyvKpAzlJoonEQSpsx1k7EsEyG4q09KGyHUFp7TS5Vu1426SOAdRlsqywv1eo+qAAE1aC35hq1hjCWnzBoTIQQaimVZoStKpHa5dFKnoVTNsl56vpnAqISoLAHYWkvTnHMlFyAmlrgKNKaZHvq7kstKKbKlMdpeJaWSHSpwJ5ABeIzy3Ns+B2g4b2mtsqyZ+/vSh94epWS4Nu49uVX5GJXCKcX+Zk8rqzT3TaEw4g/WASG0Fgka715hhUIbub/HvWzvhVJruD3u2O93OCvD9jXdc7qfCIPtOWJdGmjk/Vcaif9psCwrzTeU86SU+oaL63azKUVaIcdFIE5afg7xW8m51qp4zquSmAbvPSWKXHSJSbaRCk6nE+M4Yq2lNU3uap2lRxhYvXkGC9rUvsmFWjIxPh9CpCqxAOK11OI/aoWy5Qoiai9tDRhouogMMa3YYMgpUZL4+UII7IZRQDw5oqg9QsagtJJtk2rc3U3kKgMKUZPQn4tS45+nC7ptg4FK7t7EXGaBytC4TPey9U9ZVAdWKMNhF7pSSZpn7zzn6R6aSFrDGJjuE41IGHY4P1L19Yb6e76+KpqrRuPJ6Rlh8Ay7wNOnz1CLInTU7LLOrOmMtsN1DeyV42ADGlC1UmNE14W0LmDk4V9a4XK+SHfe9SHrnBiHkXU+kbQFZVFa40IgPsucLjOXc2S3f8CyXqRrbj0Yb9tIwXUDgera5ao6brX2m/457WVLh99exiimJZGawe0PHF/yLHPlaTrLQ7FPEx64GwYrsgQxpibWdcI4j9GWVgtvfN0bPLx9yPmy8OzZiUHtuawz3inGITAOgXVZKFmmGsYYjrcH/GhZ15VSMpdL4u7JE8bR8PLDI96NKGWI+Q7XFk4fPKWWxhB2uEEyC7QRL8/T+xMhwM3DWw7mltv4CssyMe4sKEOthnwCXVaOdUbNz1DJ8crhBtqCdhIMfH57ojVF8J44WS4o1gWWGY7HI5fpxPlyz2sffg2Fx+iAtQEfBlCysbHO45zHWE2uGudHgha
PlSuFJS6UtIKCQwhY58TAXiXVvdaGtg3rHA3Jp8i5YMOuY2ENShluDwO59YC/2ouC1ghOiDiwSV1kildbzz57Qa6merHojWV/GKgNUqsUtfQoAMEx57iw3w8cbUa3hIkrWY9Sb3b0qDaa8SjByjd+j7Ea6yymT5FTjqzLIoAGK8GMeqP6tEpcT11e0jXTqgeIlkxTldtXXucyTQzKEKzhtZuX+PKX3+LZ228SfJDtyunM7lgYlaKsF6aLZhgMOUWm8+mKTt6NO0ZfGUwjpcL7T54SQuD+NHWEeuXd984cjgcOhwNKNc7zU0o5Y/VepsBZYdqrON948PKrpDxxmZ7iXORReJUUn8clxBUu08JLjx8zDgPL/TsA/NHv/Xa+/t3XoRrm+5nv+Iv/Kn/mt34ff/trf5EvP3qP/+S7/zApJbwf+aXX3+LP/mN/hf/0e/4N3H3FHwLOBpZFZFh/7rd8P7/5I7+Gf/nH/1me3X2FEDLGNT761sv8/u/+Nl46vsK6XCglsiyi0/7P/+B38fv+6m/jG3/5o9AKtw9f4U/98/8rb730hA9/5Zbf/92/kdx6QaQ0Tx9e+JN//Pv4jv/9D+OfGsk4GZfrM+W3/fAn+c0/+U3cHh7RiGjj+nNKczzeEoLlMp1Y14XTnXxdYaJUC1SWJTEER04zjcw0S8NzGB5xwLMsiS9+4QvcPhipnEmz5ZQCd/dPefedZ+z2HlTh6XG6/kyf+cwvMf3wM9Y1obuf4fanG9/6197gb/67X+Tpe19iZzSjf34W2GGlXjQpCvwg5wuf+g8+xhf+hXe4/+YzPhgoFqcHVJMCQWlNzJEwDgzd14CClFfWBDSDNWNvGk5ChGyGpBvr8j7O7YWKaRJrPjMeQ/cZacIgm1fvBpYlcnc3YT2kEmlIPlVpGTMaUor9fmygMkvMEtegtuZty8GR50EjY5zE3rdaqUpyaWwAp3r2UFwJQ8BZL1Pzmjv8wpCKUA2tN11Smyh5KygSyja0kmdLo5Fbopae39Kbke2soilKa5QasU429zlX4popCayzUhxq2WBYL7/vNcexKzpqFcfVs/UJf/vuB0htoannG69SCjOrPOMA5+X6s9ZKz1MLaRHCX6kixUolE0zgs8v/yS59LbvwuMsYRf7XtDRuqjmKlqGr2gqf9rwglN+3yXS6yuaHa0ak6g0etKoZ+lY4DJpxbyi55xYpEZsLatszTRPO6yuRMGfBknvvqMb0QllkugKiAO92lCLQgNalnaKi6nEBRmScVknorsgQ5euNMaRUe7NSYC1sWWDGaKz28v2ksLrKA0vLWCvXSYyxqwckcLppzeW8EuOK7V7AUiopbsVjl5Dq3AfgmS37qZR8lZdu1EhrRUWS4oJSQp3LuTGdG84LTdgYR06C0rZWwslzESANtXvujabVUx+qmO4DWrE9V0z+XM65mmU7EqzFBEPJmdIBI5I9mtCqdWm9wVkv00ukuZcGReS24jHtGW6YXmvJZkcI1hLLYJ2XGBetmeeZZ3dPCMExhCC5S7F231Ahl4YNmriuNKRpUEVRytK3PLC23DMoVX8/pf4IIYAyWJepHcUfY8bavhGrhXlesSaQe9Nhu7xNIzJlpRStVMb9Ae9t39DIxmqNCynF7uEPIgOMEYl4AeM0tWpiytKIBE9tcsZqK4PiXOsVhlXSdk1AKxVrlcSydLCHd0YyTgHbJGh4Ok9sGZPibc/UlrEd7X5/f8fh+JDLdBHPbUnc3t6yrAmvHcF6rBN6bS7ShBtr0c6hjSY1RAGlFesifni0pqKYlhVnexhyTqwxio/ain83l8QaC7vjLTmdKTUzLzOH/Q3/oNdXRXNltOZwEM+HtZbd7sg2BUEJ4CCViFaFYZRpG1XkWtrU6yqyZc9uMMTSu/8qJkhVQRuLMxqjZApIEyy1MhatInYs2FBAr8R8z8HvaOvavVaqo2r5FU0SbIbSbb+1SQefN1ZX6tw1FFF0yZRGy0pkDEpIYiRzVU6nuGKdSNVq2Txchn3wGO9AQ6mZ+/OZUmR64aw8bOYpYccDxnhKgWVOBBcETV4qJM1oBlqT0NhSIs5oBu+IS6Img3c7wKG0eAvomzpnZFVsjAGtCClTUyYuFusGdHMcxhtKETN6LRljHDurKWq8psvP09KJPH2lbsVLscSVwezwxkC5CCEunnBlZWcSOt2h8CxrzzKyEurn3CCbJyXF1jSvDLsDIQjePhaRAwh6Vfwjh8ORorQoPwwY56jxQqwrG5Ep+OFKm6pX0IXQpVQ3YApOu+CsvU7TtLVdRgBU0Q0rLT4s1Y2wCpEbSrBlF7AoI4bUKk8n7wMNh/Wyf9T969a49gNZ7ofSygsPpUbKiqZ3naADITi0PkpBQZDrCKcBgAAAIABJREFUqU8ifbgF5NpOOUNr7A6th+VVnLfc3hZUEn23sYr98UPc3b8rspJauXlQ8QFUk3uqKZjmC+OosDZKhkpONERCRfcvDEYRjAgBVE0UCrsxcBgco3Xk3FjXhlMD0+nUAwkNaT0zjAPT5SnrkliXBsoTy1kkj91I64c9h90jUlKs64UpyhT2cTlyTILwVybzcBrxxZJs5ryb2Y+elcoPfvxn+ZGPf4Yn+xPf9akfEN+nlvcu50K0iWIqT8qJ9965Y384Mg4eZwKuOR7HR7IpVLJBMUbka+fdgt4pfJApdxgt037lk3/3Y/y6n/86Hk6PaUHh7I5f/aVPwI+M/JXf8iNMy4JLjVJkq7K9HtoHvB5e7TAAzW//iU/y46fAm68+JeUz4+6WXFZSniCI5yqlhZxHlKoUnrCse5zd46zFe8G1r7GiJ5FG3j4MhBFQg/jt1kbMC49e2lNVxocR88pzqcTuYDk+9LgZxrDDBZHY+GFA8Savf43Hrpq3Ds+16w8eHVHJsCyZulRuDuJVe6ca7pXIfrwSL5zqgaslJkpd0NV22dEmvYl9u1uujcWWQ0UVrb91Fm0ylULMMq2uzXZypiGXLg7MCajiE+nhoKWIhNuYHh7eZV5oLcHEXdIsWPBKrhlnA62ZLmtrhNA3RjV3/LFlNx5Y4wJo9ocdOSfWeOmQFpFX5VIJwck9G/NVll5rEUnWNh1H/BbiK4h4H0C3LTu4F0GGLZy6dS+VFOuylSq5Qk44p69blC1y4kXAk0imBIft9Y7Xh0/yxfXHrk1FrQpjHTUnAQSoDXjxnDa4+V7ypvrQIn5cFyHVNhpGb7LHbcPY/Sg4ggcXQDDfEvwqHkX6WSzncsnyuSoQmX8nFapOs9uCpVWXH+am+halPZdHtUQpibpo8jXIVjFPq3ydNljrqf182HwgtSbxXlrxRNUe1p7bKrUJXe5ogLpVBACaZYlYa3BuwJh63XBuMsQtE2hr6loT35DWuuf2Pacm9ngi6HJM3euircmMq1DbJINM/Mo5lw5WqptSX8KJFWjd+p/15rWJhFJrIUxKnpwi69Llcg3vPcQKrUiz1zertRVKTgTniFGCg7WyjGFHVmIJkM9cvNmGTiDM4scZx5HW+qCnQsuNmBPeCxgrr4lWCsZprktNpBlotKufTS
uN15bSRC5aEdmlppLjQl4XSq3sxsCyTCyz4mLEX1RyFXkglVrkz+TTlHDaRkZpS6uCUqch9YGSQUeuIiW0TWquqjoyvvsDN/WrRJzI/6+b6LNrA9Xkmkj9oBAwjyH25rghmV7C7hefe0XJUAOD0lZUQ2vqAxpNLq1HbhRiXMQD1XHwQsjtAeb9+SBWH7kGtte6ity1ttaVapbzMsvW0hkOh508T2jENYqvVBmWHFFWY/FQFFUrbPCoqsiIpcN42y1GUi+lknDGdViWxao92g4M3ndJdCOuqxBOlcI63S0fsZMFu9S5SQMtrCEZqOf0/5PNlXWSPzCv+aoL3iBCxmiatpQ0yQVkNK1oUlkoLaLQ1OrIcnUJ0KAbUrW1YgzVcrEoxLQrBaCjoiltpaqMshVlihxySkzcPa8d6AjTHjshzZNM/J4bceXW+RW/W2sv/HfrDyGugbVpLXCQh5PsjHtwJQWN5GXJTSc3x86PGO8orXJJWUL+6oR3BqsVpSWRGNC6vl6Mn4MHpWRrklbRjDdxP6IUeO9Q2hGX0uV1maqUHLTaYg3U3CQhvUFSonVVNUOKrJcLSSeUcgzDjpQTNUtgqlKg2yCI4NL6VGzF+3olLjbpQkhpxRowzjO6ClbS7c0A4xhkalVnJOYvU2Iix4Lf35JLZY2ZNWYamjgNWBcwNhDx7MaBMUiuzTRdoC5XmaYxFuU8Mc+kuJKyTPeOx1sG3cOm+wOppS24+bnUk9548cKDWXUyjvj9NJXGFjwth7zuB29iMzdvIZuiXhDtdEVJ49hEfqPpn9t2talevGwPsyo5O5VIKatMNLunwugtW0YeQFpZtO4aGN0wul4f2NtBLORLKMn0Iq6yO2iMG4lRpD3WWJHjaN2bu0TjjPfi/UspdkIWKDOLbKRVrOsFMwVrFmJKDMOAtU78HylTi0zvLvO5y3gqMc2C2V0XUkQkBBjqcGFZIiKxdjQC3u04n2eWZeLCCg3y2khrozUlYaxK8/rbL/P28JT5ZqFWmbZ+7qW3+LkPfx5V4c35LWpGDp/a0Erx8k/eyPf4vOHtt97l5Vck7yxnycNZ5sy6RpZ1xlrHeVhfeDaIxyInuqkXXn/7MV/3uQ+Lt8cqSobbL93wjR+8wc8/+hz375+wsxSl0wvdVS2QYqXWFa0tH/vF13lr/4wvvvIBy3phzKN4FGqVbJb+PAPdC6VIayPeGRqWkqXhuT/ds57Ex3g4Hjid517EKXKpXOYF5wK1JJRRpPr8wLHeMuwD2jjGccQHx5OHJ+4+1kMyrSIvjfw8yodcNKY1lG5YJ3K+J990z/IhyUbJqYi3ReX+DLY9b6pJQUnrRUztzx7bp8Cx/w26y4xaH7BJgbvFash2pfWpr6EWQbynKLEgxhjiKjLF1sQTLDIVkWQ5byUTqs04Y1E4WtWUXCTrz7ZrfpX3lt1eNuKlGUqRiAFrLNV6tNaEIXB/ekpKUmDK3W+gbZ+fSNUFEtGD4mvrAb8Vo+jnqJwpsrXq03o5yFBKCm8p3ORZspExRfIuPlXTiYDyzOkN1RVThxSRFVANqzyP7Gt8fvlRSm8wpWmQe8eZbTq/DSu356ecja17opQyGGXlnO/bnevwUht01ZRWu/yv0wZ1b5TU9mdbc6V6tdF7qV6AXt8LVPfkiFyNJrSznOsVSHItwpvcR0pDyfK9RYrX8y+VgA3o70nJUhhDf963hqFDD4oUouLL4rpFa3p7wj8nNkodYTtBzohnuf/e28+1FYWt8cL7Is/h2okiEpzc3wSkA9V6e//09T3cKHpyj0gjvxHyRJ2hX4B1Kcnw6ipNJVYanFWoJrJKrURJ0mql5EiHu1OrAJKc2ZqDIvL/4FD0rRJI/IH2veCV87JUgVe1Ju9zSYngPKWVfo4KWKPkhnIOaus4ddu3X1vVpjquvrDli2ljxdcpDCcZMFstDWlO3aNYGJwE0tciWHtpVq14wjYEOZ0t0Aew4imTz6kU+Vy0FRLjppBqSppFlAxO29YoI1+3WRW0Vb3f0tf7SLbm3Z5S5cPIPRB4o/o1LdcSRqAQa06Sn6YN4vAW0FcpMgyoJZNKRRtYYsQ6aRYbfUuK/D3ChJFnSo2NF8OeKeVaW9BE9jivEi3kmsVlL1t+2jVzLYRA6iRHjEhZJQBb3s8GHdpl+uBIrsNcEy01rEU2kIjfz1jfoVuFRux1l6jGrFLkCJkIHbUPmhhXUU7IHXUdLvz9Xl8VzVWpidP6lGWJXKaFwQ8cdzs04q/IJUoWUzrTYsFYjQ8jl8tdf0A3SgVjd5S10hee8lDtxXPTSLdd5cbTXiAIKSqWNVF0pDYnh3WVvANtByyaWqC1pRePW4GpsMbJBV46laVru58nuYu2XQ6JTX4gh1EsmfvTzOmy8OorO2pN0Kys9LVIBqpSOCy6049UK1hlMcrTWiFHGMLQJQyVlBbO5xM+aFKZKUV0zdvhoq0knJ/nRGszVUkRo61Chx2XCEqLjyDFe7mpYmZwA1ZLlsOz92Nfiy6kvprd28o6LaQ8UVvGBMUYbjjsb3HOMy8Ta1TM/y9z7xZq63re9/3e43cYY8zDOuy9pa1j5EMk27KN7Dh2kpaQximtCUkIpGlLaKFQKPiqNFAotPRwUZpeu1cttDSB0DYhhUDaxk1ix3aMZSmpalnHSFrS3muv05xzHL7De+zF8465tmI5FHpRTZC2tPZYc3xjjG+87/s8z///+89Te59axzOWRrgppBgx1lDDyu0yY7uOt954hCLJZ+t6jOuZ18y6nrh8MKBVJYaFdYn0nWaeI2E6cnj5grc//DHubl9xXAOlwjCOuKHHbwZqScTDnv2dyAe6YaQfRoq1zMpwmo5M04l1XRn3O9566+OAbx1nLX4CapOmyJdMK0NOiyx6pbAsC9uLDc50WCc5GSFGIfG0Is0Ygx8sIUhBorTQ77Dt826rfokBjWqgi4J2DutbBoiiVeuK07TSd4LJj7HprJshd4lSLG42Q9tL6/2GpVO674p7I9cqhwuROJbYiFvGUJGCfTpNdN01fhRpLCg0YwtbVvgK4yajzdmkLi4ykQNI1yvFIHksteC8JoSFZZ0aoCWxzFML5Vbsxo6iLwkpEkIiJM2zl3esc0BhUMoSV83lVWFZIymL160wM8+BdV2JMRKsHLLfe3qie3akHzp2m8eUkPnZX/tJ3CvH//Ev/hZPn89M88Q0t0IkaX7+v/kJ1mNlnjMhFMklIklhpzRP3BPeffpt3n77LfY/emC1gSffeo9a4fb2ViQYh3i/7h3vErcvIzElpuNCzoV1nrm7uWWaZh6bx7x8dpCupsr8q//0p3mxviCnDMqSL193A188v+GrX/oWlcjF1SU1dyyzNC6maUWpiZwtOXUcD+KL6ocRdeeJM6S0obNbnO2Zl8CzF3cAvPPsCd3BYexAdxN5+eIV1ll8J8X6Mhfm5dsMw0hFcfP8NZ72NGUORwkYP62Zm8PMb//MV/nCn/wmKsOXvnKgHg3PPhru/86Tb9ywnSx9b+n7gbu7md/+i99k+ujK+GXP8U4+R+taFo1pzRHtZ
B3JYJTCGEdOEpZqtGdeRI4tIJeKqpZxM7IsIhs31rZCogr0ZZXOqNY903IU2YwSD1JJC3KqiNQSmNf13juw3Xr63rM/TlwMlwzdBbUopklyxLq+JwbpPl9djAwbOVxYJ7ktx73m1YuZzbCl73ucN5R6ohRY1kSM56+8ZV1WrNP0g6CFh94J1TXCsgQquakZ5HDXdRrvdJv+NCnZGQfeplba0pqGXopWFfFdZVkKMYIpCutUm0jIIbtQm9G7NKmdHKxyEUR2rRVrJYx8mVcgiUxSnwl+lTO0ROIfIt3o29RIFBu276DKZFAh0yRjDCnp1tiqlCKZP3VR982h82RMtXws2iG/ta+QwuM8fRMgkfeO0zJRgXWJzFNoXf8W/K0qGJGPOduTtaZWTUUOqNb5RlnNzMvaXr9FVcGrxxjv/VmydgnBrJZIVfr+vdVao2oDO1AoObaQ5qZAoakk9FmSJ2eLmKMg8uu5yazRWbVDemmfW6ZmKVBNe09LTTL5VU2eZZxMLWmyPFWZTzObzYaqkcZDTsSov6utrFDoJkFUWjUSc4sf8Q7rPLVU9oeJWkQybLRB1VVeUdXUrChJsdbc4kkCIU1UZqzaYq3nDKQpJTewElAzEJmmG6G+ZtlrvbfSsAu5Fe4VUw01KIrO7fUaaha55X0B7MGZRhZVLZNLaTKpfQ8FYqGrZuxGkfdmgXsoPMuyoBqY6hxRgs6gMlrJ98BgcEbOFqUKVdA6yZlEV1KRAYL4vmuTiGYE1NuKK2WB1P4p5wbnjRT6RqAta1xIJZ4/JBSwhBXnHUoZYoosy0JJVST5nL8mmlgCOhe5byg4LLFmVC04Z7DGUpPAccSNUO4nZiEkoTYrKYRMIwMrLVLLtURCFchONYo5LK3p0op7Y6nKU8gsy4RSmr7vmZZAKRFvLc4YdIUQV1KQ4tAYTcmREGacG9BneafWpChrdi1FpL3Ote+3wyhzn3Mm2Vhybq5katUoI4Hr56HL7/fzfVFcSSEf6TpL11/hfc+yLEChqEI2lWk9UAHnNijjWZaEVhvGbiOdkBqJZUVbT4xFNNI5ywGrykKPAu0UWQf2d69aLkPPEgOeAaqn1o5cOsJi0aZQWEk1YKos8mfaUi0087C5H4VrY8g5UkvL0Ghdg3PWhRgQW8dAFW5ub3n54hUf//AFYVnZdJdixFWRrCKu3+CSEVpfDcxxYtILFoEmrCmAjoIoDbIBqtYhWNZAKWCUZ/A7pqVK4Zgqy3Jgs+0lp6eILKSeDsynic741gVPDBc7allZl0hE5s7WJmyn0Z2n4NldO+rcs8Yb8Jlh6FmWxP70iuPpDq0NTjuG3hKKhLx57+lcx+1+z9XmAm861lVMk1gJ9LSdI5bIMt1xnPf0m5Hd5SVFGTCF5y9vWMNKVZndeMHd7QFnHdsHPQ/f+jgXlxf0u0KIHlUVNoGzBcWRVFZ6JsZuxO8cqQZSXAgLpBDYesOmy0S98vLlU15ML6jVYmzPMG7JOfH8EBk3W4Z+IKWK7zvp4BehijlniHcnhvER1vWURfCutTQzLhK2LJk2GYNCtxyZOC9kVUGLn0q3qZVzFq8g14WwpiahkC66cZ7LYSDnQE0TowPtenK0aCVStJQnYk6t86/QuqDUzMqAthZqIYWISgHnJPet5GYcV4Vlfg5VYXSP0plSV+LatNGuoPVMTVmoRChSCk0SYO+9J9YYuq6njmObCju875iXibFkzrkrMcbWgWskrXhi2FXxDB1PnOYb7m5O3O2fsax3xDg1NG/PdDrQdYaHjy6w5gW9l0Nl7x3bUWAOT7/zZebf/Q4hFlLN7K4u+Af//hd58lMv8beWv/v3/i4pFd799FNZpCocD4k8+5ZxE4hpZjN2PH78AKUMx+OBeUp8+1s3HO5m0mVkf3hFCBGjBTQQ4+vi6smTJ6gvBYy2MsUOiXe+c0P/hSc4A+8+vcGqC7RJVD2xrEcEFy4Ie/rh/nfNU+Dliz2n057w9SdYPfD048+JIfO1r7zi5uW7dEOH1nDsDgD86t/7EhdPR2lSpIXt9oacNGucefVQPFdPvqVRL1ZyOVHVC9ZFpo0ysYUcC6k2r6gxxA++nly9995M+OK7zPNLvDf0Xcfd7b69n4o4jSx3E8v0egLne0U5OA53ipsXJ5TOjVInz+mdphQxjYvXMVLQ+M5QUr6XCltr2YyDdIPJjFcjWjlOempBtlBrpPOWlBIpJrpeUPgpFta83HtoO29JQSSyOWvQGpMFVWxQ9P0WtMabHl0sJcDWX7AdNhjjJK9oo3jz0cdAFdZZvErjxmB9YD2NdNozDAriiYc/8IB1XQlhYg2Zbd+B3pDSHTknrq8uxe/LINLOvHL1oMc7RwiWvniuLkemec9yEsLnOPT88A9+jHWWsM7DZmYmSUOvZfFYiwSrLxIar3VrvmnNbmeYDpEUK6UY+rF5WRJtSlRlkqCEAnaW7zorGXwiZSqMWyM5M0EKBKVpssoiyHxdcU7hO33fHFU1gJb7rqWQIBNK8a5Z6/FdB6i2Hii5Jq1xTo79OSlKOQOllMjJRR2KsTL1o1ZiXAnhKAHJCvqhY9wM7T1qUxBtcXbAaksu8ne1lsnd6XTAeYNR8vzWvT5iSUSMoW8FkxDcrDSGcmLwPecJUSmiRCmKFmKrKdU2Kq1pkj+ZTihUm5ZWYgz0XjzD5+tNKeN0J2Q8LYXwuq4Ye54iZZELq3Kf7yXyzIzAXl5LD6XxVlDaifS5gRV084tpJSHZVrVCNBVOd+LxpUqQN9kKQEdvsFbOYMZ45mnP0I3t+RtiXolkzhqH1gIjEj+pXKfzFjOKf0rkonI9SotsMwaBiQhBMWLU+Xtc7s9vRrt72mKtRQKMncU7y2Y7oqpmf7uHqnCup9bItIrU3RrHZjPw7NkLzhuFNorNuGGz6dmMEmZOTXgv0TRoTc6JZT1xWmcp3Jqy5DSvCCG0UNq9gJFMRaN7jHYyKFYB7RQlaErKKKNBF5ZVPG5KVabp2KKLztNbkcDdx9ogmVqHV6/kbNykrYXIYXolRZN1GOWwvWJZTqCqeJk6R2csShVSXYihNgVZgnxe4gWZr5UMLRQGpy2JyhoXjIHeefFldVVkpQSO66mdizQKiypOpt9qpSB4/jWKdFLbTNKaVCCtK2Gd8bYX+0QtQMQokXXHWAgh8vDhNWAgCoExhJVSHSGq+3s/rIXtxYDrHVRDKhXfNxJqDsRcqLxWoXyvn++L4qpSiUb08aXAshxa9wRKjYK5NYrO9JQYWVcxkmsHh2W5D5PruoFdJzd/qZGYE0b1lLIK9tZ5ajZoU7Cj4TRP7E97Li9GNh28ioHjcmQpt1z7a9YURcplHV5DSgHQGO1BQwiRrlPo1iErWQ5IXd+3kfXr13g2DsufGYpSKN/RbTdcPuxJUcJIpyUxrxHbFY7zLX3XMQwjw3jF1n+AeV6Z15VYZpR3ZKdI3qF1pleBmDIXu54ybliWyDzNaLen86NoYyNQei7Ha3bDFhBDcudH3nn3CTEFTGfZXo6kPAOZYRxQynF7e8IZxbjRjF2H1pZ5zuAT
ZiOFxVIVz493/NAP/gHiGghLwOA4rpWKpe88465nHCTNmyzwkauLkZu7l2wvR2IG2RkCV48fcsUHmOaZw90R3zkuNlt0jYRFvsjeFJSrxHpiLRPGbsndBZdvPSCmliu1aEJaUSpTB4/ddehuw7RK+J5xhsuLLS9eviKW3DoYA288vKSkzPH22AhxhnVeqbfvsRyh9A7feQ630PVbUsqsIbDZXfLo0Rscbk6gey4urkE5kQcWkTrWWvFmpFTfurGZXBNu59HKUrMihUqOAe+QDR6F6TSD7YlryykrBaJCmdyyeUTnvUwy8cot60bphLIJrRUpZ9Y1yuEjazKgNCILqxBnwXVrpYR+aAxDN9xLapSyQKbrzxAPIaIpJCk9x4wzWwoy2RKjOyLRjCJz0EbjrUKTsUaBdRhtRDJoNN45kTSVgtKjrAP6kjdq5cMf+SgxiUF+mo8si2DCYyqsrTFjnSGtJ4yG7TjgjOZ2vQGl+PRP/DQffHTNGiaqSgKDGb4mm4yCi8HinWPzd36QD7xzx2//W1/mercjGUOIEliplGOZIv/nv/55Ogw//tc/ineldWAFGT7YHlftvWTi/fBWC7haMIgfDypf+7PvsPxQ4I//9z9BDCe0lmJs/7Dyy7/4Zf7Ef/ppfPDonFnXm/vfdXE58oEPPuBwMsScMWrHy92KVi/Y+UewvSHrQq4Fq9uyHxdUthjv0MOGl7f7ln9nGMwWgM2moBdFKTIBqttCrBFtek4fjfzf/84X+fH/4C065VBo0rHyk7/4Mf6v/+JbfPMvfYf9D2z4yC9d412P0xalDmy+2fGpv/I2dTpireKDv/mY6//sis/9R7/LF/7z7/CJ/+5NHvz6lqQK2XpQig/9rw94+289IGfoe0XJYkA2qlCy4mpzTY5JKFQ5SkFSHLkmqooolbEKHl1dNAlW5NXNnqHfsawzIQbSXNE2YE1HzoocK11vqTXjragWrLGsaybmiFJWJmNJgDrRZJKSrnQhsn8RWuNNyf7VCkDvBqxx7G+laM15JpZIITFuR7b9BWlKUKSwuTneYfqRNShCVry8PQjUwmzu5UnvfjMxz++Rc8Eby2U3gLK89cE3eTZMvFhvuPnWLS9fPqeows1PryzZ81s3B35k4/BW7r+SLCUZlE6NuNcmEFi6QXw4MVQoGmMqOYm0TuA5RWR2VabGWlWsGYgxtIgMQ8krzg3kuopkp4BREnkivriM0eCqaX5nef+8VdTgqWdqrzacA73HYcT7jv1xL9NFr5tcLJGCTOrUOXfKyAQzU18PsIDauuRVa5KBhOQR5Sp+0Pc3qc/GfJCDmTZGzPxVPCyqyiRHIkAUmXLvxxUJmEyRtBbvXikLqmaU6ptaQEinOUWZ+iclfycFTGvgKS2ZgZ3vWqEt/qPe9ZQquHJ5jxzOKVKMDYUuL8R2UuzlWCglY+3YsO7xnvyYY6HvB0oW+bx1Iu+dlklAEjQCpnGkGATZLacdcqQ1lcVfGJeEcUZsC0UyitY4Ax5qEZUNlTVM7UOpnMWsFZHYW61xzokfbAmoNjG12tKNvaDKkWnifn+g6x3GNz9kTXT9eVLbJMQqUpHAWGsNCovrekKYUAqcdfRug1aa649ckVNinVf6vsNYRYotz/Dqmic7kfH2ncNYyxIqMU70/Rbb8j2VKljrhO6bMzH1KNtxPJ6IUc6OUYncerfbyXOUFWUyajdirKHUzLLMaG0ZBkdIgRQVvtui7EoKKzlDyorj8YTRpWU3ybS5KgV4Uqrt7AApOSpFMiKt3FfebQihENdMqFGYBqWTQpGKd1Zy/0KgFuEXmE4a/VqLrDGXSu8HnJXcv5QDxo4kVXDGYJ3C9xCWet+Q1Urj+55p3rfJUJXvhs4yRQq06VNhXWJrEouCJyeZCJ+lzTKx9FQyKUpkx9htyArm5YjRHms7uu0o32lalEDNrClh04Z1EgpnyRXnpGpMWaSQ74uu/Z4/3x/FVZXF2tkeZS13dzdYF9rV1UbOke6GURWHBOBpZclLFIISFasld6fmgqqGzm1aNa3p3EDne9JacdZTTcDpDWMXePBggyWx7aC3dzilUaUICAMxPKaSUZy9MrxP6sR9V6DUs6empdY3SaI8/hzo2ALTqiJOlXUS7X8KoRHrDM6eAR1QUiDMFXKFQXO4u20m1spuHEDLYq5NhEFh8wDKS46mrlQNoSRUSjhvhL6SHUtemocsoYpie9UxXl5wPJ0wxjCMW7SW/C5nhRB4oUWm6XuHtQ6tLH3nQGW6biTExDwveN+LttV5OmXR1bGuM9pWiirMYWmHCciy20CCJUeU8lQrcrS1QFoixinmuDDFGd1rDvNJQmStYxhG2XC8Y50PrCmQ5hm7SM5ViIFlXVhW8TWVVtQAlDg1Op7CVssyF16cXt3LVARlqzCqknylxkiMN1QUfmdwvoX/GQNrQbHQWeidQjNj4pG+aHKaWV4dUcoLbbEZj0NKhGpwbsD5Hqyj1siaFEbLwhfWhWW5Y7ORXDFjLDq3gkM14l8x5BJRVcIFW/Qh2jqMsnIPq3ZQIki3VWu81+LXyLZpuSW3Q7wEQoM8T5EK4is4S23OuFNpU0nX2kaaib6F/iJetapqkx5VTtMsh56UiCmTY2p4+iL6diMdT6M1fedbZo8I75C+AAAgAElEQVTC9yKFPUsaUypMUyDGTAgiAyylEiLEaMgZ6pyFKmkM+5Po4W9XIbz9o898kd0P9c3vVjgdJ169eeTxu4/4kc/+EG+8+TZKJcZVse47ivkSn/2Fr1CiFHS5oWvXNaH2id07PclFvvCnnwCKFz+wZ72MfP7Pfp1SC5/45cecHq584+ee36973/zZ56DgI7/+mFoNP/y33+Ybf/QZz370ls//2a+R4ir46irHjB/4x28y2o6N3XHz9pEvfOYbr3/XD79Hv1p+8Nc+wK/98d+has+zj92wbiOf+zNfIcSZj/zWB7l+5wK3kfDh7eUlu2UjjZ+asRcbvvZH3+P0KJC28h35+p9+CjNsvtnx1q9c03lPSVq8PGNl+sGVb/8bd7z1v13SP/NQFOM3PB/5nx7y3h/fc/dTR975S0ruWxR3nzphgubyW1tCihhjcSeD+6Lmo3/1Md/+cy95+vO3HH5gppAp2hKvEvo56K9XktLoIm4A06YIKQTSWlHViLejRqF8GUtnLUo71vXUIAayDq9hgQSZRM2yfxilsLprh3oJp5ZOfsY7K8qFrDBYKlFQ5UZjmmSK3LxHSqGUx2snk+Iq39USMikmdMkimwJCqORUJRC+BnS15OkIpd4XJKiOkg0qGVRIpLpiDJSscKbQ+47r3QPcA43Rlu1mx8OHb2DMjsPhiLcvePjA8wt/6i/y25//Dd57+Yxj/w436cCT994jOs3lpWEYBzp7gZ+uKOrIdlgYOglwLeL8xGjJihFPoLmX1NEmJeLFAasTDzYn3gqf5Fn5GpG5+ZcyfZEpp1ev93+FHGadbTleSrfpj2Qj1TJT8jnYtLa9WA6C1jm0MaQswc2q0QlV1cQka5K1yPRECcyhaNnfRUnXaK4tZkW
bQonykkqpjRSYJSMImmdYvv+1QCajyjmctjbEdoM7FFk3sSLdE0DVOS+zlQ/NN9TpVuzUgjWgrW5+L1k7FRZnJVReJnfSeT9ng4HIrGJMbTImUz4BlmRZi2l+qyyxMUrRMq/kIJvLOSC5QTRqiw9QteWKNQ+1LmiV2jov00a5BVTzYJ4LSrlGowy6xdaU5kce+kHWe9GR0vUdueUPybUI6KMkUeQYY3HWoUh461sRrzBKU3MUWFauhCUxDFuMa7lQjViVixR7ok6r4qGxzadfRV6YM4LwB9aU2J/ku7jZDtJ0LJkYVnQWyFlVcDtP0HUijXWGojRVRQk3L+LJNzicV6wp3vvvrLcMu5FUVwblMcbxIAoK3HUWZSpVJXzXmvIqUUoipi19N2Bsy95LCa0tynhq3on1IBfWODT1iZxLN5tRcl/XKPtjkbPD7c2Bs1VWayGDGm1Z10AIAjGpWeE60+iOCm0yxi+Ykqkts8o4oXs71wrqmLi63DEOlhAmpmmSIitkhtHhvcZaCVyeThGqyHKHQWJxKkausUo8w2bcElb5DlUyvo+ULERO8fJHdFZUknzOyiBxGueMOYGwneJMLhFDRadMDQKUi2ltDeyKrpY5LLIHtDXAptqm9BajM8m+lrN/r5/vi+JKyk6LMR3WeMi30gw/M/2Vx2mhRml3JtAYTO2lC2IiqkasqZhaJFzReLwfyTlQsqP3G/quJ9aENT0oz8ZvpesxGKxeuRkKG3+LrQ6dq5gPm445lSwZBmddfv3uTUV2z9q0+2ed9OsfCb4r9xpzXWA+Rk53gZI0IQSULeJtcB6lM2vJhLSyzjNKzaRNYZ4OVDLWG/p+I1rZCMoajNdQRmKVkXjRBuU9oUTikhiNEnRoZ5jXCY27zzu5qAm/3dE1A6sdB4bO3OebQOViuCDEs1FbqErjpkNVhe861pBQ6g5rZJTqXNe8bQpzClgvcsg1BaZQ8Xa87yKFmInKkNcFY510w7JhmQ9YV0VLWyK97pnWlZwrzjrpTFhD1Za0apZcZVp22JOKGOBjDJzSCe99CwiVDSIWWbTRBhUsN9OJ4/5FMwtLpzqlxNBZvLEUm1mmhd71dL3H9T3OCbnG2EqJgjEevJNMq7zQaUfMC6f9BMpS44ixlooirlIcjJsRlXfgN6QSWZbaNoVEzCem0w0lP6LrNjjXU5Lo9a1zGNOjTE8KJyQ4tXAOZDbWyOauzps4lGIIqWK0yPFikWms3L6VEDPTsojGO4lZN8bYDgaCAC4lC3WM5nN4X9MAVXG2bTzFCc69HcxK0RyOM6VW1mVlnmdOJ/G2aWMkkNMI8a33Hd47nLU4a+nHDqVLk3SI4Xh/N7GuoW34Ba0rIVopANtjhnHAW0uIgbAuTOXI9Tev+Sf911HD+83soNeeT3zp4/z0r/whxocdMR3o1spDp/nAyw/w5NM3999hkENDrZVP/w8f5oO/+wB9ZXj6UwfpqKLoJ8t3PvMcheJjv/WQ+YOBu0/OvP30mhQT4UOB/Scm+IeK3WbHp/73t9F94Vt/7AVPfuwdMUU3T8zw3PMnf+lToCzd2JHfPPH80wcefGuLVpqpX3jnI6/4kd/8GE9+5AXViEH34nbgyWeeooC3n7xN/2LEDrLs766u2U4DNUd0Dlxsen7nx77N7ceOaK156+k1r35kkkNjpzC/WuldT4wVXSpuyey+2bP/mcjDXwf/VLTomsJH/9Yj6oXi1U+dOPzcilLLfQjt9smINb5lFEpHtx4yH/1rjzn+6ML6VmJ5QzroSmm6Fx79QhFiQjs55OgqOn6jHLWsLHMQKpRCfl+pVJOwxkkWUFyYU6ImyVib1wlnO1JDDlsl6F1relKa0FSUgZJksm4aSCFXhddGJlZGmhEKjXa6ydxEDlarYux6khUy3JkGaqoUDQLoclTdkbQTuY/2mGpIa8FakZnlIoHhVJFc9drIFM5aSvWMvefB1Y4PffAT7HZbvOvZXFxx9cZbzEf4+3//V4mr4vr6MT//J/8CXf+Abzz5Bqe3fp3v8E8gR758e+ABA9tUccpzFSJODzJ90WBNbCCE5jW2irAmkQe3PbC0fVKM6hprCtebI2/uf5iDfUrO8/06ISG0CmekW71GQco7Z1FKs66L0M6UuScIL/P8eu8toBq50Fp3b/CX6ce5sSnenfNMqp0y2gThdSF0nxNU29RNVawGXdqMuUDNZxKkPFYmD830r23zNYH4ulqAdS3Nf9O817pQSxK6YEZUNEq68MJQkJylWqSALFoyOc/rstgQjODZVWxrXG1h0+q+eVuKKGpAJhVGV1D5dVO3QQVySuIhOofK5nOkCA3E1LwmuXmLlEzjchbirVYFpaKUZLXinONMwO06L6oef5ZCa6zNoM/7hXiUh26Q4quReZ3zpCSFNQqJK9Fa3os2Ndba4V3BuQ7VPguNYjrNGCMffIqRq+s3SPnUjm/iFQ5BJJauszgL0wmGYWBtuV4pB5YQGYZe7p+cWcJKLoVTnEEb8hmKUhP9MGCNoOuNkca1WptkKWWs1YT1BFWokduLjmmaKFmmwt1gUGlhLYGhHxm2o+R3WsuyLhSVMBb60ZNTAi3TPaphsxlaNlhun32mKI+uXq6PJNOjLDAerS0X2yvmNXCa9lBpOZwO353BQEKElOmP2EzWkMnJE4NDK0XfdThnmOY7XK8JXoA81lo0HbmsAkdTQs282A5sN5aUFH2nmKeAMpVhoxvG3rSpU5ZzvvP0PSjtKVWRYiUnhTE9282O1CViXIl5QRtNWKVBFVvWqbEarcRTKjJRS4wJbcG4ijKF05Ik+01Vcl2FosrQMO7itRudIbJiTAOc1CpTs2rlPrOpUY9//5/vi+JKITkFx3BHrZnOWzaby9aVEVmR94owKWKQTcpazWY38PBKUMIhzuxv9nT6gt3VgHUdWjumeU9KK76Dvq9cbDtubu5QKtF1l2zcJU/fe5dPffIDPHtPcLfzMmOth1gbiapIl063iZWqVF0bGUhkO0L4g9wAFvWsna5VpE1VSUFoBJARieynA4d1wm579GbDxfVASpplET3uk2dfAzq0cljbkfzABz7yIZ69eMrz/UueTS/YXVxRK2zcyGBHptOEcwbtNM71aG+Z55m724kpFYbesuscpteS59HvMNrw7sv32G6v2D54QKmZNcycTvEeNe+dY9N3bDYjFJE3KAydHbF4drsdpRZ2lyf6sRNTYk6kFJhOe+wI/dhJsriCUo2kr59O5JwxxrJ99JhvfOtLKGXohy277TVTPJCWA9oYvB9JwOZ65DTPzMuJ/e0ttUFEzuZNP2x47+V3KBl6PzL2I7r2UArHuz0xBIZh5K2Hb3CcxeCecmZaTgxdYRgHQQaXyt3hxLScWKwgcKor2KFymDRdqHQValJNZ5xxpeBSoShNNgOkmUqAUTbuZIJM15Jsjp23LOsNa3iFUsg0a1Hc3e3JZWUcFZVIWZ6jTY8xPd5vsL7H9SPdcEXfS1AfxraDQqVkSR6fpwPzvLAsCzEFfD8QgxAkY4qENGPNKEGhNXE67Xnx6kUz4a
/EULi4uLwvss6d0t1ux263BSVAl5QCp+ORGCOprOQSKckKAbFE2XaVIydBSZ8PNOgom741DQAjEhejWte2yuZJNQ2/KyQ333fNr9ACQpUcrpw+T7+EhrVMRzCwhlXyr2rhX/5P/hTWOaHROenUVsDbEaUNoQIzaHNBvzF8cn6Tn/yrn+Hm7j2meWk+OMXd/jkhzHTdhu6tEad7/rW/8kEMKymdqGXFGo3XnnmOfPTZm/zsF36EN9+0PH92I2GqqbI8iPzkT36Sz33u1/gXfuVtut/8OGGtHI4r2+0VaygcTyvVGpKKTHXP48/3/Pl//HNMa2DwHVe7gb5zzCT+zH/9E3jjZeOyGuUtrsLtzZElvmSZZc2yWkzV1cBme8l8PPBH/ts/gOss48WW7W7Ht771FKPF5O8+7rBmw3fefUrIJ8y3NX/4P/xBBr+lt570ILOcFmJacM7wY//LJ+BvyiTCm0wICQkD1eA0KoiEq5RMTiL1/pn/6g9SYyaugSUs6G7b6JyJupEYDesNNRasNnjrSW4AKs4XnC9kMlmdOJ1O2HWg77b4zmNSEl2/Ba8Nm+2GeRZQEUoRcibGJFPSoqglgldtiivr/TiO0p23V0CjdWXxkorHSGQxh+mWkns22x1KZ1KWLqfR4oFRtdJ3PeOwJVjHsOkZe09eREqdy0qOgbgsLKeFwQ1cXb7B1eUFlw92XF49YhjeYHf1kKsHj7l69BY3dyfmJQiJNi788q/8bf7G3/zr7D+z50f/0E/wtW++4s23f5xPfvqPcPxQ5Xf8l/kT/9If5rO//VkUhnmKvPPyq7y8/kd88vJfYU6PKVPg8dUTaJMYASNpSsyEloOHhoxGuUqm3MN4qqlUK35S1/YMN/QsYUGVgtGC8badoJuLQgoQGhgDyIhsu2rFZjNgOiv3tDPUBiFINZNCINaVbuhkPW+gDb+RzyjFSs6GzjqqkbgJoZmJuqMoeXECJO/ZDgNKPcc4hes1OukmO6top3C9wTnX1tpGyFOKzTBIs6gCSOGS1khMkpOpjWYNK8Y6qEKmTFnw8jElzi3ZGCKhBpEcygmDFFeKN2gMOcGaMs4ZfCceppwLOWWMkuab1uLjCem1z7M0UiXIZME5h1It+DdllEUIjNQm+0qt6aUxVZQaRSmU8M1Fhuol14kqiooU13bmkZDbWqXhd562iV/MkNI5n8tivSKGxOk4iZLEWbquoyRF1w33tDltNF1vhCLYPHl979lG386Flr73aGc5HSylSAD2PM8sJXF1fc0wdjinuHp8wdjvOE4Lp9OJw3HPy5cLc4oyCVIKrEjg15JZ5plpCXhvCWEhv5CzkTOWMwK/1IxW0HUdxsDpJN5Noy3XD7ay9okYlPIqsHxVmnXei+Ryt7vAec/+cMc5BiUTMdVwcTXivSOGQj8IQMxoAf9cXG44Hu/QRSaS1g+EWNEYvBPa483LiO+24oPUBWc01oysl3B1dYXWIpebpiPDMPDqlUbrnouLh2hTmacT3gvY5XjUXO4+Ts5J8Pw540xlWRa06shRo9PM8WZl2i/stiMPdw/QlxXVFfb7Wzn/dKNEJmwsu90l3jv2h+dYb1nXmXVdWObCbvuIebnDOcuwcVx1A8MG9ncnQpS1SWvDOI44K3tKipkYFIcDxHTCe1F76TDi7A6lMuiEMVkaHGWHZAsGjlNiu+vRZkEpUa1NpxWKpTTa5BRfZzp+z7rmPE7+//Pnwx+5rn/5P/4FkcJVOJ1mhqHjHJCWS6IfOjZ227CfRQIMayEGsLZj3Fzw9ttvkvLE/u7IMi+klLm6fMDx9IpcEs56Hj38IDEtHI9HcqqUCE+fPuWnf+YTfO5zt/yDf/gVfvnv/TqPrzVzco0ak5qhUjejfcs7KlDex7pXSrEECVoTXLF02Y16bWC1VjCv+1Pmwx/6AD/7c5/iL/6bf4R/+pUX7K56coYYJUvB+8Dd3YF5npjnmf3+jouLS8ZxQymFV6+es73sGTYOrwd07QkBnFN0vWNdD7y6fUoqE05fY/SI955+1IR1IYaMMR5rLTHKgSiVjDGKrvekqNpEQPTIVMeyTOy2TjrOVaEZ6O3QsgmkC1mVSDGdEZlOiAuKxONHO1KC0ykQc6DfGGqRTJvpNHN1teH5i+eUGu8DEYfRNXpaoGbLm299mCdPnmCchO9eX2/Z7jqmeaIW0e5659GmcjocCUEyo/x4QVgOQESrgtGK3fYRIWnuDhPzvOI6j/dFutmV+4Lt4mLLvJyIKaKorGHl4voNYows60JOgq8Xj2ChVNhtH7IZHvP8+behBh4+fMTL56/oXIfGiBF27ElhRasKGKGUpYwxEFfpxF/trrHI5HO/P3GaZvRguH70mJANpzlyd5jY7xNXDx5wc7PndJyafKXSDTIB6jsvWXKmF4T26cTLly/46te+RN9v70mGxji8H9Ha4pzcx2uYWvJ8o0sZkQTmFNuBo2WdoCkpNcqhBPiiKoog8hJlGYYNMcxt8qugbZqlnKma6t7gnrN0uWsu+JY3dqZi1Sqb8jmPTKmK9xZtaOGzuZlYI3/7v/w7nB6evufa812yaaW+x79Xrx/UGij3P7Xey2zuf9v9evr7rauqFZy1TbHlT1/LLOv7rur1/67v/+Pv9QglKN6zHFn9nhd3fi55wtQVbDRnX3g79DTZUHvNclior5+v/df9lPJ9F3N+9t+znZwv6vzP3/envr6Qex+MXEv9rseck5bOf6q++y37rnfl9WPe/15/92P+X1zTd72I+9/4vkd89x4AcJ+pdL7E8/v4vuu4l4ap15/hd13k+X5rTy0NijZtVer+/9O+N+//fGlTh5QSeNBO43N3fw1JJaKKuOBe49BbY0YkYPb+M70n3b7/nanf8+vye35KVVQy/9xzxj/7e36fe0iw2Ypz5Mk/+/z/n84y7T0u9UzYU5ShoCKopO4/59oDGVRU3/vu+R73ef09//p733vq9zxeHvfWX9vy1l+9QBuL0V58b1FohMqA80omF226mLPCu45zzECpscmkXj/n+b2yVqZL67pK8dKaXDnJgdJq875syCiEXA1ae0FUl7PUXqap1hrSfUdfPieqnEfUeZISc5NVyjWJPFGxLCvWSnByPuecGd32AwmYN0ZTdMS2OBOtNL7rSLHIvda+CzkVlMotQkX25Upi3FygTUWRuLrecDoubfqTyCWKf63IBMU5izEyEYmpUGKCnNnttpRaOR6PhFXkbEIwdSLD1BqUJ+YJZ/195lMuUZoGVXxgKQeM7tAttyuf885KFpm/Ulhr8d5i3i9XNhWtnOyLvWXoPV03cnv3Hpt+aL8vEwOsQcLhtZLCx3UDxtAUAxIDYo0Xz6DOoIR0WEsDaSlNoZDrCYOl7zuc8yzHxOPHb+JcxjnoesODBw+FKh1bPpROfPazv8FmeMRmvGboR5RZifnAdEqkqCjV8OrlHa7TYCUbkGIxzjAMSny+xVCQjKl5EiCXsb2cyY1YSZyVsyIYwf97gdmEIJLAXCZA4fSIsSvGKY77lRAqu+0lKIl6KEVRi8KPGqs3zNORr
vM8eviAr3/9CyzLSlwNJQ6o/Jj/+X/8O5+ttf7U91oKvi8mV2jR/RrToXWH0VuGRlmRxOSVkgrdVrwC67pynBdSkbFvXhV300vmeJDUhBjuJTtFQS4LOSfmNTCt38ZoOZDFnAmp4HcdT18+5xgyRWuUddK5KaYRfRwlh2YKVK+Nti2x/Xwoybl81wJ2zoxACUZb1fPhQ+MNzMeFl89uefH8jtN8R9WzhAlXRQgVZx8xdB3UE7CnHzpQicJKqYVx7FAUToc9xzJB6TDG4wtMQTKmuqGnVx1hMWhbQSdCEDJTSkFMs1m6HzEtpBRY18S8KIbxknk+NhKQx9mRqgun5SiLeQGjFy42iumw3EsT+kG6u0uULIsQJ3bbnv1RE2NhmmfmZYJ9FFJLBq284Je9I0TRJPtO40zPo+sPwDVAJRO5uPBiCA0zL59FVL3EeRml7497jqc9u4sO03LJcq4UfWKZAmO3wVqPRlPWSImBrdcM1kvhUhyddqxRsOC+85QYMMqgvEVbi/WZuERimCEHOqe52owiI8yJEALz/jkmZa62grF2RvHg+gptBCubSyLpyKKF3mO0omrDzXGP6+DiYotzA0upzNNKKYFTOBFKYqse8PT5Hbk65nXh5u4VX/rKN/C+kTKzbHa1emJe6fuecRgwRrwb1nUoJGhys9kAjk6b+00/54WUFSFOgMYofT+dAppsUjbw3g1NBgOpmfWVlrH85XaLNopSFtZ1ZV2lg+o61/xshlo1MSZQrTtqRLJyRjVbaymqyCheidb6LMPJTY7hveBn53nCOvc66d0omfj2iU/+xh/kY7/zMZFxVNlojLaCBa4SOGtN05QbIcitcSbFRIpFMqRqxDcwjjVO/IpaJGhyPq2MY89mlILWNNmT9Q6qZp4WjseZEjXdaGlNXCqFGI70vZVkeCUd5surK7SRyTmAd46cA0pFcg7EEOi7S16+uGP0jsvNlourR9zd3bAcbqlVkdH803e/w/X2Cq0ruQSO+sTf+Le/wF/41T/GG9MFXil0Suw2A2tIEvgcAqUWhs0I2qCNR3cdx+VArLEh2wx5zczrHbGFd1aVefjoDd79ziumaS8SUjcKhlcLrlpphfOe95495eJih0ICWlXVlKLkOSns9y8Jc8D34jMpJXN99agBEiy5JEJcJGS0FFIS7Ptmc0mIEyEG1iWImd9VqJ4YpfjvnKfrNsQkmWsy74KaCl3Xt8JkRRv5e84PQlfTFasV8yx5acZqhsGxBokuiCGjc8+Hx88gHera5Fpn6FHDGbd7eLu9YDds6LsNXbfB9x3Pnj8HDOO45frBNSlHYj7T08Aow2d/6zcAxdtvf4gPf/ijzMssE4FyzsSpfPmr/xhK5ou/8HnKH6j8+b/778p0Jis++6m/z+c/8qv8iV/6czx77zkf+egneOPxm2w2I1prTqdXHI63nE4HpmlmWRYu/QfYDAvj+BKQxtMc5kYLlcPM6RSw1t77Lp7fPuJ5/l2WckdBJhlKGc4ZYRIuXMSbVkSuba1ttGDJINJaYiN+9IPXXNg32PgHbMYdMYncaVlm1lVkYZvxmjUsPDm8w1dvv0HJ4F3HH3zwIR52FyyL0MuGQbylMWZooc4lZ27zwrO8Z41HvviX3+XtX37M1ec2hBix1vPlf+8J17+z481feUApSajANbcOfrr/nM+hqedJkfiUBHOeUxbK3/msUFsEB0I0VueapMKTX3xFsZKHF0Ok1oBqZD5pJvXEtOA7185GAW/EI12bv/jsa7oPcq2gWs7YOUjVtIO8RCxpnBbvVM0FozVKG5y1xLBilEXR/mNEPmctjTxYAX/fmCyNAOuKb345gCbf0g3P3Yqwcdzc+2l0lbPXGhfWsHL2hkmhJ5Ix3Uxl85pR0IqSfB/KnLPkGKI0xslrPxyFlKo15KokKFmrJr8WgMOZoJhSwTqxpjjbSS5fFZGK1ppuGNDWk1OF8+ep2i6qNODI2UJS91NflGRplZKhagn/RZ6DJm91VeOUR6nX2VClRlIRoIJ1lpKL+OtSIh0jh8MJbWBdK0oJuAgimZVpWbHaYa3ndNrfF6xaS9Cx0bHJcM+NTS2eJCs5bqUWcqqsyx7vOpkkVs27731Hssmc0Fr3R4k5cl58WUqvfOhjH2bsr6Hae9n09eUVYdWsS2GeE8pUUhVoXaUQVpmg5tKTojQOtK2sIbEssmboJPCbqipjrykd5DIDBq0cIx1KG6ZpYhh6UpZ8xEjl4eNrUl5QtkBKHOdVpmi9SP60EeVPrpnNdoPRmuNx5sH1G4AmrJl1geX0z+8ufX8UV1Sm9YjREWukai6nIAF9RYg8ISx4CZYh5UgisaYsRU3KpFwIeaU246XRosfPJ8FySrAq1LoKvUVVYkmEnHDacns4cTgp1mYiV9AmM61DVc/4SoC2iJwlLnAvMziHytb3hRPK3+e+466UonOasCzc3ey5eXlHVac2ghf5meQYbIlpJZUFdKbvnGwGSsyd1lmomTUgifYqNnKRHFKEZiVF6j2pqBSKSpQk2F0xumbWKOQVZQ2qNF15EvmEhHNGjF7bJhIb/U1hteGw3LYXKItK1SLlqqXpzEskBDgWK8Sd0Ird0wF0vteur/NK11lq9aSSSDlyPE6M3RXeWZQp5HBit9twnE6ENUjIXBqxpvkYdNPiG3cvzcwlU+MCCIEq1mZmjgvUjPVy0CUt9LtRNrYiel+tC+s6YazHKEtNBVWUTGgAby1D33N5cS0Bp6WQu0RaX1ByYBg2ON8Bhr7vULYF9oVFqGudlaDcKhtCJEiye5GOTQ6BkCQ7InpFNYpiE8fDEeN6EiupLgwbWYScUugskg/R+y/EmDgh3j6Fk0Di1vntuk78gLVwDjKe5yOoc6K8oRbu5SO1Il1B5P22nbzfKSVKLNiuF6OzypgWZp2rAFWp4qsAACAASURBVFjMeUpVtRjBW6g1nL2IhXNg4f1IBYU26v5wcjY7i2dC0KhniEwp0lUzxtxPC5yxaKV54/ljfuirPyDEsiK5X9oIKlq6/Gv7PQbxTShiXOUQ2MKAj9Oerrd43+FsR0yBcWjyOxHpC7q5/3+Ye5Ney7LzTO9Z/W5Oc5tosiVFUmQJKhSMQhVgG/DAE48N+B/4l3nkmQGPPC/AsgsuGCoIlKCSSEqpZDIyo7vdOWc3q/Xg2zeyABc05gUSyOZmRNxz9lnra973eQPBiV9MK4UPQkGMMTHPC7Up+uKEkLRh6ltd6YvGNCTfrxTCY4dSVTYbTYrSUiNaZ2pN5JRwduDu7h5bE7237O5vOV2uKPPD5qdwvLrf0T85Sko0Cus+87/z1/z8uxf8cvmS0XvqvDCGgNKOmhM5TpQi3s7aDMZ3hHHPh8cfKDpSmvj3qIZUHgHR2WsLO2749of3rPG8Pdd+8yKJ17ChcM7x/sOOw7L/lDWiULgw4KeBUhL3dyN1XdFWCIgNxf50xTQ94qyhNUXOkqdSa+NykQZnfzgyL0+0BtM0M08Txi/U1AHSwHvv8HZPrgsxTYiPtSOtRQqMLRR4HAemS6I2hXWOrgtQK5ez
5HkZqwne4dZXKOWouaGq59b8QgZ8PHtvGtY62SggfoCu67g63NKFXgiCtgMD8T/+JSEMvHr9GT/96U+Y4kypsoVQyEfju3//Hbv9kV9Mv+RP8i84nc5IPqxAHVIuLL8+4azlzX/7LevnkX/5238rjVdRfPfqd9hXjq/+8peEt9f8+fpf8fnpC5z30BrremZZzpwvT3x4/5GUV3p1hTNngt9zvYOcMktaqIi/xAfN/eMj1oqMVBvF7eOR+vh7ziXK5qIBmE+FJjyHjsdPZ5DWmnn28vpqhTENqPzq7muu/Gv23S3H4zXzdEFpy+V8JqaFq+Oecbzhm7vvyOeZPD/xdH9i6Hte3fa8cDe08zWOC/ujp1aRgeakMRZSLNzFRkgr75dfo6Ji/MZz/f8OxCS0HvM/K8LvLfv/EAC/DXrkPc6lcLmcCb7//zVX0lCJqf6ZavepuVCKVopIyrTUBxXZ0r2ZHrZTcBvsZnmNjAbnDEZLULUz8pq2AkrZzXO2FczWfFqPKn7cYD8rBFoRKafettJ2y7lSW01TWv7kVdP6mcSYUBuRtbaCbprSpM56Hno9474lUfY5dFVtGUSyw9ObFHPLrpGB2rad1cZQyZt6ZtvSUlDNbjUYnzZcbavBJMHkeQ3fPiHijYbc2iY3VTLsOi8CTNj+u9JbBIFRkm1af5QyakRJkXLlMi3SEG/h1a1pUPLzq+cteX32GbdPAdrGOkrOW9MnP58onECifJ7//sdGRyvNGjMSm/IMQtlkhWha2SRUVKwyxA0Go7Si6S2Dsj2D2DSlpC0TcMuHbfIZUNvQIxeD0mJ3yW2LLcBSMtu2raBSQjfItZGsxWbNkoRk2lrDe9n6aVWwTtFaJqaVdVmwtuD6I+sqQ/6UqmzHqkJXu9UgojxJecvXbHXbQsE8SYMWesWaFmgdVIFaKS1bRGPEF7z2jvmyklYZYEqtkLickrx+yUJtMpxrMiQwG/hGFAIzfSeDkRQL3moB6BiHGUSS/s99/VE0V7VWnqaPgMFoTy2GGhvWicyntcgaLyw5y0rTQgielpzojIGqMlO8oyKZMtb0eDcwzycxaDaN2kJgY+qATCFJllIV3fjjU2WantBKtKpWK9oznUg72jYJET0+GyFI80z8gR9lIQDPtLUf//lHuYj3mukceXw48f7dR15+kcnRkFKhqYgyiWlxTMuJUmRFrEsg1yLhktaLCbVVlAmUstKaTN+l2OvkQGuJnE9ivK0yGdGmMuXMzdVBAmaXldO0MOw6rOswKEpuxHnGOQlvo1ZKnonruk2Dtilcqzxd3ssDp4UyUy4J1cBqoR4abZguC9EZaJVWshD2mmI37vHOUYtmXRaGbo9zPS1HluXC6enMzbET/KmplApdP4rRNEckM6+wzpNobseBbugZdwOn04lcVtCKUhPeeYEyxCimYqPpg6dmSbU3qjEeR3JaUNqibKCUwrouGNH8kVYp8rQzWO9xzjAOI7vdLafHCWsUximWRZrTiqEiK/bWQFuDqQ5TMykt9N1AilIsawrON9ZYOE8Ts5HgX6P7zWcl6e21TSw8MfpNshoqv/qzr1kXRU5FwrjPM0tMKBXk+awQ10YIgXVdWJd181D1tFYkXFBVnDc8ne/xTnK7tNJczhO7/Y6hO0IT+ErKBaMyVgupKK4JhcGoftusLExTEjlJjYDBmg7nLMsaiWlrelvZGjdF2bTbIod4Zh9vjZRSkhPyPPHUCq0sMUaeg0Kvr29wWqGM2T5vDW8dRgvWvQ9S2FalcJ3dpsQKzXapIPCWFDP9sMP7jq6T/3+eI2/fSdK7dZJqr7Tn9vZW9PUaNEUgC9rglNlM2HJedZ3FuwHjrsFBCHoD+Ax4N9JvGVQykKjUXGTz0xxgaU2y66DAc9K9gpRW+v4ll6cPLPMj95czyji666Pgw63jz17smZ4+8HQ/YYyj/3LkxXyNSRXvNP3YM1c4L4nb62vcCK0EWilM00mkqrUQWsOWiNcCoClFEfodbrejcyNd3xOCY14qvADtX4ASU3TVCW08bNspULy+2YnpO65MU0Cpxu7mltIcyxrZ94rOVOZ1QZuAth21wsPTjHUVowc0V2gjbcfHD0+sa2TYOcLcMQw7zpeF0/kRbe+Ic2DoR6zTKF2gBhqBaalA5np/y7xULtMF68ST9erV53z/5nseTyeMdRx2B3IUeBLPd0ozvDb/GuccdpTC77QFpD4PnbTWDN3IbjwSfM9u2PPixWuG7kCqMo3NtfB0fuAfvvknXr18zfH6mqa1fNasFYR+la1rzis311eMu/FHCl2VJqQUCa9FKcbxgPeBpcVtcty2YkvcPTlVdvsju8MO6w3TfNnyezxDf4M1Aw93kWEYSWnhcU6UybHrC60FvBHPo9Yai8bqCwopxJRu7Pt71F2mJtC2UdKzhMwLIlo7qpZtHFtWVM5JYh+24k5iIAYO5ks62+OcbIUBSsrbdtTx+vUX1Dbyw9u/YwmJL2+uyOnMYad5so/kMrKrf8KuPZCz4KmNNcS10TlDihVfrjjmHe/qXwJCICslSjB8mmTLllbm6Yzznlwk4FVrg1GGdUnshv2nYabZXu9UZlotYKUxWpYFay1sAx2txbuqtuFgyw29bf+UAmMV3oRPcKEfmx3ogsCFoAnNs27PnHGSPaUlo1NClmW8kWLCGi3PXmsYNKo1rN4CfZUEbj8HQz/Ln7uhI8Yo96oCbTtKTbQqsvHWhH5J24jO2vxn8k3FM5FWm0ZOEW22Brts0jprBBmv1dZQeeDZ21ZYl3mj3ypykkbOh7Ddk+J/Upuv0dgqAeHGgMqQKmpTTCilicuMVgLRKEY2d1rZLTcVUSQkULoRN39ZioVliqS0xQEZkfirTdb97MetJQnRs4r8UeT2mnMUvL40TzLUrRuxWM50aV7V83BTiczSuSa+NqXJefucV1BVfh4f6uYrFMS/sVZ8RE0LfEc9b45kMywSWAFBpZK2YXQWiqQOUg/UJuoOa4FI13ebLaewrpnOdmAMBQGHFbVwuZzk51Ue1RylJvr+TMorcV3wzrJGOWdKqqjmBfKCWDq00WAV85zIabMT6EaOHTFq5qXhfKMfFaXN6DYwTYlaI2wZZs5WpkvFW0utGa3k/PQOQlh4N894F+R9qgLXMgbWKSEqayvgV52xLqFVE5kkBmc7xnEQhcq4/2f7mj+K5ur5QysyAANVUQLkuKA3hGSOBuP2kluUV+Yl0w+9FENlIZWCtdcY3TPPZ1IpkKWjn04XrNFYKynXJSMoSeMxpkObilZetkQ5YrRiWVcpBpDpRz8emecLNckGTD83GOht2ta21Ga7HRhS3ElC+Da92A6E1hrNWgqGh9PKr//m9/yPf/5zPrzNpLjpTk1mim9wnz7sjct5RamAVhKOt0wzjYTSBWMCtMDj+UTOK1fHL7C6pxbF9dVnTNOZikw1NB2JC3fvT5JI7zxXh8DpdCI9T6ZKoeSJeRFaltpITc45UqpobTHGMi8zoe/wZpCHshSm84WUZ5zVkp7tHHFdsW7CmwGnPKXM/OQnP4HWMc0zT5d
gWOjHOWcRlkSF5F1dNapMVKagXTNMv5TnIoc4QKTk9cLhfOZxle+Glimc7cXi8y6IwypGi5cX0WmmzOhRwytyNTa8Y62XxaOzDZgbuHkZwq4QBrBvKUCfFGyRt2GLg7f2C7JazzFKNZX8X3ThbFgzKWkhtbuGCtwfZt/VvWoZ8czlnWy4UUI+HYhMjpjGyy0yGefeP6cKPgrRXfXwPJyPRgi9B1c3hXOEmul0JbxTjO/RqLVFWxbiKnTrFUMiTRiJ2kVYFLVDT7cWUcfZcMN27XnRSX3uR0YzMV1RreypDnOC6c5o+ixtIGpw0xHHz68C3haIQsnIDTdCde45j6sERjfKTFO5SynfocWcavieqQGANVgY27uxOZRIjiVX76vHG+W5iXEevh9fWV9ZaJOXQ1w5lbuVEv8OWHK4Ob+HD3FX/05YmffPsdp2Vinhxffzfxv/3jP+Hjp58zzU7gHL/5zDQNGCteuNenp7+2r/l/RAv8/+tVq2iTvRvxZmKYPSknrHLYxTIOinGcWC8HShvsvcMYx8vzE8ZqGoIBfSNDHfsuenqt0Ao+Pt6h9VmKpj1giiHGgtBdxCejTQIaRjn8aBi8phTJVZJgSMe+KrIulLKT8s7d3RnaxDJZ6ii5QevqKEFL964OrM+o9omSQOITN1KI+FNh8B5rGyVFaIZ9vRF2OVSmeSCETM6veO+Z55kjFDZ9YV03jiMACu+1kKaoGKX4xc9/lz/91a+5f/iA1oZt33l9emacPOPphFaWp8+v5PylT0EL1miW+YGQAlo7coM1XUlHFr+H9yg1Ues94QjoKoAC54Xa9OHjPTFEShaNrCqay8sLziwY7bhtO4OV/KllNLjBcz5PLKdTzyGSnIzL9cbrywu+07H2vRJSxQ0T02SoY+F2WalJU5psuMZhptWId471dhBCkqwMNDnI6t6ZkfMiq3WaPDy9F5Pi1199Rc4HIQm18MOHb7G+CspbW45wwfmJeTkJ4c5XUmrsx4tMeJrHmhPTNPQgPpm+lgp+dBwxUJuYwGUbVN4LkzeNv+j4AQzfffM1e9gRuIes/a+XjXFcUHoUUEmoGKfILbznXsVYO7hFDuTaCvv3T6A3uYabZf/1QakrfpSG1qhKqxsh7UzjgLKKkHbWPaKVJRdNTAJlOZ1mnCqUGrjexLBsre+wh0qKibv7E9df/0BJBVrH4vdg7FwqqSTmeST/uWyTnFPc3znOywOfvzz3wk6QyrkWfvObJ0rt2T9KM4yyFYtJJIbO9ZywoilRPBv7umHNjDWeaCylTLSm0NqjGSlJwoBRsnkrORND/wxTQuUuVzTw/Pyl5ws1Xi+FyRUar9Jkl0b2A8tsu+xDfqYxRmI6RCaFQSnLMo9s+9b9cBq0kclukziCmDKtJlI9JFMNTY4ZoxIleFAO4yHElcKB1gpjFbN6wOk7ar1BFd39MDWenr4Qo0jKrFGE/VW2j32K2g5QaJ7+8ZW3wGiFoRSZYnqr8K7gxp2aF4wacd5jhpWXpwCcQSeUCvjRoNSI1qK9b+qg1EDO8/um2HnF77R/Fac8KR1o5fnJt7/LL372d0mx8f33X7hcLvy9v/d30cqRqjRlAMoOrNuVfV8pJTNNS4dM9GJOyxbwtr70reBMKQIQEIilBV37BDLg7IR3Uz93pHiQa7iw3i4YY0TOp5TAI5TQ3iSbKlLywePDPc5LNk6loSpUJcOnkg9aO5jmD5RWeiPUyLVgQIK9KXhnGOely637tklJw7fvO1o1CaQdBsl9y5242Ro15441jlJoUEkFrHpzOjVABjy3y4VWZSvrvWTqQaM0aT6VUsRwYK3cz9777nP+cZ5qjCUcN2JchVxbB5nQ1/z+e2gSCiwRD11unwvKiJqhtkorDW98/3P1fUOmjaGVN+KgePtSSjgjn13KmVobxvc8sJpRtasUUkRVUQpoo2m9nqhd4ld60Xm7XEgpofXCOHo+lH+ONP1AHp8xqC61k3vIGJFf6e5Xo0u6YpRBp9aq5ys1Qkq8Ef6Ukk2rbMHe/HMarenUOFE85NaVEj2b50gdRvD6Ihs5rWlK8udoSjzCVQvcShfBydPl6VZR6yHPTqDUxh4KKZV3RD59QDKfBhYlsAa59PuqAqhFhnRTFQlobaJyWRbNsa+sW0CxCXzqn/6KcXQo3VhvK1Dfw9QVkgEZjo37+3u0cux7Yl480/h/UfdmPZJlWXbed8Y7mfkQERlZmZVZza5mNwRQlB5Evkg/QH9eDw2QaFAkoCZrzCEmdze7w5m2HvYxz3qQ+kUQUAwgkcjICHdzs3vP3cNa35r0vJNMWjeGKeCqIZiAGzzOQ0qWwU6EOLPM2kwr6jvTcmHdnnh8fOCrr95zvV46cEMx2yllSitgLY/nd3qmb+pfP51OxPuJIx0q/XSWbRv59PkjuYNanMQuixVijAqYqGBzwDTBCz1SwGKz0YxFo/l3s59YyzNGDE48tsKyTIQYKEVl4cM0cDjTcxWF47ozRA0/z1um+cZpOXN0ip0xYJteg4OfiEPAdny/t+5VBXOzvHjvFMfvDGPU+I8jNaYhKq/AebZVBwq131N2jCAwTQPWqG/doPdNyepbza3gXaM19YgOMRLjHdfrinG6lytFt5I+BJzVe3kYJuRGN3XSBx+hk0Qt1qgloCTLOJ5ZRvWK5lKoKbFXrbEQxzAGXi7Pekai94P1et/oht0SB4ubRko9eHq5gCk4G0lJMNYxjp5xiFi/cX0R5TI4TykvUBp//OMfGMJA8IF/+o8HT59f+O5v3vL45oQPjmVacE7zXZ0DZ/+/hwj///6rNdVIStMgTjE9u6Sbc61RX0ROheDd6+RHpVnqhYGAC4HLy5VpOFG9Sh3OdyPLaVCqWiosSyAnh+uyBed0sxScGlad86o5JWk33xO305EoR9MUcy9aoIl+/2IMRirOGs6zbjmECWtHhEwpGzGO/XV7zOlMnJoii00gHxnTZpzTC8VKQBKUA1JWWEDNW1/DH+RjR6oQ3KgY9eiZxsA0DkzTTPQTp/Mdx5H4+OkTYxzwwRFDpEnPTho0j8Z7bZSccTg/IdV0WYfFhYHgNaPIh4g1kc/1A7UapkHlfik1UrZMzkBQWtDBSMISvdIQjVO53TyOTNPAPA/cnWdC9DwuD9ykII/nR/5sIzllSmqQ4TQZxmAIMaofRnaGeKImfeBpTpNjGmaMOIxs7MfO45s3r+t1DRGOfTulAcbTOBH9PW/fvFNp1fqFy9WBDMSgh2+t6ocRQbHhMeCC+mLioJNZ70aiX7i7u+Pz088cx0GrYENgPw5s0EDr635RHLGzYG8+BcFHRTjfLIHbvrHnvXvxVPbUjLDnA2cB003fpmBNN5Q3RUSLaA5NQ7NxUm6EoRK8ykTzcaVRUR+YbmSMFOJgsB3r/TDM7JuiTa2LhKBNluLUBedUNlKLBttqg9CoFHJd8VG631E9AdU0YlBTsSkaFqyHnUXE8uULXC4vWKPyhibSs560zWgimotkAut6/BIgaRXB2qqGQwsW25QA1Zrp3jtLqxoE653t02mo9UDk9vs6
MVT1kbzKfKz1tFrJZEwxiHiK7dJNtKFueaYVTzpWBXU0Sy07rSXUvefxJvbJYOtZQRXTPMUXBJ3AOW8xBIyTX+TL1VDJqP9EC90YRsVFiw6jnDNs6xPzMOv1RMW4FWle4Qbo1mE6QfAzVaC2W34RtKYZOynpds86B6lSqyHlAlvFmo3gdSOrQc6iWTYUrGv4YdBCjcSN+BbCpJv01hjkgXfu7/F1oYmwzPfcnb/i7eN3SDNs64X9WAHh4eERkZuPp3sRmuG6PXUQhU6i1ftDlyEZLpcnRAox3hHjyC0M1/UgVRFhXZ9BNPjbu9jR1TqllSbUUjjSyjKf8S72v3cjvmnxoM1WY54W6ICJ1rQIVzl7AqmMMTCNJ0orvehVQIgxhmNfEcn4YPFh0C1l366JAee7DLJqcxiiBlILKr8zfRhTs4IQjNHBTYyTwix602mM+l2MqZoH6Pv0vBMPpRfRulXS4Gzv7WtuXb01r0YhUJrTqM2HNjg3It7NHQqltb7tA0Tlec64178DBrmdDdQuibuRDTXzUIQuH0z4Lkerrf88Xe5Il07q5lnlcSoh1bBm4eYlAbEqn/ahD7CM5kyO44hPjxix1PlT39b9QgY0Jip8qZ+rtywy9RBWbXbhtYlTGeQN2S6/NGF9YBRj5BazYrrU8ZcG1mLdQM65+6R0O9BK4WaYMUYUtOEs1et7ob41ld1pZMVt4KDhxXpt6Zdwr8RDfWZaJ11p0aWXHkQciFevZGtIaIQwcz7lfi6VDmWy/fMwjFFzEMXk15+mtYPaGusOtexcny6sq2Uco97XTeV28xyhavM7TTPDPJFL6p4fQ0Nw3tGa5qp5NxCDnv+1quRtmuYua6uMQ6Q0S64V7xW+JU2BDTUrkMW5oJskA0NYuDtVWsk9XFqYxkmHpLlRk3qmxRn2NYHRBsYPAeOMkgNxRD/iqmMeTjoUMrCMXofp0hCynhGlEoxXkFeteCyDCzSrcBXTBO88blg08LYp6K21RhiCxrMgXdpnCeMI6FBn3w+iG3qsispOm9OBh/dGtz9UnFMZaAwD0ixOlIbrjWjYcgyU1DDe02LQPL68aZ6oNKz1fTNcO7hIc9ly2aFNOhzo2HVTDZbGMi9aCzQNr86l4ZySmOcxqkzUw7JEhkHv0/WyUUTDxUvVnMJ9bQyj4s/3LZGOytQXMbXo9jMOHhsKpR6a59kKw2CBAjhai7SsRFCDyo/3rTIPS/+sdCnjozCdApfLlZRSz4M0xDESgqW2xsvzfwfNlQiUnltQSsX7vs2yPY+q6zhL0gwRWj8oqtGwtdshTcRSCR396OzGPA7Q5QqtVKZpwhkwohN16yyHqwTrmcZJQ/gq1GIoknT6LA4pmZalZ2YpPrgVlbsYPM4YvDH4uGBkxRgtvnM+aPmquHZndTpsHdMcutZfukHOK6DAWjB685tmsKJTvZoScVDN+Bgizd5S2CHYwBhUkjeEyPt3TuVk5qoN18MbxX+iWWDR70yTwRvXm6tIyT05xXiME0xoSPR4K/geeGyMZZkirQSG0TNEh20GW5pmSviKMY21NWRUqZH3gRAbX7gwjwvLNLHMM/MwkdNGHHVSoA90WE+Z/XrlaAcSK35WcmQMA85HvF2ZxhPrS6UUwXrDvAxMk06kYxh5enlinjREkS6PoES96bGqMw4zMUwauofDJa/ekARNDLVTeqyzr8/zJgJVp7U+hE6j08BiHxQwUV7DCSt72Wmi083r5dInw7oNVclqw/qB0hI0RQofaaNyyyzRBzfWUNpBFX1YV9HprndW0cX19gDWkFpE6XbDIEzzqLkooKt0iXhHL5R02+lDRGhgLNOoGNmUlXbng8pW7E073r0wxUI6+nbBCt4BthJHh2rw9d7OuWBCJ4Z11KzychVUsWfI65VgddvYmmaQYHroqFEyV6tgzNE3WY5xnMhVvy6itvjSjHp+OiVOWteyY7rEQo3JpWZoKj+9NVZaoCjABVGwRzXqNVEUby/ORIuc4CMtN3LSs0Wa5ujltOO8ZiMpnCXSTCIEA7aRDdgSFLaBfm1trhwYjzFBzdp25PPzM8GrIbpVXrHdphesTTJHeuY036vcCo1MoHlEukG3QRhgCBOl6HXhDCTJiLeIFGq5Gep5nX63JpRs8KHQmoZcH0fGR6+SNdGmwof+NVp+lX84OzDWBwyGma+4r78l14N5Xnh8+BWP998wDmdKrVzXK8ex45zldOrFSd9qgBbG+/6CtYZ5PuOs50j5tVAU4HJ57gVDxLvQJbUdnd5jF/bjgndDb65uf0bPh1uob847y3LuUQF9a4HCUkpW4IPzqqJoxnAj5WkT4zXIGWEaAkOcVVLZi+vW2mt8gTVV/TO+BxD37YEurjrKvyaCM3g/avi8HL0W143UcWjxYK0QYuzTe/U93X4uaYoMNtbgOuXutjG/ve7WQ7l1y6ekvHqbhmOQ/jnUUvCuG/2F121tEy30rFG/S63dp9RliTpkuWkDf3kvbN/saKajV6eTERBLFX2+e6cNShXT5ft6lhujhVEpt8bnl4Dn1huKm+dYfUlVw2Sdw3tHTmot8G3At0da2NFhKq//xOgVytL+QiYp/WyvBYUNyg3I+Ho93vxmN7+g1je25/aZ/jbodXV7j28Npr4XCltoFHKtvHZH3dOmeYTu1nPpazC2UyKlH62G6H3/+iqno+mwQBtri7H9c+9Nkj4POsSg494M2hR6ZxX42pUX1lpKVjiH90oJPdLldeiTy453uTfVOy5AI7MnvRZaU29eqZlWCkNUAnPFkPOOghX0vRzHocv0VDrvnCUnDYU3BsZxfKVEOvdq2+sDRfN6BljryGUj9mgTHZqofN0aoeTM9XKoPz2ZHibecDZA63WmtQTnoDUcfeMsOjQ7ZMfjNQ/OQPCeLetmppaqviURaArXkKrB0TT1T4rpS5kK0zApLbsU3TQ7iF4D5aGphUC8SvIcCE1hIkE9UdL9fjYEpDTiMOC8JefMPKlH2uIx4qgFlfu5phsnBzU39ZOHSAsqb40xsqfEDVbROupfl8uizZtzWNuI0RGCUduGOE7zgPPQRBvLUqQPc5Q8WLLWjdPsGCeV90YLWAWU1FYRLNdLz2PLlVYKOSsW3uyVtSTSkTidRsRqNl7DkNPBHEYF20mjlAOljerP0MULFJ9xt9mUEYYxIBLZ9511TRhJLKeR4dB7pZbKuv7F5v7/4ddfRXNlrSf4M4UDSEzTmS9fPiOSKVVIhyBGGGPg2DN7yyoPKVaT08UjxvLp88bd+Q0+VlwQ7u7ueXl65tPTFcTg/QxxYfSeZjX4L+Wd6C3BjoxRoQV7Kozhjqf9J8qhiHjnTTclG1oW9qsmb9dUCTEyjDDExn61LOOIatEby/CAE8d5nhEx7GumUXi4e8v18oVjuyKlMYyVZX5UaYOtYDI5WYag63cfGnd3d6TdKhayVl5ePpPLCs1wbCqDMwaMF0pOlFpwWMYYWa+HGv/dwN38gJgXxA1YHBSHKY71euXufMc4RXzo6HE305rSq7b1iYfTHcYuKnuUzN3pnvFREdxNGjl
XYoPTXaAUoTbBB8EScV4Rx0Y8NVk+/fSCMU+clon7Nw/8+NMnrPE8ns+4+zO5GC7pR5w74cOMdYEYK/UwSLsSQuDh8T1v3g7U2uDsefdWTZ0/fVRApYIHDTVFYrQ9eFUnzj9//IlPTz+S0qYm4RqZZsPT84tuz4pmrD08vCeXpJOXmFjmO0pRStG2Xij5iev2rGvpYMit8vHjJ476TMpCSlWloLGw7788iFUiOPWHkjavtSVC8JoHgW7eUt4Iw8083RBb8XZiiDNGhEwip4SYK6fTCWsX1hUeHna8P1OqAjWmqeFcoOQDaRmkMQ4Ll+2g1IK1mhJ/Q2C3VjnSjrGiOU6l9Qd0IzhHcRosqDrswjhMvVAOitx1hpR2Uk6UXKg3yZ1HQ3VbYzotQObIBSuKhc81YV3g2BvW6zQqbRnrti7/hSNvGBFOpzPGGmqrHFuG9Q7rb5Oqg2VZ9J7oeNhcM9K8ylcsOKuwkPOyqL+zaDBsjCMlbRhzKJnMNoYw4M2DFiQmkdJH0vGgIIthYIgjL88bp9PIEEaVIRY16BYaR1XikG8qv6rNKaigVvZtpdbK+XTHw9073jz8mpf/+IXH+4kqmiT/5vHEjz/8xOPDe6yNPD8/I0V0yGIczkXuH96Rjsq+XylSkSqsV2G3rk/CdViRUgaUuqmftealmKDIWmMsNVUGP/dNqyUMswaKhkbOGmmB0aDbnFe8t7iex/a+/TtO4xljdKNjCPzrv/2fmMY31Oq4Xl948+aOy/WFbduIQ2Sa57+QZ+nw6Hq9kNLKOMzc333VFQ06udcNSuPl5QvzNDF0emVKmVv9WWrr4IeV0+leZT/W9sJSC9tcC0dKlJJYlvd966MTclAoT8kKUxqGyDDMr8AHBUSAcZFaL1i0aPI2gvXQtPFuGuGEoRCi06beRmo9wCiK3BhHLZXWMpiKcVHx2c5jaH2oosOGkjIp7cTBqs9DD5TXn8mgDfnLyycNnI4DVaQDELS4Bx3E5JyJg3TIRM8Pu+0hRAvIdCiQyfQmQ/9X61tjbc5TSrSWdKDRQ4NVeq7I/Nv2R9/7ppALq9K5zmZXbw2mo8T7ELVAjINu3aUPaqz6o6AXpdLQSIlfvg8IznqUrGdw3jF0if2PP/zINI2c4kK4/hbu/gmh0JpKy4RbVuVtuwPeem2STB94dBnwkfOr5O8WqPz6OfyFh612KIi+B7c/98u5rqCCRJWm1NqepaUo9EbtuVH9VXEbGq3r3gvV0K//o6PEbwCRQruR17rE8eYjM32TlHPqYKKNQ9HcFAAAIABJREFUX4YbFnX8aCFurMO60KEdet83KYxzZJze9pckWDO8biGlw2dqu3JdnwDBiG6Tjv3K9eWFgmHLAbkU1nXvwBfNuhqGwDwvWFswJCXHegVLea9k15SSQpNqRqQyBs+0TNRmKKNihud5pJTMPM99O6vDhJyjZqGlguEFgHnSa0YH/IHLdeXhQbfixhh++vH3OCvUXDhS5unzJ1ywDPOCGNdlyRd8mLheX7DesJwmTvPMp89fCCaA1++xXS7E2KmiIuSUOU1n1utKqTq0GaeJWhLBR0K4+c0DpRxMY2SaI6WeuLwcpMNQikFMY55mvPU8Pj7gg+fLl0/4wdNyUmpFEyBwf3/Pni4cWTHkOTWahWnScF/KSJgmihHdxllhMFFr5zDixpFlhjAMHNvK4+PC+TRiMexXIXjL3f3MtESeX15YxhMpadiyQS000+w59iutZYK1OJd1+B2iRt+cZ5qs/Nf/+hPPz5nzMjJGx8NdJC2BL0+W3/3uJ95//TXXdcX7yDKe+byr1BbjaTVT2mem6Uxrmbw3WrWMceTp8jPODGxHYD8i8/TAsetgrbbCvq7cP95RSmPbrqRc8O70L/Y1fxXNlTHQpKpcaxoxNSI5YF1hCoY5LOz7legDwQXVg6bK6TSxb8LluiMtM48nyKtmrBTLXhJlrzwsj3pItQ5XmKJOBa1K3YyxDMOAcwHnA+c3kc/PLyrV8Qc+CNN4R01PDH4mhEgImoNSW4JWsNYQ3cjpfibXC4LpBc+JZZqZFyWmXf2hsATraf4Ot8xYK9ydYtdz6ur8+fnKv/r6HcvwgA+OZg+eXj5jTicELQSieUspdwzTSM6FUgrj6Nn3hGkOjxKYrl9W5vnMMCg95zgS16vmktQKkgVP5rxYzifFYpZsCIOiypstOGM7Pa8BWkgbs/Dh0x9pw8A8vu0Hr/Dm7UyIgZfnL2z7zrEu3D06op+7ob/ydPmRvVxwwfLzl53/9scPjKMjRofME/M8cz5NlC/3lCogCWMSmMpeDuLsEAqfnn/H734vLMvCOBl81CLl5bLio2LbSylYgZI8rozcpGFfvlxobIzDoGQku3OURoieaRn61sPy+fmJZZkJNnDsQmqfmKYFExzeBKwXcC88vxxcrxvXVbXIaRWmcVR63XjP5+dnzqeJ6/MX0nrw5v49K1cykz7opJGODeqJUvfXyaLiS3v2ldOcj2mc2PejZ80I03mh1cC6NZCsobvxG7ZNzaulCPu+qwxjUCy7s41tg7QaimjRduyJWnRLkkvu03GVLFIcDc0EuW2eatZ/t9p42QvNGAwV2kqrhXHySvo0ilu+7hvjMDL6CM3g6oCTidT0YSpGZQzHcUVwOGa8GZmXHcEjaFZazipl27cvvxC56oV1/YJzKt+IYeTylLiR5RRasIPRg7FR2I6N4Cb2Ve/febGEqEb5b74DaSPgGEaP1FmHGz7gveXTk1cZVM0gK9584btvf0scR2KIlFT56YcPvPv6kc8vK3txQODuzSO///3vFTpiwVuVGYzjwr7v/PH6B3768BOP7wO1JVJqlOz48vITYS5cjycQj/GNr97/LUda+0QZsC9IMyzLPfPiyany8cMzy0k4L3fQDOt1VYro4EnHTslZ5TVjxNmRdbvw5fIBsRXj9AEkzWGNZ5oCR8p/UZwcGHsQJw/N0Y6J/yH+74zLSN4zrUIII//2f/zfCOFEzolcVtzgOI6Ddf3MMDTef/01WGhHb4ydYK2wr0+clsA0zYxx1GyhqmGQIoacGsf2xBDUz2NMIJdMwOGibkFzXpmdYT6dsSF0YEIDceCEnA5S2rAeYlgAxw1nfKO515KhCdEPGM49b1FfQ/WClUIrT1hbCGHURqjk7g1rPWLEIK+bnQjGqgRUbF9Qq2vfm0QyhmYD+FGLoY5D1tBkSzo2jaioI7lpfAZNvZbGOZ1UP620tBP8gjMzDU9rW9+QCWIgNfBuY5kWYpw4qn4Nz9C7lqZFnhSmAXCOXD25HpiqU/xqGsUIrRxMQ8NYjzRLFij1IIrT33OGLe0421gGh/ORYjRLzKE+I3G6pg0mQ8k92yhgQ8CWpl4xo/k5HAfRtb5RAzH6TFA/qeKOWjHkPeGDIp5Tykynkfn+kXW9cv30ia8eHogf/zUihRw/UecfadyUNApMiR6Kq+x57XJK9ahhHOMwqHzSovLRpg2RoN6tJo11VzWLDs36YKDk16ZTu++i2w0sYh0xqj8n+MA8jxhbqNUgRSW9IlmVM1KRCk
Vu2ZaWVhr0waK+3Bs1WPHTItIzK/UZV4vHhoiRqTdNrcu0dbspXRZaa6X1ba82H5b1WKmozPTWSL5uT4ztWznD4E/aaBvAOOblgXfvXb+/GnnPLA+3jWjtZ7ZKfEtNOnSgsR6Z4By2QtsP1suFy/Mzb968IYZB/bi+4CajG2Lr2PYDHyIv14t6DRGmaaaUwnPbyDlx7In7+0egEYJutWoruDhQSyUdGWplCCfG4Ck+I7MORp4uLwQX1Z9uLdsQOJ1O1Lf35Kw5gtF4fJcpWqvPmXysjPbMth+s+4Z3E/ulkHb9rJ3xlKKbws3o4MkReXi38PnlmfEK8+ioBqINGh7tDfOk4cnffv0NR71Q6s67x0WhbXXHpEowjvuv37KlyNNnx2TPxJNnb89cr4kYPc56xqDPka+//YZUEtf1Qj0qyTX8MoAE2uaIQFwUTS9l5LQ88HjeCCeHWOUNfHv/hrReuLu7xzklsl7Wj4QA93fvANjSE2saWMbIcew8P38iDvfUtvH2zci7t/d4H/jTn37g7/7ut0Dm48fPyLXxMF+5fvqMDQv3p4W/+ze/5Z//rz8yxDP4gb08cXnaEQthcISxsuc/c357j2Q9g4s/uKYnxnnkWDUs+u6N4/d/+mckqyzfe8vdXfgX+5q/iuZKdY8r4+ixNpDSBecbznudyPgTznni+It0sJSKNY5xzF1fq1Q8zUNKXRYAcxwYoxKDihTtiltjcF5X/jR81PDinHQ9nGwheuGrd2+6Ua9iveHx8S2GW/aG0VBVE/t0D0yzDNEReaDdJBDSaGRKEmIYWN6eWPcXrM2M0RDd0L07OiWVqlO7+/M9zkHeq8I3XFVEtxQ0r0pXzDEOqucXixXH9WlVDelJMeGlgrCTt0zZVcPdWmWO565D0cKzigNGajXK8xchSiSl7dUgbHCUbjAMbmAcJ949elrOHEdBZMdYQ/SR9XLVB+vYmCePdZMWxTWTc+FySbRmkaSm9/Odar23YyOXg3W7EmLk2CvDOFBSpWwqX/GxUZL6c5xv/Po3d32aqlKvcRxZzpZ1u7LvQsqBIQbVXtdDJUjDTJgazy8J5xrGZkWIU6Aa9tyoTXGj+76x7hpGrRNCeB9mQrCEQSe3l+cd72aGydKsavXjNJCPynZUJOmDNbeK8Xf40XDgKNnSRA2mpWTioEMlqab7ybqUpBrWy4pI04dnFfb9QIOTYRgGcooq6TBCzpnnp/VV/mRt5HxauF6uHEcmJXktSo6jh35awKGbuVr7Kt2SjoJpvSC1RjdD4ihlB+t1nU4jOsD0BzDgo6UZISuuCN9T51ut3ZcqlLYzjTON+irnCmFSdHsIQGHdPuHNhAt6n4jXom9ZQp/mBryPPMYTLy+botRFhymuZ9lhdJoOOiEeR4sxEQhYGXpAqE7URYRlmYnjmdT9b9aKovXFKV67gKB5L7U10mHZ10wpH3Qi12VZ6TjANT4/Xcm1Isbw5eN/IR064bfGQfOEMPFlPdQPRsF7IeeFOJpefHuie8N1eyHnndb0c5/CV0hr5LTSJGOdovqPPWOtSkFLSWxrYH256Dnh4HzySjGzDj8MvH14z/PzE0fOGAx3ywnjBDGGXJWmeppHctk5TYr9zrlxSToV/7X995zDV5qD5s5s6wvLdOa0vOF8fo+0QEobAM47xBq2LXXy58LD/Rtac4Dm6NFUirRtFx1chUGblnoLqNXmpubMEAzDELF9It26h8oG9V0c205OWTHjmL65oZ/bGjAMjXnW7Cpof3HmGVqrPU9K88BiiJSStdmhy0pyVVmdCbgwUPveQxcy2jztx0E6tl603MKFb3LeLoMsmXW9IIZXwIQ0RTlrXpZeD9u+0pqSPYPXojlXNeu/egmOq5LoQvgL0AIg6u9qrXEcK/PgOmEUlep1KZIOPQ0pNaY54myhEbBGc+6QSkO3LlU0xNdZT22KAdfv1jN6BIxROeZgBdC8mBtNUX3TXbZbct+GKQ7a4lWeaHUgiFXJUW6FOAgYNKfOVPV4Yft3buBUohWcbhkzSlY7nxesEa7XCx8/P/Hu7T3SHK59RdhnNv9fMM4SDBgxeCOkVLRJxKtM/CgaXm/AONO9Ut2vqV0z3hqM9fhmqa8tu/rwrHOv15AxuuVx3Q9USiativEvpbJeNrbtmXk5421AUP+qQoMGbpTCEAI5p+4x02vQB0W70882QaEIvyDzDS54bcxv29h+i1hx1N5MveYEWvUgW3OzEqjn8Be/Wt9q0pdZxr76uFqfVrzKj6tu7nMpZBrLMGGMQ0QR7V5M3yLVLj/T56FGA1Q0D7Xx8La8xgUEq89S6u3sFPaWMLK/eut0sHzh5fLMEFWWmlNhvTzr/SKB5vUuNrViOqzFxMq7+3vNauxeWGkwnE6U0jrC3vJVmHh+8kyj5WDleXtiLRv3ywOt6GcTYoESdD8oFVcc2/WFyQdcs3ijkJHj+gU7BEwN0KC6xg+//4l5njjf3XP/cMJi+MOf/5mcC8MYMVFl7n/63R9oZHx0PNzfMfiFuJwYTwYvcHkRcJZvf/010QdMs7xcPuDed+XKkbg8X3lphSWCk0Z1ML65I9ozxRUdJN15zvMEdumb34pxF86PI+tlw9uRIdxjc6G1K9eLMI2P3N2/peQV7IVWdwW32JHvvrmnZhiHiVLOIJXz+B3FZ6wXhkllsO/efsPl5Ym7xfK//Lu3rPlP3J3fYJmYp3t+/f17LGdeLk+s24XjMGzXZ77+9SOCV5WO8dgWCdFgbcM5mEbLdn1mnGeGODIMHuszNYceN1ExPv+Lfc1fRXOFgSFoeGzNauLzr3kkEcXhaphaTrX7shytGaZ5AInkXDn2hHODkk+s0p68cwwhkMQq4cw5hjAwBDVV5pJfDwuphSq3LIrAMEaO4yCRGAdHcxp2bC0q7fC6JdLcI88QIsviaNWx75VUEtgdbytDnPpD1XAcEKxVDxXq2cp5xXnFRSN0/GyiiWIfa859rV80p6eWvs7P4HT62QSqCM57hELPiMe4Qkq7PtCBVjf8GKm9KRRRNDKgIAPruocpoundWlxL69NAyeS0vxYeIpqyLS3rAywpTXEYHeM0s0wzW1qVymbA4xjGwp7AGaWxOZ9BAtbpwySVQgNSSZheE9RWMVJw7kaDEuJgON+rl8fgsdYrMc80YjM4H1kIOD+wrTup6LYgDI6RSJW5f94eZKJZfZ9zruSiBDgfwut/t1ZpAh8+fmKIDu8Bt7OvQgxK9LLeI1X/fKmWUujZSgZaxpoR5wcQ9QI4cTd7t2qXJVDFqa/K3oKY1dvSqhItpTWMFWyXwdTaevGlHo4qhZeXi05sjdUiXpyGDYql1cpetYgvRYtnb3QrY4xF2i9r+xAGnIvUmvDOEodIyroyH8cRZx3blnHRKPK8S31EGtuxdy+I6QQknb63rv93Tih1R0S3v9ZqPMKynHujVhBRiartWi8DRD/hrOneF9shFoFxqPpgrnq9eH/zaTj13IzdPGy1xGkNqmRME4LTJgsU3V4OqNXRmqeVTEpNQ4mtnhnSCulGdbIRZydqtTw9f
+YmrZFmKB+fWNcdjIZO5qRNDU2LLMQg1ZJTVX+Mcwjq+Wvdf2INbFuhFdsBAEI6dgaboAljXIDGsa7EaDiOXc8p73FYjDRaPWhie/abp+TKEINKO2yj1h2DJwaH9TOpbIixmKCSqBh1uOV74KR3Bh8G5vIPvI3fs/g3Ks8pB9M4Mw73WDOxr5nH+0CTvR/3FmMd26FgDEVuT6/XMDefT1VoSoxRnwXW63lzu55KIe0rzkrfItge19uLWIMG5OYDYyzWxV7K9q1U//xT2snpwDl9X25bh5surJTuD2kFCJ0UV1+1pgb13VmraoWGbqQUbdwLZ9FICJGm6HYbui+qgtXhmEhj33e9z2LA+76pfvXe6C+pUMqhhXyHOumWm349AU04jg3nHT5otIW0qte8NIVGYPWLtVsIbr9XelCuysY6tMP2oG4TdKMmBUHl+rrdqIzeKYCCPtzonjU9kSzmJmumkKvSwsSq/FKk6rXWP5kmGmpuundRMIjt+Y3GdFBIptWiXg/xt1JCWytjaRiqZKypfbNkMARtEIxhmlQx8OXzF7Y9MQ4RZyK2gRzvKcszTXQgaSwMQyQXvTYNOpQ0GAoa9wKNXCuuN7egz3kRtFFDumzO9eGrfqiW3nx0sppBsCJdRnr7maw+o9qtpr+pB/RZeINtaNOkFORfcsra6zUrxoAz6rEz5vX9Vn9U7VtHvV5dx87rxvQXKa619lWaqfJNh9zeX2Nf764mrUti1dsr1uLoMsfWsE7PP2/7P6/XWQeLoM8te7sujPTh9s0HfWva1YtWakesI0rcy6UPUG7hux260X111sKMZQzaXDmXXqEkOWdq0WvfWQ0Ddk6jUUJQL+ANlpFzxhrDPN2y8QSpAe8CMUAMShPethXvPfuqGZLWNO5Oj9S6E73nPM/kfHA+3SHNEIJnGDyfPn8iDhP79aDkhh9HTl9NvFxX2p7ZL1fevTlxnpfu1wUrDY/FNgPOEYzF5koYYXKB0zASsOz7E+e7kbvT1Gtgy2QfGMZIlcS+7zy3yGkufHW/kEtkO0f8MOs16gq5FkoRvnr3PcEn1usLKSf8EAmmEMURZGSyCzaufHlJlOSQtmJtZB7vsX5gWw9KbQxRn8PNZ6bJIaKZoUacQncChGB48/ah+1MVMHd3N1E+fSS4QAwn5ukebye+/dX3fPriWLfA48OZZX7BxcaRwLrI4/3Efqnc35/RAGzNPzyfJqbx1J8JmbuHAWmRdVP/ceX5X2xr/iqaK2sNd8tILaqfRRoxREWRGkfOfeNSVZOai+ImrQmcTgvOWtKW+bgrh/7h7h5rLet64WYuDaFTdJyu8Z13lFrZD+XfBxswQWiO11V5tJFmK9YLD8vCsR0U07qpWXHwZVtpAsFZ5ui4OwfSYV+BCN5lQnAsU+xT7qTyRu97CK0HsTxnJSHeDm1pms1heqFValYYgvRAxZaQZtjTi0rkxFPF4rxqcsVsui2IkSoNbyoWba68rQyDsB1FNfPS1BdhhG19IYTIMiuS1FmHtZpynw4hBkepO7WurNdMI+Ft1I2aaWAauSOjl7iwLAvTOPGyP9OoWKfhr9hClqSFnKsIFe8mhtlzvWomidiGuERpClHwsSGsWDfhWsE6wzxH4qDNqPcDBst+rJSy94m3bjVyi6RUcaLFVan68B7GqRPwIs7cU+1OKQmXC/Yo7Fvq/pjc5QpCqfDhwwdC8IToMaaqtM0duKCQjZSF9XrQepq5tES6WqiFcWy4oBuq4AOIQ5yjBU+pis1VoGDFUJUiNCjIoBrwNoArBCytFye1ZjClZ88IGMt+FMYOQEA8KSnW1ntHlkbeC00KtSqcw7rwSlm6YXyNtYzDiLMDKSmGdBgUd+q9hl47ZzmSU29Oq3inGTH7trHtm2q3nX0lPCmUQ/1bznhFl7eb3C6AsczzTM56TRoTsCgyu+TbxlaRrtKzXmputFIIA9DNx9YGnNcSxfSt5mAmhYbU2rNTBJFCykcvaNVDlo+NkpWc15qjyvHqVZBO/lPPRSP4qOHf46LbiZLRoFPXQ3+vSEM38TiiXfDBai5LH6SIKMJas1U0W4lWybtm4VgnpLQSomUcJqrxHNdEqwkjhnm6wznPhw9/ZlhmcmqYBlYig7d4XylSENGsGCO2S2M80xiRumKNSsh8dFjvOC4r0XuaM1rkdKKcNaLnjB+Y4ol3+78n+AFjgjaTZePN/bfkNLBeGjl/4Tffe1LtBnMMjsCxb5SaCTESQtRGmvbaRJekvs7RjwQX+wS8QevFVCkc+9pJhxFB/R16+d/8Q1lzfLyGGd/8VE3kFYCQjp2cNoZRSbFanmkDZ4yh5ExtqfsNlVZFb8DMbbiRC87o36pNJbZOTFcYANaQ06bgAKvgEhE6Dr1TQauwbzsYNZT728/cyquMD1GoUmka1XDLBVSgSPeKoaKElNY+pdfnjO6TX1vP14I5p4KN/TM28vr+3DYTtSZs28EsGBuw4vV1WfO64WhNsL34xVikQwloQkXlNqbVLjnLFGOxNAi3YrveuqJePFcat02I072d1dxEY3RzpR6pA2fHfsbx2hSbPlBKZceapkW7NVgmrHW0WolxIMZBp/OXTaXEUQFZS/oN++k/cxiVyGEbwzAgu25gQGhWicVWdMtqnKFlhYiASuLEqHrFWt1EOKP1hRjXA2/p12Hr73nPN3S3RlPfjxA81p90OGB6w2XsK+RIdZG9abGuN8vyukF3Xrf89CHVzbulUIbbZ6dfs1mFL+h2p/aoAnn10P3StPUhldXvIc2A7RtfYzsAo9CK0udugzV9/TeQh++tKjq0LblLCRWTba3vNYgqMlrJGKeDbReUnthaxTjf8+YcIkatD5cLpZbXJjan1Dfb2oQ5bzjdC8EYpDbNGSyJkhNQ+iA7E5zHWfXihsH3oYRVqWDK7Iei8KcpIrWRc2FL2oxZ25hPE/P8jucvGgdg2JWYaxqPywNHfsE7TwgKlhj8iDWRaYpMk+fHHycmP/D8+QvpKMynt/yrv/2W//BP/yc/f/7McTmYf73w9VdfsYwzUjL79RkEwmnCR8DoRjgaYXSW0Q8EE1j8C4/TSDAWjw72TtMJFwPiAilEprzAHDlFoclBaolsHGXP+MGRSmI7Dh4efsV5bHyxH9j2lTBFUvqCWEtwE9GNiD9Ih8JvjvTEum383d/+G5blnpK+UPLBEE7UlhjGgPMRIwMhe66Xax+ARSyBaZx5fn6mlKagJ2u6AMczDROnZSJtjYf7R6w/qHUhuMjzu8wff/g91IIfPL/66sQf1h94OD3ifGU/XsgH3D3cM09nWqtc143z/YC1I+FauVwPjv2/A1pg9I7T6MkHDDYSJ12Rp2R1+mwK3333gGmRfTvYtpXjuPD+q69VupcLT+2Z+O69mjpdpbQD45JqdhNM88Q0n3DOaPK6KYROQsvHqAVSMGAS1iScDVwvLyzDwDyeeTjPbOGZcXgEVFvto+H9/anTcxLX9UWnX7YRo+dBTsDYN1M6QRvGwOBOCBvezTir8IvT8g3eD5RWSGlj25/JuZFlx1jLMo+8
fXzHth2UbNg2w+cvF5BdpShOUZS1wH58ZF4C5/Md59M9Hz5U/uYfvuenH5/Yt8S7r75jWc58/PQTYgWs59OHkfPynj/96U/s+wvXl5/56YefOZ/vmBZ9lF1eKt9//x3WezXNb5VSLevxI6f5Dd5PiGSin0jXKx8/fub5eed08vzjf/jP3L+xLPOJ4AfW42eCP2ECuKCTKoXaJaxPUHaa3bl/fKA1wzgF5tlj3Yz3npfnDbCczwvOCxCZpxmh8fLDT6gJV/Hd+/6ZLemW5bpetGjnQivgg+ZROOu5Xp+57LqZzLmwbTvbtkEXuDjvNJTyKJzPZ0C10Gk3hAi17Yh0wiUJQyTJRRvHjscu1vBcC7CrvHQTfG60Vsho6OwwXInBgQjHlpX2JJaGo1Zhzz9wf/+Go2gzZZ2GtyKWdV0xxnJ//8iy6AZDtf6Ahet2ZZ5njBOsF9btyhTfYINBbOF4zahSJDCoid16SzQqJXt6fnl9wF7XW/Cz4/n5Wad7tgHqbwrOMw5Dl/jpZjSlxI04lkpmiAuG0ItnA63wsl27vK4jeIdDzelNKE0HDOFmlO/m45wNKSdqXfF+YJ7vqVVpWCUn1uvKkTIpvxC8x9pBzdd4zZAqL3wxX5BaWKaF8/KWLW+kY8c6mMalbzM7lrgZ5vHEL4n28PnzZx7uHsEJpQrXFyUXOa+konJk3r//NbVmfnz+kSaFxzcjpeycTyPGKOmPNoHZEOcwrhOKrMqYS05YHHent3hrcFKRogHT33z9lvPpDbyDI63s+wVvPCKVZBNgmYYZaMT5TIye4MBGx5s3Z5ybOErlsm/4GBmXmW1NKqmVxnxayDkxDBN38Vt+lf5Xwn3Bu4nLZePleuG7b/6eu+U3/OM//iMvL8/8/T/8HT5a1qtSHbzVwvdy/UxtgnMDMUx6Zt8K0lbZtpXr5Qlr7xmGW3hr6QWmgNEQYSWxjbSqfhYrQikN03HlTQp+GInTohPvdpMWahZTq0kLYlEghnbR9MQx4UgaHu6MElCr9M2JKKjCiKekhLiEGAfOMngtdmvfFhiBnFcMpU/3Hcd+4G0n+NVGKRpmHKLr28TQJYHSX69uhEpR+eg4zMQ4vZrh6X/S3CAUZSe4igZxBnI6AH7Bw5fK9fqESwU/jwQGzeqS/Gp2l6Z/xppnjB1xQQdoIQSMFHJtmKrfc9uesAOd7tv+YsNYdStihHRsDFOimVG3UTepojX9/BekNJXtWsONpRiMwXi9BvQ3BeeF5Txi7YLIqFsdr2sJPZ6awqKix7qE2Ia/0fGMU9lwcPzmb37Df/qn/8Tz85VljpxPA0PwJFaM31WaZxop75znB1pRaaF1FvzM0qqeQQLTeeF6fenQB7gBFrw1VNEzIeUdwRIG9ecY0zdTY+yv24B4rlcFLNA3M9FFbjwL7x0hBkSd1a8NxK2BsdZ2WuStodZnGkYbN6kadUGrvalzDJ5fkPW9xrkNw26bMQVmpE7U1PN833dum/pb06aAJt1mhaBgJpX03hozJYt6516fH3O86wPdTnd1juAcOfUwcik6iJTYUeZOYxFlAAAgAElEQVTgnWUriZIyuUApCvS1YWBbNyUNRw1/jm6kFN1E+RBIJffGvqhUenokWMNyUsKu7f6wdbtQ+9DfaQfKOI7sx04uB7VltvWZY1/x3jCPngnHx88/U4+FEEdktJSm2Pn7X59w3lKL8HB/4kirYuNFGKNjmc+8eVC597o+8+bhzONgKO1vMMYyBsc1Z/7nf/s3PF0eKFL46jff8OHDB6QYzvOZ+/P3/PD7P/P45g3n80yTxk8fPyHNMk0RI0rdexgX9h93Dms4nSbdzsjBzz+9kJXiQpwND+/fw3FweXlif0kKlzk8w2nh5A3VF7Y//8TDtwscwnGBPWXe//q3/Jw+IGGgBMuf/vgZeMdWfseenpDqmX76Fd9/96CDYWPxcQLzA9a+Y1sT+/bCPD7iQ2KcEiE2jDh+/193vvnNieenJy4vL8oakMYY7pnGhWl0JBl5evrA+e6MNWeObePNw4mUdVjuvMFZ+Nvvf6tDqnolOMvyEPHWY5rHimUIE+eHCWsC0xC4O92BGYD/4/+1r/mraK6C9/z2u29Jh2HdMtftZ873Jy4vejOe7hVnPDgPDx6RBeRblmmhFigC4+Md8/we70HQB/OnzwLWsZxPxKhyju3Ycb5pCK6NRBtZTgMvl4+UYjAMODtwHCsPdxOn0z3Rz6zPO2+/f2SKM8Y4Sqvs+RlphvP5HmMaT8+e+e5XbPuO91qM52T4+ccnllMlBC1W//iHj5zuTjopyTtGHM/Pn7C+Ms0jwzgRx5lKZWDsRCbHkZ/I6crj44l37+45nS3nx/fgGunYkdr4v5l7kx7JsTVN7zkzSaOZuXuMOdyhRlRDENS9EFpLbbTTv9VCC+kvSJAgqKu6qtV158yMwScbSJ5Ri++4ZwlQ1/oGEMiMTA93I41GfsP7Pu/tzVuW7czNzY1IuZLi3/3b/5qHx8+EADEl/KDRKvFvvv11J94tfPvhHQ9fN27/7lfEbeH5/MBlK3zz/ZH1KgSZDx89KZ/ZNqEEuQECE/i9eCSU5FF9+fyJ+SZh1EZKJ378+oVvfrHnen6kqSu7o+bt7sg//Ief8MPMzf7I8XhD2gK5nlDuiVklvv34a+KqGcc9KYmv4XAcmGbD5bKh8ezGA/cPf8J5z7pFtm3jsJ8xeuC3v/2JZVkBRTCBL59+EhiJnQC4P12Y944YCzFeuf96wruBcdqBUiQaqkRu3xzJPXvJ6MY1R7ZLFvJTLVgz4XSgdAJOq7CVyjg2TA6ylVAZGyIWT4uCBbX2wJIXNiKlLtS2MNg9Ja1iUm9awqfrJkGbTUhetEaripIbaI210vTUyusUt2RFTBu6T8gbTXJFutSuNtn83b15T0kN40RekXKiFEFJvOS0xJK5rgu1vOSuSNFgzUDVMqVtFcIwvuLhVVXsjhMpZ7Sx1Cx5SqUCzeC9Eypd37xRXjY0SHZaTGj10nAZ/PSOp6eFkjNGK/zoWC7PpFoEJY7l5njksiRO5xOlPuHcGRCfx/l0wt8Xkcw2WJNkEL14yR8eLoSg2U1OApP1zPP9mVIkFHctKyU2liUzjIFxF4iXRCqJ2hTbsvBwvzEfPMf9LdtWuMQFoyp+8GzLwmGeOR52bKczznn+8pe/7hu9xJoW3r65Y93k85gzBD+TskyyrdPMN285n554fjqhWma/O+CNBifXh9EWpSt5vWKsxlKYguV4uOP3v/8tgz8yBkERx3xGNYXKjlodymlUMdzeviOWjcZnlnUhr4m8ZZmeD16QwspTk2JNkSUt5DSSyplx2POXv/obPrz9a/7+H/5PLtdnwuC5u33Htq1dzyTy5RRXYrwQ/Mg47DE2kLN4CmWLU0TFoDLe7iTY1yhKbBgFW9q4XM+cTo9415hCkIsHeCG9lVxJcWOLC3qaBXjQmxCtRb60LhdKjjIt7/eFmlsvaOVaX/o2yTiPtZPsf176HVnzEOOV2i5YP+H7hqXV3LdEPZOppR6LoPu5kPaNAqi+ETQ
Qc8bagHWCVy9VMhSttuRSWa5nmipC0nIBqy1rWjtdTraDOSeezl+4PdzR0NQmx1ty95FQe9bXFe27mgBFzluH98m9Jychmc77Ga28fH9VaH1TZoWzDTpxnANKReKiRKarXwpukVrWLJkzb9/fcDpr1ijkVpFiyTmTpnohDBfseIPSllq1vN7ugRNQROJwnBgCxChYduf1a2wBIA14abx/t6Mpz5oMp6unGYXpAtKcK6iIc/I+rmtGNcXwRjE9/DXN/CPaf8ENgXEYcapghkaumfunr+xvPTeHiZIr5/PGOAVKeUApoaNezpHvvvsWdGTdKqnWTt8UD8+LZG1dVnbT3N8/kfCGcYfSgnEfRgmb3bb42uiIrNq9NlQKJAure59evgZElv7ydVn1RqupV7JhUUU8wd1v/Yqf779eGiAJYtav/22eZzmW8iL5bB3aoV9/dlM9S8mKSkkrOh5d/4tmTAZrcg+Tn1kq0qzsZ3ywMghyhkFb2R72Y44xklullEoumdS3Se/vbsgpi6Q/RbacwEpOnHMNP3vm/Z6wC121IJ7qnzd/IsVclkVkggUZ4PwL8qPIVwutJGJcRKaWEqpWxnBhXTMxnfnTD7/j4fErVs+M44BzBlrlTz/+yHIVdcw873j75g2VSkxfAPHafvvhA4/rlYJC1cJpufC73/7Ar//ye958fEvOkR++Xvmbv/hbPn390vkCjf/qv/m33N8/8On+K+sqFN95vsNazXE/MI+efNnI18TN3cwwjGgM5/OP/OHzb/l6WtkyDG7g84+Z47wH5bD6hjfHmbwWMgrvZDlS1MJPP34iExh9IOYLf/in34Cv6L6Nu9lJQPRdraC+wdo9qtxgzYq1j9QSKany5eFPUM54c4s1Ox6+/MT+oHl8+C3Gn9kfJ77/xX9HyotsDSdPcB70DZN9K9E9NLCK9796hzU7tnXlIS2s6cq7dzPipwGlkjw3k2Ua7zgcJgZn+P3vfgDlsc6g/UTKG09PIrUeQnhJ5fkv/vqzaK5U16t6p8SAO3jmQ+CwDxjrGEbDmq6omvo62WHUzDwOxFSh+3DGYUZr8Sel3cTd4QY/jqzbJlMXrXl/947z5UxposO2SgyPh92xS+gl4DAXT2tK0qidZxgCuT3zcPkiemklkrz377+jOTG16+D4/PCJ4+GW3S5Iw/X4FRfEG6SNpNr7KaJMQVkhnhllMVvCuIhyDWUVIewY5h2KyrpG1jXy/S8/QNn1iYri5m6HGz1brMS0UFtknj1TfYNSnfKjPalGXPAc7/bksrClpy41MCg1UYtmXRfefwhM457L6Qo/RG7eO27uPF/rM6jK7dvA09MT4xjEb6Aq3g7MuecZ0SScUBvu3g2UciBulTWu5FJxemQYLfNsePP2Bv13NxyOM7t9YBg1afU8PW0cb75lmj278Q0P90+y2ei5PaksnJ5yl0I0YlzY7WZSiRjT8EGj9Yhm4te/+iXLsrJuC1ortnQvhlerCINDMdDYpInQmuNtJa0Roz3KVMZBMc837KaJ66JkgtYqN4cDy5rQuqAMWJ3QesB7kdkorXAp0ADf5R1NVb582diPO/Y7TwiGFA2pntFeY2iYIkGy1sA4iESNBtP0Bpom5UTcEsuyYSw418i1IlmT8vCyfeNV6ooxFqvH19DKdV3RypAr0KSxqUVTiXRBKi8B0usi4A/rZAK5rFEKvO5LqVUmudaKHKM2qFvB7XYS9N1ExmGtYlnl82e0EZ9fVeRWUbVnczTxBColREGtHDk2xsF2vDB8uf+EJtC6pPN0umKMYbteqa1idOVyPUFtGB1eJSbB9yw4axm9YMaXlY7zFclTiplpEHkj1VA2gxkHWu1Y2grDsKMUkRW3okhrYhp3fPn6hFYW6xzjeKDljevzQq4iGZunSYq9XLHaY7AMg4SSB2cZwiDyoNowWIIpqMHTWuuv0YtcyxhMU+zcRLjxMpFuYgg3tqGqFr+UlXDwdVl5yRJ5ftg4zHfM4wFrJLZgDIZ1vUp+lxYCZXCB7SrkPNaCrYbBj4RpFHiQ89ASynlC/p6hfI/SlpQb+/1b5vkN0/iWbdu4nC9AJQTPOI6Sb9bBDq1JIPy2ndgPt1gjx9u6yUSgEwJbMFbjw4jShlwqUnAImjqmyBoXjOkFEXQPaMFoCWdf15WUI4cwgta0UkTC1OTcbOuVXKRgdK6/F7ILkGK+yVZZ6GfSGJVaxK/y+lobqVy7pFsGaCUloIER6V+KEUiUyqv8NpVNtgZaS0NZs2ygUWjteMEAv/hL6JudmCLoinFOmo9XY73IrmutrOtCSouEv2sLL4Vu93xJnlx/nloHupPhqki4QCSVtQkmeZhGSnKUqrr0DFrV/fOmsaNnf9DE2NDRQpRzKKYY8Uu3Wrh9E/DBUM8CulBa/L5KO0xTlJpJ8cLhaCk99VrrJhALLd7YWgolN0LQ4uhSImluSomUWosErQIlJYxq5NZfb5NGDfp71xQ5Fm6PMyE4liVyPi3EGHh/+5Gjv6f6QvCBUjTaGQob6MaHjx9RVbNeHrDNMM0DD09f+LD3PXS6cTce+dU3dzw9febrdqGSefP+LZfzhZgWaLJFObgRaw0xZraSyClh7PzaCBslG7CkeUWul9ygZV6yh+Qa6HEsTcuG2BjZPhkhqqounc05SX3QQSalSJNAzwxVHWLhrOs2BWkouzJVZIy5kNi6vPvnRkzpnjWGAGaaQj77WomfrzeUOeefZYIKqeOKgiB5kN45vA8Mw8AweFz3rAdrRS4ur0Zoya+By11GqhTG6X7N87rBi0mkx42+RQ2BpnUfmMgm43K99CGpouTKGmU7V7qkOeVCTgKXUT1jLMaNmuX8KGWoqqGHGasSeIcZRrR/i1YD1ohvfYsnyCvGOZpWrKnx9fHMMFpyXqAqtPZ8+foV7cZOwVU449lK47e/+6NETWSwJrCbqgxnWVjOKz9cfwAFu2FiCgNbzCznZ96/e8tyTjx+uWJpzLuBz6cnysMDKWq0KqTiOB7lOmux4i1YooBbvPj4m2kU7di0QRXY38A3u1+AcrRSiOsD53PBziJfrQWOx5FM4rLdgIZhmqBYJntg2A+kIGqzb+/+kvMlEtye3XTTLxBF2L7lvHzhh989MoWf+O67v2A37dm2M8tl5XB4Q7yuLNeFhmbe3XF6WjBmo7aM0aIG8qNjf9jjnOV0+iPLJXHcvyOEHaYNHHd33O/O/ZZfyS132bTrXsVMzKd/ta/5s2mujNOC11WNwzDgR3nYWRswVqE3KUBUk/wTo6RIMa51433F2NITziXEjNFjfZAgztpQ2jCMgXkKLNuKaJK7lErTfVSNWhOKgW0TsIZxgkhfnwtLlk2IcZ61ZHCWa1q7KXrlui6MU2DLomMubWOYvHhbWqWqRtgVjN4Y3UtieQMbxLRtjDSUgxStWjXCYNmlyt3bmZJSN0Y3nJ3ET2WcPBhbAQpaSbic1jLNvi7PWBskfJiG2iIpKZbtilYOFyzajgQnOSCKxt12g54s2mam3UgYPMPoOF8a0yjfXym5+e4YyEVyoEqNTPOe+W
BJm6NkjXWZh8cv7PyA85pp5xj8xIf3TrZ0weBDZTCeWnZMs2fsiM9hKFgtniulLMvSeHo8MQwOpRo5bwJYiCJN0P3hY/VACIbdPBLjQIqZ/M07tq1P14xMxbbYOoJfE8bMhYa1jaolF2W3G2lNMTSHtV1+0zypbq8m6dYqWjV2O4+xGWUKJUnjHzchV05j4O1h7Jhzh3aBh8eIo7KbPSCUxpJkTT2OA8GPKAXv3t2yrVmaqxixz2fcYGg6sW6RlBLWjuKl6VKUlFKXA9mumS9cz1dCGHgxDEMjp4I28rB7yVMx2klzXiUjSLxOiQxU2whBvRao4oFA/A1Rgr7lISqFpdJaVvC1EdyLPCpLALPWqGZ7YKOgd4kiD7JaJvJGS8G0xVWgN8bQimZbV7x3tCrynlILcd0ku8p40Jraeoq8Att1+rVmmWoZeRAX1cgxctwfur+lSuD4C2pYGZzVBG9Z1ozVhlYzOWWR0NQu2uoG8ZoVFSkajBJanHMGVTwGyaKzWjGNI1YLFCK4QQrjpjBognW4bhI3RiQ7SklRacMIQVFyZFkvNOXxzqGwqA7VoRpa375oBGAyjnuCD+JnqQ3dDFZLkW+0ZimRYRhEUtMMQQcUGVMN3noJYNXiMxj4yMgvCOo9VRWC33PYf2AY9ihlSDlyuV5QCsIgMQdrTDTduuyuiuG8rFjjMb0h6HYdaFKMb3HFWIuxLwHTnUCmpBisJdNUQdmpS8iqXENVivGUosAYjMbagZd1U3sxJiGh3a31bCrnxb/Rv+olZLeUKDmH2oifqBaUNS+9juR85RVnBdQBqt+LQb1u4TZy3V59VJIp9QKXMNQmW+OYrzgjE3OldA+x7q6U1milkF+gSs6jjQw2ZFMgP1M+x4lxFwjDKHLxWl69QnTvoOTcaYz33YsiW0Nh2PU/14JWtQOEOgQDua6q+vn57b1lmizLtr3KmFWrvSnsX9gazmZirqSsKEVjjECDdA/VbbUwBMNhP/H4LMHyEq314oHj9Xxo3chFQR8QyCbjZbclJbTk3wrwpjQt90dql5aKPDLFzBgMh2PAOc26Jp6eVz6+v2OePqK9woYztVm2tslzz1s+fLjjfP9AXlaMDoyjJm6N27uZZV3JSeIVbvcOy4hqG2tUfP/hwKe29MHwC61PmuAyWWJ2XLeId04GWrYxeihZEXzAD4EXSXrNS/fHKn7GU/TtrNKvG6f6MrmgQ2GV7b62LiZtyMBNvdaxUucYAQa1Tr562eq8/Blq3ziprqiQ90e2jPKu1VqpqqDp3rSuttAKtDG9IWkdvkSnnL5cNqpLZqXJ1k6oiwqBcAjERTxxpcjgTiFgpIqcW+neRH7osK+1ktKJXCo1icew1oq1je0aX71opUhOm1GAEkm4qkWGNO3l+1ryur02cEorSo+KwHTPmPUYDzQ53lITFYN2Ce/lXoDqwbdFk7JI4GmN5boyTBZrkgwaVCWmwnU5k3IDLDd7zfPpwtsbkZavufBwvmeeZ25uDlhnuV4XzmXFoMilkWIhkQiTbEXjlkhR7tnj7sC8l2zQtMhgtVUtS3Yr4dE1NJrWWO/YhcA8OmIW3ypkgh0xSrF/M7DFynLJDE6zpBNOH6h9SVBUwtYBp3cEP1HsilI7rLrg3cy8mwmDePGnMTCGHapNxLhwvQhZXFRFimD3JE6UIjTqkiIpR6adRelCKZW4ZoFvVfFpL9czcXWEuwlvfa8BSidxJ4GwKUuMC945St3I/2Jw+1/69WfSXIHyjbJtFLUxHwZyq7hBUNulVjCS/KxkjIdSkaIKzWRiWYnbxpquTKMQP9Ay3VqXMyFMBOvQylDKxps3t1yvQnVCG1JNLMsiGE1rpeDWEoSnNCiTyU1Jw+Qcznt8GFhzZU2F0/mR6/WZklaGoHm+fmGJAWcdfnRMO8PD40n0vlYx7jToyG64weiRuF65c45pPHZja8K6yrpI4OfhGAh+wobGaUvkJg1ZjVHyM5h78N6GapqcE7d3e4YwUovlx88/cdi/xRiFNZ797iNfH+55eP7CMI4cDje8Obzn9LSwxox2hvffveGyFU7Pz9wc32CdZt0EizyM9nV6phRMw1vQhVSubPGECzOlLOgWsMPEx48Tv1Er08c7JDQuE5NANWqN5Cyp3M5qbo+3YvZdIaYVp3eUdsHojPMOZ284n2Qia42i5I2cxUSaSu4EIzHIx7QxDJbdfMPpKfGXN3eklLlcrzyf7tGuEerU5WdiGC85ojByk1YGO1jOzwvWWcIo9K77r1fGUeO83LCXa4Iqq3UfPI2MajtSUny6/4pRkV9/M/Hf//tv+Kf/9M88XTUPV8/junAXZt7dzuQKj+fI6SGiTcMPEmpNaxwOe07qzIChNoMyV8Zpj7KNdqqUuhAGjSII6YmC90q8QErwtbWJpMN506VJcrNcY2KcJHPoBWvuvGOeJpblzLpdcQ5qVazrgjVJHkKmYuzQZR4KZSylVNbryrXKgylMA9Y7UpaHkWoNrS3bsjLNsxCBquJ63giDo5YiFJ648PbtG6GJ9YnqOE6UrHDWoyykLRNXyV5LaRXARlGkkjDeY6xDmYq18qTWSqErbLFyOO7IpdMzlaWkR+7uBlQzLMvK8/OFFM/U0pjGCR/0q8wn5/bqVdiWhf04s27dMxcvQiqyViRstbDGlXmesc1RUiUuCeUs7799T8rSdA6ThEOmGNmKFD77cc+aCsauIvNpQqDzwYufRldqUTi9lwyuJo2D8xZvdszjnpiuxLSwn/eCp6+pE0IVy1VCYb21Ql4thnm3I8WGt57BBy7LVSQrk8HpIEVqKczl7wjmFmUNmcyHj98z+DtKkU2ys47z5YR3hmmcRBZaN2kkrOqy7QtQcG4U+XIVjcXLJijnyBYvjKMATioiJaLDCnIUep8PGj8M4h0tdK+QFEspda+cH7DGi/6z/exLeUGRG6NwIQhMRRJ7uu+qSexH2bDeiARLi5xHdYi2BKdHUlrwfo/W8nql0Wm9uRLZ9pZWnNsh0AsJ6q0vMrcqUJVtO2OnnWzjlCbVDJgODZCNQ0qLKA+8BAy3DhAQAA7QpPB8//EtQ9hBVT0vSFDar/4ZXTneil9N/Gw/H9dLhV1LJZZIVRveHqUx5LV/7T9bNtTD6Fn+VEjJvDprGiIFM1povaWdeHwybOtEKeYVu650FbCGyrx7u+ft3Z7T+UrJvWHo2xNpbrsmsxWakqwp/erx6Vl83VM0eIVWA7nK9krb1qE/L+RUkZWNYyZ4uafF5PjNf/49H799w234BqsDNfxf7Pc7rs/PaKsY5okwT3z9+if8zuKdZZrg+8Md8+7I+XwlpczgHNpuvHt/5Lj31JT58P4WtdxjtOS2pVSg+53GaYe2nmtMpFTxTjN6ze1ec75kwjiinQyVkhUYyYvXyRiDc07OaavdE1iBwvnUSXp9qCBh7S8DMqAj7HWXLCqlsK5HgfBzIPILKVArhVH61aNVaqbkTEmlUz7lI1RqReXatZpd1WEkV9R7i/e+N0DtdaMoW7VG2hI1wcqGNhrvHfN+x+qAVtAWrBEpa
2uN5ZrYtkLaRIIeY+vAIvp2mFd7iFIdSkLBqtA3+GC6FPjl71mr2c0jZify7ZQFbLQuQql7IZ/qGok5y4AxF5q2bGnDu0E+R4gtRAK6NUp7pjEwGNeHFP2aVkIknsbeVJaCRjEETS0SsLyskRqrkJBnhx8cQ1Ck8kTOL8NJUVOVCs4Hpt2E1p7B76kpMQbHNHmeTvfkWrHWC/BpbBhv2Q1v2O+cEGIb5HKlrJW4VXJtTJPDpUi1lv1h5v2bN6jnzD///jdUrdAeifILjsONKJgMEaMLipXjfseWE4+nE2mNVNNQBKz2HOYjy7Ji93JPHrxl3n1DXj9zuB1w4Tt+uf0l//E//z1/+MN/RGnNNE7c3r6hFVF6eCfb+XV5Bu3ZzTtqSzw+CFRkbIrz+YmSEk9PJwb9EasHjIZYLnx+uKCUJm4VZWDaT5wuz/jR8/S4kHPi22+//1f7mj+L5qrphtsZslZc08b9s3TT6/M967pyOj/z5v0dQTmCDwyDx4fI509PPcRXoxEPx/PyFWXkZlxz5Xi847KecVVMjdfTmet6lgm4gqoqmBXrN1IBdCCMjvP5AUwm7Gasczx8fqRsGuMGaJocFW9vf83D80+s6wPGJN68OaJ6zo8zgyQ4t0LKVw6HY+8iG8a8RfIXDMGNzB8+8vnLD1hXuVxWrpco+M7JColtjdw/fGZ/M/H0uHVcaaaycPPW83j/haI12g24nUOXxv35THm+p5GwLvD56bPcWNBs54IdK9rKhzwlz5evK6VGUrzQiviLcoIwuL7ZiOSy8P7DHXFVkqNj4HA4UPPCdTvjBrh7c8P1IgXvblIE19guFz7e/hXWb8RouV41sX5lPggIxFrLPB5Z1ieuVzGIGm0J/pYfPv2OYVexSbNcLTmJ3E0rycfSFtYk0p+cNcNw4Jv3f8vv//iPPJ++SAheOBDcGx4f78k5cl0uPJ8Xvv3lzDR8z5fPjzw+3WO9EPZyWtDKCA6/Xnl8OjHvJ1SC5bqimJiPIz681CCGUje2ktF1x+hn6A+qd7czzib2x0o2kf/hf/z3/P3fX/jf/o+vBK349vtvUK2Qlw3xo1g0kaenL1x0wNuJf/iPX4DK4AeMsVyvCYhczpGSLMEfyakQ4wPOBdkG6IltewZbIAsi/+7NgVKvpJihGrT2jJMRWSpSFBrtCXZHCIG8FdaygdHsxpnSzcDOONbtmXpNzIeZauD58cRhP1NjFYy+lmnf4+MXvHUYKxjvmhM3hyMxZx7vn1iXjfP5yvff/RJVK2W9kPLG8+MJ57QMBJzlcPyG9brRWqSUFVSitEwtCm0bYwgMwRNXg7GWquQBq/swJieRvLy/+5anyxeRUXpHGALv3o2U/AVnjsyTZxpu2eKFt+/eoXCyJfBw2De+fv2KtppxN/bcIE9KoI1i2BmsKnz56YRTgXkYOB4sz+eVw/yW3TgRvCMuK8+Pz9wc7yQ0vVW0ygw7z24M1CKNb6uZ9Vr7OYdSn6AGWt2h20CwUiRv6yJp9nZEVSk4Sq0Y4xi0YQg7luUi+Vg9eLS0hdndUhOczonrpaKaFEbGKrwbiLZyJWNdYG9/wbz+GxIr1jlqA2Mc373/a4KTh6FSGmsnnp6+kmLiw/sPvH37llo3lGovQDNKquS0sN8dCX4QNHVtCDhmIKbIdT2zbM8cD99Iw4IMDWjiz0nrStoWOZ6X6XRfITYauWWWeAE2rHMMw0jOSaaqip4flOVe40CZF5+IQDLEswIxZnJeUewx2vctcCWXhFH6Z4JtWal17lK3yotIqvXNVs6RUiLDMAnat9buG+kStlxIUTLOau68baX6lkXOj+61ay8AACAASURBVGqVnBLr9ozzTXDuKEp7CZ+Vx3nKm8QB+GfG8F3f5dQOlZBTVWqGVvjw4T2nz45zFIKnarBdr+AcNW3QFobdJoMabaCKtBIEjCEbr8IWIw+PF1L2gqJXBd0aoMn9WK2BN+8CP/4g72HtEkNaJatNijwtsRxfvtyToqcUjYoREwzURlXiyyolMY2Bp6umNcF1G92lk8qIBLBUdFuoylJ1kAaiKXTPe6LV3hwW3tyN5JTJxTONN7z98MR/+Pvf8stf3fDh/ci4/ltu/vpPqGlHGCemaabGxrsPR54fZPuJ33hz85b99JbjbVczjIaH+6+cn1epXeYDOWcO+x1ai6w3pUZqiTB6ns8/EmNiNwR++fGXeKvxNrHzCy2DGwrGOUqTrc9+OrKuV4EOaah1Y5ocOUuEiHWOWirvP4z/orkSKMW6bK9SN4DcQ5OlIf7/+q0UoBpYa4Ug2Js5YwRYAT/nauVc+4azEwyLnOfaZefyu3B+vvBCiiy10orryovaFaXqtcl5GR4Zo3FGhtvWGby37Oaxy9npw0GD87DTUX5W6ZELW6LWRIpZImzox6stFagloZpQIWWEIPS8LS+v6gyNxTrNfn8UGTmVUiNvP3zDdb0S00ZKibZUSnVsmyGmjZg2anUYbqltpSIROkJKz7KuQ6JtvB0wZsQ7iw9yHoedQdkJoy2OHp1QxVerSCxp5fL0wPMlIrEmO9794huGwbOSWC+LnEMFsay8mffc3R443nlagaAtOTbOl4zdiULn0w/3At24fYOaGqf7rzg9cDje8e7jR/7xH/6Jb757TxhGvj4usK5MNxvrWqkYrB+w/sjT48r91ye+PlwYpgO/uDvSakZRBK6hE8M0oDAifRyvjEZsOet25fn0QGsr1hRIE5WAqiu3u2+Zfv2RZdtQGt6+2XF+2KDM3Bwc0wyfPn1GUfnxh09AZQyWu7vAOI+UAnYY+Itf/7f89ONPnK9f4Fr6ZspjzcDX+zPaNt67Pd9+/5F//Md/4rI+U0vlD3/8zb/a1/x5NFe1cT6dZEI9yTr0fL5n3h+4vdlx2M/EmMEKXnZLJzhXxt1MSo6cimwvEG17mBw+eLR2nJcrazxxO94xTEdSHQk7QXauceG6CPnscPctl9PCkgo6KPw8slxXTssGy0bMFTcrnJ0JfsI5z8PTM+MwM4wabZJMYuyRdY0y4VCR9+/f8Kc/nTkeD6SUeHp+YNxn8iY64rie+cOP/w/z/panxwu7aeb47sinnx5YisNgsbbhp8Tz5QfWtuH00EOHV55OEeNGtPM0ZSgm83w5iaTSB8CzLZFSE1tM0hiimMcDhUgjssSfuDwVUApjMs5ZvOv47JyoWWE13MwOb4/onWyFYl6gFjKam5uPKKNIacGFJz7f37NrN2gsqcL+GCBNjM4yHTXrULmcHkWq4SCpyM4fOMffgankFtlOz0zDyHU9YUPFusbj15W4rrgQOrK54oNhtzvy5esnHh4euJxXhtEQNwmsXZcFZX7Pp5/uhRKUEk9PV67XxDgkthiJ20quEVU9Y6g4O2D1jmW9cph9hzkYJr/n+fKEZiatksWjVMaZgevzE+nSSIOl1E3kTSSKddzf3/G//K+R/+l//t/Z8kZuijAfWZbG5XwlxkhKkmFF1uRWqCpT88I0zSzbieW6ApbWHGkr5CQSu5gzMRuagVbOUmM2zxQKpQVaLD2sMTON
M3E7U4t4xoI7kotiXeVB5nzmwonn66Po943n6aHyy1+95duPf8O2rXz68iNf7k/sj45rTCgVQRVyWRC6eMUaxWAcyr0TIIdqhMEwz5ZajBj850rVjYcvK61uGAP7g2ddNd/94gPDIHLXZYtsFIxvrJcrLSdu97fUWnl8+oy1jmm357A/8PyomCZFa4nz+cp2aV1ullF6RekzTqVuSr6wXM9MkyPmQAVyuxDzid1uZHecyWuTLUnJPD1/5s3d94xTwIdCU2cuzzIp1lrhjCUuKx+/fY/VDmfkt9a/Z9SOeWeYjwbSzPs6CFJWOzSe07WgnUIVQWtbEzh//T3TJAORkhulWlIaenRA636bgtcSPaFMRdsV2ggxUNHkvHB6+EqrYEPDWaFanh4uWLXDGpEtvv9wJ01x2aBVdDOMkyOENxzK3xHSB2leqmJNG9N05Hh4yzze8HheRMpmNKVunM+PvHs3cHM3E8aRlCV/T1XxTInxfMWEqXuCVJctSX5NTomaK+MwYt1IaVXc7YDufo6YN5RS7KYbvJYQ8doLuJdA3nW7YGxjskG2t9XwUkq+bCwKK+gdSvcJcwPJHhTZSEoRbQt0imNrkGuVZtFqKoV1u5CLXFOqZ/KIf6VSiqNUmUgPuwElnRytG/i1Qq7Flkj5gtKacXeL6uAVQeYrlFFsMbOWDWUKzu14wbm3kl6lla3K5P+6nQldPi9yvoqxICRKaC1RypXzc+YS99TWUeYv4a8d7GaD5uY4sm2BuBqR/GkJN23ZoJv4VccxYJzGugWTJd+tNcnbo3tvtClQg8jZWpNGssk9vCUtWPpQGSdB+McieU3aKlIG420vyhq7aSSMniE7Yt+oivVMY5TEi6iSuP3mKNCB1LdyRhriF39pqw2rwI+KsjqhzZXMdx9/weX0n3j8krA4/uJvJob9gevlmeXhzPXrhXE38vbmwPXLE2A4DHtKydw/fSLGBW0UsR7446cfuD18ZJgmvLNcr2d2b2dqWyhJo7eAShtNOe7efIvVUMsTf/zPvyH9u8h+vOEXb78nbT/x9XTCjYlYwJTC92/f4YYj1ijWJfKnP90TQmDYe9brynKOHN/+mk/3/0yOAbTHBYurFj8ZdNHoCsoktDakrHt2VumfsUbMhVo0rTkqV7QeoXsStQlMQ0BRe9CzkaD3XIi1kGuhlUbNmlwUuucz+qAwZuqfF4vCgpKNVYwy5BGwTe4+TVEHxCjXekOIresmmZpUCeAG8QXX1ihsveGTe47ustIXkIag4B09XAylHCEEtpR7EypbzsfzVSSRSgnh0FlO16vEvxgjFOFhYJ5GidswLxu/RC2q530ltm2jFcW6nEg5opQhJ00uhbjl1+OOqZLTE8u1cG6FFCOxaMIwCRmVJk2WD/LaO5zFuYlqdpILZhVP18xpA2ME0maMZO9VP/LpOfP58YwLlnH0PD587fmyBrdZrtcr827m5v3M7duR3/zuR8wQKNryHBfW3/8RFwKfPv0EiAVmHBtpVVwuYjE4VoXSDzydvrLGiKGynlfO8x3ej9QSIGnu9o4lXmnVUsyI1jvmoyJdF8YQsFaR0sb5mvjt7/4ZlONwe4PzOx7OP3BZFkpR5KT45sO3fP3yJx6fnng6wWXZcCFxuZ5Yt4VK46/+4ld44zk/X8nXjc1rbDXkLePDyDTsebh+5sdPf8D6ARcmtiXx8JNiMgd2d45crnz9/PSv9jV/Fs2V6hNArQ3Ohy6WcMy7GecDtSpOz2dUaxLaqsRkWIoTc2BZqU3S6cMw0ZoiFoXVmTVDQbGkRFsWlq2BF71sSpFYpCj0wwzOoqph2Sp+tLgQZDKZC6VFdpMhRwlGXbaF6/ZZsMdKaEvOGy6XKwqF9RrnxAegTeDx+VlMrq11vbkiJfnnlhouXli3K6iGzwWlBVKhKARl8S2wrQrnHVRHqRYf3KsvoJJoJMpS5Fx6kbbFLWKUZdrPtCZmQQ1s15Vhkgd9oRKcp6lOU6OybFeUVgQXJGRSKYzeQ3NsSTKtfLA0pRjtDM1SkmTKaJuZ3IwulrT0/6YSRgkBr1WZ7npnwTSgsKUzmlnkBEayXK7rhnNaVvw5yzmJGmMNz6fnviH03N3dsC6Fki2lKB4ef2LcdsRtoxQFZJQu4lVDTKdDCKS1MPiE0Q3vDUENtGzZ7S2tiSl93nlK87TqqEXwpe/f3OG8JqeKbhXnHcNosWoWmlArspFQcLmKJ2q5RFp1PJ8qGI0LDqs9y/rY/QmaljOKBNVClxzllinR4EwgpUIpDW0q63piSz30ujVU22jZgJLpV85nmpVtk2TANGpZuZwzNbcepi15EdPosUqRcwRTes6IxngDTWNqIsWNdZHcEGMUh8MOpQppazhrmHd7rIZmpdEwRrFcnqhlZBiMFGMtknNjCDPNJqBgvWM0R7b4jDUWrT3nU+Q47xiGQUzU6kRJC7Vm5t2ExRLsTqAa28Z+v+N4c8P1eiXVBfQOoyw+vARKK3wITPNIGA0pehpC4fLeYh3ydWS00cz2wBgGalzRwDQK8n+eC94arJWHsvVHRvsi9ZAQ6OgtxRtaaQRveP9+T/zNB3beM04O7wxbTozTQElaNo5FgCuX8wWaxmoPXjG4gOt0OY1C6wmjnOSGaXng5LqgEQjJCwThfHl+jXhQOuPtLEtz87N8Z5x27A/za8gnqmKMxitBBsetMte/wpiAq3cC+qjy3g5uhzU7agmkLGGgL3KzWmUavNsNhOBFptWbC5RkOeUcSXlht9uheh6SBPeCbgJsyDnivO0yO+gdCyCI6Fw2SkvYnuPWmhIPhv7Zk1VaxCrXM7DosjdeJ+zbtpLSyjBMPdOpvT6TJD+tkvIiPqzeOL1sWnjJy6qFnFes8xjt0cp2W1N5PTZp+iSLxRhLo8uluu/qRZLXKDhnO5ipw3HUC01NUfqAxFiDc0IFfTmW2qfSLw3hul3xfoJmfz6/DTSG0iqlCJXz+WmhtlmOvG8HVMfDWyNIeK0StVrZIiCSstbpsC9hryJDkwZUKTk9tZW+lZTBvLMarb0AUzqNVHyNMqTSWjEOnpvjnvsvX8nZiISyaKl/q/jAvIXd7Kk1kZPkRBojAJ0XmZtc16lHmxRq7XlK3bPV2r/wnqlCLJWYHKX0cGc0N3d3PD8+8+XrmWG2hP97JOwt3jXZbFSIi+Kwfyuff+1kGxcjqlZUq6zP9/iW8W1DNUdulUpGayE3Nq1xw4SdBrYN2TZZaNWiR/H7KGshDOg2ctxrwiD2hrVGlC3UdEJXy6QbH24MZkxybcaNbDJBXzkOOzaV0LbiJsu6FbRKKONRyoOuxLjRdKMUKFWjJSAJpwXyopWlVhnQSk1QiOXE00nQ/Q0JhQ7OoVulamEy1lLFt6lUV+yIz6sk1fMjMyCh4LXKxrivisS/2rdXVI01A7lGAbLU8vrZodH/jPydHoAs/kaBTkgoco9KKSKbjDGDlmu+AefLhdK3e10v+XpvUoiPTSE+SsnWlObq4f6M7Z5gYzTO0nPUbCc4iqfaGoWxO4yVWAhrKrlUtIoonXo8QKE0Ry5
FGkYFuiq0Lh1YInCrVLaXu1WXPAbxsGkFSe5hPkj2YDMKurdTsvwMpRXWpfF4XXh6ikBjGAzpFFm3zMP5wpfnwo9fNu7vC74/JzUJ1c7sdwPreqa1jDOqZx86ahaYz+OpoHXjdNnoemyW9cyXrwO3dwIdSTlBm/Bu6lCShVIqP/14ZRcmnBV5Y2uKy2Wh1og2lZKv2EEiL1owAvXKlcvpnpIXkTi6Ee891+0RZxzNNnLdWM4RFQJej1iXRarfPK02YkxyTSaFURPBBZzS5HWhqMpoJyHbqoHV/fy8+P/79WfSXAl8Qcg2QaZ5OIwZurG3YZynxiKSMA0xKZn6eDFya20ZwoQxE9ctkksSookaMG6ktMaybaxZo1Mlbi8YzcaWC5ftgrM7Gpq4JXCm+zU0qmpQC9oa8ipm9toyWzqzY8Yoj9EOayynyz1DEJSjtop1izRlOV9ONATLXPsgNiYFzWLtRCoRlGFdE9t2kklkLmgdKQ1yCrRqGX1gWzy1GJRTeN+4pvOrgbqU+lqs55Kl+NwdOcw3bPGZrV1BQ0kZXT3QaLXLvthQphJzZEub5DDZndz8GjRGUDIRtl4xBEdpBact2xZJaaWUiCqK43BH3OSGHeMV1RTOjx2VK0WEdUq8cU1QxUkHlLZ9KlVpXRdvUL2xSLRm8GEgPt2TYsFOEuq5rhutapwJlHYVSELNMqduHQPsHWGUDI1hCCyXyPEwULsH3nvNcs0cjjMpRq7XC0MI1DZj1ERKjWVZePP2yBaX12ZHqca8k/yylAq1FabdiAJqiRKI3DaMFjCAMhZvAkZpUr5i1E58ZqqhQyUvPVtKN7QpeOvAVqiJmmWLcl2eoQXJ0FEarzOlaJkwlopKGZUVNSWMadielZRiJJhBpptDoCXLvJMifosScm20wVj/GmQZuiF6i8+9uFHcHkeen84iD3OO43yg5Mp2fURjcdoQ0xlNYfCeSu0ZbRo7OrRTGC1AlKAHgk8EN+DsRNArg9MEY9Ct4bVm8JqCwvlBNrdqQBsJWL253XM8ztKc6A2ld1jrmVRlXS8o1QhhYJ73jIOhZZHQhCANQC4L3jZoSqh/w4BBczllwuAZp8A0Bm5u3rCtG61mjPZ4M6AD5FQxSkAUeXI8bZGqM7tR8/7tjvsvhXmU4N1aJbhToWhFPodbitjQiOsqxYKJGN0YBt/ppV6KIOPZtgUfRJKDKpRNg5LipDWDbgM5f0Z7IU9qLYHLWkGuCylXUm7s5j3D5MXDEyulyarCWAUpUNKMz9/jXL+OW6FScTZQq5UpY9E974e+DWld55/wg2SbSYMhRbtWhlQzKW1s8crxeCf/H8llohe9Ma2kvPYMEscrYAJp4lLMpLL1YN/QGzt5lsjSSskmSWWMCVjjXpsLELkRrbFtV/E+KS1b8N5ciafnxU8lchqjjYS9ttqnxbp7SgoxrV2m7LuZW17vy7HXmiXIWOmOJQeoHWrx0sjJPU9bjfeStyVeJHrWlXr9PlorrAnSeL14o2rFGk3Jcn5L3jBaKKMvHiuJatB9iy20tmXpgBp+BhRI01NoRvXBjKKVF0iBdEq19HMNKK37ta160yUbxlLEY1lrxhjwwWKNI+crLz4eOReaWpNgtofAMAbWtUg0ipbivDTQRf5dOfEtlyI+SKWUgD1E3PV6HI1MTFe2TXfQglxrWmmqkoa0tYpSiS3J9v8llDflwuF45Hq9cLlc+eMfLlg78e3fThyOG85XruvKqVjG4UCjsMYNY4V86a1HUanpwuQ9jozm/6XuPZotyc5zvWfZdNscX9XV3exuAiDIC5K6A0kT/XpFKO5QEhVxRVAACdO2zHHbpFlWgy/PaWqCMVST6oiqPrVNZq7PvO/zJglXLpkQxP+mtFw31qvX87eqSqmwuerRVlM14BSm9OyaFucKtWh6WwnWkMczuXgaa7m6bMgqsMyFxmZqUzD1mX07EG0GnVE2knKmLGdyLSQt9+YUElqXdQDyEm6u0EbAQgppyLX6+T6JeSEmsWCUWqi64LVDozDO/kwjdKyftV6b2kQKmlwmXkKtSyxUJbJubbSEMtcsMImq0UjQfAlpJTtLQ61W328p0jwVKsaKhFWw9Ou/uQIqSs6S55Yy8xKp+udnV0oS/v4ia3zB2q83tWyuc6am/JrFpY1iWRahMmolvjJk09R27me8e0k0rcPZDmMsKAHGQCVm2bprozBKiQ88S2ZeXT1uOeV1QCSkxlp+rqFLqWSFNE7rYMjZBqMcCgGN1LxCNoCCAD+WGBjnwBTEr0hynMdAzpbznHk6TlgtwwnnlDSHBsnS05WwSNPpnCGOhaFHaMG6MidpepdFi3/PydTl8WnEtxZnFfM8o4rFtZZcA6UE5vnA0+NIvbnFOQuq4pxlnhasFQhUSTON6tn1PX3jhSQ9a5bxSNd4+n5L14niJ8Yjrmup7RpJECTaqOs62fguZ1JU8swqCyks1Kzpmx3OADWRQsBZR99t12BszaZR/KVffxXNlZjf3esELBfN0+GAHc9ShFuDspqiAiGW9QDTlCq4U6U8m37Dm9vPeX5+xmiRGGojwWjGNNQVTW5zparMaTytKMgGbS+YQmZcHuUirIblkNluOlTpyBhcpyRPI1eMc3S+Ywkz53Ngu3NYb6gYzuNMqS25KpESusw0LzTdRjDTRqN1Yl4O1CqAiW4YKKWj7zrO52fG6UDXK7Qy9N0FpcA0ZdpW0bUbasoseZ0ubz3jucGwZlaUROEsuvic0KoI4t61HA8PnA4nTPXs99s1A2Km1Mx211GUIaSIMaIp1sqgqsU6ofLEAE3nYMmgWkrZkNKJp9N7ObhKlRT43HBzecuZEzks5LigO7l5jNVYB2GZOZ0XQlmwrmG3uUFbx7BtOZwCOcJ21zP4GzrzE2HxLAsc4lFkBVbTup67q3dk8whlROlK2zmurr5gWk6cTqOECDrPfD7jr67Z7h3oxHk8490Vm2FHTpqSFb6Fx/tn9hcDMWROpzMxTbTNFc411GKYl8pmlxhH2S7FULj/cKLVd1zfblAmksuRmCNhVlxd3El4bDpyfjpSckDVlmw0rpto8cSlYpRmu9nQ946PP460nacfLP2gad0ljw9P1AixFs7nwOH4yNXF51Clse36LVq3HI9HWKC1HqtnqspYHXDW4O0e23dsd+1rkWo6j2vrqxwoZ5mMGm3lu6yJfg8X+zccxwPTtDBPq2Y+RbzTbFvFrtekpeX983u0jvjLLV+8/Ua2eyZzHo+kudDvdmgUQ7+hbRS1BN4/PtN2nsZ3NK7BX0im25KKbI1j4WK4o9gz5+NIKBPDpWe7Hxi2Nyu2XtM6z5vbPV73NL7FmoH/+7ffUUqhbXr2mys2G8fgK10rB1ZKhafnwH5/g9UNRhmMVsQws70ZuLy6whjHTz++p+9abM3yWbp1kp8UcRyxTcfV1S25nDj/9Eg3wMW+pWHHm5tn+kYzh8TxFLFY5ulETULMan1iCRN93xCWkVpPWFfwbkutWhpOd4n2gT99+6+YecZ5S6mG8zEwbB0pzZQy0/gtw75Blz0yH5
5Y4hGtFPMyEUKiVItvB47nkxxWjaVkzek04htFr/6Gwf5aNvm05DQjhE1L11zzu3//ExcXlrdvLzDOEMKMVV6ajRxZ4pmu7wQKhKYggyyr/OrPmRmn82uD8MJ2o4p8JywTyzLSao0xzUpDe/FxKFKKpDxLqCi9gIpqQimZjmptSemA0VUogMZTRPW8bljk3JnmM97JdeeslwaySNGipashpDPGGYzxa0EEL16SWispR07jYd00CdWw1kxdw2pzzSxxZpqODJsOqkGb/9RwrmGvKUZCWHBUajXSaOn62nCKdySS07LGXjRrkVrXz6fiXcvz+EQMM05rNsNeYkMo61aniiw8R3KdVq/xHm3t66bupYCstYiMCdjs9muTv/phVkiChH5Gmk5xfdNyeJ6oWJQqKFMQrIVsdhsvWTTGeFQVEubaK6OUbKSck4iKaZlez1rZgFRQFZ2zkBqVwjeZea6Aef0+ZLPwUvAUiTvRmVosSEKbFPhotFZkEiVnjI2ElMho6uqWyyga67m+ukJrzfv3T1xd9Tz96TPS3SfM/kcOy/f07msGKjFNnM/P7HZXeG9p+j2NcaTUkOckaoW2wdiGUzowjhM3t5dobZnmKDl/LgrAK1VOx8gXb+9w1uF0YGhm2m5DLIqgMugNrd2xW3pBYRuFthZtNePhI3WZGVxD37YcpzONqXS9Y5kWju9/YBgs4fGJ8yyy/rZtiWwx1r6GWpcgvlI5/8Vn/vH+A5fXdzhj5btVLd7VFWUum6nTFFDJU+1BwBHF8v48A8sqAdTkPKNwNN7ijF0JyR6lwTpWnLtInxsn/iq9hmVv+wucc2irVyrogvNeBgspEqJICVNZ4RqrDPR4PFPKz1mNMUf6Tc88F+ZFACJttyGlDOvG6xU4gmyjcxVir1Eag0ZXBUmu3/N0kuZIO4gNlTOpCrBF5Ioiz7POYKxGG4WrCd+Ir9cYcN6RSpTPqCJbxFwxK1lPlbr6xaBtRMosL06GMrYRWWtMkf2uwypLTIlU5Fpvm55pltyutMZStM7zAviptdJ1vYQ7GyNP3JWcaFu/5pVZnJPBXNdKzlXjHafDmVJnqlG4RmJG5nmmjmsEjoE3b3f88Y9/5vkAViuWaeHbxw80vmPYNChdeH4+sB1uuVcHrFPYNVdKV4tfn8ElZEwy3O7fYp0AQ6Zp5OF+5MsvvmS7H8g18i//8gGvrthetGyGlq5pOT/PTGNku+nZbFsqF/zu3/+MHxxViTT2eEhs+p5cJ2KIpAV23Y6L7YYP7z9yfJq52N/8xb7mr6K5kjDRSEpyI8gENeE7h3Uahcg9clFChFNJcNkseLehcT1a9fzxT99R1Qg6kWvifAo4t1CrSLiU0lxcDoSwYLUjxMTh+UzTtdRiWVbDo/VyUT89J5Q6S74GVTw7vtC2lkzzenCdzifOEyjVYn0hxhMVTcyVeJgZ55nNsMcUT54KMT9C2QjVsEl0rZeJHxpnG/p2Q9c4lpqBC3I5k8p7BnNNGBXkitGJXAL37xuM3a2HXyCnxM31Dc5siSkxjk+czp94fjqDEqxkSpDywrJIIdK2BtRIXjaMy4i2lr7f8XT/TLc5kue8kr6emZ4Qsk6FlDRdu2XMllwfKVXwovOYyerfRELWtnz1+T/y6fFbliXTWUfTWqbxQKnrNsl3eNPRmBbXKjSN4LtNwQNdtwVvKENDvnU8TT9A2GN0y3bfskSNpuXiQjLScmzRzU94d0WMCsh8/vnnfP75rxnnB87jM842OLsDPM5rQjzw08d/oW2uodyxHTr2u8w0HzidRprG0bY9zrZ89+17Li4HDodHSpr55usrSvQY84GUEzlaWtfjB7BqQ0qRw0lzfn7im6++4jQFztMEyTA9XdIOlaZrcL4h1yMXlx2qGgweUwe++eWO9lvFeTuLLrtqTqdrTofKOM5gDdcXF3Rtw8ND5XyyaAb+4R9ueP/+PfOY0Tgur7bcf3rmcvOGaX7i8fgnrq63lLwhpwlvLZe3tyid+OG7H2maluubK7748pYfv39gOsiWo2scX3z+jrfXV8zzDEoz+I6nceLuzQ5nFPvtwJef37JMgfP5jAM617DdemqdqDkTF2lQSR7MGAAAIABJREFUv/zqlsfHR5SOuLbBuoY87jA6st21dN0t9x8P2Maza7xs9ZwlxYmryz3OGWoufP7mjrvPbji9hg1X+t/8M//d/W9cbgauNxvm8cT15gpjJJRS6UyJE1e7q7Uhc0LdMgajtHjmaubzzz5nmZ7o20ve3N1wdbXjhx/+xG73luP5SFwyTrV888uv+B/+6TevYZbTeaZ9t+Xq8galNoxnxbc//cCnh98TkGBKZRRffvn31HzJ0+ETp9NHUjmxzCOn0xHvT1xdVn759dd8/DSg9EIqgVQKl9c3LOEMVjxN43ym32rm5ZmyTm61dkz5SCoWZT1dYzgvMzmBrRW1UrA2+x1hScQshYnVDYWJkGbaZsubm2/47W9/z/F05Ob2DZttLx4ILfjdnCIxzMzLM0ZfotWKjM6BWiCrxDSfWcLEzc0dbSvSTl4msLrIFq0ECpGYnGwCEJO90mCN5f50lGwz43FuKxlHWqTeGsHWL8soGwHTorVQ9V62Pi8SpBhHQdEbaYpe4ghQsrWSDf7E1eYOa5q1eQxY416n6mGZOZ/PXF7cruQzmWzLvt2Q0kwIEyEtbNWFbP5W/8irXCgllnlimUeadot3HWUNzdVKhogay7JMTOMZtHjNeMHPA1CJa26Q956h69cNTSLnssoQoSAN2rDxDNs74uQZxwCrH0XpKq+9JvaXW7YXHqMLy5JRyq9Fr7y/WgR1rU0klsiyVGoVMI6moq0lJTnPZSuleXo8Sg7VGlBey0uQeMGYjG8KxjRMUwYrHrkQA1ZZVFJolSi5ch4T0yj/PzqJf03L6wtzxBgYhpa+r8SpI5efN4Y1F/nssnjnLi48wnBrKfhXGWpMia7rKSXz9PTAH//wUahlP91Q7gvq7QM3N540P1PiGWtmPn14xLuWU/ssFMw0r9TMlrksOG1wttJ3mpoCS5yZp8DdZ29QumecR3KaudgYpvOybjOh6sS3H460XeZ5PFCqpm23XA0d7+5uGOeFMSRiNbT7O87hR2pWWK252u85nR/57odEippt39LvMu8/BlTN6BxZHr/l8vq/cJifCeFELYnL3TecF6mVUHKPfP5Fzzh/YJwSXTPwt9/8knHK9MOOlAvzsmBMpuYZ796JhC1OjOeJfpvIyVCyxtnKMAyM50AMmZQq52WW+2Mt5pWqst2IJ1QUSal1HV1rOcwTuf7ssyuHkRTzSrE0pFrRqogKY430uL3ZolYgSkpCpqVqnEkMXX714QESm1PlnpFcxzUOvA6v0udaxIflnAQQd12DsQarwdeAc6LEslZhrEQHLIvcjwqD0T0pi9z3xVM2LxPWNvIMUXqNoVBoIxvhUork0ykl0ucsDaSqipQm8RgDRivmw0lep+AScdZySkJl1bVga5bswJIZJJtGbAaq0vcN3nvapmFoxffadeJLzrlirScXOJ/PgnEPg
c3OYPSFvDcsGsfN9Ru+ePdLchZ7iG00t7f/CGVVJOSZ7374PSXBMAxYq9nsjnjviXMgzjOhLLghU5aMVtJQLtPIdx+/pe8bYk0oq9lt93gH3377Z5bfFYwy/NM//JIPH39A5R4dPErB9eUAW422Et8QcuGXX/+KYXMtcRwUHg4/kJYZ31iRtmehL8d05O7twNVNS993f7Gv+atorkQv2kMNTGH8eTVboWZAr4WmraAi6IprPOdzxteGacmk+EzVEW3hcJiIKeCcxrsK1aGx5KR5uJ+wNmNdS1GKeZz49MNv2QwXeLvF2haDYVlOxHiP0XlNHO8lPDCLCdFoC0Riynit11W5BGe6RtLrc4aaHSmehMdvWoxq8M2GeYoSOlsrx+PMm5vPuL9/QuPYtG8Yhpb/+PCv7C4d1kFbt4QZbt/c8fD4yDRFlLb0W8U4PeGUxxpHzonjUR6OShkxXVcw1sn63YjEcsng/BbvHU2jaTrN/WjYbm6oqgjqeploB41R3epJmKlkajEsIbDUQK0BZ3qmk4QaOlvoBotvBko4s+SJD09/5ngK5DKibIcrO7y7wuiFfhBt8cP9J7abic2mI5dIzJFaFN4num5DDrKCbgbF0wiXF3vJcHKJmq/Q7YGSI8s843zkavNrPi5PXF5uuLy8Zj4H3t//B5QW1IBvGp6eH2jtLUqtcI68pdYdoWRKHLE50TQScnd784auHUROGv9EOA/U5PE2sd9ZdHrD+/tHunbP3c07fvzwPW23Y9tf0bQNbz/7jNb9G62+w/uRtj2hqmGjFZc3Oy6ubmiaLf/7//nfeHN3TcmTHBa58Px4xusGOs1mKOyu9hyervj97/7IZttxffuWcZxx1nN5/Y5SMvOYudpdMHSRnDRUj3eKzkPfSc7X7Y1MqHKEu8sLtNLM88K7d3e8udhjjce7lsY4XHlmPwzoraYfOq5217S2lXgBY+mHDcZ8x9X1gC4i5WtMoKjC9e6Ki00glhPXN3uc6/jw/hMhBO4+u2WeF3bDViSCzlKsaPSNNjS+oWl6Znfg+voG7USetIRMohLmiTQpnHJcbneoXGicIpZMjAtDJyHKKS6kcIIy44zEFcSSMFax3+xpG7cWirIBDlGAH9YZ9tsd7+6+IMwf2AwD3nZoPL/4umcOE95bSiqoUnl6mLi+czReyFpt0/N4f808SmacMYqr7Z7Wv6NpBkAzpwXf7FBc0O8M51Hz8JhYponP9p+hlQcyf/zutwLv2AxUFTidD/RDS18N4zhyPs/EsoC29FvPMkfG6UTNCcMFeb1/KQJ6KVRCqigtgawPp3tuyn+lybfkIuCLykI/XLPtb9Gq4/7xA13X0XY9WltijlAKxgr5bJmXdfrcrcCZF7nZyxYjkXKg65wY1qsgkbXWGK1JUailojq4lNbhZfuSM1V7QppF7ugarG3Wgmj9exJ8Qwiy3bNW8oOEGLZCM2peZYOVvt8Ipp3VlFZFFpeSEMVyzhjTrMG0ep2ei28nhCSQIBRd11NLJq9ZW7UqtKrEuFBKwjlH22xWr5YUcJKr45jniVIzbdeyGS5Fir56SXKWszDF/Jq/Zo1ECLyumV7PUcUyzxileXv3DnQDNa8+JLVK46TIiaESZgfFrz8mi/dk/VmlVEGtl8T94xOVLcK6f9k2yYZDr2TeEBJhsSvyuogULBXxAbIGyOrCfJa4h3UJuRqksuTB2QI1cH8fyKu/Sv8n31nOGWPFx1FrZVkMVheUslTUuh1VqzdOGpe+Wip+lX5n0OX1365lBbiojMaTi2xV9OoZy0WzfqNoIMeF77975u27nourt4SfDPXiRGM7qtb43HCxMTw9zeQ1UNZ7zzg+YbTg+HOS4v768hLtZeKvneVwODAMG2oSCXsoZ9puABRLjJxDpN/vaP2M0ZmS5fliusRPH76n63Z0TcNSF0y13Fy/oyjxQLW+w0yG/U0j8s6SMPZA/uItORts4+iuGu4/jvyi3aGqeCKlBoDNrkUbxxIyc/AMmw39dofvNmjtOYwjU3ggBOisZzNci6QdS0GR2VFLYX/VcD5PLEuibRs5i/lE6QPGKC7TFSEILZZVQnh8PtO4VmTUJZHnZz4eRyG1rr6sWqAk8b2yhtUK3IL/T9ZX03qWZSZnaTCs8et1rn6W2OYsviUFVYstpJS6+sFEamqMRPawPgdEGrhez6WgjWWzv8NYTd9b3OrF0ohksCR5/uacBHazShapEmxesng8ZR8tId3jnNaGU6IN8rrJfvHMppjkflQyRCilkJPEzuj1xS0hoZKWhnTNAzNGnr0FRM6oJCZimhPnMUA9YTAoA21jcdaIPK7v0FT2rafZ7cRn1yqWJWGNeG2l0xXllNL9qkyoxLqgql+VagPf/FKiQl5IkrtaUFhyBEVZt88zIcmiAwrNRaJdRvk+5oUwz7z/8ZFhs3ldkihV+Lf//u844+jamWIy06hwTvH89PxKxu17z25/yfdPf6JWyUkd+gseHj6ssQMWqsU4BTpglEFZw/nw/wPPVS2VuCRSkofGi+lYgiKhUqjW0viesB6+4jWsnMcjOcs6uus6Md5WmX7qqil1AxgBCFQ4nkZc06KMwaJofU/rB6HAGLsGZjaEJdB5I2FkqaCNp2nUmkSuJRxWJ5YwU0tD0za0nYciRbE1Bo0npsLQbvBeoAg5iW/KIIbbXAvONDIJzIVCJsXAPBasLdSyUJWgtbWpnKcjOU9AFEMsIymdKTmQzYv/oLIsE9qIb8W7jnmSE0PrSjVQlKNqTSpQQyaphZAaeu3XBiowDD26GvHylArKUZUEkMoouUgR02wxyqJMwRm7FoMORSSnwHG8p2JEu5oSISSM6qh15iXpPaXIPAt+u9SK8Ya26whjIGSZ9swpcj4HCpmh2+K9Bw0GyxRmYpY8pRQTcW5QsDbYjo+nA6EcMHUNpnYGhRPZUzmIVwwvXh6EdpRUom8G7o8LXZOgRkpN9NuGFGSabOwARTOOR1T1GNVjlEjQYhbaUS2Opttwvb9B5w7XKvrs0bWDuNDuWhqvUWTe3nzJbtuCWsg5yiZhCjgnhYtWhs471E7x+Rc7nNNs9zs+fZQJW9MgDa8N7DeeqvfUashRM00nbm73WNWRkqJtKyUVlnMWTxbii/js8hJt1lynKGbab9694+HwRCXTdx0Xuw3sevEwaIuxnhSeaNsBXQVM0A2GD8sTm77DNQPa9ewuBlIwjIeJWVl666hqAi+hvzVVNpuemmWCbozC1JlNa9n1DVhYQmA8RZQxqJxJIZNyorvomKdR6FQlUVKQQOEqD2mrFc2wbqd0XTdLctCWlPGdXwvrQggB10R06alZQAbeb5jnQECMyM5ZQdbHtTil8unhiZgVrvGSHaIdyzKzZJlcUrXkt7kGrRwVjbMvWUcLxmrabqCdtuQys9luAEdYIqflkf3lDahKiAWtLdoqwiIBtKnOQtrMDpJQMOdlIpWJvulJZSaVyJJW07WXJrNm2bq18R1eXWFVR9GZmAJDt6fvrnBuYAkz0zxys3mLd82rZE2hVolcIqSFpnEixV49
SQItEBpcLnLw1eqlQVhx2FIYW2KQKAalWGWFdSX4rRK5AjmLQVruB7lupGL4OT425SjeD23/k7ld/AG5SKiveIFW6uj6T7wclzkXck5432C0F+DMfzqzFGt2VBK6qnNuDcNVL/rDNWtrEWy7NljbvAaDvjQx4vWIq6/AYU3z8/uovKo2YgxUCs4aQUUrQyW+etG00pRUmJeRmhe6ZoPRMnHnxaCvVh+U0Tincc4zLev39yqnE9WfhPRm1AuueqU5KqAqCXEFRAKoMrVmShGSmVJ1BbBJJpLRGmc1xmqWpYiEUq+lo5agYOsMbWvxXnN4nl9fj9SclVwk4qRvoWkEYFGyIN+pZkW+yPuUIjiia2CZlUQbvPT3sAIQ5PMwRqILxlGy/F6kowotW45c0EpzeTmQQuFwnGgeNc61aLXh6ftEd5PEG1Us7dCz2W2Yw0Qqst1Aa8Y50DQ9xjuOhyf08ciSxXOqkKn4HBZqyXLNmo64esIlwkJyuHKONE0v1ggl26+X51VWlUzEakOKQUJavcE0LWp0bDdyTcScyLGyv+xkgAfkJPI733g0Hh0NJS/4GtGqkMLIeJhwbsu+vWA7bFBa8fj4SN8KMKZrHY1vcS5Sqgy0lNbYpqXxG7Sx2L4ym5EQJrpmj74YRAFUIyk4Ztui7QZj5b71/oBBY7XBKKgpMedOtjVao40hhihiTlEPU4t4SI3Qz9YGyNI0nhgXai0rLVCTYlw3UrKRSSmTVqltLpVU6kpZNpT12lhRM+vGlZUkrJnziDGamj1HM69DOgmhN1p+b1tPSZBTIiUBquVcXv1QAoApK/FU3pS1hrp6PLVW0qwpoTJq95LZJx7KF3tGyuLBtlbuiZTLKg+GmNP63FjPyaJXeufL303rAEn8ZrFG+r4hx7QG32v5XSta52i9W/PDQHuLWz1mpeRVdVBki+gsDo1OCWc8VAG6GSNE1ZjS6iurWNORSnn1Rdba0ZQeVFnl32Bf5Js5kJeR0+kRbTy+TdQicTUpLqBa5qWCSlStOJ0CDw9nnDP0fUOpmuNx4XA8U2rCWsV8Xjg8HqQ3sLJYKFOSfFXnJBM1pL/Y1/x1NFe1Mk1hfajJQYGSwk8yMjTZaHZNjyJRsgS1KV04HY+UorC2p1cbYhTUp7ZCXypFcM5Nr7BWcZpkhViVHM5d2+PdlyK90A6rHcZ4vO7Z7bbM48Q0LTjboHTGmEqIC/M80g1Kiq1sRWa03UONxCVgG4+xPSWOXN7saNuOsCjGU6ZpFK3f8vT0iVIC/WbLsohMJOXIOC+cpkjbifm1VClgnc88PH7AmIwyiWVZaFMhp0wsiaQXvJMcFaUWQB5AzvSMp4DShqqEQGdVS1YSXJmmgJonarnAJ5mWlVLZ7XYssyLmsK6iG2IJOC1mV20MIU20BpwV2YBRhhgt2hqojpIN43lm2FlyssRUWeZIoxJLOEvTtZLbQhqZ547qLa5t2F1ueH965DBO5BRYlsDp+ZHG9xjrcV6IaLrThOgpWqY3aSk8zxPKgVKZeT7x8PBEu9GkpCTQzmWs6TDGkaJMpLrO4jD41cgqE2TL81MGnpnDRNMrLq4veP74hKbFKMV8Ljw+vpe1tmpIS+Lu5o4f339imQ/UuqDMFfvNNaRIUxuK7mn9HuKBqCS7JsYTf/eLX7CEEW0KFQkVfX56pO8UNVtqMehc6ZrK3359DUpLGOe24Jwh5YmYEm2zsO0txglmOSyZGM5cXF1RgqfWLXBJChPL8cA0ngDYDhd8dn3BNB+Y55lYCkY7vvz6G376+C0hzrRNw6br6TqPNopSFSFW0rJjt7uWiVQR9PPp+cjlZcdm29L0FW0Mjw9ndpsNrWuxpdAocC0sIVHQXO56coirDySQ88TlztG1ikIlhkKYZ9nc9p7jPDKNI2x2LGVe5QYJVcV3CAIkaBsh9pWo6HrDKS+cDgvGeFJYcNsOrTXLnIkhYxrZTJ/PgU/3H2l9z/PjJ4xVDP1A4zourhrmObDMCari4fmR5+dI03U0nSTOz/MjuAFFB8UwzU8Mg+U8juSiUFZjfGUJAeusHHyqRTsw3qCVpyqNR3NxteH5cCYHMK6hEpiWhSVL9INvLDkb5nkhLAsxReYwYu0TsSyEKNKgtvM0fYspkKNBZ881v8EpLwWzhlI1281ntG5PKZXzdE+MmbYdJHC3yoGtXxDlOZLSRNN6tHHiBcp5LWYkxDqnQKmBXAT+oBSUGl8hEkv8mc4nzwbWxuhncENKM8pKoSBQhBXio9aCYd2OKeVfGzAx0ws2vKQkwJucUMqum5EVmbw2ES/ZO23bryoFaexkmAC8NFcl0bYiC6prI/gau7XmYZWc0UYy5KTgeGlMViN6EXKm+HEFMlTKS9CvbG1CDOgVctK1wwq8YP05sm1NSyKnQE4L0+LZNebn91QFbhKWgnWGrrN0neXwFMRb+/KCUOtEF2muFBijCWHNKaPK5k1J06Z0Qhm5x+rasLxO9NfPzVlp5qzVhKWuEqfV+7Y2Uc5pus7RtoaPH0ZQL8G18h5LgZISds3+ketGi9xRA1XLVmptnlGRWgPj2ZAz68+T3wXBLnRJkWxp5km2r9pI8abWAjdnyZl7e7cjxsz/8/t77u/PoAo3N5qPf+zY+hO6izLMaTv2+xvS8ZH5fGSaI33TMp1m2r5l2O94PB748PSIP880jaNrHK3fkcYRaxTeSQTC4+lBpGkVdKnUPDPNExf7W6wXOVR6ylhtmJeZHMQfqJzneHymUPFdR9dfEE6BaivWe6xvOccF3xfqdGY5jTx+OnG9v2VKMsiMUaHMBZWRh+PE6XjmfJy4udoQQsd0zKR45tOPP3H3xZfY5Oj7hs3gSOVILIYazuii6O2OXd9xOGQ6XTAuMj890rbX7HaOlDXTEpmLxRjPZn+D7waUNvTDkThNNM7SWrvSeQ0hJrQ2GGOY51k8sKuHSKtCJuObBgFagbEW7xySt6dW2mpgGWepnbI885clcp4WYsrEWJgWady8sbzkc+WcVyXzSvTNiZoVS444aykxM85RwmytlaxGDV1n2A79CjUqlFSZsjyLSpZrPIQkm6Tyczi0tZrdZvN6ryfraBp55hhtX/1oMcyvfklXFWDRupehV4z4RkBq8yIgrpwrKhZiinJmFnkelVQkH7RtcE48lPuLLbUUmsbTdR2Nb9aFhPS0MSdYQFu3DpeQTDntCDHIdlongStV8F5BUSy1IsC0jFEFdKGmFZykwrqxr+JXswNKl3VIojHO4/yW1iQ0C1dpZJ4WSn3ZSs+EcCaHwul0ppJwHrGt0DG0A8O2wzWGh/sTS5SNdaqB8/E7whixm41sqrQouVJwOKdQVnLR/tKvv4rmCqBrWlCVGAPno8hdChXvDM42nE4T3/7ptyglhjprNdYbrna3VCCkzDw/Mi3PoLRMjW3C+ZmHhwdgJ1NYv+f58RPDRkJNlyWz3Ta05gqlLbkuLOk7hu1WtlgbuchCKOTY4RrQ2mN0w3azp29HQpAGcBoVuUb6YY8zHRSFNpUlnkCJsdZ6RVULae7wrqViWBYxGHa
dQ8XCvEwsITKOmv1eo+1CKUeOzz2duwVmYjowTieu1ZcM/RGtA1qLGTYvlcb1tF1D01l++PEHvvjya5ZwYJyeGccTx7Bwc/N2NWgeabTBtAtzMDK9iSOpTFAd3nu27Yaua/jXf/uW3faSGE6kNNF1Wz7e/8AwNBirCHkhLJWb/VvGeaIkzfXFHU2r+fHDPcUkHJUUz8zLiVJ2NE2HbxoO05FGAcVRZnj48Im26/jxh0cJxCXiuOA8PdC7QE2GGM8Y32DNQE6anB3OZuZ4YL9xaAPzMrPdb7m6fMOnhz/z8PQHTuczu2HPdvNMzhN9X/n7//KG/+O/PdP6huurz9hubvjuz/d883XBNRWlM6lmdsNbbnd/w8efPnL/8Z5lCuz3e4ZuK9Nnl2lajyoeYyJGLeT4xOe3d6g08el5ZM6Vz982xNPAVDJVO7wbuNhfMy9HjoeJ4zEw5xN3tw23129YRkNYFJeXhsPpxBImStYo1XLxbs/VtePp4cjxOJDjls42GBdQuoGu5fbmDbuLLR9+useaysWuYRxnLvsvWRaRYbbNgNaG77575O2bK65vrihZkLp/t/kb2sbR947jSTIeYlLECJbKL778QgiZcWGcI+fTzO2bgb/7u89w3vD0/IkPPz1QauXyqoHsiedn/MbRtjtc0+JbT9uJ/4X8Ehga8C7IRDJZGjegs+P7Hz/yt7/+FR8+/MQP8XvubrYcYmE6j6SQUFpxc32FMZq+9QytJy6BZXqi6eXeuGTHzdUdh/FbGp/wrsGbDT/McuCFcCCoM7bZ8PGT4eqmAzVxik8UfcX73z3K9iFBWBQ0M95cMoYTz+ET/WAhtByef6LpDNtNR2kh2ULKsCyZZYxsvecUj5AMMSTe//iB3b4j6xHXKjaDpy1XTNMR4xW7dsDagQ8fv2V/49ilG9LSMU8JbSesl4yhXAzHQ8fheKTtLG1jGHNmmc80beay/CPb8iv0KPj8WhO5CK3qszffYLlGgtEnQgiUDMPQ4bxb3T4JhaNkBLaQjlhV0UbkYCKzk+Z2TGdiHlEq0jbdK0DBaI3SQgyd5xO5JLxtcK4hl4zWSmr2UglZ8PO2GKjia+IVZCFNVIyzIHuVlWGZXjd0ohykrtPQUjIKMeZLQ1tW39EaaVBFliuSNtYiXlFqImdLCBM5T7SdxehGhle1rM2Jo6aEs5riHRWLc41sZ1ckvF4lTfN8JOaJtm0Yhj25xldsPLUigcOZthVSruDVf55u11qF8BYDNzd7lN6ilF8ld4KNLiXjvSbmSOMT6EAuE9QOpTOqqnV6L1u73dZgbKSUDEWiKcCszU6m1si8JHZ9wTrZPtVqJWOoJNk2KkMMBefLihc31OKQ6C29bu0yWlmMXlBkSoK4yJ+vK4JXGWYtGY0ihcjDuJDyNcbKRjGXTCGiqhUpJIFUJlJs8dbIUqNKrpU2lhAjSmW0ScSUqKVbG9Es2V1V4Y0l5AWINK3CNZGmlaD5h8eZm9trsh7RDSxl5OHTR3KcqVkxLbKdKTWzzFZUEkrkkZvtQCgL3/30AUpmv+u53CnCNOK1Zr/f8u7LHZ+ePq7RFw7rWhKFnAyGFlUNcZlIxXKanojLM5Bx7RUPfGTTS8pJCoUP3/+ZczmSF8tV67lst/z5D59wfUfKB3xb+NXVO7a3d3y8/xPPz2eyhr/96r+Sy8j/9S//SvWev/vn3/Dh/X/wON/zeEqoHPCdYVpmcln49Gnmw/vKfv+G23eX6FSIcSSc7rk/Llzuf0Gm0hrNZ7/6mtPpyDwtxPlMzTP9xYZUCm0/r9eUoZaJfuspNTLmEYhcNd+gVBKKslIka6QG0BI6r3Tlp/c/YJb51UNbpoi1lbax1JrJKa5bWRngoxS2aRjnSNt22JgxSrzKjW1XAp9sT1PMzDHyEnCMUpxOJ/q2fQWqZGXYbC2bYYf3Du8EGtK4Dm0z1mq886vnNPOSu1VKWTP5yro9l81+zYWXWItlDhgjdelLzhVA03QY47HW4ZzBewH9yOarME2TSBJLISyRGEUW//T0hLOeFwIqKFrbia1Ea7HjULHeEVNeG1tLoRBylIgkNAYjg/4UURi5r20rw8GUmUNkrEcciimfkFxDefYbC05bvDaoToZiOYw4pTBNK95KBdN8WjNk5Tk4x48ctEZbh/cdw/CGigQpowp705JS4XYdFGmlIcvZAYl5OfLDj39gjJlhuGG3axk2hric+PaP33GOhawTFxcbum3Ht9/9JFAPr0l1/Is9zV9Fc1VK4eHhUTp1pXC2J83SoRorpszdZkARabpC2yuML3z35/vVMNrjfUuKcDm8I9WDTCXzjqvNjrrIBCpOE0Pfc7n/gsPpgSktaCq5zMwxcbl7C6owjxOhBHRnmUQWAAAgAElEQVSV7KkQR4zxXF58yfH8A5WI845xnOW/hdfIspyxTq1GRiWemv0FOT3hfUMtiilHTodE68507QbUhhgXQn3gcByx3uBax5ICYU5QQBWDKpbt1hJD4OHpR1Keubm55Xj+JLlB2awSFdBYYsyM5yPjJOvR77773TpQyJQyo5Xh2z+/J+UZZRd29ZLr6w5de5ZlIeYjlZUslSMxLsTY0thrlnBgDifmZeFwntjuNafSU2fB2I+jSBTjspCLyH2WyQjGMiqOh0KYz6AmlljJz2dSUrz76pJPnxSUIA82fQQCptnR+D3zOPPxp0dab/gwfWAKmafDxBdffsntmxv6TvwYj08PGJ84Pbfs9o52KDwd/oPnwyPjKGbJv/3q18RFcTh9T80L46Hyv377Hd3V17iLSw4p8PH7PxDDM3ebDVYnjFa0dsvXb3/D79//d5rWcHd3g3eeYU01R4F1lrvbHZfXng8//EiMlavrt1z0mvmQ2W8UPZUSE+HUcffFHSFPPB/u0dyy23Tshp5wveU87ri7fst0VuThROVIzSNe3VBKwLmGrtvSNIbnw49cdjsumx2uKWjVsdnteT4dmMLC23dfcR4f2W09revYby8p4Y/s9h1x2VKywa7ZVv/T//y/kOvMtBwZp5lUEtOycFgMbmrJOfL4eGaz3WGt4RhPdN0G56Hxnm5o+Oabr/He8fDwxHQ4UmvhcEz8zVdfUeqIpnD51efM8RNLaBinkSUd2HSXMFuqXVA1Y4qmrRc0GxjPAasrV1cXfPPLK4xR3N29459+8xnzmDn+6Q+8ue3Yb27Z9jtOpwmjFXc3W379i8+hamKeyT4yTZE4Ka72Pe3zJcZm+o1ls70gM6FclcLZGlIqfP43F/Qbw8PDmftPE3e7gd1+IMUjIcyEnOj2X3F6GkkH0Mrz9otbylzRRyF6uc7ifaKkBUxhf7Hh8vILfnz/ZzyVi90FrdtwdbFnczEQ8j0pScE2dHua9sg4H5imifFUePvZLcu0oxpNbRLtNvLw9J6m3RCTIS+KYXOFbY7kkGl8z1fvPqO7/x8JS5AJ/toE5SyNUtvsGfo7rL5mCUfatiflwvsPz+wuGy4uNzhrSUE2BcbAeZ7XwZJnu7lA6UKtIj8pBZRVxDiitODtG78Rufeq1xLZXaXWSN
N48XQZI40DGastqiqm6UxMM01ziTUtEqod1pPEUjIsy4LWir67xJiWlPPrWaMRX9L9p4/kIp+HUubVvF7XJm6eZuZp4e7tNY33Ik3JrHlXK8JbJ6yvDJserfxKzyuvUIglSsO02/f03cA8SpGitH7dbpVcUapgDav8BJGrWclRotZ1c7TQtgnvRW4OeZUxyouWAOdETRPGsUZ5IBNho1awQCGXmbaTjU1cNDmvHqNVMlnQ/wn3LnAAcicSzhX5j3zrtG1lu+3xbWWZKyWLvwSkIdRasRTFdu9pWnmvOSqMkwm5SJ4007myuZDMOaUUuQghLLE2oWujiy74xuM9zEsSWWAGpZJkrdUVr44Ua02rSbPnBaWtquRXSdGh1pygEd/0KO2kkVRQVhR3zIW0+ldO05m+c0LZvX/m+Hzg+z82/M2vLhn/0MJgubtV3F284fn5npgzqSRiGXk+F4Z2h3kaIcFuuOB4mPj8s4u10D9zPh242HRQAtP4yJ/+MNE6KdaXZeF0OmO7lsMUmH76gabraTc9/c5T2onxkyecAz4vbK925GWh6Vr8puUhBub7Tzw/HXi6/8Rnb97S3V2QToFus8M1DZ274PnpPwinwEV/Rb/ZEMsn+o3j7//5G3JVeK/orr8hhIhVFbNa4DeXe7kvsuDUx/lEZMIPDb54abSrJdkHcpWN8fR0IJVPTOcW7zz7YWCZYdNanv78R9L/S92b9Uh2ZVea3xnvZIMPMTJIpjKplKpT6i5UvRXQP7+BehVQrZZUUpaSZJKMwcMHM7vTGfthXw9WP7SeVQEQIDiEm1uYn3v2Xmt9KwT6vuft4ZYffvoR03RU67k/nbn7+P/w9W9+i/MtKVec75nWlbymrWLDM9y+pIRnddTQNS0pRKKpG3RnJUVBe69rgFLkTCuVsEzSf1bAauGdKiv9cDFl6cVs/Rf4jbGOm5t+s69VcVxseel4OROqDGbGKJwVcEPTtFijca7DN46u1fjWbBmfSsUhdEvQOEIO5FypRTH0B0qRvr/nUvTnqgNdRUHPKTMmiWGwqXe1aHICZ1r582tkkPv6qxvJGis5H+uWl6x1cwrE5QsApvMN+0FULLHyeozZICEYUoxSi1AK42UkphN+60uTAmnHGguu3bgF1SH9WxPabQPUvFALpCwLGK0SjWtIMcoSyxlR0dqOsllrjdGyYFkuUjVRA1lFVgKlOJyvVCX9t6WMlCSwIKUVx9t33Lz01Cz0yWI0eoB3v/9LSpyhzNQ6M04fUP6ItUe09oT507851/y7GK6ebTwhrtQaefXyDWkrmIspkKYkHtLtIbSWxPhwZpoVfV8kZGZA1RajK1p7CrLpCnlG4YjxLDkKU+m9p66gUsa5TOd75unCHN6LvSUFcqyofKaWgjeW/aGh5DOlrKS8EvMMjCjlCHPEaIX3HTF5UhpJ6YxYJSwlJi6nKBtRDG0zYMxKKRpKgzUyRdeaqUm82672HK48jespJUoxZBUK2tC11NpQo+Hm6qWUaqZETJnLw4gZjBBzcmEJAUUBlykRUkiEuHB1+JquhVQMSvfs+ldM04jeDgjnJO9QbWCaA3UprNkQ88zt7Q169CzLZ9bpkZdf/YGnxxPrMlJSYh4z3l5EDrdGDgqlcPaWkgQ7fT5NvPvqFb1zWNuB2zGtM6pkYo5kHemtxvoDbd9Rk6dTmu6bga7Zk8YLN9ct3/3hLXFp+fjjP/Dy5Z7dviW4ZvOfa9YABUc7fM2Vc9w3YgNqN29u616R1wg543VlzgH99CilgwoOb3bEZWJdVgbnGfo949PPDK4nNQtRTVztGo5dyziPJKXAwvsPd3zzm98wDIGSE7fXe5b1if3tLWZaWOYFowwLgRInmrZy+2JApcC6zJhGNrfO9+QAX726ZpzgdJqISfHixZWU8ymFbg1Nf2BHpu8rKUVCPAGKYdjh255xmTlfHqEYrq8O1FS4nJ6w3qObAypl4jpznu65unnHz58eqMygxGrg/BWp3qE1dJ2jBIVrI75RKCIlnrl5c0PBcD6fmMPCbrgmh8gaL0zThZIi37x9y+3rPXfvF+KSMEdDxqJiwngFyqPqytPlPcYOG9VREVOADIkVLII1doYlLGRtUFoeRn/1N7/HmR6rpCso6PeiOniD7uWCHmNgt9/TDJmwJpxTtHVg2Hmst+S6cPtmYA4jw67D+455nlnzE+GpYG3HmzfXPI6Rbu8Z80osE9YabnY7Hi4/o7uAdz3tcMP78/9A+5UYAw9PGWNbht5x2B3RqmE8T7y+/S0fPv1RvPhmZX+1k9yA3mEbC8UyjjPdYGiagVIghjPLXDmff+G56w+1Ys3A+TSBynjvyLHFW8slPaCWHpv/irw8k/W2jSngnMeaAe+usHpHjAvKGLGQlIh3K7/73WvartnsVRnpUjWUnPBO0/U7rGrFDqgUdcMz55gpOdA2FustAqkQG5nYtao8mK1AHpxpBWm8dVKpbau6rpOcta6TDJNCvkaRTFupmZTWzZbmfi0HVjLEpSy2vKb1WOUBNgCDdCQpZUm54LzieO24uhpYLkYiOhvwQWtLDAnnNNo2eN9QokA1nq08vw5gE6VqUjZAu1Hy5KGn0MSYGPqGlOViqJWVwP1zl1CFFKGUCeskbxXXhrJ9LbZhAq1Zw0rrI8poYU8oQ66Sy5KcQoUCja04ZViLk+ekerZd6i+5s6bRGCWKWC4VbeT9q0ieiqpE+ZFkJEYZtJLXLfE3I+qlCjReLHghVjBWhubyKxwDMrtdgyIxTZEQZRh3VULxpVZyCrTe0nYa56tYApOlNptAVxUaR0yBUmesSljlSUq+xy289eVXzhFnpTS1JCPuYb3l8rIU0WoFVKkzcY0lRs9uaFiXSkqFu8cnXl4G6fnycoF1vkX7Qm/EEn8eV3SrGRqHNoU5LaRT3lQZKYhVtsXEwnC4Ii8TYV5Yx8LLr2+3PGvGeenW8U1DTBdMBssVblA8LRHTWRonuXHXe6zeiVVMJUo5E9OK9ntSSdx/+pH9cYexHVZ5VFJc8kfOU6RvnFDtlpmiWpquxdiJsIzc389Y36B0g3cdTlmSCZSoaDuL9R2q97hhIMZPTGPB+4Gr21dopXm8/zOnh3viEun7A+f5zOVppO88KTnGcaIfrrlMEykuxDzzNJ4ZgyKv8qyzJtDhefjpH0jVk6vH9z3OZlIEazs8R5b8KIXoUVOq23KfBaJAlkzvac2OGsOXXN3ucOD89EC3v8ZovfVNbTkildEb86IkMLaS47Y4qpoYpae0slkHIzRNQ0hxyzAVvNWUKllHY7W8HgM5VKrS5KQ2QqYiVylcFxjRgtIerQrYzXK7QWgqBqMcVjfUFMkqbTTSitgCn1VyEIq5E5jFdu7X7edPco1bDKdCTornvqyShTWgtJEsXRLaYkGgR2zfc0XAHjVsyrvWQtbOW9xnoy46Y1HP1QlG7HUKoMjrUWSUUXgtSlgukh9LOQpQw7gvluHGS/5ULOOBohLKVpy2NGYjLarneyisqaJVI7bMwuY0ENUuUeSc30bWQsY1z9ngnqbfo/1F3
BIo3K75N+eafx/D1TatC5xF8lYhBlJexeNuDOu60PQtpRhirMRFgv3eaaScvdJYJ6qMhqos1Qjb3zcWbVoZZtAs87xtEizWabztiGYhpgsbiVMCw7VsQUSDt57LOG2+2+1DVxaM66lFAoAEiKFinJShSemieFPXWL4EK02CVBY0Ga2ywB1K2QLciporTnv6tqHGQk6RmiOxaFIMdF2HVpZlBqo8VHOt5ALTtJLSSuPkQ5GLwBm8F7S0th7nB6zZ41whZtksNX7H4+VCXE84awUdu2SmcMbbDuMsSzwxHDppItceo1piVaRJUwLoLIf7WmaohRKRTp1+2NCmDcUKnTD7gm0GDruerhkoesf503ucUWQiGLB9C7XDO4f3O1TTMqeCd1c0x1uG/TW7V1/z5+8/8vgBrNY01tN4S8gN1mtUzeRQuO4Hrhvpxqhojm1LMZkaO2pTcaowdIpf7iaO3sqmymm6XU9ShbyFNxtrWMI9rukYek1uLMfecdPu0KoQSiZVxbyupDWgahFmUl4xRnDuNVbSvLDOI/3O0XQW13qg4fL5TFhnTPbYxtM5x+XymVc3A13rSGHHw3ih8YYQHEUpnDUoLM7tpWwyr5Q1kkImZKjVgLKE8IAzR7Q2rGXlMs8crwYylqqhKAgxg3KcpidyvmBMxJoO6y2lCgq7aXtiHPHeSUi2Vvquwxoj/z91y7K0gFArpWjUcnM80vfi5Y7zyjiPUjlQpH8OLSXMMc5UOrSuOF2pNpOqvM5KJdWANpY1L1DF+216T+euMDTkVFnWiajEvBZrJhBl2CiVgpRceiXbPHxAt176MpaA6wxzzlQtxK2qCynP1AT7/ZG+u+bTw/fEWRNLpqCFbKo02ms6s6NxB5T2Uk5aFkoVimjNlRQTySZUVYRQ6dtbnJVzIMYkodlSAQfVbHmfuIXuLca0OB8xqkGZR7k8oreBomDtjHWWpmmZLgpjGg7NN/T2G164vyHZVUrWN0JTrQnrGkpuBUldNLVGGSSSlKbv9pZXrw4o/YwmFgjK80NKm4p3WmxkSZRcQSVLPslahWtbvLeEyVA2yIdSQq5KMUiuVftfKXNbLuk5SxDihLVsGPNnm5r6MoyUXFjDgjWC0FVb6vy51DclsYcM+3bL6GxvHWyPVDljfaPxTYt1WtQ9sw0C21CU8kLTWLTtxZ6yFik+ZZuckA228WXb7KZfcz/UL3f9lCN958k1U+rz96P5wqdXkGLGN4qmFXBGWLYyXPHGbIpblR5BU4QAWtU2+D4PMPJ1ndFYsw2reaOibR1jW1hKlmtWBpIYqlgCtXgqFQqtoaQqytYmwZWkvhTNaiUbYC0bT5zTKAphFQiLDKCG53JjVKXtDKjIuiZSVmgnSkjVz7bIgncOa6W6BeyXXFrdMNkYTckrSkWMRqiE2E052f6gt0xYrVmWKo0lrpvCURWqsA2IMkDVkgQR7zXzxeBdQ98PrGHl/umB0+PK8Vqee1IaA9Y1eGMoOrNog+4M3mmqyvKzVOJ2CQ8oozGuwWJQTkHY3s9ayCFuH06xfhmK3JFMBiI1LqxrFRBGlTxV1+9wbUtjBiqVkGcarRiGV7j2FlImXe6opUF5x7IkYswkG5hnjWk1qhTSM2RlNsQ4scxnLuczVbU03R6DpZrCup6pqcE1e2oOsqTQgNakvKKSAKjWmMlKUZXlGUqQsygV6Col82Gl6RXOS99RKZFlGlF2hzUaY6FvFU0pnOd74qqIuUHnHmUjOTm034O11PyE8Z4QCykJbr2UbZByUtninCemhLPSs+WaFt8M2LbdrMgVSkU3HU4nnHlWlL2U1T8DNHIFWnLVX6yltWSomqbKIsoY8KqSixeFXmUUmVwqoQrQIucNYqO34uUCNStZ9GK2YejXDUFVFb0tp0rN5BypKmxnlEFogGb7eQNjJUsVY4YiFNGy2ZyfbY/yMykRlufzW239ac+/SpXC47IRPcUNJUq6MvLPQJgGyugvih6IGqYQFZuawfza2yU1THWDumwER7NlL7Wo3GxLIOETJHIqG2gE+RzXiLFGVKnN7m14BuxAYyrWWJJ6/n41RStyFVu4THeFVEAhUQnB9yia5sDh0Ak5sha6vv8355p/F8MVSoGt9N1A13Usc+bx8kQuAWctXbcjxMzeaFQyUD37pqO/UsS0UJJCJU/Tepb8hC4GtKXSoxT0B03THgDL4/3K5/sP+MbS2GFDZHoa7whJgsXeN7ROwpBaiQq0TIaYwuYlbSm5sMyJzmQa15GTWBtTUgw7tgZwpLfqqGTblVdiWBlHDxb6rqHxUZSnXPHNkVJmcpnwviOXRFxGcl6AJAShdcHpBmsNkLm7u2PZckfaOKqt3N3d4Z1nP+w57q44TQlbe7xr6A4HDq/e8fmXjzhTCNERkvQ6oAem5YnWG/ZDxzorPn/6zB/+t/+D49UV//Kv/zff/sXv+eWnHwhLwNoO197y9OMndoeObn/AuRYV3nPsOy5PCzVqDi9fENaJscwMvefmeIU+fstd+RPNYUdre6ZRNoW+deiaqE7THnpOd6CqZTe0tG7gw+c7VIn89q/+dw7Hl4xLwat/5JvvbmlMKwNspzFjQ99qYKakmSvfoew1vhvw1rLretbGMd9PNJ2h7y3dYJjyPV+/eIV3jlAK87xy/XKPUjJUp1ApaqXoyG6vcHZPT0/fXIFrWMLEukx0h47z/Z8p0wVVMue7hTdff4dKGVIkh4WHxz/z+7/5PccXLyC3TI8X5ulfUTFT1w7f77h60fDPP//Ah15zc/Oaw9Ut958+EecZh7TZe+3Ic6QWA0YKCjt/ZPz8wP3jyBoDuS60jQRfz9PEvIyEknjdvmFdI1qDa1vafE0tHucUIUfmecGZDuNmauUL7IX4iFWWZRQIw8s3v2EcF9b1EWM9V7tbmu4Wq1f6ecUaj/EFZwwqQ987UoD7uztqq3C1IRFI+kKsHa27oep+u3ivuM5SUotCU8rKFEaqaxjnBeM1Q6vo+o73P4907UwpC+P0SIqCbl/iwhQnXLejqo6ny0WWK7oljIVsP3GeZmppWIOiGQohZ8o8oYPYYvtOCy3IW6qOpHrP5WnkuH+HNUeWaWJdM7v+Ddbs0KohLBONfsvd+Z5+aHhxe8O6wDjd83n+iLUtfXfF+4+/0PaCBo4xs4QL+92OGNNmYcn0O02IKzlrqIa2uWY3HDBNIAULpZHLd/iZl68PeO9ReHJ6ogBvD/8nr5v/yMvbb1hDoKqFeRbFWZtCjHD/WZQD5+IXaEUIK0plbl8cGIY9p5Nc180GEihVNrO1JGIK2Kpli1vhuSw3Zyn4btsGYzxhsv+f4QoU6xpka6kMqjoBOpjtcl0LuWRSXjDu199bayXbYyUQmlwy83xhd2gwWzcMsAEtzJZhqRy6FtcMrJN8bfU/FQPHGNh1mt2+k9JlGuxmndseWKS0sNs7mrblcha7jrYbgKMUipbOxrZtcF5JHUL21Bq2C8w2/JRIN7Ro7UnZfMl41W1Ak6Fz4ep6T9exXYikoLduVkaQ3i5rBYJkrWFe5Bkk0t4zZTHT
dAatZWubMhvWXLJIStUtN5Yw1snncClQheoIbFhrTY4R3wqeuFRYligFwdv7VBCUOxSca+WzsUaxKCJfR1WoVQpfrdUIqllsycZYuQCiqFrUgcaz4eEVtTgqiZLtNtJUikqb68JgbUXhZLG0LYCgbJdfuaQ6b/BecXpaQQ+bcLd93owMtaJwRazR1Crb/q5vSWXH49Mjnz6duL55Tdvu0aZhjjPWDKhSIEmZs+salK2UkCip4L3FlIDRGWUtpim0vWNe7uXipjPaVT6+/5GUAjHBulaGJlDygjEGiEzjR+KomPJMXTVd09DevECbKoqU1hjj8AT2+6/wvWGdH3hSC67bUWrg7vGOy2mmPxypyfIpifJptEETKKcLOWRCCISY+Hj3ieHqzHK40FjDevnM0O0xw9fE8YnxcsF1DTe3L1E2ENLI3d2febgk3rx7zc3uCLFQQqB4x+HK0TYOaxWn2bA/XqH2O9I6skxPOF1JdBxvDlKWnh/Jl4p1lXbJrFOm5cJ4eqLkltoEqi90NtApWNOZEiKuT0z39xS/oykeh6auZ1HC/RHjGmKYsFYyOtQgLpwpk0m0zmBsAp0x6kiIZ7re4IymFDgceqb5TMqy6LFu5sNPH+j7Nwy7PUPnKOuEwbOkQCkRSyUuFuMNhbzdGYVkWNV2JhmH8l5cwDVvZ8c21BQNBqoKLPEkyn0pol46K6ROK+XmtUCKkUwlRsmlCvRHllTSWacxVkQFbZ8VeI3Wcm8IIWzXdYXfAGayXNGyjMuWmIOc50rJsLjZl1NKUoas3fZzKBAcrdVm47ZfRgHJ/iq0KRhr8KZBoVDKME0rMSSqhRgTMSa0kQEzb/j5bhgouWyWSERUUALpcU4IkDEksWAajXPS7yVLsPKFNm51YZ4DMSY5c/odfTcQigCP2vZ/gZ4ryfQEjG4pWTay33z7mmVJ5KhQeBQN6zqRUkArx9X+wLKcxfdfPVpZTuHMq9dfb6G6RClStNu3Rx4eR87nC8N+4OblV+LPTIkSM0uuLCHR9QN9J1aRaZpQypGLTMTj+Ynf/fXLDTQwE9aFpr1B6VFsbEXh2x0vrxuePjvSOmJt4vWrdziv+Bw+M80X5nhP374ippb7h5WcL+RcePXya0IeeXq843J5YtfteHX9GophnBKn8cTx2DLsd4J4D5lxOnF103N1fcPTaeHpcaJ1nm9e/Y6n84SpHZ27Rt/s+fH9z5R84jAs/MYM7IcbLucPHPdXtN2Bf/nj9/zuu98SrveEuBADNC3c9H9NmQ8Ep7m+vmE8GcKYGWzH9fWOp7nH7p548+o7fHtgDStjKLx99xvcO3C10jnDx/c/EuKe1y9e85u/eMOhU/zXf75nXVbWy0RaZmxZcW7lxaGjbwdK3rEzv9CojkzPU/GkeaJd7jndO07TDzw8zJQyotXMmmcaa/j9VwO/fFBcac9hf4sfMj99+JmdbdBdSymJZX7Cqmu+ffMV3llSinz+8BFfZk5PnzhcHel3O+aHlYii2/doo4k1wvrE7c2BGivxElg4cWcXhvYNnT1gNKzLHf/xq99jm4aYA5fLI3n5zIf7O9aoQbW8ffXXqLUlfnjCdRfadma8PLFvWz5/+iOPzmGa7+j6W6re8/2Pn4nrzNfvXrPftdx/Pos/PEaO+1vuHx+pRmMbTdfBZT3jXCHEM7oG3r79lk8fTnSN43DzCm9bdPbM+Z5UDLVUrI+M43/nevcGrSpP5QFtnzifVoZdzzI/8o/v/5Xr45HGt1QqMa+8//gRqyw5Ol68ueZwc+T9+++J8URZt66doJiY6c+Bp+mJUjPvvn7Hw/yBHC2KjsZqfNOze7XjMk8UIs71tN0rHh4vrDGRi6YoyOuM6weMlUD9Lx9+4TKPrLmSc2RZVpxz1Fq4vjlyc7nhdJq5jNJHl9KZUhNX13sMey6T2Jy69paSV5ztBdsdEzFA20pO6P40ktOJtr/GpSMlO2JcWdLEsoJzV5RcmdYHPt79id9++5/I9Wu5TGXLuj7y9u3X1NJSsiblTMonqtZM08hlnMg5EOM9N7evsM4wz5GUA/vhpSxpYmRZM6fxF1JK3Ny8xbuBn376gb/6w2uWic2mZPjtd99x9/kTzd0b1LTj4WFkGPZYO9Bfv8E7R+stf/d3f8d4uUfbStNpKgWtPVpFqioYV7bLr/qfTm/J+uS60jjJE0xP9ksflGw/ZehqW7HO5GygeKxTYrHYYAqlruyHA9bsMNrLsJPF0VBqJWVwrsW1L/BmhzXP6t7zc6RsHTGJ29tXW2brVzeYIK8D6BlrVhRbpxTPYAj5r1ByiUJBWMWa+Izv1jxbUDLTPBJSZZkd1t9smbHNxhYKjbfshh5jFNPFENKmQmzWk5Qy2hT2+w6lBIg0z/WLVVIrs010BWsz6xpIyaLMwHNA7Pl7iyFwPFiUnonJbpTcSN16xEqVWgXbFDCaFCW3VIvbqOSSm6g54RqNbSopITUngNVme12CDS9ASGemuUoOQu0wVmhsqmTU9nUbr9gPnTzLU5al4JePj7x+5xVKrdI9F7feqlJQSNeRQZBkV8ce5xTLHDk/rdimFSjJtplHVUJYef16h9aJ8RIhKzDPapfYR0uqoLNQhTUoGmLKaKvEBaNkSA4p03lF00AKMyUfQCl84/wlkbYAACAASURBVDnoHcfDFZ/v7rm726P1Nbvpb/n503/l9deBCKRsSbVl8C0N4IaWxjfs93v+5R//hRf9G3zTEXXmfP7I6fJIt+9xXUNaYEozpVZcMejF8NN5Yvei4Xz/ERUyO3fFsO9I1XNZZh6mB8aiOI2R//Qffk9KkXEeebW7oeqVT58+k9ZVFj/uJWn9SDs0bMYGtM00zYmYRGHpDkcexydinLhMFx4fL6A8P/zp73n36ju+/ep3fPXb7/jv/+17jH0i1pnT9Eg8w8PlFxon+PU8ToRUSfWa00lgQy9uXvLNu2+xGOJ6ZhkfePftDlMKdx9PjPMZ3EJ/M2AXg9Ewn0f++Y//zDe//wNFZw7Dkd27W6bxnvSQ+OX9A2UZqV3heLxhPo/kaaSsKw+nlXFauH2l0HoSxcp5fvnhz7jhBcebF9zcXvHh0wf23VuOxyuaznFa7ljPM21/pJqGvKmHYbQ8TQtWWXbtkZ9+vLCsD6AKtmnojy84nb+n2hVX93SqxaqWgkY3ipo16xhJeSHPEe89zjty1JuNr4DKZJW5TCudbqkqorVknVRWDO1RFgx1xbcdxhwxKm9Lok2VJxFXAcXUmpnnJ9q2Q2uFwW3WYoVym3XQKNrmSKGS4rO9UJZb03QhhIBSmqura57OJ9qukfO5ZPa9Y98ct4GubsXBZfv78kUZK1mTkgwoUAT00Ujh/JflzNYNmFJmXQLTNNE0nfQFtgIpsRE6HEonUIlSYFkK+70hrJEUJ1n2I0ppSoXxkrZngfRZaa3FEVIzTdNI4TRSi1FMYfA76laMF2MkhQWjRKVfN8Ly/9+vfxfDVa1KrHs2sSwnzucZazVaOWnYNoGhveK8FjABVGHJUcg52n/pVAhx4vKDWF2ctfT9Ti4
l5UxKBqt3kDUP9yfyZtGxxmGto20Hck6sl5WYV+4fPnHY7zFGcJ+v3+35fP+Jy7wQQ6ZqS9cdGBfptVJYWn/Nrn/BfPmIb1qULnz4dMfhJlA1GNuhwysen57odi25GNZV7BJnnylM5FxomwZUINbAaXqQ0N3LV6TVofSetAos483tnuGgaNqWYa/Z3a58+OV7GmN4sTsSppWP73/k5Tdv6IxF2Z7r/S1vbq759HkmJse4LmSbuHmhMPqJddKcL4Ulnri6PfDu5e84P41cHi8Mh2uWxxO/f/cdZTWsU+XN2xvSdOHYDYRUmZ4Ct92e14ceVTU5ZEoKXB3eMVxb8daHSm0Vv739W2yJqDIT4hPT/YlMoN31GGWZP0eujm/prhtSrczxwtWbt+Q4o0PFlMjL1jOHjGnt1siuMMlg8s+secd59Phs2LVvWafEoWvQuoNQWNeZh/oLe9fQGMfhesBcLlTtOI0XHs4P9LahpIaYBZmtHPjV0SnPmlemdeVzuvDVy1uePvyJVjuGoac0e376eAZ1EfQrmqef3pN1pW0dTZvpWrmn3n36H9QUsEZx1XYM/RXXv3shErtR3F5fYbUilhGVL9TY8vRY6fsj9FBLphsc3Vo4Xx5Yl4CfLV+9+AuW5Z5WS5ljmlZaX5nHMyp4XFe4P0to2dlKLIXpFMhzZp3+hPaOq6FHqRtUM3O6PLDGGe0TurVgKikszNPEvERevvkK6/d8fhr5+PlOiH1upjOWnBSXU6HqQnAXqmmxrmdZo6g7u45lgeWSSDWh14/Ma8JZh9U9P/z8M7lEjC1oK4WnqRrmZabWBWpkmUDbJ/r2DUZdsZrAfneNNf8X8yVz/hxZlsiuO+BbvVUqrKSQMPUFOTySdcCkz1zCB7zdgbI413J99Yrz+cyyjvCs4Jk93mUu0ydKXRh2lruH7+mHStsMVAXGrZzPdzReDvB1LXjbSeddycQ8EuIZ1wxY4zaAhseZA+fxPXGRWgnrMiEUnL0i53tKuYCaub6+pmQLVRPChatrUQS9PRJC4XJZKEyoH/4L6/qSVMGbSj6fqFulQus9vff8wz/+Pcfba7zvSSlirZLfG4U1BuctKYka9JwbEhtzppQRAGP21OLBZNn2VVGVljmhfcDYFqs1kKmqIHUNilylE8U5talhCvs8OlRDznKJaIfI8XhknRwlVVJNcgG1imVd0Lry29++o2l7UpDhaqOZy1VeicVuf/CcH2TTKVk1UXpUtWKfsXnDoAvQRGAO0otZYqRtDMaIhQY8otVUlFboqoil4n1A7GualIzglbVC8rWVWqSHJsaFWi0hSF/Qc8VN3axsMS0oXdA0okRmJejpIhmnIt+c1ISEullHPYqwbYLFJjPPEdtAVYWCpWLRSpTunBMlb3mLvLKsmYKVy/GasHj0cw4M2fB2bf1im0xJhtdaN3z7hrf3TjMvI/NcSUlKup/9VF+KU41cAnOW4l6llViGNrvoM+jKmCpujpxIGVSWnqpnHDb6OS8RN2S/Rmv5FNUNgf/cbaTIlJpISZGipyj1bOaUrKOy5DhjW0QRahzjpSXXQs0KpR03Ny94erjn890TRhfaruHNb3ruPzxxmiPKem5vGp7eP/D73/wHDJESJ3TWVF0JRrbpjXG0V9dcDzdUItO68Pj4JH2GaGxjaQ+e/mli+rzw8vZrVM7MTxfWyXF1bBlUT4qyEHjxwvLpl79HOYdpex6mQlEGpzS+laH78fQDXjmsHdBWc3d55Ju3X3OZEqfpzDSP+HViv78mhYau67i5/hbrJv4yvSUVT6yZ9w+P+L2maLncat+y9w2NbTHOM84XPnz+wL4/8unTT1gFXntiWvjjn/4VCOR1gZx5dfuCh6efGMtIbjXN/sCqDDc3ew59R82O6/kVoUS8UaxpJY8nbl7conaVm6+/kR+cEonTQvCR6TLx+fGBj+8Df/tf/jOP55/58P0d958W/uZvvyXhCJcT5/OZ7/8VDocryvkHPvz0M9W0vHzzku//+3/j67/8A4ehwcaJDx8udPsBZ2dSCny6vOf25i2X9UyKBUfmyMq3X/9ninKEZeKny490uuV8BtdpjAdDoulbvGnxW3HxWheUBqfFp6+K5tgPoM9QHaoatMq4NkNdN/u2J6eVmkdS0ShlRCFSlZSD2C+rKD99LwAfZyxKK3FCpGfioCxYTqfTBrMQxV+GOjkjKr923B33e1LOPJezj+PCNC1ClS0Z58wXmIwxdnNbKdZ12Wx/8pf88005V7+WCY/j8kWp6/thqwh4rgWSMuyu63GulWHLWoYuyftnO7puR0ppG+IgJVHwUYLiN3orhQfCEqk1bbZlzXiWLkXrBJahtUY5Wfjo7Ux1u/8FlCsUxCwdITln5nCmNR0OKWENKbHbH6mrBZWpKjOvAe8tmUomEWpkzTNxXum8BIfHcWVZA+iCVi1KOdYotgxrJV8R80yukctyIWeRZlOJaGPBKKqu23+3ME6Bp/OZnCvOtnR5JUXNuq4oBc5WHh7OpDwT4kROGUXLOvuNviIbCauPdO4ajGLXgsazG46cTne4vuBc4fz0yDoWrg8vcF6K/VaTaax4dimVWhK6OsIE2RS8gZvjNWldKSQ617DbdzTG8Lu/+BpwNE0vHyxVOex7+n1Hv2spV0eezt9L2L7TON1hUma4tsRVYbXn5e7I43qH1wrTWgarpei43mKqePnfXg+0/Ws8iqoj1Req8dRYOPYNqmaWp3tU1lw3t+Q1Easl+wPGB0IuVC0Iz+HgxR5jFK2G1hlOSyEbKxf3XHGd4/b2ltPlA0YrnG2wdkfvHiWblyImZGxTGVpLWEa2ZQy6whhGfA+20awx4nRHNQ6rAN3QW8V6DtKbZEDpSNM6dLtDK4Ml0i2BcJpx2pMrnMaV4Xjg6fPIfHkQC0AnBbjaNlhnsBpUmakY+mHAMmCUxeWE8R1d12MbT9GFu9MDar2gzUrXauKaqDi5PGm5OPz484+0rWZJF9Z1pbDnxXXHHAzOd5LdqJWiMyFHqtY0QFICQykJUlZyWQp6u+xkUsmENXN9DX3vYanMy8o4L3jnSBvUQFu4jBewQXp9SubQWZa8ZaTSwrqO6NYzzoFu12O0IaRJEM3IoRpjIE6Z14crCgmqJqXK6fJAZeR4vMb5gZCyKD6xkBNbR4ei93uMbtDVo7HUojcoQCSElVrqBmQQ9Guxnnn+TFYzVjcYq3HG4E2PN+3mWTcs0wQEnJOwb0qgaqF1QkWjOpRqcI1YlaZJSHzO3HA5P+KaIF0dRh5u0seRqDrjvBd7g+7puxtUD1Y5wQC7HkySbhB7xf39e5SKWNdwPOwlu5HkUptzZBpXVG1QegTl8LYnvX9LOl9TsVSdUZsKVFSl0y3LuvDn77/n8fGJ/fUVz91RuYj1uOSAUhGQ7Z/AJUQNMdqI68AKsa5ks8VbNtAC8mCXgmTJzsSUBUBSFWx225wzfe9xPpOChKmrEvucNZYYF3JaaXswersGb2EpuQQoUTt0od95YpDPhNoyRGqzqZQSUTpjnSGnZ3VN5kSlFDnVrXgzE2
OmJFGOni2BEmjPWCdlxQKYa379fjelTKlCZSEEi6KllgaU2O9QYj0pqdLvFGtcoSi5KLG9HrW9tvz8esShAQ1qKxp9tlyCnAHeWxQ9MbbkwpZr4ovd0SiFdYUlrMSkUKpHqby9XhnqSpFsQ0VIgiXrL5jo554oXSSnUYnEqKjPauYXMAZApWTQBp5R+QJdMSi9/bnVQiqBvvNYW6QiIIA1jqy+vJHb+16xTkanlCupKLwWK9DzNJqylKELil2hlNsWAc+fRLlIlVJEGaiizqW0qVr1V/VSNu7gncXaQlgDRdLuPBMu+76nG1rWZeHx0dDtHIfz16znH2Tzrh2uZhprCKmw23e0xnJ5OvHmzde07R6DhhCoaseLd28I0wn9+QP3+o6bm6+w1hBLIJWRvtWkKZBCwBpLu9ujdSWWmWygoDZggEFVTcmBOmWUT6xFM1ixtxYSrTeMjyfGdeESVi7Lwh///B7XZi7zhfP5RF9aYl5o3AGtNJf5E70GVV8SUiKViRATtS58eJyxVuxoMUPB0jkwLtH2mjVnHsYTndO0tqGcHadpoem2rrdUubvcMS1PmEaUw/NlpO32zOtMyRGtFMcXL1nGC7mspDwRygUeR5IqDMMRrWAZFyqB6fzEcjlhVOLbv96RUuZw/ZIcKnn5IAXCayTmSoiZGBK97zHmxOlhYo2GoVe8eGFw5kKNJ8r8wPn+E5fTnuEg9vDzeSTNgVIjxlWsysynyLwODLuOUiIlLPQ3L0nlhLayCEipospCLZ5aLCjDnK10auWKynUrJUcesNWK0o+AMUqWZ1suSTLKVbKM4m+VzFGpC1q1aGVRukhh72bXex5shMInhEyFwmjp8mr8RuHDQq1477ZclJzJIWZ+7aWV/1s6xJ57BZ9/1p7PEP3FtisnxJax2oa656VSSuEL26Bu57LkjPWGpn/ObBbWNRHWzHOHoaqS33t2CMhzQvrPKmLdrlSctZulUV6j8w5jrKjyJW3wEhkoUxLsvrFa7jbWfHmu/Fu//p0MV0J2K0UOh6wSyhaKil8KIdc8EVIAohSJ5YpxAIlUM7lGUgnEUhhsg1IwLzMpi29caQFKkBJLNLTOyVhWAzUmLtNCLVuQtArXXinJa+ScKVMiROmlyVnyBct6IUUlW1ld2A/I5cZWUkqENbLv96i8R9UVqvQ0DN0Lds21SJPWsRuu5IcoZYado2kVeVHEMfLi8JLGN6xrYd9sxWU6iPUxJDSWdc2oRtM3hmY48BQeiCXTuJZDd2DVEy/fvCAEJVvNmvC20u93dMMO3zasdeLT5wmnKn3TMphe2szjTOtA2YbBNdTeU1KhaaBpLeGy4kwDFLxTHPeOrr3iMj0hSKNMVbI9HryVTMS6Mp4Stzcd8zwyp8JqLFOuXMJKdRalPU3XEEtiiiuDF292XRexFpUINaFKYbdriY+JqgTvWVVL115T64JRRUKftuCxnBdpRQfZGMaYBAZCYR4v7Idras1fugxgIeQZFz0oS2UFDVFpinOYtuNgoK4K3w3MITAuM0frKfGRNJ9IZSbrnmE4gPXbJaYQlxW8ox8GWt9JLmkNnOeRrCXfY5xl/PhndBlplaXxDTHKBjfk5Utm4unxiavbvSD8c8JkRaiJNQuxUhlFqpFQImnLhUYKoSRUWEhFQuLWNlSb0LqSaiaEyDjODIcW33XEqjiPYrnLefOAG4Nt4Dye0S1YHJ1u2PUNRE0KKykv1DKijWJdFD7rbeM8EoLCJL60sYcl0L59Q0pywY15JcaJqkbgFkNPTSuFVQKtWQMNRhec21OKFQiBthuGe+t8qqIcPB/SCotGs66JrltEVbEOawytO2K0w2jIOTGeT7Q7jfOWdZEBy3lLJWCMh+yoxdEOgbz2rGsg54y3A5f5E51WaN1glaViGMdHlMlYZ9G6pZaFnArODVjdoKkM7UsyAcUGLakd4+VfaNsW73Y4PZCZmVbZdpaSWZdC0yRKesS6Fq9eEj59Q1oblKuAWKWew8IlZ5Z14Z/+6Z9YQyRlCfoqvZGwKAIWUgu5QErlGWYtuGItpKx2J+pWTpvFblMxnjM/sOK8ACdiyJsdQyhypcpSa3f0KB1FHSvPF1l5yAnUINE2TpDaZYNU8Jwpko2JNoI8nmP5YvfbHjLklBHASkYrUcxkGHh+iGuhDnoJaaekyVm8/2wDj9qWAM5kSlWS6an+12wXz5fvgjaFlCJUt5XW5m0QfCbgZYzNWwmpKFm/fs8KtmyFsVsvV9VbhmjLU1E2tUiKOI1RFNuSiyfmsmGdn/+92Jm0zawxknMr9Nrt4rCJkFQqbSvdXyVrctHb5pbNzrglnEqg1kiKZotjGVGclJHBshbICmOf99xy6dJGhiu5nEhWb+g7FKsMOhG0sfLvvrynks2ztpLSRh1U5svnVxV5wNRc8UaWDAqH+nK92YZ86qbMZTon3IWc2M4+9WUIrYot+6LEQaNhWWSIV5ZfFTcrZ/fDcs/5MuM/d7z5/BZdLH1jabue3lp6r1nyyt739H3L5493fPXiO5rdwDRe+Hw54fsDu6sbZg3zdGK/cxyvrzDGkHMg5YWhP8iZvk4k29C1O6xfCUsmAlHJ+2+LRjuHjpUaIjFVotHEUsgIXKX1PSmtXKYnHpeRcyw8/PKJ21cdKUwsYcS6wmV84ObqK5SujPN72nVAF08lo3WQAcoUTlvP08HuuJxHlM9o16FUxJjE6TKRqt8IhYXHyyNrXNFtQyqVaY2clpneOnrbQIJllpqddZmZk6iO+8OeNUzUsqCRz/j9wwlsh7Eeow1LWMlpYp1OlBRpO8ub3x15/9OZV6++oXOOzhaWkNB1FWhELriaqSlgfMTbmZIyYf7Iu3ctIZ8Iy0JeHojpI8vlBGrAeqjpwqefVo5X1zR7jcmJh48fmMsBZwdqKaSlYm6/pW/FyRPD1lmmCtOaaBqDbRqKbij6/6XuTX4tu9Lsvt9uT3ub10UEI4JdMquBq7Ik2VZpIBiG/3kPBMMqVZWVWVIySUZDxuvf7U6zWw/2eZHpgWqcDoADAkHi4r179tnft9b6LQ0xLjgGgcsREeUyK5Rnq+y6RBEbiEQi0+QQ4vlp06VTS5W7r1QRKTPIcg6KFBcynl4shAtAKIPRhZJYVQZlFAlDDKFk/XJ5DmIsZcNSlyqHnFkIiAJtdNmJLFZjpUo9Q0rPNQlyWQ49/1OgNHF5PkNYCpTVH8/vosBrFgxqKT4Xf7QuPqtdWi1QEfJicyzk1pQSCInUy4C2/GxZrOtCKp7x9sEHlFxcCimVLFeMVLLQAXPOf7Lc+h//+fMYrpBUfUNK4GZHLTvQisAE2iGqwI/Xv8dPDrX0GGxWZzw+XNO21eeXohClOE1rhSSRGRa7X2KcHku4UUjubhT9JrDqO7rmjJgyTbVeJtfAOB0KxWualyk6Y+sWKQ19v8G5UIrk5iPJtRx3RUXr37Y09Rtmf8eqPkH2WF1TmTMmv2e3DwzzyOXLM1atxY0jVsD5OnF3+8DVuS3KktC8PHvBSe9pssS4jEHx6tUXfPz4gdEdyMnT1g2rj
aZuNZvunMbWvPvxJ0wsqlQlK6RXoDJunLh7GCHBr99c4LTnrG+IMbK//cTd4/fUcsV0HEEnVn3Lxm64vb6l7zdUVnJ384Gz8xXDYAm5NG3f3U/U7Y7z8wuMaQhOcHv4SF23aFkRw8hx/BltG0aVqc82bOyau59uub6/5fFuXwKeFUzDwODumMaeoVlj1ZptZ7l/2PNUWZpNh6yg1Q1zysxxYvBHDvtrjpOjMjVeRORU2t83OmCIKBUZ2sz9x5l+e05KA7/cveer9RVdW6MaTVQB8kBbfcX+6SNT8CQheMoHrJLkY8ZKCypzciO1TyhdY7Sl71f0bcvj47F4sivBmE6EKOgvvyiYXiVIPqBNUYm8g2kSmM0IcyaLRKsl69Ul3//he5q+pmobUrbMpxlrEmMKOKGxveTx8R5jZnLyiASvX73l3YePKCuxVY+PgsfDRx6PM9NcjpK2C+TsUXVDVpqDG7i/v6OuI1VTI5ThNE6M8wOd7JCyKrSe6pHrxyOb7UtcDIQ40DU1IYWSI5GyZMvGIytTowElFMRzLs4zH375b8xhwprS7D5PnnGeidkjzcD9bmZbNYXMWbXEeWaYJtyiPEZO1FYj5CUi9rix9KmZKhCcw9qapmkYB8fsCrTAmkjdGbRqy8ksDEhNyo7Rx+VynHAuMgwzr99c8bS7Yw4RU22QWHKEaRqZ3YyfE5vt2RLUTzS14csv3/LTu9+Tc1224zkTXIsxLTFKYhwZpgdC9oyTYHZ7cj4yz4mcJW1r8C6xe/Bsz1p2+0eiL/YPpQKruuXpMNKtKzbbmpubn7k4Py+h6Gni/e0P/NXf/DXXv/xIpgTlt9styjiO4wPTKBEuowe7qB2F6lRiSorOthyPJ25vrnn37h39use5iXkuxeFygQGkNEIeCKlsTwvtiuVFW8iOvV4WG7EUp5NloaLljERjNAs1VRB8LsOiKMpVzoXkZStBiEUpZLGVFbVsIZfZhTTq7Ge7iFQSkTI+lI4pKdOiTpQhWSyZpZwXYqEBYyQ5lmWeVM/DUC7IflGQvhKJUQ0j+k/Keova4n3A5ILUFxj8VCwmxXZWFJ7gE6urlpg8bi75KlHuGIX8uNDBQjpRKUvOsgS+TZFPpNBkQSGcMZIpmbIQi5q7aG3kXKhmQjhcmPBzjfcFBpJjOVcWWY6cY9muqxpi6Z6JUFDrgpJHSomu00teuXxP5J/MKCk/l5uOKAlaNeTYEKRCWbm4M+TS9ZOwtWSeZpwzSGXQpnwPn7fZUgrqBuZ5IniBEGUxKnPJSsWUl2WeQunI/nHmdAStWyCQUrGlSgEKgYtHxiEiVUdIhTxcSJvL9yJD8DP1pjgtQhDF/ogofyenBTedqFQh/MWcybIUWqtFvQJw3tG2K54enhinmcenA6fdGS++fE29qambjsYq3PwLo0rImKmzZvNqSzzC6mzFrXvkP/3w3/jbv/r3PP18zeQOJDJfvf2SWZfMnZaKyrQ0q9fkasfT/TXjMDAMjm1fc7l+wc3hntEf6GuDHx1RRTZNQ1v3/PjuifXrnhQPzPPENHqGveHqxRsO/sR8OPL0dM/TeMD9DOt+w6o/R0vD+/cfOY5PqCowxxPTxwaRb+m7lrPNiv7lS9b9hhQzSmXmMHJ9c4s2PUa8IaSRDz//yHGE129+Rbd5S991/OEP3yOlJE2Cw+6Bx9trYoB/+zf/G8fDACTOt2elXNh0jM5zOpx4uP6J4wRWQddV9F1FyBN+nNBHga1qgor8fPOJ81qz7s4I0vDpfsaZA6e4ZrPt+XL9Lb//r/9ItSlnZPaSKhmiHtmcv+KrN19CDLy//ogLLzk+3uJcJCKpNmtUF4j5SKUqvnz9gn+5/x4ZI0ya4RR4//6Bt3/x78l+4Li75e72gfHoOV81uNNczquqYfNiw4frO7ZX5fvQ1BY/DjTGFsR7zDy5E7gJkAQfGYZTyQcpizYKYyVVu+Xm4T2HpzusVpyfXXB58ZKqVnhf+APaKIZTwIURQUaE8v+TMhGTJ4kyiBglsEuBu589GQk54r1fYCqC43EoSHlZBIgQXKn+mQpwxRhDVRXAktaFDCpSQimF955nYqhc6IUxhGVZWzKPpT/LfFbCS7myQetYaoe8RymJMWZRyMrns9YSQvgM3oFnNT3jY1romB7vyrAoF894mP+kE0yUyo9pmjg/P6dtm2XRohe7sSOE+fN/+z/682cyXMHhcECqEkaO2ZFcR4gK5yXOe/p1sWHIrBYc6czVy5fMU8I5T0qBtul5/eZrxmHP8XhAGMuL16/43W//hdN4Q5YzlX3F+59u+Ld/f4a1cDocyUkUG5ZSWKOxdsPV1SU3v1wTncIaOD+rubt7pG9aVFcvkuTM7DRfvfyGtmmpVYPkwLY/Q9AihKPSLUomfv5UtjLbL76gXUEtI+geoy2VEJgg+errV4jcMA0Z00dMnuiNxWhBJjCePrFaJbSxDGPmNE7snyqE0sADk4NuK5D9JSlOiLBs49yRlCquLmrCnPjpDx/JOvO4uybFgETwYtXjRoNWmpgF3tW8evOWOVomNzO5EWMlD8c9wbcc5wzZMbKniTXjbo9TM1kIxslha8PsNadB8suN4fJF5uPDE2O4LwfL5PnVd19yNzwy+YF13bJZ9dRCUwr0Min/zP3NjNEWkVvGY+AxRqSeqFNEBIEfNOb4gM+aPBT7magTT/trzi9eM0fJdJrZHU7c7g68VBPBD9zeX/Pt11/RiZq7D78w7g+8fH3Jz9c/olUg5YhLiaA1XdcxHh3DNKGM4Zs3b5nRnHY7wunEeXPJzYf3uFC2TGOe+e3315wejnz9za9o5Yr93YFu1TKOd2SZwRiEbuiUgtHz8e6a++k9X799ibQaF2E6jQT26EYQaXDRk/JEk4+YTdl8hzkTg+CHDz9RmZqUivKTUuI+akY/nU3PKQAAIABJREFUoXSL1RUuPGFMw93DPfM8IIhs+po5gNYdUPF0HHn11bcQBcfdidPxiK4z3p+xP3iEdDQrQdVawnHAzTMxCxqp+OLVFYengagjXg/8dP8Thz/cYltPbRvq7oyu7pFSUlmNUIHBSd58ecbN4x5BxEhIcyCKzGncobVme3kFqSKLEWsEVaXZnl/w4/t/QpuOtqvo2hatJVW3wc2nUup7dLx92yKlxEfPHIqS1LYb/vDDf2aaTxhT8+KLr/nhp3u6VTksf77eYYwhRodSHqM1m/Nzvv/+PWdnfQkUK83d/S80TcNpvGd/OHF8glcvXzMODxiTqSoBuefy4jX3D3uqJtN0iZ9++oV5FAgl2KzXXF2+4Wzzmv/7H/5PpNpRNxqo2B9HfJq5u0/c3kPfNUQ/EWNAojhbXfJf/vM/stm8xOiK2U18/PAjX339lhgs4unX2Mf/QKKjquTykhSAxPtS5vnh/Y+8++lHbGNBapwrg3/JPoH3Hm0TphJkNIgKQdkCFutYUbae/eveSWJ83i4WJ4B3C/XVbskB5s92kJLLissLKwZffPHLZ3weegpMN5EZ8XHk6WFAqnOELN0nUmqizxirqZqireRkCklruTBb
bQlh5OK8RaiJh/sjcPFH8MbyHprnE10FQmhSNDyXzpYl5bMNL9K0hhASPpWh7P9TiAxIpVA24EdByhKh1OdgtVguMUI62rZiniBnhdaWZ9hyTqk833FC1wGplu7GLDFakihDgHy+OIQDp3GEWFH6bRTkuQyPqRD9QpqwWjOMhhRKxqOqGoiF1JpEWGz4T4SUyXFb3i3PjccUmmEMEWMplSB5TcgNgT8OoJ8XuiJjq8g8awSmWHFgKWIuFiKtFN6fiqVQGrRZrHyxADRyTiQEXWPIKRYrVFIIWYZNJUwhDyKIKXJ13mJsIERLymrpqipD4TMpUMhIW0ukNJR6Figq4B/JlTkkdJOo6uJgmI9LKiU8Y6JLsbFAcvViw353YL878vPPR1598X/QtH+guxy5XL9h91Thxz1Pw56YIhfrDbvhyL98/8+ENPP3335JZiCaBqIiO8lxjDj2pBhJypJ0i3SBs/Yt1rxgCo+EeEcbe7IItKuG1MB+2HOaR9runFb1ZLvi6k1Hs4UP764JPtKvLpmedgQxcBweeLy/ZvfpEdrMqv2aVl4gfYtzie+++nve//wPPFwfEMmyrta8emmJWNxB8f1+4FdfdZwOkixnhHZI5YlD5Hg44PyMO0l+8z/9NR8/3vKP/+kfiAnGeAAteXv1DbVWnK9rPv3ieNrfUFlNDIGbT/cc9ydeXJ5TW4PRCZUkPtwWt4N8RddtmB6O7J7uC9VZzexPE133ljcvBde7Hdf3DwynyPlVz+76Ft8c6dqKzfoFUUu0LpCT+Qj9qma12WJ1xk17cp7QsuX1G3h83HF9s+P8oqaqrtg9zIsldMdf/O2XWLPF+8A4j3z5bebm04+oVx0XFxtev+r5w0+/w7sL3FSsbHGCsTmjFpHDzQO7h98j24ZaCmwSGG2RdQ1thfQJWynsxnJ1dcb93UTXN8X1FSPjKbHuX/MXX/+GupKEOHJze2AVuwIoiam4G6LCJZjGmegzm/ULZDUhKFZj50du727AN6w3LXVTE5PGTWMhmfKs4JeycR+mhbhnEEIhxVIYLCUpKEZXFjGflZ5cYDLPVRPPw1PO8vO5oZTAubAcyuUBds4hZAFNAJ9hGaVaQ35+X1gr6ft1UZxiXAaiktMUItNUmr61i9Oj/B3vPU2ni8pIcWxItsvwlAjRM88Tu90jXdehtUIb+/+P4SqnxGn/tBQVZj798sh6veX84hLbrNlNgnff/8jZxYradsRU8fA0cnnZsz+5wu3Pmcf9DmkeyTISkyZIwc9376m7FmVfkZNDy5b/5d/9JZdbTYgOL2ZWq44QZpwbMUaz6noUjr6z2LM1xhpcOvLNl3/BPBd7S9tpctDc3+4wsqJtWrpVx8PjyOHplhhHjBY0FzWrXtGaBpUtddXQ6UtUdoTkSS4yHTKVrvCDw81DsROue1rzNU/3Nwjp6FYahORx90ROAq0brq5ajuOAMEfiCSYyiki069JyHUGJjKosj+PMdtOjVxrhHIfdI72xrPsNVtY8Pe1p1xKZNFkkvDjyu/f/D31zRdYeH4+MIVDpC4QYqSuF1pLBB/ZJESRkBpybWJ+dcYieuraYlaWeDIdTwDSSNGdUlmy+eMnhuCPrmbqu6PpLeLrnkE4opWirmra75NPDjwir6NqeqjvHXX/PPN7y8+lIXVW8fvEFb6++ZRxgOB3IOXLevuA2/UQKAuc9QxxY1RrfzhB3GKl4ffmap7s7oq5RSnN+dk7VVUzB47MqHUFa4OeEDAbTCmIbmYGbwyNicoQoyELy4f6G1aqHWaO9p3UT5JH25QWIGedmtI7IymJDD1oyh8D7Xz4ytefUtWQm4MPID7//A6tuS1SRrASikvgpMfsJIYu0n3cB21qsWRMc7IeRFO+4OGuLnQow1hBoEGqHD57Z1yirmYNiPCX8XNCjvklUtsfPflEtBvY7SXQOLSTrrmP2kqKIR4yuqey2tMNngZEaK6BVlvnkQSSmeWI8OHYHz1fnX1N3DmTEpwnnI4fDiK87utWG7dm3fP/9PxOjo2oyVZUxKjPOE33fk9EcTxFjFdM8I1V5efspc7b6FTnX1K1Fmczx/kAUR5SIzNPI48MjipoYItYK6iYj8sj19UeGeMOcPG5e0fhHZHViP8yM45FxHPn11/+R2T0x+yNuikXBFgNPpwfquqau1+wPgkK7FsXTrg+chh3bC73Y3yB4wcefb1F6xqSa6FuM2jDyiRA6TseEG+759PNHtKqJUXDcBfycqVSmPVfoJJimxH6/Q/WWrt5idEOiZrtSCBE4nO4IPvDm1a9x7h5z/xs4fUOOPdZqqqpcWlMqxbBN2/Ph3Y/M00jTNMzOoXSx+Dk3Ya1CWI1zI30jsLZGqpopRoR6BhtIggtUlaRfWUiG6ZhJUYDwix0jE2PgizczSo+FThcV6LIlLP0ouYSRe3h6KH1hRWXJCMq2cDGNQNZYuyaEZRsqyvfy2aKRU2aePeS+XH6XuSDEuPSTRLQAKavFUreUSVKiQUoJ+r4nRs84zCi1KYWYn0t9M9qCDwdCMKTYIJDwTOlKqWTRVCQlRwhiyQRmkih2m1Kym9A6U1UdwQVClMRUAAfPQ1xK5Xu3XRXbdSlcU89Rgs9dUSkGlIbNZsNxZ/EuI2QoQ3BWRd1JAmtFWQyMBYwhhARRskGCVLQdU0p6g8+U2hENUi6ZiKLvCASrvqFrSvdTimKBfpQy1rIwTkChSMZQQVYF7S9iGXAEJaMhFOv1mv3hUKokhEQpUT73MvUKIlpLwhxIns8uv2KRLMXHKUZy9KXYWWhyloSQlgwWy+85LtZbxXbTMc+B48kT8zM2/pn2WCz3el36iGYvGOcSuv+sPIolk5ECr6+2bFrDu3jPx+tPvLpdsw4X1LdP3G9+RDcTh3lCUHE8BY6+ZjwemWZBrRSbpiUlhxsC2hraVY83iuiXMmwkRmhEY/jhlz9Q1S11LVh1G1TuEFjC4Y7oAm1tiA5WVYvVgiQnVFWw9IgWZEaZNd9895LrhxsEK16dfcdXK82L71bs9zvGU8CNEyZaLmrF1JzzevuSb7/5jhw0u6cjGIsn8fB0zS+fPrBarZEo/BhJvuP87BvG4Y6cHS/PX3D7cWI+eLbrDeuuJ7NlDEe+uDxHSZimGuSRi6sVw35CCsXmbMvUWd79/BFty1ItzQGTEkJWCNkg7YZ6MyKPR2YXqGxk3Vt2hwfeBYXHU1mDNCtUlEgi8+nIdDwhrKAzicb2pEqS7AmZRg67T6Tl4rzdvuDw+InYt8SosVUF9AzDgDKJVkm0qnn7+lsOB0e/kmxFz/19ou5LTUDMgix6fv2rv+Ph9pb1RYdSAp8dps6Exwe0tBjR4MaBbrMiTxMqGFToOdztuewNyWWmSRGpiU7wy9MtSlfUdU/fbYhix6frAxlHFiPzJEhsEUIuVr6ENWu6rqWqbakKUg1VU7KyIQRUDkQ8MUmeTiPxEDju4d/9m78rZE74XCg8T/PSMbVUZfiI9/OSr9IoLYvSJTIylbWTlGXhpk2R8Z/VouDTksssM0EInmdK4edBCb3wDSR
mYQ809WIvXwY6EAzDRAyBEP2St3y2QEpEVAhZbOjF8lugdkrK0lW3dBWGFJesVVxyYhpj1sRYuv9SjgQ//atzzZ/FcIUQpADkUnpGKi8I75/RrRqELk3UyqCExs2ScXCFDmdL+e44Oh6fbpFakxEkH1Fqpq8vaep2aYWPrDpDW0nGKaHkSNNogi8HvVYSLQR+HBGhYGJrpVDC0qmKutIL4UehVQNtKt1TeWZl1zxFgRQeqYucPxyOrNqavjaoVD5TazXTWOhhOXpyKOWObhqY50Jb0afMulujK1k2eraBrBFLw721NU3XEUTCx5noAz4ElMiIHBYFKhFCplttGceJcRjRUmOMol8ZalVjTYWRGmlLBiOmgE+ekB2ncaCpNgjlgZIhi+FI3yaUNUhZIU1FlhVBy1LKqSS5Frggy8sB6M9aHh/2KOFBBBASbTTD0VHXGmkrhFJ4I7CiIhNKoWySeEoR3Rg9/nQiTkdkEIhkUaKlMitiDqxWPTlF5smTvKBre9xxwHkHIlLXPedNR1hk7UoqJj8To6IWCowmColqDG6MJS8gJVokyAmhK6TI6OAJcaZabEkuecKQ2GwvQQayTEgj2dTnOLtlmE/MOdCsDGOa0cJCzDhXtiGDGfFCkXLEKMU0zPh2JAQFwlBh8H4kzBFkQOhSJxlOnqaLuOiY4w6VPSGWThkEuORwruSyoi/5BpUzybUISnu80RqpMqdhRC3ltkZGhI9E50pQTipyNmhpGfyRkGO5uLhYLGBSIEUiJ4fIeiHHZdIznajvCMkVoItUHMOEL4wmXDjiTp7TMKCriFQWvRRITrMrgfQlF6kFGFssfi5MeO/oVq85HUsOI4tEVVW4qbS4g0IoyfF0KqWHKZYy7hwZRgcUylH0gRwDbaPY7XzJbeiWeZ4JecQ5h3eRiR3zHLDCknODFJYQPYe9p+/W5dxQE84FxiHgfIJoWHXn+P2EMg4hLNEL5smhVAEsTFNkih7nd1xdfck0gptKB0fIESXbZage8Mnz+OjI65r1akXfrRnGCWEjwziV2ommYbz/ijS8QYQLtG2paoOxluWWWRSp4Hl6fCTFVJTZeUKIki/zIRBiRMVIzp6q0tSVYp7MogIU9aZk5iKVMWhZguw+xAVCsPQJZUAkrs7XHI5LSFkIsizDwiL2oBVY0xSqW5afPyeCRQkrg0eJKi8FtOWVSiaW7I1IlD46yMvg9JyVCiEhRFhQ23Kxii30qOXSnFLGGKhqxTRFYsgoueDOl+Gq/AiLBS+H0g/zR6lmUZ1yJsUJckRQlKTnfFkWy6cWgiSeZZ6iKOZFWWGBJpR/L3leNxfVqoTCyw1ESkH05XeQiTwH2cvHKapKXqybEEsJ8fJsZMTye1rskFkt2QxRgutBAKZkm551PbHYJwVUjSnv7VQ6bj6DLD5jJDNCRqxRuPz8PApIgizyYvuElOaliuW5RFmQWNQ9iqVRCOibarEgRWIqP0uRxUKcLJY+72dClMhsiGHJeMi8qGn58yWtsoXcGEIqmTpR6JXL14AsMiknrJXk5PEBYlBQXpF//H2LEtxvWo3RDftjxx9+uuX65oDWNUpscfPE2XeCVskCnkmZYS60yNoWJW8MEWsNITuMrFC6WLLSdKRUXfjS6zdbXBoQPiNVVWIMOSKUxFRrOqNIHGFR+0bvmL1jOgRWZx1G14DH+xPKbmnshos1bJo1tWrYvLjAyk+M5kjoIlU+wzQJ8fIVylrWqzNyjLhBEjVIEVnVZ8TssFLh5oQ/Razs6TrNMAXc5EDVHOeJVdVx1vc0TcXj3lGbruR/rMHYntZ6snd4P5RuqqAwVUSKQIqlbD146DdbgnfEnDl5SNIWN5MshEZly3fF58jkAiEL2k1NcieUqSFKgs+QSq7JO0+SAmEj82nCzwNIizY1Td0wTidSbhBKUFUF129UA3kk+hnvIsfTRMyy9KJKQdu1+GFAopesv8BWFduzc0xVzsR50sSYqRqFlgprDZ2uWa1qUq2XvM+MDI7TXlKuLoqYJ5SuQJ3w/kiaD+TTE14FpAHvBqbTDiksdS5AMWUsbVODjsWOlwsIbhpPqMownpaCZaVYtSsOvoY4IoWg6zQIs5wZEKUsiyst0SRCTLjgMTpjbKGlilzcCD5GSKIgzrVGy4SLnhDL2a2EBqNJYSznkJQlSRoTcrFHF2cASJUJvrgdBAKly5Ij+khOFMtxyEseP1HqVhLGsqjbaiHAZqQq7oLiXEiLbTqT4mKjJhGjK2CbZZElpV6Iic/nr/pXx5o/i+FKCoWgRklBbSv6vnRhTKNH6tI/cXZ5TmMrGqtQUiBTDUGxXfVoZQlBMI+OedqRF6KSIKIbizU11tQLj3/AjQMahRYCo8BqaGxHrVeIDPO8x40TYVYEHKhchiMfqFVVyGSnjDSKjep4mHakMKO356gkaRuDMQ0kxe7uiU0TWVUW4SL7/Yg1J44nT8ozOTtyzFSVKi+E6InBcX9/Q9tAu9YIUaN0Q4qCplsBEqVLhmS17TkeFHOciEKQhCK7SNfXRJkZQqJuzmn8DfvdgZxgu+55cdHjhrI9iNlTr4rn38UBHwORjNWSEAekKVaI5MHFO9bbLUlKYlBIWWN6S84JJSXGNniRyAL2xwOSyOXVhjwkfBpwyRPRTG6PANqmJSvBHEZ8bVlXK+b5QPKeaZjQtibVhmE+EscdMkw0dk1bWeqqp+Kc3X7Hm9c9dm6YJ8luv+fiVc+n+9IBZJsGZRq2zQWjFoToSGHPZDQqKQIwkVBJYWrDOCbCnBE+ouoMEmK2iCQwPi1lnSv8OJCmGaE0ItWk/ETAkY1kffGaNEumcSQJSbvuONyeio/fe9w801oL0jP5gJKapmogQJAjLmpkktRCEsMEURNTIU5iLO54IsojLp5I6pFW1MTgsV1PlnDcP5HDyDw5ctIIEiHtiPOaptPUdUdV1SA99/c3GKWptKGvaxoliRlmNzMhaeoVQjTM4YkQHdM0IaKiqQpyOxFxIVHbjjweEVLTNA1WGKpWsb8/EVLG1hum4BC6lF2P/omH+ztC1CgS5AZyh6occXIc3YiQCW0cXlZ0zQbvPc6dSNmzrgTz00T2iSoW29zD45EpRDKa1Xq9XMxhnj2n04Qg4cOIVQ0BjwtAEPSrjvEwkI3B6DN2u3vQD7gZgi8b+3EUNPUXGNUXylI8cTpFmlqV4kbVErxmvz/h5nKJe/16RRA7shxRolyGT6cjm7OKFEsWJ0dBli1VVePdhJbQtpLjnCB3kB1SjDR1xd2nA0YF+raUhlqR0VYz2jLYyqyoD/+G0dcY3dK2LaYqZdM55cWKJfnpxxuOxxNN09A2LW07MoxzUXhigfbEEBAi0bSWqtKcdhKSJMlEJhFjJqbIqtHlezsnfMgooxBpKcpFILVg3V5w3D0UPLssKGoRl+EmF9x8TnbJaS36yCInxRjQJmNtGUhCWHpchCIhIRdbeKliyMS0LNdyMfVlBCEWG55SJbid4nKRzrIAGCiX8coktBafX5w5R8RCwkMIRC6ZrBgCBAGp2P2e6YbPw00MAx
mJEBbx2SqTiJ+tgwqRIMWZlCo+oyKygM9WxYSQZYPqXUNehpi8dFwpCi0veE9gZhwtMRVLZVG6NOWSECA7rBXFtpOLTVAs04R4HvAWU6LSFiksmYXWKtJivYnP4yO2UiUrEcuw99kh85yRyJksA01dc5IGkiwgi6xIIsAS189pYDiOTK7kS5/VVY0iibT8DGDb1ggryJRL9TMmIxJRy7DtXRmuchCEaD7n8lIqqlzKmZgyxgiG05FhVrhYIWVZbj0PrWXyThhbuv+CK9brbOAZxVwmYIkkI1SkajXb7Zr8wzXX1w9cXrxkm9eEk8GqPU17JAbHPCd8lkseSzB5x9M4o0xP1A6fFNFrglT4aSCnxDgP3O7vqGKibSTZT0xjxvkWIw5gMqvNGa1dcTz+gjGK24c92UVEyhxuHdJKmrrBhMzonhjnS843L+griwsnkrT4vKZvJW19QErHWf2Wx9M1XS+ZQuTxcKCvBHVjGOOISJl1+wVNMxPCRDydwAnWmzWNHdEiMsyJ2U1kW/Nic8V21eBy4GG/5/L8nMfHJ9q+p+9aTJaMD4/4OODnAupYXVRs130BG4VMEIb24gXz4ydidOyHEzFNaBLaBtAelyxN04EaGPeeac5strCPJ6q6ResKIxNx3iNizeBmokooJQix5EQ1qlhujUFZhdCgZaZKIFSgb86J3jFFx+BmPn66ZntxRZgEUmSquuX0eEffrIp65WeGOXLx5RVRjKUwfZakDP1mvagxDev+AmUzWWvmeeS0f8SKkf1O0PWautYokaCWyF5yejoy3F1z3M3ozQWXb1ZkP+KedtRVOQ+8tNSrFWeXPac8ElxVIi3jgB8Sorpg9zgghWS9bdiut/ihR4qBqol0245h9AUIJCVZJuY4sNIWTSB4x+w8VS+oaovwEENm9gXOZYShqnqUKcphihOTD0gMnbXkShbomCh3LdAoKVFRYK3GVgapFCmPBJcILhO8QBmYYxngYoAkBClKhIgoVcQNnKCpzVJS/CeAjMUvXAjFCzl0AXQViJAihPQnPYrpM0UQlnqIyvyrc82fzXAlU1FQmrrGDQ+8fPUGF/aQInXbsjFXWKtoaoXREj8ZfvPX/4EUPfvdgfvbJ+YqYpsto09oW3FxvmUeD+weT6g606462tWWuykQpojJmrN6RZ5m6t4i04BA01UtIpWui65X1K1gDp4vX7/k5tOI8w5t42JXELRbS8yR3XDi/MwwugrvSqHlqq9wU0RXmsoo2h6OwwMuZJrKYHVDCgJtM6v+gmEMxMOBqJ64P9zTra/wQXDa7+iaFmFqUlJMAeZpZHuhMH1F3XUYYdG65frmA4cxonSm2Wr2B8n+FEBbRNI87CW6kxzHE1q1WFOXhyefkFpha1WC1VqW/rAIPkFUgdW2JyA5Hm4YTkeUzvT2jOmg8E4UXK2FaXLLBiNy//AjprVYXbbMMc8c3Q9crL7CO8E0DBynPXVTyoLdIHBTIKSRly9/xc3unugHKhPZri6JU83Vy0tUlhzvTlxcrhh3mTieID5yOj1x0byl6xQhGXKuOR4PbGyPjAIdDCC4aKCOhmgsoarJTc/h+rfF7pEj85x5cXWOoOdpfyL6E5U6Me0F8qwjC43RBRxC9EzTxBwDSMndzcgcPFWYscZSjz16PjG6PVVVse5XXF2t2O9GRDZIJZEatFVMaSbPJ5ABpSrGGZClyV3pDCGjTaFiWdNydbFCeo+Pjqf9NT6WDe3bF5sSTA+CeU48PgaQI6ZaYZqIqgaa6gXfrr7i7vqO6Gf6yzVawTwc8AJsXXHxxRX3T4+s1+ekEAh+BpE4f9nx9AjzIGnsK0IMGJOW/IZF2DUiHzAW/Ow4jXsq85L9/DtCaJFZEqdIZWE+GNIYmesjpjlhdM9hviVGsHXLqvXEPLA77MlJ8/r1d7z78QPT7FmtViBqPnz4ie3FJT/++HtiTrx48Zrt5hVSSqZx4ng4YIzgeJh5/fZbajuwV498ujnRdl8RkmP2M+PsyfLIcAhUtmK7ecGbl7/hn/75v1DpC7Qw5JgQYuLqasvh9AEhEqvuAqvOqNszYvQ457m++QGf7rg6+w4pYUgHVv0Z2+2MkjXeKcYhs+7+io/vf0LKI30rOD9r8XeecT+QskNpwTdf/iXn9YkvvniBkHB/+xN/9evf8Pt37zlrrzDNGnH3FSOWyrTYqqVpLMqU0kXB0nkkBb/97W8x2iAQzG5GikJwKkCHyOQGcsz0XUaptFywFYkywEkhyETcPGLOAt6D9xZyBVEti6Nl64fi08NHDgOE2Jci3lCUlhQDBavg2e2v8fOKQpwTC8Z32SbqiJIZJZsSepZFUcl4SBopIinNBTGfujJIyRootnPIhDgzjh6lKqBe3j75j4rMkkVzXi+Y9j8W3j6j0VNK1I0EZQnBkrP+TL6TUpEXoqC1muTLYF5skuWZFLIQU1OKoDwpg5srSAqjNWIBUKQSW8NWZWoxao2LhpRLl1/wCZ0pFrZQiKbebYDn7haB9zPammIJCoFh3KMPvpTh5uehySJEJIZIDDMxlcUf4QJSLp9nGdakUOTsCWlGK8vTbkDEGiHtHxWrRXQkJ0R2hayZzGdFTwiBkuXiUgaY0iUmqZDCgCqkNCEkOZTts5QJoX3JH6UlE7Fss4vlqQyb1mS22zWHQ8KH8jn8QogsKqUsfUDL9ycliQ9FuZMsBLOl6FQQyUmiTIsgczpNdFtTfodKIYTEB4+xmRjGct6pilXfs9/t+PBOItIZb1+f8eH/yqy/+0S19pja0jQtF5ue6ehpQqRdJ9zpCcuKT5/eMwTP5sVLsi69Vbv9iQ8f7zk7P6N/a3BhwPtSdL15EXncvSPrl9R+iz9lCl5/RhpBXbVsN19gDbS1AQLOT1TWlg4ylziME4fhyPnLFU2bGAfHYdzjqx+RrWZ/N3A8jbiYuTz/NdsrOOw9h/0JNz9y9cIi3AXrqmJaH3l5+TXH4YY2lSFye3nBTGAloI7lvP/mVQs8Mgwa6xt6GraXK3bXN9RCUdUrZFNhqzPOzh2/XN/z9HAgWUGYM4ovkHom5P/Of/3dO/7Xr/6GyY8cTyN+9nzx6hX//f01h8MeQcLea3YPI0P/gNaRnE8451Hmf6ZuZiSO4wku1mdoJZDZolSF7S27+w+I4YSoDLndsNtnfnl8R1dl+rrnxdu3zDOsGst8cgQXqbvFIXLUAAAgAElEQVSO3kqm45EhK6SWdKvM8eYakRtKJr/n/K3EXzflWdSBYX/DcZphE0k+4B5mxvs9X//t31GZCrIkJgPCkqSnW1kOQfJ+94lXK4s9jsgQuFyvkLJmPv3MKCMzK6q7gFEZMY1Mu5nTmFmdtejZ0aHo1udcvt3y6fpf6M++wNYNUlVEpwizJ2MRouDgDZb9fsDqBiFqyDO/PIzU7USeJSpXtN0VtjLEMEOawEWk7RChwfkJKT0mTgw7RWssOXriFAmTRtqIsJkgDV5UGF0x7GSJANiEMAFJR2Kg35bhzPkTIJnnYn0nRbrWQJa42VGIhgWAkUNaloh/7OkqsI7yx48eU
FRV9Tl79fl9sQxlIbh/da75sxiutFZ889W3pOTQGv73//gV1592VNUldWMxleHnn39h266pTMIaSd1uebze8+LlivWqxjkDsiPkHS83l2jVMA2RF6+uWDd5KR47IFKHVZq6LYhTNwWU7KlNS12tyWSG+ZFmVbPZnJFzZvae4CNP+6UM2IKykssXr3m4/Qi6JwXYT2By4P5pj9KGqrJYK1F2xW5w2EayfdHzw7t7lNLs9zukgFW3Jk4jd/sTwxBxPtGvJNELHu9uCMmRo+foLU3XkrPBh8gwHrn9+MD5xTkyVoRZE3zJ27S9wVYSqTLv332kaiUpF7VMqsRPD9f444pVl1jXkdyOqNRx2O+RTtCtGnanE+gn/CzIUVPVhrqueHoYUcazPpNMY2K/G9DUGGORSoJN5OwWnKikaXqkWWO0Z+JIJnJ2dcnHH5+42G6pK0VwgThOPJxOiKywpmF90eHCnovVltlZYhh5efUKkxomt2eY9oxi5PHjD/RNT5IdqIrzizP26R7bXyGcwx33VEoStUQEhUIi2i3bt5fc//a3YB05OXZP17y++BWzOzCKATc5fHJkf0CphDGS1mgGr5nijK0tVVchZjiN98RYMgdCwjSOmLpiFJqoBDLNPJweeHl+QaU1WmZkUGyrDYfDA3H2oBTGXBGEZ9MXfPjheM/5RYcbLc7tCe4IRnF1+YLbxwODG9Fa0GjBenPBYZgR3qONwSrLHD1d27BdtTR6w8QnpKqROhGYef/LL7x5tWGz7jgdPZ+uP/D27WtiMjT1GevVmugSIo3EBLNLjKPjcLom6VekZMlG4tUT5+dbcuhLekNkRBqYnafrtgQeeby7ZtKO1arn/uaGeQpsL17z+uUbutqSfMZPiRQL4eiUR4yJrNuWYRLFUjdIxvHE0+M/UbeKut4yniaeHp6Y4yd27+7oN5qUBQ9PH6hMgc9Udk3XXCDUiaeHW65/vkPbjDRQt4L9YcbWNVJnpmnEe8uvvvk1w+kJNzmub9/x9uuOnCemcc/948DhuOcvv/sNsvfE7BZVQBFDoq5bjAkMw4kvXn7Hum+4v33i/tMTjTWcb17w/1L3ZkuSZel13renM/oYETnW1NXdaEAkBRpIgpKeQO+sNxDNRJrBBKDR7BqzcojJw4cz7lEX+0QWYSbhGoqyvKq0dPfjJ87e/15rfWseEyI4ZGNJPPL65QatGqTw2NGBcrx4VUOqmIeZ0yEu9rINEk1IIx8O32H9hB6+IYxfMI+S7qRpGvOZ1FRVClJJ0+RDhj/+4x+Zpolim0tR3eTphwFYaHg+IWaBUB69BTfnjN40BaRaQAKLJpSipSgiUrUk9FLAKhdLWshZ1rnjcLY4t1nsov7zWhWjw+hAVWarb0rP1L2sOhACKQXKQlJXhkvnEWl5bZE330JoXJhplUIrw+QiUOXfR7X0DqRAXWtMofL9ZcWCuXaoBQDnnc+5PPJgFaNCityaopE5RzdbNjtFtbrh9KgZbO5xiQm0UPgQSSHy8tWKsk4MfbY1mkIRFvR7Wpwn7aoA4RHCIGS2v7jocoFtyLUkUvslv7cMqiJ3TEkpM0kvBUwhuN6/JNgqK2fkQUbqpcSZnCWrG42U+Rr/sx8Zs11GRBCO/fYl/dOabMbMQ3AuEV2Q7soTosCoFSHlg6o85OTagxRiLjav8r0jhcpqJYvBUECKAaMl++2O1cozTBkzn2K+/iHmcug8QAl0CXcPHbMzS0wgb6CkyHh/SWK3rvFuZOgFk825P63VQhrL6pZ3HmTCBYnzguAF2qR/djkyGnqm0Ao7eYYhX8TgJzSGpLJaN08z337xghdXkfPFMk4z1y9f8unDLxwOA3XV8PXXJaa6UBYlKVmGeaASDWFruD+9gxi4Wm3RSE6ne8p2nXuOpiOPzuJTYr/f8he/+4o3L254/+F7dCVp25JSwfneIfUbrF8ze8/T0ye+ev0V1+aa2Y9ZiakT3TDgEVhnuZx62mIghJ9z35w06FIz90+UrqCMDRHJ6eFEIT3BT4g0E6aJ//Zf/wvb+gXVrkWUaywaJzZs1q8oq5mpe+T+7h1G1ry5+iLbdPE06g0qDXgqlDC8fL3i8Djz1W+uGIeBjw8/U1eeaYZNu6csCpCeMy6j9ivFdq9ZFwWXxw8U9Z5K1TT6Ff/52xWyfUM5Xoj2iLdHbp8+EceJq3ZDEokfvvsepOJ8ubBfb9hvtgzdJ7ryB473kdkLrKx4+W823N69z0orin4e2KqC7nKiXa9p1yUfH95TNzUaxdA77u9+4ttv/x3vP31HbdZsV1c0OlGLhn58wgaLqRVKvkCJikvfE9PAalVy+4ujLQXjpWfsR7SuCE2idiVxMtggufrLP3D16ktimBn6I5fjJ1qzo65LhI74OvH6dUtT9iACk+3phoH1q7/gzZf/C1U5Y6cn7n75nmb9FVUR0Qxo36GD4fThnsZcoYrA/DizZcft7QWzTzS7CrUeGJPkTz98QhcV6806g13UJufC5YSRZ5q048u3X/P+3SeOxwvRSN5eVwRTE5zATYnHfmK/2WJY51yUm4lxQoSIMQZZGdgk0rzGSc849ZyPDxTasNls6YYO5z0QIRyQsWYuErpIufYhZXul1iL3LqaCcZgxtUHITHGNKeJd7lxMS3whpbRQDLOCH4Kjqio+l50DQmTS4a8I9v8fAC0EGW+sjaZpDddXNefzI3VVUFcGU2hev2yZLiPSaIpSo2Sg744cjwMuWKwbKCrFpt5T1Q3eR3w48/Mv97Aw8IWQtE0ONSd0LoUtJM7PPJ4mNruEMSU2arrhiCxW+Jgxlu265TJcMlbYKJRRvP9wojAKoXPr+eguzNEzxQ48WGloixY7PWJKk087LYgVdMcHpnHMGEk14oKlqmtEmW+Mzl5QrHGix4YB70aK0NLsm5wTUYlm0/B0OXF46lByQiTBOM7Qa5pVRI2SFDVztGzX1zw8PtF1J3SVQFhMoxjjhen8QN0oTGpJOpJkwIVIPxyIcUZiEAS8nWl0ixIBERVhlmglOR3PGJFhBHVToKRjGI5UZk1dramrDZ8eH1m1RV4sJaRUsdo3+DRjEuxWV0QZOc8HZKjQssSLSNNK5stEUZSU65Y5WMbpjFKaoikQtcRdeuq2pfeZGFWohuP5gig9hIhC0FaaUEtsnJDCUNYrfAJTF9kD7EeqMvJ0+cA4zMgkMEZxPM2smoJu6oneEcqCulYMfsJLj046l8lGg9YV1lvmsSdE2G1eESaBDzMhDrzcXpGmEQqF1AUh5u+6qjQ+ZkunLARxAOk8IgV00JRmg4gRY1Y4D8fuzOFhzGHWUiOlRWmFdRaBR0lHcA7ntsyzyORMMbDb7qjKFV0XCUGjZIHSnt4+ZriKcshC01sLWmDjxOGcP48uBM4vWHLn2G6vmeeZEGaUMlSV4Nj9QiG32ZucIv2lQwuDLhNKtuw2b3HOQqjYbb7Clp5xitR1Q6UbJjvg5g43TaRYUZs1SU7Z9hQa6nKNIqGEpKo1V69KPt0+4Z0mJYV3mqJoqcoqI8f9gFATkDBKY1SJ94m3r/+S
oZvye5cNb178HlTi8ek9Qihe3XzDu58/8ur6S8Z2w/l8YBwHmqZmHAdMEdlua25ubpjdsORdIs5feOo/8OrFb/BBkVKgKlrsEHhyT4SY2G2vCXQEm5jGAecsQkZG/wv79dusJiVJkIF9tcdOIdsCpUWJESMl1g6EmJjCkf5pRDz+HnxJCDN2NkhVIZRiwaoipcZog/eOw+GRH374gaIsM0FtsfalBFrpJVPkiXHGxTy4eJ8W9UOTCAgZCTF3T+WMU8TOATvnTFBMFmK2d0kJZS2p6i1Tb4hhgRAslq/nvhMpQESd8zs69z2R3RpIslWvMNlkn2JaSIAs+HBPWRjqKqO++y4h1K+5JZHAO8t2X5Oiy3mLZfjLJdzZEigF1LXGzhbvSqQoETLbE/9HCp5zE2JU+FAt1rmUB4iYcmaMQHwu2KUAIRf8fR4KQ/TEYHMJ6mwIMSKXMt/8wbN6mHAI4XE25BwSkAfa5f2I3G+VWIYIW5CWgEJKcfn72f5itORqt6Xvxa/qzGdSV8pAiBiW10vkQuOUv6eU6ZBhGXSlCDgXSalaBrUcRH/GKz+nEupKM035VFgIiZAiQ5bk0r1FtpI7GxfrskQuroHEgmVOYiEDOqxNi0r1fCXir/ZPkahKvVhETS5lVXxWCRMZ/IEI1LXJZcZCL3ahhVwmJTHk69s0JXVtGCfHOOVOsxhi5gWFfF0AUpyZp3wgCjLbrasCPwdOp4Hb2wu7G8P5/Za0ssj1kegemVctq2ZHDJbRzripw0WPSjlLR5lt6bmCoKZsVth0IWAp5AqBwYXA+fRItb9mGPJ7K5uKkCx2npntjA2eYbrlZrfL8bmFNu2DRypNUTUobQg+29TquiJGUKpEp5Lh8o451pSrL3n9xdc0q5Lz/S3H8x1zP1AVLWN/Io4RJfL9raqa9eZFpja6ju544f72PX/1h98RRWKOueJmnka0qNluaqpC4eYTcT2hSoMuC4xRHE53uGFLpfe0u2u2RiPigaKpSUnje8WXr7/l3e1PjH3Oce6utvRR88UrydDPXIaZzXrNer/BjpHdasOLqw2VHAnTwHr7mm1RM+CRFFztrhn7kWl0GLXCuY6i0ITouFyeaCuYxiOl2uYskZLM9pTpdcrRTWeeLhdEFMiokSGSZoGmYBo8TbXCBcen+/c0SPZvbpBVhfAJFwzWjjRK0xY162tDl068v7UIlQjR4sLEcLpntV8jlUeVgt2uZZpmnO9JMrLar5j6J4oXb6irFU1VUJsShMbLinLbsneWNI+si5fo5EFY7PgEVGjVcf/pjg/vAqB4OnTUuxUiNPRuZtUqPn18YP9ig1EBP11wfuDeBDRwvW8oqkj/8IgtS0RShDlxOg/4NjIFh3cW6QJlUVHclEDIBzihYBICEQpiHPFx5vJ0y/vbP9GPUFZbXr18y36riVPEFCVFYVBaYt2Mc4LZBhITygyEWJJcQIoEcSk4Ji3WQPG5JD0XFOdnptEFMSwdXCksZcI+P3+fF6ZfXYb/rz//KoYrYGmPlgu3H8oqUJglzOg9bQNuiITgmOfslffBMtliIXpkP+Sq3WBDzCAD5RnnM4py2WAUuaht7HEx5KZ4k4k/sxsxfsSJxOQjg/OU80QiW1N0obiczxhRZoCG0JwvA5tNzDe9DHgxM9kzXuZBUVUCVdUM3ZmII0qNFAJLZHQdc8wlsEN0QEmp4dnb2U0nlIiYOhDSRD9dCER8mvFJEMj2ipgi3dkhVcxN8s4jMKTZk6aEnSRFUeYS1ZhwPuCnOcudWuB8HiykrUFH0PmkafYxZxhEiVZFXvRCwFn32b8a8+9dtniIgDFp2eQPCKlRqkKpBjCEOBODRimD1IJpCpiqYB4O2X7Z7JmCpxQTKhakKLmMJ9bbDWrKi3aUktN4RsaeRu0RosCnBMYQVO4EESF/Jhkyll1jqEybW9NTYE4WSURFy/H+liI5kmQpl1U8PR2ZRseqbmnamn4YSTIXS1vnEF5SXikEeumIcZAcbvBUdZlPR30uwPRL6P6ZrVYXFcN4AZlAKuYpIhqyzWQBbXgZGPyIwkFMBC+pQwAl0GohPqWJEAUpOKTMZZfOJ8bpjDE5H+RtYNQW6wTj4Ihxpm4rNusGmAheEKMiMdEPZyCrFknAME4gIj5YfHBM40idGkLKpz1SgBYS58a8aUyReTJM85FSS4qiBiE4nSd2mxJvc69FXW4hdbg55pxRFOA9p8OBSYx4a/HWomWNDwqjW1ySTJNldo7dqoI0IfGZRKnVgmxVVGWFNjdIVeBtzuNUpeZy7nNPXVFQFS2XSVHVFdE84oNAm5Km2hDlhEgGJQxNdcWq6SCmnCM0imGYiSFnNwWWIBNX+y2Pj4dFaYnE5ICBaRoAjQCcFWjhmaYBJWvqqmUOE8Sl5DQFok8ome1PxigUijlaal0w2YRWhqLImdKyXOPsiE8OUzqGs0Vf2qx+iEQIBq2fN4+S5zSwVor7+0c+ffrE6XSiaVefyxQzECFvLgXPw0YgJEFR1gv1TxBi3sxLIiLmYUKKvMhYm7NQwAIOysoLZKx7oTOBL8acz4kp54liytYnrRXJZ0XgeShIpDxgkXuMhMiZr/TsVFsGEe89dakpCoOQkuAtWj1PRLnx0AdPWZqlAF7me0/G5aCH7A4UUNcFk3WEIBaYA//DIvq8qfYL8KLg87sVz0CKiCBTPZ2F58GChZCXCMToickhlcd7TS5tzmh1FlUpq1MeU+Tievn5dZ7fSbZNIhN6eXbF+Ayz+BW4EJdgt1JgtF7Up193BYmYD4F9PiAoK423OXP3TJPI/2Zc8ospB+sJObeWFjWOZ/CHzJTHlGlcKUp+hWcsoIsFpKGkpCwV1s0knluK85eRlqFOSklVSKRMOd8Vf/13njc6uQQU6tIgRSJFlRXSlLKSSFhKmiOSQGmKfP+ieIaVPINVSDlyVTZ6KRCPy/qmCGG532X+k9+fY54t1kqQGexSVzUX29P1E3d3Z/Y3O/pDAVNFmSrql56xX9ZIUYERjDIwpYjyM1omiqqkKqsM25CaKAz9PGT7OCUxGlyYmeJAI1c5CyLV0rc248OcC1F9BGWxbiJ5h7WWED2qXGEMCK2IItMlUwKXhqzsBoEwJbqo0aam3X7Jmy//mtVaURlNiB2yy6Tly3QkaU1VVpR1waq4xpSGJPJQKrWiqiOyyCAdFzLoKqVcv1CVFdVmxeN9R1U3SK0RhcpDVq/BJqQ2CAzWC6TWKKkYesvUe4bdHiMCnZtJQlKZEn+Z2a1L5nkEMVPXBfvtHl87Sq1IybJq15znJ6RUFFWVlZt5RCDRssh1IUlCmtFl8XkoRVqc7fG2xhQ1hSmYphM+JkY/MwXL6XThm5cv0XrMQJSYsDPIpPDzzBwsMYIpE8l6pNAURU0YHcolqEAZTWEKnk4fuL9oqnVLUSjQGicsh+FEWUgaI2mbiilE5iBIWqFNTV03WN8hxoqyMLTba57OD5w7QVGXNE1D9JpU18gwEeyMnWekLkkp4OaOvhtwo+LY9az3GhkTth/pvcCOkjDJ7OQdHePlibM
ONG2LKRXJOp4+3KFvXmWS5GA5Hy5I6TPExGSatcFiDFgbFmCFZoqWUpYUpkCKBj9PnIczLgqMkAhTI01CC4nR5fL8i7+WAS8qeyIgUkkMPuc3RYbI6OeeRX49FJIyWwRjeBZj+OenasuzSyBzvvb5Ofv/8fOvY7gSiaLKy1QMARccVZ2N55MdmKeJuk0024bTqefY96w3Fik8ymzRKFKSWDuhxY7705HRWdqm4ubVG6bBUxUbVqsdq/WKy+WHPNgIEFojTJG900LirKObLqiiwMaZotCYosDFSBQeGyTCgyw8upR0Y4csIMoIynHsn2jrPev9js1mjzFbhrHgNDyy0pHdpuThp4nocqmkKBKxsDT6JcoUjFNPN54YpwHEwMvdDQGNtRGpLMN4gFjhPczdhPcD4yCJZCKZMYbtpiYmwThO9GOPFCVPT2eMbtiuDZfhljDXuOQwRUFVlQydw6xslk69ZHaC3eZFJhoJjRAKieD41KN0mU+GCUzTSNWWyGiomoqm2fB4e2S7eYORDSlIur6jWWtEFNTlCl0q7u4v6Moy+zO6bNG1obudoG7zRtKOPBwOvHy9pdmsGAbL4XhgnI+8vKqwQjGPieN5QBnHPHQ0qzVGSZ76A9dlQR9HqqJmbbaM4wNP84hNFhU943jPzx/e8/pqxW57hSkbzvPMMEl8CKA1ZbOmEoEoZ4raEGOg7y9UTrHa7nHRYt1ICPDw2HH1okBJCTL3KNze34LKJChlGvrhjFCKpAw2Cs7TBcqUMwlSkrSgt5Yne0CJPLzOoyc2gtqs0UmRoqJur1jvWn5+/0eEiGzWWy7nwHQ5cn29oSwakq3o5AmtGlRRkZzj/v6Jsv6SFLIVbHYTLn1CylcYnZUWO4+4edlE4NDSsduWTL2lrNY01Y4ULcfDR5qNoVAlwcPTfYcPiYt4YrV2lFXFbEGXBX3/REwRoxUyrhimX7i7vSWFwJdvXvLnv/8jIibatub66oY3X/yWu8cLPlVYK+kmzTjdcr19nYtXfcDHkcNBQGgpypb1esN2+4q72wv395/QheT6esc//sN3+MWm11Q7zoee49OBqpSs2zWmqBmmJ1RR0NYvSAmsdXzz9Qs+ffqewEhMjkTidPS8fvWKLhw4n+9omxVNU+D8lOlHQvL65Tfc3Xak0KCU5OHxxJdvr3P2RgAqoVSJ0S2rpmEUI5fzhasXO2LKhEqjDdPoYLTsqj2rVUFRBP773S9sm4bJWqSxvLhumLoKFwUBg5IrJA1SJcRzSasQWXGIiR9//IGffvrxs0UqpLxh9iHTHWJcNs5JELxEykzhFIC3YcnUeeKSoyKKBcubSGRqXAwgVPaoZ9JSRGsglHhvl+FcLZSzrOJICUVl6J8EmcDGsqjFZRjIQ1n0Ztk4AzGT7YRQBO/QbaaopgQuBNSiZmSU77KxlgDFr9Y8mRbce7YOCulp2wprHaCXQcHn/7ecyic8dS04nch9S0Ll4RO/WNjyIFNXJaenjJKXUi1zSn69vCmPNK2i7/MSnWELMS/cAMmjdS707S4Ro+TnfiVY1Bzn0SoXp2olCT6jw5+HTikl85T7XYQIDEOHsw2f4RVL95MQkhAmyjKx261yTmpRNiS/KmFpycBpIzDGMJyznVAseOUYPTIWGfYRLCLlYT+TG1kOK/JnFUJSlJrttubxyebvXcQlU+VBgo+RpjDsNhVVZTOqfWmvTsu/kVW8nENrao1QEr8MRFLkTJ/UiZjkAnRJaBXxHoKPBL9Qx5ar9llR0xFrZ3Jlm86Hiy4idVg23FnhrWrFPHis05+H8dV6zThNzNPMp9sT3/x2j4+edGrQqeLqr0ZOp5Hx4tht1nzxxQt8GDiNF8J0ogwlpXnJpi5RMg/E1oJEU5YNghrnYAye2IApEvtViRKS8/mMFAJlBMnmnNl2s+X+9iOBnhATIta82n+LNMNSGj9DmFAuMcUJ5wzO1ui2YrfZo3TLerum3io+Pt6xWhn2L9a0a8Vm+5bjn3uaeke9rmlbQ2kqHp5+ou8HsJGmueJv/nLHh/sD524mBMlmtaFozkzzI6ZYU1WGeba07RVJkYFZqqRdf0k6nuimns76DIKqEtYquvOZ0/nIMM/8h3/3DX3/I+e+I/aBw8MdWrwhakfZWoZekpzmam/ouifefTjy9uZ3yGbgcHmi8IH96xueDj8zD7BdX9G0LdGdMeoKpENogSFy//QBUs6qIhRFWTLbjtlHnBtxIWJ9yXa3Z+wPjGHConi8TPzm+po/f/d/MwXPb/7qP9DWM+d3Dwhdo4oCLU+sncbh6FOPdyNquHB2G+ptTVU3+TM1Mz/efsR4TaxXtLuSWmkufeQyWpJX/M3f/Ft+/Ke/Rx4TbdGyub7ijz/f8fhxZLPZ8+L6hm1SXPzP1G4FQRGQFNvEPBjWuw3rfcHwOLF++ZqqjkQ/ESbP+/dn3n7511RCogUUdc3p/XekjaKf73MWVjhO74+83WwyKO5yYjgcePXC8OWLb2jbihAGHj6+Q7qCODj8HJDGE6dAsd1R1zVGtexXX3NjT0xpAlVQmDXj2LFfbZBkBHzfD1TNBqEclTFIVeCmXFiOEEQyiVQrRVnkzG1cXG1KLYfSKeKipx86tttdtlcLSa3LJXPLQjLNCti/9POvZLiSmOWXstA1n+4OrFc3mVg12Sxzbm748ac/f+7fOF/OCDUzuRmlSpQsqeqWw9lyfzowWYt1N1RFSbNd423kcD5zGWBz9ZbEEyF5YlSISnI8aXTTZBUsjnz17St++fGQCXFSc//g2DSvcLbDJUtyAU/g3H8kdAobEt2UF6j97ktS9NzdfWSafuTwdESZiAs7grd4/y6HEZvl5ikTXTdy7O7w/kJZaP7jf/xb/tv/9U8cn0aSmKkaSfQwDh6tL/RDz4ePj/z7v/5PiHTEmB1FqTj393gfIGkKU6F3icuppywL6pVBK3h6ilCcSPMOLVtMUVA2txyeHri5+oaiKpjEBfDYkK1rJAgBUgHNPuEDDH3CJs1uXdEUeSHoxpHZwd3THW21oq3XFG3N/Z3j1f4F0ywJY+Lrb3/L99+/AzbYaHgYTgxhwl4Cc6UpCs3bL77m7mFi7g/UTU3VVtxfHHu9w1QKWXgUM3GW4Gd6mwdW9EC8+Q0PdwPn8ZG5OSOoKVcVp6czYZrZyobf/e53OAJDTDAMBBV4+fWKaQ74aeLD3T2yTDwde2LSGS28ilwmR88jEYdMgVo1NOstvbsgtUbrBgJ03S3t+gVKtDivOPmJqmrpvCdER7nd8NgH5vGAMCWyWdFPIyq9ojEKZcBJR7Sep/4Wo9Y0zZ7XX+yYrIFU0A2f6MePhFDQljdcThdcNXL98iqTjqjp+oHJBbwt+Id/eM+qrYjRM45HbLrjarth6rvF4leRXIkyu3ziK2Z0EZm6A1JLYlREJ7l+UVMXLzk+jTjraGvDL7841jvJOI5M80xRlvRThhQ4m+jChak+ViYAACAASURBVBQfWdevMOpCJLBb/wW///obni7/gPM9pJLbu8TD5SPd9EhCYfSWWr3icnnExTNzOP
Pu3cD/9r/+7xwPf8SHjhAjY7dnnj3f/u41Qg6cz3d8+fYFWiueTj/z1G25fvEtnx6+I5WKCUM/zEQGar+nXa/xoePQ/YQXO+YI/fAIePa7L/Fu4O7hO6RKrDaaYTxxvf8Nl84zTTarsdJwfb1Gihx+3+4js/9ESAI/F4hU8+J1w/lypNArYvTYeM84vUApzePxHiUF2/YFl4OiaQKX4UR/n/N1Nt6DqkCU2LEl/vB1PnUWLTGuSCI31GudLWoCT1WsePfuO+4f7phnS1nVWOdy9uVzg+6ycU8p2/pwVKYg8ISdIt0gmG2JKRuI4EIkBY/WjtkGrNeEJJcT/rCoGwlTGHa7krvb9yTWCCWIImbV1GmUciTRM0wnRrddKHV5jJFSEKznar+mqTP1rusnlGpziW5Ii3IkkNLSXQIhZveDQBCj5/njVbVhtS44PEx4J9DaEIRdijGzxTHGmc32msNhyhCKxfqRUe7ZGiKFxxiBSBUp6c+dVSIlohcYZahrQ2Ec3mXlI4nw+ZTUu0X51XnotVNBRrXnTJdQCbwkJUmhK9armnMV8E4uh6hZtpMCXAxoFdAqv7ZUhpiev1PxGT2+21S0jcZODzjbECOLQrScziIpCkNZJJQIBC9RJg8yzxkpgBQdUuX8W2latC6Jn/NbaSH0igxUkB7rIaaGZ+x69njmwVZKAeRn0PliCTEXogq5BNKSIgVLDBMhOKYJYtTElIczmdIyyKts9fQeH8GNktkKbEiYhUeSosBai0yRdVtSForzweFcgdSaGBx8pkNmEJXSLBUOIlsRybm9ECzJLYcXSITw+KQJKSvRQnjW6x2ztRzjE4fDiV/eXfj6myui7wniRDcE5ihwnJnsSHcK4Au2qw2zuxCcpT9cuPryS4TQpDSiVM/L3SvGseXxcGL2A0JbalNx+/7EvJasVxt08YpVe804vsfPPdPYYd2BlzdvmZ3h0l3o+p5LNyD1GW0KCiW5fzzRlBW71QqpE14OPF0+8O7dyHb3Eq1+4B//6/9B27Zs6obT0DFHyW42yFXJXNwyDZ6nvuTLV/8z++23SPGRee4JQtGFLaldY8wZHXtE4fC+oE+B5C1JaN7+4TXnp8Tx+EQaEm2zoRsHptMZZSJVmVAx4oNg7I60q5LXX32DSpohGEylSdPEh8cDm+qG7vwRRAChKIsVv7x/hzKvqKotL1809P2Z48MpZ0CTxfcrCtkzzZbz3DNdNGrjuDxWOcdpNEVT8uLF1wgrkUScC/SngRdvNqTUcegGnE+83r/ilw9/4uH+go0Sva7Zv0o8HL9ntapYxYLpcGT35Z7R9czjhbLe8s3rPxDiLX/3/S3ncaQsDW9Wf+Dtt5YPH37gpx9nVqsdu9cN4xiZgkJR8urtijDcMp5uOZ56RjTf/1CjjWTbGgyB0+EdRS/57W9f8/Lqmn3dMj7dM7kLH3/6gX4QUF/zl1f/iXY/EY0isWKtErfHO2woMjTVFKw2e4S+MNkZ11sm6yjqDf1gQM4IESmU5g//5t/z/tIxDmCo+M9/+28RYsJ9/BMPSWLLFc2qZj78TKFaiqpglj1fSknZGPrhE0+XE6vmFevSUoSI85rQTWhlcf3EFAIugSlqpnBCkmEeCElQR/rRo6TCGJMppz674eJCxHXOobVGL88ppQzb3UuUUlnpXdSrcRyBRF1XFMbgrf8Xx5p/FcNVQqCLBp8Sdp6pt2tO3ZG+O+PdCHjmjz1F0bLd5pO3+8M9TdPwdD5h7QWEoGlaUpJIlb3DSIcqCo6XIdtkgFP3A8PZ0rYVZZl9mpe+AyKXMVuMhFGczyBkzTRbZnciCk2hGy59n4OyheZ0PPH65RXT7MFa2iKyXe0YxgOXy4VxulCUEWki292GsnTY8IGr3Z66fIkqHFHMTM7hYrbH1PU1u80bXl7/LUJ/j9QOIRRKbVnfbHh4/IigxugVX3/xCu89m80LrJ2Yxg6jDVLUrDcNMc2MY2K/uyF48NGiteR3v/+fuL+/RyAo6wiy59OHR7744rdUrcTZGescUhZoYJxGrBuwYaCuWn55N5EQVFXJ19+84acfvsc3K0pTA5Ky0Vg/IE0iiMRltJi65XA5Y3TFZrPl6sUNDw8D8zxg/cTd8Qkp82n1ZCtcKDC1ZHf1mkP6gMcyh8D1yw0Ppzs4fMjyrpKotuJsT2hRUemGRu748HTmcD6gC82kGkQaML3ASCiNQcTAOA6MIlPEnBsY7RklDHW1you3OzJ1M5u2xIcRAciiAjnR9xGtFWVpEEqhS0mzvmJ2lqHraUzJbremkDW1KGmqElc1HM5HXPQIpUE3+CB4PD8S04ypK5AaYQL3TxElazbtNWP3hDEB1MQ4PfDnH97z84d3vLhe8fLlDcYo/vzDTyR/Zr/e0673FOWGv//jn9hvUy7nlgFTQxCB/fWGFKC7GF6++or7U0cUmmgd05TYrq649CeEtkjj6YYJKyEMp4xLxTE89Ww3I/dPF8bJs9uvkXrm9c3vcWFkdmduXrykvzgeup9JaWa9bmjKtwz9xNWLt0hh6JxjpZ4IuiC7BBOn7j1BzLx58wqtCuwkmFxPoUrq6prr6z2qEAQx0Gwcdu7pup6ZQNtUGC1JqaTQW6g2CCRFoalqiZQDX339BY9PJ4ZxIMSJogQbfsKfG0glKt6QvGe3LTHmGmsjKWh8PHB1vcobc5+4XEYejz8talNJVAprH1mtr5kmT4qCt2++4XK5p7NQVQ2rdkPdVPzw3d+x3e6QypPEzHpzjRQNx+Mt89yx3cCsZu66C85OzNOITTNPgyTGidjvSE9v0EXChZaUahIaqfMzLHdMZWte1w08HH7OhwNlwTS7z5bx5w04nw1ukJJCBJnBBd5yOTu6iyKlChd8th6lBCKgjEXKrF567/PmGE9KRc4WWUs3ZPJqTAqRElL6fNroQaaA0Yqqrjkf9TIYZPy5EM/0PkUrSozKvWfPXVGkRCRmy5z0S5dZgaQhpBmBJMacKYthZJwmpill+JHIS59YbJOChNYwDAMxPmN745L7ybALrWC1bmibgvsoFrkjZPVNKJ47pZ4BCtlOlmkZMUW0FNnWSEDIXMidQkUUecgRiqyQ+JDtojHirECK8rPFJdvwUlYOU8rIYS2ZR09wOYvGAuiQC1JPGyhLSQw620fVM/0q21pi8GhJLsosFMRsrXkuZw4ioqUmeI+SMzEGhktJjOXiOl0GEmnwziOERyifraJBEGQuIFZCLdZIifMdkhFrQcgKeMbP50ylS1lfUxKQkeNpoOsMUZa5FiQGQON9towWpcKUmofbDudrYhQ47xExoY3O+UCZKEuTHQFzxLqAR2B0/t4kYlHEItfXG2Sa8j0cQWlJsFl1DN4jpKTQiWkO2KAJUWUVTQZmn7PbdVUzjRP39x1ffLnNABWv6P77FXb/D+z316zrfGgnYkQJzaa9wQfP4Xzi4fYH/F/lTjylIhd/ohs8qDEn/XxNd5x59faaKAQXZ/Gz4jj+xNw/EgPUxZpmV3H/8BGjthi5Y7MbSLpnHB16TmgtePX6mpjWdMM9l+FIN
88UZk9dN9RFS1nUUF8x2yeMKWh1g+gnLnfvePHqa6ZO5mxsitzLP5KSBmHR2lCamtP5Z/7xhzvqpmbTrohDwzdf3/DzT3/i4fDI4VCiaRlCj6lb1qsNbd1i/QfEymCdJwRH02pSqSkpKFSJ0hWTVrz/8XtcsmAqVtcBLo6X+xuG8cKp7xjtI+dL4Hjc0tQCIyXXVxs2bWIaOlxwXC7v2FQbNruJoZ+YnGWjr4j6lmAN3kdGm0ii4mZdMFwsRMn19R4/nFGT4kW5w5qZDz/9mbev92hdELzF9kfefXehNE0+SkkOP8ycPma7sEoS6SRar0gI/vL3gl8+fuS7n24JRsNTzdPDiWHwGFNhz5Fdkx0IqI6PdxMpTGzWr6lMztxdm4Z//Pgjv8yB7WrPt7//C97cHDl3Ped4YL6c6C8j66sv2L9eU7qJXnj+7p/+C+umJapIYQzXqy3r7ZaH2wNKebSBwZ2xdwElSmII2DDRNC2PT2dIE5UJ7Nctf/7lHYdxJsaCumjYPlgu44mPP92RVMnNV1/wym6p2h0+WOx0xtmR17/9S+axo5QBs2qAGZkGog+ooEDMTMOJy1Qi6gJVGpKLtKstSVikEoDB9YHdlSZGgfeC0eZePKlLlBEonTBFQMhFiVosgFImxmlASYMSihgiWpmFPOzpLiOEf3mu+VcxXHkfOD8PQEKxqWqQBqEMRMcyLjO4EW9dtlloCOTT0iQlSmVv9zjObLZrqqpGilxeOE35YhsDQk15sTEVQggmN+LiTEwJb4e8EMaZ3gmikoAFCVJtCMz4NBGSwzmF9Y5m9RrrnnJ3EiHLiumCNJFKtJgSCgJKmdwtMHe0zQu0ktTVFS44no4fcP5EXeRTvn488qfv/09W22KxdJARmFKiTPYbV4WhaStmdyKljpDyqWddNShVE6Ndwn0JmSIpaaQU+JhJTELV1LUAAtPk0aZEl56ERSpD07TEaEmANhKpK7Q3SAoKQ86wVAWX0xllJLOfCRGUrBFiRVXlwmPrPeM4UjQlkx+ROmKF4Kdf/sRsJ5SWmXJ0PlFUgU1VEQmENCKDZrIVUQaCs/ghgE4M40SlCqTShAh+nCmqNi+agwOZsE5gTIlShhAkwfeYosW5RHSBGYdKAS9Mtv5IaOo1w3BGqER0OaSvS0liWtDQgr4fKNeRQu9IiLw5CI797hqhaubpTHAXdFPhowTlQcwEB5fJEWUgJodAoJRBVYmXL24IyRG1om023D58RKhAFILJTUzznMswlSFFwbm/4MOMUjdIaoKHoqzxs+DcOeb5zFM35J6HJLA2EmKibiEmyfHU0VQ16/WWh7sHoi6oygqjAsl3lHWiaCu6YWCYLmjTcH1zlTMdMRLTzKV3BOGRpUJHSRKa7bahUCVt1RBp8c4yTj0hzISYB30pJC5a6nqN0ZoYBp7OAz6EbI1KCS/nTP8p82lyVCNGdNkLrw2mKDFFyTAO1E3O2rnZI1VWbIJPC6BB8eqrrFxJmfHhLnQUKuc0lQooFRm7jnm+cLV7TVO1mKJBxIHoBHWxoqoUwUtMuUcAds55mrIscf6MdSXRq4z8jgXODzgXgAIpdsTQAiNJOHzqOZ4HhJb044Wi0KzXbymKlhAUUhlEMMzO50B+AIRBG4mQmrLY5RO0qKjEDiFqvG+BCiF03nQuBY4x5jzPp08/A+lzQXCMC4WNtGBoF8BDfAZBBBKRoigQZNud92nJ4fnPwX8pEk1Toc2iviw5l6wEZUy40YmqLghTSYi5eFQmchYqOLTKNg2lDM7LXIK+bHJjyq+pVQ4Xzwu+PWX/eB4ghCSGgDHg3a+lxc9Fk/m/CMnjfCCl3CkF+VAvFydn+2KpsnoXQx5McxbnOY/zDG2I2MmSYj5IyorGc+YoLQNQHoqEUJ//zrOdz4cZKQNaP18nmYcrCSIKRMxDamECpsi5pBSXa/scXfocqs4QkqpUzIPlGdKRxxcBqMWJke0rUpiswiw9ZSLlIsyQHImFzpfIitznotz8mjGknH+qC9pWMA8a0q8Fw2mxIfqlG00qUFpnG5qSeRheusBCEpA8KWVsdfDlZ4WRzwXSyx+xBM+lxof8yXmOwglIIaBMoiw1ZVVhbY8PuURXRJFhVgtyWUtBYTIh0oWU7aVS5OuzcDIyXCsP+M7lDdnn7Ppy+pB/xzxCJ7Qu8YG8rsRnm5CgLGti4+m6M6fzhfv7jrbNACh7kpR7cJNnEhLdGEyp8zqsKpSKlGpgu9+jtEILg6GgOx1woSJiic7hO4FH5twe4GLEhYEYPWLJ+oUoGOYBgSH3nqX8vSRPWSuiTQSXELrgcDmjLEhK2kpTiBW61QQ3YYWgXm9YlyuUKDAqUBYelQJVmZ0yWkFKjhAcwRc06wYpBMMwU5tAokfrgrouqGXJ8XFcnkeQUMgiEQeHjRMzBV61KPUGa075XvAaG+Fmu6XUEjdFpnEiAGaVEEFT6D2b5jXeHEEHpi4rqO1qyxwv+DDSjxaFyO4oJUmqYrYDx8MjcafY7Eq8m7l0lvnsKIs1Jml88Fg3cPEjWhSMQ0RLQyVyd94XX39DP525P94hi5LgEuvVmthfOJ8PbJuXOTKgIykFRmcx8YRcYDyFqbDBErQkyAppWqqqzWW/nUOqgmZdsFpp5BSQyeBFxKWZ/jKzXrUE75A6HyAfLw+5skDAbEduH+6I84BQiiAjc3KkUjP6GbMuKb1gnnvc2NHpiFECkQynScI85ueukf8Pde/VK1mWpuc9y24X7rg0lZVVXd3sMZweSgAB6YI/WoBuBOgXCKRIAsRwht09XV1VWemODbPtsrpYcbJaF5rrUQAHCSTyZOzYEbH3Z973eUFHyDP9aaJrrtHaYlVmHmYElqZq6CqFPvsIm65DVRajJfcPPc3KcvFih1CKuoYw7XmYCyWVmJmHiR++v0ObBWPKvXuansp9Qpxps5yHZ2JBLBNhSiwLmBcvqTqLzBUpKmTsMVIhZMXgJce+3NdQ+cu14TmjNQRfPMC5yJe9c1jDGXYRiCFijEYqWe6N/7Ll6l9HcxVT4HA8Ym0xcTqvkEphbEMCQhipmpbD/Mg0RbTSZdMVS4EqlSxTN1XTB1+Kh5xIqeiSQ/SInMgS6togczE/uxhKESPPmRoikkUi5Ik5RKRqUapo+5XIZLmQhSdlR87FyCdljdIGpXTResdCimq6NUZ1BeSQZnLWuCUyDpJVp0B4JBUkwzxG0DNSrRAkZn/gx/e3vHz1NcGZcsOUmRASTb1BSIE2oIxjnCd8HFDCYPWKpqkRsuLUn1iWhRRhXALGKpSShJxwYcRai9IZtwS8g81mgxAe5xYkAmstMWYWN2IrQaXLVH+ZQNeJpqkxSvP5/h3tasM4nIhhobI1QjcIoYnRE/yM8yO1tEgVQSWWtHD/7oQVHbvdBqU9LkzgPTSmwD/I+GAIQ6H0hOCIbkEacEs5v0pZYg4s00K33uJmT/AeTIZUwqO1tighCPGJGDQ+CMLZQG5FKEVULj6QyrY435fiDHUOw9X4+RGpO0iZYTqBhbWqWEJk8Y4gI6uUIAq8L+hl
5xeWVL6YKidU9hznCSE9PnggE/yCwrHbXhJFZsmJ3eaG2/sjRkfEGfkbcmZ0M5VszjkNlrbZFLmh1/gwo02LnxPD4DlGh1CJr19fIhBFnpQklV1xDHv2hz1GGS62V/zpn/+Jy5u3BSmvCiDDmpluY3Ehcuxnmm7L5faGlBQheGY3sIQTSYC1BqMsm80O8ojSmrpuQFpuH34ipp66MjifmWdH5pGmbjEWjI6EZeLp8ERVbUAW+ABag4745EgxM4ceqydSqkhIctakBP1wQJlcQAZJYVQuJLJE4YjpzPaiIFgFmpzAuR7jWkgRq0szc3xcmE4BOolpFEZAjoZlztRrjantL7k5bsYtmRQlm50miT1uhuAUEkmMFf2wh6wwpgStityh9YyQjpACblF0q4ZxGICa7eYrQswsri+AAmNYlgljLcGJAmCRmZQ0dbUjTjVZ7OiaDcvcIEV9BlrIs4E/ISm+GTfD0/4T7crgfSjyKFnO3y9I2fL40nKVZF+qqkVJe/Y8JbIIyJjRWpWGRELVVAhRKH48e6WEKJuPnFFa0nUt+1EU+EB5hwFNTrGExAtJzoWap3Q5khJ0W5DsRhdohXMBQaG75RzP2vci/9MaYpBnqEPkma0H5y2OyuXaQF3Qu+IsPTz/vqAgzfNZ/pxSRqgzmVA8iw9L0b0snpzqZ2hdWQKpIikTqoA65jkDz8CL598uWw9TQ1WVxivlX853EhIRy7CxrktwshCaFP+yQTs3M2filTGSykpyOgfz5jOmWKizl+sXCmFpvjNZPgsvOTeyAKUpT+mcVfgXZ+8ZZqSNpGkqulYx9bJAKASlATrrBwuqvbyPQpZ8K3k+f6loFsu2SZf8NB9iCYUmI/J5e5Xzl8+mkoXammPxvKbzsPGZZ5Jy8bjVlaS2huCLHDIhEFmWgOQUSTEirKaq9JnUeL5GiOemWHyZWispEJTNW4yQMqhnf4UonxdyPA9sbQl5DmeYSRYIVQZ75EhVVfTjwt3tgXTdsF4rZh+4NIbjqScsBtigKomLHh0rtJDUpqFZrRCywBUUFcNxoF7XeBcJzhMjqKpmcQW7L0RGipLno02DSGXDNy0DtV6jpETISFaSEDOrtWb0Z1lvFgxzz1o0NNUWbQXS1+jWcjzOJQ5GJqqqZnhyZTtrDFbXZBzKGKQtnXgKCh8kRrXkHJinI61VrDtD1xoqK7Ey8vH9E7q2iEoSk8U0gqqXzPPMnCWjbolc4OOpZKkpmGaHlvYM3XIM84IVCr1SiAWkbKnrK6TNPD58YgqBJA3byytc8OQ8Mc0F7rUJa8LiIWuWpDiMJ4SusbtrFqWYROZ0mvnmxRUhB1hGskskETkNDu8VxiSGKDG0bG9eEp6A0yP6DJhYbVrqHNHLwMXVFX0/4QkkIk5lmsqjnMYYTa4KJTpGT7845iiwtSVFSih902Brw2bd4u97YkgEJXiO48tZM7sTUURkZZhdz3a9IVqPd4GHh4+43vPi5Q2iUngBymqmcEKbEqORU6S2ElRCSxA5M7oB5WakUiAhCsrwU0m0tWhzztLzjlUNXVVjlSLGI6qqsE1HtVYoHXnYD6yqFZcrVYZ1ITD2Bx6GkbZaY4RhOEw87N+zuSzwKSkN/fCIYcu6M1ibAIdgTRY90c0k54lLZtyXqBIhTckqXCZErFEyQVS4Ag9GSVGy/c5odiNV2cDnRBYJLRQpJAL+7NnKpZZ3C3Xdok1V/Kj/wuNfRXMlhWCaj+f8Hfj0+Z5Vt0JqDV7S956LC87mTkrgorKkIKmbhpwdhVZnkbLiaf/A076YDb2LvHh5iXORZZZcXrzksAzc9w/EUAzHxnY0rWG11iQC+70kzJ6urcv0MnpCPmCqy0KVSxklI3bVcnv/xMXFNevtDYfjHoTkcThwsXnFdrvl3c+/Z54G2vo1ldqA3WKNp2otx+GecZyw2tGurpjmkbZdcXHxGpE7YvJIGZmXgcUNVLWmq19zHJ5Y3AmhTuS4RuAo47xyE17mSEoKYyrq2hCXDmU889yTc2K9arBV5u7uQA6CylZsNiu89wzTiA97pJJ88+bv+ec//QPaKLq2pW0bjK3ohyeWxaO1ZdVd0a1eMQx/JOOp6gqE4MPnB6RKGJNQZkG4hBUWwlykWErgXULIxGqleHXT4BOEc04LKTM7idZ7BJbgSvjh5eYl+/4Dc+rJFCO3kpp5PCGloq4qKrUDa3g43dMZxaZb4bzk7uFPbLqvWXc7hFBYs8K5iXkeGKeZx4eZ1brh8OhpW8v2ZkPOmc8fe65vOqwtutv+3qIvI84XCpM0jv/4Xz/y4uZrbCUJwvH+x0+8/XbD/KSJvsLWLaiB27sT87hAzOwfThgb2a3eFglUmjk9SYhrpJho6oaLqxf82Q88PN2zUxfcrK65XL3i80PxWY1hxMWeZZCkBazVSKUR2aJ0xfEwIIRhtdry+uq3fPj5/6Rb1bSdpqlW1G0FLnPwB2Z/YFzueXGpGJ6eCG5GaIP3juwibSeZGOmne169bni8k+QlcbG54N//u7/nP/7f/0S3axnmnseHe2Dg8spg7QX9aebTpzuW5cR3374q25nZMS2Jse/ZdN8iVGn8oWP/+BNxswCwzJ5ca2QqKOiUHLiR4/ADRlXU+oJVvcL7I42ukVXCi8wUAn/6+Y+Efx9IThEnRQwTp/iASRVK1Qgym2rhunkFWdEfBsg9F+tLlJpZ5p6QNG3bcrt/Twyaptqx6TaE+ISk+NcQYHXDYXhimJ548fKGzbZm8SfW7Ssa+jLdFzXX67f0w4+sq4bK1nSt5Id37xnmOzbrDV1Ts0wPXL/8K8b9wrj0BfmuBUFF2vT35HCFSwovVtSdRaSC0wbFHD1dXeGXhf7xnusry+OxZ3GBGDmDHf5S01AGDM+aMpElglSagNqCXPDREZMrUjZdkNqlcM9Mkzh7lErBmnKGKCGV7CdTWcaxx4jiCQopocTZN5MXXFCYpUgGhSyN1XPfl2NGiLKNmBcBVGdYQinuE6WZESKRc0VKJQZAZAFaEn0J226aQHSacPaGZQpFD6FIOaJVoOs0p+NUNmBJnsnpGakVcV7QMlEZiVEdPjqQZ1gF8dy8S7TO2MYzLhmfmi+Y3wyEMxa8qSxdU/7+HG+LyKlIAbMk+EyKCzEEFmeYl7YEZwogi1LwC5AqYTVUWhCCJMn45f3LibJZllBZRW0V81R8wiKf/UIIUvL4IFh3gqaClAU+KYyQ52IjI2NBmjeNxGhB9jAsDqk6zi0WUgkikRhAK1BkxjGjtT6DM0pQbxaCFB2rTmFNTQiekBXyvEFMzyj7LFFK07WG3drw7t096QzOkEmWQYyAmEuRVxuBjo5pCUTU2SJVICHyvFU1SrFeNXi/EJ7x+7IM8hCyQFyyoK40u43meCiywPI4r8ukIoeIyJl1W0EA7zIxliZQSIMQpdnPlaVpdozjE8fTxHqtkKuKp9OeX3eWh8PP7I8TD4cKbIdtA/28o9Yr2trw7qc/4L9bQCmysYzR8rJec/d0QKmKzbeX5DGATpg6o3QmJsn+IaPrhrq2dBqGUeF
7QbMqeUARyXwMLFozucCwzMS5583ra5QHmS1SarLtSUQur18QU2KcJ04+84+//57NesfV5RVr1fDwaUatEqpKKBMx1tJ/fqBrVRnq6sxpTtxcfEVMmf505NHPyFry4qtfMy4jT493SFfRdC1uPuA4se8l/nRL1jNN2yClYpxP/PjjxDjOSFnsH3V3w8PjHSlaWBA3SQAAIABJREFUpIxU84lf/epb3v3pZwSG9WXHaldx+4NA1Y459vTB0S+vmR+OVKst1IpunThMD4g7hTY1plkTRkfVKu4eHggeVvUVF13HON9RVQKkYHKZQQT+8O4zVixYY5jSnl6s0D6xubzmd796zeW25f/6L7/n+DiSRWR9Y9hcXvPD7z/g3AljKza7maH/QMgwTiND39OIS/LrRFOtaesV1qzo5YElOKBFmxZdSz58viXLM6AIz+9+91uaesvh0DOOJ0I48eAGqlqx6laQM58+PlCvIrf790yjIyXLxestbX2J8wMherSW2O2K+/f3zGMAq6gvO95++w3jounnmXlOvHh1g5wjp8PA7XGmDxPf/uotvT/hF8nKKF58V/E4jPSPt/jRkReNqzwqXOK0QKSJ0+nEegvHKfL4JHGzQOqRyS3c3FSsu5padlQapimRpabbbPju5hV//v5HHu9GkAmVPPkIq4sXhDQRfOSiahmGR7QGERUiGkQ22OzZ7LbIxhKNxAjDdPyIHz111XJ1c8Vhfs/t/QPDONOfWrSq+Zce/yqaq5QC281LvHO8//kWbYvsqq46Nm1H17Tc353YPy7sdhu6tmW/P1FVYOpipI2+4vHpjpdvDcm9ITiJiz11k5nGDKlQuFKoIAdqWyPrkv9StYrPtz8xLgt1U3Nzc83HTz+TKNlRKRapkVJ7uq4lBI33E5v1lnnUfP78QE4Ba1v+9t/+PU+P/4mff/6B2zu4ulzx6+/+Z969+4nFPVB1kvXqDZ8+HpnmI9oIri5fs3864b1jFg44IvFleq0uIW0hGo7H97x6WbHTDbPLHE8/o8IF3erVmZay0PcH/uqvv+XpoWEcRublSMq3WFmhtUaJFqPWHE53TLNDCYNKmcUlfAjkrDBmja1ajk9wc/2SxfckFnx64LjXXF63OOfxbmR3ccH93R9ZrUdcdDyOjr/9N/+B03jLMnmMNLx4dUNVvWX/9HtiHkAmCF/hQmL/6BEi4pxiDgekSFTVGoFhHEeuXmgqeYPWNf20cP/4jnG+K5jzQeFCzVdX14zTghQWqxLCWKzZsuk6MgOn8Qnbbfn64tc83j8yHxbapsVI+PThe+qqoq1bVB2p60tO/SfqvKKqdvSnniQ9H29/QklFZVbUjeXdp39gt9txcXWNlDuW5ZbF70FJlE6cTice7ztyLFNQhCeKmZRH2q5FsmI+SS4uJD6dqCrNtq54ePzEamvYH0cOdw/cP31AWc/15TeEGHjYf8/V5RW2VqTkSt6XeokxmfXKEJNnnif6Yc/dIXGx/QprKqyR9G6PbSsmd+T28SdCFHz3699xOt7RjzNWBppVw3FQhFRTr664vFlhpOH4+DP7I5AUlWzQLqCN4+p1h1Xwh9//N1oTuXv/nigD5Im+P3J18YIcLdv1JdeXv6Gff0Kpiqbe0NYaZMNxOrDvP7BZX7DqdjwtA7YeWJYJLRvapkOrif54QjYJtwTef/iRzW5FNomqdZgqUptLbj/9mWZVgYxMri/FWixUtVXT0tYXPB1umVmIPmAw/Pa7f8vQHzFVh5BFWhf8iTwKjJUkufDz5w807Zp183WJCehPPB0+cHP1FmMWpnDk0N/yq29/x8OjwsoaHSpeXrzix+9/pLFb2rpDKs2Hu5/YbkuorPMG7yTD8jMX26+xutD56pVjf3zP9cUb6kZyGgSbiysO/+074iJR0lNXN9SNKTuIrJAhk/LApmlxc6A/3uGXW373b97y/vORGM8eoy/blF8eXyRnZw2UUpqXa0WnPIRMCAq0LSALPyOFoG4MlxctP/zwREht2QLHDErig6NrDd1KEMJYpLeYs5arbKViyFxcrqmbIrvMKRLPwY6UhRHaSOq6xrnSQJ2PtlzzcyIEX5C+0hB8wrmA1AopzzS8lEgx4EOPUrvzgO7sGcsJhEahESScH8muIuWimCAlpBAE78tW4rzleHryIBtyLoh3sjwX8CO1tWy6jtNp+kXZhijUxgRKSdarjq4VfLy9I6dVmQhTNq6ZgPNH1usNTQP9aSTlkpHzjFWXUuN9olIFmb+4snlJKX15V0uuU+FjyayJAY7HGSW3553VX26vErbSaE05f7LIHNOZnJcRyCTK8MUZtFqVxtkuxGgIWRbokQIfB+x52htjyUiTOp/ljOnL/T6ngBJgq5r9SRBy/LJlU1IzuwWZIz5E+mlhXlLZMz7LIlNRnKSYWLUrdpuWfuiZF0iq5GaWbLZMSg4jBbZSJdh5zHgXyEKjlDpvrMokG1maaaU7hmEmRlu2myEWEIcSKKXREqwVPO0fSGmFVIWKKAjnzVuRTGorkHrhdPI8PClMrVjtGv75P7esfxNZXzwxDhPrzrKMArGMODEyngKCEndQ2ZbL3RVS7Hl6eEez2tC1K1ZdzT0DblkQrNCyRmvBV9949vv3IBRCVEzDka7esttc4dzM+/fv6E+fOFQVlxdf8+LigmF6YtutOfYHTv0JN1PC3vd/IkVLEoKMZ5kSq1VLYyLT/o4f/zmwWifmh57r3SUvdy94Gk9s1w2fH34kxkgta+qVwlYrHve3PB7uiWnh6uo1f/j9f+fF9Vt+/erv+PgP/4VBnnhxcYMxDYMLYGduf76lnygbklBzsX3LxbpI7tpuxfuPP2HNhjlNhLwgoub3f/oz5mqFzBIp1wj5K/7mf71iHk4lViJKbu8/s8jEPDwijaLavqETGZ80Prgikx5qTiGw2RYvXMgzD6dPfPvVb9CqRsqiIHocP/H9+//Eqrlit9ry669eoZs3bNoG7wNPT0d+/z/+QO8+ojV09Y7L9ht+/08/kL0jhJlhCtw9fubtmxf0h5F5USTZMoV77j7dIPVnUvqJOAde31yidcsSFo6HCX8YGJ4yL99cUNcNwQn+9M8fmMYf0dpijEAZh0Dx4/d3fPNty/X1lvunf+Tp+4mbFxuUqZjHidunAe+OZOkxRrNuLhmnke7igq6J6CpRiytOw0Jbr1h1a4TY0o939H5kwSPbyLqxXF3Cw7tPzEHhXEd0mfWqpqkrKmORGLKuUNlzOBQ655vf3qBCx/H0RBYBZTNCNHQrzThFTqcTwR1p6pZ5OXF9sUXmmf96958ZhpEUr7FW0jQeITyHh89YE4hR4P3Iq51g3Rmm6YgXjq7uuJ3v2Q9rVA/KB/Ynw/WvJFlJiCPx5FgeHzEx4pwjM9Ourv7FvuZfRXMFEMIEQmBtQ8qJp6cTXROBwDiduLl+gzWakGbG6UTKA91mRVNvmMbE3M/UdUXyNVIYbGWwNOx2Fzyke8bpgZRnEp7VujvXFkU+6LwDkTC6QgrDMDisaRj6E0a3VHZF1RhCHEAntJaAZRz3aN3hxm
OZ7ouFn37+PXWjS5DZPJPTjmWSSGEQAublxM8//8h6s6NurpDSYJRhsws8Pc6I1EBomP1MSB4fbrGmYb3pUNMFfe8Yxz05Z9brN1TNDTENGC0R0tAfPePggUAWjsU76mZmuy1wAe9jCaLzMy9eXpGiYBpnPn5+h9aJzfaalARP+0e2q4itBDmXomdZNNpqlNKQA9O08POHj8hUgbeUgWLix3d/IEZfjMRI7h9ONKtPTMNQzPKiIfql5EV4RV01XOwueXjUNG1TvAqiBCIq4zgcPyMkdKsddXUJwGoj8SnyeJhZ3CNS1eQY8d6x5J7HR4fQitU6sVpLxt7jhidS6hEi450mVo66aRn6gWEYubxqGec9/amnPzkO+5luI6i6zNB7hNZsLnaM04FuvWXxifuHExebK759+9c8Pt3j44EQRoyVLDMYk0l55tj3JGZev/4VlV6Tk2Te9izuM1eXr7DGkGLi8rKhqjuMmTn1B572D7x58QohJfO44J3De48SDVWtmJeB4+mIsZaUTZHYZElTbVmtMikYBKqgm4PjYvsa7y1KCWI6cTyCFBVd1wAKKWr6HkwXWOaRZVh4cXnNfHIsEtqmYd00nPYDF5c3ZAXLvHC/P3Jz9QYtJNO8J8wTGoufBd55MkekPssPXE2/DJSAzJrN+iuUKZ/Lw+EWY2q69Vfc394zTx6ZYHv9Cj8eyNlhqsTbb25IsT4TfSYe5vdcbF7QrbZUlSxBr17T1mVL6ULPuNwjRYuIia6rSaJsXoZxZl56kvbkKFiWyHa9QYSRLC1ab7i+3CF1YBg+E70kJ8uq/oZljkhlWTWXmPaSr168xc8T1tS01YplcnRdy93tHZuN5/rmmt2m5jTeIWSFi46Pt/e8/epvkTJx6g/0k6NrNrTdFucE/bgwHCPV4RuiExi1o6ouqJsOl0IZAKUCdzBYyIL94ydyPtBt4XA6cToEYpTnTUtxIv2STXS+CAu+FN6VgV+97UqmlhtZ5oxtBTlroj9L3HxkGBIx8IXiBhGZC9Y/5sjsI/kYybk5P+cvXichE0rLs3QIpDLl2M7Hkcmk5KmbXSls4nKW4qkvCkQQaB2JXp23Z8X7E1MoAIEUiCnQdjUhlEyv4juCMzIBsj9vjTxuEUDz3GnynCtV2tEIRIQqwcDxjPGVFPlvCpEUFSJrtCz3MZELBh4pMLpmEQNSlZwVJWsECilkadxyIoSSDRTOPreUn/Ht8cs2T+lMCI7tSqDPWyu3JDDiLInMX8AYm3VzliBCSoaY83lz+eyvKq8/Ro/zkSWUV/scwJzJSKXwMUDyLM5zIiPkjhhFkSEKQUJBsgjpi9RTFb/z2cVU4COpPC8xle2WKjj0nOS5uX+WA5ZPitHlRyqJkNW5Ly/vS8qpDBSQWKOwlWQeI/HsDXz2XJXcQYc2Bq0Vtmrp+9uSYyaLT0pkdQYaPjfQkc+394TnpZV4VruWprzATyRNY/BzdS7Wc/H2PUNJYoki0EbyN3/7W8ZTwFqFFhqtMn1/4vjxEdUdqayhtpY6SHAR7wNLisi6ZCvdfXrPH//xv9NdtNztYbdrsKpmyJn98Y62FTwd94wj+MXy+qstWUoWn5gmR44NppU4J3EOdCVYseP66mtSTixxBC348w/vqbsd6ISsBp4OC6QG7xxCaCq7xYU9lalpdUOzqtldaw4Pn7ncvqYSmuHR8enwGZMrrl9ds9nUxZKRBN7NiOiQIdKfZkbT46PmQd7hgyNWhslVCN0SU+Tp6RPN9obrm1eM40CIkd3lBqMkn+4+Mw0zV1dQGcviHd2qJouM96CVQUnP6CaG8Z7D8YmbS8tpX/xnShuO0x273ZocHM4rxn7N7/7mG77/+QcQI5XOKJu4e3iiUgqrCnZ8TgGXZkY3IYWgrSsau6NbO5q6wtYa4pqcaubZI1SkWWvEo2bTvCAMI26JfHh/R+97lJAYJaiNwmiJzIkqOWKeETg2jSYli7ZneZpOXF5dcxoWSAGlIiEINpeGGAM2N6xXK4b+ofiIgiA6jTIwzR6J5POnd+wf3yFzZtV0bDc7fFjYP514Oj2xu9xR1xWZxN3TO8ZjYJgzRitaq/jqdWJ2B4IbMKIqxGkrGbUkyyJp3r3cMfT3tK0GLLVtuH77ivuf/4RbFlws14m3X33FdLjHtg0hwelwZF012FpTVQYpNFkkJJKnxxNhCRhTsd02fLu7YB5Gxn7CxZo3by+xqaK2RUb653ef8HOiMQriQnjacz9vGBfFxXZLszacxgNXl2/48P1PqARt3ZCNYzlp9ncjORvcxUJIT2RnuKhqhBacDj//iz3Nv4rmSghRaGm6wZoN/TBhrC7BgQmk0NS1RcoVs8vk7NBGU1cVUlhSXPAh0NWS6BuEMlS1pW1rltkTfPHWCCWJyVPXNdN0IBNRWpKyQkqJNS1KGdwSyv+be0IoF80YIqZSZzRrLmF4YaYWCmMFUksyjv3+WDI8rEHFVAz9Dx+JMVJVLdZY+qMrYaTCAAYlDU29ZrAz1jRYWzPNPRnNskzkJLGmQcqK4EdSBClqjHiBVhalMll4RHDkPPHp4y3tyiJkQIhYbsbeFZMrgnmaIEuU0F9ml1qXD2vbdjjPGYJwIqUWcc75yFlga41bFGRLXXumZY8WGTc7pM5UjWAa70E6YireFBkNQj4Rg4Bsycgv53/VbjHWoLVG5JbgHbY2KGXxUTKNJxa/oHWFDA3Rg1EdYQ7Mi8f1E0uEqjFE7/ChFHXOrUkuoDQoJTg+TjRtJMQZt0SCOyeDmw0jARcmYpKcjk+s1x05lyJVoGmbS57zVUKaadoaE5qzgVniXfmzaVZI51m84+WLlmWWdKsGKQVDP2NtS90olPSIJKlszedbV4JjpcIvJQxbi5bKZlJXEXOLxBL8qUzSESTnqMwWQUJLiTWSpha4aS4DCl2jrKE1kaRqtFZA4uHuA8ZIJJkcEksYS1C17RC5GNxF0hghqZRESA9J0Ngi2YrekSqFNmsa26FDou9H5mVGyUhIC7aumZdMjgmtBEPfo80GRCzTQFEm2j70ZCJa7TCmoapgHA/My4CsI1pcsqoVXgW01shcUzfFvwgRWylyVGhdseDLxdpPWHMBokBotNFY252vL56QR+YQGeaZRm/JssBdfDwRQ0ayEInMPmFdi66q4mXKFiM1Va2ZxgMxx3PmzIrZ3VM3OyrToJVmnBZMZQpIxHtEiihl2V12VJUkiwVrFMJpmnaFC4HheGLVXKKNK2bwmAnRYNSKcTzgR4saL4ihJScwdoM1G7Q2pHT26WRPVhKrDXcfPiOZUTYglaef9iyLg1zgAPl8zf1/ea7OReuXfUYuS6in08Q4l6lfIdgVsIvVnANNBTGWrUm5kgCUDK2Ui0E4ZUlK4kwS5AwNKmG7ojAH8B5AP1MFzkrFsoEoWxTOwb6cfVB88QxpLc5eLPml6co5n4NwCw2trjXzs3yRs39KqNLC5QAUv+EX7DZnT5rgLB8r+HRtBOP8C6Di2WcUY/E6PS8Gnfslb6qcz4woPRYxOhYXzxJNdV4bnrdJWaBlITVmi
j+peLPOkj/xfGzF2xSTIMypkBjPzcmzMyvHjBKJnCCE53PEs23r3ECJM4CivNZwbq6+/CPK688is1lt0SaQYsmNklme5aS5BE9T8rueQSkhFE/TX3bwhah4bmZTwgf5y/txPricy8YGVbLDYizbxuem60vob+T8/EUu7GMRe8rzh6Ns3goNMVtBFoEQItOSiEkiRHm/JerLMaPLuRjGhZS64rdTz891Dm7OBaZijGIZS02Q/+I7VLLXilF+1a1489VXTIMvXg8NSkUqW6HXv6beOZra0p+eqJmo9Dm7axjpLroC5UqR5BaGQSN1g/ML0yyIQjH2J7QuPqxlDpwOJ958fUUWLSE6lrmAShYXII54PxFCoG5est5ecDjeMY09QioOpx5bbbGVQVnLeDpS21JDQalBVitBir58l2VAKkFbNzSmZZkGDodbHva3vNh8V+id3hFjQgr9BfSjhKapNqQAOTvG4REfBrSukapidAspLrjgaGLG2BVVAhUXpBIcTyfmeSBFj5SBLD1kQ/DFk5cSLGnCqjK4MMYQ8sw4LSXoPbsSx2ByUTPJQCAhciIIQdXI4pkVGiEqrJ8JYUAkgZQWgGHuQRRokg+C4DOVaUtdJ1UZKvqBbMrzICLWCHxQSKFJIjHPE2Poqaqy0VEZTCNwy4JRilVTE5QmHTztusHUkYzCK4OQhsX3hFyUAkaZM8CsUO2sbjHaYIxnmRyRSGsUWkqq2pDjzDQsdFXDzeU1GUc/jrjJ0dYFz08EmQWNqtCtLnS98xbbzyNjv1B4Uok4DWhVoZWirgowQmaYh0DbdKRs0VTUtib6InkOZzDMuAylBrUl53acPU0TadalBgsBlnmg0x1GBZIKgGZeHnFTx+HhxDB62t36TCXUpOTopxkP+DDRDw05KpJUaFWGWTEHfMg4H2jyhlW3RZGpjSb1JyQWpSeGsVgatFpYt2v84liGwN3x8/9HR1Me/zqaKynOnilDXW8YR8/19eZLOnpTN2iTmed8TmOGnCSKBjfl0kBFV4ICg6aQ2CTdWvHTu+9xi8NYgdUVGQ8kjqcHsnS0XYuSl+cJYgVZk2ICNFpXRA/juBDCzKuvWpaFMlVMM4iFZYm0q9L4TNOEH0suRVVblDacTvc8Lrdc7F6yXV/RVBtEekLLFmRB3aYkUKqlqprC0LeGY58hVaQQcQkm6YpkQRratkLRkv0KdEE8x7gQg0fIiQ8fer76eou1CiUzSlqeHvfU1RYlDcMwom3FPAZiSIikeHH5NS9eXbO4hWUpptFhuiclhxClQUhixBjJeIC6tmy2EA9HpumJxXl0LFARpSJZJ2Y34V2mUmvCtEB+LnQd4/RESrDbXGKtJOWJlAL9dM+2ukFiODx5XNpjrCBFxTKUbUpTa54eR4Z+ZJkm2ssOZCI4zzIniI719oJxcQxDxs+C4/1E940u0/Z+oT8sXF+9PG8BigF+mQ3TMPNXf/0dWnVMYyamTGVrjNHMy55heOTNm2+ZTk0hUqrM4+M9h72nW60QYgUi8/Lmik+fbrm83KK1wZqB3eaKYX5XQmCFZV2/wBiJIBC8YJkcS+ip7A1JOKoKXnYXHB8cs99/kR8lH6kqjYsjVkKzWdG0kk8fDxjb0piGttmQ/MyqMwXVvEz8cPeRy8uSvp6jRuRMs1pKIepmcojILGnMjlpqmrpCCkllBXVl6E89fi5EsN1my/HxI8P9I0EkLr/eMkx3SL0jp4BEYavEOJ64uGrRWrF4j3eRFB0+jOQEWhjqusIIjcoWooI8EZ1mt/kGISI+Hplnh60My1JyKWJy1LVEqwpRNyhZJvVJCCKeJCK6sUhdpvZSCaQCnxdO85Ekm0JNIiDyhBJbopiIBKLM7E8Tu+sNOUD0kZBGNpvX1GaNjPNZruPxYaGpDXW9JibF+9vPbLeKsV84jRONtWix4VffvcGFhcPxRBZQ12vq9goVHbMfyMCq3UFWCHqGIZBCZuoXRP+Cdvk1SSYkHVqtit5bgFE1OXmSLBIugeTu83tevzZIIwt9yjlCCJTNpCDmWIr5v+ytOBPyzgV+zJLbo+Pj7cIw8yWENcQij6qspq4NbdPi/akAIM7SwnwunIUsDbZRNSkLRPql+UglCRhELpuaWKhtWYov8sQcc1ErnLc4X/Dxz3x0gCywRiKELYV3LjfoQgE8Y7qtxprAeFLnTU1BkT9L6TIeIT1KZRD63KDlL4yOlNJZBqawlcA9FP/TM+o85RI4K1QuFEARGIcJIZ4VEnyREEqZWdxEBpZFIJXmF/9bacaMAWPSWS6dyOjiHRKCAq2ggBOIeJ9wiyzHnRO/oDfyWRI54RaFlAU/L+RfLipLIaolZUIsE/FUwE9fDicLYkogE1cX1wgcx1NPjCVMWJy9djmBTw6rOEMryo/WsWyuEGcABaTsCNEjvcC5ApwpGORf8PAxJpLyxCTxTuNcQmDI/CINfQ6YjqHg+ucFEPKL9DWfG9Ii6QzEtNCPA/PyPLQtzVXx6+Vzg1ykfCnFAvmXAknxoqVYZJgil9BtQQn3jUmcmzBRrtFS45NHScHV1TUXuxds1ud7fcxMU6DZ3XD55ju2rzRSR/6P//1/4+V14uZSoatIdoGL9RqlJG1Xs92tuRtHLi4uGI8PTKcZ8oocc5ETCotVIOVA1zUMgyCFIsFMjPR9JtlMCAtujqy7HcoKfJyY5h6jy705B4cyNVJ1WHWgriXjKBFSYZtCAHw6fMKLiewm5j5x2V0io2eZ7tkP75j8wOZyh3N7pslDqmlai/OFKKxty9WqY1p6UuwJbsK5Cb3bcak7DqdCX5bSkEPAK4uyBpkibll4eDqhVSSkgUN/ImfoVi8ZhpGQAtpKxuUTtdnQtFd07ZrVxnN6fKCqFcGXXKZVW5VrgRKYKiO14/7hnsZ4ZNbkYAmp43qTeXx4IrqAk5YYa06nE+3GYFVV6NKngGotQpjimwyRcb5DVDuSzsQ8YGXmdHJYLbBWERGIOSOtxJ0iac6s60JP7qqGrrFg4f3nWy5uLKbK5GQQEWaXGcaJLAymatBWc+oPyErjnWYIhs2mIoaFZZxISWDMCmurcs1eMnEOdE3L129e8ucf/8hwOEIIfP3m9ZlRENDS8GJ7Q9ALUYqy5c2wzAfGIbK6aJDS4+YnlmWDFhVVtyKTmI8LwVs2qy0hSLyD6TScZdblqx6C58Pnj1y2KyQTs3elThCJzcYSQ2YZHcs0sdINVV0y5mISPO7fcXyUHB4jULG6bHh6mLHryKF/5Hb/gN5eM8cDxyeF1DXrzZZVZ2iqjF8ODP1MDIJRVFzcfIVWGfxEHBzWrlldJua85/aHPZUwVG89h6eJ++OJo///QXMlpUCzIiyGOTqgR8gVOZSCKcQZbWs+371jt9vSNis+H2eU8KQUmJeF4BK3tw9s1xIZBeMc+PBp5nQsRLNKaWwtaDtFWARt/YrFHRkOPVYlptFz2N9iTMVue0nfj6BkKcqkIMvMME3cfu6xVpQQ1XaHn644HU4kFpp6ja0M9/cfcc6yXm94efNbHvcfaJuOtmvZrNe0a8PY
B4w1TNPITz/+iK0Tm80arSpytKy6G6b5yKuXL0nZMbsn5ilyffU1SmqWeeF4/JG/+/Z/4fb+Paf+DrfsqSvJt29/zeH0iVM6YCtFt/yaZTgQ3ZFu1fH2q1/z6fYjQsbSfKmMNTWHh4DSipwEKQaIW5b8gPeFTJPygmugsSuyyMyTJ4c1D/v3vLipcZPj+z898Zvv/pbLC4Nh4Gk+cH//kd1Go6sZcS5eVhvN433PYT9jVMfu0oI6crW9YFhueTz+Ge8qlvBE/7kqZvLKoQBrDEquaE3Hiq/5+nLLw/Ez8+l4zv2CX72tOQ6Cx4cDh6cT1eYCURlaU6avqy7x1atrjvsTwTpkdqhU8bd/9T+Rc8V4SsyTA/uECxljDEa2yKbi8fFHZLzGyBuUrajqsiEclw8opahsxf1tT9ftyFnQ90/cP7xHuBZdtQgki184Pv6Z736zw4+CU7/Q9yO6GXhbRGHUAAAgAElEQVQ8fI/SYCtB01jmyfPtN79hGG/ZH25ZJkhNQAmH94HgLNevv8HKR27v9gQX2b3aEBaY5xkrd1xv3/D6P/wd4/JH3v38PX6RVGrD3B9xzS2VWVFVDZXMTPs7dlffEHLiMB74H3/4gRdX33EcLG6O3D4sHKtPXHZrXtqakBeSdDwebvlw95nOrrna3vDtt9/w6f7n/4e5N3eybMuztL49nfGOPkZ4vHhTvszqzKpKCpqhBRqsJcRuNHQMFQUJJP4AzDCUFpAwFJAQMcMatQzDuquruqoyO19mvsz3Xow+Xb/TmfaIsG9EVmFYNmK5EmEeHu7Htx8/d//2Wutb+DgRncCwInjHML7DFCXz2YqL5TXRwXG4x48WGUpeXD1jt4vYcaQsDbN5w1/9/C8pTcROUJiCmxfnjNOOp8cnStPQ1nNE4dntHkAHVGHQRc3U55qFtlmwnD9nGhWzxSWldigdSKf+nrZqeNjtidIzny/od5Gxk6g0kfyAC5Hj5gmNQZeRlGz+fSoaROyYjhY3aK7Wa1britfdHQ+HOx79e16+/BGb4YD1njE6mlLRHTT3TztMITlbX9KP71m2n5O8QiVN0yjevvqGav+HhKlhilua4hPq8gptikylw5Bixte3TYudRv7yX/0Zs9UBVI1UisqcYdQlMf6SjLUG/GkwER+Yeh8mAHLwXwvamaRc1gyvK1yYTvj2rARMo2e1MMxmBlDYKYHwuccISRIRow2LhaGqE30XQciPz9GYMoWtUNnSFWIuzRXJQIx5qMmQN66frZE4YgjEmE6qoch1DTGrQWVZctgPOK8RUhFT7kpKMRBDJKlEVWX6qff+lN3JA5p1UOpcMOnckIeZmEEXUuSy3kii0JIUJ46HgRjO8gB26rRKMWGtYz5LCOmwLuJ8QugTiVAIlMo5sMGO+fReGaSYcxrfTsPih/6piWnMubcYKpzzKFUR0yl3FgPBD1SVQcuCFBTWebQWqA+DBRJvJ4oSlBaEmLAhA0PSB3seghgibaVYr+akJHh6yvAQfZqU8+CjGKct9/d7lBS5fJcZIVmEkEgh0Uow9Y6iVkgMMQikKE4DVVZ6sq0OylIw2oHJQvBLkPGE8E8nZSqSkmS2aFguKwpdMtmHj6priikPVniMycNa10U22ymTDk84+kRCyfJjxi4lj7UDu+NIos0DeMrevxRy5koKRVNXkBQJg5An4mCKhJizd/o0oB6OB45HgBIpRC7tVpnaqbSkbWpe3HxKRqCPWclJCXOyaW5+IUi9obxx/NN/+r/wj//Tf5c/+EnF2blEXpS4uCOlwD5ENkJw9dkNrao5KM3UWegkP/zhT7l9t2O33xFj4MXNNffvX7HfDTRNxWJeMo4ChCHZiYJEM18Tref29XccD4/EYcQJxac3z3h795ZDp2mbltmiwU6euo2YApo694RWukBSIoCy6Nj2Hc/PBLOl4cyseTl7iajec3gYkdSsVi3jCJeXaya3Z7IHYrD0UyQGRVm0NM0MaRLj5i29zQbnsphRFmfs7Gsebt8zHkbOF8+JKqCLJVVR0dQVF+fPeHv3NfBEDCP9Hu7vDqiip60nVvMLrm+eY2vBu9tXDP0RUmC2skgBq3pJoxuirykF3N91NE1DO6sRpSb4lov5Zwz2yFO34f27b7i8/JxZdYaRAqzlcrGi1z2Ts8Sg+ez5S/76678gCI0bRoZpS9E2VKKk1IKiSqxWgn/v0/+E93dvccMu2xOZsV5cI8KEQ2CF5Id/BF9//VvqqqGtF5TFiuVyzat3rwFBYQQqan74xR+gZKQ/TuyeOrbbLQpNU2a1LbiOxbJByYg0DVHU+Djys7/6mourFV9+3nI47JicpVYNF5cLqsog5MTdfkuymqo0VJViiJ6mnHBpRBlDU654etjRGMFqtc5wl8MOrlpIE8PQ048T5lAThAE8KQa89QQBB7ZM45D7BKXhOEygBGVtmF1c8OMf/0Ns/zP+9V8NtIuWn/z0S/7iL6DfHyiYcFax2ykWS8W3736LDYGgKxKeQz9yGC11tWQuz7h/3LOelcg4EeyRaegYd0eeqhVJ58Poy7MV3QCHwSIxvHj5nMiRP//r79geDwQm5ovfPz79nRiuUoT57JJpDOx3W6JIPG06ht5RFIbziznv3t5R1QJtIiEOSD1iCs04RJQ0tE1JxOTyXJ+7KISoWC0Lus5S1yUSxbe/OhDCjqaFsiop60u6Q7YpBN9BcthxhpFNLgoss91i3N6xfYCr83Nmi5KyShwOPUM3UBSLvLnu70lxy9X1JSnmIk/nHFXZ0vceOz3xtDlw7DaslleUvmIYOqzfc312jaBkt+uYpj2rdYvA8PLFH+EsfPfdb9hs/px2fouWM4piwWeffcXPvv5TQgxUhaauWyY7Mgwbbp69oK5aQOOsp6pLtpsd49EzlLeQLOPYU9ZQVnDov8dZw3p1ToiR4/FI2zakeImzj0z2gPcO614jl5cUzCEapmnL3//D/5DBbjiaDiEcwxhpjgPjdiANgYuzOcaMjM7nbhEkeM3VxecM/cS722+5e/SkVNGNPUUdKcsSLQ2fn/8j3rzeIMRI3WQ72NhN1HWNEhIZA8fuiflyjjIF3gXms4q7t+9AVMRBIKYGM2/Yb/LmxGjDcpFP/4fhCaHmVE3JNA28ffc188V1PglWI+OwZb26YNYsKU0BKfCw8bz45IzgJdaPLMuKtvmUQ/eWY/fIvr8l+ILzs8/phz0xwMvnX9KoBhclxz4Q7ERRwTRo9psDyRtWy0VGyjvPNDn8GOmsoy0rNnddzvZUz6iUwhT+lD/IIV/JEqMnlqtAjORgp5sYxoH37x+J4RVffv5TtHxJdAMpdohSsqyvOUx3aF0jpGbfdcyrC46HjkhAa8MP/uBP+O71e5Z1hZYJYRxO9Gz7gvlMU6DYbDpWShMagZATzr/n/e3I7mlElxnhWxSGMURevrhms39if7wj9tnX//xmztl8xjQ67Gg5W8zZbB9wHpSb8eXnNxz3E7GCFCO7TYfUJWM/0axKZrM5w7RByhFlJKosKIqComqQUvJ0/8Ddmze0s2uMUdSioSw
KirqkKWccDlsKcaRuGr745Ac8mnt++ZtfsV7OOD87o25WDMMepSpSbFCy5OrlBbcPr0lSYKeEJ1uwHt5aSjXj5lKw2YDvBjwCraARuWenaTRJ7HFh5GmXaMsrJhvYH7bs9lvKoqZ6+JOT3Uii1RIpLjJBVUpCSgRv8d4xW6x4fPfA4/s3VOqJs/USqfPGNoTI02OHUifbGPEj0jt3Of0NsIXI6pEUAiMFoYswZcVJIiBIkkyIEHJ3Xsy2UO/jKReTcleTUgQ74KaAUgLvimxfJHcuhZSIURLFSIxlzgxZgSLXTogT+VSSKAoyPMdlK9rvLvXD5wsZMKANYfL55Nro06mopzTQ1BqtZLYSccJv52bY07p4lBCUpgEKTsayj0sTQ6R3u1w1EbN6lF+3YgYrJDBKk8KA0Q1Ns0TI7m+sK7nfyTqqWiFVzCjwE0P5oyUuZVz8etlgVB4efYigHCmZkyoIwQeUykqkc5miqLQ5JdHy54kpoRSYIhPNvBcZgW74OBBly51lvaxxk2UcI9aB1OqjQvShGLk0karM1zw5c7pmTxLFCXsOBHe6rkAIOqtIMqGyEZmU8jBbG83qbI2UisOu5jA4kgeEzr08KZ0oiyPeR6LLFEwhIfmsMiml6IeJ9ULTtlnt2R72IM4+wkc+wC9AsJwvOFvNKIzi2E1UTQ0x2xqTdNkCpAqkAGc93pErC5BIIsbkhojuaCmKkvOzOV9+cc7DwzcnImFEyGwzHYaBxbzl7OyMum3pjwNCarTUqJSIMQf3i0Jx3D/w9e1f8v7dt/zzP13yxWf/iOc//gxrt9zd3eF9JI2OadPzOGo6tac2mqaWRCk4ThZRaZ4tnlHXhqbSvHvzhou1IriJcb+jMi2hVkhrEDFHB7bHB15++vxUFeMwSnJ2ds7YPxHciE4HUh/Y3G759NMfsV4vSGLgu194XnyyzJS8wfO0iVx8nnh3956ibDhf/AARDvi4oG0sx+OBb9/+lk9uPuHN218SbEKKkma2JIUjyhSoqkQUkjA90YmGy+cFhMj9Q8aor9oV9QtFcJFSrxlDwDvPod/yuBtxrub6+VeUu5rN4x39fsuLq8+IwgMCkSxT/8T9uzfM6hXNvGFyR2w38OOXLQ8/e8MwVbz4wU941HuMbhmHgdG+RbUL6Eekyl2GJWv+3o8qmvI5FxdLZEw8vd+yve+JswcOxxE/CcThHBGXXN18SYgDm6fXFMsK67fIoPLvc9S8v7/n7n6DigVGneF0IMnE/nDkMHTYFPmTr37Ms2eBypSQYL+759DBaj1jHEcSPWerT1FJ8fSQD5hNkfsNV4uXtPWM9XrJ8xcLfvbzf0V3vMcHSVnN+OKzH+Dl9xynPcHHbGU3R1JoGeyRKUY8mdxcRYfrR3CS51cXLPSAVYKuH7nfjswWdSZml5oylgzjgeaqYXv3lsPYkyioFufs3twjhUZJw6zWVOtzxsM77GgZ+p7gLGZKiDTjuJW8Hd7xtf+athnQYkEhI29uX7EfRq5ePOMQ3uO6iYsvFIftbxFLRa3mSNUglWI6zPn0syUFE8Pt17wZHfdDw6psqCgZJ89suWJyPdGC1hVxvUDJkWdnn9C0M+qq5Od/8X/z1ctzhDkjKtgNv79F+O/GcJVEpsrUGlMu6Y6gVUV91mIKhTYSo0vquiEExzQ5louay8tzYtBMk6Pre0brCSEgKJHSoFSgbRq0XBJ8JNjAenHOvrvDmGz2dz5QFIKIASpCgGN3oDAN58tzpMzS+vWFpDtaSl3ircNOR7pu4nz9gkTCOpstNyIHx43OGQwhHX3vaeoVMUT2fU/dtDgX8X6EJLm8eJltD2JEFZFSwTj2TFPPr775K+zk6fsDX3zxKSEkjseelDomd+D5pzW373qEAlNqquqCzeYx+/VDJPrcOVKaiqGcGMce63pmszmP2yPjNP0ORRsy9l3KgsI07Lc2N4y7rGytV89IYiI4zb7f4YOjrkuGfmK7ixy7wDhZFBWLuUBKkx/4TcOb21+yXi9JgPOBdraApNHzCiE9Ug1MYc9hn7CDATR2TAyHr4lB0NRzCnmGSrkV3PkDmICpFId7S4nI/ZNK8PQ00Mwgyh5tKlbLM+pmzjDuaNts51Oq5JvffMtiuUSqGh8moojo4oLF/BrnJ1wYMUWJ0hEYcc7i/USlW0Q0FFoRsTztN3i7QqgyAxNiT3JHpu4xI5F94DBs6dOBq+tPcXqBmwRaDTzcdigyvlhIjfeK7jCcNggClKKuFCE6yqJBKU0Xj/hwxPuIsxHnBr777heYMlJU2Qc+DR2FUfjQM9otY5949eqXaCOw7oBW2dbUNAvG3pMmSDrkiqmyYOge8N6ji4b1+Yrj8BtKZSDlvGG1nNP3ksHnsHISMyI9s3KOMhJpEloFZBJIXxGTZJyOJOd4enAInUPcYXIYrbCjQxDwdqQ77AlVoioyCc9PE+NkWc8vc37ETVjfo43ENZGyzCXiWtRYe2TWGozSBOfYHvfEkBDC4Dw8bm6ZrRr2Y0BPJaVtSPOB7XbI1Dkh2NxNhMlzuT5HSMvQHyAUOC+IKmRJRRlKteaw+4b5sqbUCht77t6+oSzXtPOCuq5Yr85oqiuQgXE6ME5HTB2ZlWeUcoH3NT45jC4J6YBSkbqsKU2J0CWjSwi5wOhLtCkRUpwGq7z5rYqGN9+9Yuq2aJNPJkOSH+233g50R4026jTUCJT8ACsICPEBbXHqIkoRISTGFFSmYOyzaoUQhORJSRJSRCqF1obJWoL3CKlJKaPRk8+WtrIwFEbRhQ/P+azQfNj0KpmzrN6fNAuRy8RSPNUxiEhhYOw+DCMgVO77y9avnDW1zjPZAhBIJU9W65PtMeXXhONeIigRH4t/FRKVbY5pJMWJENRHVH0CiAmhNM6NrOcFTWUQIrE/5qL1bAtUGYrkc6dRioKxyz2IJHLISuSslPee+dxgjMi5oiSQKp1w8zKvcYhUlcIUiTCG3JMmBCG4bOkTGRMvUqIyFTYJQgynPJvgYx0TGYJhdJEP+XxWxlKMv8t5nRJ2SuZDgRg/2A6zwilktvyFEKirgugmbBA4JzEy983lAV1CkvlAU4WTs1R8jG7FD1a+kxWoKhSFyvREO7mPAKMPeb+UEiIlZk2F0ZHjYfydtfDjW1bdmrqhqQ2FjiSK0/2dMfIpZqVOiADSkZjwrgTUiaSY4LSm3ucuOSEzZTCGMt+TH6AeUhCiOGUVPdb37PbF6d4lK7IxIXW2OlV1RdM0WGszPl6cADICpNb5fdqwf/D8+l8/ok3i+1dv+cUvXjNfF6wujthTBHHeNlxfrNhbQWePhFO4cUrxZNMtEdQYI0loTFlQKk00Bc6OODuy2zuWVT4csxPMVzMslna+RCnF48M9NnhenJ8xHvf0XY/SFaWUDIcjMYBQgvlyjSpqur4nBMEnN19wGN8x2UhMHqEmcB1alpDys70t1xwP+wzXagrKUuJjjzQNRZF7SomJZraiXDYUypG85ezScRw2tGWirCpiGTgeDzw8PlFXNUJ7tEwcpyfUUwJKmvYCIWrscEAZjb
UJG/P+Z24UIo1EJdF1xdzNOPzmgfjwnuTgXnnSp89QQWCHXHEi3Ig/7KhnFzTVitnijMU8cTjk3lLCxGSPFGqF0RUu5u6taZpYzs84Hjbs9zueNnuuqzPO15rd0zZn6akYt4+UWhFdxIcJU0pKU2BWK4pCsdvv+eZnv+bi+TneDuy3++zWWNZY6zGmpiorYESpFXVT432ua1itL6l0QzurkCry5v1rfFREFD4G/LDn1btfkZJn6I+URc1qeQ5SMvZHrBXEKeBix9jNWD87o+8P7HcHmtYSJk2UIyZ6GpNwoyMKz2E4chxH3NBz9+07VLQMY4b/dEMBhWE+WxG957B9orB7wmhpqoqqKBiOPWcXFeP0gB0iwWratqIsJUZrUgo8Pe4oTI2PisXZNasLxdXNJctFxTAd2e17DocnmqpENobRxZxLLgOVCkw+4StDNJpx6Oncjn6YaFTNxUyj9EA/BKTYE6YDj0PMGc2iyF1f5LzY73v7OzFckQTOW4pS0jQVIgWCF1RNiVTgw0hZVtT1nGO3w1p3soaV+UF6oiuNEzhnkUmiVIEqSrRq0U3JYX/EO8esrfGxoKoKUlR4J6jqAiHiCWbhGAdPMYOqKvEukhIsZmcUImDdiHcTPgVE0hitGIaO6B1t3TK5QAoGZUqMzuWjzkmCyeWMMQm0qUkerMvI2nbWsts/obTHFBqtJftdJhseuzvGMYdCl8trnh57vNsz2gP9+Mj5zeegIiF6fEyU9YwIHI8TSkVIEqUCy8UcoQJJBIbBUlYFzk942zFOAaMbpjES056iqFGyYuj9yVaiKaqKWXNOSAPbocdaT0wOIVq22x39YLGTIzhPWZU5gK1yn4qSMzItpkFIzWRz9oakKMpMcRIqYdOWoqywg8FPeTMyhh2L2YpSFyhR5YB2+rAZCYQEUtW4AFJBkgIXA6osMrWvVtRmji4NSahc/KwSUkaO/ZHZssxQkFMmoKiqTN4joFS2hox2ixQJLQzRe1JMdN2ANpKIR0pD129Rp7xBU9aMwWFERChD8I5u2DOvZ0TvSCHlzZ3QROdRWqCVpig14zhmYImUGKUoywKtI0aobKuJoGUg+oAfE8Fnitf+aaBZaKpyhjF1fljbjGIujMBrz/F4hymy3dCYCqMlhdbUqkYKm1uDVL4vlAJ8QtiAHwbKskUECzIgjcSYBTFt8U4gUKiyQYaCStVonTt37DCiU0klG6SI+NijhWPoR0xT5OLtOCLDyNPGY3TOQYgYcKOlrWqUVkw2srd76mWDURIfDF0X8MHT1AVaa4iSqlgg9AFTVmgtGTrP0NsMJlDqpDD1pBQZbQA3MYUJH7YcjorlhQGR2G2fkKljtcxqlR1HotuThCel7MevCp0HA5vwLqBkRKtI9IFJ9DQYyrLEaIGkpe+PBJcVE5UkpdSUpSRog/WeKKEbHggJtCxR0yLf46lGihVaL3KpLacMjMjlruNgebx/S1V4FjNJ084Y7Um5JysAo7VIpbNaQ763cubpQ6xGfNy2phSRUlIWBVooptGdcikfylOzrVdrhdKaoRtzwWxUIHMvUowxr4eWaKlOuab0tx75MUaqyuRhzUOKWWFJnCxdCSCAcCfymuQDMCNj1LOqIkVEK5WfR2Ta4Icy10x4y7a7ofMIUfNhgy5O6lP0Dl0ljJKIpPA+q2UfriGlrOY1TUlTCazNihIyj6NSqtO1OOpqhiQxDB2kAiEUH8iIkPM2dV2gVKa2kU4Ztb8xOMQYMnRGCFIUuYvR5LWXCJLIcJBCJqTQkPIzIJ2UyI/9YCSkzKvuQ8S5bA3jBK4BPlIIIVdw5OLn38E1PtAdQ4xUZYHAk4IkRInReW1IZCKiSEQCiJhVyZRpfPlH9TfspymiVC719U4QggL1YR1OhMiUUEIwq2tSdDiXv79MmBQnonu+AcpCZ4CPiKRY8OGzkD7AWfKEJ6UHPM5m23vG9Od1jyp3xEkdEEqgjMJO8gTEOI18HwJ4QqB0Qkj7UVnPBdIfMm6ZgFmWJcZo7GT/9p1/ys0pnfN+u4ee998ceXH+JXdPt7x69Y7nL2vqpSSpJv+uGUXRFGgRGB0naIPHTiO6ELgQGPqASJHCZJiWVhpHIjpHELnvK8SIApQsMKXEhoF5NUeiufW3HI47ZvMCbzWxi2gKinLGsd/T2YmimlO2Re5rI6GMpCoL9k/QtHNA5Yy8KbDDAR8EWpZU8xn77j1al1R1jS4U/b4jiRJTGrSRKAGlqPF4piEfKDRNzWazJ5UzEirnFuPEFJ5QAarSUFYl3nXselAioJWgnS2IaYdAIgqJVhqZEsumovcjwRhMUaO14fD9GyQaUSb6uMO4NckHknP5Tx3YbB4oe8tqESnXNYo5EkewgegtznaUTYWORYYuNCVtXVHXhsEe8ENHHCzdpmN+1qCFwIuIFBHbjyyaOS7m4UqriuA8WmnaqgYf2O0e0OI8D9Le5cO9FAghYYymLAqk8hyH3UdLKkLRzhYUskLIwLHvuN/cUZYLpI4YAZHAMD2hREPX9QQPszYhZEFkyujy4LHekrwgCkWUGodi11uE05Qm5p9dqbGjwBiJPTkqgvccdz1tGYknS/bTvkMXhqKp8IMjBkHyBwolMVUFUlEIxbKRdP1IiBGtK8oSVAEJx+QCrnfoUrE7bCmqJbN2RkqJtpkhy8RxyECUCoWuJVEMJJHQ85LKBuw+4IXHSYnTiZ4BKyKlFohC4xjZH0bskJBR4MeTu1eaU5G5zFUyv+ft3zhcCSFeAv8zcJ2faPyPKaX/QQjx3wL/BXB/+tD/JqX0v5/+z38N/OfkpO5/mVL6P37v15ASZycmnSgMzOYV26cB7y3JB6app53NqKuWEDzeWYZh4u79gdEeMEZTFAXHg0MVAyJNlKalEVcQFhRVApm7TiY3UJQty/k5UiicdxSlhlSh7YQWA1qOXD+f5zCg9QQXmVUls2rFwd2iVElTFPhKs9s+st8fKEvDy09fcOwWWJuQsoRkcJPHqJbtbkdZaubzOd55jDaEMWDtSJJ9PtEOLUqWCCVx7kBZS66vzxiGwOaxY78/ELGYMmFD4HH7wKvvVvTDkRQnJptfTMZpZPd0Uqxqx3HX8cnL54To8NFyOCTGu0dk6ZjGnnGcKFQgeIVUPVVVULczBAUxeRbtgqquCU7hQon3B8pihjEVfkpsdw8sFjBbJPxUcL6+IMotzna44JGu5ubmEqMURbHAFIJ/+Zd/yuX5pxijUQqEjNg44/zykp2dcGGibQva5oxFe4EAQnKUVcXjU8dyPUOIHPi/uDynGx3WTwjlWD8rqeeK6RAwVUE7k1hrWTQ1j4+PHPvEYrHi2bMVu923NHWF1gV26JBacOhBkDvQhv7I7vAd58vPWTRXVEXN+8dvORz2SF3StHNefPIDvn39DU+P9yxmM64vrul1yeXqks5lapQ1nh988UO+//4t/RhyhWlMXFws6HaJwsxYzGdsnr7hbHlOilBoyXxWsTtuqMo5h+PA0PfM60iINcImCqkwdc5w4XvwNUWpqWY1r1+9o501VKsK21iOx
56y0pSlzpsSI0h2ZNU6tI6kpDmOJXfbe3702TMYoT9M7Da/4ovP/ojD698i5UB7XmOnknF/jylbTD1HloYYFzmrESLT5Hj/9sjlesWyXVEUCZ80nQXtKwaXGMeMku2OR6auYzlvOV+vOV894zg4FAVtOWc9b+h7SwqOdpGrFqYx8vS0p27myFTiJ8PZesXy6kDT5uEy+iOmOqG9pUOVgWU1JzGgdFZ4xuB4un9PIdcsxAuQEsc9bhpYlisKu8RHzRS2RPWWaTBosURQchzfMZs3DMMRhKeoC57Pv+T28RVCS8qqRSrF9r7jsD1SFHC+XEEyyCDQxuJFyrY437PZvUELTeGviXfXjINCcIUUC4RUhBiIpw210nlj/91vv4H4SNPOaecLfFDM54nN5g5jCubNmhDefwzmf7BU5ed0OA0qJ2boyS5njGQ2K3GTw/lAOg0KpEjwEa1zYaoSknEMhBD40IeEyJtxY7JlLvhM6IspoEQej3JmJrBYLAjREjwEnw9aAv6jgkHKqpRzghBlVq5OG9yssmQS4Gq14P5+j4vZUiPJKPIUA1oVlEWF94fTJjcrwlIA5NeSelmznCk8mdQY4ymbBqdBUVEYjZIR5yJSFqd1y0j7TE10zOcNKR0ZxgOwQkpDJA+sMQRIgrat8H5iHEMu6AWkUCTxgVroERi8lTibIQpCgzxBI1LKG+WilRnZPSWcyyCQPBzFjwOBMToXnVuB8/qUe+NvDGzGKF4AACAASURBVB8nymGUBJfvw5hEVqWiOx0GZjWzLAxGVlkVdYrAiWz6kbSXQOX1CwH8icr4/x6qU3SEsGc4JoKv0XKGC/5jBg4SyWXkc100OOdPgKnxBBEReU1jQogMTBHREDy4SSCk54P1NdtLFR+gJ1oL7BDxLlLVJ6U2frheh5QWbUraWcNhn+9RqeRJOcwUQ4GlnRcsloLHx1u8P8sh+JR7xSY3cna+oigNMUW8DRRFRUweKRQqKUIINE3B4+OOx4cHDts9/+CLf8I/O/5PTMOeseto9HOOmYvP0U5s+g5lSpSpmbUaMfUU48j5izPevH7geBzx1lKWJUWpCESO00TXT7TzGWelJuwCCMViVrAdtohSk/QMlQoq3fD48BY4w7qBnokqtOjmkv32W4hHdKOI0XDY98zXM7SW3L7/DaUsufnsU/rRctgfuLl5zs9//i+IYUYzb5mtajwztMrlxG5wjIcdSY80qqBdzmiKguHXgs3mt3RCYNo5Z+cXaLVHl0vGITKNUJUtz24quj0IWVKWNVZ3eL/lcXuPkQU3l58xP6vxu4r5oqQuFeNxT9vOcT6AKRBljR8HXD3HLD+jnJcUq5HuYSCFHqEkpT5DmXNut19jd79mNtuwuxlR4Qvm84ZSCFwA4R0pHpm6JfPmgtlVTTUTHPaOsi6ZyTkrFfjmzS+Zh5eUWlC2NdoopmGNCAElPBiBUoK7hwe0EjR1yXp9xotnNW9ePyA8tHWBul6ymNX5ID8FQrJUbc3Xv/05q9kzqjKDtmIK1DPD5vGBx82G3X7g5uYcXTga02CMQSqLnyRumGOnxO3bDVWtqZoWKUBKT1SK5XLBZr9HG8lytaKfMkjmrCzQxpCiZCVbSI44BYIISC1YVXNSeAQVEDhuH77h4vwKG0ZChMLMMGLLbLXCiwIfBeUyEe0eFwpUqanLksmTxQA/EqwBW1GbW16/7bk4v0HENfu7ibN5Rb1aoE1LXYOpJJQ1TSmodS4J7/YeoTtsFBBLXOEw2tA0GTOfysTWjtw+3lKJmvX8jKvLS+4331LrEqETutQ0zcXvnZ3+/yhXHvivUkr/UggxB/5MCPHPTv/236eU/ru/NSgJ8RPgPwP+ELgB/k8hxI9SPiL7/74IrRBoklPgNcPY4wbBaqERqsBNBfvtgPZ7fLSIlB+bdfmWxVyAlrioKfWSRX2GkwOTHbh9/y3XzwXRzBBCIaJh+zQR8Cybc6qmACF4/f0tlxefEvwekkOSeLrbZPkvSpRsSLrkYfdb7NSjhEQFw2bzlqKumC9apCy4vd1jykjbLhmnnv1uj7UjQljm8xVF2WLKGqEEF2eX3L3b8PR0x353x8XVCqNvGKaep8Mt+/E1V8UfsLlLhOQoyoSi5DDskDKymM9ZLhd0+0TC4GygnyYe3n/PbttRlprFck6rVkDDw22gbMKJmmZIMdCUDcNxZPd0xOjIavGcdlZSmIrkC3a7LV99/u9w3D+xezoyn7f86us33Lw4Q5sC20eOwyMQOPQddVOwPLuABHVxxXAwOD9g5ob7eyjKHmMUpJrF4gsot3g0yBJjKtIAXb+nKAzFsiQxcjz0SDqqRqG0Z7M5IkRgv93lTY1SvHn3K65vvsze8jgQpif6ThGsJMqI7Q68ffvElz+8pC4Eh/2B27f3fPr8j2lkhXYSHSMXteapm5AzUCmR+onj97/hsy9/wtnsBVJIuv0tqRto10tevPyKZr7m29evST5yvjynUAX90bPZdBztSFVKjCq5OvsjbL9Ciweu1jVV2TJNPXd333O+XlPWnskdAMmExSfYHHq+ebfjrLmi226ZNYp6pnl43FI1IQfBlSApSfCaYbcHd2SoI6rU3JzVOCdxCZAGJQX77p6r2Rk+efabe2L6jmX1khA7YrIICkQUbA9Q1w1iWbJ7uCXefUtZW2KMbDc7Ho9vWdVXjPGRceypwzUP+3u2wrJczKjrOfV8jllMTOaBWNQoNeP+buTm+jm13RPCSFHU7KY913/wU8pUI0NBM5tx+5t/Tj85hsmyOre8/OKCb77+nnGqERrutjuqusaoOckqxr5n0Dv8JBhsh5COauZ5Pr9CaYn0kTII5u2cd3vFcjUjOeiPHQ7Psimooka4vBnW+hy/h+54YLB7msZRVs8x83UuvJ4cv/r2Z0gxp20So++5u73l5dWMi2crNAY/Jap2htQHPv3sHFzEDxaXRtqqQstzbIReb/nmuz/nj7/4MeLhS8aHK46TYteXaGMwKQ85xhiSGHN5Yjfx+u4V/e5X/Ognz4leMk4jq/MVdjgidSLISO8dwYE6UfiEEkSVseXeu4+KVeR3KopRkqbMm13n1Wmz6k/VA1AYhVExU9OCOSG1fd68i4IYLGVbklJimhzjkFWu32llIiuqcmTyp/docMJBish88kBKltEXTFPCx3TimEekEjjv0CJQGUnwCS9UVuBOfAivs4pmjGQ+L7BhztMhf3khs8KjlESSh0aiQos5SgeQp5eqKIgqEqUneI+NCWuz6pxOGRtOG++mbpg3BXYqOcSKqCTEiBKKhCIKQVU4UtrjrSJYg1DqpCYGQJOERpCzubqqicoQhaTgRNkTMQMiYkRJzzgcGCaNjQopU2baq2y/8Taynpe0pc3I6ygJKSAiCJny4XdKzOoCjefpMNJZA2pB8pYoP6xNOiki+R7xJHzKG4cUDUGeVMAkmGmBsT0htsT8EaQPhMiTZTFFT90aogu4AGM42RWFy6j6mNvEhOkJTjLZyOQnpCoQp+laygy00EqwaAwhdByOPT46hJpDzNlaISI+9LSV
RriRae8YhgolycPsKSsmYi69XsxKSmWIU2A4CIJUID50X+XvX0pFUyxZtTPCWPD+/RMyaeRJCXXO0dSzrIK6eLo/fN7jSEESmehYVw3ff/vnvHr1XY5FJIhS8m/9/f+Af/gf/QmVeM27py5j3r0ljh0qSKy941GUaKGQdcWr9ztskqhaolXAhjt+8VcPXDw7z5nTNtuNp6Hik+trjKjY7gfevX1Fvag43h6plOHyak0wEz//9XcUquBiMUdox2LZosxNrqKRFRfrl2yOr1EioaTi7NkZVVVx6A8YIzi/rPj+zZZyteLN3R1vbx9o92tePP8UP01M0x4lI19+8ow/e/Vn6P2CapxTJs1wd2A2/yHz5R5vRjp3z8XFGtd57LjFThY/XXB19SecNQlUIAnLsdszDB4pNUVlSIXn3eueWaNYzr5gef4Jz5Tlzdu/phUvGILAWcH57Dk3P/2MwY1MfsA6y9vbHV999SO6YUN33DBTWz6/+CPqm4gQFik9OjpC3/PYK4LQ1Fc/5T/+B/+Ef/GX/xsPhwfePWyo7wpMrSFFqkIxv1rzxdzy/et7rp+vmM8K/Njz+edL3jz2HPYH3Dgw95GDd1yt1zjneft2g5qdQ5RMg6XQiutnFzw8PXDc3rG+uKZuZjw87Fi0K9q2QReamCb67R6GgO0kZTjj2dJwsVhRVSs62/O0P3D/tuOTH6yxJIIOUFi+efgOPc24efGMqi05bC3nlwUyjDjbcQgTox3Z7DcM6TPOF+cs65rb/oHd5si8rliVFSMVYxgJ05GybmmrOZuHJw6biVDk/igvZqxn1xzeH5C1QhaaYUw07TmfXjUM1jL5gfklTF2kFIaQBjr/nm9+faSYVWz3t+wOt3if2E0vCPcPlKVBl5rv3uz5/OVLjkOFL0pmM0U33DOfPaMsZ9jgebx74PnFmrGL7J8eIN7yg8s1L25usINHlxLZJORxyWcvP2FyR4bpAGn7ewenf+NwlVJ6B7w7/f0ghPjXwIvf81/+MfC/ppQm4LdCiF8D/z7wf/2eLwJRcTxO2NHy4sWaxbxksgPeBZpGs9sLfvnNd1w+W3L9fE5bG3ab9xTFOUKumHxDNO/ZDiM+OFJMKJVPXF/99h1GGbSekWzEh5HtcUNgTlU1XN9c4C1sD09Yu2c+r06nVI6mWWJ0y3a3wVlPXWsEER8sq9U5plC4OCJVoG7m3L5/4njc0zRtDtn3hn54z263wRQTzXyGMpFvvzvS93umqSN6yX4vSWmHKTRte0FZQb9/ZLm4xhRLUrJ4GxCxJUVDd+y5ffyepqnoxyPeRqLLyPLNXeLzr1pi0Lz+ruPy8hlnZy3TNNL3E8PRsz2+YhhbiC3LZoFzjueXf8z9/VuGCPPZkvViRd9blNJIZTgedhSF5HDYc35+w+XFJeF9RIg71quvKEyFdY53t7/mB1/+iKpQ+Clx2O6oiiqXbKaIVIFFPSekgaYucDbxdHtAF4mqVPRDxzQ4EiOz5YzRRroxEKNDm8huG1idFQitGIaIMJGkOqrynBQbng6REEdm9QVnqxXztmHs54yHhJ96KrNkPbthHI9ImjwsC4VShis5gCgIfiKIwE+++nusLm54f7clJcnVxXOUDoQ05/HxiXd399zvHmlanemPY4edHqnnhiCOjHaGxVDogcVii6kFPo0cp9x70i4UNjimXd60fXJxyfG4Iww9tRCcrS54f/uWZzdLHIZhkNT1HAiYUoCIhGjp7YHF/BqjIKbc52LlJbtpCzL3kujK8+L8JUbrrJ6K/DVXTYMLNYN1TG7k8rJEBUGJwtQVXbXg4XHD82dLYhDstz0uCprLJTI29MPI/mCZL1ZopdAI8JL1+YxqvmZ/fIXxjlldMgz3JC5oVytiFAz7xItnc6be0/sOQs/95pZCNTx7/gwfPA+3D6xnLeerFSRBConnzTNefnnDm9s7nIPg4NffvePy2Sf4KZGCQErY+h0pJtar51xe/xAbAxwfELZFREddRb66/Lepqzm2PxJDZNaesbq4gL4jlgZlCybb4fsGpUeklOgm0bSap7sIckndzFmqhqftLcYLLpuSOmn6tyOreUZePx07nvZbzs4bDtsDU9zipCMqzx+//ENuzp9xu1ny1FU8HUGgcNETXMD9P5S9ubNsa5re9fvGNeW0cw9nvrdu3arqrlJ3o0YBjgz+BkwMMDCEIQxsXIVM5BIhAhMMIsBgkAGOiCAIBIqmpe5SdVd1DXc4054zc83fhPGtfW41QRStHXHinLOHXJnfyr3W+37v8/weD2LuaOoNH99eMw73JO757POG99++Z7PesdudIUNkaidqZehHz8PhAYdAahBB5saFtIAcxG/R0JbPS4HWILXn7r7NO71S58YmZryztEvAqwlM00AIkhizNyeKGe9ilp2a7FNRi78m+6jy30Zrnj1/yYcPj8QuxwGAQAqT4yEIVJVmXTfc3ww5HJacNxVThkwIIykKjfdh8aA9Ydyf0o4y1EFpgR/9pzxDECQBPnhsIVlvCspKcHM7EL0EI5/UW4TFmyMXxHciEYJEF7lBS0vBPw0tx2MgBgje5B8m5KI8JObo2RhDU66ILn1qkqTME7BIXLK/Evuzc0YHXYwL4GP53ieMhHdsthVaK9KQs6mMWjw9MRF8nqLUq4g2CoEiJZknlskv5Lz83L2fMabIUiKX86XSEjL8FPmcosfPI2NIOFcgF+DHckZJLOsfHEkJRNKIpLN6Tyw+sZS9VNYoNqua6LL/zx0npFbEGD6RDmOAelUgtSFNLlscJaQYPpEkU0qURUG9KnFzSz/OJEymKT4JSFN+vSRYr3asGpPJwjH3oVI+vYocxG2UxpoCrQ2JIXuIolhsc9m/rJUn+ANdPzIMLp/fGHOGX8zeIGstzntCzL5DYzQhRYgSrbP86e7ujnfv3nI8HiiKAu8ddVVSlpCE4370+OmalJamwZZsLtd8+69+Tq0aUlWSNLTX92zWDadhYEKy3ryk3ja0naPyiroq0GbD3B55/+1HJBokrMuCqtmyrmrCPPPLr74iGU9TWc52O853e3AKN3uunr3EWkOKOdNKqA3H7sTkI5vNDjdOaBVp24GHxyPv3t1z+XrDar2nLhXGlcQhXxOqvQVmDqeWH1z8EXGcGY53jMHy6o/fcDiNKL1mmg1D1xKKEiMLdHILFTUwTEe01sxdYpg8Uhd8/uZFjjMZR8bhxGbbsFk3dPMNP/vVe453HylViVmtkaUEFXj7/pEvv/+avj3Qjwe0ifz4Jz/EWMmhOzG6I5vNji9+UiKiyrlPUrN6fsXj4S2HmwP9MZDSml//4C31xSUPp5b28RrzLJC0ZhwcTlmQDTN7qt0RJwKHccJNkv50z9gWbKoGszG0x0fUocRLqFYl603Nh9uPbEyNFJrJed5+vKWbjxTrgm6YOP3mbiFMb7m/fyRJgbUlqIogFU6P+AhKW1KhCCp7X7WIrKpIe7jJ4cBVjbaKm0Pg9fNzhAxMY8faaHxyzH7k8fGBvh+4fP6cy11JIUt8iLRhYmRmv9sjhSP4kbmNrPY
Vfn6Bc4khJIwq2VSGqRuIIlCuKkRRQzngZIZxBVVh65oP79/hg8CYguk0MbUSqQb6/sDd/QP7ywqtdmijGYeOd99+xdnmknVVEAKMJ0dVCET0nI4fcGXBanvFy7MG7Jbj8cDYt1xt1sTY4oeARmKt5mHuWGGxVeQ0d3z8xUe+9/1nfHXzz7F6S12d0VTmd/ZO/1qeKyHE94A/Bv4Z8HeB/1gI8R8A/5w83XogN17/x2/92Lf87mYMH3ImSr6ASqQoiMxIJbFCI5Xi7u4BW+qMJR87qlLStyXO5FFiP02ENGJMQASAvFv68HCLEpKqKinKEqESIRYM84Q/BMrRUVY1AYctLdqsqeuScRiZ54CQfV4oren7kSIWCKmz9ERLjFYEJ4khMI0tRkumeWSesj9mnjzWGNrTia4/0faWi2dnPN7fI1WePvgYOXWPFBakKlBKYdSGcRxYNQoVFDEoBCVSCKYp4JygMCuCFzTVDlEJgo+cDiNvXn/JfmvxwTF1fc6/QuAmwdBC3ybmvsYNEa0chbW8fP6MsY24QSNkzJIaUxOjoywlUlWklHDuwG6fA2z77ogQAW0rZh9JIssobCnoxxMxeZQB7yVdf6C0xVLQJYSe0DTEENEysdtUzGGgKgwqGUTKO/FSKLzzeJ93ATebDUZWFBXMbmQaDkiTm/OpdwTnECJijULJyDR1hDCilMfqElGt8s5hscZYhxsj89QTU2BV7wijJ6QZqSK2tBSyIgaVPSsIghAgFVIYjl1HN7Yo7emHFiVXGbgQEtM8U601zimcT/hp5ObhA8HNeJ/DWAtbEVOEEIgJpExL4VoQ9EwMnuBmtFS0fYeQBVKU1NrmrCJyUPPsE7NLUKbsF0uBdhjQ5ZFZtFhtkFITBk/fD2iVzfzWlpASrb/j/fuOm7uecZr5yY9f0ajA4TQhTgIhDEIFurFd5E0zbmyZwwFUBUIzTCcqm/PaUkzMcQnjPLU5l0rMeHeNMYp2PBDljhgVh7bFlJr+NGO0pjAaRERS4B0M00TbtayLZimYsgQhBMWpT0hhsCqH1dZ1gY/5PJXGsirh/ePtEnAqEVJTlxW7qmbVSGIweC8xskJ4v9DONGVV4+YZksNYi48193cfOVtJjNGQMlLaiMD3Xv+Qw+GesWtRIgdWa52JplM84mNGpiupsYWlqC1S5kwgKTVaClya0NR8+MWauw8V7aRxSVDIBSdOntoYbbi7vSNOBwo1st1arDZsaqiKTDrzbsQWJVqZvDnl3acGSmTTCoK4FJbiqT7OH7nmzwWg1fSjXPKA+OQ3iT5ijc4aeBXoxzGX4UvBCzHT5QqNlBIfsjwlS+lm0hN2PIUcaBri4mfJ3iaRbTlIJSgKS2UNUsz56aW4ZFxlmAFaoFXGjKfcW+UpicxyOiky3EAKSYyaGJ/klFnu5r3Hapl3OY1gdj1CFN/5z3hqar5rEJyLCEWmfS9BwimC0aCNZAz59/FTvyqePEC5yZMRgo+4kBYQRFrIh7mZKYxGCYV3Ae9Y8OXxyUmUH0cqmqZmHpfQdJ/QRn16n+SeKKBUIARDCGRvg1gkkUqSlvNilMjk1dGRvUWZ5giZ/Jhi9j9VlWBq+S4wd5EWxkQmQIbIxWaNFeCdJjmehpQkkeV3McaMrl5CbedZLOudm9dMq8xrXhQG5zPQI0WB1nlNSE9+vLz8Lky44HKAcEoIAoIl70vke+JqV2VasIRhmEA90QvzywiLf2+9qiisZp48ziUw+dhxwdKnFDGqYFXvaeqKDx++zl6rFCHkr5dFkRvwGBZQl8SHsEhx8/vAaM37d+84HA4ZPW00IQQ+3/8Bja7w7kTynnWZT6c2GqkF33x4hxYWNziSAFPl3MapSxk2JAWHQ4ctGtzYoWWiMAZbJIZTXGAaisJK0gQSxTCMjH3LNE7UtsHqjPqfpkhd7JjnO7q+w4eKqloxzCdECqybChcT7emQ0f020E0Tw5y4fHbJPJyoqwppDD6AjBN+0qyaNdpWDKeevd3w0N0ypwDGMwvH9eMNhdUZQiIc8zixqg11XWUQDIlxPGGMYRwT4xg521eMfUdwhhjz+taVIQXB7Ad8mqnXJdJL5mnORFOd8mufHUIqtC7RCnbbc7rhgWa1xdiCoqzRxhGdIipJVDpPb2PEi0gwMykdeP/xZ0jZ4YNDWUlSIm9MicAcHIeuYw4RqSPjFPOGShAMwwkVCrQwKJUfdx5m5nKmKAymkCjnOU7HfJ2QGVZjyhWRyDQl5nHCauiO2Rfkk2OePC9fvcKNR0Ly6AJ0oWmHge06MwGa1QZrFMfxhBY1WhcoLbi4vGSzqVCqIkWBHyZOh1uUZfHjGyqjEaXh1HeMbqacV6w3Fe52YibLEerKoqRGGUUK+Qq2263Z1hUfx47BOYIfGCfJMHdgLEkZxnHmeIzEqBYKaSQOAk1BUWQCqQs5PsJPFqESFJ7tek3yCqslLiY8sN2u6Noboo+kANM4kkLE9Sfc0CO9Z7Na088jjTQYW2Cqgs55Wj9RRU/0OWx48o7H9oRRME5wPKrf2S/9jZsrIcQK+G+B/ySldBRC/OfAP1juQf8A+M+A//Bf4/H+HvD3AM7OVlSlIlm1aM0TSUxoXWXdeoxEn3jx/JIQB4Z+wpqC02lFUVgml4vJqCaqpkLGSPACqQTt8Z6Ls+es1yWmyNpnYxqu7685DSPD6KldNrIaK7F6TVNVC8VI4/2MkIlVtceHCecVWmUjNiLgAzxJbNr+gdJUaCXxzuGmyDwFVhtJSp6+7/DHQNVYDqcjVWUxWpJEYHYBY2amOeCdpCobpl4ylIHg8k1OK4t3E9M8khLstpcEr6iaPGWbpoFpiHzx2Zdo7en6A2PhGbqRaUi4CdwIQx9R6RkhPhLJuvXL80vefjWhpcWWZDlbYVAqZqSv0VTlim+/vma9qknJc2rviIwU1YpxGnHJ0TQV1crQDo9IIVEmB1yOD49U1S43QmJC6hmRdsQ4YK1gvSm5e5ioqxWF0Rg9o3RCq4YUIkpHtJI09SV6tWEce0b3SAotSUu0LAlTJDiHMVBXJTJFvO8Z54SWZgk0LRYPRcF6W3N6bD8VgNoK3BwJbkRJhTEl82RgjlRVRZSJwY9ZoqQC49wzzh1nlytubm6oqxIlCqSSuTHeVfggiSngJs84TNSlZZ5d3okXitllUpzIgwGiSiSpkFoSSbgQKMqacTxiiwJbmaynjnknN6aI8wLnMm1NmxUhJGbXc+xvUTJgpUEIw+x6xvGENSWFzX+kijwMD3zz8Zr37074IHn1ZkWxM9lIniS7/RWyMQzzI8HPaJPQo2OejmDEsls/kpxCKLVMMTxpnpndA6YMeBxd32KKLf10yAVo0LTDiXQjiDM0dYXWFcoq/BA5HQYmPxJSlp1NyWFUngqMIfL1hxvWVUEpNFIF9vsdj6OkKUrWpWFdJK4fHgCWoh6sMmzLM2wdCN4wjyUhJdw0I1ImLUoZOd4dKAuBLVYoIXGjwe4DVpmFKidI3vDqs894vL+jP7WUlcwQjmImuJExRGRpGeaczWaNZrteQZxJJJ
rCkITkNI8cHzS3P28YJ4sTcimSBSLl3foIRBc43l9ztk2cnxnOdytOp5n9ts747JRDQst6C9GB8KSoAIHMhp2FV/AEWWBZG3jCsOfJlUKbpRn662783DhZhdE5pHUcJqBcmqtlpVOkqkwmrIVAiB6rVH4NSwEvRaQbTswOQszX0KeJQ4YXCApr0FLyiWyX0idSYYrZr6RNnpqlp9e0TEqiz42aUnK5hzjSU8UvBCJlaZnSOYNKSIELGQDCAqzIhbzAygyY8D4yzQGhUp7gqWVNAtSFoiws0+SZfQApspxsWRgBi3Qv0w6dT8sDiAVcEpAkitIQQmCe4tJcLUW5YKHoCYxSVKWlb3NmVohLsHIMeTKRx2UomfAuQzpCTESZm5gMNMmjxKrSGGOW8xY+NbAiZSKWSGCkoK41j21uGNJyDuHJA5YBC+t1TfIOEdRTL579a2lpmkJEmpwn1fcz46iQqlkK6VxIRXJuW2E1IfiFcpgzwqTKFLSn96KQIt+TQ8DH7PXTy7vsqauLIbBa1Vir8WGi7ccluJnFd5bpi+BpaosxcDpMzA6MXoKEkUAGsyhtKYsNVdkwz7/K1+wYc0MuxDKF8igpQebGSnq/kCUXAIiAr7/5mmHo83udXB98vv8D1oUFfyINHatSo2Se/CYR+Orrb3hzfpn9tXNCF4LCFgQH600JCm7uW7a7FwjdUWhBZSxlKTgKgSiyuqIwEqcEWkm604n+1FIIzcX2gihbEoJxnDEqxxacTkf6YVxoiSNiGqnX2V/78HiLCwKZfCabKsvrN8/55V/+OdausNYyNznbbOojMkh00oClkCrfOxTISjCMjofjY/Z8l4rCJnKuzowxCmkqhnlm7B5IyeJ9nvpWxZ7j6T3BWaTS2EpQlpbu3hOZsYVif3lFd//A8RSJbvm9jIFxmFGyoLAZ8iOFJqbAerVFqnNcyJAuH/O1R0TP2PUQFcqWWCmQ0vP4+Bu0FPg4IMt8PfLTjCDhwszo5gxkSJ55zLRQ+VKOggAAIABJREFUawu8ExRKfNoAEliUSsvvjsPPHiMEJ9diTInVFlMWSFlkCEVwIAJSObrOsd03yCQY3cj2bM/dTUu5khkuIx3HQ6Bp1pRlRVk1hFkyPgQIJWK5drx6/ooSiaAkRYkTiZsP79nYHdvNJm8OTi1KF0xuYBoGpi5xeb7hNHY4ITGFZVUbhpCwOqEjiGQ4W+1oCs2DNkxuJvmZvk2c+keUrRGqyKql2VI1mwy0mQe8Txibr+VFIdhsS6piyxQmQprQUrM/O0PLCiUSSQasljSV5Zvbr6nMGmJBd2gxleB4uCNMHqM0RktkL3IWrrZIVdCOCxxvCQM3xnLqRro+IjhyEgNh/C4a5P/r42/UXAkhDLmx+q9SSv/dclH9+Ftf/y+A/3H571vgzW/9+Ovlc3/tI6X0j4F/DPDFly/S6zfPmIaZm5sjtw8fefH8ipQUwzBxOBxoinP+4Edfcjz13D7cM82PONGxbgS1NjSz4t23sL26YEgD/TQwe8/ZZo1RWaOecJxORy7Pz9lszhnGiaEf+Hj/a06He/a7S55dvGRdnTP3I5cXe6bZM00zLkzMs0fKDqUkIFGy4vHQsl7npu3m5pqmClxd/hg3S/p+wLmB+/sRI9esq5q2G/jFX9xiSzg9jlgL+8uC8+1PaLs77h8fmOfIi6sXtO0dMYw09YaiWPPx/o45HikKRVGUlMWaH//R3+Ht+1/w4f033N/3CCxloVGqZJ4DKZ24u56pbUnVWFZryeHQ8+rlZzi/wxQJWxgOx8CL569BHSkrzWZ9RkCAmLm+/oiUhj/6wz/m7uEOaXMAXFEn+mHi4vwFfa+Z3cQ8nzi1txhZY6xEa0VRFOw2NdXak8RMCFCVG6bxSFXGvKPo5oz+VudoBdZGzi+v6E4JsQqE1JPShJIl43Dk/bs7Ep6Lix0frzW71ZZqb4gxcBonznYWAmhTIVTNw8eO0+lbrE1UpQEhOTxojseOutliC8P94Y6gRqIPaFHgUsHkj7zYnVEoy+gcp+5ArTU37T2jb1GGHIAoa6zeoVWZ0caToO8CMU25KJsmCrY0Z3uUGBhix+Qnbm4GXr3aZxN7HyntGe/bn+PCI03VcHX1Pa5v3tHYPZvmGYVpOE1fo9EEt0aIPK4fhxGjDUaVKKmpy0jbfsCqislLnAyEcKKqLEXRMPuZw/FblPbI4ZJn+3NenO9YN2dIm1hvrzi//Izd/g3r1WuKcsU33/6MD+9+yv3Nz/hie848C/qTJ7iBspwpjaGUCUfAxwkfIvP0Ea02SC2QKpCEBtlxPL4leEWz2vF4uKHSBePoCNEhlYEw4dsVylTsz17RHT7i6zwoM0pgxZG/ePuel5c/4GpzzqbcM/UHCtHy/TevsUpwd/+RVy/eoJRms92wXq+5ef+YZcSHt6SgQWwodwXCLYTFMNDfv6e9cTzbfQ7bEqNLnl3+gCn8nGm2KLFB6YbJR64f3jKliWgVk9Ws9AoZR5L3SBnYnht++ZdfoVJNtWooVxXzpNGToFlNoDXT8IK/+umPcCFn20kESnqiA2kUKMUcPB+//gUvVo4fffkl67M17akD5sUbojIXLWhEkkyzYxg9/ZiIRBRPEIJcmD5NU5YyNRejSIyRFMaghGKessRq2bhfENXZaxG9ZB6zJC0tRawXASEFAk1dG3wIy8aFwMeR3JyJhbRUoVWN8+Oyw/uEV1fEOJGIGGOYx4lhDISoUFLmUOCn5qu0rDcVh8P0XQO2zIBiiiQ/o9SaqqwIIU/BU1oyrhBIFEplats8J2Yfs60rZKKeWORx61WFlA4fArMDZEKhSckTksN7SbOz5FBaiCnTFckM0jzmCtmzYXQNRHwMyy60IqZ8TFJgtSpx80DwhpQUcumChchNnEhgbEDqZSPO5wYsk+ocWuQGOCVPYWqmcZnskRb/V8rnZUH5b1aGtu1ou8g0CdSSo5an6cs0TQmig3HMeVlSyQzCQCKkWqiEjmG8RiqLS5oQc7wEy8QqhYREoLXAxUiIioRFS0NIcfGRZSlvip6qtJlIqUTGoy/ZV0okko+EFCmtpa7L/NojhBioVAELDTCEjGPXVoASeJcYpojSBUIsgcxJEJPO8QQx4ObAMIy4aLO8OQlSEkidN7Sm6cTjw1tCKJjz4uf4FyEwOoO1XO7MMoTJB4IMBBXQSiEEnI4nfvlXvyR4j9U2qx18pC4rLvYla/vI+9/8itUXFxm44SfmoUU5R7MpWIkzdFTYGNm/2dH2d0ghcV5QFoqqsKRgkbJEyYZNXfOBD8Q0MI4T7iRpypL1vqZUgjJJnO75/LPPkapgcC3H7o73737Bs/NLmpVlGDvevr3j1YsveDxM3PcjZVPx7PUF3371Dj9GVDTUWpKmA6VQKK9Zne05e3OJGB3D0XE83HBz3TKKAr+dcQomkZXHxeqcl5+1hDmgk6SWlrOy5NQd6SZP0Jr15Rnh/iMkQWkVspLUpUWwISE4tR3v397xxQ++xMg7wCJEgxQWazWX56tMVg4dJhVE1yFsiQ+Svj3h569Zb
7P3NASfLQDe5rgDF9Bpoh0dZxdr1KKuMIUlSYutI93dHf3xESWvEOHEcNIIA8UGmmrHsT8RxkhRrHh+9RlCXNEeDyip0aJk3xRUZwpbSk5Dy93hFlFotrs9KUm01FS2pD31xDRSFgZVVwgJ0o6s9g1FbRE20od77rtHdhf5evHh4ztK+4bRV2y3Jeuq4NvfvKO2FeNpJrgJaRRvPv+c0B34xV/+msfTyP7Va/bnTSYgNpY5jPxfP/0L6s0ZZ9vXCDnzcPcbvvn6gqqwkDQuQTe1nAbBusnRIWFUDL1h6gQIRV01SKNpbx/oHu+Ywz1SFtRljS53PLuydP3Ex/cH2uNIVfcMNyNRemylmEZomhI3uQzH8pbnry6Z3T1JgFWR9nCLG2cKGRj6nmPb83s//gG37z/gfCQaS3dIuFPPKBWiH5BCIayisZZCKASCIBOnxxPjkHDhCDhUqn9n3/Q3oQUK4L8EfpZS+ke/9fkXix8L4N8F/nz5938P/NdCiH9EBlr8EPg/f9cxYgg4L4mUaOOy3KUsuLu74/GhZewj253mq6+uabuBKDxf/N5LiqLg8fZA+/hI9AMvzne0dyPNzmILyeOx5Wx7hU6GcZgZji0yOe7u3iGtzbvr0vHyxedM6wtESKTJ0z48cL7b8Yvf/IwkPKbQCGGJOHz0sBCEhqHjcOgIaYPWmqbe8cXrLxlamUNiteXVFz/kf/2n/xNFbdhsd+y2e/7l//0zXn6R8LMkRcPUGl78Hc3/8j/8CeP4yOXVhpV+zl/++Qe+/4NnVOWK0pacX2pev3mJMSuGPvLh/Q231x95cfUGq0uaesNqbTjfXfDV17/BTfDq+Y+5vHjgfH9Oln3sefP5ZyR54MWLP6TvHPd3B4b+yPpsxPmJcTxy+PDIixff5/7+EWu3rFdnHI8OrS3n+zXOO9q2R+qC6Gva7iPjfETInIOzXlXEoHGzYg4V5Urj3HsQOUj44fBrSAGtX5OC5vG+ZbVqcD6j9r2LGF1x9ewFh+M1khllBDLNXD07Y7Mu8W5Cqsjv//7vIULH4eGBth0JWAZlEDFhCo8uBzC3lJvA4f7AOCsuLyI3twcmf+RxtGhVUxQVUQR22w0iSbwb0cpxOF7jQwaBlGVFexfY7p6z3U34dKLvj1yev2ZVr4hpJomOxmi6YzbLawnb2jL3UHDG+uyScNbz0L6lKCp8cBRaUtcFMRRg1jTbGltZTjIizjSn4UQ/fsCMBcaDttC2J4QUlE3F1bNL6kYyDgOw4mL/hundwPl+g0iG6DWri98nyhZbQTuMHAeHShcUe8XGnFEWFavqOYfHHs85+8sf88WXP2acR6R2/MH+3+H1mx/z61/+M/7kf/8n1HaiXsFqpfG84Hx7xfH+xHh6IEXHi+df4OOW/tTjXcQKleV+QVGUPWGh1FVKUpYWbUuELMGBVpLj0JLmkZqK7esdxzkxTiMxRc7qM/7NHz3jdJxpdMHl/oxq40lzxfXNW3rniUpQBE1KifbuwOH9DXHwsFJEBLEAUyauyi0P7pbBnzBScL7Zc9qDv78nMqNEidUCJZ9xGgeqesvu7DnJBO4fv6GoE8o0uCEQeaCoaqr1FUJsaE8G738JITH1niQGyhjohWHmGe3dBT/7FzumWOdibtm5j0HRlA3d/Mjp9iOhPfC3PrdcvXyGsXvCXFNVNdM2ZwaJaElBkETP3PZMzjEMecdUCM3MRDbT5KlYCkvAZq7wEUhiSFgjWTUNu805X6dhQUz7XGBKRSJQlGKRoQmcA6FzBpaIAiktSgpC6HE+T3tCCBij0VoQpgRJ8vzqkrJ2TK7FuYQ2JSSXJ1sEUnJE7xnGitlnuYziO2ljTAEfRmYH/bBMdWR+fQmF9yPPLs6pKss4nmjbDqmKJyBdHmzERFMrYswToIgipvCpmSQl5tljdUY+u6iReloGEAEpRaa/iUBTrzm1J9ouMAeBVi0p1QidoRd5chW5uXmgHzSgCTHkr4klWwrBZl3SNIL745zl8soSo1ueT54sZTLdimHwjJPDB4HwJVqaRYaaqMqSdb1hHjLOOKaEXuIrnsxvUiQEjlM/40OFlDqH6IawACuyL6muLPf3tzjf5CZ+mUhl+mRuzI2SFEXi2CbmOdOiwxKb8TR1S9EjZJZtRSIuCgIOyJOf7AMjKwxEZJo84wwBidSS4CDztXLEhFGC890OP0PwfW5UQ4AkESIj76VwNJXFuUjbRwIFhTXLGizTN/JGSAgTQSlQJZ6wNE0JCPg54H1idQ5B3PJwDAxjgUQxDf2Sabdbcni+w/QbY3KkR0xoKUkh8md//mcM44BWOk+4RPYkPn9+yeUOmlWL30VWSqOE4HiYuLubuXj5GtzIcJpQZkVar/n49Z/wOJ5YNWcoYWjvHxgOkvNXO5IU3N5f8+6vBmbxwNnZ8zxBTkce20fEcce63tAYzc31Hb9++46yNGw3Fc8vLjmv99zfjFhdoIoC4Vse7x7pwz3n+zW77R4rK0ya2e8l3XHmcO8JXc3nn73OQcw4ursDj/c3dMORspywq0A7GNr5Fd2YEIWmtpL7r/+ETb1hTjPD6LmdQT5/xer5FWpsOTw88lf/4qf85A//DawRgM/0ydiy2zzncDwikqdpdkSfp4aobKm4/ngE33O2axZJ8sRutyKIAl1aplPL9fUNF2eXHO4hpgEhI4UtQWTZeVmX2PUW33qSr3k8PiCU5PPnGw7XXyNDxZl+zvrsDaHsOdz0fO/3fg+fHDePHzmOFqclq6uSqlpBqVFRIzS0/SNuCmg2XD4/59jf0gdHUDXbC41Hs27WBB+4/nhNoRp+8KMfUpY1KcLd3S0uVhA77j6+5+b+iFaXfO/1C+IwMMwH1Dzi+wN9P3AbevpVQ0yS4FVuFkXCO8/bb+9or78lhZn9rqbWFfWrZ9T0XH97zfXHAz/6g3+b0sxYpxkLTWHeoCzMyiBioNSCs/UFVh0w9RlBTUQzwkqyLVfMd9lGI9KACIoXz1+gjcEHweEwUdYFp8eJol7x+fcbDqePdKdIvW2w1mJMw7uPLafTzNnZmvOLPck7zs8r7h/Mp02tx7vEm1dfMo0zhV2xO3vGu2+/wU2S/fmOuqo53U6k1PPx4yPb7Z5nl1sejn/B/aNF5xRSojCUZ1BVAjkZnINE+zt7p7/J5OrvAv8+8GdCiD9dPvefAv+eEOJvL7eq3wD/EUBK6adCiP8G+FfLlfDv/y5SIGQj9Meb9xhdgtKst2dMU87/Wa9rVnXNZmdISGzQOR/irqM/XePHGRkEVq6xuuboj/R3R0whqOo1D493lLag7U90Q8uTGN4lmFzejWR+hUwTyXtE4aEpOR4+cribcNGhbELaiXGYSbFByZKqqSisoy539KdI2wbcXPFnf/o169WOZt0gleanf/YrSrslOI8bNDJa+hO094mL8+dcXFxx+azhN7/6C6oicXn2itevX/DsquD55Qv8FDncdyhxZLVuSAmmaWCcJpI4cvcwst5+H4mi0BuaRnBz+57T6ZFVfc6zq+f85ptbknxktdqiZJ2T1R89fTfQ
dQPT3HF2UbJeG04nRdvNtP2JflIM85HSbnFR0A05I0eKgsIUpLJEiZLD8Zp5dEgqSl2jC4scFVcXNdt1gUyer3/doTd7unHi1PbUektZZj1vDAPrJvHy+WsOjwNVvabc5cBOqQJCD6ToIVqcD/Rdyzi0kDxFIfnw9ltWlc7hjaVls/kMgaNtPzAMPUwV4ywp7Jb9fktwjvv7jsOxZXteLmbyHo/DmpJxzLpzIQPrbY2fA2FM9EOgPx7YrnesdzXOQ9v3TEPk5dWeYchI/cjA82cvIPRoUWG0REuH3pbZ41HmXwz/4NnvN9gSjEwUUhK842y1Ad2hZECbgTQLxkmiC0VVaoTxuBB5+fIH+OB47G4xpeDh3lIoj1D39Kd7Ntucbda1HWPnqKoaqQPWnKGTpJQAjnWERq0Js+DDza+4evYlSjn+1U//KX/6L/9nbF3y5ovvU+gd3k/M4sj580um7pYQA2FWzEA3jfgUWa1XJBIPt49cPttSnze4eWYc8vlxqcT6EqEU2+0V/rFiDgoXslxiZSs+3jwiK0NVKzaVgH7ief2c2XUEPyFDSRKCs3WF0YZudHhbZWO5rUi+Zepa1uuLpVgOCOlQxqGjwE2KfhqYphPHoOhHj8eSEPROYCpQlcXICklBZT1aN3ifw4Dff/BsdhuU3CKJKOXQZkYLjZs03t9Buodiz0X5guA6inVDsVvR3j4w3P0hP3+3oh0KRq/Rn6p+iZQKpRV3D28R05HnK8Pzz19wtjeMU2R4uEZIibaJaTrhfSClHCAqVC5UjGDxJzjKqlhkXE+kvrzbnot+sRS/eXqilQbc0pD0C51NfAIwAGy3G5S29P2UZYcyy8dSFESfIE4o2eBdpus9+ZVCyMfPmOAT87zINWVuVoiSgFsKcIXRkm50n3xEC/Li03wq8wgS3eJBebKQpYw9RIqI0dkn5zyL/0l8ynnKxXNEKwNJkZZp0sKF4wmQUZcK53IjE56kg8RPPrMUA5O/JytWNVLq5XnG76Z63tE0a+KcZZAxJKSRhOCQcvGDkZAqIeQTAp1MmPvkchJIISiK70JtIxJExrKjc2OuJEibEemzSzifPnmepA6IpDLEhEDdKLouHy8tMjlSWkKh8217vSqYhk+JT4tPLK9hjBmiIVNE64IQxCdfVkb0Z3hKTCErE0zeCY4xH/NTtys+4TNybo4xzHMihLjkXmWfoxDZM6ekpKkt0zQyDPOST5ZlpDE+JWalZfIXmGfPOHl8AGMipOzLyrLAwGbbYCyM88DdfQ+sstxPpMWnZgihxxpLVRh8TDiXoUdKSZq65my3y/fHxR+pVJZfOjfT1DUpJbqu5de/+QqldT5+yl4xKSTawOPdLUHec9lcoFP2dJRKszaGU3AkNthSohEo13LevGTy1+ANAsVa71jVChlGopcoFC+fNYRqjZIaGTxJlyS7h+SQaaapK3af/4Q/f/eOZBr8WOOFoS4k3wy/IMYGowoa07B9dsWvPnYUVKgpcvPwc6bpxOnYIIRgu9fUjWW/v2CaAzFItCq4/OEzPnx8y/2HO/wc+eLVKx5PJ6QwHA533B96fvj5Z3g0SQjquuHsrKLvHlGLhNWKklVVgvA8HLK8va4qxumeb7++4/LyBef7S9a+xxYFD4c1EY1QCVVY1tuLHGsQDIXaEUWgHwImuZzPVZWsNpqpTyhToXRimmasPGO9tnlKpBRD6tBaUNrE5DKACVUwucjpcM0wjNhtwZdv/ja2FgynkWEMKNtR1Jay3CCl4XD6SKM9p+MBUxRsLncgS/o4oeoStdhhjF7TGJM3MkRkvd/gR8XtQ4eSHaTANLV0x8joHzBFw8tXf4sQMjDicOjo+8A0F6wqixIN3eDo3QOrsiKKxJBmFJAUdMPA7sUlSkLwibnvkLxgtX3ONFaM/h1VUcHQUJUJKXqGMKBlw/rFluF0IM0g9Y6UeqQyFLVEFpakzrm5/ZrH7oCbRlSIGF0jC4FLMAuBbErsZktKGaRmLLi55vd/8G/h5iP39w98eH/PdrPi3fU1j48B7xrWK8Ovf/UNm02JNRoRE5vVnt12TadmnAscDjdomWjWFiUCfuyY3IwpBGfbElsIpjgiZEG93jP2HX3fM4xHzuyOpBKVXbOpCyLd72yc/ia0wP9tuSv/vz/+ye/4mX8I/MP/v8f+9P1ETt0DRpcoaTl1gpQKjCmQIuEngxIVujJImYu0x7uOY3tEJYXV2RA49DOewDRNzFEitefm/pamLpimgXmaUFpjlKXQBW4KjN2AqxwpOKZ+ZNQOrSwIhxLQj4G2HdD1hKTCoRlSIvqZsipQFHTHB25vTnTtwLNnZzjvcd4ho6MfHilKzTjAPHq8a1lvDKvScHV5wdWzS5qV5u1bx/e/fMZmtef87JKy0Lx8+QLvOxCRrjtxdran67JfY546nH8AXdD3NzgPUgfKsuZ0CDkstvQoeyKJnnY4YcpEYdf46BjnE21bkICyUiR8DgPVgqox+FDgwog2INSMj0e6YUKpwDzP2aiosg/r4XCLKRVaGiSWSmmi6zF6ZrXybDcOLSQ3p4KhB7zP4ZgWdK4NMCbLGQblqYqGsjDMoc2ToORzGKOPlLZg6I8IfC6clGHoHFQ1UonshUsJrTVltWIcHfNC5lOyQGqFwzFNnnq1wliVDeMJhCgATSKHbaYIQpQIHXB0eDzKSKpdiQ85nFZLi1UlWkkKq4hR47xGCoOWGiUVQihSEhgtUdoTU5YLNtWKslovY+aE1KDLhE3gAgQXiW7GjZ4UIloajC4YRoeLkSjJUqUwg4zMzmNVLibGcWLVlDiX/RFCBVzoCG7OBV9UaGk52+9QwxE3Z+OtCx0hjszjyKk70nZHooTIzG6zR0jBMAxIXYCxzG4iekHSknGcUSaDMgSKvutYVWtinFBS5cmBCIioc4EnNYqSqA1h9ggUVitKZXIRaSVWRUR01MUKFRUou0x2IilFvHQEIwkJusMJgkEKiVFZGmYWI07V1KzPNhxPHWUClxxumpjSwEl3TIOnWm2Q2jCGgBkcZVnm3J7k0KWm645s9mccTwP3D0fiydE05yiV4Qjj0HH15jOUrHHDI94NiFJR+zXjYQPDmjnV3F8XnO7POQ2GOWTACykhVA4ujX7G9yMqdpw1cLUvuDivQUMcB4geEXMRbgQk70lCgspgCWMK5nHx9riELRZSxeL3SOnJJ7KMaLJhZEGXK+ySXTLPcSmil+YmZQ9GVZdIJfFhyvakfBFfrD55+lUWBYdTnggl9NJgLZOOlIjJMU+5QfiOuieW5icX2FZLTt3SEP3Wc3h62k9QDjcHlDSfitXvJI8h+3ZmRYwSoT+hGPLfMYfohqgz9CF+56N6gh2QIkIEJudysRgzmjz7qVjCmAVIl/O4QlY1kNQncl1MGRBU1wVHP+WJbXpas/w9GVoRMVYhVSIxf1pvISKZopGfX11lCq4PMecv5ZsuMeVsJuLSlATPNHm8z+vCcqyn14XIzfLoQpbQLZ65p1Oen3dCyIBUOudexewlW7aGiSEgCDksW2p88Nn/xXfHS0l
98u7ZQhNCztB6CljO06+n85HyOUgJ5xJ+QbWTlkZJSJKQBATGCJzLWWjLkGKRuOa1TGTPXVFoQgrE8B0M469VNClhpMrfF3z28rBY75Z3H2SFjdEapRV+ztEEIQaaumS9XlFVFc65ZQr3dP/Inj5rLc45Hh4euL+7z4CLPIRc3suSwgr8NOD6mVpuclh4Etk7yUSIgXFKVMqgZUKnmSRLLBUyKQqpqWpJ3ZREI3NhKg1FTPRK41PAqEhRFzhXUGiD0dnrOI+BprAYlYPtW9fiCo/RilVTYXVFCprNZkP10KAFyDgiGNFKIEUORdc64eaRyVfMKeF8IPaRJDZMPXRHT5w9XIyICFVlmZJhmPJajC4ikGgJLk7M84RWGcKklKBZV0zu9P9Q9ya/lmXpdd9vd6e/3WsiXkRkZFfJyqKKokzKgG0BNjzV3P+nRzJgwDZkGzAkkaYokqUqVZOVmREZzetue7rderDvy5I9IGAYNuhIZAwyo3n33PPu2d+31vothnEgRU1VGrSqmOwDITpS0ng/Z5upqXJ2WUGMlrK8IMgZl2ZcChQGnHcgwCjF1dUlZSkgJooyD2UujNTlgs2mgeDojyeEJC9RU4+dew7v92yuXuWMmhZoI6gKg9aaae4Zx4EYZkpVnXNVT/mmAZ9czpEmsn03WfoJikojlEIViiTLbO8PMz44QowkpTj2e+w8k0KkrUtm60gy0/XqpgMcQlqUElRVQ1HX9P0OqQq8C0SRzwtRnK3JMeXPa61RRXF+UCRMCUZGivIZz14u6dYbpunA4GeUCZQmspYeNyaaosDJAkcgEDFaURUlBJ8t/za7JnwY8MEhQkHZVYz+gA3gkwClGF2grUpCTExjQLHg01c/5btvf8vYb+n7iXKRqIoSow1aSZSU3D8+UJjL3OEZA4KEdyLXC8WJfjigZK7ocPPM7AIiSpQyVOUZVhQjy8UF0ZdYbxFOIkNeFJhzRlcbgy6Xf+9c83+LFvj/1g8pE87nbqoQEuN04OriS7q2RgjP0E9MQ8uzC4lInqEf2D4eOQ4Tl5uOotTICB8f9tAKglA4G/G7PY+7LZOvUAiU0BjdolXFZnmJjAdOu9zqfRwCu91EignnBIuFoGtKTocTw36kUQZNxzRLTn4kJsfl5TWEyOP9kdv3dzzc7flP/uynDNOM8z1JWJabBFFCKjIhaRr46us1l8sVL15taLqSeQ48u37B5c8qtKxJ3mCt59NPX2DdicNhy+F4oq4a9vtHlIr4cGKcttQXm5xfiQmpNELe0HY1IbWZLGxIAAAgAElEQVQoPdPb3xPVlt1xABmoqxPee3zcchpgtVrTdhV3dx8h1hRVYLmqaNuWoc/2vhQ9KfVMdpelWzeDTmilKYzBxcB6XaOVYewThWpIYcCmPWNwvNjAn7y85n/+l54UJEY37PYTWicuLxcUxpC8xPuZbllkil2UOBtIaSQFmMaAHT3t8wX9cGS9ylkKUkFXKup6wThNDOPEw/aW66trFstPUOqAd3cUOV9MStnOU7cL1lc1w3CElNU4SUtvj5RlQ3Ieb2f6k0bXCuu3KJO4ur6k3XTcv9tSmILCNLSVJ4Qpb0wKSX8qcJMkBZkzGSGc0eCWRWkzxS06LjdXVPWC05CYw0xSArOIjGHGDwE7RuwYSUIhjUAXGqShH2eQgrvtW6x3jPNM1VWoYkKVLUJUpBnmaUYpTWFqqrpgmnsO2xP9OGBURdesef3qS95++0t2p4GQAsXScBi3WDdTFQ0Xmyvu7m/Zvb+jFhCR7PYDrk+ousDZp+2axE2OZrNE6YoUDYuVoesW7PYWRN5oHvsDkkQhK1LUnB4HbBiYRktbNbRlhZaBxbpAG0kKnnG2XG8+Y3ca8yxgJPgRETzH0wljHHW9YPf+B5rlM3ShqKTCFCUy5tzIYrNmM9ww+ltaDId5IE4CPysG7xF2YFE/I5mSh/2RsHPUN2vm0OODZ1F13B9PvPj8cyhO7IaB07THVCWF1Mw2L3uur76gMCvG045xPhGUItpHrH3J6aFlP0re3m9wSSJlVhmEiKSkziAGyzycmHb3fPZqyaubJU1dkETMtQgyUpQqR+wFlKZiDoqoDVFp3DxRGs3DzjJOkRgSPvisQInzIATnEtanw2qGDpCgLA1d19Eu1lntQeYMy3k4khLKMhP4vD//OWdFLIZMo2vqhrbpgLwtjFHnBQPp/BAHEmdlIj/YVRK5tw1DihaZIqWWPAbOhcHnpFiMJCEyYl3K3CfkI7o456meBpazcmXnmXmGmCRnAe5s68v/xJCHPOskIYAs8oGYLG7kAT5MTM5jXR4USHmYfbKxCQSmMBwODmfjjwqQ/I8GByETVVXwsMuH9kTOUEX4Q+9SSj9SFmPK9ldFytclGdJZ1Wublug11ubKBZQ4X52nYc8TQsT7jmF0uavsjGF8ovHlPztgnWOcHT5myl1+Pel8LTP50fkRoQwxZliNkokksroXo8fIQF1KUhI4H/PhT8QfrZUxl2rlQafK2Vx/5rQLASLJ7B44Z6+0SnhnmSaPcxmBn7H1EiESMeQslNaBEBzenVVRkX7c7j+pikZrVsuWU38eVPLf+qPSKc6DvUhQFkUe0vQMIZ3V4Kf7JGPxTVFkAJV3RDzRJ9arJavVkqJQxOizUukiUUqqsqCuSqqy4Ha/48OHDwzDQNu1eelFHlS11lSNotACjSSlmlOaSMAcJ/pwIOmWYRhRpcCUBUqXTOMRnTSFLGnKgmWpMaaEuqSsK7SUPL67ZTsNSBlZ1Ia2rTFK0JmGQueOwLffveHFl58SkQzjxH7Y4h8Dz69f8vzZBikVQ28xStCaGhFHYnR07QIXNF3XZAphdDzeP+CVZU4wTYnxAOV7gR0fmMYDxJH72z311Zds6oZ2cZlBPn3PbIts0wsj03FPpTbYOJGURBaS2tScxh0h5H457wRluaFbPhLSzKm3HE87SCbDF0qDF4lpOpF4BsIxuZljf+CqXuHDhBKJpm1ZrTYc9neYUiGkzMqtTBRVSd3W9PuJ3XZHu17Qjz2JmZQc/T5ycVlSVZ5Kr5FRUi81x+MtPnjmqUcyUxU1h6OgUgpTlTRlxewDsqqwPjDv85K1Llt89Hg8WidsgBQNIkWCnzgdJ4oSfOw5no64OdE1n6GKyHKxQpsS53rq0uDsAaMU5WpF2Rn++t99C2UiH/0FY98j6gKjDc5ajqeBzcUz7JxrkQSK5SqflUMsuXzxgtftaz5880tu3VvC+Vne1JqtuycNjjhKvI3Ydk9pJLUpGDwM00wpj3RVzdEKopcYGuq24f7xDusBoZCFY7fb0b2u8DFgR8uiumC1WHA6ThxOJ4oqME5bNosvaJqKohKQHG62jKeRWObP6Wke2G0L1psOU2jCKSHmTMsdhgk/eTqzQCVDkAEhMqH8anPJx7sHTKHpVEPVSmQo6Np83rV+pFws/t655h/EcFVWNVW5ZLvdM44j1XXH4fiBFJeAYPYDUiQ+/jDTHy3Hg6UfPQ5NUhVRaGIUNPWa7XSHMAUxeva7I68//ylg0BQ0puNidc
3//jd/zeH+EUlgVW8IY8V2+5YgI9oUTDGxFBsePg70u5lKKn72xZ/zL/+nv2b7MGIKzeayZh6/5+OHt7x++Tn/9M//mK5dcvuw5eJZQxIzLli0XnC1+MnZqjCji8jN9VcMwyO3jx/Y73dcXt7w06//U477W+bRMo8JO2tuXl5we9sTYkW7aFHFiDIHvBdY50hRMfY9vbNE4UjS8rD9nkXzGiVLYpTM88Awf8fz658wj5LT8YBUR0KM2LBlfxqQfc6kXW1ec3+/RYjIerPk5vkaIQTDcML5icIUxGLFJ6++QErJqT/ww7tv+fTqM7y/Q6SZ1aJCFye6lWEeG4YY+XiQ/PI3I7f3EWVabq4W3HyakfCX1xmvaWfH4dCzWK7zcjVKGnPFEL9Fi0tWrSI1kbYWjDvPpvsUaz3ffvuWP/35n3KYTzweHuiHLW1ZczpdIItIlA6tHca0DPOJpu6IQubQsoW6XTEMuRSuqQJ27Fk3BW5OnLYTu9+/5/XPruhMw6Zb8+rmC97cDhg5sWwXCBJvfv8dSlzkzAtQlh2fvPqMv3v8BRfPFuii4LidWa+XFAUwKuJkcX5k/2Hi+fMXLKSiH4/88P3vuL6+JsWc+Qpy4uc//6+I4o67hx+4fXiL4xFixX47MU8RIRt+9o/+CXP/AzLUBG+QpQM3Yow524sU69XnCL6lW1QoVRK94ds339MPgsk5SA491VB2FJWkbDqqoqEsBT///L/g8fGWh+0H/HFPv5e8/uQGfaHw08hp+IA7THTNS4bRsd8/oArPw0kz+5mpt7jxxMVFhfUH6m5JoQ1+nPnuKLi+uqRKAhktIyOLTYnbuwxIWBv+6re/448+fY2dRmBifVliTppClQTr+XB3y5gC/viO66vPiWbJYYjE2RETDP3M9m7P9t09m69esJaf0KxeYb3n0M98/cdf4/Hc73ZsHx748vKG/iESDYTkePf+lmdXP+Hd2xPawOtXX9I2axDvebw7sF4s+fLVP+abf3nJ6fgRpRJlfUm9ec3h8Mj2IXC/tzwOI7qs8gfvWfUQsgAluH+/Jdktiyby53/+KVUNzjpOk6VAZAJS0zHse5L3VJXG+EjR1EykjKR2I0IVHMaBfrYEcqZERPnkdDurGP9Xy1n+uSgKQNL3jmM/5usvFZwLZQujMFoxj4H+GJBSIVREJomPkUhkta4pioro1Zl4l7DWYkxWJLWCsgSfOQ/nYSRkgl/05945mRVOrYAMw8jENRjnidWioq4ylv/J8iZSJsgJlVHsi0WF1p7TkHteDJKYFIKMNM4AN5uxyuH8GsXT1XiyFmZl0XuPDxlSkf/NgyQygrAsFs+xdmKcHXFOSJWBCkJ4ICuCKc4chxPO50qFbB2UWaE4vx/WZft59vTnw/15bDpbEBPeOXbbbGUSQiOEwXtHEI4Qszq02iiqqsK5IS/edIY+kHIuKSWH1h4pFMqUiDlTElMSpBAIZyunUGAKwf3dER90VtBEnjwDebmtlKKqFNvtIz51RDQxgiAihTgDQlKGhzBxGk8E30KQ+OAzfffcCYbMdkUps5IrpcqY9uQh5V42KXOJdbcoOPUHhtHj41OPFxlcEsk5WZm4uLhknh3BH/O9L31eeAp1VvsSz56vCNFzOg0Mw4zUFZFMcIxR5q11qTLN0Sf6PuJmQV0aXt7csFwtCTHQNTnkPk3n4VgrdCZ78PHDB7755vdUTUOM4cfeshhDRkDrDE9ZXLQMJLZoohKcvGTvGz6//mOc+57v3/6Gql7z+euvGcZ72tWaqioptCQmjxUlpoT708Bx7yD1rLpLnD+dFyE5h6hHyVyWWB8IRUKpCuUGCuUJtYZyw6sXX7DdfmC2E1pXJLHltN2zn2aImqg2qE2LWhkmaxlOE8t1y9WLkm+/v8N7QX25Zrj/W/7xz7/ENDf085F33/8Os2zYrAyVviDMnl+8+RahLWGOgKNUgs3Fcx6O3xOGiNEdF8tnHG8dTaPwcWB3fIMeWrTsELFlsVjy2esv+ebbf8/vf/2RxcWSy5sFzy867HjPh/t79keHD4qL5YyfIqZM+NkxnHrGISC15vG0ZbQjm9Uz7rffE7nI5crPX/Hi+hL8n1J2Hd1yyWq1oj9+x7/5N/8dIRZos2Qct8AVm6uaZpI83EUePu5AXREXM0l6gp3Zvh/Ry4qk8rBWFxdUF8857n5gHg7E2XEsDsjVp2ijKUzB9dUV9+8PvH7xFReLHX2/o1lZzKImesU0BYbxFkgsqgrnJ3zYE+4D7fKCFKHrljT1CtsHpjDTTyPzNDAME7iJi2qJSGC95dv9R0olqR/+Bd3tc5aLGyqpiWUJDDg7YofIixdf8M2bX7M7ZKKyd5J6LpHeYI0kFIHkAk1Rs1lcMRoPo+FwmliurhjHE+M4YE8Ty6LChYHVsuPFsyXLcsm/+t/+R5pFyz/7L/+Mbi35t3/1v1KmNevLS0wNu/23XK6vqcq82IpBQDD4eWa7PaFkXgzITnK8v6cpC4r1gnnUiGnCKInRBYUyHD/uGI73tItLlosLtIb9Y0+7SByGxKGf2b4//r1zzT+I4Wq2M9M0YSRgIrYfud1tKaodpkwINSDDiuQ8+13POMzUjST6lt9+fKBtDet1hbclMhp2jwM+RApT8/t37/j0xSu22577D99xetyxvL5g7nvaLrJYwS//9t9zsbmhKhWFSpTixPu3Fq011y9vqOqau+2WP/mnL/nhzTY/uLuS9eqKT29+htKBlAKHU8+f/OOv2e1nRh+QoicyM4YTZWnwYWB/vOf7H96yudDU1RKVBB9uf8n91rPoKmTKh15U4N2bE1WlWXYt1jn2+w9UlSLoAaM9TbFhuVyjVU0/HJndgcurgsPB8vLlZ1g38+79luuLlwz9iaaqWXQXBJ4xz3fM7h02OLSqaLrPiLJFFQZtAnVXsrlaMPYpdyrZmAEgQvH+/g1Ka4L3HIZbrI08v1lijGaaAvvHCaMcdbEm+Za3fyt48+YNlzcXdN2SRXfBZ599wV/8m79i6gMpgCoKpBK07ZrjtsfPlqurS6ZtR1VonEikpHn+/BXD6SOnQdE0JX/yZ19R1IL++3uUgvV6Q7eouLv9DaO/pG5KTLdg9zhQlyumKVsdrp4944d3twg/YRpBu16xXryi2A0YExjHe6I+8o/+9FOS1Gjdcpgt3/zbv2B2I19/+XOkzPm39WXLxbMLri+fMw4T+/2BZbfg5z//Ix77E6MdqBaOMT3w5u1IXXeUpWZ/euDq6hqveoL3zKFHCsPufiARiH4kuCO/+uX/wmk60Sw0ZWWw8wKjlpT1iaZLVEXFw8c9w8Hiph5jFJtNh5OKh+2BomqoqoKPdw8ECoLMqPb+5CmqmfXFM46HnPtYrhfEoGnbG5TK22ttWn7YfcPhODO4magTojoyPSxYrxaUZUlwC5rNFcPpSFHV3NxcczqN2FNPQSKFGTdtGR8UQtXcb+9AJrplwZXpcOOMjTMxzPSDxUiD0YaiKCl8zWoRefvx9xgjqeuSky0w7QZTWk77j+xPjzT1istnzzjtRwiJzaqiO
VvYlIqUFXQLw+0Pb0hCsr76jJvnn/P7j7/h7775JZ/cbIjRMVlH99kzwvs7jm9vcIcLFl2LGz39cciY/sJw6zyl+jnJzyiluCsa7h7eUEhPcpFtP9O/8zweFFGcFReTP3KTzkrVbGdOd/cw7bi6MLx6ccVm3RHSjJ96Kt2RiWWB6snqoAVJQhATj5PloniGEVkF4/KaIc4MfcBOOTOTYXwh53nOUAuXEvLJlwQooUnJoMpAFEfGccLbGS1VFq9S/owrSw3SM9uJfuxxeEoaUNlSQcrY3f1+yzBNuBDRBiCd+4QyHbTtKt69PxB9RCaFEhlyYK1j3ZZ0nUEoyd2HR1BdtuHFQBL5sAseHx3jDFBmKITIOAx8HpyUkoQosfN5oExZfXtqaiIGog+cXGJ2CSE0yf5h6IxEjJRsLhpOA8Tcu3EeBLMll5SoS8Vh/5F+VLigz8OoQ6AISSARGCXYHw+EKElCnb1gEYMghAz7NjriwsR+3+NcSSZHJkROYyGlJOGZ7UjXrRBKndW+cM6gGaKfULViuagZ3Jztw0lChBAsyuRtsNGCpqlo1g3z3Q7vM2joSdsUEXyYUdKjTIUqFqQx4/qJ5wFTZekuOs88O6SWP9oXs8cynQElkkiAaHGzJMaKGPLgiBKEmDfkxDyI1rVmCg43S7wXWd1DkIQhpQy0UCrj/vuhJTKSsOc+tnP3Vsq/R6vIMB7ZH3r6wWbsPirDNQhnnmOWNDMe3p/Vz4ROnDu/IuBJXmGdJYZIcIJpPPGPvv4nbC4uKEqDnWeMMThnsRZEElRlSdt2vH3zhnfv33Poj9RFmYmCZ8KlROZOqLLg7elEJXe8vNgQH7KCJvEQjtzvfoV1juV6SWkqpv7A1cUrioUh2lzVEOPA0R5wDwf8JDKKX/dcF59k1dIb+lPDuze/RsuWsiyoKsliVXP74RbRdggdEWYgzZFf/PKvURqSCITkWS2umAEnA9pI1quaXX/L2289TVtTVjW3jzsevj1hw4CXYF3i1adXvL+/pdAVZWF49uxTvr//FbP7CU3TZSXaOKDh6vkGrRXDODLHnhBqvIu42TEcf8PDcUcx1zRNQ9ctafWKeQzM0y378YEPtzd88dUfUxZHtvtdphNOkmE84l2BTAHCwMf3gkX9AtMAOjCEid7CsklcXd8Qg2HoI9c3kof7Hc8uP+OPvv4zbq4Nh3sY/JbevaO//xVhPCDCMuNCdWK5fknZVIxjoGqv+MnmJWM/5venP3I4WhKS1WXJ0TumKRAdGB/w6gOtMhRmzWAtKsFu90DX1BSFxkWPMo7d9kSzaLhaVjzuHjjsjphUURYlV+s1usygsNkHEDlGMU8tl5dXtG0BJPbbE2Un2O9PHPcj4xFicUJ3A/NwYBpGgocXP/0SP+05PgTsPPPy4orj/XcoY7Cz5bjf8WH7QF284PpFJOGxU+DFi5cIBvr7PduHHtu2NIuIOxkUgmrt+fabW9aXz1nWa6qi5GF7T3d1SWk0cnL4MbFVQFFjpIfJ4XYNz199wS/+3W/p08z15YaL+pJ02WP9I26AOEhEiDyO76gGTVKCQQbkMPJMXWD7kYmBcrVi9hFnPY1WCCf41d/8B549v2HWEaSnWXT89Gdrfv2773HCIZXl8Ye7v3eu+QcxXIWQGOyISBF0RVk/Y3r8JR/ev8dZR6ENf/STDHOo6hqtDTHNiAR9P5LEgFATKdY0bYOUFpk8ulDsP458774lOY1Ec311yeb6isuffklIJ/anj7z+MlKYkqqqUVLgnaNMnnmU2OgR8cQ4j8yTYrmqMCZSFAGtJC9uPkOZXBCpdIkPiaYuscMdo7Mo5Rnmd/jYgMjh3utnDbPf5rwFEiEDQmRgh/UDdnbEKOiql+gyZWtXMhx6y+QGtA6UlcydStozzTsWXcOL5nNUETkePnB//4EQNCmssP4cescDjuQjWk6EuECg0LKk0AWFtqQ4Mw2Wg9Zs1lc8Pu6JaQLh8c6x391RNwKjS1LMkeCua6irS2KIDP0tIYz48YphbIhOEoLl5mXF5uqKulygVcntxx+4vOrwcSQki7MRLXPQuioFNiaO+xOCkrIsCCF/AFmfKGpN4BEvKrS5xJ0tGtoolNKEIOkWK7zNQWaEJiaY5x6lEloYEgqhXA6skr3n1u2ZpxnvPRjF8uoCXSuSU1SmREuFX9e0y2vKQhH804FTMtpHTmOLEIqq03zc/wY7VlgLzilCiCyXSyZ3Sz8/UBhD21yT0opTf8QUmsVyw3FvQQtisjjvcMGhmTFG5/4q685FlleMsyakAzbsmQ8SmYqcw8LT2z3RCUwh0MqT4kgOUgSmAZxzOBe4fr6BAMIkCAGXRoKvKXxksiPWjXnbLSI+gdKKSi5pmxY779keTqQoGHrPom2RRjBNgpgcMXmEsIw2232KOpd49+PE7Oe8BLeCuow4PyACKAouuo7H/RZZVAwh8bg7UHctUidc9KQJhCwwxZQtckpgjGSzuaRrO/pdzowN3rLf3RGCJ8xgYs2nLz7n+2/+FrNokSpvsT97+RXvfMV6vWYZIoQDx99dE8claYTkFHYU+D4QHCijs/VyssiqQCZNsInjyTHNiSFqJpeYvcSmSBQqb+ClBKmIPjKc9kQ/oGXgotHcfPYCpWbQjv10QCuBCuq8yY/4FIhWEkzAp2x7C1FjjDiTxyRK5jb6aZyZxinfx0LivUWJnPEQZ4hAijmTIs5Qi5SyPTp3I830fsb5iC71mS54fg+LiqosOImZ4DIJUMmUhwbysFGW2X7lQzjnOHO2KcbzwfipA+qMZU9CnMuBJaRI19ZcXGzYXLRI+UCQnlzcLBH4s/0rkPuPcg9ieoITnPM+Qp5rEFzI3VQidzlBPjgTsr2tbQusFcxeEGTECEkSEFLO9BkZmWaPC+mMWQcZRRavUgZbSJHOh+pc6PsU6JFC5H6cEHBh5nDo8dacs2/nLKSA6GOunzDq/DxUOTsq8vsT41NGLlIUsFpr+rHH+3MBs8zPkacglyQhRWK3O5w/91X+ekW2w6WULaJKwWwtdn4aWMXZdhnzy4sBayeOx0ecr3gaQkhnUAeRFCPSJIoyq4Ix/kEZjUmgpCIEjxSJqhIYkzgezra9H9+vs4oYsmqlVUa8P0E2hMg5nBSf8mVZwYrRYm3Iyt8ZvJJSRJ4Vv0iiqTUuHJndgI8epZ6u/1PoKmXynPD44M6l1vJ8j+Vfl0jE6Ci0om5K+uPIPDvKsuDlqxeUhUEIiSmKc94q3w9P8A2Adz/k0mDx1Od2DiqmmK9F19VoLVGFIkp46Kd835MotWTZGJ5drHnYjlRNlcuBtWa1vOJkD4io0bIAKVD+RIgVSuSsrS41h+MJo2NWtqzjJ199zf70kXHoGV1C6RofLbgt5fk8s9vtKMXiTAmV+KB5PD1g046iWdE0K+plZFYFPmh0WWEqAzrysN9SNw2FKTBSY7RhG0dC0ghZUijL6bjFlI/ENCFSRPmGbr1CGM0UPCc7E5Nj7A8IElrle/36asH+8Eh/6rHTyC5t2Sxe0HULxvHEx/e/oihfAyXG
FJjCUNWB2/tb6lqjjSAEQ10usGGPcB0EifORsnFARVG0kDS73RvuHyy7/ZZpOmCnez5eFIQTDOMJG45EcWAYAv0cEUVBaRasXc3xbk+9EASfOB0TCoOQE0pJjNKMs2NxfUF/f89qsaYyDd4OCASVLjBVVqZ112KEp1C57mHyJw7HwDR76mpJ0dZUpSNUM1JohAKbZsb+xOQtVVMghGYYTgyDp2kGYsjda01XcDxs6aoNjVnil4mXFz/jYr1CigDnMmxPpG0UaEUIiXk3cLftKYxECE1Ua4Tw7E47tIWmqbi+ec6ivsYNbyi1ozaeRVdnuzy5DDjJyKJu8GNPVSypqhXmsuDZ1Rpmj/BnW6RILDea6EbG0RMldGXNy6oi2UjoR8QmUtQJfyoIMS9r69YgUkWhWkIUSNvnDKMuCUlmyi2eYlEy3vXIZCjbjhd/dIN3EAtJNIogEoeT5XB4xCcQUrNq/n9gC4wh0p+G/KGPoa4VSQomu6M/zuiwJr4OaF0RY35gu3kmeZFzWimiTMb9IgNSxryV1Q4RE8fDiaZoqYsKGUpKWfDJy9eM84l5dixeFwz9TFVpUjRYJ1ld1Zz2gLT4lGki3jZsLgqUmrG2J0VJ21VIranqivVmw/dv7hDhSEoBHzxCSqzfEZOnLGqauqNtO3b7nhDmnP9pCiQF1ga88/hgIUmqTucDQvIkQBea3WmgVQqj8uZh9j0+JoxZ0dUrxvlIjInd7pEQC0g1QUqKYpH7UEKAkGlMZb0hRg3k/oSy9BhVElxiHi373ZFhGEhMCOkgiRz4dYHgLALNou1ouitKvWF0I0RNU9ck1zLNedNe1gJpFF1TY3SFd4Ht9gPryw3DpEkuwrnnhORRMqFkYhhG6oWhqVucGxmngdlOBAYCewxL4Dm73V22cDyVgbqA1gpr53OgWxJiwrqeqpKIoAjThCki1kq8FVgRqYsZJd25BFKhy5JxntCYfPiX0LU1bdMS7MzsHT5adCHppz1pW9G2K8rKZEVvtDivsc7hnKdbZBXAe0tygSpKxtHh00gkh75n65FK5F61MJNkft+NKPDREYPN71XVMNseH/Nue7YjTaXOYLXA5D3JJwpjcrQ0WGIqkSpbtKyfiSkQ4obZDoyuxznLHCSF2iBncGHE2hGiwpTlj+hoXRQsFy23t79j3+/wLpF8R91Gat1wOo2c+oFuFfH+iO0TSmjqsiJKzxxnovYkEdgeAsX1Ahs9KlVo06Iqg3IDQStmF+itpRQNRV3iXc6NzM4yTJb1ZpkHvqqmKGqkMBSmwMmZfhxzZ1JKDOPEaZy4XF1gQ4u3LVWskbKmUAu0HJnnChEFdWi5/bWgajeIkDH03rscwOUcmo8CgsrQhyiZbeBwcvSzwXnB7AQuCqIUaBkgCJIPpGQRIRDtAclIUyqu1iXPX2wY+kf6acBOubRaoHHeg4wkmYhBEsQZIpASURiMUrjZkwJEmU0/wcf/6NCZD8zxnHnKkIX8fZmlJnl2wEWUSpiztSp3jokMBDmfQ1MU+XPDGAQSH3KmC5EQQvJEHiwrRQx/wLCfU4PyYbYAACAASURBVFCIkD+XJYIYM4QgPcECUt7gQzrDYQqMqZBSkRHknK98PkBrLdFK/Z8O6OKsSaUzCS9EiwsRH9IZdx7P6kbWIooi93GFEH4EZUhxhn2f1YuyAEE4H4JTnqfOd0FKAQHo8/2XYn8GPogfnYV5EMll3xCJ4VySfIaYIHLmRyowWuZiUW+ftLXzwf48MJLzSFUt2O3Gs6Uy0xOf7J6SrGBqKTJi3QuEOhdIn6914uk9i4zDRAiSjC8X/AjYiNnSJ0ViHE/YufjDS/rx68qWNkTEFALn9Pm/8eOQgc6DoTZQlhqtHSGEH3NnT+/dUw6OlMviRcp2yUgeUknnr+1spVUSnJuyfTJxJjie+SznjJYgURQC6064kG16WVXk3KmWX4lSIFU40zBFVjDhR9hEzo3F3NUnI9ZZrHVsNhs2mxXxnPXTWuOdI6W8RM3kQMkwDHy8vWXoB7RSZzUsv2lP437X1hilkFIThOJoPUHlBUhVlFws1qyaJWMvKIs6LxKlxHrL0M+USlKoTJSL3uHnDI6REpwT2BSzNTRZrPfcLL5i5h4XBW6OuJSpnSGOSJ8XGcM4YtoOJGhdIouCw3BLVAFZamSpmOlRpUZESZKSyWabrXUjC7Wi1CVYRwwJUeTIRbCWhZYY02DtEe8GRJCI0KI3uU5gcpbRWogTKQSKQlKWikRJ21V4O2ZxVAhOhy2pfkZpWkgeqT4wTyPNoqRpF0glcXGH855GBqQCqSVF0WLDDp/7JBBSYYpI8AI7B8DjwoHD45EYAnYa6PfvOdyXmKhwNhHwiMLy8GgpVjVaQIgz8rBntiCL/Lw97hzLdo0xLg9AQuNmh5Qlq3aF1hVKGY5pRESDUhqlIrJQ1JsVws7gIUSXAUrOE+2UnxWzRssKJQK6MMxu5HjaQhIoLTFFDYgfSachWKbZ5wFhuaI/KJbtKsPjrOPTT77mxYtnlGUFQjCNA3/3V39Be/kJdaeYpz2//A/fMNgMxdHGIIuWUktSukNKhTYl3XKVC5uFQ6lAUSRQAe9nUAVJClwQNG3LbHuUllRVS920FAKGOZ9VQTCHgVacO8qiI86BSipaU2GjIFmH9R5tzvk0GdFtomoU0tboMtNro/MUtcZi8iddyk+kqqkpygKpFUJL2quG7eOJaBJeeAbX40dFiCFb3aWiq/4f9lz9f/Ej+MD+/pj7jUJEiAotl1xddSxqhds1nO4mLl4sOB4O3D98JHJCseHUW8pKUjaC5abC8khINh8cgeuLAuKCrm7wI/zVv/6er78KPL/4FKUbSvGcskicDltCKPEu0fcDrz//kqvrxOGQ1YyLzQXNZ5+jykz72T0KmrJi8lvcKJl9y+KiYX3R8c13v/9xyHGO88I0UpYV69Ulx+NIU14yzI9IHdhcLulPlg/vTkipKMs1RQGmHjiNPYfDidNp4MWLl8zzRFEaZBDYKeDjieuL54Qgubs90k/vM0GGiA8HZnfPZn3FenmTLQ0+UOuKuliyuezoh4n9YUCiqRvNxfozhmHE+gfevHnDxeaK/WFitiOLtuPm2TXTtD33KXlef/4lSr1i6GeUSFysrri6Knm475mnCa0km80F33yzZ91Fkh8YR4s2uXgyOIGipetKDvsHiA5vA86CNonNhaEsl/igmazDhhO74w8oA2W5AKn59vvv6VaSoiyAQEiBw+ERrQ3WOuxo0WqFm3sSisl5XJy5XH+OFAXjMJC85Xpzxc1zz+6hx04OOw6cxj3NqsANd7mc1yz4+O53dJuWEAOJSNsWzKPjdHzHynuui1cY/YKeD5ymEWszROM43lMUC7r2CgQ8Pr7n2N9ycbXgcX/k/u6OquyQVcXkDoQUMFWLKeqsXsY6E+BsDzjGeY+QirZ5TXQBVZ7wweUttCiIamae90gpkMIQXMvFZcc8jHh5JAjL7761KJ0yNGayJDwvbzxeHABJCAI3Bq6flfT9lpSy/UMX1xRdxWN/i3WBi26
[binary file contents omitted: base64-encoded data]
2bg2eaMIiaStzx/sUTrRHc4cjyd2N9GmkpydfaSuq7x3vPp7S3PX28Z/AlIyFRRiDVlvUGYiKkkptTs7xOLMpFinwmfZsOmes7EHisDIhWcn5/x61//krqpqJsMS9uc/wt29ydOhw6L5+c//zFj/ysmt4MkMEXFsqnZbi+BM5zzjL3A2cjZRcs0et69e8dpuP8H55p/FMMVBGI4UciK1iyRMrE7fkCY/Au5VILgBw6nj4yDJ/jccF6Ymq7bYb3H+0S7NIzThBYVlVlT1w1l13K/fz9DEgTL5gVuOHE6DfjpDqJkvS2Zpo4QYbFc8uzZj/j4Yc+pv6VdlEQj+Ly/J1nPMn3E+k0m85wETdtSlA3Egv54oCxKtCqp64bCRFzYc+ruiTFQVivOtlesV5rbmxOTdQih8S53IwSf7XZ2AhcGFsuGsmyQaKzrWW+2CGGoyhxYn4YeqS1GlTjhicIBJdZ5+uke6w8EP3D9OVIZye44sD9e8+XmM86PNKpAqAkhA1XTIIH7+3sQE02bCKZGEpEyYkykMkt2d3vG5h5lKlarDYfTwDQl3DRlm1OU2NBTmFXOjJU52xTTSEoJ7wMuOKpacThGLs82FHpNf4z4MOXNow4YE1BKIKLGThPRW0iW/e7Iun5KaVYI4XLz+ljnDXpKxBTxvqPvHKt13nhPU+D2xvH8suJ0uMday/nlEu9HSAEhJFJEQkx8/vwRZxPDsGe/v88FwWPOBDrbUxSJ7jSgdck0nWafeMnZ9pL73S0h5gGhMC3TMHG0JwJjLpxOE58+nlCZTI1MlsmdiKLLGZJY0x0O1FUOGMcE7apmd7phWReM3cThdKAYC1Q5IIuKaRo5dDu0KTLNTXoKXbAorpDpBuePRDSmagBDU26RSTKJEe8Sy9UKP0liGkgElusyHyQVlAhi1Bi9eBRbTAlVEzkcAk3siTGilKEslzgbcW4kRoX3gvu7AxeXl1iXFZeqakhhwNlsx0hRM04d4zghMEgk3sN+f2TqPuKdQssFJIMuoVYFq+U5hSk49Xest4LRO/anI/fHHWX1hlN/IkTL8XDk+vM9QuTDzfE0cnefc4773ZHVcjuXziaMLNCULBYF0zhxf5vJf/mMPwMchAQsL189oyzA2pFp8DibD7w+JqyNDGPu3YuRx0Ofd+HxpyHEDzAHMTdGCfGYbZrjRtmiRXxUsrIVb566HgY1cs7l75u/xYzX/r0/A6RAcIHoBd4rBArxQB0UEjn3d0ml5wEsd0SBoNAarWfcbZlJfkqo31PDEn7GplvrflDbhPi9rqsHi+SD1VLM3T/i8e5ny1/+7wdYRIxpLm1UmPkgHVNE8KCAZTpeTCmj171jnMbZDviQq5pVL6WzpdvonFHQGqPVbO/MHWLMmbQYszaGz1fau/n+pxkxPj8OISUCNftE/34+zcdpBpzM+R2RP+uETBBneh3i8b2Vs1gCU1bZwhVTHiq9Y5pGvPM4n2mH1ufMVwzZbmnHDtsP+eeJrB4pBEo/ZLwkxiiKosGYWeVUOd/XVPXcY5VIeLTKgaLtdkHT5GWD83bOLec8WFloxikw2ZQVrwABT4qaFLJN1nubQRiqyBjzNC8eHjYFIl/7sjRzvk3OQ1wePHlcHGRbagiWGC3eZ/BBEmlGtj+8HxKZ3ZIV0cm62T4aMLrIpE2tIeUSaER+DyYriTh0URBSYOoHPu+uubu/Z7uoUEJwf7PDDvIxTpVThbMlUPD4mnIuoWs5ZyB5LOi2Nsx10pK2aTBPPpAaELQ4Kfhy85HVWuN9BjwtGp87zRKYQlGUktNwjZKSs7Nso+v7Du/BjZq2uiImx+ngOByuUdIwDJ6hH6jKNVpWiDRku5QKqKiQokBSzNej4+bazYTmfH1Bs9qU1E1eGsWguby4InLEe0VhVrx8/ozD4Y5pOuVBWS24OGuJdqLWChFLrNM8efKM09Dh3Zxdjobj8QunTme7YfSk0DB2+XelUg+2XUVVNpRliUgCNXeZCpXfX6f+yOiPlFVL0yzwfmIYD7x//4FxGPFhQEjHatPw5VNPWbQUJQhpMbqkT4Hdbo8xNavVkuXyjLLSSJnJlOMQaBcblBbYaWJ0nqfPf8TZWc3udM3dYY+dAqUGUyeUahAomrql6+849QestQhhePXqG46nT2hdY0ydSYo3H1GiBK8Rk8IYyRTHOZvUQZcYJgs60Z96gnXcH/Z0vWe9VFhlSBRU6wLGhCoVyIgRFVEc6Y99XuCoRHBL2sUGuZCMrmOcesbBU6mW6DyiFJQV1G2GrlX1CiENg3BUVYkTWbX1NvCLX/6f7Hd76mqRAUra8Ob9bx+XKCIpXB/ZGJBiIGhJVInkA0FAxORsqhFUjUYLi0/goieVPc7W1NWGxMg07YjJUpYlulToIiGNx009hRE5UhAEpoJkE7WpKUReki0XBf1gsWEgjoEkYdvWCBlmymzFNEbAQSowuqSsGqoiMY6W9XpBXSmU8Jh1wam7JSVFVZfocvUPTjX/SIariJSWwtRUZUlwkdosUfMbX8mET4F+POB8RJDJSaLw9OOeYbQ4L5FFzbE7IFG0lUeKCqXh2N+QYkTrlvXiNePu/6PuPZsry9IrvWe7Y6+DSyB9VXe1bwbFUYwYihjpD+sfSJ81wxDJITlDtqsuk1VpYK8/djt92AdIKkLiZwoZGcgELu7Fdefsd6+1njXStgdG3aGQ5GXg2PSEAJUpKasS7wb6ocNkkUFlHBuIo2E77jBZBDlDxBIZDJmpcCFybHcIKZnV5yhqvIPNbk8/bhEKimLJrH6Bjx3ffvc1VQ1aGxwSpaDrhkS885YsN+SmpsxP8C7Qjx1GJ8uNBKwb8WGkae+xNsd7R4ypZb5tdgy2J+BQ0rDfj8SZI4QD47Dl9v57ZsscFyzReyKaPC9x08II4RFCUddzopcI+rRwij1gafsHyvwEoxcokaOCxlmeFgjWebQKKCOQWkM0OLcjNyEd/FqbcKh6yXx2Tm7mRNczWs39+hozWIwZ8dJhZI5zcQpNB3a7A/VsRl6USOXIjEcETd/usRIgvV7KckWeZ/SDpWkG7KDpupGmORBiwOgzrB3oOk9RZAkJLUuOzQAUdF1L3x94/eY1dlB4PwAerRXtsaMoK3zo6YYtwRueXX452SQ7pDIsZiX90CTinkoDatvteHg4MF+krExkZBgPRAaMrMlERdPumM9mbHZ9gnyYSLc/slgucU1PO3R4UZCZESM81o+MgycrdBrWI+hckhUZVW3oxw5t5pi8wI8OgiKGdOJUWlBWS5quwVmH0oGyKrChIwo7zRSaEBKS2nmbhrdCEncCpTPCOKQsTtCMQyA4QwiCGDzD0NMc2/T9KAheIITj2G6JQU2ocMtoHWDwQTAMA4fDAyL2FPmMoixx1mCNRZiIj5HBCoYxY7bIGWxP1w8MQwpZW5cW+sfjntuHD8zrc2KMtO3Ifj+SZ4rjMZIZSYrdCsZRo/KGUi8pG4EQaZGaPiYkgoAYPLN5xslJgZY1dgh4LyeFx9G0I/tDQ9uM+ClP5XyC6Fg7JsVmWknHf20UjDLB
Ix4HsMeBjkdYxmcp56kw+OlrgkeT4eeMFzzKP0LI6XspqxSjT6SmCNGKR4chgkc4BEhtEiI4S4vRTGfIvEAWikwlwpicVLnJeEeMjzCGyQbn/ESF+zwPavPY15WAAEr6KceUsjUxBrTQk+Uy3cuUhZKP88k0eH4GYwgE3scJLuGmPsOUC0z3P5Hn0nABXinUJA1KlQavicuBEPGJ5PWU25ruQOqAAhHTBpIQaZgS3iMmUIfS6TpS5i4QouOpeBdQJJXTP35lmqSj/DwdSyXJdIZ+gjWky1mb6F12gmpYlyiNbnTT0AXjaNMGlyfZPYNPgJBpeJRSkBcp0J2gKQZlJJnRKGkm8qKaQEdQV1BXqdB69GOCZ7jUb5Zpg/c9Ush0DEam8waTWhhJeR6RfS6+DjHNn2GCY0SIPqCVZHADg1VYl6GVmZ7nx9ewQAqejsHepw0BJjvn03geU7YtxoALHufS/6VWE8Aim1SWCCJl/9JryKBEUl3HfuDY7ljf3WNtTD8nJbv7Dd4qMPyr3F4q5JUyZQiVTlCRTD4q1oLHPi/rkpInRSDPFerslnbcEWMkKyv69kBVF9OgLRHKcuwOhOBTJlIE1usHnl9+iVQS5zu6docbEmCoLGb4OLB++ARy4OSkTMffpqPMc/LMIGNAKKiqka4xFPkc60as64GB9TaVkmcmQ0mFFiWLZY7SydFiYkE5S5u31kXs6FmcLYiD5WAKCOCtoyoL2m5HkRXkJnWx5bnmYdPiXAQfKfWMoR0xJkOhiF7jA+BT1jA5AFIRd0STZxWZMRyPe6SaMXqZjvnHG1TRM9hAXeZoAaHzfPjwgUU9f1Lnc1OgxMhitiArIiYLFEXOoZH0XY+1CZoxn60S+Y8hbRSFjpOTtyAGhBzxoeDq5Avmc8fHT3fs93ukyJEoCqXRpkxOHCXp7T1t10KU1NWM05NLjs0tUuQoqZFyJMQWRKqXcVZQ1QqhAz5YurHBHdK5pMhMynT2A4N1gCYIxRAFASgyg1I56ID3Fu8hzxYI0aCIyX4eBcYYhMixtiM4T9eNVPNTxvBIS/XkeYENAiEytE4odqFEKiuXghAcd5vv6AYFUmGKBeWsZrvdcnV2iTYa7x19e6RUB8ZxIKiCKAxj6whSU6qcrMjRGnSW1m/eqUT6y3oGe0QV5ZPrRBkBMmKkwSiBUIIsF8yKkn6UU22CRT4CXqQnkNYXQsqkZvuRvm3xK02IFj2dV3bbI1FETDZHqnT+MZmiOfZUZU1uUnawnlXcrT+S5xVVXaAnl8P/18e/i+EqRE9eFmhpGFzPfrfl5z//FQ/rDcf2SJAer3Mk2eRNVjir2B4+4UVLVIkwtm86bDxyu7bIeI9UEanbFHRVEGXPvn1PkC1jyKZQuODuoWG3t2ids29HPt7cMy9fo2RG14240VNmr4gi8N03W+bLI6/ewnI5x7qert+kk58bmZnUs8UIuMhJ+Rxp3tFZyIoZzle8e/cnHnY/Mlu+ZD6fIeg4Njc0O4PUknqW8ez8Z3RNZD6fEyGVwN584ouvIsc9DD0Yk/Onb/5AblYokVPlFVfPn7HerBMWPavI8gxnb3FeMg47EIGLqwXeKY67HVIp8kIzX+T8eL1jPpsTYsnQWd68OaU/LhnHB/bbO9br/86bL2YcdgPt0aHEARE9X/3kLzg2pE4RDa0rCCoSOouzgWAlLgwEdcD5gBgNwxC4OPsSQTUpNJrM1oTrHilLQrDc3b/nxYufYqpTbJvRNhu8bylk2j31g2XwgTdvTvj46R4la4pyxmJ+wdXVV9zef8v1p2+4u7vnl7/8D/z47lMK/WrFZnvgcNwzhj3PX8xZlEvEvmV1UtO2A7QtPkii1OhCE63ADxAGTZB+IgA5QvS07cj19Q1Kpqb3zW3LYeZR5khelCiVMXQd7fGGKCuiUFgXGYYG4RwxBLIZ5HUG5YIQJfV8TpAjQXiqeUk3gshh+SxjMVvxaf2OYdyjYsV5dYHJI9d3O3plOOYD9/mGPASEuKBWGUpB39yzc4HDcEAZzbxe4Z1AGkdezBMkZRwZhiPdeGQYQJJRVlvqeknbtkDEZEsWC8HZ4tdstjd0/Y627xNIwc0ZxwbvW169uuD29j1aeewYOOwOXDw7oWk3CCkwpqCarehHOByO7PctzbFlGPcI+QktT5nPnnF6csH+0CLzW/r2E2OvEBT86rcLoltR5xVVdoVWAlWNrNc7hPAsVordw5h27nzEj5GgMgqzwPkRhCEzc7pGEld/x7L9a4ypENom+8+ETk7JDo/3kZubB3Kz5OqiBiUolxeEYPHO4saRtp1jHQgdQQqGXuFdAtEcjw1t09F2HePokjUwpKZ66z1geezj00olBWoqb02wCAlCpR1zMZXmRj0t6OFp5Ir+CaiRBCRPyp4kyAAyImJEoCbYw6SkoAkMqYB37DgeHTEkZUgIQWYMRZHyqHleUpQlZV5S1jmIRNqjyMnLxwV2nBbWnmgT+c72lt6HtPh3abjKMkNZpdb7alaRZylXlYSpBD0IExwjhDCRzkDJKTuV5Wlo06loWQiBs6nM14ekqDnn0gLGjXTtQHucICWIlCnTaoJKaLIsnRvUVHD8ZO2cHuHo44TIT9cdQhrI1GMGK03iyZ7HZ3soT9+bhgaS+hXCZ7UrxIh33RPtL4Qp0zZtnjFdBgF5kVNW1QQ8+TyAhpCGRDumPq9krbQMfc/xcJgALOlpl1GkATozmMygswQX0dNz8Kh6FeXyUWh7PGnj7MjpKZyu4lRGPVExJ/tmJFIUK66317SDQylDtAHnPUhw1jN0llmtGMNAP2rGMb0eIokARorBkYcSIduJBqkmxTeiSbZPYkT69D7wcZwIm4a6rCjmhhglMabOM+RjjjEBJ5Kil3Nzc83Dwy37w5bF/AwRPUWZY4zHN3Z6vORTMbAHRBAoJIRAb3tcSGS3RxVyQuEShMJPqHjVHZHKcrf5PV4Kzi9+wcXyLU3zIW2g6oK2TR1M1qeBeewDtx8cf/nb37Dd/46hXaOcQPiSZT3DRsfQe6TUPLs6Qxubzj1a8f7TLZerGZU6Z1aW1IvAbbhhNX/B3eZrRn/DfH6KdaepCiZEog0YEp6+HwN5yHlWzLgf75nXC47dhrHbsH73wMnpa+ziguO4Znv4wOgLuvGORX5KVeXMThy3m3+gGWtWs2fUq5JllqMridAnMAyMxzWfDlueXZ7jO4WSmsViyWDv2TwceP38J1xevGDoDF9++ZY/f/OOd4ffc7/9jssXp9yvt3SlIDMKmLHff+Tl+Wuunp0hZOBPf/6Wt29WZGaOEGkjYV5rhvaE4PaMFg67kbqeTcefGXUZWCx2zKoK7wuMiOQqIuWI1iWL+RXG9CBa+m6LVi/RqiQERTs4qtmM5epFUnWd43BoKPIFY+9xIt2/6uULvvvhdwxjCyIyWE01yxiGATsODIOhqhacnpwiYqrOmC9O8G7AWhjtAecHgltx8UzjLQxDT9ccuHj+lnLWkytDkVUU2ZyylmxutmgPM3PCgT1lndHGjt5ZjtuI0Occxx2hH9FSMFt
UvL/9lmW5ICsyVK5588VXDFh8XzGfnXJ1dcVP3lzw5s05zu3Yrj/x7ts/8MPu93TDDKVPEHrGrhmJvuLN60tMJrFdB95yc303HYM1wTsGfWC/vcf2gM04Ob3k2O1Tr5eekWUVr7+aM7MZd9t71vsHNg9bVvPnKCOIIhDcyA8/buic5vS8JtMZvpM0bQc+YocDvdvRDTU6nzObL9jvt9w93GPMS2IM3N/tyHRPVedYL8nLIjkpFMxm/z8YrkQ0hG6OqDOyCuQw8mn9Z8auxvsCLTyv3s7Y7x8Yx1QuqPIddX7Kbp/IWiZTjC6gdAexJNML6lnJH7/+J87OF6xOa5QJ/PmP33BycsW+dXSdo9Ak9GtoQFj60bI/bujnu8lfnnZyri48b179gqpe4vxA11nacQ3Cpkb6AIKMYGt2+wdwpFbnLEOLFzj3jo/X3zG077k8v6Ron/PD92vy4o7LZ3Oirzm7OOVw3HFzc8e772958/otffhEcDntfsZ2bRn6R2LZwOG4xagVMUass7TRcjz2KJNsIdZ6uq7FuZG87LG2xzuL0Jo8n5MVyRsuVU6marJ84OzsAqUM/dAx+EB2+j05OXFb8fFhw/fvHJIMpRxGbzBlz0n1kuPdnuPhDp89cHr5nLvNgbIWzOYL6vKSv//b71itUndIP/YUZc5u94nj4QAIvO/pu0DsDS53OCHI9An3N0cWiwIb1nT2Hdv9DcP4gtOzM7JM4/tAPwh+/vPf8rBOVo7dUfA//eQ13//QEmLDbDmy2V7T2T1np2c4F/jx/R/YNt8hOGG3O1KVFUYa3n99x2yR7GLKa/7l795RzkeQPi0yipxnLwMP1y1ReBbLgovLmk8fb9kf97Sdpesi11vH6dmMynZoZRCxwsoHLs5WCClo2pHRduzWD1wsfwlizsieXXODtcmm4vzIeG+p82csVgtUrvFhz2HzibibMVvWgKbvoN0PvFz8JSJrOLRbPn235eWbNygz4sKaTJR88cVfcfPpW7JYUMiMRWYwDOyHkaIW1PWMMn/Gj99vODk32NzSNj0313/k1eXP8HnHw/qW+x82nC7f0LZ3dMOPdMMO55bUVYGIEaNyZNTcfhw4P3/Fsdsm24hwZPoNeRHphz3D0NHse0yuaLt7ju2Bph1omoYoNC9eRMaw4c/fX+NcoK4WKJMqB3CB3//+QFW/52R1ytnJBVWd8X/8b/8ngZHZvOJ09Qua/UcEKe8ihUn2VSyVnmhUqqQ+WXLoGqy1SAWLRc1+51EyAn7aIZcEr/BO0g0j+9ajUDR3N2iTI4RMyHsLZVlTVBlRCLbeMq9PqcoZwzDQ9R390NJ1DZuHHTFaliea06kHbugCx8PI9cc1Js+SrygKYpDwaMvwKbOVMjckvPJkkUsZq7SITLbEifYXP1sJRZTT2OWnbuRHAMGIEYIoNEELBEX6eZnsizGAtR3WthyO60nldsCEQCYh4FOJp0nZLKMpi5zLs2fMz08py5I8z1Bap6HSWuxoUx7LWYa2Y+i6J3teluknq6LWhrzImc8ytA5T99h0DpnyV3JSgaTS5GIqI35y5olp0f1IIIyp+sDa1LU3KTPHtpvyT/6p+0o+XVey7SklJ+hEKrlX6jFTFxIwxEfcGKdMVqq2SPbFNKh+FlviVHArp14vnZS9R1tknKiBhPS1kIYn5z3eD59R/SSww6NikuhaGqUVlSmR4hGKoyZBbBp8Y2S0Fmct3nmcdex3LXZ0E2wkDXhSJTuk0hqTGfI8Iy8KyqpMQ15epl1xnfDzSmlkTPjkt1+epHzTtMgchy4pS9bjxqQG5yVgH+iOO+y4myxx6fkK0ZNpzcnZFev1lt6GyZYnH72WwGTjVLBvUtdW6s0zWBeeMpCpSDshyh9JdofDjt/9y7/gfVL1zk8vECLVe1gxUCwLLq/e8KfrT1NNgQMCUSicG1jNC/JcEVzKbRLKRMKc+uWISWGTPiCiJ8sCUd9x8ewtm90DP/zwD4SlpKwMx7bHmIyfvnpNf0y20hg8Ughevbjii9dXfPv9R+7bNc2wxo5nZKuGfmgQxvP67TOa45rNw4aqmjOva5azjPMXlwSraYeB7ceeRf0lEoH0M+gsQzCsyjzt+seIqAXPnr3md//4D1zOL7m8kHzxcs2X9QPP5hcsFgvK8gQpZrz79u/4/R8j+4Nn2wrKsuZq9ZrNuGVwHXNnOPYjccixYoMNLdk8o28f8OMR5xWeyLyGh48HTs9OKMsybVB7TSnPuPmwZbfpkBTc395z3B85qb7g2cmvaIY1eSnRWUtzvOf6/QM//eJ/pJrD5tCmQlwZOV/9jOAL6pkiKzv+5m/+M9Z1PH/1BpOVbDcHPA8c20AMBZKCslwQo+fHD3/AjprcXCD1wGbbU1UrVvMVRXHF+/cfcD6w360JXvDmyyv6IXB7fcswjEQCw/AnXl79nMf+syh6/vztfyVXrzlZnpEVESVzslzSdQ3O2aT8YzhsPHlhkMozui1B9GwOLWWlWS5q8iyy2e4YOk9Rw8UXJe12y8nslICjH1tuP95yc2eJok5l9zKyXJ5ijw2hPxBsQ+stxeya2eKKdd+wP7Rsjp7rD/cUX85x1hN8ZLU8R6otQnqGbsfHHyzkn/gv//iJm4dvODb3CKcwomVVaV6+qrm8+oJdO7Lb33DYfWTvDIYZP//ZTxm7LdZ6iAaZz6nqkm79iUxG8llg29xw3EU2u46ikszmMOzhp1/8B55dvmVxes6337yjDz39IJA62ZmPB8N/+l9/ltZnu4boFe5ec72+4xgeGH2PGCrqbMnZsxI3eIa95U/HHzlZLsiV5ND03N6v+U//y1/jQ89ue2B33+Gb5b851/y7GK6klNRVsikQcubzkvv1D2iZEOBZUbC5P3AcOjKTTkK7/QFbRJwVlOWK+eycfXvAC8fu2DF2D2Tmgl/97C/5cP2ed9/dg7DIMGP30DJbFASv2O48++OaxarCyBlaBUKVvOcnp5ecnlyR6ZIPP7zn980/Y7KM6A3EjBcvv+DT9TukiuRlkuavrq743X//HoElywLdZo/QJdVsjpKGAxsG+4Ff//rnfPzwI11/wGRzxrHn0ByRsuL0pGSYbZkvSppWYEcHakNRD1xff5gKLAMxDrx982uOO0vEEfF8/+4dg31AGZekURXIck2eZxwPA8FnzM8v2G5b8nLEhz1C5JzUvyDXC8bOghgZxoH7+3dkpUXFJbk54ze//h/4+7/5b6CviT4gRcbVVcUfv/kH7q4l2mScLs755ts/MFuuyMwCSWS7/oSZFkaCHGE1l2cv2Nz17A8/MtoGIQzPzp+zETlK5LTHwIdPD5yuXnE43KLMgDYzVnOBn+xMUmWYasFq+Zb15oF+GHGxxYWA8z2zasa4OqUfA84OzOaKw3HL8djQ9A9cXD6j7x3DsGW93aLIUNIxxAoZIkF4ZqsywSBiZDYrmNcFH79vCPKIyUQqoW1bDu3ISyI2XQAAIABJREFU7cM+NXyrjNFFWrvH+oxMZeSFo6wV7364ZlbPqauMYfSMvWGo9hyOA87BfDHn9m
aNJ2Cdp+sHihw+3t5RVwqjM3b3HT97+1fsDnu2uwf64chf/Ow3HBtHHyO5hvNVwVzNKAtJlnu0UfgR8mxOc9iybTd07Y5ffPUfCQ87DscG6wcy07E8mfFwt6Vtd1jbobTgk7rm2OwYxx6tNfd3Wx5uR1anGSY7Y7dzZDns9j+ynJ+wml9BrGmPDqlmzKqcIndsN3u+++FrstJxfn7Cl69/zW5/w+rklBA1MfaYbEagoTnaBJ6QmmP/nv3xIe1uqVRTen0bWJ1kfDRHimzL1fNL5iczNtsb2r4j7084OTtBqg/TQjW5kfphDdIQQmQYHPvdn7l49owiq5FlTZGPbPwx5TNI6HUJKC1pDi2b3BKiZug9mVKszlIOLDpHUVUI6en7nhCTMtZ0HcPQJnz2ODIMDmchBkmWG1anOecXC5SxnHxxjhsNx/0/c2yaaVEon8x/IXjqusCYtBvfHHvGcQBvpgD9I0TgMddCUqtiUqwSuvpfDVxTPicFdSZGW0w2TqRPKtjUifQ4nglSl5NWGkSWBs/gp9tLk0z0NikuI/THI9v7B7JsouApgzYZRV5RFAV5lqXFel6znOeYR9VEplt03j0pRd6P9H1CMMeQACEhhM/GSTFBPpATrS2VKmuZQAVKpuLhVEj8eRAsiiRlJKUtDUFxGsTCVOL8OGh4b1Mfytiny/rPNk3x6OaEp1zaI4nwkXz4eIE42eUeQRJhGsTSU/B5AGQCYqRs6GO/0wQ5kRIpHhXLSVQJIVm0/PgEwZiuNS304anvS+nUoWTyjLx4vH4+WxvD4/1PCl2YBlMRYew6xq5HiNQHpbVKw9WTrTTHmKR+KZVyfUJAUebJkilV6onylhDg6nnN5VXE+4j1qTvMOouzIxIoM0ldgB8tXZt6u9ykqk6YF7QusI+dkAHgQBbNU+5PK0kWS7b7DX3fptxJ32NyQ2lqhJD4CM6NT91Uo3U8DMNkIRXTYzgViyMIMeWkRuuTeol4GuAJEokieEfAg/BEoShUzXpYk+lLfvry15gwUqo5L968RRtDt294++INWZYhhEKrDK0t67tbMmqW9StytSAITdse2R4eQETKosBwwttXVwTR0/YHtMjZ34/Mao3EYe0WbeZsdj0mL1kqzf64oawuEBKa3Q2H22vu1gVniytePJ/x/HnG6aXGhlNgxfYouVkL9g+au80faUKkrFdcmDmDv2e/7SiKc/I8JxOCUX3gl7/+isPDHV1zYLW6ZHH+a8Zxw/1uz822JTjJ6dnIw923aL3k9auvyMsToj/ifMvxmLKi/ZBIk0JK+r7hsN/y7NkVs/qUWVEgg0LIA4ElUbfosmGew8e7b7k8f0E/wHqzxw2GxUkxHSYFJ6tzhqFHacd6vWEct3x19huur6+xY01ZahbLyGbbU88y7Niw2XSImGGUQSpLwGKdxPWG+/sPiLigyCVRtES/oqhhtCnG4l1N23ScXC0wWhLpAc8wRIoyo+0su12TgF7dlsFGQuzp+j3n5xf09o628zjXUpZwc/eOzMwpXEnrcqQtWMxmNN2GYWxRhUTqDKUFTXPkuNuzGyxXqwxlFOXslFlVcOhbmvaIHUb8GLDe8bOf/hz0SLdvGFtJYXpOV45j3zKMgtYZfDjQ7Syras5yPqNpHc9OfkEWLHk+p+07Pj58IBNzgmiJemTwHV//8AfevHjOZrNhvd7T9i2xeoUyz7lY5pzODd9db/Byy9iPOC/o+4y3Xz3nx4/fID5laKmoSoPjSMRjh0iwGb/95U8I3cCwswgnqc8qbj59pG1a8qpmuTwj0wv82LF+2AIKVdRU+YLlssKNjoijUppvv/1AVghCFLg4sG+v/8255t/HcCUUeZYwiN47EIGqXkB0GC3J8yoVN7qRISS6S54tMbnARQsy7UYXueLuPtAcGrRURDcjK2ecza/Y7gSjbTg/PefhYU2dzVFC0/mUycl0lTqnEFSZxsdUXuq8QClBiDCOByIrlEw2lBCPFEWScFMJastms6YoKpSMCGlp2pF+uKaoviDPC+RyjrMds0pzcrJAHyLD4JFS4kJqmlZSUJUzYpz84Aqk9GRFKkhWPHbBJEtQUeRYF+jHlmEYUyjX7UF4lDKUxZyqntM2Oc5GcnNKUSp8aPHeI4Sg64+J3NakricXG4KTbA8RoxyzylIZj9aRvCiJQaFExbJ+zhg0qhzRmQCtMVkBKOxgU3NIlFycvMYNaSfRqBI3SLpmJM8KjJY4q6mrBX0/UNcn+CDZ7I7c3NwhjSXLIS8kZVXSu45xHNGyIArNx5vv6PoDo2vTCTBEbm8SRMSoAq/nqTleO1wjkaLg7Owq5ZXshr5vcGNAixqMR2PS4sR5VgtBOCYVQ2oPcqTrGmQxElzyRu/3HUKcUc9m9LafvNoS6z1+dAStUCbQNZbm2GMHixtrzs5OCENaAPVDwAfNGEZMUUAY8cJjoiHLFG7U2EHix0DwOWWxpLcdhdUp1yYsXd8hC02RLQi5IYwBU2boIAnWsj3+QF4qspykwLSBh82R43GP31nyQrI665BepgLt7sAwDig549itU8ZChqmrwxF8jzo6TCYZh4D3hrYfUaJFqz2ZSba1cezwwRKCZ7Pesd9vmEuFsxVD32MHhVEV89qQ6YjUcHv/dSpSlAops+l98KjOCBAKZxuG3jD2I0d2DDaS5wVlscD5SNP05GVaVqag+IhWaUwZhogQPXaEbmjYbStmi2uCsrjgeezPeSwTJaY8zDg6+kFSWMUYQExKktJpeDMxFYpaNwX9AwSf/m2nsLq1LtkCCUgFSqup9DbZ/cpa8fLVM/7w+2+Ykv2pADQKnA2UVc75xZzFsuL2Zpe6lqxi6CLb7Q5tEhjgsVfr8XPwKVciJpUjXS9P+RelHqUgMf0JPP4P4DEClgqNk2KWvptABZ9TYI8uxXS9j9Y25/103JIIqabXSIYxKQOUZTodDybyX1K/sqfs0aM6JWSy8gmVhozU6eXx3k2Ps50e+8/JNCnF5y4rlRbZj3RBPZUIa50+i6mD6/HsGJ8siSk35L15UowSCS4+qVJPatNk+XTeE116HOWjYvXo4pwevaROTYPrpGDFJ1w/0wCZhuIgUhEwkadM2xQYm56nf5X3efx5Pt/mE50ypM/KP6ptpOFbpkWrnKyoauo/00E/yYQxOfKmoTZ+fn/IZKEVSEJIOcNxjE+Pf8q4CYwBpbPJXqMQIhCjnu5PQEpNyoSCkGlDScTI2FsENZlx+DwwDAMpS5i2C2IUuBggalK0K/Vx+S5Zr0WMCBGJToLoEdKjlEfKiDE5Uhkij/1zA0WmMAq8dzSN5XF7Ib1lJjsi6X3kJ+tpKgieinARabNteu1OncLkmaamIg53eOfACKTOcEOyHqY+wxzvDsnuKA2ZKZnPM7xrWD9cM4wdWZGlIlqb8nyQbst6R56d0I8OOwSKYo7RDjumUuJ8AoFZm6N0wMWR0bUMtsfHREddzGpiUXL57EtyPXBsIx9vNH1n6NuG+8OWY2MxbknQFX3colSPySqGVqGLgqqskBKGriG6BM1wrse7nmB3r
LtAXijm8xlZWXB7vyPPPMvlDKNLtHHs90MC6BhNojJ2tK1EJnGXEKEsI2Pf4YsMIR3GwGJR4EI6vppCo51EqoZm/BqCZOhSbud4sAjlkDJS5Jr93lLpEmMqQhg5HhsOxyNa1UihGCc4UZ5pxn7E2wm+pQNdmwBPJsuQQnJ+9ozNuqfrG3w4UtdXdMMWoRSCFDk5OTmnrCQxCoYhst1t0bqgDJ5+aLG2R6hIXZdY3xJcQGuV1j86nzZ6Am13xNnAfFkitaY59MzLBY6RY7tj7Afms3P6IZB7j0SlGpVgGP2BQSTbbCE17QDtvkeTs5gvKeqK0jia3kxVFo4YW+7vbrEetMqoS8mqUDyb5XRO0TlBaSKzskK5Hjt6mn5D33tMobCOpMz5kKAZNtCNkd55rLDs2i2hM8zLCqVXXJ1F3NqRZxZvA9ELhMoJepsk8aiIzqNNgdI9WHBoRBy4+bSlPfagIYQRWWjmcoY2JjEMVE1dK7qhIUZNlhUUmWIxW9C1B4S0uGBZ7+840YtUAiIF/vOW3v/rx7+P4UqmN4v3qbfD07NaXDDaI1Iq8rzGGMmhcYzjnqADp4sLhNkkKINoGO2OLM9oDzahXLMAoac9NDw/e0ulK5p2w+lqQXsYmBdnGKPITaLdFYVEyxIlM6RecWj3tO0IbCjyHpNLlNCImGOynLyQ7JtrsnxJ3/fT35au6TldXWFM2hm3LmPo3hGiJVcFRb1kHFQasOqcECru7u+ZzSqkkHjXEXygyJ8x9CFRr4wixgztBEJptMoTsQpN2xzRqsTajq5tkVLhrKTtBiI+9SwYEHJGlhuECDibUZQ167WbGudhvftIVUqOh0DXdQi1p1qcs92usUrgu5axuaYq4WR5hZYlWsw4X7xFeE/M7glxYIiBs7MX9IOj73q8cyyqmovVG3bdgUgqwnu427DbbHn99pQsL2gOEaVK8rJIFjhdcmwa/vT137I80bhgcC5Dm5yubzkcNTHkZMryw8e/ZXUyR4kSKXK0inz6cIeLB7yPyaZhhkQHZEZVzannFf/8L3/CxpQFcdbRWw25pF5GnI90raWsR6IMmEIgjaUb91g6VMwYx4FhPLI/dpyunnN6fsaxSSpYUZm0qB5AeAiFYHtwgGfoGhSSN69e4foDSii8N4xBcr/b8OaLE6JLmGAh0+KjzGZ4m7pFimLG4Bwqc8yWGm9n7NsN7RBYFEuMqRi1pjkeqPOkSIyxZb3/wGX9krJWEAqO+8Cn21v2+y1DF8gLTVY4opP0dkNvG9reE6yhHzcUJWRZhtGGSMDkkbbfwxDIsor+EPBBcogd3t1Q1XuK7IT9YYd1yT67223JM0mZ5UQfub25wTtDlIoyz5nVJeUs49B8RFQKiSb4gsKckRUBZ0eiBy1mqFkgMxUxpgX2w3rDq+evKYtT+qFnHEbyMkErfEjDVUSBNDgv6TrPOPZIJXlY7zDZ1yi3Z7QXU+fRYxYmDRRSpYHZ+QlKoNJC3IeI8ynT1A0WZcxkaZpUZwxMC07nA867qdDWo3TK4ez3LUPn2e/31JXnzRdn/PGP307B/cjU6IP3Ea0VZxc1P/35OdVsImj1sL7v2GzukaIGOS3yY1oMxOgnmmRSwUJIK2TJ5wFI6PQ7w7R+f5oAxNPCMu3Zf854JbUnqSrpsUoL7KQcTYtRJVEm/5zXIRLDSNe3NG1a8D/a4KSQSKnTJozJU6A4z6fhy1DVJVVZoI1+Ul2igdFarOVp2HHO4axndIms93gbUj7SDpPSkmcGM6ktmUkQD2PURNmbaIbTolggpq9VCVogP1sOY5hQ8d4/LbTHIcFanLMJOuH9kzIWHxWkR6lrKi3WKgEuniiR0/MQ8EnJmlS6VM4cPue3JgolUkzq00RBfMoN8qSAxRh5JDdKBPi0qemmyzyBHv4VmMGgn/JXcoKSCCknBepxkIsQHcELohcJKz90BO+n3yHZW6VySGGQQiOkJC8VSufs9juapsHoMvUMqs8KpkBwPLjU/RZSz1nqV0vHFcI0THmLlBlRTLZYLM5Ggg1JyoqR5thy9ixjMc/J8pL9XuC8SXZbJlUwREqtKTKBFIGuc9MMG9PbkcmuG2UqYMZjnYOYXjNqsnrG4AkkW6WYQDCzOiNzGUU0jHagbe4xi0u6bgMbxcx58qzg9mE7bX5qTFZxmhtC6Lh/+IF+HDg9v6LWJUYJqqxGyIhR0Mc+5e0GCFZRlwUmP7BdJ4fIrF6w3t2Sm4phaBjsFutaum5Lb3uWZcb5xQV6VnK2esHNzTX3DwMx5Ayt43r9jg93HxjGgddnL6lPCtrQUZUDs8IS44r5yQqNY+yPibAWBB9uf8Q2I9JF8vbADx/fcX7xJS+eX/Lq2YzRpvfF2Zs5UkqapmW/G5jPC7KiSPtMYsSNCusOSTHVhlmpWN/uMHmOUklpmM1WrHctQim0KPAEVqeem5s/46xEsUKanN3WoTNPngeMcVjrCd5Q5AsEPQ/3O6y1zKoKiBwPB4SQZFmWCstDoCwlo205HjqMqahnFdLA1ckXHA6/Z9jvGW3L6ZnkcLwjz2qUhK4LXF6+IIa0CTFay253pCgTqCrEdI7wYeBsdULbRoYxUuQZTdNTViucnzKVY2BWPWO1eoZzI/t2gzYV1u/p+xE/CLLFOWPsyGXBbL5Enxl0rNjt/oV29AyDpVUjfWcZ20ixqFmdXfL86orbu98TmhytI6YekMJyc79GyprlMmM+13zxbIaRinc/DNz3nmJmsD4gyOi7A4fuiNJV6kAcNH50eD8iasP2cOTYOVyU6AKOxweGRlOYiuX8jOfnC263R2RZE5xjPAzsNwOmMqjKE8eB465nIS8R2mM0RCfZ7e9Y33ckCip0bUNRV+iFSjl6K/FeM1vWNG2NGxRG5WQayqIkhAFPh7U9x6Gl9iZtFEbITP5vzjX/LoYrHxzrwx1lXjCvV3RdYHszUi9LisKgi2RjYLOlMJosqxlDy+5TS1kuKMs5Ws3o2gM/+clPGZueoWsY2o6gknrx9uUvaduOf/yn/0pRrtCyIsaRbmgZ/Y56PuNkOacqL5Ci5J9+918oCkD0hKA5OTsj+BohHUK2xOjpDzm7dmC+zClKQ7O3/OTtb/n7f/rfqWYZV1cvePPlSy7d/0xwgbY50hx3LBZ5GgJJO9ree8ZxpMxKymqViCwxEkTqEwrRphNiOKNrB8raoacDQaYsnzY/Mg4eERUmDww9aE7pesfDTc8f2v+LqxdvEyLZS271HGRgvzvivUudC9mRL96+YL3esNvdIU3Dby8vkATWtw3epY6f+aykzE6oqyVGlXjbUi4Mtx9aunYgz2pOT2tWy5xjs6Vp9uzWOy5P5nz50y/puoGbm4/c3P4RRQ3ihKoqyXPF9f0n6rpks9sy2g3WKYpKMF/mzGaJ/HNsf+Tj9S1d9xJ7Yjg58djQ8PaLv+T+tme/7Tm/XPDx058Z3APeRSBjdVbzpz+0LJcQ2fPNu3f87g9f84tfvaQsTlkfWv7b393wV3/9JYXO2XVbbj5dc/tB8/r1M4o6IwjL++s7PJFlOadt
PX2XermC2OO6AlzB6fyMr/7jK/7mP/+esQ8oHfClZDyecHI+sJivyPScH77dMfRrfvmLv2AMIx/vPpCZio8/3pFlyZLWd47b5obLi+fM547yBMr8kh8/fkTKA1pBpivM7JTzc4/zbaLzEHl/92eEfk5eGXTuOX9egFeoCFUuyVYSb3bsdiOLs5yyqLDDnN3+ng/vP+F9WoQ3hy1kawY3Q6s5RkFRCaLaI1Qk0yVVVfH1+2vyIjBTFdLC9vo9zfFbFosFRZ5jtOLlq1MyMSMNPIHDIWWtymKJ0hGhtgzjFV++/QuO7UeaZqQ7Bk5WL9H5Dev7lq515Drn6sXrpzyOlIpxcFQVHG6OKCl58/qKi8uXGP1H8rzEFDmH4y0AWT7trjtwTpJlgfcfbyisZG4uETINRCk8GZOtToAPknEMtE3PodtzefqMze6QLCOrjN3uwLETFNkZmT7HmIqmO+DDSAiJtAaBYegoK09ZGzKTc3d/Q5GXdM2Id47lsmRW18leOHVCRdJwcH/bUJY55xczxnGgrguKSlHWBqUzhJBolfaxQ5ApS+M8X355wclZQaDn7naPGzXBQbCevh/ouoFIliiOwWJklqhdE86b+Jjl8lNH0WclJkwZsMc+L/ApF/NoaYuC4NMw9ng5o/XTIl9MVL3A47zgcH5ge9gStpNqFCFhzwGmjittyPOCejbDGJ0UAiAzBWVRMDMGoRSJQDYNJc6nvi2b8lXDMKZFVWQiFz7S3xImPDMGoyfSoVSIafBS8tFeKCblayo91mkTqJo93tdJrZsyX0m9tIxDKjjt+55hTKTR0Y5PdsenTi6ZrHXaTH+1SkXRWk9K4KMNbbJJ+kBwDuvdRD+c1EcpnmAWj0XNQU/kwEdgR0wZJ+Bz7ss7bHjsJ0vqXCpyTlm0/8d9NwKtcpQyZLmmnp8+ZcpAph1nGYgx2SCtjYxjhx1Hbq5/4Mcf3jO0AxDRShOFJIiUXa7ymqIwIAP90BEJiXSoNJKUI/Q+EQsfrZNSQZ7nLOoFdVVTlgXt0PL6zRk+DGy3B4Zui5QS78YnxdH2HTbXzGYrsjLw4cMOHxQyPuqDTMdHT1GkDqautVjbA/nUD5SOIMQJ+OE90ghmi5LbTy1vnv2KIHsOwy1ew9a2HD51lLsHLl/WDH6YMnGRYTS8efVT3n/3OxaLJcYeOHb3FPOadgClC8o8p8w0+WnJ7qGlqjJeX16x239kP2Tsjg8IEZCmpp6tcMOAtzuibchcTq00rlvTDgUfm0B3uEV//yPbzRYpC85OX3OaLbDDJ37z1QueXT7H5IaPN78jHwv2xz3bh45XVz9hNuv59PFH2nZE6Rlj19OLLdtti/Q12eyXrO2f2d1Jtr3j+e4VZ6c/p65OUMWB+/UN79/v+OKL33BsrtnubxmdZbW45OIq54f3H/5v5t7jR7IszfL7XfGkadceIkVkVlWXaFXNJnsGIAFittzz3x3MgBiC3Sywu7pUytDhyvTTV3Fxn0dmb2pdsQpHeLg/M3O3d8/3nfM7aK0o8gXrhwNt3yN4TpEvSOSS3SYwm1xyrG84NA8kOmE1/xIxLHBDhSEOn//27/+aujkw9Ib6GPjyxa942Lxlf1hTVy2JvGQxv2B1UmBcz34fqI6WxfyCD2++Yr/bo9WCprak6YrT1QWz2ZymO/Lw9Z5+MBTlgiKcYAaPUrDdrAnhwHJxzuE4cDjs48YrKXjx4qds929J0ylSgnOG29v35HqBTnKyNGPoPNPCM1+kVFWP7XoKlfPk+Yx9dUR4z+npBBcqqmNFohacn1/yiy/+Fz5/8ZRcKrIiJykzEgWv/nDO9y9fs9tXtK2jSwKrJ09RqcLJig/33wAp292fRnF+Qqrm/OQXP2e32SAQtIPmj/eCxG748K6m7j2raUHrMsrkCjWZMMlrrBRMc8UsLcEGrGl58+49ky9KiiJB6RKk57g2zKYl9bDmuzcVy/xXHPdrglowKTKWc8fX3/+Byek5KjE411L1Lc4X2H0b3W1ZdELMr8+AIdZ0tAZT7ZCiINGaTGfMc4GpW+bLOd0xMNSB8iShbrYM1tEZ2G0Nde1xrud43NG2PWen139W1/xFiCsfHPiUNJ0znZ8yKSRZ1mB8DE7fbhpgSpZMUaJECY3Oai6vrzkctzRdizWg0o757An5yRWEQN+3BAqK6Yzbuwdu794hygPL80vazjBfLPnyy2dk2R8w7oZjvaHtLcFpTk4XIAxKzlCyxNqa43HHpNRj6DxwfbVgUuTxjdwn6HLC61e3/Own/0gg3kBvb2J5XFk6lvNTTuYXtG3NbLpCyIx26ElURlXtuDy9xvaapm7o/Gve377j8vKasijwwbNd7+htzeADaRqnrY3Z0zSCto5B+WHoCGimk1Osdwxuy+FwRGcfcFZghth3M5tNOBxr+sEQgmeS52wPr8mSkqzICW7C7/79BtNrrMtw3tF0A0Favvr+ntPTBWdnF+Tqkg/fbJBBMctXTOcTXFjz9kPHcT9wPNYcqzX538wxfeDm3R1fff17Lp4ZTqZfkJcF7VBxe/fA3V3DxcUFWZ5jLGw2NYv5OdvNmq6F2XyK0lOurhSzyQVpqmiHDWlywts3a9YPd9RNxWDPmS9XHPaaTX1L3b4nX5wQhOF+vaacZHz6xRm6SCjSkulkwvOnBqxiVl7y8AHSEr78UlIdjuigePtVjTGeJCsJyQN962LLuZ5ycfmUqr6jXCiGQVAd4Lvf75lOEtKgwSmGrmO/bTg7PUd4wdAdqKsdnz77FYf9EWOOZM7gB0c2VawmSwJw365ZrqbMplO6bkd1PHBy4pgsS+qNQvgZZb5CMmG//56+O8aJtiq4vnrBdldj1/eopGF+KjldXPP+3Vvqeo/Unhdf/ATpFKbv6PqaY/uWxu4x0x4XNC5I5KqmnCnMvcI0gmHwVJVmcZbivUEpg/F7RH7gfr9jX03J9AxjeoosZbPpEXSkCXz2yZcMI83OB0dgIC8dRSFI0xkhaD7cvOHF7G8wzYEwtBRZgTOGZh/wFpQWIEO8AR4L0hQmc8F8dkpdb0nTaAdpuy3ffL3GWEOSpaR5SqgDZXZN19doLUl0gukddd0AGikcUkeB4L0ZLVDj4Vh73AB971E1JEmOCwKcozpa2ranLGYMg0OrDK0kPgwgIubWBw/S4yIlnNk8JU0Dm82aPEuZTic475EyCrBnz8/59tt3OBsHIHhNmkq6rufu9sDr70uWZ3A87hAipe8D1kCWh7FrKk7WhQgoDYvTkpMrxeC3LK+XzIsrwhA3q/PpgrvNA5vdDmslwSa8ffOWPC9omp62Hej7DmNj5kqOYiEIxg2SGg+dIyUvxGLlgBh7j9yIW//BZomQEdMfURhRJMgfWeZEpIYlevwPAeIGL9rnoqgbaNqWutn8CBTxaNeKwiYKIUWa5ug0QycpSRJzXvPFBJjErY6UqPHrxrLi2NtVdQPedREiEiCi1+UIiJBjDlChdMww6dF66MXjJi7CIB43h2r8vwJIs0jmK70fc2UjXTEw2g0dgxnhGtYxtAOtdxGe8GhRk7H
DKxlLorVSpKmiyOKG77HEWogorqx1HzdsZuijpcq5aEET8dqV1D8QGKVG5vrxRfkoiP2YC4Mowox3mDZAGMYNmR+3kKMVM1EURQmB6BrRiiLTWJ+xWi1jHrLrePf2LVle4oJHBI8UEaPctD3HypClKWenp8xnS7y3ZFkWe5C0ZlrMUSpuJROdkOYZ0/kCqWIvlgumNkUUAAAgAElEQVQGvMeamKM7Pb1gNjcY29F3Eb89DIa+a2nqivXeoeqADSmDbaPFj5hFFFJjB8OxhkQrhCh4+nz28Uzjg4s/rdYjRlErguTQNrTdG0Tt0TahO2hmzwxPrq7o2oq27Xjz3Z5p8SmQMJkKpoue71/+BowkSeaoJCMXA2eXz5kvDM70aBWYTwv+3//xR1wHz55cMltdkJWX7G5vOV8uGeye7eYl9UHx6bPP0OmUHkljGg7VN1gruF/fsWsrPrv+BC0Us8IRRMfQvieVS56dn1IUU0xnudvdkGWK3a1DqxWXyzl9U/H6qxt649HphNlkTrFY8s1LjzMpIdOs3T2fPvt7ymLJer3jX//4b1yfPWGxWMa8OLBYnnFz/0eq5kiWzpmWZwg01kKaKY6Hht1mYLGcMJmmKE0U11Kh8wOH+g7vJNNyxWKxZHfYsVytONaC9bZhOj1lvd3Q1i19Z/C+4uzkKSfLJ8xmU+pmy93dGq1PsUMWu9GAi9MvOC3/jv/0618SaEEe+ed//n/Y7N/w4cOfEFJz/fxz/v7Xv+L3v9+iXclifs7Nw29J05Q07xiGms2ux3rF55/+goCh7fY87N+QKoXWcYh3PFicg4f17Zi7y8jzghA8zdEzn55wfppSNxuchSJJqJqU7TFwtkpIMJApgjxyu/kXmub/5va2QhlLLgJBpjgyJqtzyvk1uhvoH3ac5l+j1Sler2h1yXR2xiSX3N3t2O97VqXEqwWnp1ckOkEmgrv1gU8vlmS6Y7N74M37bzl0OT//qwsmkwznDbvjluXqBaEbqHZ7qs2R2WxC3fYY3xEIlGrF+TLF6A7pHbkIPJhbGhc4nefMZzOKPPCTpOXDTQd2QlnMOfmkoqosiZszmJbWtqTlBPweIRTzyZKnT6/JguWTqxcE78fBTs23r39PkU5ZnWeop5Km2fD1Ny85u7xCJykqc/SbWz68OxCcBq/Z3Nz9WV3zFyGupJAsFxcIKdhstljjyScpGIUQOamGPI+HKDM4um7g2LRIWSHkgHU9TXvPdKZw/hJUSvCC3nZs9++5Wf87oJisUi6m1+w2W5TusBIGG1G01kwYLAxDzdBZZrMLAgY3eBw15XQgVQlFNkMFhzMtaSrAx8Z5ITVINdpferq2xgye+ewCY3qauqPIS6blktPTGfv1GhjwAcoiJwnX5FrQ2wapKhSCp0+vKfIiPpbWY2xFmU/Iikhh2my2LJcLjOuwPmZ3nC1iDshF4MJsmtE1FwibMs2WOCm5P64ZEpgUU9K0xxiDdwX1XnL9xTMkmvX9gSTVnJ6W1MdA3xt02iP8gqZ+z51rqKo7NJLFLJY5hgas0awPHwjSYYwgBEeSKg7HO/74zb8w9B3TpWY6PWFX7RjexZ6npq3J82fc3NwDYZxIl/i9Hw9yscDysG7QmcHZWwgS5zxFfsrD3SuMrdBJYLANN3fbaBFMUyZ6iRIzAlvSLMUFuLtfY/0B4wxKzJlPp3z+5YKgFCiPcT390FAWC67PF0xKS9f2OC+pqjNQPcF6jO857lvWa8/JWUBIN3r5U1bTJfpEM7SOzUOFEDW79QwZcqazgvNzePvhFWnmmS9zVlfnHLZHBms4HIdowPIC0zuOhwPBCbybsL5vUbmlPQqEbzkeK/Ip2L7Be8tgAnXT09cNeVHS94LBSrzJULYjhIysWCC05+b+AV9EkMGxtry7O+BVh3cC7weEEOT5jLoGc9QI55GqZ+g61DGMyGxPXW8Q0vPsk2uESzC9pO9SlIj9VzF7r3j/4YgzHRDQiSKfaHSqWW9akkREumYiePn6T7x9/Q7nBqazWBLtXLQPSe0YugHkHJUEBmtxx4DSFeu7NWmWkqYpQy8/Is+9MwgCy8UJy/mKzVpgbA9YFqsZ63WDIB4qU5lGxTFOw+OJOlqyHjcj3kOqBHVTo4WOU/8+YK0kz6YISoQoMXY8zPoIiRBIfPDkuSLPE5IkFmtbC23Xjx1EOYkuWJ02pG8SjImUQIQlBAl4hqHn7nZHkucRn6wS0tSitcTZmAwZK3dHIeJo6pqmylB6wu7Ykp5WCC8QQdK1CdbWTEpJlk8oiiknp4KymFHXLU3T0fUGpXOaqmXoOvrB0Pax16rvBpwDwZgLDI/0OvnD4Z7HLY4YrWdhtF6OtkMh4HFrAkTFEyXEx2zRuLWKvLj4R2v1EQQhflSINTIOESH2EfV9TT+0o91N/ogwqMZOHYX+KFJi0a6SkiLPxs+NohLvxrzVI1Aj0A8W3494/PHSY17wB3iGUtFC9lgsG68jINDRikb4DxZJQsz8Ka2QWqCTZBRdj31pP3x+LEBmhH9YBgOybT9m3B6F4A+PN9oIkzwWIv9gVRwhJ9HriWc8sHZxIvAYBYu29Pg5UowkQSVRaTKKr/iJSujRNhjzTtb2hCDjwEDG7aB1nlSnmLGnzzuP/A+Jv3iNSimM6RFKsjo945e/+luOx13suCFukkQYASGDj2jyZuBwqOPPlhzFrdIf6YFxABAiJS/X6NSSO4uZ5ExmE4SI2dsy11xcDaP10+C9HcWoiMAJHTelNnhEEGMmL2YoFbHYtatbgrNkSYaaPUWlC5JUMkskxjTMTp5TTi7Zbx94uPstlyfPkSLQNTXVbs0kn6GLJcbX9MYSjOOw7dBJ3PwNTuGHkqfPP+P+5j3GDRyqCs1ASs0kP6MfoGeHzD318YHF4pRifk6eVpxdpGweYLn6jDzPKJ2jMz1eBQYf7WcytchBsN7cMATQkwTTlySFZTYpmE7nrLeg0wm6kOPGV9D1NWna4xQYDDf3r0jmP+V8PmcvN9TtHdtdzmK5AjK8ix1TSSJYLRZYp2Jey0kmswV5VuJKhU9zzlbPQN0jRexF9d6h9YTKrtmuWwQTTk6ukfrIYFuMa0H09OaWRXGNTR4tnym73Q4hA0rFPjVvA2kSgUOBCVVVEJzg4X7NfHLJcnHO6hT0P5a8u/mOuj5irUMXE/zQUagzdJYzn5WstwXBB4xRCFmwOp3x4f41x2oNwDDEzXU+u6BpDwhgNi9JMkeuc9q2ZxgGvDf4UJMk0bamm4zeNEgMOusReDJdsJo+4/NPTvn++1fsd0d2XnIQG6oqxQwN3lYQHF6nZMMdfgj4Q8uvnp4izJa9XzK4jHymeXj4hvZo6E1LVgZIFIfjFi3aGKWRkixPaEzPoaqomsBs8jnLiwk6dxzbI03TkOold5sbTG1IhObk4pwPd29ZTq5JCoEPju4QuHg253C8pe8dXgiE8pyfLLE46qEjSM3+vuJwbFkuUvI0Q5Hhe8N0mtEJR9sMKA/O9+wPNYdwoCk7BBOqQwsCrI+EX0mCMILedFhhyBNJrgTBeL
oBuqNiOV/iww5rAsFlpGn5Z3XNX4S4EiK2lhtnqLue3jiCicFUqTSFmpCnOTZ0WF9jXMcwCPLC4b1BSE+WCZRM2Gw/xGlikmP9QN2tKSbDWOwWi08dLc43VO2A9Q0mHDE2jACJ+Gbbt4aiSLC+x9oe7wNlXqIpCNoyncC0XNBUYbzjOIRoyQvo+ihYhBDkWUrX7lHaRruG86RJpCjFELvF2VjGGDw42+JsDcGR5Xm8URhJcCmT6YTg9Dg5Bq1K2kbQtRbrPUGIWAApMgQCLVPydEqRxuDnJJsh8xzTCdLSonTA+pphaOmbjCxbUObTsZjSc7paMp3IGBSUinyS8XDrUTIl0RJJTtN0PHkyRfiUYDOckTRHSMtYn5kkmkQtMX3Fw9AiRtxw0wh2xz2HxuJ9LMhcTBdsd3dYA2lScn6uGPomWpNk9PObnjhJSCTD4NhvB0xZsdvfoxNHOU1puoqHzZ7JZILUcVvQd5K2tegkHqp612FdQ5EscDY2ws9mE6yQBN0ihgBywiK/YjYrIDTo1DEMiuMxJxEZMu0QCEwPppP0nSGInsEYisJRJFOyRKDEQJImrE4TirRAiBRnFV5o6mZHZwJJoZmKhCQr2G0Nnh6hPIM19C009Q4tUgSSIAxCRGqOVgbjLYM3SOlQWuOFoB8Mdd2BTLFWE1yBaRX1vkVn0bZj/MBxt6eYFQy9ZegCihKCIM9KurZh6PpRsDuCG8t0R5z00EfhHLynbiqyPOFkVQACIyxCxI4U78ZMjlX4psWYFi0V2ilMMCgdqJuWojRM5yVJKrh794FjtY/bLTEgSVAyx5mYs7BO4oMiyA5nB0zvqGpJIJagDma0CIlIZwvBIvHkyRStAlmWoDWMYQ20UjGTJR1apSjR88igi9a2x7zRY24FlJS0XY9M46HVBUfXtxTZEtAEL0YSnPuR1QvAMZlEVLlSgUR75DgQMqMYFEiyQsRNVRufY0TEU0spsdax2zVcPimZTLIRUOE+Wt9i1gYebXiRaheiLVQl+H7A9i52UwVP3dQEfNwyaEWWaE5PluRZzuoKvBQYm+IDdLVk6AT9ENh3FtM72vWUoZVYE63O0e4cLVaxTkqNJb8Rov0ormL+ZxQto3B6PJg//vuj9/BjBiwwiosxC/WIIP+Y6Yp/kTyWEY+vmfeR2OYeP46vqvy4SYobrkQnIxJ9zBnpH2WNpOQHY9h4vTKKZhkeBW0Yv34EGnjhsDai4x9zao/lslFo6R+JzseC5secV3yQUipQj/kp9fFRhRDG1/aHLBcjqS5uS8PHreB/EJOjrVGOQjN8/AmPwu5xkPDR5hk+OkDjkEA8kgzHC/34mvofrl3EvhnC42sT4R1EyT+GyTwueIY+gguyNPnh5zaMv3fhh03po3h2Lt4fuyEH+0hajOeBEVwfv4v3uPaxLmDMHAqJknzMlMWOcDXyVxwCjwyKRGc4GwEdRSrROsdaEzeM3hOcIUtzdCJHMEuK8C6iJXy8diEDeZIxuDiQMtaSKEVQGb0LBO2RuYRBgkvjEEooEhHIM4eUYHpDX3Xks5xinjNUR5xzKJGQCEF93MdsZ8hww4TlyYS7B0NjKvaVJJE9ZZrGMlkEhZ6gfA9OIr1Co1FeMc9PSU5S5qsLVqtThoc1XhcE4ai7A5vDPaeLM9rhPdv9ntYaFrMTqrpjkuekaYYUkGcCr6cE61FjHq9pB5TWTDKN8Y7d8YC3CX0/4J0hSQJCO6wzSJVg3UDbHZjOErJkSW86hmEAn4/l1BlaCtA5eTZlV72iSAvA03cNTmuGYaBtq3G7XUW7WbWn7Y44ZyA4FKMNWMdtZ93sURICYx6IQKJj9UMg1htYb7h7+A5vPSGc0HVxkJjrFflyihCBwQWCcSRSQwAzWPJsxqG+Yxg8SqVImZOXgkP1gFLp+PuocN7SthVSCsoy9gc63+O9xdpAU1egavJ0MnbF1VjfkqUKLwwESa410qesJlNu9C07c6StLSpXcYCcarxOcEMskuvswNAPOFPj8kv6KuFgoLcdQ6io6y1SlOP7hScoR39sMMohRAM4imxGFSzNYPEqZz6fI0o7Lj862ja6cg77HdLnyDQhKE+SZkzLCWkhGcxA4w/xd9lagoj3nsOuQgVF1bf0psc5ze7YMDiDxWCDww0ydk6almAMOI8bDE44hiFa8gUPGNdyaIbxPU8DE85WSXS6hfHWOQgmeoJrPIP3qJAynaVsdjuk1uisICvTP6tr/jLEFSLmLRJFUBOkhN1hTZ5HRZqIFG8Eu2OLEzsQPUk2Z3mScXfTUuYl52enDE3K19/9C1prVsuLWCqqBJ99/pz9dmB931BVFTqBptnR1DukSsgKR9fnFElJnhVkk5z7mzueT65QImA92DZjMZ/Sd/GOOJtOWE6f0ezXWFfF9TCOIs9pK4tKNMUkQeoe51uKNEdrjzEH2r5gdXJG1/bsj1vqeoMUB/LhnLYLtM2Ak5a6r/Deo0RBnp3w5OqSV69e4RpHnhc8ubrm5fff01SRiJPkgbpqmZXnTCaaNCnwLkfJCkUgTyVlmTOdXBGU43A4YnzMBOESnj95ghADTbfDmJrV/BP6fot3HUmiWExXfP/N73jybMLF5RXT8oQP7++4uD4jEXOGTnLcDcyPV6iiirYqr9BhRgivsV4zGM8wNGw2Q6Tv4eg7S1MZ6uVXOF8TzBTTB/APNM2WokijoJRQFCVFWnB6XlDXDev7D1TVmm7Yk0uFsZ7tYc/dh5ZyXqMyg0rA25TdbkDrYbTiZHQNpMsz+s7Sui24Apl6jK8o8pzV/CmL6RW+6whij0jic2WVY6pPSPIeqRzBlhRlj/M72r6mbhyeA2l6TTABZwNFmXN+fc4sfUZTefb7mt2hoyhyqqZmu2mRsmE+K6nrLUE6ZGKpu5btWuCDiTdTmZClkqHpSQpYrCTTUtIdISsNKo1lqIURDH3Boe3IkoKiLFG0NMeWVXGCC4G+GnC9AhK6QwNO8dn5Zxz6HVfXZ2zWa24/PGC6iPhWY37TOUWWF1hT4SU4H7fJXZWhRIfW8XDmnOaw7ynK2Eg/dIHZLB6UkjQFArvDHjO46KcXA9mUiOJvGybzhBAU3kIgQ6ew3h5p256iXNEZi3Utlo6Ap2sTrp5cc6yOmGEAApPJHCFAKY9SAWdS9rsNic4p8xkhCN69uSPNM5rmnj4ckOVIZDLm4yR/RDTESZ8nBtmFIrgewRjwx2KNQeuUQGCwPdYPuHHT8XGazcB0XqC1QMhAlqeU5YKutRwOe9pmi1SSp5+ckpeCtlH0nUKobtyIaLwLHA89gmj9cgbMEDDWkuYagoqbm/EwGkIgzwom5RQpLIv5CqWymO0TUNcN+WRCZ/poFXYtiVYMw5Gr5wdmVwNIwXp7w4nyKKL1ae8OeK/hw+e064zDoUaKhKF30U7YDLTtgA+C/d4w9JHo53wgOP/DJkRIEpn/0MMUrxp+tKGKIvdHlsNRZEketzjjOX8UsfHMP1LaEKPNMJ78PyIYwmOh7liAax29GejGjZR3Y3ZMPsIwJCopxtLdlCRLxm1ri
k4eM1DjEML8UKhrncPYx66rHwiO0WZn/4P4iR1aasx0RQusGPvIHsXWI5vxEU6htEQL9THbNT64R83zcbv1SDg1diQbPsarxoFJvP6Rrqij0JZSIcZC4ccNYyThxYm/C9FiF0yA0PNIEIwbMoeQkkSnJGmOHi2kj18XBDp4ggwUZbRpSjlahsXjNMLHbZe3MSdmLbc3H3j1/Uu6oY9gjdF2qVIIQiKSaM+UEoLXxK64EHOPw0Dbx2Fn8D4Wjjo+OiQQAUWGk26EU8R8oAyKVAuCVAQkeMsjcMT52JEWhzkGpaIdU+lAkaV0+4bB9BjXoUSgbfcYo2NpcyaZ+ZR219GbHUO/53x+QarMuHlVBCPZrbfMF0/ougpjLNN8xvkq48ObHc5DkgYoa3rnqIcdImgcHhU6nl58yeG4ZjAdRTGh7wPT7BThFH1V0R4rut0Tnlyfsbx8QjE/p2LC06e/Rvqe9eY9rz58y9NnTzluXrNLW5wcyNKCXf2GJLnG20BrWoq8wIkJTXfABA+ipO0FyAXTskDpaPVM85K7zQODMZydnrNcTWj6PagMY1uO1YZyekLsJbNobXFDHocWJmHoDcEZQui5v7/lfFWgVM6x2uF9jfcWoS3BDdw+vGK5XLDdren7BikCZTZl6ONwLorzSEpOCs3xeKDtWpbLGUpLDsct1g50nUGJKXe732P8PR/uSu7eG4TLObvMWJ1kTCYZCo1wisAt9XHgcMhZni+5XX+NMR5rYb/rWZzM+fBuQ1nMKMsVkoy6WdN2R0Kw9MOBVJ8y9FtSPUcpTde25EXGrDjHDJ6qOzKYlsX0nHowCCBLoDoc2G2WSJ+T65LeSMrT50h7YKIztDql7+JQT8pAX/Q004bvlcCYc6wH091xv75nlj3h+vkJXVNTH4840UaisvAE0eL8jqaqmc5ydLJgOpkxX+WsD99ieoUbFIlQeLvHND0n80uk8GwPNyxmFxSJjDnRtmboNty8GRh8h84zlE549d0tk3JO61qEstghcHQGnSiscNR2QAuFSmr2uwPeCkDTtA0iDah0ghagMk/fbWhchzMOETRFekYfplR9zzSbsSoW2Ps9hViyqWqcNCyXJeVUc3tbspifslitQA1/Vtf8RYirJEnpGsVgFEqXTEswXYa3A8e2oarvaIcjodc8e3bO6emC475jd/stzUYgpwlt1tLXki8//RV9a+n0wOcvPuX9+3vev+pIpGI5mZFlgqaSZMUZWa4pJjldLWnVjiTzeFtx3Fmm5QLvK8qJYDrNsHbFy5c3TOcFRTFB+ZLb3WvWxxohG5QOaJXRbh4QWpFkCQjLen/D5ac5Qxunk1rDel2zFwlCDQQyFvOfUk4dh32L04JsHmi7Du1A5T5udtyBMn/GFy8K2u6I9xadpFiT8OTJJZeX50zLKa9fveNXv/oFOhvY7De8enPDpXrOpIw3RmcjsjjJH9i+2WKMJUkVRZ6y391ycjqnKHNWlz1OfmC98cxmBWWZYk3PP/3T37LZvedYH2kGS34CXW1p/TEWb6qOp1/kpOkV223Fbrdj331HEqZorRmGA3V9YLEq8U4gVAwmW6sQypIlBc8//znOFvzX//rfef55gU82iGxKXp5TZFe07i3NUOOV4vqTc64WJ/zLb//EdnPL4djx5MUCXSh2h3umc1hMU+6O9xSXGS+efon2Ge9ffeChuuH+9jtEEKRJysX1KX/45oaz1ZLlbEaK5sPLb3k4vCUtsviaAjp/Rz6ZofQU6y21+UCaz6jaA1IqTuYXDH3g/dsN08U5SmQMledqecXp/JqEHXjDcvaE3f5IuowUHuEGjpUin6QMrSe0ApXkBFlh6gRJgkwSnFJYDMEMHI9HBtvRVPDTn31KkgwI6UjTjHa4h6ElGKhrgRkGkmLHnBk6ycjyDONqbtY7JDmTYs5iOkeplotk4GRuuPCWdWO5CxPqbUSnqtSTioFqX5NPC5JCM5stqdcZ222HymLwXgWHQBPw+NATHAy1xnrYH48IJVjMVmTTwL59YBCS7QH2b18TQkuZTQk2oW8d3g0oteDs7BOEDPT9EG1y0pPqCZNyxrOnl9ha06uOch44OS3iIVUJZrOS1bJgva4pkiXL+RM2mzvubt8ynUwwvWJa9Ihkz3vx30jUf0LYQBBjroU43X8c3z9u46RMI2VstHCZPuB9QnTymbhV85bB22gpstGiaOyAGB6zPwnOG/IChkHTNor72yNnF0tmi5ymGaiqliJJ8SEi3KNVTrK+a7i+ntNbjzUNxgyxCkE8UgIhBIvWGUHA4HuEHLh89oTdpqcxLYGGzlVMk2csP/ueycoxK0oG2/P96/ds1RWHtWZ9/8DJacKuMrghlgB38sD99paLMwPXGs56fvKzvyJJDKa3eAtCaAbv2W0Bctqm4ze/+Q3r+wZ8zmHXcnjo+euT/4OuN1gbcNZjrY2HZhGJj0DEc4t4GIgbo7jFcdGTyI9XLFFXxvya+PGtbuw8Gz/gP9gSx8P0R2ueUHhvf8iK4XHDkdo4wtGDCwQkjoDgsY8oQaVxQ5jnsbdLJ5oiT9BpxkfAhZB4F618MQtlGQZD27aEH642AiH4EU5eR/ui1rG/S42CVP7IEilEfD8Nwf6wEUSgEoWW+uPHUvwgwONTE0brksF7T98arG2jGP4PQjcOSdTYYfVYWKx1Mn7/RzR7nNw7Y3GuGV8ry6NifbTlJUnsIPOokY7LuKUbhSrx0BhpgYGq7sby557BDFgbKad2zJI9bjmVipCTJNEft5E6G62WKloWtX7MxcXnIXaPBaQDO3j6weGdAeL3y/OSspwisiwS3QIfX09r7cfXKlIkLa9fv+bl9y/JEs311RnL1YRDaJF+ggoaERLutnsup9BXB4ROuf7yH7l/+zucc+SLhOIi5f23eybVPcvZitB11IcNv/kf3zKEnmwyRSYFUk7Y32nSfIkTFqMlhTij3vWU5SXFzNFy4Ljr0OWALBLyLOPiaso///bfeNq9wL95Rd11+BrOZr+lsA3t/p71wx2Xz36NOm/xSYo2Kakt+OWLX7LZ1iiVo0TKt1+/5u//51+QJB1N22JtTVFItJTkSUqWZaymE7wL1JXm/OQLzs5PUfmRV6/fUuSaLJtGzHgDSW5Zzj8hBMXL13+EdE6uCh6aG9a7DT/92V/zv/+v/yd/+MNv2e8fmEwzBnvEBc1CndF3jvc339KYp5TJDKUNnb2h9YL9+yOL4gwtU5qqYrWY4mzDYp5zcb5kMp/x8uUfuJj9DWVZsphUZJNTHvYzNseBLMn49OdXXDzNebjbcH9/w6v3LUla4qXBixZjekDx2fyfSHhLz0tceEfbr9l9v2S1umC1WJAlBZt1R3V8IGZU49bZm56uT5kXBcvZhDI958tP/jPfff0nTLsGU3N/+8CT6885Xy6oqpaHh4rzsylfv30Ta+/yU0KjWM5WvHpzx2AsaTrh+uKMN99/oJwN9MOB3XbNYReBM5NyRZpOkGKFFyldk2LcEStrtmtFMRFU1YF+aAkhcLKYk+aaIj8hy+YIbTjsMq6uS3yw
HA9H3rx/FeMjdcdsdc7J9ZfY/ZHdboMUkr5rqfd3yLkiT2fYwdA0e9ISnKjj+x05iUuYTXr6RrFM4WzlyRcJTbvn5Z8GRKYppin9cYPol/R9i1CaRC0YmgGtFFmWEZC07YFJlfJ3P/0ly9UFAcFX4b+xeTiSX+WUkzl5OmUwjk+e/YKrJ1cUWc4f/vUPf1bX/EWIq2Hw3NzsKaYJpwtNWU7ZH2t26y39UCP1gEDy5OkTpHZsd1tkyCiyJcmlGdf3gt3hhrTMMdax2xm+/dazWp6y2b1jUpRMyzmmU6xWsbH+cDyyWR/4x1//Zw77ivfvv8F6w89efEpeFLx8/TVdH8lT07Lg6uoaPeKKu2aIWzCVMph4ACunc45HTTkVeG9pW0PfKt42PXVzR9sYvE148eJLnNzghzDahQQLdQ4Y0lShdEGeJsxPHG/fvMOHhNn0BELC/f23DMaRJgUnyxP+yyQhWhEAACAASURBVH/5z7x+/ZLtbs16c8vlswXbfUdjvybQslgmvP7mHeEk4/rqE5yD373+isunBRdnl1R1hfUNT59kVNsJ1fGAEIFZOePqk4K2a3CuwwTDajWlOmxZzBbk2RIlp2y3Na/ffWA6KVBjuXFRag7HHdYNTGeG04uM775ec/WsZDrXDP0pOvVst1uktJRTRZHn5Mk16+1LqvY9QiiC2DObnyCVIlFzkmSKznuq4w1ffb1lMAmL1QU6VHTDhiRVaJ2x2eyxpkSYOd3GYCuLKC2XT04xbcd2u+X2wx0//fIf4oRrEHjn2O0PPP1MMdQ1TQO2jz01Z2fPSUuHVBEGcnn6D8wml7z/cEez7bi8+JSbm1vS5IR8LENd7xp0IjjWH+i7nnbfo1XC++1X+GDx1uCN5bBtCCKQ5oJyolCFZbvb0NYGQiApoDcDMnVIJD4IunpDpid0lcIPEuE0RaJ4890t83nGycmSy8tLjFE0/Ro3ZLFkVmfYIWNX73BW4IxCyVkMZytJNzjevn1PniZ8tY0liv3Q0fuS1kOWZiRJilQJ69stRbGgnGRIJelMxvmlJpuccGwbDscDgx2YznJcsLiQIGVBM9TjwU8jVYJzmq419J1kaFpqYen6jizJ0fmCIEFwJEsVl5dnbHcPrDexMPrs/HScZDvq45H7e4lWJdPFlDQVyERSpKcI8RuydMZicU2iAsfDQFCB2XJGmj9BqIZ371rSJGUySVgUiv5B0PXRnhdtWHFDIJUiBI81DmMk8/mMIs+jLdBZjgeDMTFbAoC3I77dYYYBN/RMZwnPns9GSIRhMIariws2mwfKUpMXS6aznESnXF+dYIfAftNFK1wYNwsi4INhsz5i7TVpnjGZ5R/zRx8tXETqmuktxvZ4L9FasNttkUlABI9TLfLyD7j5ltYM0EqywlF3nhAGuuNAmmjKfML+8JZEnNGFI72rEKRY37M73DCdLplMJ/zbv/6OLJdMypzgPYfjAakV15cvRkuv5Sc/P+GXfzdntTqhyGZIcupdR9203K3XbPdHquOAaSb0B0dzrDGNYhX+BuMkQ+di3xkC8GMHWsx3KSlRmo/WNiliR86jWYwgEUFFiyCPOaOYZQOPD3LMMsUcU+yY8h9th0JFUSekQo5iL4orEDgQnn6IG7WuHaEeCJSKw5nHjJdWUYxkWTaKE800z5iVxXjwl+NWZMS7OxvLaH2gHYYRcT5eEzF79lhYLJVCJYxwDf0x9yVkzD7xGF8LYxcXj4uuKLZUkqJF+Oh+FCNI5HEr6Efrqn8sQvaeobMxx+EfbbTEnBMyUj1HymGEZQhUZI7EvDJRRCZJQqITrPVjnlOMP89jj5MUsdLAxgqNIi/Ji4JAzD9Z5/AufLQ2ehcwZhjJhB1tZwlHzSOlXkgxCsRkfN4eN2syQjfyKBZD9AFHy64Y++HG38e4TI1EQDV+ja5rOex33Hx4z26zpe978jxHZxleebRcxv6zrsPamsEfuWsHpLYoOXD3/g+sFl+g1P/F4WDYrA2nVyvscOBoABNzd/l0yU+e/4R9u2Zf76nqHf/wi/+J1x8Edb9HiIHTvKDa95B3NKbmdnPH1bOCzf0WQU7wGYdesDhRHNfvEKFEqZx8VvPvX/07l4vPeHr9nF//+hdsb77ju5d7FssZl5/OUFnK65cbdB7wSU8IPWkJ796+YXu8xQyGPJ3x+YvnrB8OhJFe2vU1WWqZLwqk9Gw2dzTdHpVkTIoVSiakck4iUoJoeNjcYowjm0g639HVnrPJc54vv0BVHa9ud+zvDwx+IEski/mXHOp7hmGDNy3L2RxHT2Mb8gRmxSn3Nw3z6QVJ0hH8gAstF9df8ubVH+mbFpManJO0x0CYHui6lP3RsZgJLk4+5f2HdxhTo7XBdgfqdU2pSxYXSxqzQ6oSdMZ6+4Gb27f87vdLvnjxd+yP57TDGnTH/cMNBM397QN4zfn5JSenv+Tdh9cQIoyjrvc8mX9K3x/pEvj0k18yOfsp/9vP/p6H22/47qv/j9264e1mw6yeolXCaqWZTJfc3d6SpAlpIvDJgFY5+JJpWjCdLKjbLQ/VdzydfcLzJ3/D3/38CdZt+Jd//e+Ax/kO0wscGYmOdM+u8UjRYEykbQoRmM4yquqW/a7gk+eXlEWOMZ6uStltj/F3rx2YTa442PU4ikqZqFOKU42lG4dKmk+++Bn7rcfiSYuSQs3ZrHeoVEDwZFnGcr4gbTW7/ki1C9SHA4098OnVCdr3GF/TVpGIq/OUssiw3nEYOnSes1qeonRP227ZPLzH5BP+9NXvEPLbCFO7fcvs7HO0nkbq9v2Ov/rJL6mqP9LVBk3O2fXpn9U1fxHiKoSA9QbjAoPpsJWl7Spms5Kpn2CM4enFgsViyeHwwLFao3RHV7dMp5PYdeIdZZmQZRprBFqlzBYTvCsx9ohS4MPA0EqKScrgKoIYyIuE9WbD/vgQs15ecawq6rZBipwij5PCfujpuposy1AyQSuNc5q6qem6GiED3iWUxQlSGLqho6lbBDl906NVSpFlGKEx1jAMHfPpEplojsc91luM69GqIJEKnUDdbEfaGDjfsT/e0jR7klTjrOT9myN5Eb3CfddS1cdIQ0kVhyqS3nQiWS1OuL7OmE0mdK1nMZ/x5PIZfZ+wPz7Q9YKyKNACBDO0KkmTCc32wHyW0fdj2amS9H3LcnZCnpaAJMsHNApBO2bdNPcP7+LhVmcoWaDUlNnCMZlM8A68G7C2xXuHd4o8mzGbXxC8ZlcZqnqH1hnXT89w3o+2DwHCsD88cDhuQcR8SdM80BVLlsspXecRSnFytiCfXHK4qdlubtgfb8jwHNcZvRpIpOLFizOKYooSORKJdT37tmKxmFL7DD/EjUs+0ZjgIORkSUYxTah2LbVscdagVUAlnqefXHDc6Pj4vKIbBrrKILOAUI6s9Gz2O1Q/kOXR7tO1ffRY8/9T9147kmVplt631dEmXUZEysouNd01PQ3wZjDDeRG+KHlDELzicNjkdFexqysrK0VkKFemjj5nC15s88gckOjrZgCRATgMnm5mx4/t9f9rfWvGBcl
kNXo09J1ktsTn7VN8SAkMTOftiNYLlM6RqsfOlrqeybIeYwJNXeGdjrm1Kh4sJz/h/EyuM7Su8W4izZekcoudYPYD8zwyjANd3TF2M9fX1wh9hZcrgpX0p0e2289YLhcE4ehOHavlJTZ0jGPPNDsyo5lnh7Nxyqu0R4gEKc8WGRno6yGWNZNA0DF0feyipUoZJAojc4pkybLcIIUn0cSwqvMYbVhWS7LUElxgs73Eu8DYTxwONVUlAR3LHjvLmKbnSXfPfl9Hep5StE3NZCdGO+PDgFCgTZxkNaMjT6LN73kKH7cYARF0zEa68DG/4j+eTBXGGOY5IESkueEswzwRRGCaJ/w8UVYZAFmaRwvdudwTABGvjSQ14D3lImG1LFguM06nEa01P+V2oK576npksYx9V8+n5md59RGSAHAWikJ4mrqn/OwdsX5qorEtd7tvudi+YJrg3dsTzk2kesHQnZh0Q15kzINEFxY3W+YAiUjxTnI4HGJxb4BhnLBOUlULjDFQO5I0cGqfSBKNVIHlqohDnGpLlhYED113YlUt6MUOcsfVy4ShUayXS04HTVcHXq2W7B8nZt8zDD31aWT31FDOv0D4WO4bUefujH6PmSKlzwAGnnNBkYz3fHjnvA2Mr1ksWn6OBcXH+LPIehZj538RBM8Z2nDe/IRz7ugjNv6cL/PRRuK8iHxEKc/gl5+DJqJAUlpH8SbOwkTF7iotY0Fuon9unnwe0j2/35yLlAMjEsEUNytSoFSI+bGzxfG8FPtoB4x7OPjYv3W+dvx5wPCMfH/OH0aQhQARMAAhOVssz1tTnrNt59apacYKyyzgY/+WlATvSYzBTjNKGqw4FwT/Nx1dMXP2LGSOxwOr9TpCN7xDIaME/Hi9x1dHnmmKOoEEg0D/BFvhOdcVrZHBOaz32CnQ2jn2n6WGrMh43qIhIqjiY7k2xH4x4pBu97Rjv99xOh6oT6eIZQ8x1xkzqjNSZ1RrifctbV9zef2KYTjh5wnnJ/ppRPVLnLd4FysgktTADEFMKAFpVlJVJXmWc+o8wU0YpfH0ZGl0JyBn3NSQVlcE1eBnh1CgtKMsK4Z+YOgsebrk5vaSp3f3OCcwJmGdLvj8s5cYFDa0HA4JowzMoqEZZvyhR+mM7eWah92PWC/I85zVZkHdnpBSUy1yymyBteGc1U6BQHOaWby8YB7VeQvtmTpNtRbsHg8olZBnCYfDCWWi2yYQSJOUapkhp5JUKHLj+PDja/Yniyo8q8WaPFvjxgBeoRBkRrPc3vI0Dtixphstyglyk5PnCjvXeO9Ii5Kmb5itwugVaVoidE9eZqgUAhN26hn6R+p+jxKKtEjREuS04IsXVwxDT9MfcPOJwIJhjFurzfYCy4lTc0/T75ndRK4qPrn9jIe7HVIY8rKkKiuSDLI0w1mBVhlZGihXBj3mKGk47A7s7v4XLi9v0dqR5UtuPn3F3h0jaEUmSGEQWPrhiFQV0kgcNcMQN6+JzlHGYK1kUeVooSnzFS9ffMab9zVKLClSgUkFjW/QIsG5GoLDyJTZDnTNkWnsMVoTnMa6kVSvUHi87bDzjqpQGJECkiBTisowTzOrcsGyWiBpGYeBoFNgxvmO/b7GjTmL1RKTGpQULKo103ykKhakqSJQczycCN6eCayaQi9pHz31LmCFRCUJwcMYPEUa0FISZB7PM/2RIi/J1CWrckKmI/e7FilzyiznKs/51Zd/R7LY8HS45+u7v+f+3WvaQxO7N3HI1PMv/fnXIa4I5GWOTiTjaBmnGoFmtdgiguF07NmuN2idgT8xTx4fTjgL3sUAP1jKsozCR08URcn1zTX3H/oIgggz+HD+kEoZpxqPQ6eKu6d79sd3CFIkmuHwAYGgSJekiUJrwfE00vVHYEmWnj3xwuDsKXrorafreopcngskLXYOFIVmmkcSk5EmCd4qprllGGC1yDE6JYSWvu9x3mK0gCAZhpbRHZAqloYiHE37iHWWTEvmYeCH79+is0emAdq6JwRHkS0RBPyssU6jhODq6pLLywJChnczL24i5WYcHUk6M4wgSalKjxIVSi6QFDw+jhSFxyQ2fvAERwgx0DtOsevLpDVGOIaxxgeJkCVNc2S5TmNPSciYx4zNdkOW5ozDCDiUzEm0iwI2XZFnSxwNRSVxbkQIwe2LJaN15/4UiZCWtjswT4KyjDfssR/xYma9XXI8Tngn2VRr0nWJGgXDlNCMgTJNEDaK7mWVcXGR8bS3ZFlJYhSTlRwGiVElSmbYMGP9QKGhPXVIWZAmKd5LunYmTBMhOLI0QWlHsaiYGsM4WGbvEVrSNB250qS5xBSSsekIdsYQp+sBh0l0PKZImJ3EjgJrFVIlSKXwIR7ErZsQEpRQGFVEL7uC2XrmZsaHiWVqaLqJrjvR9T0vP1sx9YK+n5idI8iEJLcINVMWhk11yXF3pJ46ArH/R0rL7CZIcmRi0GEGOyNbyPNYwIwayYuCarHiWA/MNqKXfVDMw4SdZ6SIG5MkLZiZ8WFm9j1Cc94kJCBk/PrsSFODUQYlNanWrKstZVYi5IxWI6nacKgblJIsqoqAYJomsiTDuYCbA017Yk7H+EHsR5zv6BOJd45hmDm1HVJ4lCwYBsswW3o7MtmRXGaAYnaCcYbKxDzI+cj8s3/Pm4gQbUR2tjjrUFKhtCaEhMm6+J6eO4JiLgTsPIN3ZKnCWYHRCXmWYnSCtVO8N308tIIIHi0VZZGwXObsdwPGnMl5Z/DBMEycjgNZFuEY8UAaPmZBIPxMFChAxmyBqrHFG/J1hpYpzX3K09M7rq+vIWjqesaHI9vFF1i7i2WtCKZRk1eAUgiZIKXBO4WzFjvH0HBRZMzzFDfOSU6eS/JiZrd/jzGKosjJ8oKhddgppbcwjgN29ojEIKUizxWbdcq8Sri9XbDfQVNbvvxMsX/QBC1o2pnHhxPJ3UDVaBSG2U6cjj3jAH5I8VYRfOxWco54oA/PFMVYahvOr1kskI2kup+AFeEsrp5ths9whwBn8RQ7vs6Pfu6mUjHDw3n7I0QkHIrzIf75MO9dYJqfbWw//T/lmTQopcKYWKJsTIoyyUfxpc62tthhFclXgRA3nP45YxShIuGcYVIynLc2z2IunLNBijPU70z/O5cYPz9lIX/2HJ/FV4S6xOEXH5Hzz6RDfxZ8/tyP9RG8QcCdN2DBR6EafMyDeWfR2jDOw0/XMT97bX4G7KibmjTP8CGCYyTiuav44wbu+T173ihJKdHnXrznPqzg/Vk8P/8NeA/DbAmIOPgRUUSf38yYq0N8FKEhxCLm5lTzcH/H7umRrmsJPgqCWPIMUnisHdClIys00+Rx/UBZrFFipnMD42yZfE8YH/HenXvX4vWndQlhAO8IIZZgd82Rsa/Bz2S54nh4oO8UJkswRlOfaqpqwcwQn48yGJWx3VTcjfd0XUuewmq1Yf/wHi9mVAKZqri9NnTHjqmveRhmQjowq5a5b+k7g9I5v/ztNQ+HgAvx+kuSDHt6JMtKyqKkzLMIdMhSlJJ470lMhVE5Td8jdQSSBS+Z55
HmNKBUilYVh8Oe1bokEG2/WuUkiWIyOanUGD3QdCfaznO5XbBYbkjTLW3zSHABfRZ0ZVqx6xyKjDjakCRZIDGOYYxZucVixeHwREChTYVOCpw4UVQ5aIm3Ez50DIPnuD9QLjcUWQ446j28ul3i7YidO/q+JRMZw9CDlCyqLV569sc39OMJIRSZXnF5dcXThxqdpCyqCqMM41gjxRlgA2TpgiQXmKxEBk2wPY93f+K4+5HtdkO1zLi9KrBPTyg1AxpnBUpZECNKlWgNUk20XeyA1CZutqXT3F7egs/w3jHajrvH93hnSFVBnghsYlHC0LZHBJoyW2G95HR6RCLR2kQH1+TRmWAcT9SnARcaqqogBB/tfClkCeRpyWazJUs0dtozDwGZFSAU1nsOxwOlFigZO/KEjOcD7zV5lmESj/MnnB1JtCZJFEZJnNX0+xp8SmJytAyMrsE5RbBDtGublBAEQ/+ADimZXnKxfIkz97RDh5KOPAms0oSb9TXBLBh0R6YUh91bjscjNkxYMZBVPxuu/H/8+Vchrqy1fP7FX+GD5f7hjvrU8YtPfwde0HcN03jgzdtvMOKarp+YRoWcPTfXLzgeJuw8kZcCLdf0zVms5QWb9S3/5T//z2wuBakpSVRFebUkNVdY945Tu6cZOooy49SAVB1SzXjfsy5X9IMjMQu0ypDKgZoIzDg3QfAEL9mslwi9iDfZIOnHPW48gE/I0jWr5QZvc+a5RpuBpFQ07YHUfM4wKLxTlOUF9x/uKRcpaVISvOb1Dx+4eZXGFvkkRQjFh7s3pHqFloJ6PPL67T8zycD3fzS8fLnk3/7dKz7/9Irf/+O3jF2DUCNGJRRbjZ0WeHpMZvns0xc87B4JXpIkBcVyQdPuuLxaUB8989ggxMjLmxv2w7cUixwfHPv9jrwK3D99i5sntArc3Gx4qu9xoYVgUM2GxWKNtSfsVIOfwKe8fPnyTI4KJEZxc/Ulu/07FouSefbsd0eyauD6Zss4zDhn0dmJVbYluIwsTTAmkOSGy/wrhNDY2TOZASNOZIuJsXH0NezfD9z96e/ZrDYUleIX68/47OIzMIKr7adY2/Pm3e+j1zb7MpbJzpamnilzQ3vsabuaydZ0EzztQL9aMs4P3N+/4ebiBUJPpFlBpktMkfHDm+9xdY8SEp1nvPz0koc3J+YZhA4keLbbFCEj5EQpR7YpaZuBNDHMs2boA4N7RCSBLCsJQdL2E0iFnSO6nETTThPdcBftsCEwO4sJKV6sObUNPlhUnvL4oaPtLJMDi6c+Hdmstkh5otIVZbLgGN5AeGRTVqSLFeFqSbG95f1DHalEbmBROMz1F0yT53A6orUjUDCMnnlWSFlQLRJSvaGfH1BBIsiAhIubikN74OGx5unxwFevPo9IeKMQErwVXGxypByQwp+7WpZcbi4Zho5p6EEGtlcVx9Mj1s+ROlVWvPrkBd988zVDH2mbRZ7i5g4XND6MOE7MU+ybyfMlRb7mWH/AWxE/XBKBRDP1kjTV9MOM1Jo0y9AmHjyj9SxakqIlSH60FIGlbVuc9bE0tljQdfJMIYtZIO/c2cI0I0IgTyTIEcIF4xCFyPXtFR/uPpAYg9bqTB10pInB24DRisUqOQMc5mhJEyp+ADNw2LVsNvFnVjoeioMnHuzh4+ExCi3HMLdUv/6RyY3k6S3bzQ1aLTjVDzg3kVcFN8WGx6cHRutYXVyijaVtO45HWK5XpCYFOrRUeB94cfuK1XJDWVZonXL/+J6uPzDNM0VxgVQt49QyjArvDddXVwxyT98PzJOn7zuurgseH+8Z25mgBDa05KXi9bfvebofGQeB4jXL1QXVwpAtFOU68Lu/+4quHqmWsXT9/bsHpqFCvL/BdQXj4Nntd3SNw06SaYxW9JjZsZHWJyM0RkqFluYjGCJuNTxK6fPrGQizADmfiXkCEcKzJP5JwPoJJ8+BZ/+TPH+mV4rzf4UCI/RHQRDOolxw3pY5Sz+NtN6ftzfn7yUkSiYxQ6QNWZphEo02Bp0Y0iJaVROV8pwXDN4xzwPTNMY+LesYbYRRwFnsCBER9PonfLxSCqN8FCdn4p7SOkIAIGYO7bmkOa5WzuLKgvdIoX+yQuqIPUY6hPQgQqwO8FHURfhPTl03SPP/Prz8nGBo7YS17myhVAgVB0TijGF8zhgHH3AiUioDAXUWXfKcFUvSOAjM8hyp5DmHpbHOfdyuCWFxNpy/dyz09g6UUUigb1seHx/54bvvmMYB7yxKiHgoDx4pJd7NjH3DOASKTx84tZJ+8EzWcPfmDbfbLY6Zfm6ppx2L/AVCgDGOLJ0IPqVaXhHsO467HXd3R9K6pD95kmokX6QkScIPP3zHw9PE8qJie7WgOTmEqtG5IJDipoxN+VvyysXcpn5gtA3jcEPrHWBIg6GfLPXuDf0+ZuHzreTu/Z+p3UxCCgK8gA/v92w3rxjHnubUYueRJBMECqbJkMi4LVmuMg6nHQB/+7vf8e7da4bxATkLBDmYlg8fDmy3L0iTnKaOwKjNtuJ4PDEMM1plPNwfaU6PZCZjWRa8+PUrkn1Lmi+QWYlJYZsvOH53T2JWpDrlsN/RPvXcXF5w/fKaxaLkLz/8V4IYUHJBkVbcbBZ8/U/fUG3XoFrqoWFsWpblNePgmUbL0FnGUEfxp03M/041//xP/8SHd1+RlxovLPUpLgzyYqYbe46nnpcvP+Np/Me4AJA5+ICbDGW2QorocqiPDe8e/4iSK5IkxYeR1GSIyZCVmvVqyeeXt7RfZby+e4frvmM+DFwLh04de5VSD5JxDOTFlsvLFWVRoZVE6p7D0xPWWi4yRVbGPtHPXv4Njw+PPDy94ccPf+JPf/496+ITpiHHqJTUXDCOM/fvd1xsXvDJq89ZX6Q0bYu3mqIsWSwq3t3t6OwHjm/foSi4vPiE9Srn4WGH0pBlKV07kimBlhPz1HPcN5TFDUHP+BkgpVosKb3DTwNOKIKa6dqGRfGC4KM1OstzvvxiwzRYpHAMfc2Hd28ok0u++uXnLFYbpJR89/YbRGIYDxAmT1JaNssl+zphaGuCcFxfbdkPCYaWqd+xGx5w4gL59T/Q2hrUxPWLin7q+POPP9C7JXO4IKn/f0ALTDPJ6x9eI40nKwOr8nP+/M8/cnWtyTLBotSk+YZxHClET5J6nI/ISyU0UiYokQCKU/s9N9e3JCbn++/esaxumMYn3OjwiafIJW/ffsfD/g1WWnSeUncPrFdXaCWAEetqtFE4HZiSmQA0w4myzAhWMk4jUlr+6he/492H77FuJADaGDbVAmNeMfY9fdew33/g9uUrxnFB33f0fUOefsH15RVPuzuauqOqMoKcWa1f4axhHC2ff3lDkveMQ6CpW6bJslxtMRvL7mmHcz2//d2WL75a8x///SfMQ4qzgT//5R9pa0k/NJRVwrK8YJo7mq5Bmvhhdjp52v5Aoi5xfsJ1I8d6R1Z8AeSYDFATh/k1kzmxP460TU/fHPj09pZfffpv2ZTXFEmFmx1/+su3eNFxavfcPX1AbE4gU5ROUZlCJ
5bv7r9jmebYucXamb674ObqhrcfvmWcD+SVwI4XJMmGdnzieNoxzo7blxXfffsGKQPLZYpJLXlu6boTwQUSnTCJjFVVYq56xnRGhxO//mzD7ZRynS0o8px/OB7pxieeHp7IzZbry3/HV7+84JtvvmXsj4z2iNCeq/Urcj3x+PTEw1Ng99QgdUrXt2gjKKsNh3oiZ2Z7XbEsLng8tigtSLNLYMbRMYwn/uN//7f8cP/645rbB8nL20/RxjPblvr0SL4yvH/nUNJQFDkPd29YLFcgJc4FhvmEEpqxT8F7punA09ORz7+4JUlyUmNItUJ6xYe3J8ZJI7XncBq5f9Oz2JYok8ZsTGgQLJn7FR/eKtqn91zdXFEqT9ce6YKlKj8F+wlD9/fkacZycYNOBt68u2dyLfMokaPg7rFF6G9JjEGpDDcpqvXI8e0TCkFqcsZh5v/4L1+zui4wScL17RqhFUO7R04FxpRkSYXzPetNhVGSVKfc3t4ytJb6dGCaB0wi+eMf/8TnX3xGPww0bct+d8TOgtQsYql305BMls9fvWAYLN4blDHRriMlbRPb6hNTMQtB7yz4QOI9yo887EFq0HIkb2fQISJrxTPsAAQGISJsIXiPlIbNZs04Dngfp9fjCDrxcfgSPP5s0ZtGS55JqlVKkpo45W6O1LWnqWvW25z6FK1cQmqKYo21DcyexWpJXhb86Y8P58lvFEw2gNaSu7snNpcJi+W50DyccefnpjkrFQAAIABJREFUDc2z1UwIxTR6js2Jm188sblU3D2848cfDlxfv+Ti8gV123L/eI9zI3/9N78leINWOd5plFpRLX5kUaYkSYlzK47tid/86q/Zri6xU6A5dahzTcX+8IjXHVcvbzm2A2Wx5eb6U6pyxe//8H+xWlbs6po0zdhebLDzQG5GvvzNbxBK8cObbxh6z29//W+wn2uG3pGvBH/69g+8/xC316vVV/z5T9+Sl5637zuUSri5+QS/HBBXP5yhLRPH3/+f/J36HxDBMI2Wvpsp8oq67qjrjq6bmccSAZwOA9M4nKsyAt49b23ixjJAtKYEzhtMQFjAR3vgGYKCl8+uwJ9Z6QL+fG3IMwpc+Z+6v+CM0n8WEVIilUAL83Gf9owzh0hKm6fAPIK1Z5FEzIRJqWLG7Zx3SpKELI8lymmSIbWBM5mQM/beOR9x49bhrGeeZrq5x9tzYbB8hjdEbPrHfiutkVJgzhZGpSVSFvG1kOHsfAh46/Biihjrs13wmXwopcQHSLMMpCdmHWOmjvPrHs7iThtNXdfk5ZIQBHZ2sU9KyfNy6Zy/U+pMCxUf81TPFMTn7Zt1nmkeGQ9HtNbRIq8VQkrcM+WTSHz82FsmJSLA8XDi/sMdh92Ouq4heJIkQSlFCOJjZUD82RXKFKxWL/j62567xzvSMuPmxTXt7lve1jUXr2758vZ3fGV/zT/+/n/HzhPCJrhOcX/6ltdv77m6LGJp9LZCUPHly5yum+h6y/HYcnXxOddXI3W3Z/f+Nc3xwPvdxLJ8SVks2C4XDO3AYdfhvMGYhH3zzzzubrjc/DWJUiQi4PuR29tfU/5iyeBHHus7vsz/hvrYRRGfKlCWD/d3jC5HkJCmC377bz7h+9cS5yBLHculYB7P9ttgGfqJ7/7yDjsayiKST/2kuHpxeb5/Ovb1G77/y4/8p//wH9g9Hbm/f6Tr4kbo1eozLq4rVBrop4Y//OEHvvj8EzIj0H6EceLicsE31qHLksVqTeodzWnHZWVYlRqTSVZXl4S5Zl2uudlu+eWnS3h6y2QUD01NPzm2qwtef/0928sXlFXC5WXJ+mrNN9+8pxuPjBNo4/jlv/uM09OOehwI3lOuStqxBjOet26e929/IMkMftKgDOWy4N37H7m63JDnhuACb39s0Trl1e2vCXiO9Tucn3g6tkz7QHj7I/8s/wHckbqvKbRjncFUCt50F6w3S1bLjGn2zKMkUVvs7BmHiaGL7gYt1yyKC1aLgr6/4+37HWVpyEXCNJ749Ze/ARl4fHykHgp+99d/zX53ZB40Hs/j4R4fvuBv/+1/x+PjE0Nv8bPhs9tbpiHh/dMTWbHk3//tf+Kb7/83NsWS06mmbRt++5sv+fbNdzw+3eHnGTtanh5aDmNPmSZUpaFYSuyhp2k/kGSaqtRcLCu6ruVwaEDCYpnRHQcuL1YURUmhL5DXju2nn/Hj3Wvujh8wumBiRNY122KBTjMGLfj+x79wffMJi40iiImmfYdhy2dXOZIZ4Szv72Zev32NySaUcYyD4pMvP+GXv3aMowOvUfO/LJ/+VYgrUCQpsX1aV8hUkhhI9ZJMS0QSV4Bp4kmTBUpekKQ5XTuBr9FGkpqEvrMsq1vSZMM8z9T110hdEuyKrutpjntmO+CxSJliyNBW0HRP/NWvPkeQUjcn3t99YNfMMY9V7/FzYGGiJUqKnGpRUJUlbT/EHp8Azs1Mc82LT9d0reBwaOiHI8tlxh/+8Ge2mxuEVIyTRieep/2BtuuwfkAOM1aMPD1lZ9ymQogT/9P/+L/yy1/9gqpaYB00zXy2bJQxWyI89dEzJQfSZI1OS+pGIJNAudiQJoJ5CnSNwcn3qCRFmRJYwDDg6WNGxs9kiwQnIiGq73qa9shkZ8q1RMos+uEN3N83fLa5ZOpXNLuJU/2GrEz58zdveXf3llP7yMWUslyuUOYcENaGru6ReUFZVJSLTZxiTwOIPvZQdIJ1tWC2NVkuEaqi7QoeHu/pxgPBx26fapHw/v07svwc0HY9WuQ0uwdW6w2fXLwgKa+pR4febtifTvzw5j1P9kC1LkmWmuB67p7ecrJTpDyaDOMFFs04xZLLNC1ZVBf0Tx6hHHd3J7TRrDdLhmlmOs1kSUvfBV6/fc2nX1yBqOLU2Vgulimnp4Zq5VlsCxbJLevFgrprYxmgHQlJT5WsSXYNQ99Tt4FcXzD2Fu8HEIpELWH2bBcbTCawvucoBuZe4+cJUodKFFM7sFmloCrGaWR/3McPHyeQPj2TuQLN+IZF9oLm1PLD6we+mK5YV0ts8NR9y337AxdTh5ATp3Zg1xxZX3hQFuUrumbidNwxDj1pURC8AiGRRtN1M/gKrRVGaazqWS4ThJf07cA0ndjcVmxWP/VA+alHyYmXL24ZupHm2NF3MQ+FHEjSQFEkBBzDdIyFwWEAOaKNR89xOBNQJCYS1LztWSyXfPHlK97fv0XKv5AVkBaW+jSAlHTNQGpSMmMwAryr0VpT5gUX5QXlVIDvok6RIeYseMZlg0QRnEImERpgiRZN5yXSP9vznnuG4lQ+SSQ68dw/PHJ98RmLxRpjNGlSMM8dsxsJwSJCiAdildJPHSqRpFnBy5eXvHnz8NHeRIhY9mgN7M+2IcU8+fO2Ap5P7HaOGHRZjlz98i1pusGPBmdrbGixbuJ683cc6g8osWd2Ne/f31NWK+rTA0qkXF7ecPtywe7pjqooyfIcgWQYZt63d7EwFvjxm/+b7cUN6gwLGPoOO1kEgdPxSN86ynxFlmm0chjjkdIzjSVZsqBvIfiRXGy5
vL0mTZZ477B9z4+vd2xWK9SmIATJPHly8wV5eoxh7aCZ+px3d9/HjGdwzNZS5p+irx7Z7d8ymppkq9h+8ivkH19iEsViEfthlssNTdMQHIgQqx2OhwMBhXWOYRiYpsDYWQQaH0TMEc0RH85HrLpCS/9RNMmzyBBne1ksg362avIxLwXn7YyUz0qK6MtzcSF0flRAIM6dVxHMIGMX4LN7TvhofxIj3k+4EaYR2gYEOkJlhALh0YlBKRPtPcYgtUZJRWoMRZ7Ga81zxlVH2+FsbaSQuVij4Pv+TOkTP7M0inO3pPoJMa8ilj2WKZ/hGiJ8/D0REopFhVSGn3D1nIcc5z9ny6GzFq1EJGGm0ZaG+Okli9vmOBw9U+PPG1wfs1RJSpqmpGlCkkjyIv2p+ypEUScIMVOjDF4ppnGkaxqGvqPvGg77J4Z+wFqHCDGH6azlWQmH89cCUFY5m+0SO1qK7AtMPuN1z0TH9dVXFPKCkKYMsyILAlHN8d4TIDWaq5c3/PEP7zgeHVlZkOULXJfQHqJwC2Fgf3piXW4o0pwiszjnKLMlP969gTBCUMzTxJvvH8kXmixdEkJJxhckUkGXELRnUgNje8QnGcfu7fnelpIXF2y3sbjZY0FNpKbgcrtkmGra7i27PTTNDmcFdpYoFI8fGq6vb8izF1QbQ5LA4+MbrJ2YOom1M1mXIjAYnbLdrJG/gONxzzhILrYXXF9f4fzM03DAlYD3dEOLMIYgFWjDYCdOh0f+6duvuby4pihy0IGEiov1iqf7txxOB5LlGmcCF8UNbbfjxzc79m9SVsWKD/ctp2HGKYFUjs9/8QVXV7dY2/G0e8fXf9qRZBVW9Dg7YafAwt3Q2ye8BSMKquWCOYwM80yWGZaLgv3TgTJ5yUSHszOH/Y5qVTH0Du8sdh65u//Al798Qdse6IeJ2XpWtzmH+cipPWJ9IKxWbDbXHFxP7TLmecngSgY90w49GoGUGc477h8eubzYUpZrjKqwvqU5xd9TZxV20LTHHVPnSVLFarWl6zuyvML7DO897VCjTYE0e0wa7YRf/+UPfPnVBfvTA+Pg0WJFBRTpBTeXGonkh+/+xDyN7B5PFHlJtdry9R//CZsJpn6kyFJevtzw5vtHvnr5C25uFuhk5se7H5Dpim7aMQ0joygpk4ym/RahKiClqWeUWJCVknGq6U8Np6bl4S9vub69RSgYbYcXE6k0BCGYnGWyllwa+kONdwXSCIbWoejYbjcI7xnbnuvrkS+//JRvv/2OYRioypz7d08kRuOdBefYVpt/UdX8qxBX8SbaMk0JdlZIAnkZcG5kHCTeC/KyZJo6BClaVlRlQVvvSdMChGUcLdooEhOxyuNosU5ip1hyKYWI3Ua9Ja0Eeb6MExPXo0SCnRQu2HhwdpEG5FxEKQcv0NrQtTNSWGJYf0QpjQsRDTvZiW5oOBxg7gfqpmW2I4tVzmzPOQzhma2jHxoEA10/ELCkmSCIEes7hHd44bH+CCjGaSC1OUrmdENEfatzXiXP16zWmnkeCHLEBcPxeELqOMBTRiOUYJgHllcLkqJgdoJjc8/IHqmX8dPMaYQwkQxoU4Z+ou8HpBTMXezwEF5RFitsP/Ht9z8g/DvwljQf0bPB+y7a9kzG/unI5fYFUgmQMTfi5pmQWYzJKIsN0xTpV1oZgpfUdcOyOuGDx1rHNEfAh/UDKA9orFO07UzfBS4uLqjKAjcBVtEObaTn2RnXNNy3A/WuJRkHQtcT8pirUsrhrKXvBxrrqQqDtxGNLREc6yeGwdMPjmGyKJ0wjgPTaJmtPduuAs5O3N/v0apmGAemwZGp6JOwNmCbjof7R1Q1kZcapUZCyM4WLUHw0R7lrERqYtmx9yQ64dT38aCAJliDxlKtNNZbhslS5AkhzEyjQwaFDoahH6guclQSDzP9mKAzCxjmKU52hZxIVMK6gryQ5IVid3jEBE3T97Rjj5eeIlOMoyEwo4VDKcF2uaKuJUM3gres1gmH44QiJ9USISxNO2F0zJCN04S1c+yksAFrZ/ACN3uEGAheIUkospyhnzEKrAIhHNPcYhKNSQXWemY3g4BpHhnG+NdayzgNeA8QMEZxdbllucrP6PUFVVVR1tU5vO5Q0qFkYLEowLtzFsRRFCkujPgwoUSKNgojBFLEStGPJ6X/5qYFzsVrIW6TRPR2w8efiXMGIZ75AlIFlA44a7AuQHlEZh6blFh7QvQFIjx3DSlciMXE4zyC1Ky3C17/+HAGDsBzAa9zjmGwZKMnSVLGYThjpZ/zIB4hJPMcX0uT9QRXMrlAmlRoBXXdUOYvGMceG3qSTKJkSddONPWRNMnR6haTVEytxFno2p5uaBinAYkh0RGR3g9POLciSUukVLRtg/Mp03yPcwElHEPnWa0MXgqcFYyDpywq/DSeH29J01gsPwwdXdvRtEfa7ki6SDCpZ5omxqlHyQ12jv1rggQvMopshRA25taUolheovKe0B5RZiSvSgbxgZ1/IlutKU2KdQMzJ8qlJzE5kpTd7sCLV45xGplsh7UjYv9XYA3OwTzHjrf2NJ95elFQeysiet+5+DvPWZ+H89tytpsp9RPi/+P26mPp7s8vu58R/n5+EXJ+rHjOfUXLXNy2xq3QcwYqBPB4CI5odY0Cws4ywjPkua/tmTCoY7eVUipCaZ63aefNlZICJRWc6YreS1wAeAZFRNCEn86DCaII1HI+f8/4HGLdlfgopoSImH2cf36GH6/h5+chkbFby4dY+ikEzp1tu88y9VmcBX4mXCOMxjqLtPKjCPT+p+2bkOKc0RJnO19gHmf67kTTnGjbjr7r6fuWru0Q51yeOGexPorCIH76+vm9NkohPEgd2F5cYkUbC0/J8FrggsONI01/woaKgMTagXHoEG5FoRcoUVKmG7aXW7p9Tf24A3m+X3jHbD1100VEvRMYo7jcvMSYWJA8diNudigp4vnGJaTimlwv6JoehEAnCTZZ4r3huD8AsFi9oGsaFtuUMklxduTd/SOgGLoeLzxGaZpjS55k9GGI+PV0iZA7pBBkyRWb1TXXNxWJ1uwedxgmnAs0hyN28NheoY2mzAR5uiQz7lxELpCqpLcth/p4/v0fEQKyvGCaLNM8E6TkeGrJ8+5c/l1SFGtMWtM1Pd46lPMIo3BWxdyjddh5ZnY5wxwrH3QSGOeRTC9RRqN0SlHmvHn/yOaiwIcJF+JnWdOc4iAuKBAaY1KaJg588mRBnpW4hSRPMsa+wVnIywVluWToRoZhYp4cQlmOxz3zbICEROfU9QEbIE0L0gA6RCu+8xllumJVXOJnxyJ3KC9jAbbMMTp2isIFaZqTZo43bx5I0hyl42BEugzljsx2QIQ4ZJnsiJwK8rTAOct+X1OVGUHE3/F5FthQs98bvI/3B0KgmVoIGp2mBOf58PCey5ttzHkbyexn2rGlqlaMbmYaJH2vWFVLysKQG4NSimW2opkDaVri3RxJqXaCEAE/nLfQuYZxnJj7kWmO/WT7zn7slxMh4Gy0XvezjXVMRUGzOzK2I2iNCBoxgdE
Q7ARE+I3JBcIYlpsVpo/5eDdLVKbZLC/IdEpuzL+oa/6ViCuLtTVDr7HTTFVpqsowtEdGJ8mzkiJf03cz+AQjDUoIprknzzPGKWYfNpsNSuY0dYcPDpOt4tR0blivNmRZyX6/JwuGJMmYxxE7O7J0ycP9yBw6xvmEnQV5nqGERaUeSWTin449IfS0fbz5Xd9+ggNGOzHZCFbYP7UQRqZ5xgfPPHtW61uEHPB+ImAZhh5jkpjDENEOYccJshFLzTT1OCf48osvQU1YG9BJSj++QWsJPkHLnKracH2TcTi9w04zY99Stw8oI6mKBUIlBOlp5ycuF5+i0oK2PfDQfUuiRjJTIlWCnxXzaOiGHcH62GQtFEWa0ncDzsdSxLIqkOmSf/qvv2caexZVyq9+8ym7u540m7m5qai6iu9/PLGoLqNuCyNJ6jnpGmMUWmu0SnB6xFpQMofQ0bYdk3vAuw1DP9M0Dd1co4sIswjCIISmnxoEGZvVK7abDXNvGaYWV6ccdjX1u3ta/8DdVJPLicui5Ga5phKGUzchsdFSNcPQ9WhTMrQj4zCiUkk/tDR1xzAEZqtjWW4rkCisHXl6emS1LphGS33YIRCs1wW7x5qbixyCp+8bulPs+DKzYB4tIXtiTCa0KVFeo3yGtSv60aOUJMsUwium0eHqGYcl+Jm5m1kUjiyZ2B9buuZEWWmUmZk7yzQZjFA4C1oqpPSYRLNcrpDZwNQXzL3DDmOEoxRLtBTky4xUX/L2/fccT3vqoWHyE+Wi4ObigqcnyIsRmTiCE6yzNdNwINGWKtdUFzlPu3sIaTzchD6iVouceZwZ+gEIhDBhz9aYRKe4UeCIxbN5smC9vOQYHHaaEcGTJAqPRScapQXDZBm6CYFhnCf6oacfRkKApm2QJOdJcWC9XlEuDXmZYXTKOI9IGW+AIXrpyBLNxWZJqgPHuqHtBsoyxShFXQ+EeYhiRlnkM3JZPOeunrMznoBnHj2UMbfhfcxPIp7R3c92oIB3HiH9OfSvSfI1nauR+jt03pGoCq97svLXyBC3QdYFnB8J9HSup29aFqtV3CI8Z29CPEx6H5hGzzQEkiTBuTbmiESEb3jvkSpeI9MQMINGGIcQE1V5hRQFb97+SFU9cmzeEWTDMs1ZVL9g93QPPk7vjUxJtGa7XdE0NadmT9vXTHPLorpAJQkCS5IrXAhonQKKuj5i1DXj2KCkRSKoT4HNRYEdJc4GZjVzuVV080jTHZl9YFvkPBzesi4uaJoDTb1DKsHkA0yecRzoxxbpFb6fmK1DCo8pJJ988gsOx9cgDEZnKLEA0WMSyGRJmV1wONzzbvyal+vfkK2uceOe3b5ms8wx5ZpATtu/5/MvL9if7pDhSJJA/v5vWBUb2jaCiKzN2d/LWC0gPLOzTL2laS1dN2InEM5EMfBRIT13kPmPXWrPgidixDkTB3+SUyHEjYp41kXPVEPOVtAzeIPnxyl+Jtzkx6QXzyTAc2gmhEj4nP254PicLTxfwCAESufoc1eUNibmErWJtmCjzjCLhGdmCCHgHFjrzt2KPpZHn0kWUp5teSGch0sK7x0hBLQSP9efZ+pg7Px5xsVLKZnsjJ2neLgTAvv/MPcmPZJka3rec86xefAp5pyr6lZV32Y3edlkt9BaSEBrRy20IwUCWukf6A9IW620ESBtpRUFcCUtBBDkQiKBlsCh2ffWHWquyozMGH202c6khXlWFQmiqSUNSKRHeGS4p8OO2fm+732fdzSoMDxOAadjKgan4kcKgVQK9HsyIhir8f00kZv8a9P0ewrXnqbDWhvauqU63HM4bOnagWEwk6cM/yNp8YcX/YnH7gev3XGk5hzSeaRoubo4x+iRw+MDba9xQUsYhVg3sq3XWLvCEwAaa1rqzZJZdoEIA2bZKaerJ7TqmsP9DdakOBESRRnaevrqMIU8S4f1I5dnr/A0tM2erjowK5bkqaGpNbqfJg6RyOjkgSCMyNLZcc/R0jcDURxQFgm319dEs1NWixO8iWnqER+MrNcdWVqQZQuG3jObLfB+TRwllPkZi+WBOBWEKiENz3l2/indQSPHB/zsgPVbfvfFHcoV2C5EmABJwDx7Rph2bLY72m5kWc6JAsfXn7+jPjQ4a4nCqQipqzVaa5I4p8xyqmqHc1PxVZZP8UFKkOYEx4mlxdA1A45wCp8OJN/d9OSFJ0sEQnnG0eCdpWpaskxRljMCqRg6gwyntWeto6kfiIKpWTQ1ASRd3XNyekaWLEmilGhVYvQGPRqMDShnF0SxZBwC+naC+pSzjLfvvqcoTimLU6I45O7hmtnynOV8ReBhrFukjQjFgpPyjLPZkrs3dyzOlkdKaoISCXFsUYH9QYKcZJLtdsOLF6+IIoF3GuVCQg/SS7z2tHVPZ2uMCSmSHGTI4+OeKFoiA4UxAq09eaGmhlyREUcR3jvWTc0wWObZDCUlu82eyxefUK6mSWZdbQnThCxJ6eSGtu0Yh4EPLidP79C2KBlSxifUh3viaIbWLc51jLohUEuMA6EkcarI0pHD2uO1IVCefJmTCjXh40dNrzu0DuhsR9uPLMKI0+UKf30PxhBYi7SSmIBcxei+BakQMkHJkMd9T76cExWK7fqBJJ4xup7V7JRFOaPv1n9lXfMfRHElUNTbHCEEs1nG+dlThPP09VvC0HNxtWS/NQgPYSRRKqQ5xOy2B87OHWEkMCbm8fGe06VA6xbrRpCCKEo5WTwDF9AcWh4f39F0c9bbz/FqJFukPDn561y/+xqj+2nyoTWDKHj16kPCpMO6iqHteP7yJW/fbWi7mtE7yvFA09esN28JI8tHHz5DmaeM40jdrqm7PZvtwEcfw/4xBSspUkcgFgRRxnwxYu1A1/U83ldYo5DCooRgMXvO6fmequ4ZB03bvGMwPUo5nJWkqWMZn/D2+kBZzsBoGEcuTi7Z7DrKcg7Ccn17x/cPX3E9fIdGEESwWkkSuQJCRmPo+obqoGkqg5JryjzlZHmGFznevJ5Goc5zd//I5ekpH358gdUDSnpmi4h31/fEKibLc1bnGb//izPGQR59CxphFc8urrDiQNvesdttJiT8vMCaEEnM1cUVSVzwsO2xdkCFHmFCmnqg7zu80wRqZDYL2Dx0XN+84ebmlmrTQvJIrzyyA2UVaZQRPN7y+3/zZyxnGbYf+OKbbykXSwSGUDmSRHCoBXF6h/QWlGIcy6lDFc5hHDC2RTcN5WKG7nN2e8t6c89sXjAOkqH3U/jn2FHvGtI4J449Kuz44PkJLz5c8MtfvWF91xEuI/LSMs9CDm3P2GpW+TN23fWE5M4mWt52U5OEGTI+GsJNSyhjuubAMOyxrsFRUOZPkGoP1uKFo5zNiJKcbpgycLTvWEWX9Ls9ygnmacH56SUfffiMh4cHqn2FsYbnlx+i6ejXFWhDnjiM3LE8K9lXPdvNmnaTcMgrBteA9yRhxDj0/OJvrdg/jFS7Ldo54uJ4gxsN3kvm8wXa2SnP9girUEKQZydIZUnihDJPyNICZ8yRWDkjiCzffvc1bdtirUFKQZ
oW7OsO795jtaciNg4KrNMY0/L1119OsIV4SRwNhMGex4c91k5hxtIroOFweCAOI1bzJUXi2B/e0lYDbcVEJxQbhqyfNmpiClTEvQ9yDUBOXb/DvmV1MpvyQhD0XYOKwekJ0yyOxnfrRvJEEoXvN8HwtflHPAkUpQyo2i337wx/9Mc5QkQ0rabuNizPTzk8fsN+s8ccEj7N/z5RHP6QbySOciMpJU0zFZwT8VH8UIBNkw05kfikJs0Ui/mKww7KZU3dvUUPMSfnOTd3r1GJp+8d797tyMOGZ1cf0tQloQpY5CckWUzTN6BChEoRAXz51S2BOiVLFxR5wbm1PNxolE8II0s/HGidwfoEFRYoFZNkHV9/9Q1xWBLHU67WN9/ecnL2itFrDtWeutoTJj2L4gnIEhVIrp48oxsfeHjc0g8t3g+Y4ZHl/AqparRu2FZf8unv/Se07YSVVhRIaenGO6TssS6iqhoEEednpzhXs9+DUjnnTzL22+rYdY44OX3K+iFgcxgI05A0O6Fe/g4fKt7V39DLiicvnnN6tWBxeoJILC5skWgQM25v19jNkuzwMdvNDgjp6oGuHRkGS1W1mGGCSkx0PaYOvZw8PwAqOGZXCX5EvjNJwf6NHf0R9DA9N02AnD+ei/7fKNM4UjZ++OdCTbTL9zT/SdI2TYYE4F2LHj3j8L7YmfxnoH7Mh5IKFUxZeGEUEsURQRSQJOE0FVLyx5v+cbrjnWMYp/Bkf/Q19cOI8z+cwFN21w9FyvQ96yxSSOqqxno3NSuNxR4pkFNWVfADDdBZ94OsUUoIVDh9Lj/5vdYZnLZoI9gfNLqr2G63CCF5+uSKT39+gXMlbT3QNeOU87Pdc//Q0w/u6NGa1vdUcE2NjWnJCzwG61pMO/L82SSPE9rTmozrxy1PPzlHBRHCCV4+v+TXn31BIKfHf7v7lM8+uycpF+wPt9w91vTKcbnIefLsYw5VT2c70tQgfcUqiYjCAu8Fr7+7owscrz54zjjOMfobhDniA7njAAAgAElEQVRH949s1w94I3jy0SWPN9/z8mefIGOJ8ZrT5IJ/9fk/I58tmRcrFsU50cuB337+r6gennB68oS/+Ys/5LOv/pyua+l7SSiWfPyzl3z+9S/pW4/Xnl1UE0aTqmY9/jlvb/4Vb68/4V/8+Rv+7t/9zzk5s+yrb6nWEeVC4HyBdpZe73j3fc2rj08JA39c3xKkxtuBAIP3mm++eMO8fMZ8pQgDQbvt+fjZC8ZgZL1e83j/gJKn+FBAHDL6Ad3dT4WZT0miBO0dD81b7tuRj5clwiikiVieLICI3XrL7lEQxYpPPj7ni88fuXj6Ai8833/7Jc/Oz5gvBOOg6DtJXVUUWUJZhORZTBzmxFHA96/3BFGCcIKmrXnc1ZyfLZGDwpuEMr9iV+3I8hRtazZ3j1gjOStLIjVlQraD4DQ+4Y8+forRO/b1HY/jwGX+ghJF21RUzRapcsqipG0GHvw981PHYjEnSVJiJcENNN072sby8vlLnJfcrx9Jo4hsZjkcdvS9J01jdAeXF1fs9zvW6y1ChsSpZ+g7VDrd65t+zcPtHdEyZZYuiEXHF7/6kvlJQL0/UB9qPnz5KUkw0oYBehzoXU3dXfD8fInpoa9HmvrAbv2Wxk1E1CQM6TqmzDwpGHrD/tDA6Qyjc6Qf8MJgVcqTiwLrD9zcveN2veHVy4/44vVXGJFxIRRL/QqinFkGZBLCkDxZkoQF5I7OOIZBcBnnPFYV26YFL0jlBSfznHeb79jv3tDUAV37k+vZv+P4D6K4MnpkOZOEQYqSEab1lGXMYj4nCARpmrN9POA1xJkiih2P2xu8shgXUJQF5Vyx3r7l9c13ZPOnyCDjsNtwucz56rc37LY3DHqNmo+shjPGQaO9pekMu9svWJwoojikNYZ9NfInf/tD2lZj22FCoZqI8/lz3l3f0rdbRGdp8pCbm7cUpWaWF/Qb6O0b8rzkydNzwuCC7779Dl23ZMkkydDWs612nCxDcBZrPe64AVgWM4JAoEfLdrNjtowQoiUqNHmpub6tuShekIY53kju7h7JyhBRrYjVglkZQKQQ6o7DusH6EREanp8+o+9bNB4/RNh3JW82MF8anOnRQ0eA5Lw4ZTa7Igw0xtfU1lEUl4Shx5oeM7S0/QMgcGKSLg1jw/IiQo+KuulYbyqe+jOMtoRhhrGCu8fvuDx5Rlqe8/TZJavVJbr33N69ZrffokJFPlvx9bcPOA9BIUiCHGUyOGzYNTXGGazwVIcSJRXff/sVRZ7y4YcvCJJLvvz6LWESEYcZXTvRBYfe0yWCMMn44NWnJOETRt1xqB+5vbmmSDPE+IQ0j3BRx+391yyiD2EwhNLj4pHr6wZsO8mj2hZnBN9+vSWLkmNmzOSnGWPJvronGSBJFbN0xVfXbwkLRaFicJLnz55T5K/omq+pqu8ZB0EyKzH6QNP0bLsDdV+Tr1Lq2uF8yMnZgovzE3aPFbMsZp47jBgw9UCcZMggQBhF4FqeXJzQasO2OvC4aXD+wHJV0PYOKSOev/qQ+8MaGfZkBTRdwLubNYHqOFQdMhKERUQrBjZ39+ihJY4Ff+M/OiOM4LuvrrHGkeY5725aIjFjPhtQqmfX9Hjf8uL8OXm5REQh2/UDnQ3ZHirsaIhMSFYU2GEkSRMCH/DQbLHdwPOLC6zuGUbNbHlB3wr6Xh6ld5aqugeYcuaCFOdnBEFEozsGM6Ktpu001/X32P6WNMpZznKcs1jn2VQ1N9uIOAnpmzXlMqMbYLce2e63JGmPzArwAdZrAhUQSo1B4H60sjBFGzqEEiyWKy4u5tQHS9cahAqOG8If6WvgMaMhnEOQKLTc8XXzv7GuWvp+QVlkpLFEhJovvnrL6dklZZlRiphuv+bxzZq+c8TM2PfvUBK0ngABUk3+mSAQjP2A0RpxRGy/fw/AVOT5ARXELJcrPv74Cf/4H/8fhPHZhFJ3HZ//7jOCoCRKY4SIiAJD3d7y4tnPuL1/x25/y6aquDx/htEN1aGn63vCdODl8w9Jk5JxrHi9fUvXVzRDS3d3TxBMmTpGtyxXqynrrN/w7m3F+UUIVOyrlub2wGy2ZFZcsCpOsL3nN7/5JS+efsKt/B1ZnlEWKQ/r18hE8OTJE/a7LTc3r3n58or1+h1hkJLnGc5p/vyf/+9kWcowOpx74OryCaZ3FOkp3iuabuThseUP/+DvUPf31PUaP0AanKJjSVhM4b5tv6OYn+JUhnMDbtA8eVogRYHKQrq+QmIpFwmPjw9YN+BkRzveEsszkrwkOq3h9M95/geKk+wKIbd0esPjpsFSoN79gt29Y7ffEwYxys/YVwe6bsQaz9BqBj2Cn/Dq2mkk4TH4dsoIkky5fErKaeKKm3xVXh1rFAfvQ5P9e7nosXDy/vjcVCz5HyavP5moCUn4fhoqPFaKo/jQTfO2Y2ivHivG4f35r6b3dCQkIiRKxYShJIwDwjAgDCJUOEmCRBQBk6fx0Tmq+oC19uhKY/IR+uP/QUikMrTdjqra4hyoQE6SQqYJVRCEq
GAKLUb+WAQiFVHcI1BH2qNAm55xGBnHKfNPDwYzdigZcHo25/KZoCwDovAKPe/ou5rq0PHhRx/wz//5l9zeTROJIAin6Z8/TtnE+6aMIghjwpkj+P1/zZfX37PUz1nMZpRXOedLzW/+9Zc4HRNHMeVCcTpbEShF3SmqoeT3/mDJY7XnxbNf0Iw1b9av+fX3BZ/83hPOZpKudazXAZ0VrJ6sSOMQbwznlz1Zarm5uWcYLV6uCOKWoWp5dXXGfL7i9OqMyvaMFlzvEMazNxW2syyyGdJrbt59ThoW/I1P/pBf/vIrvvjtNzz94BKDIwtXqDDAmTu+/vWWqxe/R29qPCNSeW7f3LM8/5gnT1+ShRHV5p7nfzjwevNLXm8Cur5H5Yp4WbLfHhDScnlakmcjnT4QJDCPA4ZhixwCFssUVcwobMHHyxMObiBROV5Jej9iCBFjSBmPCLvh26//nD/9k79DkzzS9zVd33Bo39F1IeFZTBiF5GPLs9ORJH5GkqaowDGYNUVygY5HqnrHrupYDc/5az8/J4ih7lpWZ6eQerQvEVJS5IKimLPd7ch4SuQUbjyw28fMZzFeTCRqMFyuTsmkIkwD0jAmTCz7w4xmB1Gcc3n6hFksyYuc/f5A12uyNKdtb7HjChVAlK749GdLLldP6Jt7Dtuau7s13egokgsWsxlpkiK8wJZrtndrqjAkjWPm83N2+9/wF78eScKC5TxFNhnVICEJSXOJsnD+JEZFnrpWJEHGi6sTfvPVL4mLBQbHrnpk7Dyf/PxjNg9b1m3DqxcfoUeIlWEWP4EzgZxb3r2+J1RXnJ1HJIVg7AX7asqHlKFk9fEJn739f4kSyziG6L7k7OIDjO1Yb9cMPYRqSTk/w5cWYSPsONIetmy0YX4RkUYrzpKc589e0Vd3HKoD1eO3/MXhkT/84CNGZ9DawTBSpp7dYcTsIqIwokgDYrHGCokzPbEUzOKYrmnwRGyaA8PYQ/tvWQX+reM/iOJKCEUUlSRxShAE6HHLu5sezzhNpR4q6s4wKxRpAZ4paHa+lPRDjTGOJE7Ji4Bqe8LT0w8wRvObt6/ZqYJ4nrJIzpDBgqtXJ4zrALm5oR1qjBvxfs/NdUWWzQmDjNX8gkDFCNEeM3QC0Iovvv5LZrNy2qB1NfiQy4tLhLAIJMM44tXI6Ab2dU8YpBTzFW3TkhWOOAxIfEEkZxijadoG60biOODi6mTqehmPkIo0mzTgUZozOsmgPauTM7KoQNcCM1jC0NPULSItkFmEU4bN+po0l/hQgQ1BGlQYUwQpj7sNdXdAqA6nMgbniZOMMj8HC/PyjHHYU3c1g64wVqMTx9A78Ip5ORW7w9AQhJYodCAd8/kVTVsThIbMZwgRkaQhxlg8ESerF1w9fcXQO+yQ0O4Mxk35SJaaphl53EVoJ9lVDW7doQJHMcsIRECWpFOWjJfsN4+Uecp8dsJidsqseMahPoCb8r70oGmqkUDB4/0Dh/2BKE6YJTNqe8toa7SZ6H5BEE6G2iFAqYgyX2GOmG6rJxP6yXJOksRT9lYQMlumtJUiUOD9OHk6gL4bqAOH8DlKFPz6l9e4tCENPHESoIYYRMahctSDRvsB5aBvS2QkSUVA4CGWC7R1FFFEFKecnS549uSSazMhn8MogjDi5uEdcZGSpgWJTDncvOH+YY9MIIwsy2XEYTcSSk8SBxA6vvj+S/b9mpdXGWkWsYwUf/PnZzy7EoheojtDPfTc1J7WJiRBSJEFzNKMEUOcZoyjx4mMvJSYfmQ0HQQDRekI/RwRSLQYsXakqls606GNBm/RfpLiGSNpek8YGRANqQqp6gGtx0k73RxIMkXdaMZu8jwORpMkCSotCMOEUY+sN1vyMiOO5oS+ZH/Y0mmQaCI/oG1Ita+n7BoGRt9g+4gsFzgbY0yFE3uePjuh2u+wqgNnkWJOU3cY68BNmy8hHJOX5D31CvIsY7GYUx8OWOsIVIhBo+T7xryfphBCkKURJr5hI77ESMvoOvaVom0HwhCePC25e3yHQ6D1Kd6NJHnEoENkGJLPc9bNv4DwCqED8AFKepx5v0F2OMcPBLr3srP3h/eemhvuunvSa4iSGClSuq6iaTck2SRFM+MwTf2xtMnA51/+FmcDhM95/f0d8/Kc1eISJQ8E7RaUYLt5YOgHtDHUTcP9wy1JknOyKggjT9PW7B4t8/IcQ83+cM/j45b58iXjWGOtJslSVidL3j6+RVHQtz1JFuLEASkvaJoBbRvy2Zz68Ig3PX1nwUc8rm/p+x0qU+ASzKjQZqSYLRFNS9N0E743V+x3Ld1osHJg9UHHb9/9Mx4eapzVPL8qWaSfQBsjVUgYphQq5dC8xTIigwQVzBj0iB72PKyvGXXN6eqc0Wq0P+AYj+dKjkcdDfoRKjQ8rO+QXiDp0abF+AqUwK6+REUd2VnDsrzCu5rkLsN02RRY2QusFRhjMM7imUzYbTuijZnuUeMUmuz0FHyNEHj0EcByRKjDUSIn32v3pgJL/lhsTcCHqYgRR1Km81O0sPOTv0mII37+GD/yXunocEc/1tFz5ae8th88X+L9hE4y6mmiOmWB/TjpmaZOknGcZMXyOKlz3k1oc/Fe0DgVmkJBUYZkWUhWCOqqx5uEJI0oZ1NIeVtrZDBlNQ6jRvmU3X2Ls5Pc17nJe2mNnciAzuKtRQYK7yXj4DnsNIuZpR1HtDY4K5nPVjg/RT3ww+f4033N+ynjj88ZrTlstpzMz3HDSL9riOyMtDjn2UWOdQaPxUnLKlsQqIB9XfPu/pFnT16iwiVBHBHaltAJohOJsy1xEmCiiG6Ai/MT9vuavVNIJGEEd/c74jQHFSBDRRh6VtEcrGAYe96+fUfvB2T0QJZkZEnK0NU8ufw5SSiJI0mURITBCl3f8vb15/z6y3tslrDMRvJThUqmoOVhP/LNm+8JM4vyHteBH3PGakeNxGY5PvNEg6A93DJqj7aOF0+WHHZrqt0ebzWmDRDRAqc6+mGPtjVBLJDqGLDrJDKYmnBPn3+KcSNaN8xnCpFodDPSDQPaKE5OVuz2N3gPRnuMsSiZkJYFWRYSx5I0vuLt9T16nBQGcRJwevqM198/cn55wWwe03Q36G5H12dkQUaa5lxeRHz33ffIRczpqiQOQ+5e13gZM5qGoRno9YGQBZenM+4f1lS7mvlygRkjegNVM2WQvfrgA6RvuH63JYxzzk7PMXVDtTOEUYwoBVVVEQRXNP0BIeRUEOQDdw/f0rcVoxk4WUxerZNVglUt9TgydgHNsJ+iWmSIkNB0A+VqRRBbAiAKAzAlUewhFYxuZN/smfclfjAc9lv2+z370pKVc8I0BW8Y2gY9elSQU5QOPRiaduDs7JJQ9HR1S1XXNA8Nm0pzee7xSrPZdtxc33J1uiLNUtIsZuwTFuUF/dATJYokjtjvb4EEXEQUSsJQst6+BS8p05wkjsiCOXffvUVsShKVMDsv0W1HHM6Jwxa8QSCpzchQD4z9tG67WHGoW0Q4o8wSbCDZPba83lkWswKVhAyuYbYo0HWIHDNsFDOb
ZX9lXfPvLa6EEAnwfwPx8ef/off+vxVCfAD8A+AE+JfAf+W9H4UQMfC/An8LWAN/z3v/3V/1GlIq4niGR9ANPdbUtF2HkA5tHYNxdKMh0jmi8VgjqeoDl4uUpuoYrMWMljIQOBdgR40UlnyW0o+epAwJijlholisLnncH8iLAqSjawVWVHjjwECURJTlHO8EaRoyaosZDQSWQ7vnrLigTGKiCHAJcREzDAbda0bTIaVm0JZuHBAiIU1TdvUOEYUIkRGgSMKcQ7c5+rJG0jKgTEv6tpsuxOLYYRPBBC8YJP2gyYoUYwRtN2C1IytCqmoLRHhlUZ5Jqx2EQIYM5KRlH0Aq9d7mTBQ69DjgnEKIAKlSnNPMF0vu73cMWjNqT98dJhKbixF+2mxY5/Cuw0uD84a2E0SBmvTdwhEEyREn7dDGgJcs5heU8wVdu6Fre7R2tN0ew0RuMwYO+554FgGTCRzrMZ1EqYQ08EcKlOMgWqQUFMWSNCmpq4HDXmNNiPfm6N+bvG5pEjL20Dc94TxCqoBuOGBcS6A8WhvCsKXrLIEKyPI5VS2mDBYxBd8VeXKEWkxa3ySLSFUMjFgbAI44UChn6doRSYzCY4aGJBjJkhAvQrQJCIOUtpd4L1GRIi2iqbOThSQuwClP0Eg0FWk5mWeTJEbbhrSAOIqI4ph2EJSLnCANSOOYWKRUYvIRGjqcN4SBw9qJ1FRkCSr2fHtzgw3fB3haogCeP5uxmjecJzPUKLm53VF1jjoNgJgkjNFjSC8tQZLS6Z6mbihnKdvHAW001mvCEAKjGHTP0Gis97TdQDd2hOG0kbLa0Y+WfvAwjATKkUQSF8F632CtxrgRs3lkMB1gUGIKOfVhSqhS5rNzkixje7hnITxBmOJ9wDha9ACBTIkjNeHrW/OjtEgZhOrxzhBFJygm+Y0UjiJNUU5S5BXonGh4QcA0MfvB+C+YHovJ+yKAMJykTsaBdYJAKawdpw3lcXDkBQSBIE4UbbSn8d9jhEaFU7fcOo2TkqqDph0JDxHWG6wxLHyGYZI/Db5Fx2ui9Cl6kDjz3s/xU0/OEV7wbx3vwReH4Y67fU/6EEz0TQ1d19N2DXESYbSg7SqsNSgl0aPgzeY78uSUYTA8PK5p2hZBhrXjtCn1UB1a4tQAk7Sqqnd4B26ZTWAO47FWTOeRtjRNTxhB3bQMQ08cR5RZgXYd+/qA7g84DSoIGXVH1bQMY4dxPUEs6fo9ZmQKIE9Tdrt3xNEE9nDST3IoPFqPeMYjxMbhJPSDZrQOlUvKc8tN9QWVs0gf4WNwakLh++NUIwjB0UxI/wBQiq7raWpN2+5BaoIgoGkqvJpk21JJGAKEGhHSgBA4pxnHgbrb4+3k15FhRN+PON5ikhaVDPjc48yK+MSSDPHkqdOSNCkZqgjTB0Rxghlgu9syDFPWk+4tUiWMxyLXes84vg8n9kcfkjtW/e9peOIoL33v4TuuFX4Mx30vaPNHQMbx1/3ggRL+J3lgYqJUimNxJ7xHqgDnJkOSEEdpHg5rj02K9/CX90WTEEdi33HJ/RToId7fwY7vwU/SxNk84+w8I4wMWRFgx5g8SyjnIYd6QxRNZF0PaC3pDpq2qqf7lJA4b45r56fwGkukEqyFYbBs1z0vX83pmm7yhQWT1NHqyXDv3hebPxw/Pn5PDAzDYKLu9pY8ziDwSKcwLcSzlNNZQmcPdKamHT1hHP8QEu2cYLNuKJZLRjtgjCMSES4QGD0yCI82njBKJpqZFwgRIYSiae7oGjnJ0YRHG01ZFhRhTt+21G1Dfdghoxl5JAgjSRDDYd/x7NkvGIcDQSCZlUt643n7/QYvFGfnl7x89RFud4PRFi8tYZywOj3jq7ffEUsmcl2nCP0M2x7YW0NvC1YXS6zuGf1U3E45T3P82GLG9jhBlOQzhdMxo+4YTcugR2R8jiBGKIkRms5LsjLHmgSjA5yTbMYHtBsxziJEQJrM6YYt+BC8IFAxo09YLmdTk9hZFBHz4hSPRCBRIiIMMrR9JAhSwggsG+r9jrHzhHFGEoWEkUWqgbjYk8480iVsDy3zZYnzA9q09G1LEE/5gHGYEAQdzg8MfQ6hoalrrBPgI4q8JE1rkB7nBoaxpTeQByFSCbwcMdbQdA1KxHgXEEU9gz6w2x2IZcgyL7BjM0GfpMFawWHvcL5HOof3GmM9+0NNmE4TM2ElxlvKckFShBBaqv7Aej1wqDfTWvYCpSLquiYIE4QEoy3jOMWIDIMhijIkI9tqTV4uWc1jotQQaIXvFUUxkaRHPVFuVeA5NA94lUFY4mrBYnlBtW+QyhKEhkO1IUmmKbSUAVEU4JylbQ8o5wmLGVEZI4VFEpImGVmiqJpmuk9EOVKOaCRNPxIIhbHThNrpFOsd1rUoa1FjQFtVDMZifQYqRCUKEQq0mdQhcTjRYP+q4//P5GoA/sx7XwshQuCfCSH+T+C/Af4H7/0/EEL8z8B/DfxPx7+33vufCSH+S+C/B/7eX/UCUgnipGS73XD/8I4ii0izlGEYMGbAC81gd7y72x+R3IKu98zqAj1O+ltrRsYhY79r6Ju/ZHlW8rM/+Ijvf9siXAWxxwaSu7dbNo83lEtBYiOGyqOijhdPPpw8WknEfBFhjGF1tqDvDzTtHuFa0nBBb9fEMqSczdHdHBc9oO0UMKzHBqHNZH8yHcZtiEfLdjvgOWGIIBIBeTqZ8KV0qEAQxgqpIoIwwDuLdYZ+MMhQUaYzvJPs9geMiNlXLUPfImVASMGhPzDS0/iEQAWM48h6uyaOc/KkIApT2sYyDC0qkJwslszzki+2a4IkxrSgXYcxLYtPZ+yDiFHFRCLksLtmPr/gZHGJN5MGuNo3E7kJRysMdjSo4AZtGvAKJSEMDUHo8E6RxBlxkjLoln33gEAR6ITbmwdOzgNCOSeJcpJ0SxTC6UmE1xFOh4xdhhTDJEeRAqEEq2WAMZ4oiel1xevXX4KI0FYhpEVKTxLHbO8bPv34I0ZtuL97YBgsVxdLRl0xaov2I7pTJElM19UIEZLPnoDUjLbBY4iCiN54lAKQeCvASU5XM4QYEEfIgfAjQqR8eXONqQ5EgeTqvKQ2PVmwABlxEIY8TZE6pogTxrLk7Okp+3VHnE1yHpcEDCZgfmp48bOCOIm4vxn57Fe/4cnlBenJDCU7rr++4fTkI6xs8Waa7gQi5ec//4jvb95w99CgdUeQlQQio5gVpBk8phWr80uk3dKPDS6wrDcB//JX3/GLjy55ulxiRUJYNiShxegQYwMeNh5TGHwc0W133N3fcvXk99hXAb5XOCPwQtN0DePgwQV4pxitRQ+aVToHC5XuIU3QrqfvKqQ0iLJk0B2dnTZfHs1D88jYt+RxRrlICIRChjlGRzy5ekVezHBe8PKvr3jz5oH7+z111SFczHJWkkQpfTey31Q8fT5HyltCNUFBQDPLZygXEcmUkIL24Hjy9IpidoWoz9Dff0pZpuzWG/TkWT8WVRw3LXIKIsVRNQ1db7BuonLpyXKBlNPGzzlLmkqi2DKEA9iKTuxI8qsJohI
6innM9e1birSk6h/o9Ja+92z7AI9F945D/cjF+ZL5PGc4aHpj8D+ANtxP93LH48fCy7kp4HW7W+MfK06fzKgPEVJ0NO1I3zuUynDO0XZrhJDk4QqjFdvdhge9RY+Gcex5XN/z+e/ekGWKogwQwtO1hjjJiGI10UFDR9sd2O9jjM7BJxSFQvchh4Oi61JevDrjzes3CBGSxAUqSPj29a/IZgt2uwrTCZbFBY1x7A7fokJPFCvEbiBQAtPDrExZncXc3H1Jll5itCMMDCenBfdbwc3tO+JYTJ4+p1g/alQYMC9T4nlMEBpefuwpFnK6FuSebuin4sgJxnFAiS1RHCKUA2Gw1IytpGkOxElMms4Jo4Tbu+9Ic0mUhkgh0ZuOMOkQAWgr6euOokxwUjOMPQrJPL+g3t9jxnGSmwrDff8G3+WU2T2qGBntwBhb8ucfwfcn+HFOnAU8fbai3FmMNpPcRzvK8pzDfkvd1DR9T7W39J1hGOzR6O6wzh+x5EfHkVeg9A/FlBQTPU8Q4rw5BuCGgDv6ndwRdS5/gLQIJI4J9MJ7UJ6bCjiH+skE9ZjnhUOJ46JSTNOrn5y3771R05dHYaJ8fz4fp2x4pAywxlIUGadnS5q25nJRToVHmBBHCU1XcXaRYoyZCGNpxO8+u8e66bOQaqoz3fG1xBTkBUwNTmcN4zDhpKP4JYf9A2mSkaYJVXUgiqMf4DJ4ifXuOIH76VqcSI5FnnC6KmmjjG3d8uz0GZFQjF2L9yOhyhnagX1XMSBoR4Nznvl8ydnJBd/89g2/v1qx2zeMw0igErYbQ3oS0bUtDsFqteLd9Td88skfsFieoceRf/p//QWL5CkBMIwdTVdxfrYkyy+xPCBsjTEHlosnrMoVaQ5e9DR9z+nZKbd3LaMRDCbndvNr/uJ3/4RnH13yZ5/8Z/zxf/yn/D//6J/w5fXnjE3FYpXz6V//Y/aHG0Y5SQKT0iG3OaM5cPBbgqDnnHPa/YGwnJGlMZHS7PeO5SygCaAbPaM3RPaecZyhkpBAJdzevCXOT5nnM6QY6HWLDSN6s+H05AQpSvb7gfUXO9I0IUgkgVNYnRGlO7q2IwlLynxJUxlWy5LD/pHqUGN0wMc/+2s8bK5RKiGIEuq6Y74s6Ht9pAgnbLc9fkyYzQUy1BwON1w+7zh/ukb5Bw6bnI6Sk3RGEEiED7AyIrAZboTzi2cEWc797g1uiIjDET321JXj3XVNEBgUlrZruGm3KDEi1Yq2HRFyihHYN+9o2knaF8eSfhyRouVxc8s8mvMkP/8kYT0AACAASURBVOPqNOXr24b5aYJQnt1hT5auCEzL2Df0neXxoUYlMWmYIr1AiZ4nT0tm8xVOjrCXhPKOzfaatHhKObtkNYvY3H6JdCPGG4wzGD8F3O+2G06XZ6ggZFt/Qf+t5Bd/7VOysiBKA6JuTp7GrB/esdm3OB3wN/7oQ3732S9phwqroNERZxdnRHFMV9c03cQjiKIAL6bcxihMKMoldf05zb5H2oDlLGSeF5ycnBybly1t3+OdpEhPCH3L1jQMreP8cgEYKmsJmHF6umDTPeKto+8kzmjOTgICoQlCRXl2yXbzwM3DniiPiRLJ4/burypr/v3FlZ+ucvXxy/D4xwN/Bvz94/f/F+C/Yyqu/ovjY4B/CPyPQgjh/10t1eMhpON++zlNUyGihl4vef7Ris2joGslQoW0zSOzRQhyktKFBr75bs1J+QyjLf2w5er5ClTDLL3iPHvKRfAKkfySw/1bTK4xsaBaOy5evmA9PBLkgo9OT3ly9Z/StjV12zKMA84NdO4rsvJjgljhREDf90ixIk9W5ElOlpRUrPnm7negHAKFVZ6x7lk/WGQYUs4zlvk5FY/oHlygScqpa/T5X3xGkgeUixTnoO5qHu8eEWIkCCa99vYwssxfoI3G656TxTlizMFt0NbQVpLV6Tnvbu45fL9mrAeSIOHpz3JCEdANktEIlmfP+Pb1b5iXJbP8hDBc8OqjJY/7rzg0O8woKYsZhgDjAhAJSRbz6llKYGe0jWYYHxHBFm234ArK/IyyWGBdz5s3b9Ha49wIbIkTyaxcUJRzgjDg+u5LHn5zTZaeksYLQgXImJvXjjg2xIlkUc55XO+I4gI9KIxWCAlBNk7dLBOACen76e79y1/9GiU9V5dz0rjk7n5PHCmKvKRIL7g8v8RbzXZzy2b/BiUXHKo5us+YFwuevyr59V9+TqDmEBqGceT6zQ1JnmH9yKBHjPYIlyAGR5Ek9J1ne/dAk3XMUkU5L5FhwO5xzcevzghPrwiDhFyWNIcGQUrXDQhnED5Cq4rvNp+hMcxnOcoJTs8U12824BxlHlHOQZsN9TZkDBNMW/Ds8kPmK4/1NX3XM18pdnfXLE7mZHlMeRrx0Z/8KXe3W9IwYLWYs68FXa85v5rTVBvWDzs+/vCKJ+cX7NYl/dgyiop/+pdf0vc51/cNgWgxvUcmEyTj5HTObC7Z7GvGbcfDuiIUkg+ff0C1n8AUZXbO0Lbc3b4jyQr0aBi0xTlPWCbUQ0c7eKRQ2CBi3dQYUxMXEUIEPFQHyvScofZ4DA6NA5Qv+PDlK+alwpkDox24vHhOUx94d7PBjYpf/+pbNts7vBiZLUPOTs/panB2xLuB5VLy8tUJQSAZakm7iQmDlOzKMTQDaZASzWMsFV01ontL6nrK3BIlE9JeiOmyJYXCuOnC/p675r2n6yzayONmzxMG6kgeUwgPfWeZzSVejDhvCVRIWV5w/7qhKGLC1DO6A9ksp9kN6METhTFOBGxu74mURbqQXBU8WYW4VcLhTjO0E6r/ff7WTydY4gesNbyvuoSQ9L2hax3WRYzecr+7JlCKIJ5RNZYwMvS9YOhHtrsHwuiWk+VTmmogCCRZkfPZb/8lP//kjyiLE6xxXF9/zmwxnzrkdUtVH8iSp+y3O25vdiyWhlcfXPHV726w9jWbTcvQC148e4IzO169eknXdXz2l99SzJbcbQ7gPOfLE/7g9z7hl7/9S5IMkllKGEc0h5EXVy8R/H+UvVmPZVl6nvesPY9njDkip6rMru6qbnaTLVKkIFMiYRmwYPjCsGEDvvCd/oRv/Rd8Q0gXggFDsGHYsglIgCHTFg20RILdreruGjMr55jPtOe91+SLHVldpMRWe99kZsSJcyIy9lpnfd/3vs/rEwYLkHO68iUvqs958PB9knwPbWv2DxZ0wyjTC5OED5/8Dj/6V/+M6TRGONDuCnSfkWS/x0I8o/Nv0U3PZrVlbzFl6GuK9RbHLzg+fcjV1Za2HfA9SPOMw6MUX8zBBvT1lunM54tnL4mClMVsSRj6VIVCDxrHaVF6hzH7zJN7GFOhZE9VCo6Xp5DOuVk9Z3P7Bfv7c85fvUbox0yyE9K5R8vAyd4eL/ovuLCveNVovhU/4qp6S6zfY2l+QJy4XFy/Ics8lvsJS5ujT8Y9WWsHY1zAZ729JfAj+r6nrCq6RjN0Hl0/jMCKQaC1hDunkxBjSOroszWj0kH4eP5Y1tt3QclC4+LdKWENCIPWXytjGWddYw
EyNgPuJHPGchf5Ncp3jb2TBnp/mfj3ronw7n6++5xWLsVOsts2pBNL34ywC9U39E5H4DloORD6KVGUEIYRq5vn35BDOsAIC+FurVv7y9dxPVBqYL1ZcXVeEScB/SCp6h4vGNB9c+cDu0Pq/5UuxzuK4EjutLi+x/1HZ2xf/JhNO2MyyUjvCc5XBR8+PObF289ZXV8zOzykaK7QVlG0DZuu5v0PzqjLL/n5l19gpcPZ/IRJOuCQcnx6ijYDP/vZz/jB9/8z/sbf+AOS2LJaP+WrF8+4vbziaO+MZBJQDzd8/skv+MG3j5CDoGs76uoal4jrq0tm2ZQ8ibjanvMP/8l/y9HeHr6ZUN4E7K5vefT995BK8fTF57y+uOXbT36LhSqolYV4j2f1DScnD+nX5xQ3a9bXFTbXPDjcgy5F2QgnPuP0KMYNOggchB+yF+xhuysGXbKtSwrZEEZHWDqqoqNXPX6iuXcY8fa8RdseL1RUTcfe/oKmr9hVG9abW3a7HWf3HtD1NX3XkicB2DnJLEAbibYNHzz+Np99+nOKokDgs1ycjJJDzPg7vQtqN1pwfX1BGIZMJ3MePnR4/vw5q42PX1sa9YosPOf5V5bNpUb2Od/6zt8ENkTxPo6d4AQeTz95QbR8jLQtXVfTV5r5cYRnS6JEg6tJcoXLnMNlyq5Ysy3XHB2/hzItt6sblDZMZ3tcr5/jcMbR4TGLRcYXn37BbBZxfPiQZZyzSEN++tk5B48e4fs5SlmWE4MRisXCo90qVrcNWm7JZ6egCkI/YzY9RcSGH/3kp4SxwvEHBtuBjpFVQJqHzCYRt28ipnsxjapwrCEMQxy3pCgGJllCHMdk6RLHL+nMDcXNQLFp8aIl+6f7bIqSZOKRz13enr8lS5dIVaGlJM8THpzc51w/JYpc9r09wjDgzeuCbJIQxh4Izfq24OjwFOoKR1mop9zfS7koXrEuazrt8d7j96mud0xTizFzwhKe3Jvy5uqCSbbP4UFOELjUTYHTjzuLH4bE8xQFBKGh9654ev2a69eWhyfHeJFAG4Wjjv66kgb4NT1XQgiXUfr3GPjvgWfA1lqr7h7yBji9+/sp8HrcpKwSQuwYpYO3f93zS6l4ff6aNA3Z35vT1SFFUbLZljRNheNX7B1HrK5rXM8jDCPiyPDh9318M0V1AVqd8PDxFCNvuH/8EA+Xn//Fj5DDliSZk2QxcRJzf5ZReyX76RmtlmyHnk+e/RnWGuaLGXEe0HaSzNvndrNmaBVdq9HWJYoqZukZoRejBkk37Ai8JXgbEBKXmMl0j708o6wL6rbg+uWKSbjHNIsIQkunSl6ef8qD+/cQnguOxsietm4RIsB1DcLpsUazWOwRxR6+ChBNSN0VnD08Yr3d0TQ9LooXr875rQ+/y8HeGXmyT2DmbOstnz79GXW5I/YdpHQQzNntSnbFc6SAi9U502WC0j51A+fXLwnCPyVNFU5s6WSLHQxKDzRNi9GCSX7INIspqx1SFfRSEIUpUZiyuikxFuazKY6w+IHPtrjACs3+wZI4/E3efLVDeZBPDZPU5/l5RSkGgigkinOKoqHqbgj9FNcJaNuakzAj8kEKRdt0FOuCPMv4vR/+HYah4YtnP4HZinQiwGjqemC76vjORyes1zv8wLCcLZG9ZVd/RZaf4Hkur16+xTJweBJyc1NRNFuicMpkmtC0LVr3SD3Q1QOucAj8Dq1GPfwkOcSqkrbvMbqnsh2b3Y7Z2QGe46EbiVE97VDzpuzJ44zfeDDn6Sc/4aqSRHGGby2Xl1e8/94Jsyii7yuGfkuS7OP0J7RrS2W31LUmTY/QfYkXRATxhHlyjInXuIFDpwduqwu6VxvycMbiICDsMupXDY+XOb1a4bsSooBts2L75QWnRx8RhzlD1ZClC+aRwHqWwXT0FAgzpW8qbvWKqt7gBhNuriqEH9BLw8tVRSgs2axB2lErvrd/QpZMeCFXDKZB9R1DUZD6E4qhRAuFcF1MZVjkCet6oJMDcWxp2hK0g+dBGDrM8xzf9wkDB893wY+5fLXmgyd7TDKPfq6QcuDHP73k8HjG8fGSyXTGV1+uePBkOkpUzJhHN+gbLIbZfM7h0T5D5+GEDqFORoy06zDbm3B1NYJaMHb0udgOz3VxnBE6A+7dofEuy8gwTpp1yHhYfEci8zFW4rrindqJOB2DuIc2ppIJ4URw//6C68tLdrsGJ/BI0xRHtLjCQ/aKi+sb4mmKwpJFMX6c8/bNjn1jRvS156KMuCv+vuYCfr2n/lu9rDs5Yzf0rDa3SBUwnecjZrquqMuKOBW43hzDjqpekYsJfW/ppUT2A8PgMUifTz//Ocv5CXGUsyu3bHa3DNIQBD5JNnoLHbfkvfefMJullOUViIFBXbPcn+G5E7bbaw4P9tjbT7HEJNkoVby4fMmDszOOlkd4IubkJGUQhiTPiZKcPBijDcIwwxUjme7DD79FI11Oj58Q+Blv3j7HehscZ6R3rdYdn3z5LynKFU0bIJyxAEizKRNnzlR8F39YcbN5zfF+RNvUaKsIUoG0LZ9+/nOEDUijGZMkp5Mlb16+YpE/YTY5IohCwviY/aWi7yRDD6dH91gVtxjpI0xP4GuaWpGkCs9LCdwUx1qKrYVu4GjxkMf37/Hy9SX58oqyeouqepzkFBFYvvryGcVwjeNK/CChbTvyfA7Jhp36EzxcvGmAdBRRmrM3P+b65pyuWWPWjwjlEw7274GAQZf4MeTzhNDLcX07etUkdG1LXTX0nWKQmn7QdI3GqDGjUWmD1D3yG7JUIbjzWQUIR9/9/zr4/h0a/k5O+07yLfAw7+5ZIb4uqMRd0PlfbQzwDbT6u9fj6/te0HeapjZMZgHGM+hh1DoK16HvW4IgYVAdzbalKns2q2HMPxJgrOXrwGDxy8LNdUcVidIapTTGwPOvLnj0OEMID3DJshlVsxlX3p2G8V0hxTd+PrB4nofViqapUGXB0dlHuF5GU99y+eIZR3vf5mp1hfVhtj9nMZ+xWu0wemw6DbLgejcqPz749m+iO4m8Kfnek9/h9fU12/WatuswMub1xc/55H/8M5689x0++vA3+Pv/yX/JP/3f/iE3qzVJH5FOfTwv4fL6ksCTpMmEIHyP2/UrfM7ACHo8kvkeJ2fHTOMcjKVc7rha3vLwex9RVWPgu2sCvKjnwf1TNrc37G5e0tycc/TRD9Fxzuyw5tT0BNmEujRErsANXHzZcPBwwecvP6NY1xjlsfJ2RBNBoy35ZMHJ9CG93aCrEIYWH0MwPSGKPJLE0MuxCE7iCYE35Wa14+qyYr0ZePytxyilsMbB9xMcx6GtFbUsGfoecFlOA3742z/k7cUrrm9u2VVXnN/G9KphOomIE5+qGSfl+4cjYfT1m2v2D2bM5zPqqkMqj+XhEwJvyfV1gS80QeJyeXOOkoYy8xgaSbHdcXRyysvXnxNnGV4QsNw7pGl2xNMY/IBqd8sXz37BBw+/TS97rGOJ0oB6V5DPYvb39hgGTdcKZvk9tLIoVbFbgx4CTo/3qIae3fWKP3v6G
U6UcLPdgCoJRMQkneKlHUPjEkQhR2cRzs5jNp/h4qM6h75tkcOEdCqRukYbS5bOMVLiuz5yKCiLFbO5Q9vu8KME1zoMQ83B7DFy9oauvkUOKfdOj9HC4cWLV4ROSJ7O8ROPZ09foTtNHKVM5jnVi47lnk9ZQ9VIbm4aZpNbgljQdorNWrFYLji9t6AfxpxLKTVh4hLnGmV9dO0x1ILvfXDGg7Ll1UvFs5dbXv30E/J5xNs7WexyOsV1Z0z3Eg72Yjyn45NPPuXo/b9JPJ9ydXHFzc2a40lO7i9puoqy3lL0WwI/J05Cut7StBD40V9X0gC/ZnFlx9bMD4QQM+B/Bb7963zdr7qEEP8A+AcAk1lEnHiEkYdwXfLJGJobhz4gsG6LkpowcvC9hMBP8HyFtQODbHBdhyTMqAqJ5yuuNxdYBZ0pSSYBiAm7WlK0DYcHIeerktP9JQpB25VoGjxnhpJjKKjAxeqIYr3G6NHgi2voupbb2xVJlBD7Y5BmGu0zmLHTEYaH7Gf3WW1eM8gx9yONcxwWWGocF6JkAmLMe3B9gaan7QYG2YHo8XyL5zvU1ajtHFQH1icKfRwnHc39raQuazxhSaMZgbtESp+66fEiwa6o2RUlXVeBCBGdpukHuqZF6Q4tBAIX340Q1sURkrrouLq4Jc0MvgfCeAyd4eGDBwRhjJIW7kJSfS/DaCiLlp0e2G1r+l5iNOxEi9caXC/CWIG2cP56jbCG4c7LNagez3Fp2hqEg5SWvrdU9Sg/cLRFC0nbdGxX4g5FbAh9wb2TQ/7W7/4+33r0Q756/povPntF37X4QYgxGqwmiAzbTQHWIwozHCekq3vapgNawlDjuBrhhGw2DUJ4TKZTrHXQxtL3HVYrIs/DTUJ6OeAnLmIw6GoMYZzNUhrVUrY1SksU0DcSERh837B/lNBc1uSJIE9CHN+hM4L9ScLZwmN/6qIf7XPZejBNMJWmqjX94DIog+olRkg0mnX5lqkX4FkfTzm4gSL251hforSi6Fo6s2NQA6mZYbEkGahO0TYVjgiI/QR8l011zsX6AkcItKzGwOHGjiF/QNdphrYgCMHqANk4mKaklQOx5+E4Dq4XkCcp2QTKVtG2PUVXIBwfZUYQjbGKoemJg2z0/dnRJB64IZMsQ9kGGgNKjV1jY/GtT+rF7E9nWCHBaNTgEwQ5y+URnq9pG8kgNa7jcXiYcXw6v8t2MuzthUwnCV3XMEiJ1hI/GdHLruMSRxGzSUyxuWFvOiGOcjwvAKcgisHiEPQBngwQYvjai/HOm+J8w7AuhEBKRV2P2nXLmIn1td/EcRHWgjWkacCgBvpB03QS5WnwnJFCKFwc4eGHI5xC3U1MIz8icEdwhdSKbb1mV0F8+hyRzXBrD9mOUBv7K6XfvwRbjHRrC0JjUZTVDqU0So4FW1VVY6C2tggR0Q0FTRuj1Bi8KlWHcByur3a0tSFNMwZZjr4x66GsxLgDvueQ5BHCvQsgdz3ixCMKU9JkD9+L2RZvmUwmKN3heT7L5RKtBK1aEaYWSU1bd0jTk05GQqjqBwIf6l2N5yb0okLpniCyaDFFKYvvCfb37nO9WWMZVQ5D31NVO+bLOUM3oI0ccfpWUtY7PFfQDy6qj9lbnnEh3yJth0HjOnPy6Yjz9l1/DMk1IbG/x/5inySJeXP5hunePknqAwojDV1tiWOXAQ3WxfOnWNuzq14SBRlJlBFHMcXtDsyWyksxKsXaiPliyVpvEGKHIqWpV1SVxTg1nidxHItVliBKEX6LtSVCOcT5HlJq8Aa02+MngiSI6GyJla/RM4f9mebFm69wfEM+SZHdBi06fDdFaItoSyaHDkb70CzRzYy2a0FrtHZGIIDW7DYNghCt79aIdZG9xloXpdVYRDkOxjhYo0GM63Ccqr4LPr7ze32jD+CKkc5prP46CPuOh3FXxNk7z9cYWA36l2Q+xxmLIWNwLFgh8P2EIAiwWJRSdJ1CaUPojRO3UZ34TlZ71xC5g2q8K+Qc18VxBbtthTYJQnQoZegHDyXH8OJfFlXjmrvbJe6+/zFo3PV8hOuihSZxEqTqaKqC7bphPq1pVUsnKxzhEURzgnhAOA6B45H4Eb0zsNoWePMpWZwx3U8oyhYlO1pZ0ktJlIUI0dKrc95eW9xQ8P6j7/GD7/1dXr99NsrB1TjlX69eEAUuYejiuBapAma5h7IrtuVrFBY5nNEJReAL4syhvb3l6cuviP0Znutjafnks5+R5xmHScj7j/a4vXDZy2dc1CWbrqaqB2JlMb3AcQSBGumQ2hlwvYgwMqA0fbMhC4+ZL5f0bUdTVBC4HO4tqLYlRbWlqxvW7BDGkoYBng+N0OyKEql6fA+SIADr07Xje4AABtviewFDX6Pugr1X6wuKcryfwzCg7TqabgtIqnpH23V0g2Q23cf1JXVd0w8VUibk+RIhOlzHJ/Fn1KVDU/QMskV4ljRMWc4WyEECkCY5SRyjbUddNVinH31NNFRNiB9m7O0HYHqq4hYtAozjgRPgGEtb98RJjh+7WNNQtQ1hkKGlppUNi2U4etzxkMZSyIbl3j5+5tKX0Heand5ysjjFwaCGBjlIwniKRRIEMZ5jsTQ05RbfNcTRHq7jY3RHXWqm85zpxMWl5fz5G/ASkjQA1wWticKM2D1EtwUYiPyUAY3vBRgFQy+JJy79tiXxE+IgwhM+rpiQpAGtbBGDJfKTMYhYxsRRgOd6BEHEfM9ntWkou5qiqvACix8sCAOwjqHTBaRHhHJC6m+YBOBmKfP9iN7GxOmC44Mj5oHD1JuQhJKuucFzfbAjlTTwXaLIx/csQgkc7SCUg5U+aTwbYTxmYLB6zJz8Fdf/L1qgtXYrhPgT4PeAmRDCu5tenQFv7x72FrgHvBFji2fKCLb4q8/1R8AfAZzcn9nDwz0GOVA3DfvzlNifIRMfnB5pfYZej0Z1R9yFcbpUhcITLXEQgNDc3tQEvmFXrjHWYbqImOY5XR2yqW9phxJ/qtgUJflsjtaWvlNEqUPohQy9Zug1nuvgOg5Dy91hctw0u6Glqi6RaYo3nSAHQxBHWDVF4DCJTphPTtmVb0iSEQM7mcwZ2phe9oRRQJbNMXZgaHvc0MVz7N0bjMFxFK7n4rkejgB9d0D1HJ8kzPBsRtca2naUI/miIcvmlJXkZnWBVAPzbMP51WtuNxco09PpiMQE1G1HXfUM/YBwXZI8QegYYSwuBmEDNquOuhjwPTEmdQ+GszMX1w2wSOq6xQsVWoVYY1FKUhY1dVPfdfhAFh3WUSBCgiDEGEFdl7iOxvV8jLGoSiNbi7EdrhONyeRtTa9aBAHSGEACmqYe8EMH3xt//8cnZ/zB7/8BR8vHYCKm2QnaeYsgwOrRO5DmLuv1jvnshDB0cNwWMBQFVFWLMpLZNMHaiOvLhnwajTKetqaqe6wxuI7A9zx8P0EPBjdyRwS3cAldlziOKHY1Td3iBxY38NCyRwkH/04yGK0jfBeiMGLQ4PoR96aCjw7hwb5FhRn/7BeGdDZD4rCteupOYoXC9BLDSKLaVTc4
/gFBmOJ7PsIUJNEJyhoGDb2CIBEUXUnbe4S+T5q47CqD7DShD57nI7wQRMD59QVYSDyX2TxgVw847pjpJAcHL7KI0CJ1hG59tG1GfYwzrgff8UinEZ5v8GVEaxVFVSKtoBsKpFYjFUxDPwy4sYuHh1SSwIsQrk8c+xjr09SWIBghH4HrE7ohkzRDi5FWqWRIFOYsFyHd0FCUNW1riKOco6MZe8s9yqKiKnYcHKRIpcEMCCtxEEyyBCEEoe8zzSLSLOD5i5bwELJ8BFIUhSHLclw3wG0ynM6783993V/nHfcLnFFOZKHp+rFrpu3dZMjcHRydOz/eiL/2/YDKbOjZIfWAanuMa7DCwfNDHFfgBxqlDJ3UaBym0wzhCIz10XQ0Q8V2rTg4+wI3+i5eOMM0apQdvTPh34XIfrMIfAezEELgk+CRAApHGLbb1d3jx1DyvtcMfYu1LgifflhRt1uETTEapGwJw2QECpUVXd+C2+E4YwPEao3ueqLQYz7LGVRP02k8zyGILEk8J44muJ5DpARZ7iHVSIWLwoTQj5gvchQNu6bDyhB9d8gW5u6Q6jfIvoBsH2lKqqYhihKs8WnbBqxLEi1xRYYyFdZKHOFgjWC5XFBVBW1f08t2JGfh4nvBCNERMcKJcTwPq0bvq8uM/cMJXVsje8UgexwCFrNTlos5jrC0TUuuwQ8gti5m8DDK4MYaP5RgfVwStN+y2l2goxmBLwiiFEmB7yiaVtH1AiMgCSKS1MWYDmku2VUXKL0g8AyeZ5CqY5pP6foxy8oPxsmIH4RopZDSUpYdjhsSOIIm2tE5BU2omC8zRDvGmJgsp6XDOCWuE2CVYbANoR+QxseE3Qxah6aXCGpcZ4rvRmAsb19X+AHU7YAxAt+L0D30PZRVQ9e1+H5AX+RYY74Ga7yjbX5dT1nG+9bcTY4EdxOlu6bm3fobC53xz29gLUCMeYBKKhARSmm+Cb/wvATHFQih6YVm6O3dBPqOaChGB9e4kN5JaEcfuP1aojiu6bruUNLiBRqEpO/H4HSl9Nfr7JfNjG/4riwjIt8b9w6lNXawDKpE9S1WOSOYKuBu6h1grEcUj0AL33WJfJ9GWHrb0cuBSZiSpgE3mxuMbelVQ6cleAm+Z8knPs1wy5fPf0rXwpNHj1kXFzSrHWWhmc8XbIpX9NpghjHOwfNnOIGhkyt27Q2Rn9GbA4Sy2FAQBAo1SC7evGU5h+lkikvHm1cvuX//PZIs53SaIvucNAjp1hW3xS3rXc8smRI7DlZbZB+QZSHrokcpl8CLcEPJpt4RhTHRJKQqCsr1BtPB/F6Oo6EbenrVs94WpH5OGAREnoP2Si6uz1F6gyski0nCMCjqosAaxrXheMRxRDvqudFas9ldMfQDWZ7jBz7zWQZopGrp3+2Djs8k38MMGmssrjfSFefzAxABVoNjA1SXI2yGK1xc1yX29plPD7i6PMcRkE0TEGP4dlFVKCOJM4GRDdKMOY55ltNVt7TtDhFNUcaOjUQFVjvEsY/v+QR+y7AdyJNkXBtWjqG+dYMRMcoKiDyCSUaYB6B6snXvPgAAIABJREFUmr6h6nb0wz2Ea+i0pO0G/CAB0SOcEMdTeEGN1g3CGiJvQhikaLNBIAhCQRAEuNbBsQ79IO5IqA5Wj/JZz+bE/t36NJZ+GEOdjTBYxEjjVIY4C/E9904VFYCIcZwQ39ckmY/AoWsFgZ+Qpgl9D2EMbjVgRYs0DbJzCNxgPK+4DZVacd4s8FpFIwR+7pLOM+Z7E/AykmTBYrIgZAt+PGbqKYsbJvRdhVYtvusyn+YEQiKswXdcAifEMzGxm9LUPYPQWNci9fAr66Vfhxa4D8i7wioG/h4jpOJPgP+ckRj43wD/9O5L/ve7f//o7vP/16/yWwEEQcjj9z/ky2ef8vTLn+EKybcf36coC5pmRdvXLOaHFLtbenWO4ylmiymb7cDJwUNcN6BtW6qq5fT0mMXcxw18wkmAIx1Kt6LFoJuWm7rgZJKj6i290qCAIcIJrqkrSd85uCJh70CSpRHGjDQ0pQWWBuEphGcweBTlLZEIyeKxk5TFgmZ4TTqLmQdTXN+jaFYop+Xeowf4bkxdaqriOc1wxTyckoQZk8mMrg5w3QlWDUhtmM8z6uGGpm7xhItnDNqmCK0I45iZNwcjUFbx+YufcLuqKXYdwzAgnIo09VHGoWlcptmC1M9pG+gHCY5kV1naNkc4FmNgOj1ms9rhex6OGElZQejwk4+/xBqBlhajFGf3plRly3SaMJ0lIyXG8ak7By0dhI1pmxKtd4CLI1yiKEIEHkXZIoQL1mFXbAh8iydGxLo1iiRxGDpLqwvcQDJbxsTxlEGVNH1HLz1O9vfwXI+r218w2Bd8/4fH3G4V6/WWSsvxQOT5GCFxvQThGowoCb2WjJBd0TAMLkon9K2DkpZe1ghvoKwqQk9xejIagbfbEiManHCg6TWyE0ThlB/+8Lu8ePMlRktcwMMlnXhksYtSlqrVaK3x3fGwM3SWymjmoWWf1xw5EUkf8a8/ecNqe5/H3/0Wlmtev3mNlBVJklFuRzmqcDU4HtfnJbOFx2wW0fYdZmKp6pq+HghNwpPT+zx/9ZK+0wh8ZukSf38sEMyg6buSvtsR+zFvNjuGDvIkJZl2tGJLWwgcGzObzfn9/+g9fvLxx1ydN3RNxGJvhuvOkWYk+hlj2HUl1VVFHMa4To4bGt7eXuJqhVQGaRyCyZSb2w2JDfGFi2MClO/wdrsl9BSeD0kcEQbOuBUZi6RHCz1is4sKI0LCJGEYBK9elnT9LdZ2DDLj7PQ9Lt8W1PUG4XQs5vf56Z9/hudr0nzGcnpC6G9wxM84Pd7j/e0Bq905h4dT/Ah6vUL2LUooDma/wWwRMawjrlcSh4QwGGgcOSYAvSMA3gW8WmPY7mqm02Q07xuDRmP1SIK0VmDUSGVqakUx/1co75wISSM1nW7ASTDaQ3YSZTRKCPADAs9lkrr0mwicDD8B7Va8fv0Lnn71jEfOt4iCiFoYhNW/VE/95X37L0kDHUew4AnuMKHY/YQ8z9lur8b8HS+mbyrybIaUDUq3488pEsqyJgrNXRbQQBhMePjggxFYUd5QljXZJBvjDXDxCBiGkvniEKk1Qvc4gaG3N2T+Eu34WFwmi5gkZ5y4lz1XV5cc7D9kks24unqN0g2HR1Mm7oKbyy17i5z95R67+hnSjBCKvu+4vDrHK/MxsFpLqtWGy4tf8Oi9Q/ouIXA9ktwnizJcR4ygJN2y3RZc3Vzy/e/9LkHoo5UgMIKPP/3zu8LWoAdN1zqcnSWYIUSaHVKvCLyIxeIBbVNjlOb08DFuAk3tMJnlTNM5IRlfvX5DnFlcR9A3krJaoXoHiYeK/JF4Fg3kRws8Zw/TpxSrczbXF4TpBC1atu2XtKVilk+Jk4xeulzdnvOf/uF/zf/5L/6EXq2YL13yScq27JC9Rg6a9WbF/vGI2r+4vqBrFUGc0nPF4VnMZlPx5vySOPRZHio26w1dYxE64O2bC7716D3cbIPJXrPdXaK9p8yTj0j
i+yRujsxfMDv2eHl5hRUxD+9/h730gLIuePnqis32msXygOsf7SOEy9ArmnqgbxRaWYbBoqQZG1muh7Vj4DEWfN+9e58QX8Mh7J1sz74rgJwxFNl1BXXdsF5b7qkUTEcYhERhguclVFVJUw1EkcPQaa4valyPO0XKnXzvG1OrsSgaiymp1Fik3XmxqrKn2MDhyZTpBNquY7dbMQw9xoyTub96vVuDFos2HZ0cKG5K1F6Cth1hIjg6irm4umJ+MGEym+O5EbvVC+I8uSsAJYMtuS7esH//iMU0xbY1lzfXkEWEkY8QHm1Xc317RRJ/lyw5opUlq81r/pc//ud88K2/R5i21NWazargt3/zD/nuRz/g6uoZm/UK1eccHcx5dfsFSktSd0lmYibxBOEoTNfQXlc84PuIfZfe6ZHDhsDPebR3yEeP9pFXDT/6+Ibu0SFh2VDfNNi6Ip/0nC3ex8qKpu7QpqHu3/D2VUZdFcS5RzbzaKXEDgHpfEKepySBw7/+f/5vytM9rCeYLKfM7DGvrj9FA0MrsM04ffzq+mP6fs1iNuPb73+fi91bri+/QpgZ08kBh/sBrtODsRitUHoAJySbRqhBkiQhT56ccX295vmLV1hhiaKUJJrz2ec/xfN8ojhhOj2gaddEiUdZKpq6RsmS+XyPLPmQPE8JQ5/tTUm9uWJ9W+MGDl6s6dueoTMkYUoaB0yziM/PK95/bwZC03Q7BtmTRhm9Kqi7mqJosMWc9z94DG5Przb0sgUT4rsxk1mA4/W8fXs5khDFgDaWdLogSnJ8J6L3rwjSjiARfPnsL8gmKUM3oKTkaH+f+XxB02q6AbQJODud8OzZDVreECeSZJrz+Czi5x//jOvXPtPkkN/92/8h/+L//VPcICaNY1wl+OLVJ7h2wqOzB0wnIevyK86veuaTOfPplCiMuC5XKOsQTUKGoeXZy5d09Zwk8HC8hCwCqzdcvGhQqiTLYlIzpa4FWZmjBgg9n/kspqmgLgqaXcGgN6y6LW+3lyMTIIJsT1A0K5biAZPQgW7L24tzwrCj6F7QB9AjKYVFllsslshLWcZzer0Zm+7SR1tLWfTQlVxdV6T7S5I8p1sVv6qs+bUmV8fAP77zXTnA/2St/WMhxCfAPxFC/HfAT4B/dPf4fwT8D0KIp8Aa+K/+fS8gB8nrl2scnfPo3oegQr744lMOD+c43j6ff1ahMjjYOyabHuGFkk1RIKc+jq8IIkGeTPCclOkkI/A1g+y5Pd9wdfuMIPdwY0ueCOpuzt5sxtVtgxCaee4jlY+rQkK3xI0Uvu9wsDylKXuK8gY5dMR5hvUalHJREsqdxHEtD85OublsudxsiEOFH0uUELTl2GHSuLQ7SzcI0jgFZXj76pzFyYIXXzVos2V5KKiaG5bTR3iei+cL8oVl88piPRdloWxr2vpjrBcymeaEYURblazW50ySCWHss38icQJNt9nDqJii7KnrDdvtLWU/jDQvkdK2A1HqcXm+RTga19eofkcQCkSY0A+GspSkVrMuK9J4QujFGKX47MuXhF5CUZTc3rpMZwnG5OSzEL/tqaqeIPZw/QDPCxj6geevXpIEOV4Qkk9j4sShDizlBvb2R5OkG1j6zoLT4yofrR1uNh335h3FrUCIiCjQfPrmR/zRP+548+oZWhcsD0L29h7w6sWa6WwP3/d4/uyCJx+csave0PU91kKW5WhWOP6Ior1ZXTJdTJikx2jTU9cFfdNydJIR+RO0ahFez8GZS9/7uGYfVIhsW/78Jz/i5vYGaVyCMCKLQ64vWpzFgl5pNu2OLL5invns9EASDTw5M6jzDdvrln/TaISn+cUry5Xp2XY/RZpuLOCNR7k1TGYekzkU2wHPPaEdVtRlR9/6RPECz7vl0cMZ34mPcXqf1XXFPDpiyxajLPUmY+hfUVcBRbGlqte0FKTmAKE1UeggUPz8Zzv8MMFiRvO38Xn5tEUWC7JowHUGnj3dMl84lM16RJE6LrHvME9cdGTAsxB0TDOXaTYbSWV1j2ccwqMJQyOwvQWlsUPBZPmQvm3RZmA690jSgPWmBOEigoDbsmYWuBAENKrh7fVT7p2ccHL6kM16wW63Zhg29HKFtGtwNY6T0NaC//jv/x1W65Kbm1uubr6iHzqU0lS1ZVs4aJNBuCPLj6nrLdvyHM/rcT2fX3zyU4Z1wnT4XXCKu8wZcUcDu8Oym3GOZYVD10niWGGtMx6ixDtwu6HvRrnj0dE+y7nLpRw12otFxvbNW5S2RNFAkHqEwqVqBBKJFj1Gauprj9P5jDiRHB4dM50d05XXuE7CyewQEc5YX+1w8O6iYfkakf2u2nqXWySE+/X0SitNu60oNi1D6ROnMUmcMz+IefPmDf0gCSKPJAlRvc9uU6FCENbQtwqUotpcsdybcXz4kOHcEAcz6uGWvpdIZclnLs+efcH9B0ck2RzH8Tg6OULKDec3lzS15ug4w49yHDsFx8G6FVX/BrmVJFFKGO2Thmf4fgMHCZ6TU1YuZZ1w9vCMoqjRpmP/OCNwQ9abl1RtQBLN+d73voVlnC6kSUyezsBMeXPxFdP5hMePHvHhhyn/xz//n/n5x/+GJ+99j8lkQdXd4roVe/Pv0lQVsrogz2O6rUNRPmcYBlxyqroici9JgwijNOvdLa5/QRg/IQ3PSIN9ZnnAZlfRDS2O57Nc5MRJgqMz+l6Okw+5xXF92ktFmhmCxEUEPi4Trrc9u11JX1l+9zf/kNbestqUdJ3g9N4HfPni39CL52ybLasdRNE56XKB6kKqckPdvmJ++vscnXwL4Sbcrq5ZbS+Yui4vrp8yDA4uM8LcsFguefuqpdwq5pMEYTuEo3B9H2UUm+I1cfYYFfkUxZq6LQnEhOu3FwgXerPl50//lO8++m0u3q4JgoD7Dx9iRM/0tz8lnyQ4uyP05TEYRZYtuLm+pqlrBA67XYO1AVqJUSLeSdpujNQQwsEdca1jE+OdNPAObiHEGNytlUPXgtU5SZIhGGMGgiBkt9ogFZTVGDQeRT7W+HeF2ijntcZihBkX0d3kV5sBgYNwXBxnnKbd3myZzkKyLKEsLkfVgut8XYB983oX0gyMVEfhEoQujn/LZ+cfkwQz8mhO6E3Y8IykTeiDDuk0lNWOXpyhjabYrtncXPL4/nts6p6vnv2CQHjMkyWyg9vSIZ2mpNMOfXvLT5/+iP3ZHvPpgoPFHsFvfUToXXKzuqRuWoIwRrPik0/f0LY9VkQkaUo0n3Pivs8gDY4I2Z8t6Tdv8eMFKlvQzmLiLsJ6ktCRyK5nc1UQzzR//JN/SZoecPadJ5yewU9//BVhHDPonL/48Vvkgx1n7wUoN2PoFe5ty3B1jqM2IBx0mOB7GT/7/GO+5/+Q/fkBkedy7/4DinaDCDKkdSiqpwRuhBcmuAT4yhIpeHJ2yNvVU6TsuDx/y7177zP/jX2UGul6e3tTPv/iC7quQqoWpSXGxGyKHQeTU+bpDEe7NBvDveP3eHPxiquLG45PYlbrVwzS4fDgHvfOFlSVw5tXN3CH5LeMhXeeR9
RNxWbT0LdbmmFDnoMfxDhWs9685vjgPmGQYK2hKCv+g7/1t7l68RUXl9fsmpJoP0B2VyyyQ/J4yv5cIFzwvZYoTgiCGYuFQ5qds9veEKk9JumEJJVcXr3BMR1JkjJb7mNlxfX1DYGf4LOkai9pih2TdEHkgTQDu9WOMMgwgSSeZWTxIYv5Emk6Xr+55HpdMlNLbtdrdt0W2fm03UD6WnG0v2AxOybLE1hmONOC1087NtsdmJyj4w+4Ln7M9e1XCPMeRwd7+L5idXFF/iRDeR6ONRgueflmxYOHD8knx9ysXyBw2F8e0DQtV+e3nN0/4fz1xZ3sVzLIju1a8+TbCXWjUX3I4+S7vPzsS8wiIA2nZFFKRofpKnatQXguYpZQKo+mvKUsOnBcDg8XvHn1FjEJcToIa4cn753w8os1WTbh7OiYj75zn7df/pgnjx9SoSi7Gsf/txC9f+n6dWiBHwO/+e/4+FfA7/w7Pt4B/8W/73n/0jfheUTejDyZc3hwwOXlDffPHlLVNwxdzXI2JQo9PDfAQWMkqHJB4muMNEhHgWfYFBUiUCyn+0ThnMk85Mc/LzlwF+wl+2TZhFkMSgp80eB7LlEYEoRjp7bfjQfxSRpwcf4cS0TXG7rBYddf4HsJYRCD9dBWc3ywx+ZqS10UOI7DbL5P01V0vUQTYIWgKxsiZwIaZD/q06ezFN8vEaLFAkakRG7OplohXIMVkuc3HYs0QXYarQWd69A5G1wBobYMMmS72ZIkCVGcsV13NJuBeBrh6B2qdYiFy+liwXbdkC1djA4YpMB4w+hT8ry7LEmDHzoobegahRA+WRrT92scJ2MYQvpe07Y7XE+QxS6dslSdRHgOm1XJZB7iBRFh5NK3mm5o+P+Ye5MdS7I8ve93zrHZrt3Jx/CIyIzMyswa2V3NnjiA4EaUIG604iMIeo0SIC0EEXwAAXoIAWruJIBosMgme+6qyqqKnGLy8Y42m51JC/PM7BapBrSQ0OaLWLjjBjzCjtl/+L7fFziLdRYfBNS+IehHZBAQBjNWiwsC0eFxGDMQxSFRIEjihLYd6QaHt4K3X+0ZR8iLkNksZb1YMo4laWYIgpTF7ISu1vyD3/lddvs9h/2eF8+fkEYZg6qYLy7JkkuOdcUsTJG6ouobqqYjimLq7kiaKVSSkq8cu6rGhxNJKooVzaF/zPzZYwbJ0A7UXU1USDKlyKKIk9WaMDOMpmE0HikVIrzgvj6gHcRCUDjB3X5H1yvqQTC6gVfXLW0eYntBNo8pFvMpNLZrsKZHuxErHYtZzPFOIqUmVB2JH+l2FfeBxa5OWc8LLq9mVK9fsQ7njN5RliWKHGyN8AN4gxsijmZAhpI4mEIEj9uO5UXKOBo6pzGj57Bv0L4hiKa8H60dpveYcUTrEQimMGsvqFtHGEiKcMnDcEs4j1EOpPak6QzlGkIlManDWUcQTCn2MvRTTprTDE2LYZKHOJHw7q5i1/YU8wzvBePQkCcx5ydPSeOYMYlBBEhpSdM5gQJ8QNe3bO7f0Q+aJLE8f77m4a5HCsliuWQ+P+Xl5yWnz07Y79/RtRqr56QJfPblpwg1EkQLxmokKyKsq/H+cYqO4us8n2kaPZnmrZ22g49Pv4ki5GKcdUjpSDJB1zlGI3BxgAoDrPc40+F7P2XdKEnXakTkHw3skvl6yenZFbtyx6HbERWWH333uzSVYxEXdGOIE+C8+LoWfCwS3V+TJclv/VZ+8lqFkSJbrnj566/44P3f4Hvf+xHZLObf/Nv/DaccTipUWJClCxp/oHUNps/I4oT5yRLtRkIX0RuN7S2rkwVD25CFGcPYU9c1w6BI44BAzMAoyvaa+SqmriqsMQQS6lITx4Yi96xW5zy5+JiH7VcIZ6nbkd3+yOvxDcvFjGyWkyYdUWCZRSHl/ktCNSNUk99lX/Y8f/oxx+MOQUiRPWNXfUHb7fF+wFpJc2hRUUhVDVhKFmvJb/327/Hv/+P/zvXDa+q2p0gLRGTRZkSqlHx2gaCh6Tacn6/Qg6Y8jDy/+jFZGrPf39IMO7JlguOcbg/JXOCjidyXxGDGAOGjKU5jf8PJ+kNCqenGDV9dv0X4J1ycXzIOBw53n3GRzLh48jE2TjnWhrt3Rw6He66efkCiNvT9jlh62vaO0Q5EoSKQgurQ4kROXVV4b5gVl9zfbjjeh+hxIBCS1dkJ+3pDWwVo2xHFd+BCXr1WzJIrlichMYYhMGhnqOs7rDsyL1bYURL6HOkDjLeoSFPXGhN2jL5BG42WBodCyISu1fzs019ysX5GfewQ7JHZS3wXkcv/ksU6ZLbIkXLBxZWiKo+EkSSKYnAJu+2OduiojwPlXjMYh8dOgd6PXwgP0uGcwFmBlDFnZykwMI4Dxow0HcxXGXXV0zST9DaKo2kb/XhOpASLmKxXwqMkf01eCDBRDFUo2O0r8rsAr3qs0Cg1A6aA0v/79XVml9OQJOEkTewNOshIo5hZPkOKkHYcuDp5ypOTD3j75jVluWW5zhDj7ZTdqMAEjv1g6G9/BVWNS04Y4jNWF3PioaHuHuj7isuz9wjlOadnc5zvaNot/bDFseDi4iOEVwzDQD8eubhYst3tOBwqNvc1YSCY5Qu8aSi7wzRsMzOiwRFGkKiAX736NacXC9JEEUYRT997TllvCH3IvMhYn84oH0qC0DLqhiSDH/7gCl3vQZ4RKolXCsOaH/z2+9TlWxpTMQhNurCIIOHLL/+S3WbJ1cUV0XzFze01s0ISJyGmaVnNr8ifrtCDpd3XFDNF1d0jpSbNQ6JccPP2l+TRJc14pDM9ZT3w/tMrPnv5S3Q3QBByenLBu7dfcDhuMNbRmkuc6SjUgvfOPqbIWtpe8+z0GbvjBulHuq7HI0EaAiGQHpQXCF/idUQYSsBQ7nYUcUyY5xDmeJXx4UcrRC/wViKjkGwd8+7uhjDOObm4Ih87bGh5e3NNHIWksUDYHtkG6AjaXYOQgrQI2e92FMWS+liz25RIaXieLbjfByhfkM8W7MsN++0DSZgRRhFeRGgXkGY5oVKMvWZoI9I4x6luOgem5bOvHlAS5vNTZjNLJByHKiSLpkGetIbWlNzu7qiNI05igkCzXAZkaTzFOwQhYbxgni0pkgJvB25uf4WQgt4ceXt9zywNeH5+gvUJBIqqKbm521BkMcLnrJ/sUdUd/V1F13nqZs9y8QzrFd14IF8EeD8nUgFStTTGs/7wOUGyIsvyyUdXNWR5SFUeqA4d9cMESyGIHyXWI9vtA+M4kPUBwis8Csix0YbjeKDZVDyUgtEXVHXH4VgzjoYn69O/va/5f9ME/X92eck49Fg/EIkBGQyMo6OuJ5lbkkWTrt4IcFPavNOO2bxgGGucBSc1QhoGPbA9HAiiCBFo8qKgmJ2gxBzdRbihp2lLcBrvPK1zLFfFNGkPEoJAkmcZdVOhVDTJF4xHO8t6UYCTCKdQQqFHg3MKpQKUkjgvCeOCcvclMoxRQUTfb6eMCG2J4pQkipktZojsjmQGcgjxLmXUNZ3swGuQBicdXTe9VLSbCGVWGHTX4
WxCqhakyYK6ueH0PCWNAvJ4xvY44EZPFER4FTAYg7eCUEZ0g5h8ZcYgQwlimrzIIMIYzdAPeG+nYs0brIYwjfGAsZpRO5RV2GiaiGtnadqethtQoSAIg8fp46Sv9d5N+Fml0FaiggCPm0IM5wmpdZhxeDQHW4beEoYSj0OpCTCgB4uxBklCHM4QPuZYbQkURFGEc56m7jg9cRSzkCAoUDKirivWyxOWxXvMsmd8ef0XUCZopYlDh02DSRJoe7yUhE5gnaUfR1TtiFSIFAGKkEimNGNH34+MxuCk4OR0RZ5kRCrBO0kcBszSE5p+YOxqkqLAHgeEAd079qWj1JLOSnwnabVgpwOq+kgYRQRxOIXhGssiThAyxJse1zU4N6DUoy0Ay6grBJb9QWCtwuiQRbEgiBJ853HDgB0sQRQTqo5ABSgVIoRgsBqJwvkRLR1RohBqahQ8HmMHmkqTFTFOBTg/TYwHPUzenFDh3eQbGI0CbRkR6MDjjEIPAt2DHiU+ffQhETC52R1SCdq2xHs9Efa0QUQBgYrBS4y2SKlomxFtpkbGOcdmX/Lm3c208bI9syKj7UckEdZNGOemb7i5l1hniOOYWb4gSaYip21GmmYgjhSmiemHEWMckoChg36caHlCSpx1CELwCh4Lucc67GtA2TegC+umaffXc2vnDYELsXZEoPFojJXkaU4QDWivKWaKWM6RJsUb0Hqg71pSkRKqGBVMnk8bKWQa0wyWu4ea8/yKpj5OkiYPURTijQf1LeXsUTX1LflMiG8bLDdhhbEx3//u7/DD7/8uQgrut+9o+hqkQIVyCkFmAusEsSB4zPxBOoLIk8powhQ7Q7ZMaJsaYSf4hpTx9LNK0HQNgXIowul+sClxmEAwZRsJnzMazaBHlHRUzZHVvMD7OUrEjHHNw/6W92cfE4YBcSyQIqXtp/Buh8X6ASHmhHJOHgdYo2iaDu+mZ1E3DlhRM9qMTE7Poq4pKcsD69NLPnjvO+w2R6xpWc0/YPAju+0DgZoRqADnp/eEIkL7gWE8TnhzF0xnRii6oScPVrSjwJlJcta1HV4MGDeAH/F9j7EGYzSoERVY4jgEMyfJQxLlSYeehbNEhzesLk+5WCSsXcTrm57LcMBFniaW7PYj+9FO9wk5SoeEocYOPZFMyWfnnF0sub15yb4fybOU+aIgjCWijkmjGX7QjH3L3fVIlAzMo8mXGHlJGiuSpGPQR/TYsYhO6XxK32iwA9aOtL7EAMLNUC7AuZqubhj7hjRKkTLCYwiUYfCWKJlIoHttEMU9sTpihwB3PEVG4TRciyRpGhMFBd6N5M4jsNTH8TH72PMto3ACW0glpvwuO2Hio0jS9dMQM1ABg9bk2Yz9ZqCtDRNg4tH39XgwpqZq2uxKOYEyxsdNk+BbL1agArpHdca8i0lnMXUvp8+DvwHe+Jv1jX98broJXhVBni1wZvL3GK2IwpihH/E2nGTWMmWdLCd6qDU0fc/oGp4+/5BcDwwDlGONlHNGfUfXHAjDgh98/x+Sx+e8vf0VVbOj7epvpJTOgRKKIIgn+Ig1GNPj6QlCgaeibgzaWIR4/DeQEcfqQJrErNdrhAjohwZtII5S1vMF1giKtCALIzAW3cLV1VN2hzfoviUVMYOwNHWNNNNz1SnLQTuskngfI50iSiKSeEZ9qNC6Z9SaJ0/fY18eEdKD1ygpGcaWTHoMht4OJFGMCA1BNN0ZnW6ojhvcPKKzHYMdUbZFhecsiiV2NHT9iB56nIiQQYwTgqav0NayKwechCBKmKkcsoBts6fXPV17xOiRY/lAFmXEKkIIgRl7jDmQzXLCMGSVLcDHSXHRAAAgAElEQVQJFBGeEGTMybpg+/Ya6zTeKaQLiGKFGyGZzcnUCit7DuVAoBKsdugelmlGq7uJNyA8Q9OSpEtUGNDrabR3eZ5AVbImwYWSti85Vh3pLCSLA6I4gjBjc9wipAQvpve4kTjvCVWKMZaq3mHdgBUp2axAYDhsbrHWIAlAebwf2ZYPbA63jBbiJEZFhjApcEgWxYwsTzkctgiXUixijB3puwE7Ki6enDOMLc4o4iBlsSgwWEbTTtteB6tlgLEbjNkjhSVQFj02SGkRIkSIlLxIMBbCMCGUkr4fQKbEUUaW5sRJhB6gLPfUdU03jlivsHpESEsYTdCbttET4E2GKBHiveL27oG6r5AiJjAh4yBRUcLQDvSDJBQzlqvzv7Wt+TvRXDk3daW22SLCkjhccnNzQ9sf8YxkmSJOFrTHHmc1QoJSI0W+wPupoDGmJU4m3fPD4ZbRaOIk5PLJOeenV9SlZr9tJoR2+8A8TxmNpdees7MX6HFLni9JU0Ucp0gpCNSScdgjHUQiY72cc9g3eOGRgeT2bsfTZ09IvUIbS3nsmJ+uH9Okl2RZjNYdddXiREyS5qzmSy5OZgxqJMgkQkV4EzFYg1EW60YkhsU6Z/fliA0kVhqMG0FIDgdNrypO5xnPz694/fYXvPhEcXq2ZrnKePfTCmsD0iLFWE95aBnHFm9mDONA03UMumO5mjHakUhFxHFCYxqcH1EejB0Y+p5AxkgvQRjwBoHCagmPGRDOaoa+wXlBWXaEajJc6nEkSsKpSBOWOAxxJiLPJErBaBqcBxUIjBFo67Ctpql7VABxrAiCx0DHQGDdiBIB0qeUx5Hd7paT5ZJAQdVUNK3n+uYrnlytODlb01QBZVlyfnHJevGUSK3Iypa6nARUaZpRLCPuHg4IZdGDwwzT7+OsYOg8Bo8SkCwzQrHEmXEKiw48gUg5OzlnOV9P+Oiba5JoydPLT9ge9xz7l0SJQ/QpblD03ciXmw4tZoxiYBwV9RBSBYptu2dNiO6hbTRNX3N+ekUYxoih41hZRt2TJBJn5ZSqbvaso5ShqemGiTK4WHhmy4T2WNLUPegQIw1Kyun/UaYQtuh+xPWWzmuUGlmsF5hH2MjERRgQ0jHPLxFK0I/95D9wPSpMCJmCcYdxetg7rTFa44wlS1LaRtN1ZkLG6hFjNVqHjxsgMHRU5Z5QJYRRgGDKfgmDDD1qjGmZ5znHJqTcNzhviaKAzb6iqV/izeSFXC6fc78rSRKFcx5rDQ5D6XK8H5mlEaHKkUEDCB42B+7uHpjNLfsbjUgEMpAgLGWpyXL1dQs1Uce0AmIE33qapm8+musfKWbO28eiyn9TABpjpnMsB6xXKKdYFDmtamkHzfk6YDm7ZGgUVTmw3WqsH5CiII0jggDMoDkMFUkW0teeu9uB+ImmGVo60SOsI09jqrJFePmNx0rwn5cqCCFw1uN0gGLJP/+v/gWz+Yw//On/wR//6U8x3qBUOGXsiZ5eS6IoYl5khD5BIcENJElIFgYMwzjlzEQ5KgwmWqiHOMlJFwKjDcfqAWcKTueXWNORRDkE8hEgYIijBd3wQN/fs/ctu/2G87MTYpUi5gqVav7oT94SBhFRpKZGL4qJ9iu0b9G2w9Ixm13S1ZYkuMAq2G5uyYoEZ1OMGTDBJIVyviWLC3bbktevbwj9ik/e/y2+En9F31pWy3N25QO3txuS
ZGCxLJDCIlxMU2narqbrd+yO1yRt8SgHC9htK9J0jVLT/WONxllHPVQM42QKCV1InGR03YAKe4JYcrp+SrlJ0KZhHUtm6yXd9o763VtWbsV8uSBKZ+ikZ2G/IEocx8DyrjaUNkKFITLMkCJnthroupF5uuL09AXnl0+4fvcr9uUNUXqOjDJ6XROFGevVAnEcub/ruN0MfPyxZHA1KjREcUI2CyhmLboeMKMkFQtENGN3eDtBQkRPX98zz5+RyXNiCdpv6bYVY3fApAVJPufyakmsBmQUUuQJRZywP3hYfYlIGjjmjA9LnMtxzqEHj7AamXuUEGS5JElBSId3Eqm+blwe6ZxeIpE4bzFWY42f/NHGIIQkTSJGY0mjGUO3o6kmAMzXIcVfN0PwtbTv66GExLvx0Xv8eH6Ynl+jHhh6h9WKLJtz3DXfbIr/5nmb/vSPcA0lJc55xsFgRkeSzzgetxgNgSwwvaWv3pDEa07TNUjHefqCQEaM2tC0HclQcfHb/4yPVzOO12/4q7/8U7q6pC4fGDvHuviIv/+j/5oghL/69E+53zwggnaizIUxZbXFW/kYdRJTHktGowmjgCKLCOKWzcOWMJpR5BPwS4mWfblB64Kzk0tWqzMGe8ux6RBNjzcJVlvmswWBVdQPRzBzzp9c0Y97DvWOsfIoJPvdgUgmREFEmGpe3XRE0YxERSRBhBQheZYgncC7EG08Z+dXnGzeoW0H3pC4lLu7G8RqhreCcRg4jp75WTIBvLqeQ1VifMfe3CNEgIoCktxRNp7VyTnKOx7u7ih3d1iRkS/OSOKAftjTWdjvS1TgydKQk8U5oxMYFYLRjEPJqD1lfcTma1wywweKvh2xbo9DsJovOMsvedjtGVsHiSEKHIqAQbeTVFtLTK147+p9bq43BGqKowmTlGdPBcJbqmNHWwVcnq2o9gOzeUQgPId9ydkHL6jKa6SEIsn46IOMN6/vebY+pekNN/t39I3k6tmCNM1QQYRxgjiRTCngwaSw8AGDHYnkEtO3VIcji2VI2xpmhcILx+ZQEYfum6GE9iNVVdN1e5I4RQQJQlr2tWAcEq6e5GRpxMtfvyQIzgmimDTKyApHU8LJesnd7SuaumMUS2YrQVf3zLKMLI4otxXFE8PD5kDfWyJZEMcxgQwmyrOKyZIVWR7RtgPpLCFUGW1v6eqOMBjJco9SIVJG3NxucX4giEKKPEd6yzj2qMgTRoKmUkRRQhCGhEGMNZ63797Q25Y0OUFGGTJMAYM1A0k4Y5YsmaV/++ZK/eQnP/nbO5//H65/+a/+x5/843/6I+JoTiAWCL+g7zxt12KsRaiE9foK28dUZUnb1pyerpkvVkgZ0g89++Nu8hLFxfQCcAavU8zYkUQFh/09VXPL8ixAqYLehDgfEEQhKohRUctylaOUYvuwJ44DcAqnPcILpNLc3W+4ud5Neuix5O4GVuuUvh85lg0Pmw3GdsTheiLI6IEgtiSLDJ/EiCTEKsvt5h1/8mcvGXqHCgwq2nL5JKY8ara3NQ83Jceq4u19Q5RBEE3l3HFn0P2MUM6QSjLakm4Y2DxIbq5L3r3e8urXgourJbXeU473dO4BM/S893wOCrSdiIjaaMJY4oXDec1slpBlyUQBHAy6N4RhQFsPeBcAAdaNzLKIOJ6w595b8lmBHiO8c0SRophngENJ9Ui2SUjijKEZiWKPVB4w7A8PWOsxxmGdx3lBFEYYM+CdBB8QqoCmPk6TxFFzPNRUZcv+uEdrhR7lJAkRLf2w5WT9hEid8os/v+b9D1c4L3nz9jN+/ss/ZNR7tveOYWxI0oizsyd8/tkNSkmSYEYeFyQR+E6RxtP03VjLw/2IjHukSqbCMYtIoxzXCwSTPE2bke9++D1ePPsB3Tjw5buX3G+/JBxhMT8nDFO2hwe0iDiahHd7uDkOiHQgCZ5gBon1HagDMuz5+MUP6VvPMBiyImKWnNA3MV3rJjys7ykWhqaaChKJZ7U6ox4abh5ecWz3EMe8/PUvQBqqxlG3EhFA3Q40VYseDYGIKGZr6maLs56uH3l4OPD++xesFkvSJECFHm1b4jygrxracvKmzbMVOGjbhqZr6UaPCGJGpzGuR2BwNsNaQ9dNBDOQWD+gbUWYhCRJTBRK3r66Y5ad4BnRrpxaFxGgQghChVQh9ejRg54S6YOIumopq4HZ4gQVhlhnqOo9J+tnjJ0mlCHL+YJ+sPzR73zK8/s1xWvJzz79Oe1wZLMvaTqDNgJrW8wowUsSccZSfcT9/ZHq4ND2sZWS4rH5FN8WeB4CFU3hp9bivEOKAD0MxLFkucq4PD/l/u6Bvf0coSYPkPIjjDOsbdGmZdCCk5MF65M5gfLTNm8U3O/eEgUts0xS5BHX19c8fbZiFb5AmoymHWhqO22Kvinuvt1Ywd+kBVqrufrglH/+L/4pv/t7v8e//Ff/PT//5R+DGEjShH7wjKNldBYbeJbzM6SPWSaK82XMe0/XNKOhHnaMosTKga4WnJ4tMK5kNB3aGOZrM52TVBKGgDC8ePGCOMzRo6PrRoI4JM4kx3pLVR/o+orF7JJABFTNkarfoU3Lk8sndOPA/eYN95t3SGJuNzdYG9P2A1Wz58nFGX1dkycL4ihlsEc603D3sCUQIVmY0HfvILzn7kZTHkfCWLA+X3K4D5ilCatlymBK2srz5Mk5UQR66FjOL7l+d0vTVDhnyGcF58vfZLu7myhtes/99guUijg7OSMMI8axwYkbHna3VMcBpRLOz0/I84R3r0vKqqTvG7Tp+PnP/pLN7RE9BpAUvKPmyYdr3n75FZ/9+td8dfMlG7/hbfUVr159wVev7nhTKVT6lPKuRAGzZUSUZJwvn7DbHiiPA96tePq84P7uiPWC0Q40/ZY4WrJeJ+h+ZHNX0lQhv/+7v8m76xsO5YHFyYzzixeIviRKYoI45u5uB7ogSQNkXDNyx/YepNBcPTnh4nxOkSc4G9L7I90w0PcdeQqvX35KkV1RJGtCEbKp78E5VLBGBQ4R/ILhcD41/0ZjzQgChn6ijvadpe8cbeceqX9/DXLx9brYP8YtKMjnMUEYkSQJQRiweXigmJ3y9tWW3faIVA7vp+DgSeorHz/CfdNseeun95t7pA4K8c2+DAFKKeIo4r0XJ2gtuL0+MPRmyr3z/pvMq0f4Ic560ixiuSxYn+VEpxs++/WvgZBAZQhCnBtp2geCIECEER2Strvm3/7gJU/bFX9vc4XULX/2p3/Fh9/7L1idPEUPd/z8y7/k4xf/kCy5ZJ4/5/L0+/yb//C/8uvPfsZm90DbjZysnjKfPaGs7+j7Hdb1OKso5uc8vXrBxdl7eK/4xS8/Z7V8SjFboZRkf7zhWN0ifUoQxIyuZjA1i2WBE56ybnj3xVvO1s8IiWkPLdvbe3rX8tXtZ7z81c8odyV5uKbrt/iIKcqAkHl4yW999B16ZzBCYISkOla8efOS27sDxjrm65Q/+7M3DLbh/PKc88szRtPwR//hP9J0PYEMSMKYn/77n/LxJz/AmpBhmAZj3//xh+yODWa
USJcQypiH/QEhArJZztn5Am1q9tXA0+fPyNOM3fU9trecnZ4zS3L86Hj3+nN8bznP11ysF8xmkvJgWBanCB/h7CQljaJ4sjj4kbZp2L1tud49sGlqjPQEmeCz158igpAgWBDHS4p5waAt2/2RQ7XhWG2wOuXs7JIskwxjzaHcsVw+JVCG/eGOh+MGHUR89N53aI/X+Ls9ednx4fNT0vULPjhPeZJrTlXNeWIxLqHpYm7uWn7+lz8n8QnvX73Ph+9/xPvvf4STA2GaYa2Y4GNhyGe/fImxNVXdUVYNiIFAeMahphsGjA148uQZi0LSdw6tLWEYoFRMVWqCSNAOA+/eVnzy/Y+4fvc5XdWSqJjnV3PevHqFcIooipGRo2lrvvrqDUW2JE8ybu+/YLfvaKoZZxfP+ODjK45lz4v3v0exKCiKlLOTgijuOG5KwihHyoSy2uHVPb/4+ed0rSHPC8J45O7+QJrnLJcrVos1zgTsD/e0zZRJGIaW04uU8tjhnCXPY+bzM7QbGAdNoBSr9Zr9viafJ6RJiHead6/f8Ad/8Mc3P/nJT/6X/1xf83eiufqf/uf/4Sef/OaKru/oW42SGSLckuYjKpgoQ92wJQgNg2kx1pMV57y73rI9HBitR6mczWacOt1UTn6SfcvF+eVkeLWTl0hwRhDHhEVFsVIsFys2d466bem7CKMj0jSlOvZkWfGYzTQSRVNGy6CbaaI7XxPnCUpK+rEkjDyXl5ccOkE19NTDA914j2DigDR1SVMeaA81oYmouwPdUVJtHU2lwQXYHpRWJDJjPr9gffZdjt2eqjti0Ty5eMZhUzOfhcQRHDY7+qOkyM9YzN5nPf+Qy6s1v//7/5izp1f40LM53jIPV5wuLlidF6zOMuJYEgQxoLBu8i3EcYCxU5ixNZYgCInT9DFAVeK8wzNSLHKck49G4whBgB57ZCCJk4wsWxCHCfM0J0tSlvM577/3jFH3RFGAlArnHLvtkThKCMNpra7HgX5oWSxn39ClunZktZzjTYTTknG0DLpmli+RAowxDL1D+AxvJYFKsXbEcM9s0aJEhh5hHA1hILi6OifLE7I0JYpComREuIAoNBPqXZ6ga8t8LTHG0XaW9z5aYsYEbXr6vqM5erJUYsaRpj3QdgfSPGFopy3RZr/h/uEB7yWLKMeJEec6IhmQx6eMPiKIYrIsoogUl4tzVqcLklThnWaxnFPv4kcycMOxfkXfOI77HdkMTs4z8lnMft9x6DsG16PtwLs3d7T1A4HXKPyEexUxd/stx3qkbQ2H7RHp4GR5QRYVDK1h827H2fkpKoAojlifPUMoz6svv8JazWyWk89X7A4lIvCEUUIULSniK7wRtE2LGQ15UHC6vAJr6LqedhiJwpheG4RwyHBAhjVSxAgbs5jPmc0SUIJsFpPnZ/R6oKz3tF1Jr6cwUqTDogmjOabvkYAeRjb3Gwgimr7lWJU0bctoPH4Y6NoSZzUeODYdf/6PPuPyVlC81WwPA8bD1ZMPaLuR16/fAIJintG3Aa5ZEptLrNfstx3OSqSQf20wLb6em4Of/KLToMEBDikEwzCQ5xEnJwWnp0uM0USnHencE0SOoQ4wtiTPcopiTjFPAYHRA1Z7hFPEYcAiPEd6zdhVmKHmvcvn3G/uuBt+xr6/QdQXNKV7zOT5m9fXm6xvEdFMBMtUEC09f/wX/5pPv/h3ONFPcl5rqcsenEA4hetDumNCls++8ZyhYs6unvH08gXLRUEcCtquI5Ah1jU463BOMSumrfPQToHMZ2dzHh62CAaq+oFhPPLk2RIVdRzLkigWXFymCNWShmsGXWNcTaAcXZmSZUuidETFDboLSPIZgQoJA0maRczSCz58/xPafsv95i27g0bFlvXak4YJYkwJ5YxDucH5hDDOiZMZvS45OZ3hnKAqG+7uv+Li9LuU9R1KOeaLGSoQiPA45eWIiEAW1MeS3e4NTd3Qth3GjBN0wUfsNyN31y3j0HJ9/RnPLj9mWVzStpq62XJx8RxtOvbHBza7O0btKWbnVMbxxfaBV3cbhuYD/t5v/D7vffghRmu6ZkZb7qltiF2ccvmDT3h6dUk4nrNarlmepvQm5uVnrwijBOsdN7evCUPBMAryeUyUasr2BmMG3r3+kps3W0KX89/+d/8Nr79omeULZvmcpjW0XYjaVJhDxXG351d3W5TtsPKOfrxlHFvOlt9DScvp6RWj0bz86hcMNuG468jDiCIRNHpPtj5lvbwkUDHGCpbROTLSfPX6K8pdzbPTF8h1TTBcksVzkiShqVukcERJBj5kGBwPDxVxGiGYtkneeYT031IxhSSMU569OKFsavrBIGXEajXj1atbrt8daDqDDCKk/HbLK+QkXxdMaok0DchnAVXZE0UKcI/fB4RDyAnAZa3h8mrBzfUt202N0V+HED9uvx+3YFJJjHYslzOWJzFBVvPZ/f/JOFrOTq5YzE8IAsnLz/6CiydnBFGEkoJ5rjhUt/zxb1xz2Z7x3Yf3kEowuCN9a0nikI+/9xF/8bNfsn8wRNESFQg+f/OHvLn7M4r5nLOzJ5yeXhFFCuGnMzMvTjhZXSKlRErPsTxyv9mw39es1k+5OD2hWISkuSAKFgSBZbVekiQBw9gipCPOa9q6Ax3y7OlzFJ7FTNG2Rw7lgcvn30GEjiwXZIuEII3IooL+uGUeF7z35CN+60f/lP3bI/eHa7wbyeKQLFckyZw4SrHiwKb8c+J0TlYIDsctNzf3OBNwcn5KluY8v/qA73/vx3zn+8/4d3/0Z5THPSo0rM4jDnXI7k2NHibLwmr1hEUecPNuh/Uh89MnqGSJGz3HcodH8b3v/wNmecrN9UuiMOTyyTN+9OMf0jUDDJ6+6TgeK7rG8L3v/whtO/qxRip4/8OnmDEkSy5QQcGbh5cEa0+yygnTCClhuThBCE9ZdQzDQBRKvrh+h5QGPZQMXYUbI252n/Lq7VsQkufPnnB78znWj4zjSKwyLtdP+eztK9brE4TT2GFDOm55++tP+fOf/gW7tw+cZzFFXlMPK+ohQEUJH338jH/yj/4JLz7+kHS2xPuYOD6h6ffc3V9jjGa1WpPPYs6fxSSFBtHR1QY9DnR9RX10eD3jx7/5W7z98o40PuX05ITz8xx3NHz07Iqh3XN/c0N1MJycpZTVA1GckKZzymOJq3sUCqHAqynU/uJihdaasqpRcYINWpqupe8VTZ3Rd5LXb264v2upyh7repqj4d2re6p2w/Z4zbt3DyRRTpbOWCxmSOl58/aa9foMoTTHcsNXX37Ow+bIaKpHMf80aLl5t5kkp93AYVeS5gld19M0FVW153C8Zhg1Z2dnnJytyLKIw+6Bf/0Hf/7/2Fz93ZAFekPVvUPJCCESxsNIkGxBaKwNwc1RGKrugLECQcRuP3K/O1APN4+5VymbvaGYzzBhhB4ND5s96VyyuenJ0oh8FtENDSrMSGcCSUAYB3zw0fvs9m95uG+p65LFSjCMDXo7Am4yH2bn3G/eMJtngGLUgiAaMI9TLms0+90t2rsJSmEEQi
xYz5+ymGek0RtGbZGkVIeGF1cfkKcxgQjoO8+xrdkfdpOVLsmZp2cYoTGNRVtFKhPGxpAEIbv7kjxJeX71CW+7V1MIn7Bo6wljy88+/RO0shybikAVSJGSxAVOGvADUmlmRUbXG+RoJv/MqLF2IFABIgxwDqwDEUAQeJQKCIJT8CHWTXx/gWAwGhU4FqsVQRjQ2y2hcmT5CQJHoHoEJcVcsd30j3+PJ0/nSBFizWMIpLO0bUWexcRRgkoiGt2RxglKOkY9Zcw0Q0sUB3hvGfuRrqnwK0ugPG/fTXSvk9OYthEUM8hnMVKeslykOG/oWzUFM6sd2IFPvvMJXTsw6pF5kXE4HGg7jZSK1SJBOU9zaKb8sSAjShUeS5onHI8NddNOxCAT8tWbz9DOspwvMT7B+45Ajngf0OmYtttQHQd6bfHCE6SSZHZEm4SiWDKbPaFsG/QwIEaLkAFpeMaxqUjiGEmC6cHZimCYE9oDcaTI0pB92WHsDBMowihkkWWEUUzZdgSJI44lQiesFynF8pyyrimPO87OFgQ+Zr6YIWLBYDxJMmO5WDCfxYSR4v6hZLXKQAn04BlqsHbEG4vwnkAIomiSMhR5gZMe01UYaZChmqQDUuNcA1ZghUD3BpzHyyl8Wo8j3k6bn3GEJFF4b6bPVyF5kmDd1/QtQbFYYHBsdzuQEIYheTLjujlijSPLQIvj5F3zUB96ql1OkZzR1gObXYkZLXm+fERxq0fPm+PYlTiGx+Lt66fUf8o7F0IgpuH21Mg4j5u0gmjTUzWWu4eWZv5XBKnBIdDDSBBZqkbQ93vCKCBJZkgnH8MTE7wVlMctkYShD7EuJgzBe4HWgsFpgn5EOUPwGHb6n1zi2yn/15eUEl1L7j41HIpfEISOYYSmml7cSbxA6xFrBKP3tOMGGaYssxwVzojVHKsFh7IkySLWqwucCWhrjZIBcQSKgNBnHA4bZvmCNI7pu2aS2ogVYRiSZSkeP3mdtEeJGN0n1O0dUTKQRTO8mzH0Ac53VIc7gmRERSlGtcyz8+l+ER4RRgwm4O3bnrKawArr1SXG7hF9SBAlqOLRT3AjOT2ZEwYxo+kRdso0PDQNh/KIcz1ttyWMpsJ9HA0yzjC2R9uOoRcI5/nkw0+433xJliymF3lxStU2RKFCS42xPcYMnJ8/pen39LpDqhgvNDtzTVXfo5TjOx98gmvPQEzxA6ON2dwd+PzzDaezE4pEsC0jzp9/wJtf3qOFJLYpba1oqy9Yn32EkAn92IP0nD+dE8kZQ2cx9sD9wy1BGKH1SNM1HI+K0w9WhLOY2AuUzzje1wxDh+kNQeCZzSKavUZrRaoCTqKCF89fcPvFp6yiEONgMJKrc0ddS/b7lr5zHHchP/yNM2bpCWcFJFGP3lb0bkEY5ijnMbojUTNKD4tiSaLmqOiChGvksx3Ddk6/n6bCaZ6iogJmmuXKgtjwNUcGpmgEHrPlpnPp6fuBvteMusUYwzi2XJyt2W5K+n6cmikhmKI5xQSi8ZPs0IyQZRHrk5RiHnNzvf/GVznRQadngBQCYx3DYL6577+W4kopcO5bY+bXTeD07JCY+B12ectp9oTd5oAXA6Ou0FpwcvKc9ckJ2jhwfvIpJXOkVEjhENLQ9RYXwe3NF6yzOc+ePuOHP/r7vH1zB2qk7no2+6+QkUKqkTjKwElev35FXhxYzE+Z5SfM8py6/pz5PKesjjR1w+p0RZwKjtUd1JNMXLoLtB1g2JNEBYvFms3DjqYKSaOUaC7ohpJUZcgwJClCZjZGhgPDrmGeXCCVp9MlNhb86Ic/ZpEtWS8vOD3NaI4F2VhQNRVdpYkLQZasCGRH1VbUtSaJBqwtHn1tPdZaorjg/KwgCDxvb74kiE948f7HjP12sp/qJauT57z3mx9yaO6ohoqmN/zO9z5mHv+KVleUbUXVwOK0YH/Xcrjb8IV7xQfPn3P17JT9seLLdy/Z1edTtluqybMFM5+xP76k6zqW83Nm+ZpB93R9TxB5tK4wxnHx5IRN1ZMQkShF+n8x9yY9smTpmd5zJpt9jPGOebMys7KKLLJZxUHsRksQ0Oof0NAPUm8EQRvpl2jZGwnaCAQhsAc2u7lIUrcAACAASURBVMgasrLyZt4pZp/MbTyTFhaZlaIErst2Abh7WHiYHTvf973v80pBrA2Lk5zu+J623bO5cyQZxDgipcIYiU4b7h42HOvJhnG6PuXZy1Pa3TgBMqXGAc8/esnYdziV4FTKf/jlWxqvqdYX7LOE/3jnOQ4tMhHMThYIO/Bf/vPfU28bLs+f0Y0dm+2Bh1vPi1cF49jS9Q2jGykqwbLMce0DSd7x4sUTRFxxe3dDlo4QNNfXb9g193h/RWkLolgxW+Y0bYvrI0WSMHsp8OOBeTX5V52dgpzLomSMI6bUVMsnnK4/oWnvkEgECuuh7g27/i2jCIRCYZLIfLFAyAptFNaNbHYdWXFCMVNI7RGhZj7LCE6S5/njEKSgPtQMY0cUUC5mHHYWrUtgymoNQVCVBSIKMjM1A5p6w/FQQ0hJ05QktTTtntv7B7yXzIqSJ09f/JN1ze9FcRUBHwdgJIaW/a5mvoS8yFAixzkzGdmVIkZF8J5js+Fud88Y7oFIDDn7RoOSNOPI0Pfs9w2zh8jDw8h6tUAZweB2KF3hQsHQC/qs42QFKI8LA8MwQB1w7siws+RFwny2QiWSut1SzguIYK2lSKFve4KfzLMmUxQRDsOIkRptMpRIKNOSWTanlyMhaLrkwKq6oJqBEJ7d1nJzFRijJzUKmRqyJOfY3aBjRIqUXFQwQCYzRuvB5CwXZ9wVH9CJpB96xs6SpB2HbovXkiFYpFAkZnqPdQODG1FGMVqHSSekNGN4NPELjE7RQuJDoB17ophkRyZRaFnQD3bKHJGTc9f7QK7V5FMJlqbfsZ5nIKZxqlKRrj0glSdNFdE7og/kaTlJQcL0cFRKIYUkuAgmkhiFmhdUZU7d1AgVJ3qdmvS90Qe8d4xjz2g1MabsDzXOD6zPLtnvHFI0GJNQVQVG56AG0ixgm56uO5AlGR9/9JL9bqA+1CzWin3n2G+vH7uYOX17xI09i+opWZrjfEdg+l7s6OgbR194+uHIMDRobUDqKQCa46PZO6G34OLIMPT4ECZwgEkZQkP0nkSVzMoZ+8NAmjuE9yipMemKw2HH06fP2dcd290GKQYysYYkIU00iUiwwwGlF4QYCFGQJyWjVGidkCSR3Bgyp3h+eUk0JYPrMblmtsoJwzRp0EoQhEUnmpPFKUWmcK6b/q40e0RmO7wacf0OpQRlkeAjJIlgsEdMOhl6E5fgvCdLC/K8JARH0zrc4AHPMEi8C+jEo7TGDQPBBmQwU/acj/joEGa6fo2UmDQlWgcCdJLQ9ccJtY/AO0GqBXU9TEW09zgBaZISYqDZR44PgSxT4BXNsUMKhTY5MVrGUeBsj++PqK5ByG9Rz9+THwGP2sBJJBQfJ1h8qyeKE2xCTGSz0Vm2hyPj8i3GFrhhmEh00j3m/IzY0YJXKBRSThlzPkZClNTDAYIhM
QuyRHPsRwYncF6hg8ZojRD+2yTU/8/xj4srISSuj+ze9zQvH4hB4oZI3wSsj8yqEmdHvLU4F7DWcThYdJAk0pBlA0hL33QYsyRLClKdc/QNwmuii7jREYYUGRK0kBAm5UFTHzlZLUiyHK01XTPQ9+009REGN2gSOWccHUVSIVXK2EequaA5HBFBIDEgW/CKgAcFUikOxz3tUOADmMSQ5TC2ETFkE9FRjDjvCU5jVEqSJBCnddCOAR8CUgrSNAUxwWOGseXYHCnziLPfEiPBhynA+uz0fGrO6RSTzFByjrcjSsFimZIXkqAMd3dX+FBT5Cs8DXmiJsCBMKQyx8WKpExwsUd0nnm+ou4rjkNCPw5sjnCeLOlZonSC1GvGVrA5fqB8eYGMKV3bg/gWkOER0pLmgSQpEFLSjwMSxaJ6gqKgmq9IlilGJtzddiRGUz/skcqTz0uyRNH2DiMFWmYU6Zy2dazCjLEP1McR1AjK4ryHmFJlp6RSky4rljMwHIhXEkRCcJFUSVKTcGg7SDXz+YxUltgQkHJg1B8YTMuoq0dHoyT4iBRM+PLvaC3fkjr/39c5AsZxJARPCCMxeoRIJkz2ocN7/5hDFb57T3yMLlBSEEIkzQyLZc5ylWGMeiyu5CNI6LFIenyfc4HDvv/uuvj+xOrbwurbtWKCXEi8PhKyHaviDCkMUg2AxeiSH7z6CWl+xB63jzCZhPl8Kq4gQvA47/F+5Njecbe54uZ+Q1muSYvbafphHYPtKXOJ9xC8I4SGzfYWUzgQJ4DEjn5Sc5gUYyRJBqt1SjNsOdQ1gkmFomVDOzQksiBLEpbzM+5u9gxdoMpzskITpEdFSW8HHA6Va4wWCBvQSfb4f5umO+vVE3KVEGPkrr6noWOwAe8MRhi0guAUxmSkSYmIBV1bI2JFlmnKssTbaSI5y+c4P1J3NdJVLOcLxjTgrYOYgk9YrStG0VPbnsF1aKk4PUm4O/Tst3ucX5CmKzKd0Y2e9rhHiRdkWQaHPceuwVnBs/UTop6k6gZFWeY0Tc3J6SVFuaDpWrabe05WK2TvEAzMihV1tyMVhlxpCqXYHjv8rMRoQUgnD+GiLBj6hsFOoBWdeJQwLOc5s7JAKUFapBiRozpDO4wc2pqXn37M1fUHRiFpMTyMYPIZ89UpXYzcbXfsDpKPnhjWqWIMju1uR5S/pT5OJOKmaxiOKdYqYhwY3TCRe1WGeNC03R4RLfnS0XcOXWjmWUAJR2O/IT/Z0zYDDkvblSxWS7a3+wnwlWes1ikRRUTiXEdne0S2Jk8znHePHZKULK/YPFyTJpE00Qhh0KJASYP3lmFoJx5CIulHCHis8wzWU5QleT55sZZzSVEIutYh5aSuEiKw3e4ev0dNXmS0TUOSZIToCGFqtCSJmtRHWlFkOftjzTCMJKZASYH3PUIE6mONiJpgPSfz6p+sa34viiuBJk0XIFr64cD9pqEsfsjq/BMIGe82G+4OO9ZPzvB+oGn3bPdHHrbXlHOL99AdPX2fECM0zYgdHUpGHu57iqpEJxmDFexqR5LvaceOEAxSHIgMHA53SFkijWCzPYKs2e9G5m6GSlLyeEUz7hg2FqU0RkuUXLPb3WNkxtn5ms9++ITNTcPut1+SllCUgs32LYkcsYPiUDu29Q3V3JHqp7TNgcHv6ENDN8LZ+RqJQpMSGDl2LcvZhKDPkzkypgQ5UpydUVUlQkWSQiPkjN225ubqAWV6nn+8oB0sznYQB6pFxu3dPVH3yFSSlyvevnvN6dmCJBGPRluJSQqCSMAojHDshw0iRsAQg2Rwln1dk+USZSatsfeOzKx42OwZXU+MPeerFce2YzVfIqVhs3/A+YGLsxVN07Pd1nhn6O2IEDxOxTRaSpRMGO2AUoYXL5+Rp5rt8Q1RCIpqjsnWHOpbvJMIOZlOIwFtNM45rA8cm57NfUPXNZys18yqFV9//YGPPjljfZoS0Lx/5/noxVOePX3KYjay2+2RycC//Bc/4m/+/V8x9MfHqU1LmkueP3tBqkvef3g9bTr7LcMQiS5jaCT7YctyrolMqeSOhtOTQGRGiIEh9pTzJaaRJNJhclCF4PbOMk8indmw2XiGrubZRyvCMHn+hFHMFyk//ZM/5j/+7T/wq9/8hrQ0fHQeKLMSETVjq+iaI2VxSdv0dMOICgX1tkPjWM1mrOYzWlnzg08+4c3NB0zqWZ4uiMoTupHdpkb3kmo5Z+h7atnj/eSta/sWVyuEcijpkTic3bBYn7NMl/joObZHNtsHvPAIBYmSjI0nmxUkOqftR9omoT7umc09BIEOkpk0VLOE497hh0AcFSKMDG0HIqCLhKxIiXYymAut8N5zONb0tseodKJhBYn3kdEHPEfGPtCNR0wyXaNdrdg/RGx1R4gKY3KCh7G3dP0WyYLD8YZs9OTiFd5NgYrxcUPFtxICAYLHXCvCd1h2ISbUufeCKEbSXJPmgmEc6UZLu7khBI9SES0Vzg0oUeAGz/1hz8n5HO8j+/rA0AeytOTt/jUX5Ses509Yrip+c/VLmhEUEq1SZuWM27DB6EkW+H2f1feP736OYJ2jPvbgNO1e0h8j3kqSNEUEA0Si6widQ+mUrku4G3e0fU0TblmcJCRRE9qEboRmbwmuxbuCth7ZbQ+kYsnlk6e03ZbDYUOSSIYeymKGlJG2OWKdJ195+qHGu4iMS55f/oSbq2tC0CgjSLKWF08u2OSaYdwx+j0Sx+FwZBBHVBowaG6317xYrqnyOVINWH9Dnip0WFE3B/bbLUFAVaWTRFVCURaoRLI/tKRpTlWeI0RCnpU07Z794Zbd4Z4iOeNk+QlVHigSTwySeh/46T/7M968+RWbh3uy9IIffvzH/O3P/4qqKnj5g+cURcoXb/4To7eEAIGBY3vPs4tXpKpkt9nx219+w/Hhip/85R/TD4HN3YH5yUf86A9+xDwp2d2/4fomcHt1QMkLZvNTimrGMHZsDw3vb96SK4P2nkTn7DZHfBiIwmISw1/82X/LN19/Rd1KlBY8efoxX/76a84WL3j+9JT53PDv/9M7VqsZNx/e0TU70tmKZ+cX/GJzwEpDbgM3d98QfEVZPKFp7nm43XM49kTZMV/mVMk5YTzhcH9FfjphlP1Ysrvz5MtA09Qsl0tOyxP+4atf8+IHF6TpCKHm0A2kquHm/gMqK0lOT6D9aKK50U4+Fm8fs64kggDi26bGVHhIOU207eBJ9ATPzIs5q/klRDdRxGJEKSBMuU6Cb8EqU3QGMZAXKWWVUpaG+bziWLe/o24SHwm400rgrOfu9sBsnn4HtAnB8/2qb6IPTvem0RolKkRYYVLDxx9/xL7+gLOCzJzzw8/+jF/+9v+ka4+AhnPNPK1QeioyvQ8kKYSHBqRg233gl1/+B2ynGWNNkRuSNGU2W+C5wg0zYhhwYY8LNfPlC4T0HA4PDENP1zq8UyxXM+YrSTmPXL1+T98nFNmCNEno3dc07Q7UKXpxxrw6R+uvGJ3Fh0CWZzw/X3LcHHl4+EDbN0htyNMFlWlwXYsHnNIUheThrsPZ
DZ3t2EmJiiM3H3YsqzWXl08xZc83rz+wXM7IzIoiOWe73zB0lpcvXnGyPqWpm2lz23myoiA7rfhw94bx6KnKkiyBcWy5fv+aMJTsj0eOXUdZRr785pcsFyPDGLGj4HTxBFs7ynzBrDIkRUpfv2d3X6NkydnpAmUtKQOH0bHvevCW5y8/YrOfJr4mScjzjK++kHz28WfEYDnWG8Zuw9mywGhDZQyZFtyMH7i5ypmfzFifnJImGi0Me6nw40jvHELByeIprz66IC8SBjvy4fqaz3/8hyT1Dnt1y9XVDaIN5KlhG+HgofrBK07XZ9jB0jUdTkiScsHZ0xlO7rCy5uWnLzgONffdnjzNOL8458VPn7LbPIAcEKZHKM3Dbstvfr1HRUeRK/Ynr3l39XMuXyacnyfMZ4K2O/DZZz3dbk63XTEeKvpgOPjdBKtK1whzwXJR8ObNP9DWe4ILDMqiLl7gRELTwP1mR5TfUD/spiZwnpAlC6ILzFcVXd+wb245P/sUpVv2D2+xrqeqUhIdyQtP8CPj6FFKopVk6LtH0q/m/v6ew77m8tmKojRYD0onJGkx3dXREWNP3x+wIxhl8c6iVYmUNdpInBvYHR44PXlKEJH6eMvQ3TO263+yrvm98Fz9L//r//xvn386x48JVfqM8/WPOFm94jdffsHXb34N+sBycY5MWg7NNcfmAaMV3hvqg2N3L9g9CIau5+52i4yKRKXIxy+37Syb3TRu3RxukSad9kmmJ+gtX73+EjdInB1om5rN5kBRlTx/9gpre25u37DZv6PeBcbB4uyUo3F99UCRn5LmJUmWEkm53zbUbUMQHUIMJGrFcnHGdneg7/YkoufJ6hzr9vzm9Te8v96QphnVcqS78zT7lvp44GG7Y4iW1dkJJikYB8knH/0U13nyTLNazjg/ecr//X/9gvPlD/Au0PZHRifp6qmD7npFe0iINuLGSBSSiGB0DqNypAJvBX5QuEGyrw8cDg390CL1yPpkjRLf4r4DkZ4kZzL5yoDSEa1T0nlHwOLHyHBQXL/fUS00noEgHIvFCi1GItB0R/q+oyjmRBHpu55hmLoH1TzHOofAkxhBlqe0fYOPA1U1Yz47Yeg1xJ4kGThZ57x69TH4c4IYqRaSfAb1sSUxJUWWImXA+YYoptBa5wNap8zLJR+uvmC9WkyLeZ7xzdcfGPsOkwSyIgEETeNYnhgOh4aHhw1D2/LVl79leZayPr0gSWb84ldfTPKP6DnULZtDSzEvSJWmnC+4vHzOjz75CV//dkN1cqSaJcRg+PqbLcMYOCmXqKgZho71mSDEktRoYhzY7m/55OM/4e3rr/j6zRsOhxphe05mFyR5wW7f8O76mo//8Dn5ck6zj/StJYotx3aLkp4iLUlMjswHTDpjf7jD2h6tJfksZ3UqqeZgTGToHMe6w/nAvm7ZbGua/kgMa6wAFwUCw/rkhBcvnjPYwGZ35PbhwGx2ympxxqw6oSxXSCPZ7ne0w5EQuymkWuW4ANZOEAitBEKmCKnxYcDajhggzSRaayQG5wRROO4ettgYGWNg3xyoijlFOkdGSXQOJR3OR7y1+NHixgEZ4e7fdKQ/j6S/AaVLNvtbirTCaDUBPw4jOpumSipUFOECZyG4KdNO8C0wQjx2wx7lSDGihHrMlpqgLCE4knTqkkkt6fqRv//wnwkux6gcpRSbmwNGGkRMpulkJtneO6I4YMcR/CRPyJOCpTmj1Dkywu3ugM5mED1pnDHnI/b17+RO359SfXd8r9CKj/5PKRSL+DlfvvuCfhjI04Tz8zW3t285vzyBWHB/ExiHMIVqS0UmDFU247PPf8IXf/+GftwT5JG2aXn6dIXvFmR6xenJCUJ4Xr16xW57pGscs/KMefYpRVEQosV7ix0MaQbPzl9S5iVd25BQMa/Oma+eIpOUw+ENXXPPfLXkcDxye3uNiA/Mz2Ycj3bKXpIZi+KcJ+tPMFoiECi54NnF58DIy2ev+PzlH3JsDlxcvCAoSzvW7Oua9hixtgW6iQwqKt68/SVSJChZIql42Nzz2aef8vTykrJIGfuW7Z1hPluzebijb1uWszU/++N/QZ5nJEmOtQLnCv7hl3/N2AnybMFquSRJI/P8B8yLZ8yKc5QQfPrD57y7O7K7b/B9AAPXV2+YZSnQcehuydMVT5Zr0mwGSqOM5/r2C56e/ZjQaer7I+cnK45tz3rxjKcXn/Lyo8/RsuTDu1sStWAxO6MsAyrZ8OHdPYftSN8ZurDh4eo9qVacnlyyXDzlr//2f2PfQ3FakC407WHP+fkL/vIvfsasMDSHLfv7hJ/+6U+xznF3d8Pd1Q1/9Pkr7NgxHlu6ZiDmBVKdIWWGjCN9d8eX765YFVPchkkUi4Xml7/5mmp2BtJjio7P/jyj+3BCDBahAgjB/V2P9+oRlS4e5YDqERoxRX0rLbk4u0SSkZkKLQ1XH7a8/uoGEI9TIPX46m8bElM8gRBw+XTG+rQgzw13Nzv2u4apePvexAx+l18VIlII9vtuysFS359iTZ8ppcCNgfmy4tWPSy4+UvzN3/w16/U5q+U5s2qN1gnOjbx9+5rUVBTFmtE6uk3NX/3ot8xvCj5+e8Hnf/ARdS24WHzMevYcLUvuH36DCCl3Nw3j6PjJn7zk9Zu/I01TpJqCj30IdOOGvq9x4YjJPPNygohsd9fc3d1w/aHm2ZNPScwky49EdvUHjnWL0Zq8gCwfub3dItVIkhrGseer3/wSHRYk+ZInz17x6uOP8a7h+upL+mGgH49Yt+Hi5CV3/YH3D2/56t1v+fUvfsXPfvjP+Yu/+AlnL+Z04sDYWO5urxHRoGJKmZ3wX/3pv2I2yzAqQQLrdcnZvKI/TBEwdddwffcbTlfnFEVFkmQolSKkxfotEKmKnPPzGe/efcnmMLCtB7ohoql499s3bMYH7tsNH95veP365yxP5kSpiRjO1ue8/uoLwJJnCdVyyk9FQhA9Y9wRZc1yccnZ2Zq2e+D27g1v375nuV6hE0M5W1HNzrnfDixOLlmfLRmd5e//7kvWxYLmaMmyyX/bHB1PLk7YbzZcX13z4eo9V7fvSJITvJsk55rAfdsSx0iSzMnKNf2xZ3v/DqNTFmXB2aIkA549f8n1zYbNpma+KDgeNuSZxMhpGiqF593bOzabI86mpMmKsgosylO0UrSd5cP7PR9/tmKzf83d3QM3Nx1NG7i9uiX2Fzy9+Jyf/fRndHc1SnQcDlt2+xbrTvnZP/8j9k2N9SkxVARGbg43iGQgL6AsFEPv+NEfnJNnmro+8s2bL1kt5pxdPuPs7AXL+Tnt8MDD7jWZmTMvFyRGcv12x2o2p6wKEJF3776hbQVpKfCxo+k2mERz+eyS8/OnxCC4vr1mNk85OZ2RZhCFo20CIl4yW04xLrt9R4yCpFBUVclsljOfz/A2I8sc1t3g7D2ZTvl3/+4Xv99Ai//xf/of/u35Uz8tiskSYyJfv3lN8FCUFeU85f7+wGC3RAfRpxwPgrbxeBsZe8HYRnxoKfMVLkaaoWN/PDJ0KaPvAYc
zmOjxSFJEtNlnGGoAhFEr/zJkk8lZGUyhOGxO3rwHSssE8FmColLiRiJyhtiRCJcToScsC7WfCds0BlmKYjRiiW5ZLNi3OGU+Lx9kQYMkhHUydQDUKkuUlBwocwe66kxJYKKTVZBAQ1Rs+giZjDvB/4hCAXQpGf9qZOJ0eKM/xGF2mmA6KAOXaUUvotcl3ALA8GvsnczfeppwJGCqQUKKkQ2vDl+G84pkdGJhISoRTOh3kfKUE6RVx/z2pVkpNnf9hTlDVNXSMVTH5kmgaSCFSLEq0UWcUnAIbE2BItFVoZNpsN3fTA0Xfsp1u+OP45r9q/x9vuVwzhQI4RfztwOPS0F5LFWcn51Rn7445xHJAqkoHbuwdubh6om4ZzAX03cdgOIBWjS4QYmFzkcfeWpmyQSAotuSwXtPWS4XiibSuurq847racrVrOr69IMXJ3d2AcJ9rG8umrDZ98csnL77zir/7inoPLSF2wWBl+/avXnF9fQ5gYDhOPH0a0Mvzgey9n8WcYkKan0iUf3ndkYLmYqXR1W5CiIKbEMEa0WnLY7hl6B0rg3JKXH72iLAxGzpPSMAzU5Qo/gcNztztw6LtZdK0XiGw4bW/5cPsFpV5DEhz3B1xKTH7i0PXkCKYRHI4TJDNftp9ExTELptEQY6DvbsgRunEAQGvF8TjNO5Ii4nzHKUtyaInxEWssQXr2/Z5aO45Dj0gKJSU+Tnx47NlcbBDGMHXgxpHdsQNZ09QNOUt+/eWXCB5ZLZ/RVEswhpQUH7Z3lLalrs949cn3cdkAniQtOjfUesnd8Uira6SVSCPwIfLyW9eI5CCMVPUKNyrG6UAUIy9NJE0vUbalsQtkTrx7+A2/+OwN6SKjrGVz/Yz7xwOh6BBRsSrX1LohMfCvf/rnQKSyinVd8aJdst29w586pBCk6Ejpm88y8zkuvskD/u71N4XBc1TvCbqTEhKoyoKmqZgmM58FxlO/2DK5jN3cY4Nl3G6I0VNWDcR5L8vYuRHST3IGDgmBkIKYQczIwKeCTiJEevLMzWdlzmmWvI+B02GiqgWr9uoJyKJozYrL50uGYQspUlj45NMrHvf7+fvT7NFbrCQxCYqyIsRAP+15vN9ydf1tjt0tJ7lF6UTMh1lczRx7XC7OOB4fKApNWS4oKklVLGeqp11wflFwGu7Y7r5kfNBUlaRtanTWPL/4BCssbhpx4cQUEgZFYUswGVOU/Pyv/5xn189nCEiWfPR8Q98v5waWtZiNwsqSvltCOlGUgvVmSVFY+uOCsigoCsVyUVHYlr6bG37WKlJIc2xwSohcYGRBvSzw/kQMksqe8+qj7/D23RdM45x0iGmkKA1uzHTHETeNrNcl9/cVV1drnAvsDz3H48OsuJi6+S6zuUCmJVJ6onQsc0Cp9ikqHpAqPnkMQakK5wRWL/no+bd4eLgl5jUuKlQICMz8d04YmuqMulxyeb1hf7wnTAqJpW4Nb25ukHqNKQ3CjDi/Rw2Otul4cWG4fl6xfdeDC5zyRFIKW63oDjvGwdN1J8bRYcWG37s6I4Yj0ziiRM2iPWexrjjsH7g/POIGydllC1I/Ne4Ti0Vmc7lieP8IQlFVLWW5YlU/o6lmifL2sKNdXJJ5JETHcPIMXeTFq5cUxYkYMw+3D5wt1uSU2G0fGLqRZy8vIGqWiwKlI9O0Y7+baZxJzuCnQjWkUvPVVwOL1QprGl6/f2BNSykDUXlcGPj1L19TlAtiECQck+952G8xDayKFZW1tJXlNgVKBVImQhy533Ycd1v6rpvv8S4SQmKa3OyZFJqmndMpF+dn1DbhveTm4YHsVhidscFx6LYIbdgdHhimR3ycGIb/H0iEhRJUSwUi4aOjag2mUKSc0CRMqTh2T0EcqchR4j0YZcjZPS1AzIsQgtkrIXJ6uixDDOOM1rQaU5SEnUcbiVJivqyIkqbWJDIhjdgiP02PMiYrhJCEaXwSceanXakGrVd0ww6p04xujRFERJuSEMW8UFdYUhaMbiDG2bUVI8QgkXomuCA1dVvTnXZYqyltgyDifY+xc/dNqvmCrlVNChAyxOgIIRMYyVEi8ze4ckEIsyg5EBkGRWU0ZDlP5mKkKmqO3YjzcwErTERmSXQBZecpxZgzxIRUEqMlRiuqUkKeJXzGCkqtcOFJqigSqnSUlcaJRBQZgsDKikPfY60gpszkPCGM5KDQtkAyT1akkkg7k9ligmFwYDxCaaSS2FLTrARnTc1iUZD9HM3YnDUoJRingFACFyVNU6AUYEBJwf5xT2yXWCNIwjGlA1Vtyb3AGIstDCJH8pCR1pODI/hIVSxQtUVqjwqahWwJKWO1QRcVpqiwtWM8fiB4T0wWKTJajiyrGp5yvYlEsYyINDClif0kESninCT6HqUMQtdIJXFhQhZzMRC8IziPLS0ZgQ+BoipReu6QZgFSK3wYUEIhxZN9RUoycga+yBmxnsJcsCk9FxUICClT18UcS9UzBGI8DpSmIaEIMRDjRN8HlLIoI5E6gJzhDHGaYyRpKrC2xDYZXc53zPlZF8iUZjR5GJ/2vkpUEuSs0MKQc2TqE7UxtPWahZGk3tD1B3KGslTosmSKASEUShYk5NO/fXbdCDEXVEVhyMxTJ5jhE4L5/7gsi3mZFQ/EucjMmpINKSpyUr9dls9PZ4l4usSlnH57cfsGs4xIT5Ha+f3WyiBU5tF/yRinp4u2ZOgizrmZLBYS0z6j1UQOs9fu8aHHlpGqVgglQMwS6+XFCisLkg/EaUJlhdIzlVBrgxCzJDxnQfAJ5068l5+x1Nc8jF9zmh6J0UMKiNIQRUsMdgZYWEuIE0IZBALnPL/69RcslxalNUUhMUVCIBnHgRQgZsGxm0jJzIRLa4mipihrYuhY1CXX15f8/OGBKQoWqqDQglUjWJ01lFawWdc0iwXLi5pfhIRGs1quUaXlL/7sp1y9/A4hOLyPpFSAlGir0FJhpECmQH9KdMeJJDJCCsLUU1YF0WWc9+x2jtW6ZBwcwxRIQmPtOWV5gXMDITpsBpkTbojztCInDkcHUuL6CVXVZCEYhx2vb77k1bM/xGbB9nhk3x0ZpgHn8//N3Hs0SZem53nX645PW/bz3dM9DYwBhiBEMChFiNqKUoS01Eq/RTtJv0ZLSVwoGApFUHAECGIw09PT5rP1lUl//Ou0ONWN0QbryVVFmciszGMec9/XjfcR0Qa2dYPAkCQZJkmJo0cZjfdgbWSkp+9aXAzkeU6W5cSTRqqMSEeIgXGcrjs2NFRVRe9G+uDwfUMzDOigpwIh9AzeE5QgRDUlz9mBsW2Jmwdm/UiaJnx4uMOHLcvacra84uy8QpULmv0e5yWJLnjx+Tnb2tM1W2J0uFGiRMrmdMLmglInZMuK07Fmvi5RBGzT4izI7ILUaLRUFLmAsCabPceQ0tZHxluoD3v6Yk21nLOelbSHls1pRyIKjMhohiMylWxOD2iVoM0ZplhxVp6zuXmHbab7ux1arOuI0T8CI6YA89+hrE+1hJDfoyX4XZpn8BN5tKoKjJ7Oa5MH0uVAdjFwOFlc
couoMmSviUc9lRSP/h+EmIJSv/cr/gC1+f8//+OrePydf/xOiBFrB4Z+RFSOoZ+a/sQYyjyjrOZ07YaIRSlJmiYIOW3Fuq7hWO9IUjlJrFSONgaTCI71lpevPsU5xzi4yYstCoKXJElOlk5fW9uTp6vHPLwB7xWHhw1JkqGNRGvFYFt0PENikCJS1z0XqzUqapzv8WFAKMvYG5QUGK1x0dHUOx7MhGov0gKTLDC5IU1KiqKgzAxt7VCiokgn2FKepIQo0CZFJRqda169eEaRLwmumUBVykJIsbbBBwdCIUWC1oq2GYg+I09yUv1IeewHxrEH4TBGs99vaevJg7xeVxTFmixL8KHB+5F6rFHa0DU1ISasVmtc/whIMhkztWYxT9ju7/HBo7VAG4WPI33fYN2IEILZbMX+8JEQPM6PDDaikEQyjDYo4R9VJQO7zQkpMuYzTV7AN9995Hp1jkkDLrbsEyckKwAAIABJREFUDzsWuSbXlkxDkeaMSYLrR5wdsQpIJPeb13Stnu6DUTK6jiST7LcNPg5kRQJy+j9cnCIUAhEvDHXboJRGkrNcQpLKCebyeFopZfBeoPS0oXXeIpVkcJbBTjVLVCPjkLFYzLHWU596ztdLHu5rhm5Spoxjwm5zIksTfOho6h14hdIek2QoJfDjgCkSHAKExju43Zw4f36GOAVicFjreXdzz5NXz/Fxh4sjbVdTn/ZIEVDSoFAEG3FhasYSLYkq0JwaVLSE0eH0tHDx3tE0jtE6kCOSnrbWk02gEnS9Z7s7kbBCSIOPnv3xRJLNUSaSZALHVMf+U4/fi+bKpJp8XWCtRbiAUSP90LM+N6SZxo6OoV6Snxn6Dpra0bUjeTIh2I1RJEZMUzQkWhTkqSbJYBi3NPXI+lxRVIHN/Ymut2RZgdYGKQL377fk64ysLIhSAnukTOhaIGokEiEGHu4O5LMRnXlCmIq725uOs7OpAOybEu81JksJTmBtpI4eJQXHU4+UkaJMyZVhv+/QeY/JPVmecbV4SQyeLCnI0hJ85Pb2gA8O58BaEMKR5zOCHwBJmmu2mz1978nzlCQ3dL7hVIOQCTpLSEwCMcUoS9daemdZJYJiaeiajs4O6ESQGsEyP8P1J7Iq4fLlOePOMw49Pk6Fd15odIR2OxKFJi0kYWwZDw4RIjLxiNyx2+3Z7/fo1JHl6SSFSkqkmaQlNlra2NE3LZezNUUy3SyEyelGhw9Tg9x3I8YrlCnJ04JknrGYOZbzBV3T09U1Y4i8/OQZXb/Hh5Su89zdO5SQuLFjtaiYreYcd3tGX2PSBBc7Grvj8/PP2HzTYDKBVJ797paKHMtUxC7mAhcOHGyDMoJ5lXFZlrx598D5bEbnLYGGq2cFY3vJ269uONYt0QvawxbVfgDbIwJok1Ode7I0EISjTDVdn/J6U0/ZO0hMtORFwf7+MFGHtMI5jzKSEMdp4qgKTJKzO70jSRKsC3Rtj3UnVrMLtJokbMF7RjmSaIm3k1xGGYlK4+Q/QGKUpsgLVJmRJAqtQMVAIma0ncP7FOdSmrqGznK+npPlClW0iLKCmLG5HYhO8OrTBZvThsFbgjATDMJoVBLoxwMTok/gPCQ6EKIGErJkyeVVSX04sNt2DHXg05cvmS0GVBIZvcGJjjSVtAeLDRFpDOWswtoOPNhhouDNFzNWy4LD4YT3AS0zynKGFO+pqoqzszOado/QA1dXJVImqDBjfviTaYMiJFKqH4oiJTWCiA9hyrRS09RSTD8kPgYtf499VkJifT1RyHj0XATF7bsDeTGgkokwejokPH02oaXdGPFjwsNmAHVDMUtYrWc8eXpN9fIa7QX94US92dLsalKVkGclQhvGIDgOI6dTT1OPWDsVV38//j/keU4M02f/05/+jC9+/BnfffeOd2/f8913v0Jr0KmiKsop8kEl/PKXX7FYVJytF6zPliyWM0wiaFuoa0+sM+bzK4wqJjR847lvAs9UwrOXK87PFuRlSbm44uN9zebunlRIcl3ws3/5M+r9a6IT9CfL/u49w7ZmVZaUqzXJvOKrL7/k8tU5dx92yE5w/vIp375u+b//4ld8+skFP3q1JoyBX3+7Zz7LySoFieXUWbrOorpAW3vub3qCq7i6qhjdwBgGqvScvh359a9/SyIFn794SpVX9O2Asz0RQUgS5oszvLPk5YJquSRmlq8+vqNa/ZiQBpqh5+Pt2yl6I5lhMoUNDXZsGMaBLC3IbMl4cszPL/i+8RYyIaqe0bfYxtL2I0UeEDJiEo2WOUKURAntONBuAiFEZJpz6mqkSji0B4a+xqQwL5/Td5JhsDRth4ieLgzsbz9i1JaqKmjCiA+O0/ieY9fi1GdcPX1KtlaIMDJqz/JiThI6YkgYupH7056jG1FOUzcNIYfLZ2t+9de/pbFHlOzoj7d8fP+O81d/xHqWU5lI6nvG/oay2tJ2HR/vPvA3v/xbfvTZ5yyrBUk+I2SCp1cz2l89MJ+tKPM5rRDMsyt+8c96dg8Nrk/IqyuiVNjRYEcFLhCGGmsbgMeYFYWQ4XsRC8RpUyWQj5j0x0eMCCkJPqKSCa4zup5h7Fk9b1n9qKEeTuwPO1zXo1JN9tzSffmHnNoeLRNSKRBOEOX3AIsATBAr8b3sEKYNWZzIsCF8n6sVkVLQNDXWpSgNo6356qtvefHZc/JFikl77jdvqKqMbug5NSfu7gM2OLyPHE87Pt6+4+rJiiyfBoUhwHx+gZANTTPinMY5yW535Oc//TPevf+GPJkxr+Z89ZvfcHV1TXAZQ2c5tUeOx/ckxdQgaG24uLjGuYRXz37EfGnwnPjy1x9YzpeUixLroNkFlouCN2/vqcoUrQ37U83PfvYzbrfvJ89Lpdnu39D0LfPiKZE5u67nePjAq6cvcAO0bUv9cKCzKXomiLkn5oJf/PxPkGLB2fKSU7Plw8fvCBRkqSbEO6w9EUjoakOMCYIEOzi++for5suK7fYD49BhsmnIst3e4J0gS2c0TeTlq+e8fvcr9vs9/eBQCi6elBPRc3Ac6yOHXUc102RpTp6vMLrg/YfXpJmkKCa8+/FYc+y+Js8ThIi09dS4Dr1ASY8yiiKVuFiC6uhdzeGw4/WHPbc3B/74F7/g6skFdf3ANPva0rbQu57Tfse/+OKC7cORm/eB/VHziz96wfG7bym1xkloe8fH+xu0umB9dkFRpty8/46Hh/d0Q4NILfMLy+32azCfInXK1ZPnrFYld/ffcHt3IE/mzMoFZ2eLaesWHM4PnE57Ls/PeP1+x+lUUVUZSkmO/QNNcyKGlMTkXDwN/O1f/S3/7I//hIvzC5aLSJL2vHs9TnEkVeDYfsPXXz/w6WfP8S7Q9Q2r2QsWy4rRt4z9PQ/NRy4+e8blqzPazcDm4UC5XqEuA4f7hrq3eKUIqeezn/+CQ/vXHDc7umZE+8hwbCfaJIaxTnA47jb3zGaO5XJNCPDsas35vERIzU54BttPSrDYE8SGU39H+PACokEbNQ2PY8RIzWJegrDcfDhweXnJp59/yof3JTc397T98Z/sa34
vPFf/8//6v/xPz169QDAdnN5vESHBDZ72GGj3gifPNPXJsd+caOuaIgdjLDFukaInTTWff/YJQhiC87jR4V3kyXWJJmM2z9Facf+hI/jA2WoxeR4qRyIEaR7ZbluaxlEVJefnSzABoXukaZjNw2MAZUGMBX2vub/d8wdfXDGODcFbrq7PaAbPYDt629EOLYd6P90AhECbaQOlE8Hl5RqdGNI85+LqHKUL7u4+opSkzDMiLQ/3B0BOhm3tOe5Hxl6wP9wyDEeyNKdrPeU8JU1zhEzwylOcJbjoGAdPf3IcH2qMKnFO0o2BbT1wu9vSO0eiC1JV0Z8sibQ8fzHj+klFnmlKPePly4Ks8Ag14O3I4Cznq3Lq3qMjS8558/rAJy/OOVutiL7Eip712ZJqliOJbB4e2NkdfbcnOI8UGhs8MuakJoMQJ6pPljCMdjIsx8DQd2SZnqZA1uH7Cbu+2XW0tkFknurcsN9PCG+dZ6jU0A0D61WG1gJCgh1StMk4P8841Sf6sZukdEOGF540STEiR8U5SRLp+0gqZ8yLM3Qi+LD9yHJ2xSw9J/aa+Tpjqc8Yh47j6YHN7g4Zc/J8gdSGdujox4lOuWkNdZwjq3P6kKD1S6yrIBouzuakyRzvDVlasFzOUclkdvU2MA4Wbx2JSSjKApNqfAhsNnt62xCixrmAHUfyTHF1/nxCaTOFWYpE0HXjD6TLCXoBWZZO2HiryNWMNpw4HluO25rTrkHGgtG1HOsdTXfk+vya//F/+O9ZzyXReuq9oMo1p/uEVEZmmUHHGUPXoZNx0vFrzdAe6evIcr6izFNktEifUaXnzMspjLvpGkYvCEpwGE48NFtGFQmZpBUjowx4qejbdlpDq4jHMQwtq9kZicyp8pJ5WYDvUI+UPK1S0mQi6H37X9/w5LsVF2/mhOhp2gE3GLrOM7SRyr3E+xzBRNUTMiBEJEb3A8bduSn/Sz52XjHGHwooIZk20MrhfOTOfklvLdYGgnWI4CmzHN9Cs52kw8NwYrc90dYDRqZcXaU8vV7x6vk1y/mSj2/2bN7f8+u/+w0f3t5wONZsmo7qbElW5mRZRpFkFFKhEkmSSrLcUJXZtK2Kftqkes+sXPDJq89wvkaZntV5wmxpmM1TlquSxbJkvkxRKkyyxN7xcH/iq998w4ebG9rWQjRkyYzLi0uurpecXeacXVYs5pd8/eY7Out4+/GOX//2GyI5OklZnFXoUrJpN9w+fCBfLuiGgfvbW+7eveX8+YJqVZHnBUVS8NM//Qn/8B//Bnc8MUsNz57M+bN/8Tl22GBoUG5gmc24uFoggpsQ616QmzP6g+O4O2Ct4+zimvXFkg83H9nujmhp+NM//uc431IUFbNZiUkl5+cvyMw5Q9PjeksqE+63d3Rdx36/5Zvvvubf/flf4zCUKsNEyJRhGLe4rsJ7Sd+37A43FEU1TTWTZPKeDAOH5p662TIMA8ElE5CkPdK2lhAkq/UCkwYe7nYoWXB2ds3ZxZovf/PraYtvJv+l83Bx8QKTZngBp+ZEPuuxfeSw7dg9nBjdnuMpsFgtQQjuP95ycfWc1WrJbF4ghOfdmzfc3z2Q5xUxQn08sfmwZba6RhhJURacr86JAobgOB573nz3jn/3f/5bHg73PL18xaxckuQFWZXRdnu2mw98+PAdb95+xc3Nl9xoh5079rbm3/4f/5H/8r/6NxRrAanHxsBwjFRiBb6nHQ7ct3t+891viX3CaV/TtTVG5xz3NzT9jhhKdLxiuci43zbUbZz8sko/RglMG+p/9BhO1zrBlAUHk/LCWkueGV69vODyTJFngmo9EPWe1999YBiPBO/wNjJYS5i9R3WXJColSVLSLGfoHff3B2KcqKDRx+nkF0zQmygeMzbFDwh2ISZZX1XlLJYF5ZlEXbzj6vmC8yeviDFlt+24fv4Fu+ZbuvEjeZ7yxY/+C7abHf/vT7/iul7wh/cv8Dbh1NygVUGWT9mOg/U07Ug1TzEG3r57zXx2zfnZeho4NwdcE1ieP6Hp9ujE8vTFOeVMo5OOpmvpOoFRz/jT/+zPyNKUw/HEZrPnyfOSf/jyLwA5SZ6V4N37LeuzZ3jnaZoWlOLJxQvazjErzzlbX9APb/ExpSxyjFKIMXLYH9gea+6bB/Zuw5g2bA4nyiyl2R149/Ubbt5umFUZN7evGV3H8xevuN++5+rJBdUsQ+uE3dbx7OkzlotztFIM45G63lEWJTrxKO3o+p44rpnPV6S5RhmBknPqbkvfR0CjlUJqxce7G47HGuccUgt+/MUzhOrp+p7DvmW3sfzkJz9nVq1RMseOkiydMbo7hq7Hj4HcpMzKFSptcaGjHy3RJXz19S9Zny+wsWe7v+Py6op/9Z//KcHB6XBCaWj7mvv7D+yOe5RO+Vd/9q9JJLy7eWDfSpxeMgrPze41KplBTDk8PEA48OnTT/n85Qu+ePmcn7z6EctC41FkRcF8VTK6hrvNN5PtRFcwlFTFEhkdjI/3/5ljt7HMZnOuLp7y7PpT3NDw4skrkgT6sWF/qsnKyIePDwQgLTWj32Ndw9grutbjrONX/7DjF7/4KTI5UXdbhj7j6tniUZ47SRHrgyeTAe1SirSkWFdsPtyxe1DcH1tiJvjnv/ic1x8/8u1vX9PaHWQth6bFmDl9d2TsO47bmjK5pG73rNcLrp8uma8dbz/e8uMf/Zyz8wVRdNx/PLHWr1ivlphU4WNg6D3dcCTNcrJsxtBZBCluTBAiJcsKLtbnPHv2BKMiduhwtuf248jmoWbzcMAFz9MXz/jf/7c///32XAXrsKcDWZaQJis6n1EW8jEzCFzUvP12RwiGdTFDLzX92BJDJDNLpJh8QMJL0kQgKhh0YGg9dd2RZBlKpEhSrq8rgl9xdinIlyDTklr1nI41F6sUoQ3lTAENSnpEKgDNcas49h+ZLWeY1BCVZLlOMdmJRBsGK+maHeVyRlM7UqUgRnYPjhjBW4ezEecUy2XJbn9LQJKXM0SsOB07yqrA+ZG7uw+MY0+UR8YxxSQGrQV9J/FuS1lJpMzY7zsCbpJfoXDOcre/Z311QT96iJAaTVIKuq4DqREqpShzskzg7UCiExKT4dyCpBqZnRmsixxPjp//WGA7TfHkCU8kDH3DrrbokEw5NkLx9OmnJMUZywoOhz37mw2La3CxJraCvrG0dcviYokOMPaRfrTM8pxoFEnCI0o/0hwOkzTOKNIsRelzum6HYiBKg5SSuvMM/ki5LJHR0LWC92/f8AdfvKKpT3gv+fzVT/jw+ltevfoEJRR13XIYRk7bEUlCcJ6Hj0cWL66mZHTjyTPNOp/jOkmez1jM1yzKktv7N1zOLV988hNcP/J3f/nXXD69ZJ4bZudzxqTn452jPnbE0DAMHW3XEKMlUytUKVAqYs2R497Qhx1D19HUPX3vWZxfYR3YvqFudpM/Z3Qk2kCc8jbSRLHbNhNaG1BKsyjO8GEKSLQ2RWvHYXecboJRYEeLGyy5qej7Hjv0zBdzylnK9uEAQVBmMxKVTdCKoiOkkTAaorf4Pv
KHP/oxn3/ximdP58zPvuG2uSF3nifzgsU6Zbk8QJgx9pH94UDT9yiRUeWSLPP4vqSvPX17JE2nwULX13R9y8xXFEVCkTuOpwO6UEQ9TZnf3d5TJhUxjBAcSkSqecZmu5s8KxIWixluGPCDYzGfMZ/NOZ5GvIuUWUFETrkZXU2MgbbteNge6Nqe4DVGK6Ic0fS44CbkOTwS9qZ8rYic5JMxPAIVEry1BEDEKUA0CEmMk8QwiMDXw19x7A94L9Fy8v0VOqE9Wup9z3HfkWU9RZkwryoyk5IaxdXlEq0iSitGN5JmPcX6gmqWo5WkyFLSLEMbjU4VaZaRZiVt17BSntW6QkqFUhqlNUM/0PcDQ9fTdgf+/C//LxaLiqLMyaNhdJP3LXgYB4jBY0xKlpVEH7H0rNbVdK2L0LY9fTvFOLStmhpKKXB9S98KfvvbhykvLQa0aUnzDCl7iI7oBKv8jNc3X5GknjKNXM4WjGZG1zVkJrBIFZvdHS+fX9GfakIQSLPkw80Dn3z6DKymrz3ffPsWZwzPnl1xPrQcjie8E9x3DUoWlLMFLz79MZvDw0TNjAodSt5/d+CrN//A9asF81VGdJK///rvWJhPePvVBwiWf/4vn3Nzc5x8vgL8GLh/bflv/rv/lpkeODx85Ms3/4lee6Lz6CwhyJGmsxAiFkvnHTJ68uqMxDmsbxnGgWHcULeKfjhwcfac9fIpWpd8vP0KpVZ4H9kd3uHJqWYFzg/QS9KkgrCh7bY4ZzE6YV49wfUB50ekSVhfXLBel3x8v6dK58jck6Wa69U5h3rDEAURQTFXDM17Ng8dSs8I0eCaIw+7ljJPKLOMIi1pukAk4axc8/L8FX/80z/k7c0NNx8/8rC5o6hSVrM17TFQlAvyPGD7hjH2lOmKMqzJkxn/+o9+zHk+YKjou0jXdgz7PVfPX5CoFcaPxG5AloE0hXqoOfZH4m3Kz1+9YFZGtjclmzfTFhqR/OCzEnICG4gov6ejP6LXHzOoHn1ZEf+Y4ReQOEI8sdlHyhc7uuSBvtlA2tF3nrHf451HkFLMluhhjxQCGae/P1/NMUI9+qw0UvofaISTH/N3ipr4vWdzIghOclEJouFYvyfGa4peYAdLXd/y9l3PEL8muEC0mo3+yKtPLklSPXlrasfVxROqJOCdwDlHU1t0WpCmasp4M4anT56yXC6pSugaz2Ez8vbdl5i5RuuUWXXJevGMd+/+mncf7ggEZrOK88sZu90GHwKWgNIpwRo+ef4TVvMLjElpRzi1HSq/pz0dGMeWYlYw+A3Oj9SnAT+02OCZV2cgA6f2jt32iMnySTqNJkiDwHH1NOXm3TuiH8lyQSklIQw0zZF+FzjVksVyyf3mPeARKNbrGZuHW1arK6wbGOyeU7MhfLD0w5HRdXhvuD9usF6hNCiTYozj7LxE7Qf2O0tdjyS5ou8i8/KS+XzObFHw26+/5eWLa7JkxEqLyTT18Ug5Swki0o0Hos24PvsD9rsdSnquruZ8/HCCUJAYiZQaayfUv/MjxkTWZwXz+Zy//0+/Ik0zyqLCmBwvPBdPzyYpoc4Z/chpMxJNyfq8Yv1U8e3bvyLPl1gfiG4gioG8OEdmhrvjN3zYNAync66fXqEGh+0dwxC4Xn6BdhlCawgjIzUiVqh0NklPdcZ6nfDLX31L253x4vlLXn3ylP/wlwdseWK3v+VY10RVTd7mXBHFQDvskM6xWq+4v/nI8XRgvbpCZwlvPrxlsdYsz1Z8+/Ud7jhQljNWqzl5mrB92OHcFuVSlFlTiXMemi0yGp5/ek1awm+/+QatUp48vcD6E84PrPSKt1/+BYfjAe8VZX5OdbUgzi8YG8vm0JBlkll6SZUmU1YbGVdnkVdXl0Q5MNiezGhWq5zT8JbNdk8MORfnT2jbE31fUyXPuDp/SV498Jf//tcsljlpZgCFGwXt8B5lAmWVkVfhn+xrfi+aK+8cQ92jvEA4DV4gsxSBwYtJRlUka7QxKCOI0uOCQEpFqtNJcykVfe+w46Rd98GRZIqnL85wfcowTIbYqycz3Lggr0ZMKpCpJFaaIsnp/YiXFp15VMzp2/iDTC03hiIsca4hygGlUtwYEWQkiUZGQ5ZkHHxgHB9Dg6VGS40dRxRiglqMASkTgteMrifEI5sHg/MNWkXKbKLePdydEApMajAmmSAFylJUhrzUCBTBSdqhxzkByiGEJjUpiokKqDNFpiY62ak9EVVEG0Ga5NixpT11zGawXJYs12s6tyXGHj96pJ+eT+kUbRTKaIxSdLYj2pHCJCTJDJPkXF8Yjqd7unEkSSV2tEih8BaiSJitrrAelI5Ajx8d5AUojQ/T+zIGENKS60BAQZSkmcT5BOc9SmlMmqCUwPbgHAy9nwhuUeDHiCJFKk30kdXyAkFK3/U0pxpvA7aXRB2x1tM2HZvtBh8cwQmCE+iFQsqJtjUOA2OSImXKk7Mn6DhBSqKAiKe2B6KypLnCGIcRgq7xECKpngpzbQryRAMD7XiAoqSjwwsISjN4x3bXg5wuBkLIx0JgyhBCSpQKtO2AVOqHKanWEhHF5MUZLdH5ybMFyMdpLTGgjabIKrASHGRJjpaKRGcQFUZrZLQoodA6Q6gJepGnkacvrlmezbCi5+3dA+3wwO3tQN8pssww9po0TXBjRJvIbJay2UWa3QhOIlaaal7QPZKAgp+CH6tZ8ehL9PjQ422YPAGDRyZgpMCGiPAeGSIiyOn1SYmWhlRN0QuzvELESRakhEArQ5ZUHE8NSTrBJZT8x/yZrh9pmg47OpIkm8I15WMEgnvMh5pU6ZO/CoVUTPlWMUCMSAFRCmQQBCmmOu/x+I1xCgO14kB0U9abCIrTviORkug9WWZIzhfMKzEZ5JOCNEnIM02eLSgKSVbkRARJnjBbX9K3I4SA0Yoyn+HwBDw8yhedm4YrPAaBg3sM3xwBj0kUWTSM40gIE2U1TStmOnn0YkzmceuGKbDdTz4yIcEoPR13AExgHwh0nZv8IFLi/Ymhc0T/WNiKiGOSH8fQIQgYkWP7A1XoQXmUjJz2kXzXMLiOIs1ZV5a62dK1DdFNfjzXH9nsN8zKkhgEw+AJJkGXCVWZU4U5ZTlDYDlfrxitRyc56/WMsgpcnGUTdW0IGD1g8pEu3BN6iRoVfedoZWTbPKDR7E4j3ksOp544WNwweZBevfiMs7njtXJ8+eU/4FRB3x8wPsOkhkKv0IPBBRB++hyaoSfLkgnq4kGJFGsdRleAmTLHXMesXCJVhhDTtWzzcIsMJSEOQJiOmzSnrVukmoKqAZRcopOB4AXRJnR1xWxuH5tsQd1NIA0p3eQPCYEqr5DCs9vsSAtIiznNUOO7kdZImjShLEsc4F0gkwaZV2TVmui3NPURnUSicqgo0DpBm2KiclZLTr4neEWq55RnKa9+/AqZZgRR4hjxoqfIM3SuOHY1bdfhxkAmcxSWWTEDJMf9W+62krPVGqGnnETrwuR5juIxzup3vE/id/dWj3I84qM8cAoPl3Ia7vjQ0XaB0H2gTCOJyujbevLhJYIxR
Pq+oWCGlhLvRsYRonB0vf/hWvBDUPf3j0dvZ4zhh9Di76miCMFoLeNoIUrWi3OCU3TtHqMkZ+ucXbdhDD1ZsoKQcvfwhpefXAECIT1KDSjTsd91pFlGCBFrHfOlYF7lZElO8JBnK5JE4y0omTKfzVjMl/RDQxIEwzDStR3BCS7Wn7I+O+fy8glPrp/xD7/6ew7HjhhHlAnU9YkimWNMidQCxkBZllyeP6dJC+p6j4+eh+0epRVaTD417xPSTND1J/puwEfITUbEY8SaAHT9HUYpgosoachMhhYF282Brh9wUTAMLao3P2QMSgFZLjnu7SM92WOMQppp455lJdJquq5D68kPk2ZT/XKs7yiOBbd3H+lbS/CSdj9AmDz8IioSnRP895uYaQu9mq8wCTg/EpyfvK44iIoyrxCMjH2PwGEHyFRFYnJGdswXGXboSVLFaramzA3H/cD1kyVFOUcpwfnqHEGDGT3ew/3dLf1JoPIZXjnuj2852QaTnDHG6Z6otcQH9ei30ow2cvdwxxAa3P7IWDu8y/n8x3+KUiVCKrRJMUnOqd3j4glDgZaaU+soqzXBabp+kpjPliXH+kA/WpAKbTRt1zPafpLNO4EMA5dnz+nbgBsjLnSoJFL3LbJOgTjZYNLZpAIJDpAk6UielmRRgxs53N4SRMLsbEYQHf2xoTu2XF8/4TBOdZjEcTGrMHnPUFt6a5F4og8UeUroR+zoiKLgk2efkCcJg/XEkFJE4v/ZAAAgAElEQVRkAp1GopSMHsZ+oO8GkBMRUzItF5J5juA4eQ/7kbbfURTmkV81yV1H14D3mMQgUXx890/LAn8vmqsYpw9oYMIdK+VxQ4kxKSJaoh+ZlefoTNKNk6wrykCSpWiRoYQixsBuf5oKdRtAwKwyfPqjz9jeRm4+3DCOHU9XBX2rSNJsuhCMkcRAVRac7JYhOmLiqMQZ3SbivcDjWVQpRVFxHN8gzWTIO91bulYgZCTJNEYtaE8PNI0l1ZJgNBLFaHuUmTwW3jsIEnyKH1uG4cQ4OKLoKfOMWVqRZyXeahKdkmUZWhmci2S5Yr5IkWoqPspK0497vAtoGUmMZDGbQ4hkSUJmMlJdMOKRQ400niSNpImmOfacmnEKmk0kF9c59xs4HlrCGEhUzjBYiAnBjqgIqcrQyjKMDXmWMatmBB8xKnA41dR9T1YY+tGRGDPJppShnC857XYoPRlN0yQiVUo7OHQUhCgYPJgqEI0j2Ei0Eq0jWVbR1EeimN5jhCQ4jR8kvfXEzlKZgr62mKJCq4Tdbsv15TXORuqm49TUE0nOpzjClC0W4Pb2gbJI6FpH11oMkXm+ou9bYnzUywvJoig47bYcT0eqeYXSgrrfYLRESU+eBTKd4PqIDAGpI1EJUJIqN3gfqMdIUgl8F5BGk5cGFyLb3UhRaPLEkCQTLVBG93hSC5SSNHVHUVYIOdElhYi4MTD2/XRR8SCTDGMkQk5h24iIkoosy4g2Er2HKBmGgE6yCdWgPMENIARG5sgITlrSzHH5bEnrRr7+cMPx8IZXFxm37zOUSlitI13jmc8zxnEkxshsllMYw/7Y4EeNlIblS8WsEjgr0EqTJBlFbhBqxDnLOFi62iKlwY0TkMMkKYnR5FoigkZGjVYZIvZkpsCoHG00ZVJghwZjpi2TtREhU3zocDaAjkipUTIBwFrPOHq0EmjtJ1yyiggFKiRIqZkKNjdh0yVoCdEHog+PZdTksZDikQIWfhiaE+JA7zrwA8JJBBNQ5rhvKExGngfKZU6eVqxncirSpJqK8zxDjCWzec5ytSLJEhbnc1SSTbSrx+BInRgkmtGPOB9w/cA4DLhxCqEOfsqwsnZ8bJAm+VSWZSTJlInlfURKQ1HMSJOUcRxpu5a6ZiKkOvsDICgC3k/nrNYKLSQSGKwjuICPMDoLLpLoZAJ2CBBaYceBGAJKCIRUNEONcZ6h9wxdYOtGTDLiVaDKC3ZFhxCOjw+3pLpC6pRBHDnua9pOI8REucyXc2IWELokVYb5YkWadghSxjEy9g7XW86vlpTLFzSnI/vNnrYPXMiCnf1I0wxIFHaIHP1I60YyNWN7OKHQU25aM+AHS5LmLM+XnF9IHrbv8FFhQkkf7vDj1NQX+QxpA+M4DT1ikOz7mrHIaPsWqRVFOWPoWspyRXCapm4hep4/f4b1DucsdvTUTceieI4PRwRTEZFlKc2pJckEUkOMDqUMaZZO2PDoOW4CV09TqtlELBw9WNuijMd2J3rrWczPcCJhtz1RWMMySel9z9Cd6IKnTRW9L8nKir5xJFqhjKCNOc2hIzhLTCSeQDc2LGfnaDPFlpSFZuwTmn2DUIZsVjG7PsPLDOegjwFSwVl1STBw2B44nY5kGCq9BtGzKBZkMmdz8xve30KaL7HBEBATuMlN8JgJJjHtqYSUv1NIPJ6MMUwhw9OOefKbyglg5UPPGHuG4weyYk1WVNhuIMiGrFwgXKRzLUN7xKgOrJjIj8JSt49RJUzS48nkJX+gF4rvoVqPCPbvpcNCCIZ+mAarImO9eMrpeKQftpgiZ7Uq2TQPBJeg0zlCpBxPt/TdegogTgR5EYhix257ZLWegBdZmpMmjjLXJMZgoyRRJYKIs2pSEVRLnj/7MR0tSqaIKPDOcba85uLikmfPPuXi4pK8gF//6jcctrdkuWO+gGNb07qUaibQcnof57MFnzz/guPploeHD2y3NU3dU1U5RZqhZUa/KzFJpOlGfIAsn00DZQaMWhNCSmMb2toyr9YkSpIYie01h/0RL5ngYwkMw0iRJ5PvG0GeGjqdYAeLTiHPC5LWkGYJRueMdiDGB8g8SjMV0CZwv73n5lZyd7NHS01R5Bz2D6zXz3DW0TYdVVmSJSVjHxBRUeQFF+crkAPvbw4Mg0PqjCgP1M2R/DGf8uFuT4yC6NUU5xFyojiyWGYMzYAWBWWxIJGQ6pJZuaYs5wxjw7ycMw4WxRRv8XB3i4hrqrKkcxu2H98TRErXQxA9Wk4ePi8so4M0KZE6MrqP7HYdh9uP9PWAFDPOLv+AZK6m4bzM0UnG/u5rAg1VtkJHzWbfMV+d0zUNbX/iw02kmkl2Bw/60bMvFU3TTWRrMwXMj2OPSXLOLtZ0bUfftRNsJvRstxqiQhvBfD6jaQ6Mw4AikOeSLF+RoxmPNafdPWL5lGyZcnjYUO+3OMfj1rNl7HoKI5ldZqzOruhOBw70GOXoT0equSLXCpV6ktTw9PyKtqlp+4D3espK7BtMmkzbZqEmL3+VMV+UGFUhUczy1eS7jYJuONH2DedXK/punBr3wpC7A4mf4kmGXvL+7eGf7Gt+L5orbTKSfP1D6roIlroZKAsgCIZac9h8jaxyet9hw0iWzcjygqYVjEND0x74eL/l+skTymKJEoJxcGT6GUpt6HvYbXtma09b97x6+QJnHYfNA3kauTvWOC2JaTEBmd2aZ2eSF5cpUSSc2gGZS66zBbNVTl4V/Pv6Nb/98pfM5pAmhofhxKYbGVrHqDzWjCgmY1zwk2laKcNhX9N3DVKlKFlgO8/Q
aYpLxfFwYBxaXr5asbkfHwmDnhgiZxfTdGoil1mk8eT5Euf7KYMo0fij4e72HWmakqYFiWnZ7fY09cD5Zca80lS6xc4Cq/XnZHmG0HC/fU972iGEIM0NQk4Y4b4J2NiQZh3Xl5JyBl0T6NqIiI6qPPL63Te0QtDKQL0feHq5wMiEmo66HTneHfnDlym2VejZEp0VtKPjb/7DL1ksV0ih8S2kWaCOHQkC6WF3F7i6/pROngixZ7AS2xtcr2idRuuI0YHgNft9jRwsUUraY8cQB66vn2EWGYnLcMNHslzjrSYzK5LlnK+/uYPQgfSILhD7FvXcUBYrpFKMrmU2y3jz9Xccmz1Jpnny9Jx+CBjnCV0PMbDKViRZgaHgeNjStntmS0E7ePb7BqkEZ2cVh/07xkOBlBnSCNQSsmjQTpBIQ54Y9vs9zo2IYVo5j6NFKuj7GqnNVEz4CfUqBBRZipYpEolR8bGcmIQq97cHluUclUxN6Ou3B0xqyKpAkgS8gNSkxDAQ4myiEeoj724DX775W0gdycxxcSn4+qZldBbpoH2Aq+UTusZOjWoURCf4/5h7kx9dsvtM7zlTzN+Y08071FyiSIoiWxS7DbfRdsMCvPE/6IWXtpfetmHDsC11S92wpBYpklVFVt2qulNO3xzzmbyILFLthQAvDCiAXGQuMhKJL078hvd9XpMsmS8U3mtEl+COgnleosoEYzISk03Y/WFHnpf4NEA4MLYC4Q3GgZEBLRypgpgo8rKgmi+4fXWPipClGqUldrAkuiRLC0Zr2Tyc8FgWs6fUpx0xtKTpZLpGiN81GbNZyql+IK1KBjtStx1WiIlAGB+pTv473LOc8k3C5NuI4TtE+/Si98RJNqQ1J/+Gt7u/YrdpcL7HORDRUKU5Est6Pp8gOjJFjAKVRIqZJC9zsnRO3yQU8wUyMQitWV885TeffYO1EWst/dBxrG8xWYJUEYlARUViFCITmDDJjwieqiiIjxEV1nm8Y8oL9O5R2tew2Rx5+vwc7y2jHaZ8vdGjJDjnGQfHGC0m1YxunOxuUiKCIMlyhr7DWctZVSKMYvTh0QPlGRtLluRg1OST0wnaDmyPJ0RMMNGQMnJoD8gkxwhHLzt0VeIpaJ1ARE8wir4vOFstyUuDVOA7R2O37PqeIleclSX5uuDu3YaMBN92vPzlr/jhTz4lJgXras2qnPPtuztSt6Q9NozWkGjBu9cnumaDbT3rCp51W6Qdma0XrK7WiChoT4ExdPz13/ySz/7vX/Hw9cAHn2Ssn15y6B3Hpuf4sOPp+QV2NIQoQBqSmeLLz97QNZ6syDi7mvK2Xrx/yXKRo2Tg9u2BZ1eSpt8QiWRJyidXn/Ls6ifc3HxO3x/RWtPaO9aLNTb2uDhQFjnd+BJ7WpEkCXmpyPI9Wl6xPW5xtsP1gmfPrjgNB+ZuyTwvebL8gM9en9gfaup2oOun4PG7hz2pKalihaxSZosVvb2jcTeM+zcM2wYlzykSi8kq8vkZz6+fok5hAruIkbbbcLrtyReCY/2a0wnuDzcskxkP97eMiaA4X3O5/j6fbz/DtyMmBHQxYosj2lTIaDHxSBEDUs7obEBIw3yxpjCCyHeI8ynGQQr1+4gEpu+lAhfChC9/XCE5FzDpVBRGMRKefI4d7rndnkibivXqgtu7nm1/hOgI9PzdL/6S66d7VvE/o/Tv4XxPNgNlBP6RohGZNtyPJA2+w79/11hNMAsegSaSoiopZwV32wmaIGxPbY+M9y0hCqr5M6y1EGvOr1acji3eT9ui1WpB3W65vJrjY8tyNeO955/w2Rd/jrQF6ZOCvCjISkdbe548TTntLLsHydnyj3nxSTW9L8qK9dkT5tU1QtoJGmI9TX2DiJL6cEei56zmz5gvEv7Dv/81ozDMZ/NpqNcrLldLfHzD9tCQJQuurz/k4eaePJ1xfn7B7nhCuIyL9RVI8PHEu9dbloslUnqg5zytuL/b889/+jOCr3n37jd8s3nNbLUGsULplKzQFMWMzcMNRhbMihmr6gI/1Iy2x2Rhyt8LhmGcIkOMkazXOdafOB06Hh6OCOPJSsn9w46Ls0/I05wYR06nhhcvzrm72bI/bKnrlsVyxuXlGakuUCSTd9VZTvtA0zqk9Az+QHB3ZEYRrOB0iGilePbhgm7ccv+wQ+iSi4tAHCewWRt6eiX58R/9IabQ2DhSD0d+8/WXXF8+pyoq8mjph6/wvsYHjSFhri851Cc2xx1SgDGBvIgM7oQYR2obCN6yuDrn+cX3ED9q6YYN9WlDmm0nT1QXEb0naXb0Tc3QN6SXOWrlGW3gZF+S5Pkkg96/5n4X+OSTH3Fqd2z3Gw6HPSIW/OAHPyPNBKfmgV/8x9dsNzuWi5JRR471hiBSEhMY+4iIisW84lQ/oMggaERMeHrxnPv7AZUVJGcV15cL9t2eu/sDQdTkK4sbYfPwirMzie8qGCSZkuxeQ66eYNYtQQ083P2WUv4R6zJFLh09it3uFTZI+hAZRUCFlDfv7gg24fryCT/76c9o/vL/IJutiFHircaPGW2/wceOsipZn+cs1z/l81+9oxtaYh9BZERfTUMG5/DWkqflP97X/P/UL/1/upQQnLY14+imrYyc5GVNn01EP6NZnr+P0YpjXXM47dnvNmRZxunYMo4dIViW6wWHpiWEDomkbSX/3X//P1GtDDrxOCx//u8seZbz7r6ePAPWocXIJx9/gA6CxCUsmXN3+8CP/+R9rGs4ng6kleDy+oIvv37J/u6B096gkjsSM0OGgBaRrArklOxxHOLATjTT1KnNENIQxCSV0YeEJK0I0RHjiEkGVGI4dAOtHciThFmWUaoSoyXt0HJqD1xfXdJ2I10cGbzFjpb5fE7fpyxWc7I84e3tF1y+uCTJPNiAPXbgDiwXFyzKlOXMcHGRoB4+oo4jeRKptOawj/SDw/eaLNMszlPm65SLJ0/Y7be0TcfD1jHYmv1hZDYDlSkG1XLzsKMbe4RKyZMZb+5viXICimiZcL4ybNs980xSrD0667n55YH//Mc/5JTU1MOe/FRzOXufL28f8FZRZQmffmTAHTDJJdum5tX9gbZWGGVZriLFImVWpuzvDjx5esbD7sT22CATQXiV4NpXzGYFq/mKMnvO6fiSKpWMQdH2mk8/uGZ3uJ/8aDQUVLx9XfP0WcYwTInqT64vCHpkfTZHKUlfN9zdv+bJkyfs9x373REfNFoa8sogs8jQQburWZ9XFGlKdAHb9cyLNSHJGIaJbuZ2KcpBFCOYBJ2WlOYJle7p+pFubBhDg9IrsjTF28lYqYwhTxYQHTJqCIrd9gZlLHm6IEkTVnmC9OC6Dq0MZ4sZz54s2OzvyKsCFDjvMOmcYfcGa2+nF9NFyYffP6e1TMGEWcXV2QXKjSSJIc8Sisyg0I9yl4mydHP7lie5R8iStvUIISjzK4S2jKFDp5KyysnSks2DJ9KRSMWseM7QjRT5Cufco/G8oq09vd1TH46cjg8c6mYy5QpJiAIhAtuN52wVENIjhZsyc4Y9l2dnKKWwdqBvpjD
vIpcUhabvFUJXZMWC8+GHZPYpPohJbuXl4/TbA4Hg1ZST5UeM1oTgJ7q6EIQgUSoSQ+Sz+7/i4L7BFC1CjczyGVIYnI0cdifysuDdbYP3h8ffociylIuLC7SOdO2W+XzBoatxzuK8QymFlNPGzTk/BZyHwNnigsurNavVjPki52w1Z/FY+IRo8WEiJ9oxMPSOphm43+wJYcqyOh4ahtHS9ZbPPvsaqTTGJBijp+YsRoxRFLOEnATvPcM4gWyccxOqvm4QQmK0obMB3w9keUae5cQY2XRb2tFPAJAoGIcOpSQiTBPqNFFIaXhRnrNaz9FG/Y7GeHFV0rU94zjlROFGtsd7+psR7zzzWcWzZ1f0wx7vN2z3O6QsOVusGdsBliU//Jd/go8ptc+wLkdKhS4E174hE5Gh1cRxyR//ZMbZhaQ+HaZg7mrJzesbqnxBvshBS8bhntu//Zr7+7+huf8akzpEMUPJkouZZa5P3DYn3n79lnJxiRWK8bHoDjYnLyImC/hw4ic//QMO24HjNuJHy+muYVcd8FKSaCYpVnfk88PPmS8Lzs+uKaXm7q4nfbrEOknbe3Z1w+nUszyb0w+W7balyku225eUi5SiqlicVwxRYvsLqsySaEnoWz58/jFnywVd6xgHAXLk4qNLxrFntI7hUBOfRiynaVBjUi5Xz6nSZ1Ocgq2JboPsUnxMkVrglWcMisvnKzZ3rxi7mixNeHpWcrv7CrMsEMLSHV/yb+++Zn6e4ZVDqYJZfs3x4SUdB0SWoqXh8sknIC9IE4mtA65X6GWKUAaUZVoZi0dCoESI76SAQGAidcYp7FSoKVMxm2Us5gXzKue2bcFm2KajY89oBCIoXOc4DR0nP/Di6l+yuf2KLD1SZB5FTte3COQ0UEHgRUTE3zdx//CSUj42WNPWe+wdQ2/p2yPb4e/49nPDP/vZj0kSzeZ2Qz4+5dTdg9Es5wXvnRf8h7/+OXbsSJOSNFnw8usvOTtTZCZlHAa+ffU10VWMYuCLr/4erUree/E95lc9u+ZzzHzFx08+5YNP/oi2NSBHtBYMSvLFu18hwozjYUPwHavVjHYAXUVaW3N3V/PD7/2MH/0QDoeW46FFyowie5/7/ZaXX+64vWu4vNLU20AiMvwQOWwPzJIZZfmCs4sZ3vfc3r4Fcc+X3/w9y/k1q+UFeamYVSm3d2/IizmzxfdZrAzeKbIsIUk8IjZ8+/mWAKzOFaPv+I+//Jz5aqoLQ4jEwaJVTpackZeGrjvw5t2WiCPaGcvVmixPqE+SH//RD/ntV7/guAtU2ZrvffIjhG1x7oRUnnKxQEnHzds7nAdjUl68eMabb7egYXWWUJWa+fwFx31DFJG7uwd+8/Xn/Ff/5b8iLSz9uyPBOi5WZzTHG2SmMekc0gUuNgwx0rcH9ocNX335muWy5NXLb1gtr7k4v+Tp0/fZHd7hoyHEDFRku7/l0/c/4Wyd0I8Nv/nNG4TqGE9foU2BVhnKJ9TVSCU0Y6N4uOt50K85Xz4lKxTDeOKb16/QeoHVA7/+7eeEX3/Jv/gXP6E57rhvX2N0TpWvuH17JMZfopRDCMkynyFkQn+0HLcNw9jw/U9/xsPtgWgtyuQsFtf8/O/+DilhsSipZiXdINm8uSMGRVmsmFVnbI8d67MrhHyg7juO+4G2CfTuyPpsSZLlbDeviSTc3r1m7CPRpmy2NdX6mmVlCKNgc3igOwm+Tb7i2qwppOLhYUPbDIwuUBYr5tUKH/d0dcv19QKRDPztr39B3WrOlkvGYGlpceLIcl3x5ee/wt1pZm+fUBYzLlYLfvaTj3F+4OtXr2g6wQcfP8UPlmbfkfrxH+1r/kk0VyFGTCUQDoQIKJ0RgcE7xjBN7u2hpUjNxKYXEaXKKZsmRmIUEFKMmVE3D/QCtNYIpUF49qfTY5C6wj+uVB8OG3KTU2UzpNT85jevKLOM5bxCCnAi8s3rG4ahx7qexVrysBVsHna0/Yg0mkDHex/PCUOBGwXj0DKMNS+enTHrRzanlsE6/OAwWiHwIAIXT65582aDyZk8O7lhtArh5BRgF0aa9kA0c6JKp42XTugHyWglHkeIHTH2HE8ty8WK4BtO2xMpmkIY+tYSUYiyIo0eLVd0ruFme+TgJNJEbg8NflYwO1uwWpzz7ddbqup8yozB89lvjxS5J8QR70aCPzLLl9jW4ozHtkfuXx2o5gnJIHFBYYNjV48UuSVLMrQ0HE4DTy/mBNWxOzaEQ2R1eT49uEVKmpfstKW3A5+sI08vJNdPJJfPJirTl1/0bO4cdtS8996cfFHy9r7lVCt8P0csjgzD5I1RMmJ7R0gfeLg5odw5izynp2XT3iNkRoiGcYycLVfUfTb5QxDo4gLnGw6nA873jH5gf9qSZ3OsDYzWguhRapJXeWuIviBJEspiTm/3WDeAgPbkybKWRZ5gEk0UirYZca5Gq4mk5P1IqXOW65y+8zzcbwGBGwbSzKCSEnsMEC1lfkbrJEMPxmjKIoGY/I4qqNSUT+P9hJWVfkDLFDt60iohLzQRhzECbSZJlw8O129Zrub4yaKDsoZFmrIsM7zNiS7HbsEHz+ZUTxP23FAUht3hlqYb6AdLP47M5gUYwWxZkOcF5TyjaS1dO9K2A7YPfPBehZEKJaZmlRiJJhD8SJ6lzKqM4/FI0x0JcQQRkUqwWpakpgQiSkqyPMXaDfNlNgU1R0dwksuLF4QYGV2PVFAWKUoJpIwYE9FGcWoD9WGPCedUmInC5iGGSSceH71FEU/00zZGGjMFL4fHvL04eeOklFjX07Y9apQIkWGJzCvDcp5xfjafzgoX6YeRcRhRWlPNZmhtHoOvAybRtF2LlJOvru3bieqoFVIqvIqMg6WpWx5kpO9a6qbEu0DTDBRFQpYn5HlGrhXeg7OBvLQgNV0/Fc/eB/qHjiSZPEKTuikwjiNpmjIMPc5bxnGSoE5YaYnATAG21j36Dqa8vrrtiXhG7yZfi5AkyURd1EpNWYKJmv6fMSKFn/xEImEYe7abKbcoAlma0/fttC0EfIDDoZnkySGSpgmLdYXJJCbPUFqjtYRQ4FHk8wWlEFg7Yn1H7yyHZtr+SwlVdU01u8Y7jx09VZHjw0iMU5hzkjqeP7/geOrZvd0zDpY0jsyUICwLTm3B3fHAze1XfPTBJ+TFCpmsSBqFlgfa0KGTlDzJCVHy4r1LjDaM1tK0LQ9vBMd9h7cjeIe0YhoMrSqqZEZRzlhezri/v2e4bfC6o0tzlFmCFdSHPftTTWc9rov0/cDp1LDb1aj1DDsGjoeGrvOkOtDaW6qqoshKICUKT+8H8pkiy1OCz6jmOd4fkSoyjJ7jacT17fSM6QrlC+7fDpQfarq6R5KhVcbXb19zeXFJqCePnlSCmBmCruiHhr7tkXpJB7z6zUtcHMgKRW4SurAgTyuyNEGEnkQUjN2Jh+N+Ol+KBaN/YNw2hIMi9jOOR4P3/rGDmjZBv5frPl7i8fmMk+cq4JAxTv7dJCNJEkIcOV+sJzBN3RL6Gh0afrO7Qak5Ml2wzBasFi9YfZ
rBmzV+D85a7OBRCrz/7naPPtjfYS2mn36nGHxcb02eEyJGJZR5ziI758ksYdjfUHtF1yuSZMMPX/wJUY0MruHupud7n3yfNP0VXXeia2vef/FTIg6lB5SMEEYg4bhvJvmudmw239LGE6M9MC8VB/+K//F/+CuWZx+RZxVplpKmBjcGEpNCkCghcK6nbTxXV1fYcU9vb3l78w3BKUwiSLMpYHaWJ7z86jUhaK6urilnisHes9vWNPscfVwyny357cu/ZXu4oCwqIophGEj1jFm1oJqVuNgy+JG2t/TDHutGLCMXT64JwaJVwCSKLnzLcnFJ2/fsjkf6TnN+sWK73ZBnhuVqwZOzFcq0NO093vdcXVxRFmv6LlLOE5QODOMtL7/aozkjySSJyajKJ3TdW6x3eOnJZynRtZOfLUT6duCbb2CWzThbKrq2Zn84kBUplj3tUSNFzo9+9CN8FLhxhlaaLOuJ9FSzivrU0A8t2kjOLw2f/epLpASTwov3zjkdTxTFfNqqYpnPSu7uNChDECMhnkhzT54atNRUWcXHH3xM8Gu6ekTqiJCBftcwyyPGCIo8Z726RGpNniUIPKmRPL1+wrF9S5XkVPkM20teffuO9fmCcQgomZLncz54/xrYTe8mHxFioGsjOr3HM+DjSBQVT59ecTo0DF1DFD0ff/Q+TTOQZqCNIMTJsz+OHUJZpLG0Y0fWB3rrOOxrbt+eeP7ecy4WT3A+0B4deXqGtS2lyamMRqsUYsbivKJraqLXnC8/JHuS8O72njdv7knTjLwoGcKOND8nL5ckWcnhVFMuEzrX4VvQSlOmKfW+Jik0mTb0bUfCgmV+Rc9IImFWaspUc9huGAeLIeWjD2acDgdwCSqmZMb9o33NP5HmKjwCGaZTy6SGfhxATOZt5x1tHYnRAJqprDFY5+n7nqGfUMFaT6Zza6dEdqU9SaoY20AM8vGsi1jnsKNFZIrCTDSh+tTirMdHj4s9iZlxv91hR48A0iKdgohVgUTSd5YxBGZhLO4AACAASURBVGbzSSvro8RHyfAYkhutRzuwQyRNJFoJrJ9IQ6aAIIfvsgmnczkItJST5EdMvqDBjdjOIaTAqAzvBcPg8CIQYyA4R5Gk5EmCdZ5uGJDGMIwdDgdKojRUqzn1g8O5ER8sojTE2DGMA10vqFuNiRIXJEFqHBo3eO42DVU1rdpFjHg7YuSAsyN9Fznse+5utiwvErRSROEZgyU8hsbGKAlegTcEF0mK6VAfBk82V3Sxp5AexWSeL2PN+0vH+jwyW1giHh8Uy4VgVUpmRcUf/GHFq4fIfRORgyIhnV6jzpInGhEyNvc1etVjdElwivrQczru2dcHikwhhcGHSGcnUpyUCik1QkliCGz3e0KwIEaSQWPHFkFAa49JPcPoOZ06xhGkzMiyqfBu24ZhGJBREp3CDg6rHUKLR7+LxLkwyc6YTLh27ABN8B47jszKOfumJjMJigyjJE48TN6uNEd4gw8DMYxIMUmltBGUZY5zkSRRSOlxwZIYiXlsCkJwCCnROkfryUdnR491HVU2R8uM4CRjE9jeNJgk4KxjHFsIkGeW08FinUOowHJdsdndYccp802bhCGxWO8QWhLx9GM9BdUy4Yj96KaK2TP9z5kaPiUkIYxELwkE+v6EUiNaCoQwSGlIskjwU7Cy1pOXaj4vMRrSVKNlirOTfHZ0NS5YtBbTy0U8NhEBfJxgD2W8QMWMKCHVhs7/PglUfOcdeqQETl8TnSx+hwZ79MRtu1cEOZJl6dREB0eeG/IiI08TpJL01qKNolAZWZZCFBij8X5qrJI0QSmJtePUoPzuawo1FmpCPMcIznuOx5a+H2janq4dqKqSxbxiNi8oq5xZVRKjwFpP17kpb6/psc5NjaEUpGaKJ7DOPQb2TgVijBHvPY4pNNkYM4EBHn1YQkx+ssh0xlo3SSid9wgxbfWyNH/0Xwmk1JhkooJOg7CpQNZCEPzA6CYKWGQKMm7adrq/94zW0TQ91vrJ82U03TAQ94E0FWSZACnoa8+YdJR5RIkJVOHoODXxEXozFcXaFFRVidKWwJEohymUNPGEIFBGsZjNKKqRd283DHXL06dLDvsjTdCg11RnFdipMbXB4KLBywqZerQMZGlKqlP63lHMU4wxWJuR65yEjFmRTVsjF4hekaYFowUbNCKdUS4u2d6f6E4No4ww06gA0VmGZqSrG05tT33yIHu6zjF20BwtwYcJwNAHnJE4IRCVomkHxjFyeXHFUB8wmZgItKJguVyx2bVToxo9UkZEzNEiImMOLsUP0+a9rwe0VoREcLvdMl89wcQEiQc/0p0Ggpd0h4Fge+ZpRaJyqmRNO5wYuwERNUViEFHjreM07LGdx/WC0U6hvyYLCBWxp5TYahQW7wTeTRJk8V0COv8gY0p8B1SZ5NTx8bMcHxHpxkhMMlEGZ/Mz7ODoxz12jGRGYfSewSlSY8iShNGdUC6ggoQ4DdOMnrbJgmljLR6HAvCIYv/u7Pg9P/A/kQgGP2HAF8trcgKH7T197wgxwwtDZnJ8jPRWUJ8C10/P0UpPjd04kJfnRDSRHu8swsNitmJzfEDJkRBq7h++ZhYE2+0958uSRXXG3c0d33y756OPPiIEwbu3d2RpxUcfn082AycwenpHL1Yz6nik7rb4WNN2LZGITARCOka3pWtHdBpRRjCOgWox53A64dxAlANJ7jm8ezv5A4Unyw0xBGbVDGMSrHPU/QkfI23fo5nyBBOTUZaG3a6hax1pknJ2tiCGCWYykU17YpAYrabBExKVJDT9PaPrERLyPJ8AWEagRIKUnixX7A4debbApBl5VpBlFfebltGNODxN36CDQIspltGHSN9ZrlYpiRkJSYFLE/qho64Hok8wSYpJAnVzosg1aVoghKHrHzBpitaKiMe6E02b0g8nvHNkLmG9nrNaLxHRkaYJxhi0SiZCqNKMExGMLBegHTrJUSohjxYRVigxReTE2OEbS4wW70EISZkv0IlCyGEatMdIWZQ0g2BeXiBnGUPneffuniTNKIqccYgcjycWZcU4qsnrLMD19SM8qUNokEpRN3tUMQ3+Jl9Wj3cJQmik8iA8dgwkSUJRZhid4vxAxHL/cGIxX0E0KKUZhpo0M8RgJgVQmbF5OJCYqa7UShFESmqglxFlDItyQbVM2OyOnJqOfgyoJCUvFsyqc9I0BxHQJqOqEvrOIYgUeYa6Uti2J8k1Hoc4WPqmZ1Ve4LPJn7mcZUg8w2DxI6SypEhSBuEIRJCeqvjH+5p/Es1VxBMZHg2gAiEFzvYkmQI5UeHG0TNmEw0tRI13gXFsOZ5qxs4jRYJJ9TSRteBcRKhAphVZkuGcwPtpQjNacKNkFJ4+rXE2oHTC4CP2eOTYjFxfvUBYR3QSScJpn5CnOevVM0zSstlv2W06Qmin4sQrpPKMIvLN63sm+G1EelhdlrjoEaNmsAk9W5KymybSQRK6acpsUo2SU8GMnJDcY9+TJhnzcjFpnt0w5bFGjQiGq6fXiDFlDB1OemImOPTvyMsMoUas9
6yXa+5+e0siAkVqqGYF93duIrP1A/d3UzEilKLuBqT1OGEZ7EDq0ymwEQ0ejvUWR0/dCk5t5NQ8+ibynIjEOU8qPFo7xsEivWdVzqibI9VigUhSoo90dUdnHFYHpHfo0fGsPHK9UjQ4vr21dF9ZGCXPny959nzO06cVs3PDL/7yWz5/pRBR8WQ28nDcM5+XLKqCyiQc3/QUaWQ1vyaMCTc3J+5OL0nKgSLVGJMTguB+t8WOE44+S1KiPxGiZLvd4YOdpnVFye3mljxXVFVCaRL2x4Gu3SNFSaIzTAJtXXN8BKroxKBlRvSSvh3xOqK0mFDCGI6nHjt2FEXK9mEL9CiZUpUZ6+UZu+12KlSVIUtzGvuAdSdm1QXzYsnt/S1DX6NViVKaJFGkekbXBmbzKa+oaSRReObzgn7sadqeslygTYVRCW5scMOJobfU6XEi2tmE7tRzd7dHGo2PERcCPkTW5wmaCmstg60ZxcjYRxJVkucZWZrQ9Q+MQz8VA3bk2IQpXiGdoDNaaOxgHyERHqE1UuegJI6eYZiepbHvWawLlEyJ0RCiRoqBfV2TpPlEBh0s8/kMO3YYpciKDCkUb9+9woaGNDPMl2f4KIkIvJcMfaQfG6wdWYmPybggAFomEPrHBmo6k4RWUzc27VAevVcTcWzKr4nYOPDV4a+RJnC2XDCbJ7R1w2KxgMf4ha617E8H0jQjy3PyPMOOnq5tiUwb9jwvHn1d6nc1Y5qmGJ0wWjttyJTGpBO5b3SOwTqabuT+YcdivmSxmDGrCrI84ezsDCHE1IDVLX3f87Dd4Nw0GBFCTPdNc4ZhZBim7c449r8DqXyX4eX9lGg/0Z/k42Ztkjv5x6JHSYl8dPoFYBAO89gYKiWQ4pESSUCKabAQvWcYBozRSKkJHrbbPUhP33c0TUfddNNLOQjwkq63vHlzi5SCPJtRVgl5KWlOLXnpSY1CIOi7gBc1zUkQrJjQ8r5BmxmrlUdrj/MHtNacr15MPjsAVZDPC56+9wRpJAjPR3/4MT//d79iED0xW1Et1zxZJtzc7DmeOqwXtH1EipzVoqBIEzRw8keikKQmYIoUfbYiCIlK4XjU7DYN25uO9dUlr+7eUfcdwjnOEASdYWOO9IroS06bB/IkEgZN6BXH+4ZtHahriZIJkhn7bU2a+akQlQ4RO9ar56TGcHP3ihAPPHv+IaOzdKOmSAVpqkAa9geLUtNAr+sd1WKFl4roNQFBnha45kQcHC5EbPCcmoG+nzFbrjBypG1uOO06VKrw+xHbNiQXPVm25L1P/5SH/Yav3r5kGCWzaoUg0nY9zalBeIGhQOkMbSJCeRJtCM0HxGGBNGIi4PlAjIp/6G16bGl+/+kTj5JBYNqWTvJlnQbSLJKYhLRcIRgY0x5hO4q84vkFfHuzIbqItILt9jO6I1wnz1goAUEzm2u0giGGxwGL5HdBwt9dQvx+a/UdEj5OW/GuH+k6z/zFNUN7wMkTyIaUAT+s2ezvp63f0BHldzld032cj2z2r5nNljjf4EaH8gXvvbhkCNN5O/Q1h0ONSq75+sstp+oJ711nfPTiB/xv/+df8AefPmWzO/K//y9/wfn5JXnxCafTlvo0MCue8/TyA4xMCB6sGynngtdv90g5w7rIaHfY8R2r2VM8HU03MHQJT5/9gGMZcamdkPOZQ6egtCPKljEGpNasVlPO4G6/4Xb/mqpc49oD82zNsrzAqwERA82xZr8dkcz5k3/+KZ999htSk5OWBfuH14wucHa5ms5gN+Loub3fkRUpWW4I0nO3fYvyc5zPSXNFWZRUZcLQDUSTkhcFOoHNbsMwWjyezcOG8+oj0ixBpREhFLmZUZYFbR8psnMuVmve3P0Nhw1cXC5JUsX++Jq6rlHKMJ+dkeaazdcn0iySZikRy2gbvv36SFFq6lND2wxokfP9P36Pvn2L0RPQylnBcrlg8B2x1+hhRpHucLpDFiukUDTHLYmUpOUSIQd81OQLQTP0iMFD1EiRkJhJ+t93luAEiRFgl1Tpc5SS6NhQ5SNESVlmdO2BN6/veXrlEKHi4rwkNZp97ZktNWMcUCJFCM3D7hXNvuHy7H1mxZxA4O3br5nNCrTWhKAZgyNNFFdPnlDXLfvdnnKmePf2jll5xdnZmnm15NvXXzAMHavVc2azBUXpefcmUs01UgZCGEDkuL7GqEiiUrJkGlyW8znD47ukbVuePHnGrFriQ88w1hNVtphxiA+IGMmyhMunM5rjgI+Orj+hzcjhcMNq9ZQ0yzAGTOo5bU/kSUFuMrxXtPuWs/l6ytrqapLkP5UC/7+vfxLNlTGCH3zvOX3vOB4afAwEZxh7jw8RJVOuL0sQhv4RqT6tentmVQGlmjDWnBAipawmPHqSeewI82LJ6dgx9h2piSzmC6y1CBWJYmIKGVlircVFQZJIXr/esJhnuCEwDnvu796x368pywqTJsyqiuPJAoEkcVjvGPqREAcWF0vwERUil8uSaAbuTj1SGaqqoBuOLFerRwSoRCK5Ob5Dp8nkNQgeJTOq4gxZCIL3HIYtWS65/oM1m5sTQydYL89Ilwte/uqGIl9yvrjmbvuO9fIca3uOTcuhGZmZNZfrM5SKeDny5uUemWqGo2X0kdFolpeGcZDUdo8XmoABbxnagS60EEGphFkJSZmz3460zcjl8wXtyeEGT9+NHO5bcI5cZ8RRAAmqtIjC8duvviUvJatVSRmv+fbLt6RpYF22PFvsWSrFZ68nk3SaahZlxfwi4/rjNa9vav7Nn3/Bm2/gcBioljPS1PL5mw1ffLHjp//FGbvB4rqRqw8KPvnojP0mpY8NztxjW0tezJHWUqQdq3nF7UPOxYunE6nIjry937DfewIdaSFIUs3bd/ekaYq1gs1m5N3bFiESLJGhOxDDnvlRE4PEO0+WaYoixXZg/IIoLJ3t6PuGrJhz866l6wdC8BwPIx+89z55llMWBVmWcTzWKGkQZGgtyaua5sFMePxiIE0NRTZN+AQp3gXsaElSzYcffoi1kweRQrPZ3LLzD5hUY4ykbm8ZbMD7BUpr1ss1h4OGUKJSg9CREBMuXpxhvaUZ+imzZFlSHwPL2Yy8MERlefX6DUmmWc5LMpMxtA1N07JcX1OWBUrCZnvgdBwos8hyUbFcZtxtv+Vscc68mDOv1iwWzxjGI/vTW5qmo21H8jyjmCUokTHaqQgTMWG1usBHByIgpWS3nQIF22ykySzWumkz5RTWC1w4sd2+wdqJxNZ1A9a3XF4sGd6OKOFIZzOMTpBqyiciBpyfJtPBWoosQQpJ33lAEbxHSBhp+bz5vxBSPKL7p43MfD7H2Wka7NyEyV2vrsgLg/MD/dCgVcFsPkfK7wrFKZ0nSZIJAPFIG+utw1pHkZeUZcXxOKFfTTIREL2fMPbvbnZ8+/oOQSRPk2lrr6ZNj1LycSsF4bEI6/ueNO1QiSGE8NhATT45KeWUgZJq8jynrqd7CiFQeir2tJry1YIPhBjxzmOkniaNckIhd27yWRmjSU3C6ThMgcWLnKrKmFUZWa6wdnz8
+wSXT2Yc6gPDUGGtZxgCMSi6ztH1UxM45Ww1tN3I/mhQSlK3AyYRxMdNIFHgvUeK9HeyrLwwCNtwOJ4mJYDRZIni66++IC8N1SxntlDsjjXeviaQoOdP+Xon+fdfv2K/34MSLFYLfvjpp4BmaA/0Xc/Yj5ytLnjz7QPrszlXT9YkZcB2msW8mOiip46vXr5mfpaidI7WK0Ri+Ouf/xo5T1BNx/5+B8eap++d8/y9T/n21Sv+4s//V65W5zx78oSmiTQ7ja/POd5vOHYnZtWc5WzBod6yvliSJ57Bddzfb2lOKfOF4n63YbA9Mv6WtqkREcqiYX3W8Oy54Pb+LePoKKqc88s1bT+SpnPyIkAcebjp+fEnP+DkNnTe0Q2CVfWUGDKS/ALva+4PXzHLrvjJj3/M4f0bdtsbpIn83S9+y0efaJywqEzRHw8sFj9AounNiBYnnr9X4IaEtj7hfctiLfnLP/+c5+kPSVGPclL9iGGXROTvtkEwyVpDDAj56MMKgRAm8hdxQCnBojpjNT8jTXtE/5LNYYvUlmxlaFuBTEbOnwSsjYTouXiypvzgOWnakbpb8s2fYO0RozOEnIiiQgREfGyuhHhcVv1eHvjdsw0C592ElLcDh9MOnSiCSliscl5cnHGql1AE3m5e0fcnPvrkI46NnRb90WNjj/eOb17ueXJ9CcKybTa8fv2AyiyjkxTlJR99+FPuHk78N//1j/jtF5/x81//z/z0p/8tf/avf8rgN2jV8Gd/9sf84Ac/4C/+8t8wDJYyX1KeC6pljxsMRfIUsyp4/eqGs+XHSAVNe2Bz/8B8Xk6SeBTBJ8SQsN01rM8vcM7RtiNv3x34g0/+iLquGYeRaAU6KTDJJS6MtLsdf//Ln3O2vmBZPqV8dslsvuTQ3HDc9Ty/+hGXy5G7h684bTW5XpHmBkRgschZzRRa6cmPaiNROM4WK7qhZqgt0czA1ZjMMputSJKMh82JGKYNLjFOMR3e8eF7P+Dvfy0R3vGHH/6Af/7P/hUXq/cn1UwcSJLIZ7/4NdcvUqpqRZ5WPPto5O7u35LlhvPLFS8+zvi7v/mSci4ZwzusHXny5AlllfDq1Tuk0MwXazr9EsGK5eLZBKLJcz7/7DV2gF1/x939Ky6fZjgbub3bkmSKs8sKeVqQqSk82PnpM9p3DVcXJWmeIkVKNGcMvubt6w0xCOaLGcKOtG1LXqZING0zNVJ2HDm0PaemZr7Mafs3dG1k7DW5ecrz956znM8Y+pGxcyzP1qwuZ3z58jPaOpBlcz54/iGbh5tJKo8gREC1uKBYzdckOme72VMUc077hqatIQaq5Io//dPnNPsCfMbV04Lbhwx0y7F5Rz8cqWYSOxi0OUMpT/AtUrf0w8B215CZnFwKtnVgHDzLeYlUKaPt2e5+yX5XkmUlRVEQo+PNNx3S7Gjahl//5iU//un3OR1OxDgNBs8vPsT1NadjjZIZ6+UV0VtENdF900wiTMsXXxw47Aait2gJeZb9o33NP4nmSivNflPjvMKYGbMsZV46TqcJcoGQJEqjsgplavToiH7Ck0cdfhf66UPCalVwv3OcxoFFrtkeT5RZRlkZlssELeG3X9xPeOJc/w7zmucCzZQdkyQa109YX4UiTVKiGDkePA/3b4mA0gkiTWmPI8pIfJDUTQRr2Q8bRACNxNcjfexIVzOSRCOiIw6BY9ej9eQF8c4zm0163cG2OOdRmcAGR4jpNEGPI8dDz8PD16gopyIqdvzys5/z/rP3yBKDdS2JaJiHOXUfKIVALeZsjw1VphianiGMxCrFxRxnO5TwOOHZbO+J9uz/Ye49fi3L8uy8b5vjr33+hU1brqu61aZINElJlAYSoJEggBP9fRqIEw5FQoMGNRAp08XuqjZVWZVZFZER8fz1x2+nwb6RTQhgT6RB31kAgYj3zj3n7J9Z61ukuSPoBitGEpFi+iZODjNFXgQeHxoSNSNNp1SThCLP0MFz8+0NbTOQZjmvvj+lOIYQOq9AC4ZDjbQVvhF0ZKRnCecnMy4WM04nlknyyIf1I6IoSLN4cL6/Fzz+7cj+z96AUiBTtk8WKT2TKiOrpnSD5vPPAtSCNMuYn2acXSse1w3e9QhhSYUjl1MuThYcNlu6Tc/piefV8+dkukRg8DpBPy+4uflr5sscgqDeRllVklo2+wE7CpQsWS4ryqQiXRYIAW27Zr2qKYtzyiNt8unxF1xd5uBBiZSySOiHgHOBqihRSrJZN3z75p7ptKLIM9JMk+cJQmp2+w1CWdIiMMlOmU0lzkrqvifRgjSpMKNnMpkwm00YxobV+i76YkYTqYMmoSgnmGGkrweUVkwmC5RICM4hgRfXV5BoBBprHFLs6bqG5ckpVVlGpLaBxTJuyIwzdG3Pi2efcDisscFggyafnjIDWjdi9iOpUGRZjpAqQlnajrbdMz+dYnB4CaMbWW0+RNliCJRlQZYV0XPSDxF0gCBJBXk24dDssb7GM6AQpHnOcnHJfrfj9uEdaTohSUrW2y1BjBSTDGfUscDxEZudv0YJz+XlFaW6QKkcY8ej3McRcMQCKUoVx3EEb+g7R5ZWBBli3pMIsYHRmnHs6FrPZqVRsmc+n+O8xR3DQ5VM8E6jREGZe4S0KJUeC8X4u4cQ6Ad3lNLFBmccLMZanK1p2w7nHGmaHjf8sWFK0wydSXSaIkKcVDrvQATcEDXh8d9TEBRKFpRFToROHkmPXuKdjUWsivIla0QkmsY1OgiBszISATVIlZGmCYEa7wLejYzeYY9obKXEd3lbjW+oyoh0J0jGwdO2FnXExkOUQA6DYRgdQ2+w1iPQ6DTFmjFGP0ynGGMYh4q272m7kbazlFURAzjHEe8tWgNBgjdRNibjfXTY79FKkSQJCBjNnqHvaXvFarXDe0uqEpCgjtTG0fScXi85uThBCIVUmodDg1YSNxq8C6gsZT/25Ms5o1bcbA5YbzlZnPFus6apd7R1x2x6ys3tCiEsIUiGtufq+RSlbWwcguT29o62WeFEQlou+Mf/7H/Am4HQ35MHqKxgvb1nskgYhWEYtzyt93gC7z8MLCYlRZmQVTPW+we220C9FVhTYusbshLqQ2w4Zg97nA387ps7slIzcT2WnnEA7ILPv7zk/PyENN9zVzt+++6Jfb1jGDuaekt7qNivD0jd0bQPODXlP/zVz+n6Ea1TfvJ73+cHLkEkgvXtmvv3W16+eEFf1zw97OmaSKmczjM+3HyNTgRpJrh7s+Pq82f4u8DQW7TMaNp4zfzRTaWkitth/3EooONTKySOOCTBx/dtkauYzyUMh0PD8P6e2RfPyKQkNC0PzS1qVvKqOCFYS7Nv2O0CbgxkJxUpOU27Oj7vxO21j5lbMcT42FB93FqJj2B2cZQqxkGIM4ZxaGnrdwxbSNHUScLX4wqvb1jykkkxJ01TmrHjbrVnNCNpXnFy+pqLk89A3/Pu3QPDoDm/WPK0f8NMnmNNjnGO+9snHrePPD7ckKqS733xE+7u3/Kf/xf/lK9+83NGnfDyxRIp4B/98X/N+fkFaaa
4ufsaiEOsabkkyeccmh37wx3r+ye0mvDqxY949uKE25ubqPYoNDsadOqp65bNZs1ut6cspnzz9Qc+//z7IDxv3v2GN29ucF5SzTSTpeIf//QP2W8a8Ak6U+hKIYzl7dc3nJ/vKauMain5q1/9n/zRn/yUcexpu47Pvvw+9dbx6tUlbbdlf3iLCw1NbTBjzMEMxZ7H1ZqLU8XthxuUyrh+tmQ2rdhvLF2/o+ueuL15y49+8Md88ck1/dCzfdry5z/7X8iTOUJMULpkNksZmj37HrL0iUSVrLaPnJ99ymazpuk2vHx9RppM2KxrpEjI8gVnlyW//fqR2fSc2bykmkq8qHn72xUnp3OqvCQtNOdZgbSvmUw1yC2//OovKIsl3//eT9BplOL54Rna7GkPB8bRIXROvd4iBsnZ+QmLxYQgDHVvuLq+wLqBtt3y4X1NlkfCYJoEymlOklc03S2jkSSJoJgc+PaXmufPr3j5gxPKcorQOz68vUWraIVpmhV3XwXaruX5sxc8v75gs9qjxSIOGgCH5ux8iukzhIjlWlFU7DZrum7LdDJlfn6OHQ039x+Q7hqk49D01M2ASh1aWaQISCq8+MDTQ2A6nTKfT9Gp4+1TzdnpCyZlSpAjoY704t3hgPUd5USiEs/Q19R1CyFlNjlnMZtgbMJsseTsekpzsCSpitJel5G4Mz759CXeeN7fvOcXf/3v+eSzTyjzgtuH94xmIC0kh9ZS5AIlU1QiKOfZ39/X/P/WIf1//CRZSiJSkrRCIHGy5+R8jveeoTPs9x1pNSLsgHcD1njyvKDve0IArRWT2RxPh0wB6zm0HV1nyROFTJPjFNcxXywAi9QyvqQTh1KONI2FQfBDXOe6gcEMBC9Iy5TRjggZizVjHUlqGUdHKh1KB6pS0h00rnVkWUKWJ5gQ6AYPZiDIQCITdJqgbCwuoiZbM5lNafseFySQEILAGoNkPGZneKqsJFexeE+0JEGjnKLZtdjSIRPBZC7Yr+pIwEskk2JCrjJcvydNJIqcAdCFRJcWpSHJcgQTUjljGA+0o6O3njxx+CBJdXH0iAwkOkP4mGeAc7g+53RWYE4XNEWOTEDqKM0oJynGeD7crJhMi4j9toG+hnxpSIockQr2g+fh0bHuJelKMZ1OyPP4d89PPeZhR1bmlGXFsN5QzeYUkxxVSFzq+HL2kkkyR+qAUz19u8KMDtvFDQJecXGeIpGU1ZQsS1mcLUnynNXTmlQ7kiTmMSEPjGOG94Jx8CSJoqlHmtrgnaLIfTTp54bgYRxhHGNIbKmMmQAAIABJREFU5XwxRwhFXR9IdIqSmnGwOBei5HJvYkHgHCJIFrMTnPHUh5qubUiSBEHKYCwhWJAOY0DLjmFQpEmOTnOKXCKEoiwjXlxKgXUDOvVomSJkICsFlahiIKTylFogtCRNAm27O4Zqg0Aym03x3iC9J0szbN9zOGwp8wmTYoE3Bm8HDOCQKJGAVUif4pGMHrxx0U/mo0zBSYVKLdU0gRHwCqUyVFJxaDtSVRK8xtkDaVoQgHEY6Pqe3oxkRWzkvBd4F9gOa3SmSNKjt4LoWRlHFzHH1YSsmNL3jmpaYZxkGHrcGPHvY29p9oZBRbmHlQEfRyuY0X+3tREiSgFF+LiZMeAtIHDOIBFshjtW47cMfmAcR5RSJEmOluVxCxR170oFpA7oNIaLfmcQESpCFpIYXhyMwzgXvaQhItQj6CIOnoDvNkzjOEZpHtGraqzBOXuUSB0n5R5Gd4ScKAVBMI72O7+YEII0TXDWRHWTiP9PkkZcuzg2O0IQZbNZitbxz8VsjtY6elq8x4eCcTAMYyQ9WuuOoZ/gXYReeAdt27JeS3a7fTR0J4qiyFDHxto5+13DCALvYBh7nK0xdoy5Qsei1broKTTGYm0gyyvcUc+P8Dgv4+SRI8ntSG3TSRKDs6XAOEtd78mzPN5LnnjtPXGD6y0MjuAdKisQSgAKj8LZHi0VQiWxmA8eYyyZlJgxavuzPMVrjyGg8pyTKuapzOUCZ330gaWQTyco6aNn1wsOJnB4qrEmkFeWU5fivOXpw7cIAs57TDAUxYypKzFDBK7oNNCNHcPYY5yBDrwZeHZ5SqkVbTtSt1tW656u9eRZgjPwRq1oaxis4dCO3HzYMpvnqCC4vZHUzZa72ycOh47tbh99EyHQtpqbuwd22w1SDVi/Y5sViNXIYARKZWz3O9zYUsxzdvst7dBxqC3j3ZZm1zL28R4d3zo8FmEdtjbsDw2vn1/T1o7EOspcsN93WB/vjZgjxXfS1bgw+tjVxMHDR0R78HF7GkRDz442fcNUzpHG09YdZtdTFmcwqVE2+gA1Hb65Zz47J7OAjVmCgzM4juHE3sfAevF3/qq/IwZ+fM7jJk2Ij/Ep0deXFIFMLwnGkiYpk+mUvNKs7nuCTklyjWUAOYKI58PD4xOLxUv8qNFZSWAkVZLnX/4+d+tv2R0eGbuRaTUlyQKH/RaVpUyKCtKBv/3lX1GUOednU5xVWDeipKRpV+zqA+vdV4x9wen8OV5MMV4TguDh6T34jKooSFLF7e0ji9NT8gycHxAq0NQtu313pMTq43NW4ryh6xvqpubVp8/JqoGmW0X59+CidUINCGVAxjPCh4Z9s6fpFXZIOTs7Yb15R9sOEDQnr5Y83tzxu29rjK3p+gN5XiKTHkGNUBkyqdjUN2hAiYqymqOTnGAEaZbgQo/xCqlHjNnStS3eKSblkkkhY8gsmtF4tvs1Q7vh0I6kScGkmuMcZEVgsI/0xnF769CJIs0T8qxE64Sn9RMoT293qN4gVYUQgbxIEXJgGLcMQ8tmc+BkNiGIHOf3PDzccbKouDyf4l3HdrMlzy7QSYa3yRH+U/Dq5Q+R2iK0Y9tscL0BpXF+xJgeax2zyZLprDwOREaqaoH3A21rKcvoP5VJz/PnM9I0oetb+qHFu4Tl4opUa4KwZKXi7Zs7yirB2o67u0d2T47L5zMGa+maBs9AVVzS+oa+dZg+Em+n8wIztJjRcKg3jJ2hdh2COwarOHSOvKhIMkHbRs/tYjajqgp2uwM6gaKQjL1HJylSBpS06CwgxIgZiZJ7nRIYkaIkyRwBGzHz/Zq0lCiVEnxg2NcoSoKMqizpek5nE1I14/HxCTycnS0pJzmPd1tG2zGakW19VK6I4/mrNR719/Y0/yCaqwCojHjhlMMYjw2WSVFC8Bhj4oFo95ixw5sRfEBSYG18iarEk+YFu/2OIBQIz9APx7DPGObnncdax8npnK6PqFmpEqSMxayUCeAZrUGpHOcDxjm8Ay0yUA4pNPJojhfyo6QnmrkTLWhFNIcmaYrMNKMfCS4WBBhDUMSbSfffGWKDCAgNTpj4spYq5hkFR/AWpQQ6SZAiTl1TrdFKoIRjkscDNuDJSslykfPmdoMXjkRpdIjT4m4ckEiETMlFRp4HslIilEJmGcFl5KkmBEVvNN46VCnwNoDQUbseLHme4ccxkg89uH5EzyTL5YSsTOltS9fvIU3JpEUIjzMjWs4RCjgSyEIITCYTvLQ0PRzGko
FAf5CIUBJcicIynXacLpYUZUVeFnRnGl0ZdBI3WGUmmGdTJllGkJbOOHabA6ZNcINGihiwOD1JsSGQFwVpnpGUCWmhML7DDhYxQtdaigKsGTEGnA2kMmXoAqaPU0mnYgZQS4d3Gm81Qnjmi5L5vGToHbWH2WyJUhlDbzDGk6ZxyymUwo4Oh6MqNFIrum4bNxQuEJxC6Ph9YgXBCYq5o+sHhNSkeZQ0xZdKJOGNpsf7MZrRlSNRMD9NkU7SD4GsyElSQXNMkve+JwSH4EjgGi2Ej/ewQknB0LdU+ZQ8y6mH6BUIMsI5pBIcdjV5miNFnN565xAeqiwHJwlOEIIhzVJGE1BSk6UZw2Bo+4FCG4Q3jLYjsY5Ep3S9pe1GghSxYB276C0UCW3XUOgCrUBIiTMWvGIYLAFJmhYgFKPt0UlGINB3I31nIYAdPX07IkVLnhe41OGFQwn/XWEmRJQdxRYSvIuADAEoFRsKpRSjbdi7+5iz4x2TqqQsS4q8pOs7klQDUZoUhEfIKDP8mMcjkHHwDbGgd+7Y5HwkjUWAhFIKpdR3jZWU8jv5XgxPFRhrsdbFObmQEATpEUIhlUQrHUEHQUD4mBME1vBdU/Zxs6U+kgGPOV4h+Pi9wjEMGNJEoVR8fr0USBK8ASOi0Z8Q8dQfm0OBQooIEdrt9nwsh7VWlEXGdFqQZgqlBYmUCDTOBTpraNqOrh0jvcsanPcopQGJUO7od5E4G0AOOH+8jmi8M4gQqXIfVVtJmqBVbGhxDk9AJ8l3h5AMKT4ogjKxOPeQCIk3iqA8Hhe/N2PxMh6dIQicj+fAaB0iRKhSnkepsfMBneTMplMOhwNpnsfvMrEEHe80IY5yUAUWz9D7GBIeGry4ReqUu01Ncnw+RZKSp7MoXcqjh00ngbbfYsfAaDx9Y8iTkh//6McI6djudry/Tbj5sMYM49GrlVNvXRwK2JG+HajbMZrUVctqFdgfPLd39xz2HkmkLgohsCZn0x3YixEpRoS01OUOJQeCSIGBzeqBeVVSdDn90FJ3A7cPK3SrUSF64WznWdd7zs5njGPH4VBz2HUwbMg2V1TaIMRIU+9xPg4tPgYHh2Ng78d7Op6n4SgNDYij12kyqRDCMYYNdvIekT7jsHqkWR0wtePk6hkTUeIGg+8NYtRUUlKUOQSHHUcEEqQ/Ais4Djw5bsWPjd1HmeJ/XNuEGPoaiM+5tRapPUF6OtcidCAtlywW59zcvUUqQZJKhEoQKnodvQuYwXDon6gPHXmmSBLJ2LWUxSk+/I5+qGmaFoHg2ek1ZvQoGYvxssh5+/a3vHr5mmqSgnA0zQ6tc7pxoDdP1N0jwZ5j/UDTRu97OdEMvaXKTlGyYBhattuG6xfPKQsYB4kQB56etng0QiUkWiATTaYLetPR9C0haK6fXbLZfaBt9zR1gx0EpycLfJCMpmG9fqDZd6Rp9MVbE/H4VZmx2zwe1UXTY4j5hsNTG+FcQpAkM5JUgu4RCLzP8aGlaQ9Mijh8bNuO0bb4kJNmOSpb4lyHtS3WGNK04PTklDTpIm0YMG5gvb6DMNJ2hq7vGE1DVZ4hfY/1Hd55uq4hIDidLanKFOcMb9+vqao5zg+MJnCoPaPpqSYpUvf0Q0/XwPqpR8tHAjkuNIymwwyBtm3xoWPsR6oyDviTNMeHkaazTKcneA707sAwdnhrUTLD9D3WDIggqPIFZZaz3+/iGSES+mGHEAlFUTKZVtS1p5pEhU4/dDFT1SxYTEuSBKwLOJsAURZ+qGtW/QDmhNl8yt3DirbtCGJkXr4k5JqhhdEEdGJZLhd0tcH7AesNdVcj8hwpe7rBsNtZTk6uKMqEod9ix4FxGNCqRGsDwWLGHuElmU7A95jx6LGUAalTsipFJYG2PTCOAaECMlFI4Tg0d8hGs5ifEnzgsNuTaUdQEj94UiFQsmboKoZuIE1SptOCIGCza8gyhxeebhiYTKZYP8Tne1QxaPnv+fyDaK4gYMMeawx+FBGtazIIMWdmv9tyenbCu9t3OBMN6FWe0zYdfR+9UyIxNJ1jv7c4aQDQQrI4zxiaNr4gj+j25dkUVecMY8C5KHtpm56xNwjpY1aMHiIlTGuklnjhKMqM5mBBKLIsiwZvPUCAsff0XWC0htOzBcZD5wxeDyRpQJJiDAxmZJZfHwvSqMMezIg93COUBy2QTpEkCh0U1np0kVNMJtzf3xO85+ryinySY8aaxWmB9zGEN+kU5y+WvM13TCczCCndPrB+WjHKCAzIk5zX11+Q6h6XLBmDYLCBftwxmpYsDeQ4Bjzz+TPq+kDfj4yjZT6t0GlKkCC8ipQv1/D40DI5WZIqxW7V0uwaOMtxzpJKzasXL/C6oOlG5ouUk0VG33iuL65ZrZ9IypLL68/ZHLZs0jukDxjb0vUjq+Z3XD7/PaTKGAz84Cefcfv0l3RmQPuceb7k/m7N/iRHyJbga7Tw7J5gNplTTguKqSKdgqLDWE8/7Ll/WPMHf/BDeHXG7e2e+/s1m/U9V1en3N81+CFiq/ddR5pMUELgg8OYgYe7QN9tmFYLlssF55cpl5cXOKMQwMlyzuJkwn43MHQeY6HIcsb+QJbnODvSDQO7zQ0ny3OSNEPrlCzJKPITOrOh7w3BR+T45VXGm989IceWJJcgMoKPvhtjeoZxT1EqVusdUFNOEq6e5azutpzNp1xcXpIXJV/96rfUhyZON5c5WVYilWDc1wilSJRCyEAiC5IsUBYJUhlunx4IQeB9Qpoq0jxwd/PAH/7B75Po6G9xFpKk4OL8jLpuWO02GBzGR1280hqlBe9uv2VazjCjYyDghKbe75hUC4RKKaqEJA2s1je0bYtSJWV5Spqm7Hd7slxGOVkfyIPEmYALgtGNbJs1QgSkqKJcx5W0ux3eh2NL45ByDZygJKRKkiUaqQR1Y8DHBitCGGJYoBQxBiHixwPVpORseoo317RNyzj0nJ2fcHa6YDJLaNsWkIzGMo6GYRgZjUNIhRTyu+JMKXXccsVhTlmWxyFSbHQ+QieEEFgb5X1pmkaZoozNVSI14zge5X0gRSDRCfP5gqLMgIAZRtq2i2Zm6QnEYdVhFwNqldYoGRvW+nA4TtpVLEbHkSxJML3D29gQ3e0fUUqRpglSSkYz0DYDAYVS+rhp87GIlJEKqYSmHyOuXuuEPM/IspRhaDm/mPPqkytevb5CKk/XBG5u7rm9eWC3dwjlYxOojvEaQpLoHNQY4RuoKA/HY53FBxl/J+HjxjhE71mSRIJlEJAkCVU1JcsKQnB4byOFjpzRDhgXNxP4GFjrnMPLAXfccGkXCNZ9B+CWUlEUBcYaJAGpJN5amm3L0BuSRNCJQHOA3WFHXmRxqNJ7hvFAoUvyXJAVUJWSLC9wgyaYgLc9RV6xvHiBGwak9yRF3IorfSDPMyaTkuChq0uCTGialtXTE2fz1/yLf/E/ItI1D5t33N63fPObO77+1Qds76OiwVkas6fpe5JuREtDMCPBW0zf44yn3Tu8XVGkJd5KrAlRyWEczsXmKkkhhIxnyyWzxYSgAreP7
0iXCZvDgcO+o94P3NivuLiqWC4vkSLn0I7s9o8gEg67PevViq7p+Iv/4y1/+vkll8sp3dixXq0JLCOCXUR/nZTxWXLh4/cgjw19ILZfAu9bLi8vSJQn+IGinPDk/pK737TYbkQGyXbY8EP1I4RRuE7hh4wvLv+Ix0VGWz/hA5RasJjlMSJD2ditC33spI53wnGz+hEF/3fUwEi1ODQND6uW4vme3919S2dqTidzpDDk6QRVKLpxjbWCk/IZ++17CIGz00tevfqEtrvl7ds1X3z2iuAsjw8PrJ/+b6YnGYmOxWjfwcn8c6rsFX3TMA4DmAnTecrv3nxDnpdcXp5xd/8t52cvybMFVZbixYaLsx9ju4Knp0d2+3telTOuzn6A7VO62tI0j0wmV/S9RwpB1zhu3x94f/vAD3//+6gkp+trgvQ0w57U5wiVszx5jbVw966O7z4KemN4/elLNpuB9fqJt2/fUKkpV8/OMC6lKDJOTy/55d/8hjJfcrrMmU4rbB8QckuWKaxRkSJpetJUk+XRs77dPHF5eobtU2bLKWWV89u3vwFlmVbXXF6cc3Z2SaI0Q9tzdjZlsTzj5GzKr7/6hlTNsQz0Y8sw9nz66ksWZwVtt2O9emIxm9DbnmArtEw4O5/y81/8gouzPyWEkbZf0zUDy0WgqhYYM7A/PFLXNSfLM4yFpulompbZIkeIDhccUhkuLs8ok4pv3/8tZZVxdnrBaBq268B8NkEKwWb/DcI5lEwpZxmTao4qeh42d3Q7iVaSSZVi25SNWTP0A0IpvA88PH7g5fPvkSSStj2wWj2yb1qCKymLitk8+l3fv/tAlgf6vufNb584vZasN2OMxSDj5eUXFJMCHizWjBEaI2YsqlOGdE/TbtlsNjy7fsXixKFTgZCOXf03XF9ek6SGzWrP+80WZzyplkyqhEENtO0G6QouT18AI8ENVGXGod8TrKZuA31rWS6vODu/JsgWY2uU1Nw+fINSCUVVolLYd9/S9QnFJCfVKR7H5nDDdHrFxdk5y0lGojfcP77l9PQ5XkA91jzerXBO4JEIFcjKwOK0YLNa0449dhyOG87/9OcfRHNlDfT7KprRnUFKz8XJGYddG7HdacXY7jhbPOew3xG85dnVFTe3Hb19pJwL5mcFj3ctXSPiAZVolEo5nxcYLVAiR4kE4weE71EElPAI7cjzEjsq+r7HBRc9VKHH0JEXJWlSMQyWw9OePHcxL6QVJPIUofY4nzC0sF0NXJ2fMDQDVkg8GluXTFTCyfNTBmHZ7LZsbh8QWjKYLY4enacIlWKtRKk8htW2e1KZ0feCuj9At44Fy6gZ+p4sTVB+wm51ICQ+kg3DyOpG4ocEucwZzMjebCieKc6qc+r1ARkcxUVN3c8Yxg1ea5xWdP2eMCRMRYHzEuEF9e6WYppHEIa3WFYIUqyFxeSK5ewc43bcP+549/4bkIG8WiAmJ9CPtGZkSC3pM8vl2Zx6f0e9e+Dbt46T6Tnv33+gVDkvPnnJZz/+Hv/yf/6XDPWaapqTlBXZbEaZfo+kKrH9gB9GUntCv+l5+envcXr2GilKfvm7f8PLk55UClI9p5x8zqvLT7i8yvEG9uueu8ffkmSBfWNpx0BQCU/rAW+mKBXIsw7lDJuVQYoErWKBmIgJ3QGUSiBA2wzstyNXz85QLOnrhHfdirv3HoGkKDVFCYd6z3p1wDiBTjKEaknyASWmCK1RUlIWmpPznPWqJ0kUi5OKgMV2IvomkpSimFBVE84vHXVj2R8M06lEo2j7mkmluVjOY+6ZmCO0A9VxGN6zPD1Ba4EQ0YD5B7//Pe7ub1E6iXlYY0+SVrgqoWtHrA1IkTIvImyg7w407Z4//eMf8OHDBp1YhAg4IzhJvmRRpljaSMIUBb2xPK7fU1ULnr94zv3+A12/ZTFbcLFccnZ6Cann8eme/eGWXu1RxTmPq/csbUtRTJBCc/e0Ji/AiZ62adntG64uZySppqk7hm7EdYEw3lJVk2gWx3F+ec433/yG5VIiZcpQg/IFgh1JAmcnFa9evSCpDNunn/N0GAm7wOT8ORfZP2Eco5zIC4dE0PcdSiqUqpjPlnR9zUr/LSI3PCuvmc/n/K//5l+z2awiBjfMKcscpCDNM5xztF2HUgldFxuLLCswLjY41jhUgLwsGEfz3ZYqhECSJFHSR9xQFkXc0mudRLqVc1jjAc1snhK8RwRBmedcXC5JE0meZ8znM7788of8+qvf4MKAdSNN3bDd7I4epei96roocey7AaUsZZny/PlFRASnkcKaZykhWNq2jdLCJKUoKra7hsOh/q4JVImimuTkWU6apKRpynSR0NQj79898OH9E03dkyYT3r7ZsF4PfPh2Tz/UfPiwij6wICKEZBzIC02aJ0glGIYOe/SLSRRaxgYbGX1yUgmEjMWuFwOEAWcE/RA3vlIJrDX03fAdHVGpKBlH7kB4qnROcBbrBmwQeNEjgExIlBR03tMa/x2+XkhDb+LGMk0SlNT0Q2C3XyOFI0k049CT5wWLWcloDKa1aK/QBLx3dK2gbwMBTTYrML0nlYrFdMGzZ8+50oYP796wWj3ggmGRzjiZfoLUmiAc3u1YXF4yPzlFKsvQPSLCjH/1r/4nNk1Dbz3l7JyT04r/7r//Ez57+ZLnV9f4sOfP/u3PePNmz2a3pu7fcffulrGpCGMS4VJDzlPdMpiOIAJJGpgNCc+uXtK3yRF3r/j05TnZInB/f8Nq/YjKLeOHlkRIcgXF+YS8eM7u6ZZ138YttFIE68hzyWx+xuWzGZvthvNLz8X0lEymjHZgNp/ztI6hwR+ble9EeEIeN7eC4OPG1HvxXeMjlSdJU+pm4Kuf/Q1tM/LTP/pDitKxP6z465+/53dvPlDpikTm6DTjUTaUzYSmawliwJ3doA//Ld4YggvR04dAyCH+TOH/RQ77j9SCH8Eb3kfJqlIVX//Fr7h6eQ5Vwmp8z83PHnDmwPvbNSFM+OHnU6ZnFwihObRPfLj7hof3I69fX/P0oefQ3lH3vyMMNS8++S+5vviU5vDIb775c/7s3/5rfvLDP2Fooqzs/Crh2eUXfPpZR9v03N90aFXwcL/i9FQxX065KH+fd28f+OTlZyRJSt8rhjZjW/+KKrtGq4RhbHh40iSTnJfXV8xm53z6vQO/evtXbA8XJJlksDXOOa7PPuOw23No9rRDw+P9ORmn6ORANYMf/+Qltw8bxt4ymcy5urgCp0gSTbN95On2A9+8/Q2vnv+Ydx++4nE75eriS/7Jn/6AVfvXNLuMy6sT5ouSf/e//wem1rHZPNG2oMQprz9/QeIuSJMF3lucHPj220f+8U9/wGRZgu4hVLx+/T3S1DKOnrdvDlxf/hGzuaMfHU+rht1TzNcryxKtprj+GjN2jEOFNQMWx24j2K093765wzuDtSOL6Rljb+i7PcZ2WDtQ5lMmVYy/kMKBTxmHQFWcoRODC4FUveB7P3jJz352z27bUJY9Z2en1M0bnI9+7OXijGa35fmLK8oJCCyHVWDYF3z66jX1YcubN1+z
rHKul5+QZQsG1zKOI4KcD7e/xIwgZcnrT78kzSdwJFHv6y3eabqmJysuuLy84OrsmtvVN6y2B3QeKErPt3ffoKspWsJ8OiG4lA/fbnj9xZR6u2G724Ay/Ls//9/4vR/9CSJx7A63jGKgOTQ8v/6Eyy8/44vPD/zVz7/C9BXXF6+pyhzhHd53FEqxetjw4ebA1/UDL15eM2qFJqFMMvZb0NqC7DBjR7dVpFT4UDN0PWJImKV/yOlZRVO3NMJQTE9YnC55Wnds6gPeGlLpWa0HNttfxwAyPWFaPGP5/Ts+vL9n9bSm7WqGvubZ9ScEN8caC8cRzn/q8w+iuQoEurGOtC8HZV7y9PiAd4E0lcznGqkykqzAmZF63+B8x/lFzsWLL+jNjm7c8OnzZ2SvNH3b0A8j1gjcUFFNJVIQEdAjBOVw0iF0zDTohj1tNyKFIE0zymLG49NbprOLKKmrB9q6ZT5P48pRKfJC4cOWdiVj4F0WWJ57NoeG05OKIlUEAoMwPNzW5PMUnRsq1XJ6/YL1dkPTeqyALA/UbYcQJdoKFIJgJE23J0um6EQjtEK5QJ4I6qcth80WnScIP1CpC/LpDKk9367vOH9xjhkbhDVMqpJiOkPbAVFG0+vTk6Xvnkh0iEVh68lcSTWZMJoe04+4wZCqCXla4f2I7XuGg2NWCg4B6npPGC3lFFS6J58VeJ9ASJnMNMp6cBGD29WBX22/4nxeUBYTykwjVcHj+obzi3PuNw+s/33D1UXFg+wIAgY74uho1z2vXug4GZWBD+sHqsUzshn04YH9KnB2rXChZruHRCmmL3O29VewndIdAvunnjQ7UE0FusyR+5H7p4aHm5qqikCKptlgx5yiyDBhD0Q52X63psjnVJVmNIL9fsAGhRSKtj1wsAHtJbJ6JFcTjJX0TpCKKW1tubicMJ9XKJFifY/WmmYvafZgvOPyVUm5bKL33gf2+zpqh5UiqwSzRYKXe8qpIS9TvEvp647TZwqtBYGR/TAwmg4tc9IkoBIQYokbLjg5iTkp9/cfqHcNwQm0zgky4INhQDDanjR3VNOCLJnS1D1JPlDNJ2gtGOwGCFxcVpTFDG8nbDcr6rYmqzRJkjKMgqyI8psin6BIONx0XD+/okgmdI3nm/0bNvstIDFaYQmEdoNIFbu2oTUGJRXdsKNpJ6RFzuJkQlWcY9sDaqjIXUeWWESZMElzRmtIU0mWKtpecLG8ZDYvSXQCc8G8WnCXrZiWBdOioN+PjE0gjJIyr0jmGUmi2IavOPQt7dAymo5hHJnJz45Ah0gh3Je/pbM1Ztdj10+8f/+G6+tzLi5OKasKUBHJK/UxLwrcqLECnNeY3tF2NUJqjsq0KGmy0avgfZSdCQQ+SLwNx21XzFwJAfreHAEY0cuRJAlVUUS0rhbkuUYqH3Pvdj1NO/Lpp4L57JJf/eoXrNb3sWkTAq0y0lShtOTkTPFCCowxJAlUZUJVVRRlSVkWJFojRGCxmGHHkWEYGI1FyJRD3UTZzRgbxsEYgndkaUaaxvwWbwPXV8/xVtHWPZvNe7JMEyf6LW0XJdyTPtlaAAAgAElEQVRpUUY5oo/SDlXq7zwrdvQElzCfL3HHvCyBZJAj1sq/K7ddlPVwJM0RjntLG6+3C4bgI6VQax3jBQaPcTFzTYjNsck9SqvTKN8VCDyQVdFrxdFv449QBYjkyLY7oKSOzdwRguGFohkiYMOFGGZvnaMdHYk3qOPGMs8VWIFWkhA8u7rGffuG4Eaethu6wTGdTel6cL6OZM0qY1Ke07UN+66OZ+SmZbO9Jy0S9vWOuq0JfM2zZ8+ZJl/S7ff8xV/+X3zz9W/Z7B7RWpCmipOpRp4t8FVOt2/p2pZnhaKbzWK4r4WmH7De8vj4xMlsyXI5JUXSrt6x2Rc8bjrqVjGZTFhUOU+re5RMKIsp69tbslmkX2op6fuRqpzw8GEfm9xEkuQ5i3PB2PS4rotezhChJJJY0hyDTqJv8pg9BR58lBsGH8E0SqVxw/No6OQNrQy8+WpDt/oFJ6cTqklClZeIScdsMifNJ6hEM4YDQ51xNrlGec/mzT3nsxnB7wjCgfIEojRZCEEQH6Hwx4pGfCQICkASgsNaQ3MItO9b/viPfsp0OseLHdun3/K7r2r+q3/23zCdrdjXNfX4iNu1EaU91Ixuyw//8Eu+/faJ1eMKIT3Lk9dcXSx5XL9hMbtCUKCTlyTJew7tI6fzM66uvgB54N3NW9JMEITB6Q7ZaubzBf1wx/79GywpX3zymqftDZvDDnSPVU8sT6/o2o7gBrKiJJ86dvvfIPyBxeSMcnLKZ89f0G/2hJkknSjuHxNytaZQmkU2JwszqqUizeHh8Y6bpz2NTbi6vOSX735NkWZk15eUVcb29oHZSclkehEtHqrjk8sXmCEnVYrV6obVOmMyrdjs9tzf3vKDL55z9/g1ZVYwn844vXhJUVSsHjYo5SiKOZ9WP+K82qGbhodfr+ndnsdtw3a34fX1BVJqttsGlSbYIBlNS9PscL4lyT7h9uaR/W5gbDWff+8ZYdtxcjJFacXp2YI/+c/+OZeXJ2zWj6xX93gG2r3ixesXGFfz9HRLoOH2NuC8o+tadrsNr6//EZ4NzimkTJnNLW++rvn+lz+h7h65uX3Dfv+ITAKH7g4pUnI9g1Kx2dzTDzHva3toqObP2OxrMjXhJz/8p5xdLPFCUdYldbNn196TVxNSnWJHCyKjzCYsrqd89evfYI1nuTwHn6KJvvFEpcymCzrnGEgZxprRDli9oum3lHqBFFNAc/mq5HH1jqY54EKPECPnF9fsDmsSnSD8hFlxgsWwXtdYI5jNcj559TlBGnb7J/Z7xfXpZ8wmM/L8hOVS8sknPd++fUvTH3hqPiB1wdniS2ZZz3rzjixLYnRROICoWC4XON/hXeDT7/0Ur3pubm9xLjApJiTFFrHd0ZoRG3Kk8OiiwCQ9u92e/e6O6+sXJO2e9TrW5S+fX+O9w5lIhFYaHh9u/t6+5h9Ec+X9UUZy1P8PncEOPUVRItJo1i+zgJD2OEXNGIeR0/MTnFCEXjI6T3CaLJtglEFrj5KatrHkZYJKo59C5yWDc4xDgw+eJJU4Z6PPQXzEFSuUKMBH8towjCg0OlE473A2PiBSWbRKj9QgR5JKklThnEU7j9KQZ55qknF5nRKAp/uESXVynNBGI7JEQZAkOsUOjnHoSVWkZSU6BuYRJOPYI5Vk8ILReQg9iexJE0t0DkishWE0x0JEoqWKP29vokNBJHiv0ApG14FwRymUJgR/NLdDmsRiZRhiAJ1AMlpB1xtE8v8w9ya9kmb3md/vTO8Y051zrKwiWRIpmu2WacH2xoa9s4VGb730l/Dan6GBhr204XXDw8owPMDdaEuyLUskJVEqklWsrKwc7hzTO53RixP3VlFCsw0YBjoSgcQdMiIy4o03znP+z/N7cqfVFB1pCuiyRAkIFrzzlEVEKIWzDh8iyISLlkiVLZwKpBRUM4ONlvvtmuQ76lnEJUtw2TMfcFRFTfCBwhiq2iBNyTQIht6
[base64-encoded binary data — embedded binary blob from the diff; no recoverable text]
mrH3buR7WZL20qUySTiL7/9Cdvrp/TDif3+nv39meWyo22XxCg4Hc9Avoedz2emaWIYzygzEsonuNADI12lsfGMjzXjPGFtz6KpePXqe5x3GN2w2ymu5ILF4oq6yfl7x+MJqQRN3dGfz9zvPhBYMpwt54Nnuaz49psXnPYlL548RxrwcaYfd9i3JXXxGBTfULfPkPEpz18scWHkhx/eUZiK0+mMECbLxjki7jc83Aae3Wz40dffUdfw8faI0gqSZuwth/EVr14ltpslTXXFq18P/2hf8wfRXCmtOZ7yZLKsDLr2/PDmAaUERSVZLAxfPf8Tnj37gg+3r/lw+wZdOqQKlHWiKBT9qWIcFUOIaA0pOc7HHi2vGc6OstKUVQURnD1SyRLdZHTjx9uJwkiGYcTFGYVlvz/RzyXWBpz11M3Ij779gh9/+3PsCB8/3OHqyN/+/d9makttuHl6TV1tKFeJ23c7fv3+Hc6daRvBj77+htPG8eb1Jz68uWe5dXz77VOqquV0nvntb1/nwsE6goP18gnnB8d5OFMoyRc3V5SqQ0TFuX9AicDTqy2v3n0gSDCiJsSCwfXYcEJoh4oaO5a8+v6W1Aa6VtI2klpp4hQ4nj+hCsOmWaLVAt0uEMqx2dY8vVnz13/1GmUPPH9RsnkSEfWZEFumo6SqKnRhOO1fM8SeVVlkbLqf+cWHd3y5/YqFuaaKLZ57jDbc7z8iZcOyXbA7OrxXSFmiVdbsizTjY0aLemcwKbK+vmYK04XeIy70vYgqBCIYhDccTp/4/5h7k+bMtvNK79ndab8OXba34WVzedUVRZdUFbYjqsKOcPNrPPaPsP9FzT2zZ/bEJbpkq1wUJYoUydtnJoAE8HWn3a0H+wMuFWGrZg7lCIjMBDLP2Thnv3ut9azN+RmmLPIGsB85HEe+9/1XzM5it3cMvWU+3jL1W5K06KVhHnr6cWZzccny4oJ1aPj2nSbNEdMUFJsKXU+4eseHz18QdpFf/Lu/pbGG//Zf/df85st3fPn1N7z+uKU9+xFffPUWZ2cqpUlrSTcdqcuGQha4IfL2d1/zvc9e4YLGOUFbLSiVpS01KpXgJIvNkTjPCCpK3bCuFsgp46GHccyny97SVCVDPzKOHucEWlV5o+BmpEwsFgsELd1xom0WFFXFu29vWLYLyrKAkNh3I8O95dMf/0sWixalPTY9YF3k/fGexrdUpqSun/H+6zcc1bdUVcXVq3N8tPz8F79gvV7QVAUfvHjN3cNvUVWJ+uiMKVo6a/l49ZL4zYGHg2MMiULAt7d7RnuLEx1NW/Pu7T0yFWhdIoVlu33PqjmjKstcihojc4wM2y1KlZRViUBjh8jHH76kLGpi8FxerRjtLZ999holC6bJcrbenLItAiUt2/t3fPDyFS540okip7ShqnpimJjmnr6/I4asCFhvT1CZAufnvP6QeJc47Gcu1hewiDg/M7oBpOFhd6AqMzQg4jBa0g1bYgworYmpZ7KHk0V2YLY9F+fPAUfCM1vHze2B9eqSlKAwJXXZ0JQFfrbUy5rVcol6dsE0TZiyyLnMkypSFcUTpe/27g7vPX//939PcALIG8h5npFaU9UlRZFfMpUpWK0WxBQZ55zT6oeJEBPGFDRtizT6yVIH5IFUkKsvTiqWtYGqbv8BBKIfB7TSFCd1xblAUTRoVQIBwWNJ8PwPhqsQQq4VOH3sncs2PZ3fFwnxlD0ipdPglG1iMaWcG1MqK0mnzip/6lMriuJJwZJSslqtUOqx2yvjw1erFTEF1ps1bdNQVSWFSTjnWU01s3VM1uOsp23bJ9BIjAmp8hrsh4FDd8T9gy6xbF0sioJJAg5iEphKU52shMIoluUKY3IxsPAJbz3DOFCWNefnl7kvKzh224cM8EgR7xz90bPb3j4BRVbrM5bLhrYtUVpkRLh33N8f2O8O3N0fOPYjUmiub3qOxwNKS6qqpG0bzjctz55dcvXsLNc9AKZQ9P1Id+w5HI68vf6Sb9/lnKREEkKiritWqyVNW9E0JU3T8Kc//RNMkS2O82T55S9/hzAa7xxdv+dmd42LkSSrEyHwJF9lSSp/nBIIRYielCIIiRQCfxryYwzM08jd8ZqyLPP76qSuVmVBZQRS5gOAeY5o2eJOucOsluaDPW0SSnmUkjRtSVFVtKsWUyhS8lzf3CBTzlvFZHn37huMrNA6FyxrVfLzX/wFZdWyas+oTYXR56zOz+hvBpIDgsoHhESKao1mZujfUJkLpBR0B8f11yN2LLm8uuBqs6aqFFWbmNM1230e0rQRGLngj/7kNWWz5M03b/jydzeMnefbb9+jVAZqeJ949+6GH/3o+xSFIREZh5nN+pLrdzd5r1SWvL+/4/mLM3b3DheyxUybSBAjPkWkqtmcVXQu96jVzYKyvCCMCSFqAjOmjKwuE/YoEDpQhxYlCqqypWrf45OHVKJUzTB+hdT3VFQIZSjrBc7PzLPNh+3a8OWXX/Lh60/QTYEUnmQMXdhyGEvubm8YhxmlKsTCc7bxJJF7yeZesFot6fstMSXqas3Vq3N+9flXzMeO73/8A/7sJ5/x/APPZ398xdu3D3zz9ZbffXHHzbstSmuKMneezbOA5E4qmibalh9/+hOO3Tc8bC3WOarliv/yv/kMOwiSzIPE+7tvmKcBaSL92DPuj7z+3jny+wJpRh727zm/alHigsqc07QN28Nb/vrnn3P+vODy8gOadmacOwqz4Jtvj1xdLnnx7CV1scS5A+/3t4QUWTRrXr24QiVH1x+Z7UyUgW/fWOplVvVnl1iebTBq5uu7r5BHxdnVhrI4p1hp+uPI8TAwjR5nc33Jbr9lmiZ+8P3PqOuSyECpNKbasN3e0w/3CClw88DbL78k6RukviL4BTK1FPWK5VnBYedxs6CPPdIcsHPurQxSYe2Rs7NzNhfPmN3A8bDD9YIffaY5PAjcrOjuDXUF9+8fCMKiS8Xl5TM++7SlagJv373l5vqOz378h9zdX/N3v3qHKTSbi4Lj4QGpDGM/IoTh+YvvcTh0fPbZKzbrCu9/x//2v97y+e+uKaoS5x3b/ZaqTXz4+vsI4XF2xPxHpqd/EsNViAFZJbyLHIZAjJ52qUnREFKiGwOORDccQRiq6oLD8YjAst+9J6SI1CXtuubtzTVK5XKw9dk5wwFK0yARBCsQQNusmFMgBhgHweHwQFN4hABTtFTlmvpZyTB2WNehkmJZrCjVmps3NwSnsFPg89884GPHxcUFpW7ZvZ95O/2S9bqh2w+MXU8IYKRi0B5tSp6/ek43DFxdneGsZpo8pgo8e1kzHGUOdcuM2I0n+1J+UAsO/QOyrrB4kvd8e/sAakGzrCnKgpQCd/093QTrZkmpNYqI9QrrJLu7HqGgXq25OF+w3/bgHKWKGLPAzol2WZC85HA/8uL5Ga9fNjjfMdoJ+1AAHpEE0zQiJk+la5r1gt279wxTR7l0KHVG3x9JPmC943gEGY/ZBhkG+jnx8sVLjt7gnCXpRKE1owUfJVobFCbj7hcyWyWDRRjPZDVajSzWNcFD
nC1N3WJ0gUoaQsxZO5VPXBMJU2mCBrk2rNszykJRLSu6bsez9TPWqw3OzlzfvaN8VqIRLBcVq43BNPm0K/aB6QCCDZsPSm72twRpqRrFm7ff8vzV91gWhun0su/tAD7BiQZWFYoPP3yBG07rXTj8tKfQK7YP96So0HKBFIFFXZ66WDzb/QMmFEQpiMESgmccLSEm+sHifSI3SM/5xCgFtJKM40hwFqMtzgeEHHMQX01onYjeMUwT0iyQCi7OLxDS8vk3n7PfzTjfEULCl54wB86eFdxt94SokdJwd3dNs7jgOHiOnUMky2gFu67DR4FUFUVbcb99j7OC0qxol4amFGzvH6hLzcpc4JNm128pW4mSHq3h2dlHvL85sGhzXqcbDhAlZVVQ6AJjJEJYzs+fkUKmHAkcppBs9463/S1lUbFoN6w3Z6eeoprCNEzjIb8krebQHRjGA4tFzcNdDql7b1E6sVqu8WHOWT8VmFzk2Pdcnp/T1A2mKnMPnRuZ7UCIuY+oUAqjJSkFrAWlYOh6Zjtk5bCuOGwfuB22rFcrlJbEGOm6A/Nkc39Sgn7oWC4XnJ9dIZNiHh39OGBMgRAyB9uN4u7ujhADTd3QtgvatuXY9VRVTaPbHB7fHTJCXFcgYPIWaRTaaHwIhCnj2nfTkdv7u5wP0Sr3nrhISOBjzPUEItsbrbcomTNKQj72ZAFCUtcNj2XEGXEOfW9pmow4DzE7DOb5garKg5Pz4UmderTNaa2fiHiPg1oI4WTJi6fBLTzZ+uJJEUopEwSTSBnCEfKQNU8TnPqsiiITFB//rgCKokQIOB6PhBAYR8HY9ZyfnxMDHA/DKQeXMfQASmlMUTIOE7vd4ZSryvmeEHpQipC+y3pprU9DZDh1z+Ri+PKksNh5ZjzM3/WUSckQZpTMCmuhFZfnF0x2Pg2GAInz83NKrWjrkrRekZ5nRLydZ0IMjPbA4d0D0+Ry55bIdllpJEpqzi8uKMqCpm3Y7XZc2DUh5S6saZq5vTvQDQ4h8vMrRk9ZFU9URVO2PH/ZkJLIdMnH4uuQOPZHDt3+dJCRLXplZSjKDP5Yri9pVhV13VCxwVw/483NNeFk/UuAUpl++7gWIFe4uJMi9wRFTr9XCK0qPqn+JW/cX3M4dDxSMIx+zPOl08FLSUqCsqgoC51tyFIjy8cC57zWus7RH7sn1dE7j3WW5aKgMAYRa9wYKJYZ6SxsyWJRsVgs8fHI9tBz4xy77h23w1fMw4wSiqauqKqC3a5no85pmjWFTIxjttXGJEEXbJ6t8Vh2u/FUyyFQxkEqsVPEWceU9vzff/krXn94gbULli2cb2qWi5r9tkcqyatXL1ifNXirmOeElAnwjNPAMPZYJxmGiWO348JfEiM4l9/NRQ1FHZCFzEqonXj16hXXb9LJHurRteDY704WXs3x4HDTQPSO6BRaChZNjZQNkjkDQRSsVgV2lhxsnzuRVMQHh58Su4cBo3Je8Kt3v2ISVygMcYZVs+Hm+g6pSpSB3W5LJQraJqviMYLWBdVCUVqwoyCNianvMVox4Hh3+4Z/+3/8Bf/iX3yG9QcKrfn0Rx/y5//yT/jZv/sbtoe7XANCwbr9AS9eV9ze3jL0nmXdcnm1xrprzs7OCGHBeNhzfTB878OPiBHuthOVfs6nf/iKuZ/ojwP4SJFWfPqDZ4xuZH8ceLjvwFe8+PiCmHqu37/hxatXXN9+wTSdakgWNc46VosFhMBhe48xjt3dzKp9zlmjkEBTCoZDpJRLkih52N0hywx+CUEAhufPSuqqZX/0CJUwpeDh/h3b99c0ps1QJmV49fIF+8Md0DJXhuvrdzx/ni34j88Da9+xu1Y8e/6atjpnEoGiWDK4gsac01YbGjMwzy11PdO2Gikq9ifa7jR1eK/xXrHd3rMfj5TlkrpYU60mvv7yltevPmb1ak0Mjr/82V8SveT562esz9e8eXvNy1cbhsFj1Iqri4bl4gxTWb7+6jpnnl2NTBUfffia6+s73t8e+PbLA4tlyTRMDCoTdh+2bykbe3q/Klbrc0wZWK3WGC2x9oB1v/tH55p/EsNVLq5MJ1VCMAwzQkLwLhcKA9vde4IfgIIoMlLx7LygLBtma/EhIJWlajLyF6GpigVD3GJ0RvZGm08T3exPQdFMpQox5vyKzotu6hR1o2mKFVpqYrJUasHd2x1FoamKBqNrpIDhOGCkwY6BGAy6HNneebrDyNjPlJWiP1QIZooqFyOeX51R1RuOThCip5CJdlES8vuaEBw+jiQR0KbE+8h235FEJljZOJNCIkmJLiU2RqJzQKQoKlycsTaiY0LEjGJWPjEFTwwRaQWpjCQRT9713K8SkidFiXfghaKtG2K0ICVSlHgbCPSYIpHSQPIaJSqEdaTgiSkQEgjrICggkGQiupyv0boiyZA3ZUrjxpg3pUkQtMsUNhdQdURpSORiXm9nYnCZuucERg8EVZKERKpEUTZUZYGzkegDhZaURYLgQAqMEhQ6IVKkkpqmqWiWS9x45GK9QZHouge23TtWz1YIGzEmURjLYvmM/r1m6B12SLnFW8Ld7T2mqVhdLNi9eSD4SFVW+OCZwkitDSK1lFIhVQIcpizYHTKNTusIGGxITE7k9Stm6sKQFpIkPLOfGLqJ88U5bg54O+F8zkj1/UBMMm9kQgBFLoeOeTNpZ48WguAFQmVLjPWW42GiKDanDXHA+4n98Yb9oaYuNZUu0akgOo+bI4qcL7N0FKZCi5JpnHFuQhnJsRvoh57oJxKwHT3iVII9BMt8zA8nIXPRrS4EkZDLRrWhViVus8RUOYQeoiNGS1WCENnCY2dYrRoQeTPjg6DQkkJX9P0BpfLX1dEQgz7lKDQ+Wnb7h9OGTJJSHlym6Yh3ihgsKXoOx8NTia4PIXfaKI1zGT/uoyN6QYjgnMeb/KyY7YApKnwUpKRRQiHI91uKvHnLQIFIoRu0zN59ACE9ITmik9jZIxmZnUPHXNxalgUIh/MDWhiUlpSVzn5zP5JGBxaG8XDqsHIgAtaNgMQ5mcmBWj1BJITKOIB4snQEF5EyDzFJCmbn6IfhSbGYZ4sPEa0LkgDrMwRDqmw1CyGh1akwOZ5yKydlLGPUHwEDGWQRgicGT/AeRO6EU048wR2ESNmndXonPCpKT91Up8/F73Vj5WyNzRnTxFM+7rsS5/REKXwESoj8m8hTjuu7OpJTjoxH6AXMNrDfdYylxTz93+3TBltKhVYWa3MpNuTsjlIK513OgsWU7V2nfwdk1coYw2PhuTGa0hSUl+fM04yd8wm4tQ5HhjNYEuEEMRntfMo7aYzRqBNkhZTpeRAoS8Nq3YLMA6SdPcfjlItmJ8c4zjhncWQwk476pLgpFqsa512GmwSXq0NC5CnGhiTNj7h/gY6Cqspl7SlGkCC1JEmQRELIOH9iwofcp6dGS1WaXDrau4z2ZmTpPEJqBPIkVH1XF3NaHXDq+XosVRaIp38L5GwVSVCLdXafnF6uQpygKTJnkp8OBmLK3WtS54HZZGgIp2iCFILgQ1aH5WMvGUBAioS
3M856Xjy7xKU7dtuRFBNyFXGuI8mBSM4DIsDNFudmgpSoKEkxkzuHcYQU8iGK9E/rEpEo6sg09viQiZs+5GyuUQVuzp2WUg0cD5GvPvfE5AnRUdWBpmqYxog2huVyk8vZFxVGl4SQ6HtLiHu8dzirECJhjGR/3JNSLk+PBOpFhXMjkZh76bwnBofQxam4dcIlmMYpOwyiJs25JoIkc6WDn9gfdszWoUxeGz7mInpigQsqd5WmiVIbxtlhiWAiIiiUmAi+J4kGKSraRU24G5FijdFlJnUGRW0avAv4mKjKinZR0XclwgVESNg5ULYKNSj6eeC3X/w9ZSVZLi11aWjbJZvNgsvzipBK9seOw76nLc+5unrGseuY5wPCjNzfH4lJolV2EQiVsD4yjEeIHhETzWLFYqXwoyWFTJd244Qbc8axP1jmEUod6fs91k70x8SLlwXcgtZQlIr1quawf6Apa4zUiBjxdkYkRXKnEnuVCM6ADITgCQm0aambCudn/BxIKTANA5ozzlbPkdpTKUdTOO7f9yyfrVm2S6bBE8IMeLSRgCEEzzD0oCDh8X5CyorVpkEIRYyK1eIlswu42aJFQimoq5p+6xAqImXIBOiTY0Pr/DPVdR11Cgx9x3oJZm2IyebsUxHxcaDvJy6fn3HcH0jk2qKu76mqvIe3czih5DPePbtiRH4XUlPqkrJQFAWkkFXpeUqktmC5alguVuz3d0DKirBRNHVDiBZ8wvv5SUT///r1T2K4egzvloVGVYZ5Gk7FrDY/R6Xi7vYN09jQLq6I0XAc9lw+P2ezOeNw6LnfbtFqZrNZM3RTDjwnSfDTyTqRCF6iheR48JgiP4iFTJjCIBJ5MzNHjr0nniXOzi+oG0OKGd389bfXnF+s2aw1ZlmyWhjubhIPt1u07jGm5eWHNYetYzgEvEuUZeCwjyQ5YVxCSsP6fElKFVIFVAqnALahanIbunWCY7dDCElVtQzdxG63ZXPV4rGMwSERNLUmKE83zRAlSiiqYsHsPf1xwqSAlgXVoqD0ESOXOQQfPdNxRAqPELm5W4hcg+hdxMuCoAzeJW6uO9pVi9QlMe2wcUKJPMglFMGfE/Y9Is0AzINA24GqaAkmh7YbJbHSgTKgIsYIEDCONpc9k5jlmMs/p0BQniQVShqcnfFuzpsGREZiG8c052u5KAy6LDBGY6eR4DxVbbBeIqKHJDAiUmmJmxxGKrTIhalVSiy04jDs2R/f48OedfMKLy1SdVjv8PEV3e6IOMEt1puCLiiODx3ntaZZlaj7FTFAWTVMwcE083y1RoQlnohnwLHFMTCLASlj3syUhn5wmHpDdB47zRgrSSR89BmhPeUybT8NjLbHu4Apz+i6A2XVkhJZuRE5mD5PE84FokgZAZ6y91gqTfBbDv2BRVtjqpKEYrQ9tw/fEPzE2WrFatlkW2P0RBvxSIqy4u5hz9XmJTLlsj0pI0kEunnP9viAnUfgDCcEWoD3jq4bcPaYDylEptLF0GJMRSKDIhZVQfXigjmMdN1M3we6wwOb9RIfZ+wsCaGmWTQM445hGEkB2nqFqgSH7gFtJA0VXgqEqFlvVggZ6Lo9+/37PES6lElRKbHb3xFDJm4WxvD+fkvbrmmahJwlzs8IoZBCEbzDnghZVVXhvGeaZ5S3TLanbBp0rCGCVgkRVQ7/qvyisD6iVUFTtaTksVMGSZRVQQxg57yBloinkL4UgqptSMmy291SFS3LdsOqWbLfHfBhxEeIBHwYcMEyzYGYLCkK1uvzHAavGtpmSdvWuVgyemIIWU2bpxOVsM5E1RSw3uJOhbjSC6z3WWE5ZYxCjMhT31ZWZgIpKpCCFD2cho1Ma/NPmaiUImWlsPOMsxl7bYqMek8pIAGlM0ggpvznY+SJ5hdPNDgePz/lp/IsJvDeUZgy91XFrKx4755mrUca3KNa9dRXdcKjPyljp+/12CuWhzBJ1w8Uzn83XJ16eVJKxDDjnSWXama4SEyRwhS5KsTn66J0tmTN80xhDFVVZUBNkhQmUhrNoq159eoVEsHQD/R9T9cN+BC5vblhnD0+JKZxwHkPQefMh4J5mLAiPdkCnbWsVktWZxvatqEo8tA+jTMg6bueh/sH7nf3DN3ENAes8wyjBSUpKkXy2TqrC01TZ8Jkxt/nUmPr7GnY90z9DFLh/XyCT5xKkZWhMsXTOsjDsWCyc6b8zh7veuIhq6NFPPKhvECoAqLiEbuevqPVn+5LBrykR+DM7xVLP34cTwOt9/5kEf0OWKHViRIpvlNE5zkfTBidQ/I6VGht8hBrcmF9WZwUt9IAga5L+NnSHQ90/ZE/+MMfcPcwsIsD3nkEE8euo2gSRldUdUFIS5QSpBAJ5EqM4ACl6YYj8yhYrdaZYEY+hEjJZrBIshTa4FMmNXqXIURSlAilQW4xpuL97T3zPBGSpygbfvDJx7SLmqIo0apkHiPPv7fE2sRhN7LfTyQmUgTvICXBer3mfvc+UxRRCKHwrqbv5nxoSmL2Mw8Pd4RYEImE0eb+ThfRxiMVpKBJSVDokhQn5qnn2O1RqqQxCu8yREtqTVXVFFES4kxIkqZp2L3fEYNEaIESJeuNRuv881aWNc2qRteJFFLOli3XiACt2TB5jxKBRV1hqiIDPJLFIHA2UW40ZV8xHie2hxv+/b+f+PijDYumoK4euHvYc3ZZs7YFh0Ngt91Tv7iiLBuWywXW9oS05be/fcvlVV6XUmiuLq/oXM/77TUinIa7ZUNIEynZvAcoNH4eeLi2dJNjstCuNlRV4LDf4Z3CiCuqSlLXBtCUlaYwmuE40Z6tKHW2DM/WYUzB0Pf5GVZIlssVnOzsSScWqws2Zw27hwPB9lg/s9v24FZcbC4zdCQ+sGgSRxNZrdZsNmccxcB2/5aYpidC7GrdME8+g4rIB7p19YpnH55xf3dg6C2Xq+e47QMh9EzzlmGKXK5eEnzuDFVeUFYJoSeYc95WoOiOe+pmwTRkp81cZtXz4nJDZOR+u+Xh/cwf/MGP2D68YxgD/TByOOz56ivParVgniLOJsZp5nBwSFpMkd/JVbHCu4BWsFhmZW65rvLeVS9YLc+5unJ8/eXMos007ZBminpBd3wg+Dnn20P5j041/zSGqyBRVmZilIqsFg2H/XsqvUBrBSkXRA5DR9uuqU6b6Wneo5t1tu4EQZnWaAGL9ZqYIsNxyzCMmFJmawvkQkgshTwHyBOokCgtkbEiTIH5MHKXJlIdUdIQXGIeBzYfXBGC5nrX8+b2nsu1ZrNoKPWKlDJN8OFG0DYlmxdrCIo311u6aWZ46ICB5GGe7vj4hx+htSJE6B4EMUqWZ44QZsbZIURNYRJXZzXhrGS3MXiZXxqtirjg2NkejEKLAhMLVJAkeUBPkmmwpEJSrTW4HJCXVcL5SN9NJBKlkRAGvE3UvGIeZrTUzFj6/oHj7i1//JMfMs+BbnvgOBxpFw22UxgpIMDxwf/or/QAACAASURBVLFcCYbDRHewuNlQXZ1xfJjQRYkyklb3tHrNwQ5EJFEU3B06gpwyJTAmXB
xZn68IYcbNDmdHijLQ1hWD9xRFSVNVjHdHtGrxww6Ex5c1QU4cDyN+CJAUUjUIbTketyipaNqGtqq577aIckk/Tbz5u2tMbbHvvySJhCgSl02DVztELUkODg+Bn//1z1itG/7ZDz9k06xwc8F/9af/ir/4i59xfXuLDzPLMvLJ977P519c01KyOXvBD374MaSCN5+/Zb+fUaZlTm85vyxo6jWVaRAx2zF++MMfM/U933z1W7bjPfPNimrVUpsCuTaUKA6FJbkISTHPYIoKoiTFkH9uigZ0YJg9bvI0peEQLFebZ+DAdiOFKKCpOdj8cpFSYVpFSD1v7g98+yCoSkOcjiybc6TxVI3l8vIDttcX7B86UvK5Sb0ueHN9y346YkVANjXvPr/l41fPqIoCFwP93NGKJW7ak0pJXa2RfsMHLzX9sCfYgJsq+mTZ9gPBChRrFqZl0bbs+zuCmPBi4t3NHVrUhABSlvhYshsSsVgQlMJSUJcKEyoi0B87rt9d8/rVR0ghcWFi9j1JBnbbiWmaWC3WtM2KONYkLWjMmqbYZGhG8CzrEqNKZm9BTJgCptHRdQEpSj58/SnLZX06hR2BhJ0sIkhSsqA8i7oloRlth7V549m2FYVYMLkOLQKrqqTv8qY1Fh7rPcMwQ3oswM00o1IPSAXDcAQRqeuKabznfvce5yN1ueSjD3/It99+jTGGq2fPctg9JEylGLueJBLNquH2ixuKKqu/1gW63mZr4jxndSUlLi4uaJcNzk547yiLluAhyYzIlkJmCzMZWpBSJBAoq7wTDoGMQdcBXAIf0EIhjaEqKwLhSeWRUuN9ZPYuw0dEVheiDxnEEXImSwhBlAqpFUkIkILCFAQfiaf6BOsd+FOmyZQntHsuhg0hw0ik1E8ZnEfIRTihzB+VrMesVruonxSwGPMgJGQuevbRowp1sjOCQqFFpswJrSiUyqTCExnQnNDxZZH7nOq6pip1zgGVBmtnPvneB2gtqeuS9XpJs6h4eHhg+3Cg60a8C+y2Pe9vtxwOPcduYDfM9KMlxuzGiFEzu4nr29/kAdkoLi42SClo25a2bXj26jnPPnzO8dgzTzPDOHLseqTQTHNWJoUwGK2zXV1liERW/8hAJaPQISuqKSUWy82ThfPxepnaPPV8dV2Xu77KApVgtg5MQocMJIpkQqiSNYpcxhzDachICSl0HoiiI4YZfMxrj9y96JzHGENRlpgib34WTYMN4mmIEjLiw5Sx50XBcrl6Gt6yuJZzW1IFvJ+xU0TMkrpsuLu5oypLFm0LKpKQvL95z7t3b7m/f2CeHZvzNWW9RJqBu901VVOzfdhRVytKs2S371gsFpT1CiUFSiiknrg/3CMoKcsFxlSEbMQlJYEL0PUWKaFsBatiQVMvweWfv3bZMNuJ3/zmQF01/NEffcrxeOT+/h4lZXZUjB47ZUNHd0j8/a+/IsSRmAJKVqj0ItetGJVJdhcNh35PVZ0hpcD6ju1Dxr4LDULlGoJ3N1uapqUoMqlWC0W9WlKanOea3IidHZevX7A7WObBYolsVg1FtWCaeqZhi5sim5WirrIiFaNhc7aifyk57numaPno+2f0z0fGG03jKhZ6RVO+Yg6/oFUOIyTOVbQrQ7tWGUoWFJsXG77dfcve9zw7a/nwbMW3v3lAiJb1csW6bSnLgq9+9yVffzVRlJKiLqhWPS+HS+J8RDpYFgvevdtx9eqOs81zjCj5za9/yV24JckCkwrW7TmvXv6Yu/0vuX//ObsHh0oX/PnzjzHDFcY1rArP2dmCq+eXHB8mNoWibpd8+pMfgkykMBGDJAaJU2/48tffIE1i2E/87X/4imkMLKsCKye869DlzLu7txAV0xBxU+Kf/7Tm9j7x8vUVTV1wf3egWpVcqppFJZmGhkkbFpeGh9t3CBnRC/jV519zvjmjG2eC2+P6AyRNP01YOyGS5Pnla37w2Q94c/Nbut5jqktevHiJdQPD/Uy3H2iU4Z//+Z/z2y/+T968+Zq3375jezPy7OU5PvhMf54Th/6GZBcM/RGZKkqx4sXlpyzqPSEYwliz2MD2sEfKSAweZSJ/97e3fPLJj6jaPfp4z25n2e137PYd52cXPH/5PJNi04KUdgy9pd8X/Ok/+wGSPSHVeOHYPtzQsuGDj66Y+om//utfUy9a/vW//s/57Zf/gTfXX7I/PNCsC/bjLf3RQdJcXm7+0bFGpP+YtvX/w6/Vpk7/2X/xMYXJaMa3N9/w0adLgmtws8ZPkaF7T7s2lGWN0ibLxiKwv892j6IUdEfL0Gs2a0NRCFzw7KcDwpQc+pFx8jT1ikVZEmxHCI6YoDArmtrjh4gbIVqF0CXVSpGY8DGS1BnB7RHBEl0geXh2eUnblATvmcaMMv/oR1d4nzKYo9DMs+XN2zdAgZCKJBLDNHK+fpazRToi9cTDwwOr1RUxKrzLG5Vx7DB6QbNoadc1o5X4QRBcjwsdlp4pOC5WZ6QJ7OBwwrNerFDANHbsDw/oJDNtTEEQAW8z2t3oNSFNhNBRnPoRmqZC6wKBJqYZUySqqiHGxN39LS9fXpAILBYVWgn+9m9+x2d//COOY8/+MNLtI8uLDwjjHqmz7cKNCW3WJyViwlvHom4YhgEnE2iNLBS2H7gwDS5EJufwfqasBMHlLpWiKDm/eMFx2tOPAzZaogh4V7Aulvg534fOjZxdVWiZe1tilCQvWa2afPriE8EbAnuU3BCSI2TTDZaetjIYrXJXUb2BYaTVGe2uyppn56/xOuTTNFHS1kuaOnH7/tu8MY2a/e7Ay9dnHLoJHwVVU/P2+hsuL87puyN2spyvryiqNS8/bLDzzO3Nlt2wxR48SUpczHagSmqCCZS6QqWC3TZhDOy3Awko6wKlK27uvqAyLYWqSDEjggvdoAuRSxilI7jTRjI4SJ7l4py6LVCiJDrP1D9wdXmBD4rRzljvqYuGtiwJ2uKTw/tIWVW4oJjsiPUzKQaiq2kqiRK5s+xh6tA+096qumVzcYVJFYf9DikiTV2wWS/4xa//hu1xS1lo1s2SZbXh4fjAPE8YU7NcnGPdgaZeoMh5x92u4+rZMzZnZyQBs7Xs9z21SdRVDUScm2jbBf/z//h/8b3/6ZzX/8uSeR5YLZ5ndSJAcBGtoB+PFEVB07TU9ZIvP3/Lom1oli3IyPv7dwzDgfOLC0Dlk1ldoLVksgMheAQSrRpCsCiVTmuoZJpd7s9LeSNYFSWH/R6JJrhI34103cTZZkO7aikrQ0iWh/s96/WScegZ+o5PPvmYYdziTyW8kpLnzz7kYfcO63OeqCwbSIYYoGlXLBZrjseOopSn6ohMbhsHx9nZOTFFZjuelHLPMB5JsaAqzvjex9/Hu0hhMihhGHuqujr1TCkEOXci5OlQQxqU1CACMSZSPHU7KYjO5bxSSCfrmWCaB8qiPNm7EkVR4kmEkLM6Ij4laQgpEmImw5ZFgYv2CUiSUjrhy7/rrwohUBQaIbK17DHPVRRFto1KcerWy7ayR9vh4+e/j2qXUj79/YxPz5baRyw88NTB9
d2wKJmm8aS85S7Guq6/+x4JSmPyEFXW+fsIgSCdBlmDEPlaLBYt5+dnLJftidwmuby8RJ3gIDFGQgq8efsN99sH3r2756svbvDeZ2X0iUaYr81mc06KiXme8GGmWZ7oiCSElBm1DCcEecz2yHkkhPCUhUspPXVYPVL2HumL6XQ9gSeEfogh20F5vF9ZybPeYaeZQmnO5IcseUWMBUo1xOgzSdE5kDLbDVMEEXOezs4cd7snDDsCrJ05OztjuVzRNA3TNPJG/RUBd1KnZqTUVFWBczPTNDFPWbF5VPeUkphCnay5eQ20bYsiMfR9Xp8pr49u3HN3/8Bs86Ft11kafZ7VwlKSxIwwgtEOeJetcVUT6fs5D9hVQ1U3zPOR5FW2ykqoytw39b//D3/H5mcNH/+bcyIji8UCI1tIGpLg+dWC88sL7u/vef/+nt32ACnXHZRlgTYK5yzPnj1nnifs7HFzYJoPWN/noVRKlDDYwXB2foYLI9b1bM4uubm7oW6y8imkINqcRbZxICSLMYbbm7tcg3A6cKnLCjs7losFzlseHu7YrF5QFJrjkK9VXW2oWp2vyWmAHIZAInJ+3tA2BiUju/6GYjNS1SXRFdx86TmoiWW5oi0WtGVDU9T86svfInRARokcDasLzTjORKUQJtuj++49wygoZMGqbFjVK0LlCFO2fPsiQhU4bt/x8Dbgx5of/vg5m6uS/W6PTpJNu+bFxQ8RxUBCcexGbq5vaIpzvv/Jh3T9Hu8tf/wHf8SX3/yc6Azf//hTfvqTP+NysyR5x/mqpSwF3o2IpqUuPkBKA8KT0j1f/nrH/ftb9rv3PNy/5Ysv/oq7IDi7fM1is0KYid/83ed88PI1bVsCjvfbO4IakLHKfWyi4OOXz/j1m7+lZkGpKqIJvPpPf8ju53fYYSQJj1QFf/AnP+WLL37J3fUN89HzwQcfMaUOJQ3WR7bdgbP1mpvrG6o6sVoW+Kng1bMf8+L5hmGwfPPNPWUbiGGirmtKU6NEwU//k0/4t3/xM+zsKMuaeZ5o6oLjsOP27oZ319e8fvWKu+sDlVmd3r+CeTQ0S8ViKambgql7RpDvGXrHenXGRx9/wLdvfkmhrzClIaSYVTh5pKmqnIeeHFIofvLTz/jid2+YJs9ms0brRL1uebh7x/3dLf04UYqGq6sPAEXfd9ze/pbavOaTH16hCs/N7TVSKrYPe/p+OD2Dr/jv/7t/81cppT/7f5tr/kkoV0IIjM6hUjt7/Ay4hDzBCZxN+DQy9Ia6NiyWRb6Y9wk/50yUdw4wWDcxTYEUC4Iv0VTMQ0D5glqVFDoS6BFaoU9llcTAOESIgaggqIifZqrFEh8Ek4skc09V1iya86cTT4Tg+rhDkVBGUJ5XPOyPSG0YwwSDZ+xnojwiUgnUCGpW6xYhNcPY4exACA4hPdaOxKhxNudPjMkny4OdmQ490zjSyDXB5tPXuqg4W66wU7brGJ3Y70YWtWEeHdMwg9NcPdc0TUJohUswDDNF49lvJw67yDgbhJmIPj+whQwI4ZHxdPI0eXwICKnp+om2LYkxEUTk7HzFzbc7koqIKGkKg4yRYRyIuOy9jwVCuuzvn2ZS8DTrNfiCo81UM5EMbVXnk0gSUku0NJhKAKfMhfCMc4/zGf0cfMTFgCkEXb8jxUQUCVmA8xGUzBs9HDEqZjsyjj0gqJsldXnGPLucBaIkBUGQitxILqlEgU6K695izlYUdQ0a7ro9zVmDFxMizdRqwW++vMNHi9K5b2yUE1/dfUXdbDDFitkbimKJsw4tNCjBftuhC8fs8wbQzQklC4RMDPN8wgUXWJcIThJltn5qLZjGLlOnU8xWIzSlrjNsQRWn7yMIyeKcwIWE9ROLpsjljkISoyJ46A4zUlqkSChTsO92pJhV1YQgphkvWo6HB3ywSGWISRNCyiflLh8YRAJBGxpVodDEpBjdQF0uUEXDNAcGt2WyFpU8Kc5IldfV+foyb9KEYXQCaSoKqTDSIEkoXedsnW4wynLsDhyHO1QpqOrcz3G2OSPY4+mUPJf+vru5yRvlFAgi5uyIcxSVAqlISSFVQBeJohIozQmasKBpKqRIzNYSLTTlAmctRVFQNyXewewsPjzCcIpsJcxOJnzIA15xyuI5n0l1wbk8iGlNWSgW7YJPfnDGNI0g84GEEBWLRQ9BUJqGclOQyIS9TC/TtM0C5wICTVnU+b4qlf3mMeDDjA8T3k8M4wAy4ILLRC1Z0Q8QY8K6mRQtSXiOxx1VuaHZ1Ox2W+qqxZ+UCqUl4zSclCCydS8GpAgUxpC7mvJmKQR3+tkSuatGnCDaKWZsu5AUxlCc7IEx5a+VQkSJ/Fj2wZ1IcDk3powhhkBmHmaOttTq5H0X/2BDb0xWqzJUQ2bARQqnYShnoPL6jU/vocdMlz/ZIeNp055OVshH2Ea2n+Vr8PvZr+9skHnwKMvcheUf0eundwbk7JsLgaoq0YWhKspTBithnYMYTl8TdvuO93fbbHUX6gTDUGzONhn0ANRtnfMuPrJsVnz22Yqqqthud8xzPuCYJsswDIDEhwCiRHmBJCsbCTAnhe0xH/bYpfbYy/U4oBhjqMqSruuehsqqqvK9OF2nR2BEvjv5pubhVuN9wHmXLaKmQBBzXhEDJ4rf720QHu/Q035BScHos4r+eN/Tk/Xv9/vH8pDoU6Y0aqUQQp8GLE1R1MRFzltK9Yi6j3hvEbHIHVg+st/uCW4kRkdZaoxRxDQh/x/m3mRHtiy90vt2d3oz897vjS4jIxuyyEpSREkgUBwIUs010QvlTIN6Az2DCjXRQIAm0kCAVFKRlcUisyGZGZEZcTtvzM3stLvVYB/3iCoJnEiDdCAAv01cNzc3O2ev/1/rW2Kh6jQ4jQ8LW6OppeZs12GMYRiPHPsjqjD4mIWibDSbNvcdhmCZxgDCYYr1XJESwScGl8vBlTYYUzEtuaA2yoRzE3aZqYqCEA5My4KQkm5XZ2tscCQJSENRSA7Hw4tNs2hbdOG4exggSRQlZbFFFxqpEkYJktSMY8/5Rccye4J3tG1L2Za583JUBCcRIZGCgKBQEvRaEK3lhqLQKC1ou5JpymQ2IyWyKDEyMPcJZdbNpKj40Rff42m8QyRJZVpur7YUx5mlnNClw6/ZztuzlsP+yOI1bXlOEpG2q+nHkXm0iHnmovyIwgX2w4zF8erVNY3RbIqOtB57YxGoyhLrLVM/cf80c/XpjourVxSqx40OXYyENKPKiFEa00i6qmJ73vH+8I7RHujnwA8/e41UPUnMCKXRRjKfND/5p3/Kj3/0fV6/7pAEDvd7YjSUuqHSmjdv3/Jm+TUPT0/54H4cGEfPZPO1exoP/Oznf8tvnzxffK74wQ9LPv1CELlnXHZ0O0PdVlTjjqbbcjiODONC8DOju2DX3eJ7h11C7mnsZ/ZDT3KeqlIYqfjd17/m/vTEwc4467nf75mZCXIdyFiNV44yBexp4H6Ebfcp/fDAL3/1AetybcgSFU0D0zwQU+Bid8Xf/f2XDMNA8Pn68dHHr/jwbk90FVVx
zsV5YuxnYtAUnaJuBFIlFneiSTV2VvgFUvB0F5p58MSokKLhfPcxh8MBFyxSG7bbDcfTSLttWGaHDxEpNW/fHqjqTe6OLeF0eiLNHYtXJKHQBTx9eKRrPubm9pxPPr7mk1uNSrcIPTNMM3FOPJzukOKMzaZDF4kol39U1/xeiCslJSIKpIgok6cPdkxoEyF63LyWzi4GUp6E2dljJ0F0Gh88wSe6bY02M8gcHE0p0tYFzI7kEz6BTJ4YPcRiveBDCgFvQcjsDUhKoMp8sfUusLiAMgFdXqCqDUJKYnQkLEnKfGhTCak9p7Gn1TXO5cB4zshUpKizxSEFSmlw3q6e/InoEt2mxrtsBAgpsVhP07WEGFjcwjxNiGhzAG8FGbAkhC4YDiNGZaqgFBJJQsRIU2qqbcXNtaetc7+VDZGhSly8gt+4hekI8xoAJ4ncwK5ifvFEQ4pihQh4UIrZB7QPiDlXymhd8ObrgbJRKK0RCMy6oUrkCWVVaWK0BGvXCX8iihV2sVpHhJDIwhBlQCqJSrlgURc6C6UQiORgsnWWEDNOOYWIVok5TOsBTKKJLC4iyLaQEPPX9DHmomIELgRUkvi0IJJEyWKdfitC7HE+olNEq0iZClQowamMpI0zXkSUzoj0aRp4vD9gKocuI8InkIrjMCJ1SwqOZY5ED6Mf0evUf5oGxOKYlpQD4iiEChko4rL40TqX9CmpCE7hU6Y7LTZvSoTIG4kYfA7SkqfqITikykI1+HzAmyZLpSJlJVckuMSngAwCF2ekgqouGeYZSbYzCalwSZDcwjxZYvQoo3KwU2hSzMQ2az1RJkxS+XC1evSTEEitCTGyDD1S5IN/CtmvPduCqmjWDEWeYFqf0GVNQYmMYu3xFWhdonSJKkAWApcWZjdlApnUGK2yv19IQgo47zlNfQ5RR8/ibC5KjlnoaClQRhFXa1FTt1RVh3eSui4wWmNKTVWVKKG5f3iPt27t0JCrmFcQ1p9BSiumOsMQQlr7taSAmEWbc1nMiyRJIiKUxBjJ7atr3r1/m/3xIuWsZFngloTRBcY0LNOSNwdKYwqN1ob+2IOAQhcoLfEpYQoIi8O6kX4IzG7BLkveXEaHD56yLEHkUP2yLCgtWZYJay1KLVg3ZSKhhtn2SKkoywrv86E/hAzqSDFRKrHaAwUx5G1LjJ6U/LodciglX8L8aqURVmVBVRV5A7NukeYlY9BTiMzEF5tbhtvlrqNny2DKauVF8DznpMQaXH4GFeTuI0NcxUoWed9CMfK/wgvs4nn79Pw56589f/5dUMZLzoe4Cp38WJxzGGNeIBPP269na2Pe8Hz7mglJYUR+7xSlIfp8nReFwfWeYRwJfi2qdYHFLrQfHl8EXds1uchXCkyhqZuawhR0XUfT5MfrrOfUn1ZQyZozmvPrwa6vSykkwfuVshZJMYMLBCBXcTVNE6wWx2+v3+Il7/QcHszPz3+8xXomSKb0vBWLKFmQ4oJlZBE9BRfAt1kqIb8VzkJkYS3W7eR/9PFiwsnCLiTPKPYvWH4AuVIHQ8x9WIXRhDWb9VzALiSMQy6+LowmJccyzAQ/4+yEUjXaFFg7EvEk6YnCE5LPYJJCZYuclIgk0CoPKSECASGKdcObXiiXai2jzTJU5p/9s1AMIW+IUxY06Py9u2B5ehrwIeLjshJLFUJDtCFnYVJEJvDLjNElehXxLmiUKAkhEqJkDtCWDVIsCKkwosAuAaMNUa9DmMVijGKeI/NocS6hakVbd0yjQyootML7QLlui2TS1HXJcfHMc0SJXN4rErgZ6lpiU8C7BVNIyqgpVceuO2Pb7ZjjB57CES01RVlxe1uwORO4caYwIlvBq0C3M0ixwUlHKi1FU1DQ0i/53iSIGda0dkza4DkOR851u+bgSg7TTC0bzjcFZZCM8khVrVColO3F0sA0zPzohz9iPz7gowUpuLw8Y7bfkPAkIbDWcnvzEd///qfcvj7DlJ7kPD6cePtu5u69QmL58qtveH94w2kYGfqF/WOPqTRRKXShQXlsIbH+yGn4wPHYMc87rq6uqEqNVBEhEoUu6JoN03SHVQ6i5XD4QFNfQPs8yByx79/g5hMhaYTPTpP9/o7D8ciwLCQRGf3Aw+HAnCakVGzLc+ahyOcqG1hCom0kUs/c3T2x2ICpS5AVrWqyk2uZGeaRd+/vGed1cM+ENteAQghDXXXoQvH2my8py47NZstmU7PYma4rONvmYfS8JG5vt/TL/Zr/g2WZcbZYt/YRJRR1bbAPQNLINfdcl2eEGKjriqLUIAPDh4F+vMt0wpAF9kM45O1zCJSmpbp8TSFuGOcDoLm5KjCVgXSJLgTIhdE+/qO65vdDXCmFnzybLnBxZnh8UzD3mvYMwOFnx8effJ83j3tiiJwOE/d3A8lvcBZC1BhRcnZ2hkt9DnamhFQ9u7OWcjE8PIzYfiYFCclgbSLGBXDoVBOCwoUEMtu/zi47JAVumlmiZVt0qKJmmDzzNLJMR27OS16fnTMtE6M7MS5PJDOgmhk/laRYstl0qGKbX5TLgFsG5kEw+QFtAnVrMLKg7S54fDyC9ggT8INHlx7hLeNsGQ6O6/Mzgj+gtcSHxNN+pn/oeRrH3HS/23B9dcGmNCjj2W0UV9eGmD6QLCxhQSZLaRbOdxVfJp+D6CRSMgjpmSbwUdHovP0ShWRYJnzyVG0JUrLvRwql0FKy9DP7k6dJCqEcIU58vNVYbyl1R1ttub6ueffhkegdQgqklhxcj4sBnwIiCVTKIftuU2OERiyeYeiRMtt9QkwkEbPNoXfElCfmRgYkDmOgqhXSCMIyMJwiRhhiSFgfKIpAlAXNtiV4yTx6JnePwECUGOWptwbhFU/jzJA8vXHcyorP22ti77DHgdEs2KrgdP/E1eWGeldwPDwg3IwuAn6wWB+oyjNM6FgOninsmSeHkjkrUFV6xXU7kisgSlwITG5msROmbDG6RssCkSQhjHTtJnfqzAvRzcSYb8NKGZTOG6oQFvrBQ5IEFzFlpCyrbLOcHPMQ6MVMcdViSkMUntk5mqpjmBactyiXCFYRFQgdQHgWG0njPTqBlFlwT8OMLit8tJmKlvKoolJFJhiljOkmlvjksNPANE1cnZ8TkkeSrWSJiqbSHJ6e8qFWK3xcKHRJqUzeXE8Wa2dIEhscc1gIJmGKgihgWTJpkjCgSoPzWXzOziG1BAGL9ZzGmWg9F5uWaVhoGigLwzQH5kmy275iu90yDAN2GUg+cb674OLyAik1/+pf/w90XUH0CWctShuasiKlXILbDyPB5Y6xdVqBNipf/LVCujwUKcqK0+lEFA4bYFwEV/aGaTnkgYQuMKrAixKlAhJJdIanp56qTWx3W1LQPD4MTMsTbdOtSHTFPE1sdy0hWobxyP5pIglF2+wQ6Axy6SrOzrYopVnmE4vtqXXL4XgEKZmXifd3X/Pq9S2z9ez3J5Qw3N5++kJlC+uBPAbQRUWIKyFRJOZ5yId8o1eYhEXriph8zkHpkrI0bLo692wZjV6phcMwvWxNhlO+Pc0rOc8Fn21
hMlt2Y8wborCKnWdB9Lw50loTglsJfZEkWC17GVYi1DMp4XnbZV62MnnT9a3F8Plw/5zJesap562YftnwPFsQ3Ur6e6HZCcE8z5xOJ+q6pq5rpJSchoFlgXkxlGX5gomXZAuiVJKqqdiJHc6tA6UkcM5yPPYEFxEpU+akEOtjjQiZe3263SYfLIqCqjZI1fFcZOx9YOgtmI9GzQAAIABJREFUT8cDRmkSOaeWe7oiWimSlIzjSAiBsixx1q42urzx0zp3IhVFgRAC5+PLJs/7vGF9fp6U0us2ya3fS1iHQ/nw3usHooLbeJM3aevQT0qZiahyhZiQSCEQ3DM0Jf89sZYES5E3rZ6ZO/ELkBEt9begixgzPVBGQhKM45wto3olyTYVKZXI6CnrXIsSyDmwh/ueEBaWJfB0OKKMpp9OzHYmhYhPgkbBOMykkK9N5+cXPB4/kPBIkxBCM8+OuikpTEmUAuSEtQFJhZIKrQK73QalFNM0czyeaLsKkJBihvjoisPpCVMr7DLhvEUbs24GM9EwhkyLK0uThXxMGZu9BKqiZY6WafSMpwPf/2xDTOvZSWRk/dSPtG2Hc57Hxz3ezfmeOoOkoGxbLq7PeH/3nkRCacXj/h69M0Cbh6ZSUzeCx/uZQmvqKuf4NJq2lchl4Mme+OrNl1RFyfd+/IqL7SVu9ii1Q9mBKu3ounM++0nBPN/jbURJzaYLeA1bqbjtrrMDRVsGetr6jIjise9xy4FGNLnrTyYEkffv7hBx4PPv/ZCLiwu6TUlTXLEpEj4szNZTlxXHY0WyGqETqMTbN3f8i//6v+KrD19hlOHsvKDbVbj7BsFI8DP3dw/8xV/859y+rpBmYJg8Bo2PPX/z13/P26/vsXNkSEfefnjk5uZjrq6viPUTvgShC1RTUTUFX/zpH3Lx+u/x4YkhfMU3b3/En/3pf8nQv8EuM+NhBCJKntMWFrOV+Djwu9/+nM8++nNuby9oq54wfMmb3z5Qho6TP6MPJWe7SHIL42FgcjOm1sgmcvere47zEV0p0q1EL4amiAhRIqRhcZF2oyke8nsesRAFSHmONBpvHe/e3TFOHxDkDrvFeR4er1bQVkILTa063r/P8JTr69c0Tcfj457bqxtuLjvu7z6wPxz4yZ9+n//xf/o3NOUrTCGZ5nvevpnY7EQG0wmAhFtgOmWKqEiJTfWa84/6DMIioFXN6TRw9+a3oBPttuJ8c41pH/DhkbsPif64Y3t5TltC297y+uIT2k6ji5F3by2n04FhesKM6h/VNb8X4sqHBe9GClVzftbxZ392y91pwKWRolZsP9/wePeB7c2Ij4HFKpp2h1GaB3+g0omz84ara0nVXvObLwemKbI9a/j6w8RpPIISFGcVu80ZKXrsPDPPAuckm2bD7rLiYf/ENM8YJXh8PxFCjyolu7ZCWUkcH3HB4ucFGSKXmxveff0BZQzCFGhxRnPWgejxMeBSIEnJMjuWJfvNq6rELQuTX2gM7LozXt98wf3+DU7OjNNMiJGyNAzThHQRnTy7NlEZ2D8GkpiIyeNcPmjIImDjhPWaP/j+LWWcKXDEsOewP1LXESksUgmIkumo+d/+Z8vf/8ojpGGzbajNBVpLhLGYUmAKxfHpkbreUJYVRuRMxN3dHX6xGK2py5LaGL747FN8XBjnE/18RCmRD70xkUzB+Ucf85e/+ju2xQ4pFV7mqZqNuZwxOM/Yj3SXGxbhsdMEk6eUhvEwcjrM6PXGl5xHk3u2EJ6qlDzdH9i0DaUxlE1Jt7tk2Tj8vDCMC85DxGOXAqWhbCRla3j7BrQQpOSxWBY3EyvwYmJxkWgD2xZ+Nz6hjaLaaNrdlvHpgeLScMLxdP9Ef7Dc3HREseCXhcUHDscP6yE3W6GkVgzDTAyGSE27qdi+Lnnz9T0ajU8ClxJ1u0OoxDSdAEFdV+gWnJiRRmJQDEfJxfUV0zQyzzPDkA9D0xBQMqCNpCgMIShOo0XKgCmgKiq++rt3DP2WqpPoytFtt8wu3wC1KCi84ul4IlQguwJTlTRFzf2Xv6XpdhRVhRCC0+lALQPTMmGdRUaBieBmydIFVFtTKMmH+wNVu9IoU+T05CirihQDp37m3eEf+Cc//BGElnEaWdxA2ZS5HLfSdFVDtTnjadIstmfxOQemVUPTbok2sFjPZC14S7e7pKxyf9Ph+ISqxDplr2mqHaLw6MKw2+6oS4MSEhUEXXVDW13ibeBpf+T6+hItavpx4sP9B37ww8959fqKaRqY54XJjiQCt68/outaqqrGe8/Z2RkPDwfqqkYIePvua16/fsUwjDjnkcpwdfsRh/7fM7lE22w5P7vh57/8awptqKoNuqipy4oP3wTOdhsSiXmeUKXn9vUt3mfEu9QFOJinhIgGpUrssvD23T1F2WUiZTAsduDz7/0RRgqWeeTpaY8WFfMysqwCoCgVNoxIrdFaYHSCNFM3O5zTDKeRL7/8Obc3H6N0YLEzzjnqumGeV/IqFu8yIczZkDOLa2bphdYnBC4GlM8bjqEfCVXF2XZLWVbUdZvBAjESrrNoP5yOnPqBfhgYhoHvOPlWsRex1r4c8quqylvhNSP1IiZiyDSv1eaWxVd42f48b6yef/+5cPjZyga8CIWcG8qbIucc0zRRVc9UPoVzDmd9rsFYrXbP9sTnLY+UMtvqlMIFgfWSeT7lf1vJdQsdaZqGZVlYZkdcs0E531Si6gxCeX7MUmZr5rPjIh4GDk8nIA+m6rrMW9eUMzJVbTiT20zYsgunYaCqshiLMRF8pK5axqnP9n1jaNv2ZQv33dwXsNYC5O+1bZv1MbGKrexkmOcZpQSlLl4262Vd5Mxe0gS34txzuA1iQLzQJiI+ePwyE5x9Bmy+bLmeBfYhvONh+jX3jw8Mw8DVzRXdpnuxeKYUmOcs5KuqIkZYXAZP3d8HmrZi2A+cxiPOT2gDZ9sub+ldIoSJx/3AuMy05wVtvcl0QwfOOh7vfkfwns22g34CGXI2JEaOQ09dtwzTSFlUbLozQoyMY7/mnQXWTTweF3xw3Gxf8fr2NfvTO0KcGY8WrTIqvG3rtdz8OeO9MM8WVix6rgdIyMEiYsXZ9pzNqxuMrpntAV0qNkbTthDUgQ9vT8QoMEXJbleTkDzcDXTtjp/88Z/zvc/P6Q8R7yLL0vOw/5rfffme61fXRBb68YBRBWXZsiyOcVg4HqBtO7qt5+Jyx7bbIrzh5nXFb387opRidyl49+D5yQ/+gNJUjPOBh4cHrm4/Zuwj471lv3zg6fge5MLtq88QQfDwpieoFhcqko/UlaW+9Bwf72nac7paQKoIImCnhs8/+wFNa4hx4vXVJwzHA7/5u/dcXJ/xz//iT/jlz37B3Te5GL3tan7z1Qcqs+W/+JP/DN0EnuZ37D5WOB4539X88LNPScrw+pMtKR5RxjPPC4sd6Kd3fPnVG7wLjKNj/3Dgr/6vf4/AUpUV2801f/CTL7j/X+5xbsC5hrJJ/PJv3/GHP/6cVkbCeMQeZkTxY2q9YKTCnSJf/Wpk5g6pUi6HjhPxGD
nfXHE4Ct68P1BvNwzDO5It2WxrNu3H/LMflPwff/uWX7yTvB8Vbz/cMT9qluORstBsTYd9Gvhoc4FeIod9z9fjN3z842um44yLAVFCYQR/87M9zo6gPDImjndvCC5n0LtuwyfXDT//1VfMY0lhCupKczodWMaBYXkAoamLCz765GO21ad88cXnFIXmeHzi9qMLhqc9SsP5Rctvf/crrq43hGXDq9tP+Wd//j3+6t/9G37994907SZfkyJ8/vklxIQQHVKcIYRjf7dfgVEl7S6w7a7Z/Khlf3pinEfe3r0jojm7OGc8Od5//RU/3FT823/3v6J1om0Mu23Lrn3NH/2TP2G3PWNZzhmGm39U16if/vSn/78IpP8vH//yX/53P/3hD18ze8tx6tnd1NTdgsAhZEIZhak0YEjJgFAIJYlyoGoc3VZQtzDPC955qk5QNomxn/jw7kBZqDwZlZpC1whGjA65myUYXn/UcDycmAZHnAErqXXNzdWOrm6pdEEhBclJjJa0bcn2osNKwZhmhnhkdEecdyzHknGU+QKccqD41W2NUTHjqccZ2Uz44OjalsJU9KeFr7584DjOSC1o2oLSlEgxYU+JZVxw7oSQWXj5KPBeAYqqLpGFBiTewuFBIr1GiFPejKAZ+4rhBO/vA2/fJN59rXn/PlP0TCGJySGMpSgaJrfHpyl7/kRCpmw1rHTJrt4ivEQLQ1U0lFVDWZt8X1MRoUBqRWEagoj45Dn0J3792zckodl2G9qmoSoKhuOJtmkh5J4a7xzn51sOpwM6gRKKaQqYWmJtRhkXZQEqWxttsDnbHEq0KTG6QEtN8ILj08TxscfOC0IkqlpRtzXD4BmHhWUZSWLETQbnHKWpKfWG4At4skQrSE4gQ0QTuPzkkuaiAy05PZ34cH/kbHeWNzhznio+7nsWl1hcYAmW2Q9Yb0k+okVFbS6YjjPRgUya6CXTaFls4HQaECS2u5aq7Zgni/eSEBXepwxrGTOCW+nIbrPDecHd3Z6hn0gJmkoy9gP5ZArWep4eRkyhicHjg0eVJaYqqTc1ulCkCEs/ZRiBUKSYGPqZzQ5UYRFKomTJrt1QmYKiWCfga9mq1AmfwtoNJIjTwnxYMFXL5vyCuqhRGJYw5wxDEsjYoE1BW7eYsmD0E1oYtt0FQhpcjCAT0zggEbiwMC49USmGqWdZemIIVPqMXXvBbtu+WM6UKWi7LWVZEoNjmnuUitz9tyPFX2qaXxScbbfUZcE//aM/xi3w9Vd3VKsQsn6hH49M88Q0BKalJ+BxceHD3TdUZW62355tuLm5QpuMA76+vkEpxfsPd5zvNvn5n7LQuLo85/C0XxG9eeNSlIZ3797QtWdU5RZrc+pvmRaqsqauWxS5fLdrd7Rtze685OKq5Hdfv8fakDvqVO4hatuGkDwuTlRNIog5R0lTADxShdxNEzJUwNoMJbm+uWVZHPNk2W1zfidHktLaxyXojzNaKEiRoX/i9SdXzMuJYTwwzz0xWq6urnFu5tTvOQ25HDkDVfImx1pP1TSr7Th3sA2nkVM/4KzH2WxZPR0HFuuxi80W6SgYpxkXIkIqtDYIaejaDlOWJCnW3FKVcx6rkIN82P/WGhgRUlOYYu0ski/2wfgda9nz330uLv5uL9YzEt77vD0yJnd9+ZUwaK1lGAa8z+Irlw0nYgxYZ1mWb/35z/bFsiypqoqqagCJXRzBZ4ACQpDWTiW3AjCkUiijUUagSoVPuVswpTVTiFs7zLK9LESPXWsshMyWzaZp0brI2+2Q70laKeqmoSwr1JphW5ZcUix4tkl+C+p4fk42mw1d11FV1YvtMYaANuYFWPH8vD1vMovCkOmSGYe/WAcioWS2pI924SHs2XEBq3DOWzKR7fzkbJ63GYH/rbrKYj0Ez4P8BwZ5jzaSuirZbPNmN8WQ6ZOrXVLrnBN0wXFzfcNm29F1LW1Ts9lsGMeBREDp3CKSsPi4IFVEFQlUzjVWZYNRJUoIlrnn8vKSzaam7UpMGbBpJiRNiPmeXbeS3dmOosyvR+ss4NfXUiCR3xPWzdz9NwNX+ws+/ocbINA2DW3TUharJSrmjLD3lnmZWJaBqisxRtG1DZvtBqkEm+6KECTGVHTtjhAi+6c72nbL9myLNlkYXV7eUJSSmHrqpqJpztA64dzM3fs9+/uRX/3drzj1e7yfSMT1+SwRgLOJ/mg5Pz9jmSMCyfasxsY9UhnKoqEqW3bdFdGbnBet8sCz3V3yxWc7mkrg/MRxemS2ngJF10g2W8Nme8nt649JCZRWbLcdn72+5PjY09TnnJ9fcHW55Zt3b3h6cLT1hrPNGcPJ8+f//F9wddVilyMf3r3FToEvPvsjds2G821H25R8+eU3nO9uqZsGZTQxSLbdjtpAXTVcXrymrjwCTVW0NFWDSI7rq1umYQQCVQlX1xWnJ8cyKR7vR969+0CQPaexp2xqms2OZrvFtFvq6oqQFE/9iI0V169eYUzH5CxP4wMP0x0EzTQ5fIx055qrG8UyWsbTwjJ6Ls5u2E//gK4vKYsNjTE8zG/BSY6nnsNhIPmaePc1f/sbyWFRJJ2IXlLUDcpISl1TpIppDJwmiykqtk1HV1TEeaR/GjIuvymx7kj/NHB5vUEbTd9bPv/e9zkeDxyPJ4Z+IIQZHxyFbhFS5ihH8vTDxPHUM04nxuWR/Z2nLCXWjRxPB4Z+Rhcdx/1ThqaUgoinPyrOL1uQC1999TXH40yIC+OwMI2Brjnj7KykrBLOefreUlQT+4e86a3bgsLU7A8nhhQQRlOUhqQtvq85ngI2RdqdpikWNhsNYiTEAe8nrHP86h/+hr/+D/+Bn/3sl/zsr37Nv/0///btT3/60//+/03X/F5srkiCqlYgDEVhkEIzuQVhHFWpqEpL/+SYjnkbFIQnMpPEnIknUbFMgqd7wfX1lrrxGOXRUXPWnNP3J4TUIDTDfmZ7VlI3kkpHRuOYplM+IAaZy+5IfPSDc/rTMZOBTEKWICZN1zWkFBinEetyUaZCk0LM5YQmd3cg86HGyJS7KKRCiIh1A8lHbi5uKdWGYBPH457vfb/h7eOAdYJ5lPg0s2sFMYJRHW15jgsjqorgRM6DiIJIopAGRCJ5iXOWx33EO4XWFSFGToeInQRLUCxrrsGGSKX1CjZIJBymsfhF5OJEHUlCEGPOP0QiMgmuz6+4f7xncS5PU/2Cl1XOGwhB0IbTtLDEnJECCFFQlSW6WoPt1tMWDUZq5jCCSNRtmf2u0mQYQUqUTYvQFqnBxkCcB1rdILXGVDXRBYSTINfy5ymXD1elxk651DcJcN4xnia00EhT5qnlccrT4SQhZlpQsOCd46w9I8jEEl0Wb6Fm7rP9TSTNWVmjQ8kwDcz9gpS5/HgYF5TK09OyypQ3UzckqfEpZ+ekVgSxkKIjzAkpSrbbnJdQGJYlEKwneIHPKX8EC8LkEDgxsK0Uj/0J6xZiChRSQYJC10idD8ghBEIUiCgQQpPj9IbrTwpizJlFPyrauiCFwDxNQM62JK3YNPkmrFXLw2PPFBZST
CipcjmtzCW93ucSSJEkm+0GrwSYgtk6ptOA0pqUFJFMYKykwUWLT7mgUxUd2tRUTYcsFEl7Zn+ka0sqU+CDp58H0uKRJGKQKGkw2jD0Pc6DszmDVxYN1k9IJaiqile3rzn0dwgh0EpRFoa6qYnW5vet0iSR+HD/lqbdEUREaYE0Oef14f4RUynK2mC0oKkq5JC3z9vNlhQD4zxxfNrjfKSpSvZP95SmQ0iBSKC1omlKvBerA00yDD1t2xFjYhwnpEicbc9Zpp4Q82FZKYMuBFWTQRbOB7RUmCKXnAqpQCTarslizs+E6DF1DWLd7MJqkxM4P+PClC0ayVGofHA1OndwaVkQLCgKlF6zS0iWecGIvM3ZnW1xdmS/f2Cx+aDr3MR+/55pyrYkAZyGA4VpCSlAkkglsXZCpJytEkJS1hXTNGYrnvU4PxBjop4tWimMkhitCOS8zLN1TEDOC8ZnEaOo6pp5nonfgVAIIfP7BYHWhkT+Pb1uWoR4FmDqRSw8W/+exdV3RdCzqMqbLlDKvPx/wLoNyZ8/b9G0UcC31MNnwfcsULJtLm+18qQoobRErD+bbNfmO3mm/DwgUiZGrhk2IQQ+pDxUSGtILethkBlXn7+mYpoWlPIrltznnF7IgKCMcI+rFS+LsZRixoKk9G0Wa31sz/9JmUuQYwwZRrRi6Z1zLzEosQraZxy6lBnZnZDkItEMx/ExENVCCjmFKtbXMOtrQKzf70teOqVvPyfy1eFnNKWgpiClEiGhMrlAlPURh5R/RikFEJHFzjw8fqAs8jVKSjg8nRjnQ4bIlApTKo6HPVorrLPEZR0oIbDWZ1oemWI4TjNaFUhZkkREEvKvdX49SLUwjkOufEgyZ6wiEM3L8+udWAcd+bH2fc84LGw2XRbawqNUYnE9yhQsblltqBElS1i7j2KYmZcJLbfsdmdURc5ZTdNMWTWEFJjmAaFCzqTi0AaKpElpxjtNXRWIBP1x4P4hEUJknheEDCgdECKLVEEmxZal4fTU55+XTMRk2Wwaptm/5HGruqCrzlmWr4lJUuoSYQrmcciZdJt4esrFxruqhjhTFJLXn3yGFY/0p5S/XmVwi8MHi0sLPtZEWxDGyLS0uKApomcYT9zf3/HjH10Qfc3jhwI3R773vdfs7wtCnKmrisurLde7G3wImLHHdYGz85qPrq8om5qkFLq9IsWFcR5WGNDM4eEOIQNlJYhJIcg27ZjycyqlQEnN9fX1+jxAUhPT1HJz8ylKvyftP6B1h48TiYKEJyIYB8e20xQqUpWSrqmYxhPLEtFFhSk042yZl5HfvfkFhWppdMlZcclhtBz7IRPxiLx5v3A3NwwanJ/xi2Z3rRmXGTd58ANzgNEuGKWoipKu2eDsAVclhAwku5BcRJsqX5NW+M7pNFBXDXMaWZYTb98slHWA6JEqIZTn8ORz1krVBEApx+g9IQqmZSDOgafTwsc658GTCwiTaFvJOEC7tYR5pj9OKNFRFBUxLMS0sNgj3uehSVkYYh2xy4zWVbaf6pQHgOPArCzbrqY0FX0vSWFEFLmuIyJwKbIES9lppBWMQ0AZz7F/Ypk8PoApvkvd+X9+/H6IKxJ1nTBF9t97G3PZofI55FgllsKjimxrk0KypAUl8xQ0JIGMgtNe8fq6QSWLkg7Twvn2km++Ae/zNulpOaI5R6MpykhpJB/2dyjZURQgZcLoyOZa8OF4IiSbbTKNwBQNddcwj4HlaWIeNF3TEoPB24SfHLIMGCXxURBsDl/bJVN0smHcITBcnV9je8lxGXDLxNVNg9eSw5NgPAnC4lB1iTZQ6o5te0Hvjwxxn3GtUVOrlnGeMTIHqSEXi/aTJYYaoQwuBE5PC4eHiNQgS0jGg3EknXGZMipinBFqygcClQv7BIYQs81ExkCMga5rMb1mdjZT1OTIKAKFLtCmRGnD5AZc8CiR8wiF1vkQoUSems8LpS7X8HtAyERdlaQYaMoKtzhizGQpYxLIgLULi7MUtQZKjCmJBKILOcshNDGCjFCWBabIgs6HhHWBeVw42zVonTGxx3GmaTNyNwZB8IEUE7oqaNsNUYDyllpWpFHQzzMBz7auaKuKYGNG388OoXNIPx/AVG6Nr7dMw5yJfBFSXAgyoIzIkxKRCFGya87ZbrJ1aRg8wzyjQu7LeQabKAJlWWdLqkukEBlO/csFW4gc0q7LNpc2iogTFlMUCCQyZUJWoRs2Z4ZhnLEeUIaiLljGBULuy9KlRhQldam5bFtKUXPfP9LHAVaamtYFShfMPmduSAqSZHN2xmwAI5mWkcPTE5vtlhAlSeSyPq0Sk3NMa2mkLnLmwCdPkglVCeQcaJsCKQzeBlz0OAtnmw0u5I0bRPrxhFqy5UggaWoIccFaRVPVXF3eYP2w2pkUVWUotGKcIodDj/OWohI8PR0QukQVhkIplI64sHAaJpRLNNGw2XSroMnvhxTAqJqqEBwPB0CwaRvuHt9RnDUYrdf8jaeqCpY5rkI04GbPpmvpTzMhWpq6zdYzo/MBTORevoDDx3zwnWdPmhfKqkSIkpQkzju6bcNwyGH2DPIRmTjqhywmdN7mIgTWu2yB9Y6yLFiWbM/KVMQMFSp0hZBpRUJL5rUXqjAFbdfQr1PJJEArQySw39+zLGPOB5UVx9Oe7TZDapQsKIqWec5kMq0KTFFiqoJhii+UOiADQkiIBJKVPFbqlWwnviMUHX4tJlJavVjBnqEF+nkrFdOLMAnr5wj5ctD+Lmr9+dfAt1uY9fX+LIS+FW7fZryec1nGmDVfFF9+79l+97wly9mv52xYRpfH9c/TCsQQQiCUyOIpxDVXmV93pJTzRikPgaQUa8BbrKRDAzGSZIYniPWx5z4yiTaacZiydXAVKyUFLqxZzpRWSAjr38mv1+ASCogh53NfntP1199us1izpLyIw7AKyLx5CqtA9UgtUVKCViCWNQuVATBS8XI/kyKLwzz05HlBxXfoFSsTMn/Nb04/57OLTynjFufTagGVSJPFcEwJ5wLTtKwH/4B1E3f3h2xzr0vKwnDY77HuhDEKbfK2OQRHUWim2bLMjrKuEDLDekLM28Oq1JyGE2XRYbQhSYlPmW4o1bfQl9lOkMgWOlNlAJHM96K8Nc6gGyEk8zJzPB1ZFoeSfs0xZdDHYnuqepdttD4QkZAUIXq8dTi74PxCrBLtWY3RkmUesW6iqluGqWeYF5pG42NiWSYKU1KXLc73ECaqaotUuUpimkaqoiZGiV0iOI+UAnmyhGhx1qOk5HQcqZsCoSLWT1y051m4qbz9T8mx2+449ncIq4kqv9f644gRFePkGE5ZjDVaZDKh0LRNTX+cIOb7XvCBk02MdkZpu1pGC+yQybXDbHHesoSBD+9/xx//4TmbrqHbbkBEdhcFSm5xrqJoJJeXOy66C5zNnXzez9xc11xfnYNUzG6mqXYgeg5PR6xbaFrFcNoTRCTJTL+VqUaoCDi0EXRtQ8RyedHxuD+xuIHEyNTPvL7Y4nczPgxEX3DoB6gSQklkNPlsWtUokagLhVEF0zDTjwub
3ZaqbRmHIyFK7h5/h5IVF9sbWtkgkETyPffd/pG37xxmW+BChuIEK/Bupu8H+tOMc4mkaoLwVLLEKJC1xsZIrCVWpLztj4q6LXEuF78bLXl4uOfV7ecUpcW7if1+ZhvzdrJuJFrD8TjQVDukKjP8zHjKyuOcYJwmfJwYJktIC1HEFXIUkHNini19H1A6Mk2OqugwhaaoIiE6hvmRclQ0rUEIhUAy9AGlJdoYQlo4DY/46PFxQamWuu6ItmSu3tG0BUpnkmgAJhtoNjk/PrtAd6ZRpaaUUMQE8T8B6vwnH78X4kor2LQSVSpQgvvHO0R0RG/pB8cyeOpuw/df7Yhhx7Qo9v03CJmYBkVpDLVuWQ6Sbrswj4J50rgQcP63bK463t8d2Z96zl9LtLbcv7NsupKrm4bHfYUpNd3O0m4Em4uCX/7mFzxFS1FIqkKgguf81mNDjwsavdGoBZSA/mQZBksICi4sBZOkAAAgAElEQVTg+5+9YjhNPNwfGU55ChHEjCxgsz3j8qqjkIaH4xumoed82/DXf/lbfvCTHRcXJXbUnN4nko2cXSrKQtMUjs9e/5D//Wd/ibUeFRwwIYRGk2/+znqEiQgdmWIBSRCloNqVfPiQiN5RyESzVUQhUKakUA1EwWlYeHzo0WZLUZcIYaiqksPTCW1kBluEkfE4EXVClQotDHW7Y3kzoAtB3RaUZUOMAwMKI5t8uDMzMXpOwxE3LIRpoS0rosrWF6UzNrkoNFFImqrBL5H944HvfXHGvXvC2wkhsx1kmi1lVSBSwidLDIF211CqjPMfTo7kBHMakbqgqhqUgG13mcPZfsk3KXdNdx5fbBhDn3j92Q8YhpGYLFUDN3XD/vTIHBZQhtkrDrND9+/zaFglxtmiVYVREiUVhTRszAZXDNgp0E8DhWnYXZzh0owPEZKgLgu++OgG4S1PpyMP88Dp1PPR7gJpNvSzYxjec35xwSnmzgajwTvLMlmubq/yzeVwRJdqzUMlXHRY6+l2LTKB9HkyeLu7YJj2nIYJhKE97+inPWVZc71rMEpxPDl2Z59iTw8c5pFWWT6+2vHwduT/Zu5NeiXL0nStZ7W7s/ac422ER2ZGVGZRKSRUIF0hkJCQ7ogZM34OMyR+BhPuGIGExBQJlZC4VFVWZVZmZGVEZIQ3pzez3a6WwbLjHnmbmtwBZZLL3c8x22a2bdve6/u+931eTyDnEk1wujuiTWLdbGmqirAE6rrBzaWDSA7sL1oOhxu8kChjEcJyPN1D3TDMI9IIml3Lu9tvuH+wBUGuElWTSSGWbByRqVc11Wh5+fwN79/dcTg+kNQDVSNwU+mZWi0gBbpVzdgvzIukri1t3RSCpgCRE35ZyFnwzTd/QCowleD5y1dkNC4suDEjVOLm5j3r1RU+TgzTDBhW7Q6y5OHhxPFhZL9/xp99+Ut+8w+/Ypx6bKVZr1q0EkhTPCvTdDg3VCzjOHA6DVxdvSCLHqUyTdfx6sVrPtz8gfWuYbe9wqiWm/e3ZAbu74409Yq6aXh/fc163dK0LW7J3N2fqNtLpvlI01bUbU30Cjf7Mwa8+FyWRbClph9KQ4Rzk2RxA1J6qkogReDliytSypyGnmN/YLPSaJVYrRq6bkMMmW8+fM/2Yl/CY42irbYcTweMLT6/qrL0/QkXjtR1TdOsUaZM4k+PA03d0bHCuQFE5DhMKKmp64b1ZovRGjc/ASwSUjccDo8kUQAmXbfi4fERaapzsVuC4X+cQ2WrimksUzHvi3TSGIsQqqDgfzTtKHlnoaDBz1lUPy66ngqop4wnYwzGKNyyEM/TracJWM753BAyxFCmPt5/8iBpLT8WXVKVSIBlWWibBnEm78kz8nyZHSmmM97f4L2DXLxHWZZirW1aAGJMWFPAE9baskjPEm0UUpSi48mmFoIDUfxiWmnmZUFpXXKsUixy+5TQWpJSxHvH6XRit959DFW2VYWP4SPoosA7HEABJ5wpgc3TRPFMxIvhCciRC/0OSaJCmUL7iiEwzwHzhN7PZVrJE9lRnJl7OeFjKMX0uRjMucCChMjM84gQxVvVrFqcmz/KFJdlwZ6x9YhEIhLCBET6YWSeCyHQ+4W2UcXbHBxplGib0QbyVEicVWvZbBoSCuc83kfWFw2319dE51DeEhPMbqKqCmQESVGdmAYpIloWuBHCU1sN2ZCzJQtL13VofU3wCe8y63XH+3fX1NWKqjJIneiPkd2mpqlrlHSMy8g4nRBC0NQ12+2KeZ6oa0lMI/MwM4xH6nZFzonFnZjngZx2WFszT5HatKy7Nde3ZSr2cN/jXaTk+3kehp5d3qG6jhgDszvx3eEWckQbgTaCttkVWvLsccEh5cTL169LjyB4bm7ecbm/4ssvX/B4klzfv0dXisN94N37a2bnsFXkxYvXrFd72qbCmsgwvufbr2c+e/ElKZwYju+pdy+5n2Y2bU2j12gN4yliXj/w7s6R5sirzxsu9oa8aAiJzabjZ39+yfvbt1xeXrKpGsaxR2aFlS3rfabdROZ0YrPfMYYBqxs23Zq28yyLpLIb6gpevDIsC/zD138A3bDdXfHZy2d8/fu/w+gdXduy2qnzdLRjs5LMTpB4ZL5fGC+OKClo64rr9+/Zb58RbM+Sy3X6q5//x4gYGHuBWzwPjwe6esvD4TtCymjTUncGYRu68Tl+dpwebrgePW+++Ate1YbT+MBvvv0VyzryZm+IU0WMFetLRZon0ryQoidqCwJao9hvC07/dHjH/eFAXe9YtQ111RJmSUyGaTTYqma9rulvvuaP311zcVFR1QYhBsaTZLWCylboKjOOt9ze3ZVzprYotefFC8m77x/RdqGqA7aF777/NV3VIXViWiZ+/zcH1puG+wcQUmCNYWUipzN63RjN9cMHBAppWh7vF97/0JPETBaRrF5jbCbknlevXnNzfY3MCSUFL/av2HYVbjhS2yLL75cb2vqCcbrmNBw5zpEv9p8j6jXTMnI6Hnn7x7f/ZF3zz8Nz9T/+D//9v/jLL2kbRbeOXFxZsplJOIID17esdMNIxe1x4ubxgWF+pNWS1m/hWDG898T0yOsvPidJ0Caz2kgWd2S/uWK76tivV7R2gxSCqmnJKjH6gYuXK8zFAbkeGOPA9++OQOT1szc0ekMONYENSnZE3+LmQkuR24XXP9sQvSO6QIWmutwUA73StLUlZU/TCLSKWCVoraFTG24frjkcj0yLwwXPOCWs3FKbjq6r0HVm92yFzhXBCQ7TxM3xPYdbQ/8YcVOgsonkDS4MKJOLeTqs0LTsLy6p25YkJM1lBY2j29U0+wazrVF1R9UZoozMYWFaIk2753icGcaRFD2fXV3RH2ekjCQCwzAVHKkIrOqObbPGjQcq3dE1FbWVaBEYnePi+UuSVEw+EnxmWR7wh0xaKF0FBXVlCM4RXPEEBRGZfcBIaCrDumtZJmhaiVQZ5yLzXJGhSOKI2NYSMaAssxuZ3QmhHNiWfggsYyS5J8mN4XA8EbNkd/GSdmMZ/EyQAiqN3VSMpweUDkS3MPcOZTQ+ZgwGnSG
4ESsMEyMxBIwwrPcvEMlCiGhtsHWDGx22KtlqjanRVU0SNW4sZuwcHbaWxJD5/W+/5fF4RNeSwTlOOIIS2EpxsW6ISyKFgstWdQ1th64MoVaYbcPFiz2V86xaxRgnTvPIPDuslOguk5VBqIZ13bFqLP3Y43056ezb11RNYLO7QOuW4dRzPN7QbDpcFvQuklTNYhX71QprFM4vhKOGOLLZtjStIoaZV69fkkRDjp7sJqTQaFPz2fNLtqYh9IGgErrSVCqjkHiv6FaKw+PbIuMSLcMgCSmTnUJjMKpi6CeiOOHVjO4E7caAbhiGSCU1jbHcDhEXR6Z8z+IPDMeRLDXf/TfXrH+1YfP7Pbrd4JYRZTRClUgAF/w5VPWEdyMpOBafMI0lxYWcAkZD9LDpLthv99St5R+//T3KVhyHB6ZlYJp7dGXpp4mH4z3DeKC2CiMM0+Ro2parZ1cchx43eS52V6y7DW5euHl/w9XFBcPpxM11kdlZC03dFW+kkfT9A+2qobINWikkCTcfsY1Byhq3SB4Oj/TLA6aW+OAYhh6RK7LIzPMMCZpqhTWZZRmxpqVptkgpiPmEyEBUaFHz4kVDJTckD/1w4tjf8PnnP2O1usTFhcmdELVivVtR1efpaz8Sgyclfyb1DczuBq0CPi5oU77L83IA+YhUira+YrP6nCzv6ec/QraQG5Y5I2zg3YcPLN4hpEDrzDgVZHAMZVG/uKnIMM+Bwd5NZZqZi8yq5IMVBruSpWgSMhaZptAIBdpomqb9KPMzukzco3cYXRohUiTmeWAeR5qmgZTP06UyPZnn4qvSWmOMLoRLin1VfpTmabwPjMPINI5c7Pcfp1uykB9K7AYJrRVaKeZ5oGlrjDVIpc4ZWkWq6Jw7490pBZNW58fJ8+TsU8BujBGfItraEiFyBkSIEhyGNAptDdM8E3wg5zKd3K43H4vGEAJD37NMhY4nz7JKpdQZhFEyEL0L+Hkhnel9VmuaqqY6ezaNrTFVha2LVHGaIy6AkCU891F+oModMheEfk6pNEjO0swY49kfVkiBfbjn/37/v5Gz5mK/oW1rrDFUOnN/+47+OLNMM0pNPAzv6E/35DQimJjmD0R6lAn4MNL3jyhdwBvZC0RSRCLHeSY4RU4abSzojEYXJLY4qzBEjdZ18SJZyeXVjqq2NJXEVgJrQWtobctm1ZVC3U0EL1C6Ip09VFU1Y5vEt//yBvudZPWvNdPc42OgriuauqGrO4RoWa07QpiJcUFLxfFwxzh6YioS2qarub77YynspcGImvvbOzKZ/f6Sy4srUprJXvP65Qu0yZyGR4KvWO8UlemwtqayAudKHMSz5xuaRvHdd+8ZlokoAhfPn/H6i58ijeAnX77AhYFEouu2tJuBqgpYleg6w7MXO/7h62+oVw3T4nh4PHG87ekXgRflO7SuKgYmDodv+e7hG769f8t8TPxX/9m/pK4tD8cD727vWa9WSAfreoUUsHjP/uIFb15tMKIoWP78F1/x6vnnpbEiDJe7K376Zo0fNGHhfF1OhMXw2c9WtBcRWWXCvOKX/9HnPF4f6eqaV68u+Nu/+2tiWDG7UqBrvSILxeJCUfAIyTLf07X70uwQxce4vXJ8uDkSk0Api9I1qAohZ+4ebrh/PFKvLzkO14zHA6fbnuF+pl4Jrm/fI0Q5b/z9335NSiNv3rzBWsnp9MDD40hYWtpNou0URjdUpkLozDd/LOHiL188YxoCb68Tx+lIyAf8KWKlZnPVsrnYFPBIq5D0CDTRQxgdihXrRiNTkdr1YeB+EaQKApFlCpAth8M1x+nIMC0YNFerC+Y+4Z3Du4nDPBMXj1+KD3h2t/TTwO7lHllVCLWmaz7HSMM0JI6nmX5YWNUbnl1dcXd3z+k4IjF89vqSd9//gPPF3xV9YpkC2myxraTeOQ6nez57/YZ1vcOUFTpKSWyXeLifGU6en355xd/+9d9zfX/LyT3gxcCq+oLVXnAcjkQSF89axnDN3XsYHwM6Kf7sy+f8H//Lr/55e660zuwuDVlloowIadFpxcV6S325prUX/PDD73l4/8AcAlkk6kqQZ4utI24eOc4DWW24O57YrC2mkhwee7brl5AVgoLu9vOEd5bNTuKjYnKCq5ee7987hsmRkmRdXaJkYmW3mKYjZ8GUHvFuIQSJUdB2muPo+fbde2yjWL2uWI4WROTh8cCma9l0Ha9fXYEKrNcNUiiCA2LD559fcnExFC31dOQXv9ix3+6oKk3Ccx9ueOjv2HaXbJ81CGP47u0fMLZhtbVoVVO1mXff3bHbrLC6w0jN/XjLMmn6ZULVElUJbL6ishtUVTKSPIlpnEoAY0okkVlfNhgDTbDkaDBYYshsNhukBmTGVREpA4s/lewlbbBmQ2MiWSqSkmSdWTcdflrIc8DMgRQWOrtm1JmsM0oXH0NOoJUs5uoEOje44MAoEJoYYXYzVaNLFtLKoHWHC4FxOkJKGKHRjTrLQsxHo7bPjm5ryTGQvKcxFctyoFlZpG5A1zwcTyVTInlyiLS2xSdJnBYIoDI83t/RdCtSEiUjiLlk+0iNwIKXTIe+QFOsLIt2IemnAcZIZWtspbA2oXG0K8E06jJOj5bDMDKpWOAnLrHddwyDw+SIzhGRJI/He7p1B0ni5sAQB2qlqZVCZ8A7+jQjo0TWkmfdjs42LEtPlJIUBDJnUBOImv1+j8+uIKXHgc3WcJgfySFh1iX/6+7ujugSWiqetzVaW+I5QHi13RCXCalW+JTIi0fVkg+HG6ahZG6RBTk4ZLXhuCyIFNF1phOWxSdq2xb/gsgwR6x5ha0rmtowHY+kbMlakpRCYVldNbx99wEhobINctoS1Ei7C4glsEyRZ82GU3DEXIKW20YCHSBxvufQf+Dg79itWpRumOeFw+nIuIysug4tFCkmxnFCCY0KGSlbfFp4eHjANZoULFl4Fnck5sQPb98RGYs0yFGKsiqjbURJTdXs+MPX37Lbd3SmQqua4+OB16+fUVWWeZq5u3kkBFe8oUJhjYWU2K6vivF3nM+5Woqp92jp0aoEz2ahWEJkXEZCDGgTEEskBYFRLetVS/KSy4sNKRQZDTnSmJa7w4KiLMwfD3esuzUJh6lzMbX7lrrxRXomKzIbFh9oW8nsDM5ZrBDcfbiBXLwYKWa6tub+cEvXdaQgOZwmltpSNQm3HHHLhFSexceSIyPucDnz4f1vWHVrdBWQ2qHMxMN9Cc+1tkXKhqF3KFnjXTl3FeCAOBcnBSLhgkeKSE4SpSxGG8ADxYclREKqfC4EZJmiiIgQn8h3AkGSEq0qwrmIU0qx3Vyc0b+SqCM2Rtx58vUUNPxEGSz5SfEjZa/IDj9JCquq+rjdT3j0cPaB6TNhz/+J/PAJC/80pfsxEfGJRPh0+3FO15MMsGmaEgZ8Bkrkj9fgIr+cxwlrTCHoxQLwCCkhyedJ3aepXgkzLhMhT2Ce54/Y9HyGgzwBQsI5I6tpGqDkaWUyIfjztLB4hmLMkEvDIxE/wlVKQDBkyu9TKjl5Whve9b/nXf+7Io8m4Y
PDeYlOJSR8s9vgluLzatqOfnjEmIxSBaSxWq2IyRFCyXWq1i3TONHYTVmgkpiXoUzUPsp9Bcs803QVKZU1iVQw9DPGVAgsOQnGPtKfMutVDSqSKT5HF2amhxGjzBloMhFjOkvJItMUcE6QzlPJptYMvWPdtShVZKQxUYiAXpZGRgjEAF3XQa4wtiKGxM31HbVdE5bM/fGRqR+pqxXBl9y0LATZ13St5P7hUHyfAtpuy4ebP9JUK7Q0pJi4er5hHBVzmJjjie5CI80WoReqRuLFCaFhSQNV17De7rnc7ejnb0muY7u9oqoNN9fvabrIzfUtmXJ+CZUvHazomWfBkGq++vlLfv33bxnmQMqaAc9PP78lLg9kP9BVlofDDU5MzH6h0oamlhz6I2v2PN/XpG2iNS2nQ88Xb3Y8e3bJdrPGuYGr54oUGuY5cDrd8ZOvVozzB/rbyDRAzuV8/PzZBUJKFue4uHyJmwV1ZZmmgV//+td8/vkveHZ1gVsi8xwZxsx+L8mpNFPmYeCH97dkDOM0IoRku21QGn54e8vpdCKSaNdbJjeSkifJBDoy9O84PY5MQlDplj//+S9R2hNdhVARpRYOhweilgiTsFojqHA+oZNmu7sippKlVtkB5IxVlnW9ZtOuOD7e088L0hRKtB9msquRtcEYi2k33N2dCKFM7OdlRjSKu9M9hyOs646L7ordbstoRqZhxmcHreBGPmC7pqhQTg6TbVGCiETdKKzRTMvC4SGwuAHvPIaZ1y+3CBLOO7x3CCQPR3g89QzjxDj7c+NbopKiEprdbs3h8Z7D4ZrcQ8gBU+24OzwipKUuvh8WH/lw/55pSnTNJW4yfPnla64/jCgbaNvM7D/w7jtH1oqqtjg/8s3fvyUta3a7FU1XcVj6f7qu+Q8vjf7Db0oKusYwOsepX1BVKkQySuZHtdKItsIsCluVsMyYPWFWnMLI7CdEE1mtW+pVR8pPF6kKJS1KFuSyrCRyIzk+amqjsVagrWOZbslOUKuikU5BsKo3GAxGqKKVjhUpFPOelJQuXupYfE8gkZUnVSXANZ3pTgIBImDUWTMuxceMFT8Xso6QBQSw3+2Js2CaA0JF1nVD3weCkywioOJIqwzPNiseT4HZueIJMSX7JYVMTCUksWqKHjvlsz42Z4Zhxp5N4iBRWaJiCTqVSVEpS2012WaiB4khJJBaYqszVckV+USMkawSUgm0adBywqVMIpwlJYrsIiYmFAKpa7SUTHIsr0lQtP3yHJsoJZU2KKnIMZfJlixdUm2KCV5JS12poscXipRMIVBpi3vS45PPHevSsdNG43MgJE8IhiRk8SEZSYwzSF8elRI5ZqIrYdNaa9CSRGSae6q6ATQpF8S0T5GubZC56NQnNxUSkqmQWpMoobU5c85jcSQiTWNx59A7BBitWLwnGYEPGe8WqrZGxFhksRHmuZARq9CgcvFYQMb5Ball0SWnQAgJcjmxWqVom+qsd08kUUzuIS/UsiF7SU4SITPt2iCbyDJOhOwoPKuq6OJFJojEyfcYu4Joyens79qo4s2JBrJBSMHsKeHCgFQaRCKryOQWrBKs1iWYcpkTbvGkGEjKI52lrtdIU46h2iiSMkXeGJePk4dl9HRdR6Uass+FzqTBoEp4p9bc9wMxC8x5oeWWGZ6kWDqzxJE5gHSJeXZMroRmLt6BKh5EaxTRO1SCJMp3d+gnBAN13bG4kX44FA/L0tN1BqU1U/SM08xlV2N1BQjGZWRYJnZyXfwdbmHVtcgzdGKeSqCxEHA69UhRyJ/iDIKYpv5MpysdfCmLZGyKC252rNYr/FIkZFoJuq6hHwbCEhCmnG9C8FhdFRBEjMTgkEi00pAh+EBKnmkZi8dHco5IsMXXd7b8+JCIccEoQWU0s5IE56iMZRxnci7nRakk1pxDN4VEq5qu3iDUEe8Xcg6s1g3Ho2dxA4jM4o/cPX6LlD/B6A4tPUs4kvNCVa2REoJfziGuAWTx7CitECKSskehkaIUJoJEFoEsFDkX2IR3Jc9IG4VSZcrjlvARAuFDKXQ+hfuWc2d4mk5lyFmcieCxBPGmAs0oPphPFEI4XyPCU7HzFHSc/q2i58nrBXx8rDiH8T6hw398K36y9LFgeyq4fkzyg0/Y+Kf7PwUbQylulCrXonLmBCVLMLB88nE9yfHISKURsgRnn3dE+azPYcgASyo0Wz6p9cr+1rr4q3KRcGqtEUoVuMS5AC3FaCCdATWZyJFrxvyIyJJVvvj4/CmWcGghMzfjt9xO39K7uzJBq4tUtHwuGWMNVVMhVZkiCiWpqpppPJAQCFnCdFPOCDzynHcVhEbrupzHiGgsMQR89BhjqauaKmp8OEMktGS1alhmhZuLF1aSiaFkrHnviL40MqUo+837UAKEVcbauhQ1lOuGFJboi7yyspZ11+LmBWUUy7yQokC2lqYtXrCcPh2jUiqMLv7np+/3ul0xnaeli1/QukUkwTAMaKURKJpKcz/3xAzaViWoPWUamZA6IyVMS0+7eg7CMzuPC1OJQWkrMp5lmbG2RpBZrSzbVcuzy47wQ0vColTx7g39iNAGNw8Ya1FWsPiIqQACwcPsBU2r8D6e8dqJcTrx69/9Dbu2JriZ4BdsXReio86YWtKtKvZ5w6pr6Zoi1d/saob5hF88EsVms+b2ZiYkgdWSutb4YNjvDL//ww3LWCaUQgwcH4+k5Flmz2HIrNYdv3v7HVVVnafzCyIV6blbTvT9jFEbpmmGHAjRs/iB/pSwdSCGdJavLsxuwIcMSiBInE73OF+yOaVUVHWDFjMiFWhTVlA3NSFKFu/QZ8iN9wGZAsusoQKFY3IjTPXZ8xg4HB6KLx1x/t4rlugIGWIs6ykJ5JjxTlNV5Rzrn/xtEbyPOOcxVS4kvZiY58woatbb3bmZlQssRkmGuDA7T3SR7GBVdUijWNwCPmN8zeJm4uFEyhMpehYXuCbSrhTeLWUyvzacru8ZplJEL34hv8907ZpsACXwAUJMjIdHYhYIVXH14gWPD7f0U2k+ypzIWhECaC2oKkkMgrapqCuBsgklPe/vv2dyNftnHbbSDGPk8W5i1VqyqvAI+mH6J+uafxbFlUBgheS0JPrDgukcu82eafH07sDYj+RGsMlr6qpBSs3h8USfJ46nBSSsL9c8e75mf3HB9Q83LFOmbddlkWQtUkoqXbPbduisUNIhK0mtPe9uT+i8Y7Mvevu760BnmlK0+LGYt4VEZ40450XJytK2l7jc8fhwZI4Ltk0IV7TOWhliTLhlorvYsEwORMSaClNHbt719NMBXSVevr4g54qbD0dydLSt4PWbLVtreDjA4e6EEPdcbPd0+0uCe2DoC9bzxcUKlmJezUqXXKomkUTH5COjm4jZcTweSmhbVdMYS6MVlSmo7xAzOmgaqcBmghAINC5EfCjJ6QjBskwYCylksMV4rLQhyYngHD7G8rtksEIX47eUWNOQcgA5E2IkB5A2Y7QkUk5+bdeWhUYu2GR5NsvbyuCdLwWW0oVqKBW6qc8ddY2bF7SGnB0Cj5IGa
1elY+oLSKAPgbpbk1CQPDHPVHUhOeZYFgTLNCCiom0bBAIvHMzFRK60IgeYo8RPidRoMhKfi6ZcuIwx5XExB5JKKFHjU8JPC26WtPWGaT7hfEDLjNaZ2ZWg25wKdWoaJ4iJHD1LCqWjVBVSoMwlCkALy3G4P9PYCl1OW1swWFmQZGZZPFLUiLSUKRIJlxxJRpY5sPiArgTPPm/puQPrSGFh7D0WQV11JFumALfHB1696JDK4hdY5oW2swX17S05NIQMJKhNWRwkIZBagPKEOdCYhs2642Y6loycxSNEQJqFhudUK8WcBpZ5YGdKd/1hHHFhISjQeoOOkk2zZb1eMZyOuMeZrCSrVY3uJFkInBvIWRCEZDGBYboj5YSpLHXTEEMqoZ/LIzEosqxYr7YMfU9CUpmGemU5Pd6glSSkRAolU25ZJrIoC6xpcXRWY3TxTZEly3IixgkpW2xVmgM3dx+QtizqnHeQHc+uLjkNx+KrSYKqNsSUmeaJGDIiK9q6xoeBaT6caZ41KTm0lnjnGPuZoR+pTFPw0kp+DOZ9eDjhnSfHiNaKefLkKMoCRJZpDjmz6hqQikyirizH4Y5Kb7G6KjhtXZUFRvaEuDCNE6bWBVRjNFZLpnnh2bPnZ3pdpq7KInS72SPPRUdXW55fvuDQL4QwIBVsdxuGwZFyYJwfCePZkNyvscZitWV2B9bdGiEEyzIX+dgykpOm6zpk3SDEeYETZmQl0KrB6PNiWeTzRKyVgbEAACAASURBVCIgpWJeBuq6+kjOQ2QSBQWec2ZeFuq6+oixTzEV2Z+ghOyei4MnoEXx/VAQ8eei6iknC/j4PE/Fkzj7n54Kq/Iz8SfY9yd0+VMo7tPPnm5PxVI8I+gL7j5/pB4+TbPgT4sr+ISaf3qtRmm0UR8R81JprC4+tWVZyrn6vG19hhI93TcshYj4tF0y6PNryZTzfiH5cZZGFZjE8VS8eEKci6AcGccR5yIxZFIEN08Yo7hN3wEJi8WmCivtGd/umF0Jqv7Dw79mjme4jzC0XZFPlto0F4O6ithGfETj103Hqb9FJ0HKFURNjgaRU+n2xUStVxhVvhuIEvwdomNyDmtt8Z5Yzbd//J6QoVvXdOsV67bi3dvHQrUUIKWnbQSLm1hCIlLAH13boVWJAhjHif1+hzAOHyIiaaqqY5pPAFRVzWa1pu+PpCxYZoeXgqoOrNct81I8Vkabc90bMcaipCblgNUaKQTBF09a09X46NDU9H2PQLBd7Si8xQDKoowlJI8xDdparDXkGPn+7Q989dVLKtuR+0jf/8BqW2HUuhCE3cC6q2irlq6TbDeGrkto0WJXmZQGxsnjFs8yaOomILQkRZiOM/riDOtQGeSCy0eyENhGobQgBM/Xv/9bfvbmq/N7cuzbS3IOtJ2h3RhWm47N9hLEQggJrSzPri6ZXOD2fU9/HMgp4f3C6bDQNQJjBF1nSVHiJo01DabVnIZ73r/9gBSeYXZMLvHzX7zi/Yc/sm53tG3Ldr1C68Q8zBwOD5z6nk3Xcjr5M+UzkNIC2RBd8dklMsMw8HB4ZL19SVdZXPDcPVyTZc1pClSVpVvtMfhiAUkLCTgNjwhZs8QDJp+jE7xApIhYLJGIFiPT0hNi8T/5MHJz8w5dr1GiNL2cc9z3J2rTomVbfI4iIqSjLLkyWaSyXqnMGdpSJp2QaZtzIyvANA30w0hWgrop096kE1MMTId7/JJQuaJrWnQn6X0iejBOMy8BwZGmEVibmd2JH74fuXreEVPxG1Y28/b6liQ8MXviOeR5nzJeVAxeEpfIfr3idOrxXlI3hv3uksfHRw6nnlGVInplV1jToU2mac25wRbPgLBCU35/f8equ8SYDU1Tlww7YVjtDNJmJu8Yvfsn65p/FsVVVpLr0yPRBy43e6KKPMw3mLbheFz47tcf6CpLVW95XEbmsYTOfv7FBT/5iz9Dm46UKjSWmz++Zxqn80koEJcaQkVMR+BItxF89Wf/guubb1h8ROaOZ9tfYGpN20L0mSokED3GBuapILHXV5GqzkynIh/pVoaf/tnPqOsdfe9KQKKu+Ju//Y5puGU8PTIMM1fbl2y7Sx5P14zzwDjMVDZRVw0pWRIDy3BDblY8f3WB0RolE8NxRi0VKge0sEjW+H5DkNe8frXh5auf83CXadsjSz8j84oULdd3PzC5zPpCk4Ghz7TrmVev98x9gADWAknx5uVn5JQYhpFlDhzvF/aXz2haS8Tzu9/+gW6tWPyRgp1NpFCIRotzZPFYQBBxJvqloKdlhVGWJTsGPzOOnrA88tmrl2w3O8Z5ZlocTVN8JFbXEEtXTGtYpRUgCDHSrS3vP3xfsKaiJMZvdy3Jl27JtCyMcSCriPcZkyUmG1RqyHNiOA5Yo9ltnvEYRlyWPN4NaJnZbSoInvEwYazBVJIQRqp2DToRQyakSFUrrp5ZtJLEKNht1hwePO1uz+HwQIgDq64hJc3hdqKqHdVKoXNChFwu1rlM58bBEYMiRcXsFn54+wFjFUhFozWNqphPA1fPNoyzI0nB5vmaWq9Jk8OdFuLs2a87mqs1j4dHfEhIJCqVhazVdSHjUCjMymQq0yCVpF8+8Dj+wGb7CucrTuMdtw/veFgeqURGRnAzLHFCtIW8dBodN4+eFCVt255lVAJRt6Q4YoQnicA0HAhz5M1nX+A8PIwTsz/w+vLn1OnAyrSs1I6/+sdfYTrDxeUeaxTHh1tSOzIej4xuJOCo91eIU2Y6BXws0IlVo7ACan3CVjNT6lm5LQAyQxIeb09IXaRUISeWqAmqGPj7ccYcC4UvuIWsqnIBNTV9P7FtW3SWKFk6zc3asrnaMI+OAv2V9IPH+7Okq66ZBvjis1/g/IFTf2QaHbvdFdfvH0GCkOCzYnuxZZgHqvWe3XrP++u3LEtAaIE0BZ3f1ls0W7wrGU8KTd8PdN0FVVVTWc0P777lh7ff8eb1T7l88xrnPP3pkTef/4Tbuw+cjve0qy1//tVXfPvt93jvMFIhaokgMIwDwTlIghfPXtPUGV0ZEInjaaE/3UBMCKWo646//E+/5HQYOBwPLMuRi2eZq90vuL/v6eOAwNKuDf18ROrSpJiWkRQlr9Yv0EJglaapW5YwkaLEmqI8mOfAfnfBy1dvmNyJt++/pum+4P33j8SQ2K5XbLqGpqmwleXh8Igb71jSSHSaepU59icOjz22hszA1cVnrFfPEXQEXyYbKRWSokDQdfU5bDLR98fi4VIWniZVoagJnuAYQhbkd1M1+OA/EgSR+aMErkwgPOIsL0NAimVK1Pc9mVykZlVFTPGMbC+jHWPNxwmYQCBVKaLqqv6Y6STPOUhKqo/EQq01MUWcD+dtN4X8dUanF6x5oYEBGF0mJaXoEJ9Ae0Kcp6afJmnkjPOOylosEHMixFhiIUTxrFVWI4HD4XCeHBaCZtM2pUl1zsiq65oUIsuyfIRhrDdrAOZpZp4//TF1g9UGjCLEgPcBzhgOnye+9v8XTTas/RtOw4lf3/6fKF0KtLZZn4EoHZCwlcQYVbxYw5Eo
4Rxphc8lzLsyF0QfGUOi6zQya8ahSA+pNMlldBgICpZQ6IIGxdX+OVIIYh+ZpUcn6JcHXDCkIPjs9Z5V+8AwTngn2Gw3xOSwVYUNkdlHpunE1Acutju0LQCO65sbkpxp2hVdu6GqWu4O78qiTyi0ttSmAhRqV4HQKCW4v79lf7FBqQaQpKSY3aE0EYQkhMycM/3hwHrTUbdXKGP44d0PKFuhdI1IEeeP/P1vrrFdRdOtQcLDgyf4A0buCYtnngaqquJ0OqGf72gvWvbDlu5CMk0zMWakbkhC8OYnv6ReDbjpkbc/XFPXLcI84mbFNAUWP2HsnovnF8xu4PbmmsP9I7e3ju32Od1mze4CfvP1/4NuquLpriyX2z0r84Ix3NOt1mxWF1xff8faPuPVsw1t0xJ8y5svtjzcfCgwnCh5vn+JJNO2mWG45u9/c+Du/oarq+e8e3uP0ZIXn7f87//rr/jP/4v/EltnhumRx8eW2+EepROr9YrPnl9yOo48238OSWKVolsJ7u7+Dmtese7WWC355pvf8uXP/pzKKDKKJWVII9LU+NTjQo/zI4uX5OEdwUMMEluvmcU1S57wUyT7ijT3VK1HW8M8er5/+y27yz3DEM600MTQn9i2jpWuSafAMiw8u7hidj1+qslRoOUF79/+wBdffYVWkmEYuLk5YGtHLSrWdYPaNOSskVXJ64wuEjLYSheftvQokxDCkIJk3ayRRjP3nn/89te8evNTwjyzTBMhhXNDz+NSZoiC22UgzffMi2W73XD5as2v/+4f+epnL3l2dUEKnt/95h/ox3v0Y2K/u2B71dGP9zw+HPEhISSYStI0guPpPY9OkJRARkFV/6zQmzOQHMPpDjeN3N1cY6zm9eevcX4m5bKO90vgj9994MXzNVXTk3KhS775/EvuPrzj9vtr1qsNu/2Gv/jlT9C1ZFky3iX2jfon6xrxlO3x/+ft+Ysq/7f/3QuUrFGy5jjesr/qim4ZRcpwGkfqesUyenL0XD6Dd98ZdpeWjCHFmp998QV/96v/lxDAe5jHyGrVoHVF21ia1tJ0istna96/vy24b1Nz/zCwuIl5viMlh1GG46Nncb7gQ6vE+soUtHrWWG2pbE1lL2jbK+a5R5nM5eWO331z4t37X2NNomsbarmiPxQ5U8plUqFUh7aReRwgedZry5vXL2lXGw6HmQ8fDnz//Q1//hevOfQzPiwI4RBioamf0XZbhIRxvOf6/SO1XlHbFVpY+nHk5uEOn32R3imFc5qmTkUGKSR4Q9UoqrYh5YIqX3wkx0RlNKSEWxaOw4n1xpJikc4pFZlnh1YdQiQyriwsTMOyeLz3xBwIMmCrQrISGVQUpFGCTdhKoYyinz1G1yj5JCERpCh48fKKaZhY5gljEv1p4urqWblQeAcpUNWKZU7MzjP6mbvDNSF4atlQqRaravYXVzwcHpBnb0MfjyzzuWjFsOnWHA8PPLvcIzUlhDXMDMvjGSlc01QN67Xhcr9imRfc4nAukkJDVIpxOjAvA0lI5lljhaCyClNLppSw1GhVJCNCWw6nmcZUqOLMxseFmD2CQvUMvizKnr++4OF4xMfAatPSVTvGU488xxBE7Wgy+OiRWlG1Lfv1nu8/vANFkTlIQSSXRYFtMLYmioHTYeZy+zk5R07jHdOyIOqJOoFJFcQV7w9HtpuCtE9ZcX1/wlRFnxyCYJomuqYmiQkZNFpKmg6Ukfxk82UhzvmBO9cz9jO/+OwFOlhu3k+8O33P3YOnrQXbrWa7M/zu9yPP95YsE0tOCNHwfNtwnGbIgtpWLLWkC5H9usKHhd/8/nu69jlVJ1FKkHxkuB1INrPaaLQ1xKzox5k//M93pFVGzuK8rnzCOn/Kyzl7/vkkaCoyq9IJzh//FBnUGTWdOUtvztS4XDJ+0p+cU/PHBe2nMNozTOH8dOJHz5yBM2mBp3VwkRdTvCyU4OKnXKacM/I8UShTEVGOkaecJvj4mOJXKVv8uJj+SFsrRDdEkYQJKc44+fK+Cu4+I6Qu0qzz9vnog/kRHDufJyx/so/zOR/p/LRSnKfR8kx6KzKuMil6Cu59ktiJj/K5/LRvigGHT9evgt0uvpunHXf+tD9+3E/by5+2w49e97/n9vT55R/f8/z4j4fL0/1+9Pucf3yc/QgZ/umjf9pd//bz/fj2tJ9/RCPP/677/Ru3p+d/+hz4N6/1/66fPT3u33gvT///8Wt+Or7+fc/7tP2n9/3j53t6/Mf99OPsqk9b4sf/FAJELtOxkD51jQuwQ3xE7P/4Y3k6RsX55+Xp88e/y/fsT4+lp5f5tLlP71Wcj6FPLyulVKZb5yntU3D1p2192mNPu+Xp2JNCnl/Tp2P46Rh9mk6GbeKLf7XnZ//TFUY1hOAREqTUKKmJOGxTcTweGcYZsqZdUewQSp3PB+Bnj6ktWcAcPMM0YtUGc6bcGun49g/f065qdFUmsDLvUNUdIpcMRi31mUq8ZXVhUG3k8fDI4R4uLnas1oqmzUgh2W62CLEwnWaOd5Gf/fQLkjhwc3vD8ThjzAU//+XP+e0/fs3kepQBoaHTLXmCSCZ3mS8uLQ/HgVbXWGU4poWfvPklce4JLhCjQDcVWqyQfkLGkrUn7JY0H1kmh1KGNz95xcPDkdefvSHJnrvjH/jd73/LX/4n/zXP98+pK8kUHvirv/pr3vzkp4To8dGz6lb87rffUNk1XdNSWcPt9YgisN811LWFXKAzOWu2lwZtIl//9h0i71BSoLRFKssw9XTrltF/oB+veTjcM7sKoxRSNCSh6cMNg3+kra5o645KKw73j2itWBYHWdA0NeMcyblGKYPRmsZayI7D8YhfEjJb9ttLslzo+57oodZbjFWY9YIbF/rHkevjA3XT8Pxij6bAdGRtEUawLJ6coe4q3JjIaWFxA4uf0LYpU+CqOvsSNb/75js2V8/YblqkgP+PujfbsTRL07SeNf7jnszMzdzDIyKza6Qr1XRJ9MUgjrgE7oBr4Kzvg1MQ0CDUggaKorroqpwzwiN8sGEP/7hGDtZ2z0RCdZzYiR+45G5me1rf+t73eV6eB263WwyJ0zxwcRP9rmHb7hByIURH8Inddo/JPbXao4UlxjPff/8fsKbCmhpryjb6Fz//kdtXr9ntC0xlHE5IOTGKQLaKV/sdO7tnHUaEsBjdoHRmXTxdr9G6KABttSPEBSF9eZHHjr/+65/x69/9nO/fvefl6cRNb3h1vyc4A1Kha/j48szhbsM8ruQAb1+/5r/4z//1/5Zz/lf/H2+DfxybKxBs+odygEie2/YOYy22bogpczpfSDlxt72HVhffRH1i22nu9q8Q0uJ9IsaB7a7Dmo4Ui2vBe0ff12x2lrYt0tBf/+aXjEPxSjR1sTbnVLGmhrAWGIJIG5IfyDLjl8TH7xIpqqsHo2LRGh8+0bQz6+IRMvP8cuTT81KQnZuatrHMlyIGTekKRQgZpdeyRUIiVYPzFc9nzxJOuEUQg2BZPU/nMyIpRJakrBjnSIwTRjdIAZfnM24eQRuUsJha03cbhBU
zwzDjtalVh8xs+Opunx88I49khtiDrQiI69dVijMUojYkL39f2S2SBT3WwXoqLUs7bMIjJNlSRtZO0vXk4OI+rcOOe6saXKhCiKRrVIVUg4dv2Bdam96RASvf2XpwX+m8B/IIT494CW2rn6L4EbIYS+nl59B3y6Pv8T8D3wUQihgQMVbPF/eZRS/hHwjwAOt325vAqCg75TjLeq3lCspOt6xmHHp49foGx8+fGMW2vB/fb2gZvhPaWAFhPrmisWVHm6XvH2XZXpGbMxDIZ+aDncCu5uP3B8Trh4dUG9evodfPvNPft9Qy4BIx+YTo5pnlAa/v6f/xlaCb5+emSeIvtux7gfePPuJzx/3XB+wdqNv/+v/Qn/x69XbnoYbgQRz8ePnsPtyLJMPD6+8M0fHWAaYQtYEWj2iVladK8Zd4kYIn/9vyfe/vQ7vqrPZOnxZI5nz/7hTAn1/ypGybc/+Z437zJaXCWwZeWHz595c3+Dj4mvX19IeUPQ0NqK/g6LQ2vH+eUJpeB2vGdaHKfLC2HdsNYwtiMvfMFF2MKFlCJ9f+Dpa6QdhzrYNQ1/709/wR/99C1uqaQ5nzaibHEbPH05cXmdyVkiuiPeZaSseeOny+t1NyvRdT17e4PtLNN0Zp3rzyJF5v6uxa8RpQStsaTgsMJgpGRdAr97/R3jvmfYd0glKaWwrivObcwuM/QjfTfy9OWJ6ZIoJTPsNLvdHXfDH/H16fc0jaSxlhQts3ul0y3D0NMNFu8LayykHNGq0PcNSu/Z31gSjlCWa+T0wDKfCT6gdEXqtvIt82UFanTAbbCtjhgCUkpu7+8pZM6XIyUJGtmQQmFZF27u7kg58/nzj9zsHzDCsCwLzkdsXwvfwZ3AOPquMPbVGZVyx7yuHOeNQ5EoXUAIlLRIGpYL+D6hmxkhV5wz7O9HuqapzjORKEieLud6jC6gMCISuNUTUyWJvb6csPZ6cnDdUSc3nE8rKVaOAtExnXcMuw63RabTCdOK6kjaLKNRfPPhDT/8cKKMntkvLKujGMvDh3ckp8lhIZw+QdpxOk7c3t5yOBxo255Pv/uMNA3rIklBcP/2Fl9WuoeAKBGfaqzvj3/6DZdLxrnEfp/pBsV0UsTkSdFxfnRM0ys/+emBtoHny0ynb9iPOxQZmQEsYS2YLCF5XHQcf7ygF8H+vaDfGZRoeXp6ZJoWHh5GmnbkdDphmj22P9TTwbyyv011EEp7Usy8ni4o2THuWpo24f3GeX6m3ze4xSPpacyOUhzTaWZdT0hRgR9+S1hrCcETvAehkaruZm+bYN02vHe8efeWLW6kkgk+8vXzC0ToWo22CoXHO0U39ggFqiQ0irhaxlu4edjx5u03jM3G4mcWv7G4jbSeQUmMzoydYNdIWB3plBGmYdgb2jvLx9/9JWucMM4gy4BPDUlERtnSyxvG7g3f/vn3bG7h5fEHXFj57o8/MLtn1scX9vaB/f4981b49a8+8vf+7jegI9P5xP7+O27We3zMXNaqzzDJEkugaTSHG4sWCbPW3fA5eF7mJ25vHhje3dOZDrc4Xn7/W5bzO/yaMLaQheLpOeDiyrz+gCsGsWm++9k3/MWvfkPfN4y7nrCdWZeJFAxps8jc80c/f8+n4ydyXnDLRLd7oNMDX9cfcWi0lrjFkbxjubwwrSs+1HidzQp/CahsuGne0WDw8Uwogaw11hh2d4J9vyPHkWVtOJ2P7IdbCoFht+P7f+U7Dvu/w//0P/5ThoOh37WcT4m8HuhHzbSceTktXPyF+/d3nM/PrOsCEW67G+blyDSd2ZxDyZb7N++YpoV5LeSyEctCDNDoAdtAEQtbmSgSXM74GOophh65XM60reXnv/gFiJbZPbIsieP5RMpHfvrHf4SxkpINpTQIecvD2/fYFrpW0mrB6+sRq7oa7QuedZt4982BLOrkSqjE56cjRQmmeUFrS2tH/vJXv+Ynv/ieOW8QVxotKZtj8RfG4UC/v8Pahi+fLzw+1o5m0/RoFDd3B16fH1kuR27u3zEOHe40I1lpTCZlxd6+JWxH3LISU6bsPOuyUlKhsQ0Ph3uOr2fu796zxY1puWC1pOvauvkbFDEq1jnStpYittr7VJksLvz8T78jppXj+YnHry+07cB4O2AbAcXx6eMr82XhzcMDXTfWhYhpePv2J3jvuFzO5OLZzkuF+1wdRctyIcWGcddhRObl84/sxgeU7RBaXAmMDdvyxA+//4iRmk73aFkpp65sFJGJLjKdNygKIxVKZb5+eSKGgGokWjRo0ULKHG5G3DIzXS5MEQ67t+x2DUk4lmni/f03UFl7GFU4DA1//NNvOT0/kkvFyEmpyWljtLcIGsIkeFm+cnOvuJw8qmhu+oHf//LCzdsDqfUYu9G0DcXdYHQPwLwsTB8XYhIUUUgpsS4eIVQF3MRIjJGn5y/c373FuTqn0ErysBvxcWFeNpSsQuIYNMpamkYhJWzbxn53X+ciayRsEZFb5mPEtpZ9r5C68PvtM2FKHPY39MPAsr5wOp14uLsnp8zlvNDuOvpW8ePxieALD28HLidPKRfG3YE3H97QNIr/7Z/9Cj1s2M7SNAYVBV07UErBxZXp/ILIG6fXQMn1NNGtZ9aT5P77CqI6TxfSpjhNF27vB9rW4H2gHxX90FDITJODqLl76PHxgvAdu+GO/ZA5Pp6YlguzL9AW1uRoWoMqllZ0vP32A7c3f8bQdazLmcfH3/P48gMfvvmOdYmsc2Joep6+HPm6RQTw9vYtu90NL8ffs03Vf/bmbuDNm5Hp1DLHyOIT65RorKgn/FkhMAztA7u2JbmAkoWHhx0pTRxfVyppFdwSef/+m2udqGCs4HBzy/ISKciamCqWz58noEUKixAFt/1L0gJLKf8Q+IcA15Or/6yU8p8IIf4b4D+kEgP/U+C/vf6T/+765//h+vf//f9T3wpq/MG5c93t7Qqw0o8tP/vZn5KS5uPvP3E+PRNWQ4gzOTuMshjzvnZ/1gvzdOb7n/wU8ortBKZJBFa25GjsjrG/oW0Ny/wDIXleXiKmlRweOgggPVf3B7R2xzj0GOswbYs1D/z5n/8b/PIvfoOQX7i509zfvsXYkc+fj/z4wwsxOXRjcPdfePN2B6olInm9rMQQmaaFouDw9sDNCOO7PV8+bsxzrDtquwvDu1AHsHPiF392Q7fvKIwkPO1O0tmW9aWhUYlBKdr7A/GKeA3BQSoYI3m4f8+ybbw8X/j6eGY/3uLcBd1I2t6iTOFXf/1r9rcWWTTrHJjWjYd3O0x/YNs8r+cjt+PI/U3P5RJY14SWG2/vbpEyVbeT0vjN89e/+n09lSngY6HIMzf7hm/fJcy3B+4ePvAXf/XXuG2rx9gI2tby+PiEloqSEtP5xOuXL0gh0O0ObSrGdFoWjqcFoQxN3zH0TZ1Q6hYlBcUEmtYQk+Z0OlNK4vuffkCgefl6wbvA49dnlHpBi5FCqn2ufuD55TNtp2mMAWDdTtzcHZjOGzILdGcI0uH8EW01ezuyGw58fvyKaArKGHTRTHOhpPUqr7QoaREY
tFIscQNRGPTA5x+eQQraziIleB+IpbpA2nGg7TuOzy9oBe5yAuC278g5MV022r5ldxgoBLw/4ZYZ5wMlS3wbKXklppnVr6ToUPKO3e0t5/OMcxfawSNy4cvTV9rFsr+ztbcWX3jz7g3WGM6Xrwz9PfrVcrwcWbaVkOuidHme8OtCjI7dbsc6eYyxGGur44PA0A9150gKyB4hC6fTKyWDlhbTGi5nVxd7SvPl+YwWFrdsNEVgpSHPAh/A41A2YfqW9dHTtLC/iahm4vcfF0K05HKi7RqK6PnxhyOX8EqOCiM1Q+fpOokCNhcIMaG0YJ5XjqeZlAtCKYp8ZDhIvjzW3W6hMzc3Ch8163SEHNgdbhHCgU4INI2wvHv/lnWQbMmzbJmxa1iXV/aHHTFvvJ4cIRfu7u4qqGZZiDFwGFpSymxuoukUb973fP0yUXLPdJ5xYeLuTUuOFnRfZa7ac5meeH1OSKHpe0ujWy7zin1QVTB6da64eK7wHKWxbU/T9XT9SNkSq19w4ci4azi+XNB24HAzUtQLGYlWCtm0DLuBy/PMh4eBfixoLZinE8lBXh1WBbRNzItjWZ+4e/9AkJGjX/j5v/oTGrFn8jMrCz7O3O3f82e/eMe8TDwfX5iEw2bL+7sHdsMdyrR89b/i6YdnunFgbx7IZ4U/wWDusO2I7hu+u23oiuL1+Ag6cbgdcS7SdBktMqppEemnrBfP27cj7dAhhORyfkSOFn9ZyVli5R7BAA5c3AgpsTvcIcoKYqWgyFJgdxozWbwDqVVVhTy/onQmhDPr7Gi7B84vgc177u/3PDzcsYWv3Ow6jN4hhaigi3DkcLNnWRLT5EkpcwrPeAc5GEqShHVFKM2uH5AlU9JMyQNW7vHJEUqmaQbG8YFPP/4GoxW7/Y6H9jtm/0JrIbHx+PQDp2PGhYj2kS5LPnx4z9dPT3zzzVvOs8I0mb/7kz/heFxZ/R0ueDZXKYEurAz7jp3aUUrgdHrh3bt3SCnx3pOlZVtW2j5htSCthcVltKrxrZuDIoSFQuZw01OucaXLeeP24S0qn5B5IyfJ6w8vqNjRWIXRhhAKb97dMq3PrKvDCYmxHc+vzzS2RymNsi2XqWKffSyYJpD7xOk4M4z1tEIh+fkf/wk+1kl3ypHLemGZV4bdDcPuliwVv/30O7Sw2NbS2vba3ajEs2HoiTER3MYaZgZ7hxIKI3oKGqtXclY0YqRtBJfjE8OwQ9seLRtKEvz8T35CFonXS2KN1Xk1DAPWSAqCEArTNFNE7Z4W4XidZpJInKYjw9hx//CWkKnvj5vYXAISH755IPiNaX7Ge8v9/VteXi9clmMl0MnA49ff8nB4RzcYVAu5JGKckUrg1opUT5vknD7y7rvvyAGW04YPK29vHpiXmZxLdYN1PVGGil7PiV4PKG1ZNoc1Lfthj84JUZ5JQZC0wmpFDhuqDAy9pdGwXAL9oJleH9lioFw1ALLsaFVPTIbzElCtZNf1CJlx3vH4+MphvKG77eGasLjXf0KjTnz/IBkG2N147keB2sM0R+Y5sGwZaVrQlTCacmRox3ryKzzWGu7uHnBO8Pz6EWMUbdugtOTHH39kHDoOh55SEo+PrxjTVUR3iaz+QsqOnNo/iHhnl5h9xGeY48bsLrTKsm9ueTx+JcXEbj9i2h7BhlSZIgPd3lIuGe8yXdvx3U9vefP+jt/+9a9om5qGEEw8vLnl5XjiPF8oStEfbuj2trrCsq5+JiP5/OVjdVQay+oX/Axdt0PIgFCBsbvByMS2vuB8Yp4LSnfYJrL5hZASuUjmeSKXlaZtMLqhxNrllHhiSMjSY23hw7fv2c09l+nE85cnxhvLm5sHrO5QdFgLp+mI9xOiVOpwcIK//KefcKmmFVSjefP9e3x+rt6wqHl9fuXHHzzvPhikKhyPE+dz4f2HG3RKdDLx8F7w5fhEosdahbWayzzzfK7U0cYIDmpgv2vIOZJzRlDY7y3bMqOlRWtBkZltW5nWSCqFfmy5f/OG6XXFbb46JpUkxr8FoMU/5/GfA/+1EOK/AP4X4B9fv/6Pgf9KCPFXwAvwH/+LvpEU8P7dQ3VeZA0i0O8MbvOs2xnUilKSANzdHxh3lhIb1imRUkVQtl3HvHhQGdMViki8ngLuovnm+4zQG5vfmJaJLDLaNjStqsLNdca2O25ud+zGAbKk6zTjYURbiF5ymn6g2And7Dnsb/jmm295eXrhN7/8HXfvBg73B6TNbFuhqAvz/IjzgWHYI28PSA1FRUIpfH688P5hIncFJUCYwv3Y0uieJS24nNmPPZ8+PbPf9di2J8tYEc3tAaMLsiRiTDx+OVLUXLteUXJ3e4dSghATUir6bmBdHdZYdC2q4FLdVQrXPHcumbavH4UiPFIltDVYZZiX0x8oLoWEkB6BJgVYtsgvf/WJwUqSWEBUwp6Qhh8+FZQotI3m9vXMy+NESFuVzor6ukophBBwW414jMOA1ZqMImXPtAaECMRUXSQ5SYKvUQe/XYk3FEyr8b7ufCploRi+fHnmctoqUKPRCFlIaaWUiHeCkhcKgVwMMSuU1PRji6ROJDO1CJtCFa/6EFFF0TZV2Kk5YKkFSIFknhfau45SJNsaaBuF9xeUFpQMp9OZnBO7cSBlTwoRYwxGCbJQaKOqNyY5jkH/3gAAIABJREFU2ra7XheStjGcp6XGEYREIXA+k/AoJehsR0m1s+DX6sCQqUCwtej/eiRlEELx8nRGFUN0gpACPi1okyEFLvNrRb0e9pyPR5xfEAK01mzbBYIgx4oklbL6qIahoRSBd5GcI0IVlEpIWTDGMA53PD0dEUqgtUYUzcvzwjSvjGNP29fc+e5G44655qm1JRbJvHpk55Emg9QYLel3qXb6XJ2gtL3CJ4MQmkKFM9io8RkQiZQTPiiir5/npjOkXDi+LAhdsLZGUlOKlKxZlwhCMVpLIrFtEak0TWuwVpNk9YZoY2ik5cvjV3J2CB0rSthVwMJuHHEhsLoNHxNPz18AiVQZjaQxOy7lQowzbBIfA1Y37IYOIRKbB2s0jy9Hdl2FyYSwskwZJTRN26C1wbnM/rBnulTvH8UihcbFCzFJlLJ0bYMQhSIC3kXWNeFLAlmJa7uxRwjF15eVYZCcni/kRqIOdcG/2xv8esFdNkrw5NhwOp/Z3/Xc7A/YstAdWlKSzJfa7dN/ZyS4QiihRj5d5rbtCcuFHBNN0yM7y3ycMMaiVCHEma/nM6vP7DuNyrDOZ3wOxLOAzZO3Cd+eUbuWh+G+CmtFZN02QnLY1iCFYjWC3btb2lFim45cBKccmPOEL5GYS3XN5BpFRdVWi7WWtqmDOSWyhoUQRRWIi4JLkSxXyuWx6hc2T3QRUkffN+xuGw63LbaLfPr0yNjc0HdDdYTF7Rp5b2nbrlK8kiAUh8+BUM3QDOMtIWc6q5AoQo6YzlZCqyjkAsu6cry8sgXP6iNbXDjsc+1frpllmXlMr7QGXFjJM2RZQErmdeOXf/lbMhXG8Xz5ym9+98iw78mlniZ1Y08/dCA0QtZojTaCZTsTQiL
FgtGWzS3orfZz3OZQypBTopQNo1QlpF1W+r5FqOq6ih6mi0Ophr7TVzFuxMiC1lcvoITz5RmfJoSQKNmQcsSFlXG/o2lbNudYlg0lDUoZxFUR0jUNIiucd+Q4c397h6ZFSPAhExaPMQol63MgorRCqQoqzNRe0u5gSUWQokCkQsyRxvT03Q61QU4r0QesOKAGXRdgKfP27QNKNhg9IjEUIoebO37zu1+zuBWpazwwxlLfU13BMaMeEeg/RPBTyoz7Hc75KnM1FVW+LpWgqK/34C3W/gdFgtaQNX03IsRVvFvg5vaGh7e31RcXa2qi6wzGyFrKL5Km7dFtJLgVoql9JlOw2qD6fXVxaoN3Ed03FFFqP6gYSko0WiGFqB04Iei6hgS1553BGEHb1khjCRVAVUjYluraLAYpVpbLhV3fY5UkB49IinUtpLwRc8JKjRKW1hqkrhtKMjlKAqQn5MjsPeObgTVs10l0qbHf9czt7QNWN8Qk2dzC5gpKV2WN8xvLIqpCJrprWmcghkAI1X+UKbRtT4i1VytlrSJ07YgSEkQCaeiHHhcC8+rRTcvbm46nxy/4ElCtgQDLtoFtsU1DIrG4GR8zN/s9Aok0mSw9nz/9luPrEaUtWks2d0EXjbG2OsFy5nR+qXONAMEFwlaF1z5tqJCqG0u3HEZLa1qEAqGqt7PrEj5uCGp/EelRovafiki1/yYUoMlJkkSNwm6bpzEKtCBlz2XNWBFIOWCM5sO7b+jGGqnc8gWlNkLj6tyGWj3R3cjT6QteQDaJmB3no6dpZ0yjSNc5olJw/9BSkmRzUPJQ6X4lYVuDbVoEmd3uFqEE5MTmphqvlAVyIAaYZgE0ODdjm+u4HjN+q9ej0gpjFLKVzCqTcyDngNvqwirngrYCEPj1b1EiXEr5J8A/uf7+r4F//f/mORvwH/1/+b7GGN48vEUrSwhVereuK6/nJ3w+YtrMOHZYpbi77zncDJQ08Fcvn0F52lZhbMflsiEN6CaTcsQvkZha2r5BisI6B5Yl0PQVkmGMprG6Um6anr43NI2qxTYBxgp2uwHn4PX0I6HM9Lsdw3iLNj3kV5TQfPj2wP37ntfzzGnykDwprSgJvVUQLRFXSTkILhfB/sYRVUK2ir4zfHi4RSYLOSJV7f9MF8f793t2+6bir6eJxiq0VqQYWecLzq0UecFtEVLt+0iZ2bwjxoBSghg9bdOR/8YRUMC2GrdUOovShaHrSKmQk68niG2LlJKY1+spgyKXjG4iOdUiZnaZ1+OMuNHM20QqBW26GgMsAnLEaDhNE8sFsnAUylXWp0m54EMtjCoh2F/JhItzBLfho6tlZWOQyiKErCJRqYje1ZMHWReMIXikUiilmaeNp68nSq7CSWUkOSdiXOiaHhBM04KSgVwaUpJYA7YxkBPyKufMRVHiVfKXClFV4WHMGSkbKjQzorXkdNlwY0TJTAwJowWieLRShJyZ5+m6eGhJTpFTQamruFNpcimEbSXmSMwJKQRFgpB14LbGVihuzESfKk+jSLTWaGspYSOHgNYCjcLQsK6BzS9YYzFaczllWluqHd4ntpBoO4lSks2fWP3GrXzgPJ1JqZByISWIYUMkU6ENGnIRyOvr9i5dJ1R1gMolo42iaXRFu/tA01qUlogC29lRUv5DaVpIQRYrRReyFNXd0mg8E52NKKMgqdpfGnpKFoRYMEaTSyJ5Ab4gqDnqsRnZhCcTQEa2EMnZoqVGmir6jjHR2ipARBZygOxNPTXVCqMMfou4xdM2EiV19XukwBY2hkZhtOTLl2eUVXQtGCGIAoxuSanUWEmKlJyZLgtdNyCFRGmFEFU+HYPHu8LmEof9A1LWk6ciDNGD3yJ6HysQJUREtgyDpukaBIoYMlJLLpcNUTTGNChl6sRHJKQG3ShyDqzbgo+BWAqxSIwq7LoBKzXeZS5TxrQZ7yGSUCEzjld8/ZzZLo6SqkA6l0jTNoy7G4orvL9/4Mcfz6RLJmdJjJJUCrE4SvHIrJHa49xKFu2VvLVRuhZjLDll1m1m8xmlWqQyVyqeB1EhDOUKuQjZsXuj2OmBEhXbtlBItVeYK3bfWjjcDGThkLrKPlMJFL+QC7XjaS1FZZY4o0U9bUaCIKEkVQxdCiElKk30mmwontk5CuZ6v4zEtLLbW/Sg0DaxxjNrXNA0NE1LI9s/gBmKyDS9ohUNLtToESqjLAgUtmmrFkKWumkgDVhBjhHI1b/lHYsLxFxl5z4XlD0haAlrYPMzy3JmaOv9wZWCL5mYBdM0c75c0FYz7HvK10cej8/cqnqi8XR84cFItOoqJTJnlFEMw8jx9BHvIkpachKEEHFbqCLq8DdC6loQL0YjdMf5dEbLOhF2IeJ9IV4c+92IMgoRHFLkK2ymkhGlsUzLGZ9rxK2RmhjqIlhaUWE2IZFSxBqL1FcoUfA0ykCpkV/nJpxv6Ls7lK73S4Wg7wy5eGIoCGFobIdUpV6fQiBloWk0i4t/AFwIRO1lNhYhGnJq2NJG23QV9R48Mnj2tz3Ra5RsKFngfQWKTNvMFjeUsdjGUIIC6UCV2hWWdaM3x0LOdWOkaRqCm3HeE0Ko/rh0fT1CoUXDEqpY2lqL1g0pgzYKKetCpwD7/Q7dCtzZE3yksQ3GVNiFi5GCoOktqIZ1dWioUmhpqyBaVly7si1pm5FXNQNASZIYHdpqSqmiaGvq6UaOte8pcj29UkJer5lS53o+VEiTkuRiSP5C9pnkHFEUcl4Ja8Fvuq64lcJqW68577Ci+kO37YzM4OOKShGdJP14YDq/sMyZ4CXS1gVAyLECp5RmmSdi1HRDdYKubmaZFaXkiljPGaM7pJRV1H0FW/R9T1yX60uSCNR1PgDyOqY1pkqfvQs0bc/+MPDjl4+sbsMojTIWvwVKiPS7FkTGp0BKmXa0+ORJwrFFjz8txFRQpo7VlDpHrnMCTY6ZdVlr1f2q0Qg+VfiMrvO2lDNdY2hGUzuGFHKGzXna3lC8AgrKREJeicmg1NXoKwTaNhht6yYEmVwc5CpjBkXMnnCONPaMCytCKsb9nqaFxx9eEGSGvsGlTBESrVrqDddwnM9oe4uUCmKNyZZSPZshBYJzaKnpW02MVfiuhpamLcToKm1YaKZzRPcdps24ZWFdF7bV0fYGkavDLPiEkAUfA6ZVFWqRZY3BUisU9Vqs7y1SIohs60RKAmsb2qaCBdbpb4cW+P/ro21Hxu6WeXnBuQXbDPyz//WXCDMx3kaGvebduzcoOdRuQYjs9w2qqYuEpjcoaZi/TuzfGJQpKApiX5Ad3Izv8FtmyxdkHrm70Xz9EhDZ8HBzx7v7nxDKxvHlyDlsPNzfsk6RaXni4e17Ht7c88PHL8zrkbdvfoYsic+PP2Ct5N/5d/8t+ttI4MI5TJx/uCBmyffff8/Duwe+fJ1Zn19YxZmoE0kZ7sYDVmvW5QkjCh/e7vnZu+/53W8escVw6A2tNnzz9oZuZ+jGhlGMzBfPvL2QxUAogiluFDlXoMUGImfO5zOFFedmNh/wodANI7lk5nkGJF0/IIpjns
/UorClpJ6car9ESVmjcrLQDwcul4Vtc9jWcrgpbLOqES+psNrw9vvE1y8t65LJJXFZTjy8fce6TaQYSbpnWk+gAsootKxCy5AyRSqE1mirwSh01yJJyCJpbYeQGZury+RvIAFaG5aSIYJW9cj7Uo5oU3f+P395xvnI/e0busEgVeT0vJLLxof33yCK5ren37EsM32fMCZhjKcNI4de4pPD2J6+6djmyOnsubnf03QtW1rRTcf+5gDCEVKkl4qPHz2n45GuM1hTZaNj35FyzW+HGGi7wjoXyA1gK4raR5p+JMTAPF/IBY6XGX3d+ZUqkmKmaK43+DrhE1g2t2Ktp+3g7mEghkROGomktYKn1xdMU93opILklugdTVsHCx+gHwSnOUCJiPPG188X+tHS73bMbuEyO6yWtDQIK4kpEH25xoM2EBprLVIKEg5KYug6+nbg9bhQYiG5iBJg2shuZ9FqpOsGhNLMbuV3P3ykvbvFJ4lbE522RONBN6AsZIlUK6rcEmJAlMK4b/j8+ZnjyaG1pmstrRV8uPlA07/g0owL1e+z240kMmlbKPgaA1grKAQRcCu0ZuDuxmLbGvW4nFfymvA+IL1H5cjx+ASYSouikFNApUzxkiIa0C2leD59+pEiC9pomqahKAhrwugqml3WI32/YzpPLHMVmn/7buT15SuFRMqF6eLpbUujK4paZM3dTY/pM1JqYqqQnsfHV9o+XU8TBdbUWJMSAqkMUWQ25zk/zTSjoN1JRFHc7Qea1bKdE8vmaJoRMyiGbqx0JF1oFDx9PeLnUO+fbmO3H7j/P5l7kx1Lsyy9bp32b29nZt5FZBNZRRaJqhIlzTTQwyUgcSK9gZ5IIwkgQRJFqaqYUREZ3pjZ7f7u9BocY0oDqgEECWVDhwPuMLt277/3/r61vjtwen+kaQZew5XTcOSbu9Flg20b5vPEcHhXBbPWY7QlhDPNsEOVFhcsy/bM95+esKVKrG/3mV23p1cNSWuETBy6kfKyYT/tSMIQBYgkSCJyX56RUWFES9dZbsvCvDi0EPSDQYiCjxGrNoSoFwm1BqTQdGNH/7gj5I1vL1cemnc0tpIRRa4i56YZ6PseYzdiqLRJpQXaQkiJbQ0YpbAWdLsxjA1LTFzOd5Z1ph8brucar9v3I4fxEcVIUBt6SPU6PgnSAn1nQdWSufcedCGIUB8orWWOM/9xU5pjrpeW1hPnDWU6WrtjuieGMePSxOpvzH5CmQYRelTKrHHh5TIhowK5ERKsl41p6tmfHohJsSyedSt8/nyh6wKS+qDRtWPdIseENZK27Xj5uqFkC1kjpH57oFYs80LXNhjVQpG8vrzS9y0yCuZ1YVsjKieWxtaelck11ZFlJbL6jO0sqtcslwnJCqYuz3a7HSE50uooOdE29cIbi8CHiL8vNNYyjB1KRnyCl8sLpt1TsqAUWWOyfeF8PmPsWFMCuSOLRNcajFYoIQgx4H2FZhUUWmt++fKZd4/va8xKSWZxZ38aiAlM9jR5IeQJ1MDqZ9xWFSz96wui8eTiiDExDDu68YRqbsTkWZaVy3mhGwqJiDSWsT8RwoxtJDlFnAus08z7d98RfUBJQ2MbEAkfHF1bialFeNYw05sRKRNKZpQu/PzlJ25nj1H1ckHOeJ+Y55lcCqfHPfepItexEdVIhOjxWyC6G0a3DKLh0A9c1xeEFggpoARSCpDqcJ5KoFWalOrrRRRFDgaFYZvdm9co0dqW23WrcJum9rBvl0wJlpcvt3rl1475vnA6fGK/77Gt5T5npu2V5Zugb0f6tuNye0VZgy8LQmq65gF7Hjg/L/gQkEZweLIMY+R+v+Kcp207cioYLehaCyJzXxa8bzCtoCRJyRUo1XUdQgh8rFHJQQ5YW9MwpQhiKCyzq0kUqytRcE2oTiBzJocVt2RyiNymida2dcDVPWu4IdSu0huLYA4Xgl/Z8koMBaUs3z18h+1Wlm1DKHg6PeF8Yl03gs8UMrvWsISA1jW9IPDMy8Swb0mxVEiJrsCxZZtYN48PESEWnsR75untCmgCm5tYZ8uwq7TOjKHvWxrVoLUAPLf7hd3uCak6UhGsbsXfA6ZdEUajbcvrdKePhst14/E48nA4sNwmpnABRuZlw4fC7b7w8HgCDErueDg+8vh+JvtEmD3JbWjZM5097z7sGA8NQid+/PGMEIkU69Xxy893Hj4eOBlLThLnUn3G2x3ZXMG5QIihUpxbA1KTigIa5uXGuOsRUuB8Is4bQhhaa2oNY51Rasdu7BiGBkrCLZH/BE7iT1/q97///f+HY9P/s6//9l/+/ve6eeV8eePtS2jbHlFaohds20zT3vhX/9OF55cbzs8UNv75X/5TRBZopemGhtP7BpccDwdVpY+3yOPwW/wc0HScdgc+fTK4beLx8cRuGEgRQr5yO8+M7RMP+48Mds+/+9f/HtV4kJ6YZ0pJxKUiUHOoFLXhYeQWzszhzrfzC3/4w8+Er4bj7gnbFu73b/zbf/03/MVf/edI2aPFjsHu+Sf/7MCyndk3LQ/DCSv2fP5lodtDqw0NCqsXHt9XaeM0Ob59/oIoCymo2rFKHu89y3WhkTu0qASbn378hdNxBCGJoRB84eH4RMlU2p7bWNY7ykLTL/SjxdiO59c7v/71EykWgq+o8rgWTseu4pej4PF4hDxTfIPIYFRkPypcnAlzT94UJQR++M1HpvtKJCOsAl24zi9ElzDKYk3D/T6zGw8opdCqZu0Bvn37SkgRbS227ch45vlOSr5K66Su0ZK5onqPuz3n6ytKW3JJbGFmCxd+85uPrM4zTzP364IPnv2+ZVkWvN84HHecDo8YO1IoxFg3wW6+4xMsa+B2m0h+QUqD0KY6cNYrbkscj5Z+aOnakRQs9+nObl/7Bsa0XF4dftvYfGBZHPfbDakij6dPNM1IzvD88pXxcKLpR1LJrGHDaM3Y9Qxth1UGcuF0OjFtM9M2M7sZXwIxV2eEsYqQHc/PM+Pwnsvtwm1+oeiVZc6VtFUElIJUgsOhYZ42gpMYeUDoxOW24NZAKZJu6JFKEXyEXGitZdePTNMdF/ybDDsS/ErfG0pOOLcxTSvGdsSYIEtyEmSR+O7TgRgDyEI7NAz9HlHg9WXh85crt2nidOjIvmG7JNzFYbNHJsHzt4XnrxPTdaW3hqaBx6cTQz/y/OXOPN/RWqKkQArYDQ3/4r/8M1xZmbeVaQ103Xv8IjDS0tmORrX8h//lmUK9njbW8vh0ZFsjW7hTRKDrLTmveOd5fzzw/vGENJL764V980COmsu9lvqfHjqsbrGmo2ks92li3Rac29g2RwiVJOmdrwhiqXBx4fV5JoRCaxseTjuMyVzOC9Z0VZEgMk0z1pK2bDgdd7gwsW4J7wT328rXL98QpeNw7EixDpLL6rguE0VJQo6sbmNbPCXXRZRtFbZXSDmRnUPqgh0VzcGwhjPX5wW/Jiz193RbA09P73j/4Xv2++9JZuXwMHKfJl5fvnDcbyjrOM8vqCEwvhO8zH/g8el7TF9w0fHTL68o2XF3rsJrdpbZ/
0J0juvniZdv33i9f6ERAt0b3HXFnVe8K5iHJ2Z/Q3WFft8gzcDYDuS4YYSmUQM//vwLw8NQ4z5b5vmXC6fHd8Qi2TbPtnmeTh/4/PkZ0za0fU/XjFjb8/L8jJaWkgv35YoQoETP0B3o2o51u3I47KpuI9ZLe0qFHAtdq1C69mIeD99zvgXWLZByRKqKWe/6hqZrUEazhZVhVxcp9/uV++1SBxdtCdvGMt9pmoHHpyM+rKx+IsQZKVeuzwEQ6EZjTMt824gxUEqkENmNB5btK6YxSN0QsiQFhTUjPgVWvxDjinN3kKkiyUuLVZr77YrCklPBuYWH0wMxRJSqHbP9/oQUHcYklK6URi13fPr0xH5vsQZyCggy19eFEiVaGXYHxfFhz/7B0I2Gftdh+sgw1h5xFp6h01jdEgPkJEAotG0QUtTITk7c5zv7fkBR6b+l1MvKdF7ZHw5kkVjjQoie3/7uB15vX7hMr/iSGfcncoq8fLuwLoHTwxPH3ZHnr1dSqldKoyXTcianQowZH3K9sIXC6jdS8XSdpjMD21Y9OIKM9wupGJAZF1du05Vti/TDAW0EUicyjpfbM2uYUaqlsXtisHSt5Xy+8vx14nxeefrQ4raIETt6e2DoW67n1wqQyAlKjXl9//2nt55yJnhPt2/48OFEzpkQIta2rG4ixYwoCi2rOmKaAqoIGq1pjcHIHURD17b0vSWVxG74wDhojLEgLHqQnI49bs0si2N1E3/517/j5XkihIIArM2kLEAYjNFYqwguMHSPCAEpOtZlwrvI6fCenDLer2z+xn2qaogYIylGrmdIJbC6ComSUtI0R+7TQhQTog0c3+95/+HXJApbTNwWx4ZHtz0hKEqSKJEJS2adE7mAUgbbjlVXU8pbEiXycNpzODSsy8L59crLyxkpLe8+HJBCE2NBIIkh4reNQkZoze22MfQjIdTFp9KafuzYtoXGdpUmnO9c799QVuC85/V1wqojfX9EKMhyI+mZwSiK0MRUr8RDW7ut0+rJWWFky3y/UoxFN4puaBh3I857CrLqX3KlO3Z2QJYeJSxaSab1xuPTu4oICRHnbuQoiEWAaDF6x+E08NNPX5jua72ylgqh8c6RcqhLCw/RbWiha7R8cSQfGMb9/yGl4clRkaSnH3rapsdvidvLrXaZjCLlSBEFXzaU7mhty37seDo8osSGW1eKVxzGT3z67j0iJ1KM+BjYQuDxuxNfrz/xcnutaPV05vR0Ytfv2HUDp93A4/sThVrVUFIwrWdsbwlJ4nxmXhxCGWynkAYq4tOQS0SaQkwJvyUkNS2ldH22oEgKBqMaGtuglGZ1ia9/eP7l97///f/wn5pr/lEMV//df//f/P6f/uVb5MdLMhv/4r/4gVwizm2sq6eQWKbM7gCnJ8UwGl5f/lijV2+Xh+7wSr9TNMZyPDzyF3/xFwgRePfU8e59x/5o8U7RDpaHh49IbVnchDJ39uMnTvv3jO0ehWboR5RtiG++I6saHk8/kEug6Qy7w4FfPs8cTj3320QMme+/+453p0cedkNFloqFh3fvGexIO0hMX0hq5uc//IHf/fq3WDkgcosoPcJWM30uGakFDw/vaPoTt/OV+XYnOE+OBWt6pKyZaKMsl8+Sw9AQY2DbPEbuaBtbXRlZI0RD2/VsriLclFbYtsPYzIePH0kZrvcJpQ3rtjBNV5Z1wbnA7rBnPLSkVD1QQlXkaM4tyshKfjOSwZ44X2ay2Dg9GfpecbneKCWSUh1SSpDshxON6ZBSM4477rc7y7wgKQx9zzy9yfvGDmMVl/MN55YqD9YGomCZHM4XlJYMQxViUlSNI+VCiAEXFpSpA4JE0jYt+2PP/nB4e40FvK9W+93+xLo6brc7Ia94B/fF4UIEJdEd6NayuVgFyNT4SMyBeZ2rO2hJfPn8XP1TymJ0JQMejydyEThfM7tKtozDCfEmZW27nlzqm07wG9l7rFbYpmHd6v9BGk0RBSMF5FDfJL2u11klyFSBtWRECkHGvfnGDPvxwG4c2Q09bdOwzitat0z3hRACkFBCI3IhpVwFiXFDaY3VCi1qhDD4yBYd6u0S07Ud4zDUWFvwpJRAKOZ5QhuFUpJCrkN3WOtwJxQuFO7XlddvF+7zRi6R3hZOO839spGix8iELYEUBIf9yL4baKSl7wXdvrrD1m1jXa+sU0Srga7T7EbFcXjgv/qv/5J/82//jj/+/IwQmtPukV030LUCKUP9UBKG/jDSjYau13UjHhNCpT91tUAicLRN/Tfvy8I8BYZhBAtRe1zOUGq02FqFFIqUM9O8sblAzoKh3wGZ0+nAbtdhjOR8iWhdME2k6xRd0/P5l18qlrmzSAXTtJIF9H2PEDDNM99+uZGiIWdIqSCK4nc/fOB6ddzuW91khoxpOyIJnyrdUQhoG42SGUkleLkl155B9Exb3SB+/PAOESXCFEqfaMc9YXOEeSFtM62NRPnM/nSsXRGlGHTL6+1KVAXdt9i+wzQF6HHrRgwFqwcklsd3LZqZ5f6F632j1QcMFaVvB4NfDf6+EUtC9ZbxdCAHz7Le0LKhtSf65h0//vQLOQqsNrSN5O5nTN/QNR2tajBC8MOff8/Pf/zG7T4Tc2Y3jpzniRwD27SwXBe6dkQEXz84haDpLOuUESSaRtJahciG621mmm/14SNJQlrYjSfillhmR8rQtT2zWwhhJTqH26pbsF50My4uXOcXwlaY755tCZSSOJ5ObGslV2ndsmwebaio7qJp7UBrIWx3MoVEIWUP3KE0WDXSNiNNo3i9fkPrekmZlxud7Ql+Ifj6oNbaE7/++DvCllBSY6zi6+0r/W6kG0e0laAiQ18dQULIGqGSAqEdl8uVefLkJPnzP/8tmYn7/UZwgbZtMaZGvMfdyDB2hLTw8PSO23xlcY6YE0lMXK9XYo7klAjXHvHoAAAgAElEQVSbR2RBaxvatkKMgt8Ia6BEQcmy+gvrfqY+nPnI7brQ6p4i4OvlC5flwruPv6WxHSkHYqpwquP+iCqVxNe2mlg8X7++EpNC6kpTta1ASP+nB8UQPSFGFJpc6qIouUgJgSyqCkMAl8tEER1C1v8XqiLopZBI0aBVjzY7lnXlNi0gBdpocpT4cGfd7lijeXw8kvKN5GoEWYhMcDPTbWbsdzRNg37zqEHheruDkBwORxIJFzaCr9fUb88v1TkUA9sacEvA6Or6izGSE+SiEbJBW4npKuk1hMxueEeJEaELss3EtOHX6pzqB8swWv7+D1+Rovogqwg6En0hhDrIUCR+8eSQaRuD1gLvV6RQ5FywjaXtLD5OuDjRNi0pZObbiswKSaJvd+zHPbuxp2sNSkXavl5fXl8mcGMl4BqDagwxJa6vE22zo2kMIU+YriU6W59lwsrhONB2GiEijYWmlaz3zD/83UrX1J5s1+14fDoQQtV85ASn48j1uqB0JheB9xIhBKks1QsmJSkKdrsdx4c9JUem5c5aNv7hl3+ocu6cWTdPTLWAJlSF40itULlWGeqCMJHKxry5upixHX3boPsaZW4aSSmJ
l5dXfHBMt5l5urKsF3JyGNNVSXnw+K36waztERhEkSTvKVlVqEl2eLfgt1JdYLLSZ4ddFWSvbkFJi1ZdjfVKQc5VzC0RXM71mi4l9fq6rdi2r4CQUggusk0eEux2tjr0cqE/DbhlZlsrvO1yu7L5OrSmTH3OzJ6Pj+94frkyL5lUNJ6EkzfubmMNgcUvxJSRamXZ7sxrJT8ucUboxLquTJeJFBON0Yj45nDTmtPDns1PNVWUwLsEIpNTfIvlqvp3W0XwjnWti9J+f6rUZ+eZ1xWhFF/+/uv/6XD1jyIWmHLheHhCYghOUKRHCosxht3Y0w+GeV749CnRDoqmNwhp6PvE02nEmqbG3lIFX4jUkktHSpr9qUUkSNKRi8d2DdK+R+oOEe/1amI6RO7YloSwgcOgOewf8DdPEh5tJP1wQsjC4hONyLSDwGiDu280wtB2hs6OqIeECj2siewyjWnZtjMYgQsr93kmktHJom1HEPWiZHQlIPooKAjsIHl9nbndLoTosJ0hREVIklygvIlLq8gvo6Sh73ZkZaDEKj80Ah3LW5QyvBX3M8UXlsmh8CyrZ1kc46C5X1eMVDSqozEN+53FyAYlPdYKjvuObY10jaXt2rpZTInoBX3vcUUi+8TzdWFdMhH/p16MQZCLIAKilLerzlY7Q0bXHLmsLhcfNtJbdn+/P77psAEkokiapkOYOuhM96UWmxtLJf9blllC3tj1Q33QLwEhDUI2SJlRum42YpQYm2rKS2hK0YSo6AdJLAkXHTEWGlldEzkklCwUQaW0GYEWiuw9bdNgbQU8bJsn5UjKdeANPmIbw/HwCbclclkoRJp2R/SQi6fAm9ywgaQoqUqJyRK/hlr2VqZKTZPEyoISb8LJDE1j8WFGylzleVGirCDFQEmgpODh4UgpmqYZSMkjVZUsG92QEigh0VJCKqyLQ+taZvcukEKm62t5PKW6AaLkKsaUgpwkMXm6vsYic8kVPLFJxr4jF1hXRwoBbQytLghZGLr6ehayYEwVJIct0bYjuhF4X/BF0ba69jnKRko1U16SoT8MGJORsrAfd8zTUqXbWVYIh9+Q1lJSvUwmCcf3e5QRaAVaSkSxHEaJC4pCqh20xbHrewqFzVWnyn53QFmJMoBoSVrgXB0sS6rYc2M6lKi+OgSUrOlG86dMt9IVorHbD3h3I+dIePPymKbKIxMC0xqUkUAdqHNOHI4ncgTTSGJykCKiZJzzf4I7xJTAvb0v5PqwuOv3tG2LUpX6mFysjjSZkcrQtgIpJVpI+l2Pj4EkIts2sfpAJxRFFYKYkdIQtojVlqFvscWypUjX1o5inBWH43tsad4okhtzutF2ilELttnjF08nHwgxEEIVliMV0oKb69KAt9dP3gIlx7eehSfFCWsrkCAVwbJtSJMpItYokZU0Fqb7FbddSSmgtMW7mdFagttIFFAZ769vnqVMCNQom0hsfkMISWt7budXti0ilKDRAqUTLhhyVFAMUlqQgc3dcX6tUmEpsdJQ2npVjtsCVLBNzAkfakdxN+4h1j+rWX9JFhkXAyElZKkl620JSCwxK0hVbBxcZj8MGD1QsuDbt1dyiSzrRC611+fcxnyfgILWFk3HenNIoORCDpG+qRGxFGufJ7rMlDeUrvCa4BNT3ih6wTmPtS3HwwO7fYvzqjrPSJQEW1wYd10dBrQkFNjiDZeqmFvm6ri7z5GmEzSNRGpF07Z4V6XzRSSWbSIFeHp6xCfD5Z4QGRpjSDKTSUiVMFYQsyPkSKa+f93vZ8IWkCha3UHirR8qCClynWdu91eUaukajdCaUuoyo20MQmZCDJAty+ZRti72st8oSdVhyyqktvUCVzIxJQQZJRT73Z5lXqojp9F0bUvTStRSH8KDTygxc73NSJHQSiBIlCiQJHLYcEFSYpWh329TjU+SCaFwm1ZKgSLrBcD7hA9LFZ8WUFqjTSCGUDs9UdN39VIWQyKmQpK1LhFSJsQIOdX6i0y1S6U0RRdSzLiwYqV5+6xJTMuVUfXYrkVqjV8qEMm0tr5enKA1Xe2KqeqY8z5jdca5uTouu5bT8T2rmxAl4bfIfK+Yc20F2hiEKqzhyv6wJ6yO4DyEgvOaZH7CqLb+XBF0TYs+1L6REpJueMDIDnEU+KRwPlKz3LFGs00dVu/rHS0blLJ/Ss80rWBeIvnteSPGjWHYMQyFaV2ZLgu73YiStdevpKRtW1KObFvGbTXCGWSg60dSEogi0KJF5EJ0C5QWgUXKKvMVWUMRZJEJwLR5jn1NrwxdxzV43DIDlpxhWbY6DIS3wUw39XO+SJQo1f2aodWWbb1RSr1c+c1Xl52NCBVpG4ESDXrM9T1GCYyWLCkw9AdSefu8SJGSMpGA7iXWNEDDOgekBlR9rYSwIZTAbZ45OtZ74ePTO1CBkhUhSl5ez2xrIYWIoKBkIeQXYj7WhSGFZBZ++foLfgvkVEixsPlAIdLv9sQYWe4zVjU0emQYRkqWvDxfSUkRg0SiabsBpTWrXymxUqq1EoS4kHPBuboAs6pScvNbv1ap2g8vIhJSXbTkApna1QpuI3j3fzs9/aMYrkoW7HdPWK1Z5joF3y+FFARt19ENO4JrGR88tqsmaecLT+9PfPxwQgrNPHumzw4XEiVr3BrZ5gvffeorIGFeQBQOh0+krFm3gA8JpQyNeWS+gEgO0WcOuw5rhxr/kBWg0DQjt/XMtAZiNvRjpm8tl5czx4eBpu3xQdPtQYQ9PiXcFghhY1qvyGyZN8+yRA6nB6KTteQuwYcVksM0Fpdg9YklLHz+8o1pvmIbhe07wgLzEqrZWyokGttmXEgYY+jbligEQv5HE7hk3QLTOhFjLT+nmEhbwLlIcrda9BYgRvEWA+sYu4Gh7+ks6GJQCKQSjF3H7eXC6diw3x2wpmNeZ+a00I8DIkEoV74+rzW+mEoFHJSBRlfhM2QohWma0FrWK5WxNaKhFUWUiv9Ogcbs2Y8HpttEKQWpFG0jaboOlwJu80yrY11Wju+PSKUrTWaqWeeng0WIjPc15+02yKVeVrRqiUFxv98r+EPXmJh3kv2+ZwsT623GuYJQoprjKziQRMa/QRyyyqSQ6MeettMVWb4sb0PvyrIt+BAZho7D7onnb18IeUbbTNPvQdSsukCgVVsHnZDQoor5VK7fR5RA6VruJhesrX2InAvImg2+rje0rYNjydSz/lYH6tZanp6eWJbAbuwJUZFyqHJSbTEpQZH0TUvImWmbMNZgja3gkCKx1oKUOOdZppXGqrpZ0/qtHJ5pW4NU4GMmhIwIitJX+IR/G06HXU8igcy0RhNjxNiK8i+lUKRlGHdEFvABhMBYy22+Y/E12pEE1g4MY0NJAZELx9PIzz9+oaRE11hChBgWgqzY6whgNN2uRwmHERJVLH7WDK16G4gKStby9/64Z9k8m6u6hP7YEItHGo21htwY5sutZrezxIfA2BmUauiaKjQVAvp+YJpeQQTa3qLbTL8zpCRJvn54aNPW72Os8shud6AgIG2UGOs1+/jI/XrHthCCx7tADJmUXCV
7qUoU88HTNj0kQfLQHkeatkMZTworfl1RpYGYGfqBpmnIZYME/dBhvGVdN9ZlwvlItz8gOoGTd6ToWe8OOxi6vkOJHT1z3QTPiWnKtKcdQzPQNJrN3ZjCF3YfqtQxbxUe0vcnbtvPrMsGGLTtEDbiRcGWAs6zzpnOgJaV/uSWmS059ruWFCRhqzFnaTPISMoOkSVCJl5evlHygpb14jHfz4zasgRNshm0YPOvmL6FrdRlTinoRjNvCSUbhnbPPP1EkYWmMVibkTISU0f0GS0VpjEkBBRPjh6yRCtL17ZsFtb5TvIOhca2I8gKtRBSMQ4Hcsj/+3AlJIj6XpNS7a9GWdiWDVO62hsSIKQkJ11JhMIyz47X5xu797D5hVI0Smrm+5XbdcNYSd/X34Xnr5HxKMg54bfM4XTAbRMu34mpLt2cLAw7ENTITMn1EkZUnA49794dCdHhnUDKFlQkRs+6Oj59/AAoQgoUVTjfvxCSREpDpjCvkVwMuT7No4zBWMPr5Vw7PKpS1KxUPD52rFvkfptQUmGNJQhPkbIOEQomtyG0xJqW6D1ufqF4QCoaXaPybdNQRMSlwG2Z2fKdRiWQfe0lJsF09zS2xv1S3ACJWz2dqv+ukJqMIHlRH2p7w/G05/W61khRBqUUu92By+UVKdQbCbNBm1QBNgncGugGzzRf6dsqDF7nFSENUniC24iR2lXKmel+Q5sKwkmRt+uPIZOZ1ztug5g2pNBoaetlXV9rT5f6ms4loiUoYyq5tVXoHm5XT/QRSaFrFD7NRJnIAnKRFDRReEQSdfhLEaPqMNR2Ams0riiEDG+QLUmOgm7fMYVb/bzNkpI0WZZ6CfMtTdOyP5w43xrWqSYjvEsE6ehtSxYZnz2rP7N76FjcSsIjjUSbhkv8mb48YRkw6Eokfv/I7XJDSclxdyRuGX0IuGCRi3i7vLj67IRBFEsMZ04Ph7efTakQpzeQhdIGmQvLdmff/4r9QZNKBq40TaXyOleHi74b2NYJ7x05GQqCpAOHw4ltdZQksVIjZWJz1yqULhZV6VAUBFnUZyWPYNkCh64umZSSbHNmcyuUhERBlmxhpdU72qZBqcQ6L+QMRoJWgqwUgszibpTC2/ATSSKhQ6Qx0FqDkQ3axDf1QEErQSmJw+6JZbuxbhMx+iqQJpBzU2mT7YgPM5sLKAOlgFtXTNPico1iT3Piw8f3bHFFlJaUDM+fv4LcVW+p1ZhG4dw3nGtJqT6bdarw89ef2bV9HepCwDuPyIaHp4e6kFwSfdPR6Sc+PH5EqMz59bmChwL1/XvoKCSmL7+8PVskisjMy0opGrfVy6vs6zAm+I8eVlWXlCKRcnhb5lhSCqwpEEO9DMfw/9Jz9f/HV9Nqilr4el759stK9ILD7j3bllnWhdt9w9iByyt8+H7Hw+OemDZ+8/HPyR7WdWa7X8iu59sfN263b7Xf8thCPpC85X67USh0zUf+/u//Fbk4rB4Y+nfsu++53v6WX//mHdZq/vanF379fc8W75yvz5SXCEXhQ0ZEgzIWgmWaXtiiI5UHEEeU6vnVx3e8Xl54PX/ldpvYwivCPjKMPWMXUe3Ib55+xbqsOB8pMZGXwvUu0N/D+bLx+evET99eSHllP2i0qTLbl6+JLdxpmwNdWxGehwfBdEtsy0ZOhW4YKrWq7wipsM0r9/VM27YQK7EKE/j06UjJiRgthT3truHjuxM2W6AQ5cb1BfTjgJEt21b4X//9Z3bHgbZvyaLwejvz448/8/S9JpPZpplpviNjw/HUkmNH8JngwLaJ3bEnxMQ8rWjgz373W5xzbFs1x2dKxeo3A4O1tNZyvnyD2GBNQ9NWQapfZ85fX6slexwxtLRK1sL0daJxmX/2F+9AC0KUCDkgtObl+YYxVFoPkdNDy9/+7Y8IWTep/aBR6cS21CTbrpVss+ZoWjafKELSdA3TulJCYNsEm1Ao0UJ2lGklhVru/eHP/4wsJuRUICpkGXj59pltW9Bdj+4bklHkdMcvMzopGtkhlMbnDaMErTB0usXYniA8a5jYwkJvC8ru8WHDGMneDMhsWWyPc9Wuvms7fBRgmrf4oOCnf3jl3TuJaQMhKNat5R6fGeyJ4Grxv+lPhC3gZF0M5GzYt0e6Y48xqhbJW0HX90znjZyhZIUUhtOp4/X1TJax0r9Coesdzt1JWWKUxDbVPp+zQ4pCTD1G97QUXJxBFH71w0dy6lg3SbsPPJ4yi9/Q1OtFFoVxbzj95h1bfiZOC4No+e2vTvyP//O/Y3OObpQcrEaolmUJhJgrxUkZ1umO6UWN1qaEiIYpfqXIvjrKEvRNYjg8IGxELCslbigLJVq0bDCyoUiNeVQUJxDacPowMC9XpJrZDTvavkWZjRxXhNoTSiC6hWwdZ3euMdHuwL57AKVpu9rx8WmlSMF0M/zw6REf7rxcX5nWhRzvhKQJSVDykaf3j/zy/EfG3Z7xsGdJgXmeUcHTiMh+0Lx73xOzJK0LOTnkYFmXib3oeT8+sN/v+eX6E8Z8pDGwlA3vCm3eSCYgtCcITSqSXSuYX++UECg+83iShOK4PVtKEqgm87z8yK/+fMfzT45vX+5ct8ijNlzPPSEVhJ2J5YUtzvxy3mjbHe93lvv5wjTH6kijkH3i8Ycf6MsDy5yY50AsgmB1fW26QHEVJz3uNZfLnewLu7ZH2sTx4cRym5lvMze38e79pwqECY7kNlYSrdgTUyH4mfXlzPd/9Svc7DCqw8iGdV0YPjUgNxaf8KtgXTPvHiJJZ3IB7Vs+Dr9GpGfuayREQYqF8/INQak0QmHZjwdeLi9QAsY0CAUpVxeLVoqu0eRV0nV73PZCFhHVaLwElxcKkuzh5SVy6PcsVw/REWLi6d17vH7GSE1JEJNj8yslNnSHkf2podWJ7ZsnlzdYzX0C1aJLqF3UXHBFsesUz88brZEcD0d+86tf8+PnCZkKRkfm5Ss//f0L3hv6XUNjK7WtH0Ye9k98OX/ly+UzxUbCuiBlHaKUabD9nk8fT8zThF8d87bhpoU1nAk5kYvCqJ4PDydORqBWaHOLtC1zdshcgS6qbYnJ8HX6I3Zs6Ic98+oQhEqHU2B0XYRp3bNtgWXNlCToDwdaYdCyIboKMBrGI9P1Vim0MdPuLMOwZ5pWZhF4eDiSy5m0pYryF6BtTcXot96w1AafM5FEJONjIt4WXr+81ittTggBnR4QKuA3Q94KXkfsAL22zG4hZTieRkyfiGGjsw2N7jjfVvrDULUeKaGV5tvtlYdTxeiHkpBF4+4OpfYcjuZtaRX58fMXhv7I4XDieDyyzFfcVIdW3VqEUfzx+kfsIBFbA7HD2iP7h5Xzy0R2iV5IerPHF0kUHoqDJtE0mtv5glED47CnqJX9w0DcDKVYlDJM87ku2KJjWl/wJTD0J+6vd4Qw9KPk9XqhxzJvF7QRDMMJtxnWtdAMhmG0dJ1i/Umx/86iVUXXL4vjcDiw26W6ZFErF5ewNtHtDO3Y0u1almVmXWac8FhVe0HDXjGtV0KMCNnizo6uHavcOEdezxcu91
+Q4hNW7fnuvSSzsMwrKVpM0yCT5vL8gvcOrVu0sSTr8SHRNntCyWzzxoenA35b60KFtWpm1vBGYuTt6mPotOYyvTJvd6zpEKVh9/5Ift4QGxz3e37JC8PBYt7o77p55OIu9GakMZbOSH7++ZnjvgflCCmz0eAWxagGjI0gI9HPBCcpXmOMpjWCcZwZ+w4lMiIH7v6ZYXggZ12dbMnz+Hjk5bUCNZwvFDKtMNyvnqQ2hEo0veG8vhDPkr5RtKbCn3xOPH44cDwcGJuB26XBtpp5ipAknTiS5KX+LkQIEfq2hTLQxI4UHV227NuG6eq4jQHbG4Q9EuY7vUqkCM4run7H+4dH5mnjvs6sWwA6cvZo41Fmq+87fsd+bGl0vcCe5zN70zCLSKIeYWTIzPGKbluMbeH2f00L/EfSufqXv//r/+yf4LZEzDO/+d17piny9csNtwlOpyf++V+956cfzwRfiCERwsbzy48s8yu3+4XX653L9cI/fPkbrFWcjiOPTzuKgJz0G1IyMS03pJZ8+PQbjN1zvaz8hz/8oYIVtGFbPa8vZ37+6Q/c7zM5aYwZGMaW528btmlRWhCDo+0lX//oeH29cbtfQHmyC9y+nfFroO/2/OU/+Wv+/scrt6yIaqQd3rGpjnDJfPnyla/Pn3F+wnQdRUu+XW58/vaVz68/c9+u7PoOLRvcBs8vVwQNKUViWkAs5NiSYmAYDLt9y7Ylul7hPcyzZ90cu72tm852pGsalHT86odPdK2iH6pVfFlfKdtE3DaEzLS9wXR7oijsjnvG0wG0pOjC+Xbl9XLl9Xrl5fXM19vCZZrxOWGajrbdsayvLGul9SgDQmWO+wdKyni3MO4HltmxLg7vArkk+rHFB48okHzi9rogASlradv7wP2W0KZl2Bm6XQvK0naQZGFaA7lIPn58YP+4Y94KPtWLitKZ+0tgGCRNI4gp8u//5u94eHoPGOYlc7kG3n/Xk9kqMl4ahHHcXjY+fPjAw+OJFAsxvmBkxzo7lnnBmIpKlii0rpHJIgqXy5ll9hilOZ32IDzdqJA6sbmFb88vbEuG1KCkROnA4jdS1Oz7R8Z+j1aFIO5clwvT6qvjqji0FFglUVKSc0FKzXSZq4shgdINx4cjX799JeWEbgxtL0jLQmt7thB4ub/S74/1whUTBY1RLap4brdnciwY1XHcP7DfW76dv3G5X/HRE6InRnBrzadv84pQC+vqmO6RdYFPH5+YngV9o+lspXDdbndsKzBtj7UDUhnG8RH0Ssgby+Y4XyZQ4EMdWrqhRfo9a/qGL5rVWy5zpN1lbq93lIBukJwvL/zdj38kFzC2qeS6+YbWmsbW2KaSiuBnWtXSNwNN17HmFecmyAYtTZVMjz39viOkiDGKX//2HS+vG3/4hxcSheHYsqaVr9/OjK1m17Y0dNzXmdvkkDahu0rLu98CqgPkSgwzsSjcpOjtgXHY0Q8G2xqW7c4WV9bguVwm0rQxtpoUawxSikxIK0Jb9ocnfvvDn/F8+5ne7hi7HqtU9bwFQ1gjfdfy8H6PY+N+m9g34//G3Jss2ZGk6ZVHR1Mzu5O7w4EAIiKzmMyWzC6yuGC3SL8eFxRh9wO2SBeliqysTETA4cMdbdCZC/Uq6VWtEysXLCBw93uvqf7/952DwXJ8vVCiYDolag4ovXL/wSFF4PRy5u38xjkcm5zX9fS9Q1YItwXt4W9//x/IVfPt9Zm34z9wngIhXjC2iUG9F0y37whW3JCwQ+D07NEGlLtQ1ZnLS+D0TTHakc4kwvrKdNE83G2gCGqSWNuzhInidygxctjt+f3vfiYul7ZhmgJPX4+YXqP61LpEWPbbe0KYuF1nanE498juw2cMGVFii4RuO3a7O7biA9InRE6orhEnDZWxd+Ra+advzwzjluV6hVjYuS0PH+6Yi8dJOJie+80nnkPbmA7O0jvNy+U7nXbcbXb0pqPmQuc0WhusGdDaUonM6xGl3ovzQSBLj08e1IrSBXJhvgXuHnZt+1Oh1sTD3UfW5cy47fn0w0cKjTRojUUJRVwzyyIZx8LDo6PrFL/8+ch+3JNUwodISgI39uw2juPxyuk4kxbB73/8Pf/n//F/8Yc//Ad+8/Nv2Y1bPn78kfvDnhAy357eKLHSD9BvFMYIRC3kWljjK2tYiblwOp9xcmA3HDBCE/zK9eQROTFPZ4QU3H/4kawlp+VIkgXlJK4HfOEf/t8jT99mioQiZ07nM/Nt5nZdOJ4nallYVk8uhRIS9bZiB0Xf77Fd87uFmHk9HQmLxxTBvRv5d48/E3wglkymIo2lN3tEsli5Yey3SJGaB1FElJZYNSKBrpegYQ2R15eJEE9kGZi95/h24/h8xDiLsj1FSZYSeHk98+HhB8Zxi9SS0+UJ133ibv8J0ykWf8Qvns+ffuLD/Qd2uy0xBoToGPsDJVUu1wuXy41UDMFngk+E4Pny5WdyCU1RgqQzHUNveHj4BFRu0635fS4NsiJkINeFFDNaGKy1KK0ppYBQyORISyAuV4yqvL1MWNExWsNgYXCw3Y1kAsu6koJmv39kvkXO5yun05Fx2BBWCHFiCVfO05VcK3ePA1XW1m/VEJPl/nHEuIqPC1I4Hn9KbHca11sEhq9fv9M5i7UtIns+J6TdkUp532b55pm83KgZjGppi9PxGWscWgukSvgwcdjdk0LGdZbdfkAbeH09IaRqCosiub/fsy7xXwErWmniCim2CL5WmrAUpim0LUtvGcae82lingLb7Y67uwOyJozWDT8fIil4hk6z3+9wrqezXYNF7TqW+doAEqEgpSVmME5grHqPY3qu04kaBEo5tOsIcUUJQfAz67qgRc+8LgjRkPE5V8bhHmsHjHEtCqkNHz/e4foEslCrROue/eELyihCnrlcX9GM6GrJ6YKUgX7Yc7tmdgdNzp7pNiGFxjnFdF2hwG674dOXB+7uHzDWNCG0ypxPJ6zssUqgZUHWAecUd/d7ai68fHtmPw4M4671WUNuGol9T4wF71tkW8uB789HXr+fWG4rznVoo5munteX7zx/+0ZeEyToe0vf91jbc3y7si4XpBFIWUhrbGfc2hQMxjhSKu0iIgIVTy5rGzzhSVlQS8PzD8YR/1V1AGktXJ6uf92dq1Iq378/0bmOx4+PnI4tptVvNLUUfEj85c+v/PDDR15eTvzyl2c+/jCgjOftZWoED7K04DwAACAASURBVNdT1ML2zvKw73C6cDs98eXjT9x8ous0RUhuc6CEyHidWdfI8XIm1ci0Rn79NqNUpWSP9ys//fjb5hkqlXWuzHNEyAUfAiULOtPz7373M9M846MnhIW//PonlNTUWKgp86evXzlPF26XBazC9g5SxkZFjYmSm/F+u5es8w1iZGMsg91wd9gii2SZM9DKoTlXUg6kkilFIvMGrd9jVz62iE/OHM9HYmz55RzbzV/ISkkJUTW348J2vwUS6zKRAjx8+oKMCqUlSkuu68K6etbUMKMhrDw9/4pWGqMN1nZ8/vyJv//nvyC7CLUyr5K7jWaz7ZjeDwybT
Y+gcjm1XkJMkU5ajHV4nzC19Ut8mICK0fod9Z45HS9sdq4ZHmttwAQj8FFQisTYhqBthw7z3svx/PkvvyDUgNISoyspFCiqZdMzpCTYjvdoo/GpeR+EKTy/PuF6Q620S3zIbLdb1mmhpkTvFG9HsDrSG4cWI4Mz3KYJ17evhcj88vWVYdtkitZ0CFkYNgPTdAPZCtxKKFKGkivWKcZRs549m36LkoKUPCmteBJZaKROqHf/DaUVlBuzSZGl5+6wx940MSeQmpoF+80enyLLuiKBbVa8fj/i8VQdWeJCuXlM16NUh18XNlZzvzmQRfNyJVFIWaCFwYhKSpXXtzes3qElGC3oeolShr6nkR+zIc6RcWhRyTUEYvZ01iJKxWmH0YaQb2ilmFKhFI1WG0ynqQRWH4lJUlEs1xXhFGlOJB+wKEzS2HAgeM9LCFxvF7QzCNk6c/PaorBKF2rLc0ItSApWNajImlZu4cJu27cHCpWlzISSGYptqNeaOd9mtHV8+vIZqQvX5crz+bl5icLKOSRsvnKKN9RQ8QTCbUWKyIf7n4g1M8+ZZUooN5BjYFmW922eZl1XVn+h0BDutgg+P3zAWMFtXlmWhbv9gVwd2/2ecRhbL0YWzKhIYcb7hDIGt5H0Q48bOvrtQNaaGE6UqFivkeurp98NHB56tNMsHspLz/3Dju/hHyB7emk4v618+mmHU5IqM6Yf6YrjMO7JOTNNHc/fFY8/Htqql8Q03cgFnn+JONMEnUUmHreP+DVwXRZ8WRBuw49/s2W5BLwX5Gy5G3d8Puz51b+Q8oy2Bi03JCDExBQj13RDqoV1qVhn+cN//A3d1vD97SulSjrdeoElOUiJwXWMo6MayXU1VCUbvUp1nI6Ff/+7e7JY8WXB2kq/sSgUfvHE6yuDUoR1RWqH63qGzYHMxIftAz0VkTPX5cRxWrE1IriAkAyuZ7vbMs83Si50ncNqh7OC1Yf2ulQCKTRdN1CCgKrY7B239IwyFlGg+BbpXeYGoeg6i9Hg/UquiTUu3JYrWlbyKtESyIUSYLs19KanJFiDZ7/vEbbR8HyIpBwIJbL6SpVw/7Dnp09fsHh++fqVUkQreOfKZjMQ44U5TFQKVWakFAS/NM9P1Y3cVtrWNStB1xvuDiPLvFKrpFMDpZ+I8ch22GPMhrCuXNbvFAJ+LYS5YAbDcb6y6T9RpcSLBe9XNvsD/jYRY0ZqxTwtaNWhlGyibVVYloTqK0iBr4VbbECH3jic1mgl8LWyIojSII3FWku6TXz5/BktLPO88Mv3/0m/tYR3MmSIV4yZiUGTo6RIie4M0Sumc8AvTXvy448PCKVYpgAysdn1uE4hTW4d2lrJVfP2fGF09wxjj7Ij3/75iXWNWG0xSiGl5nw+st+PaGMYhx21OlLlnaJnUbJjunlSjdScoWZyuLC/61nmmWmeWJYVqwce7kfm9czluHA9Lzi7QZDJuVCRWNshTMCoDUoLsqhMt1c24wO5hOY2VA4lFIu/obVltx2IoTBNVz48bvn0w5ZaC9EXVl8QKoEOCFEYxwcmf2vurSooQRN9YvIJqmB7t+fzlx6Mf9dTCJQW3H/ooApyVVQkh33PkiOVldtUKEmwHRK9NRRgXSEGCF4iNgZrFbkGpvPELFY60+N6i7WaeZ7JVSBSxQjRPJcyMc23NlTXju1mSzah1b5zoErorOL+bv9O6xO8vLzx8eNDG+ZTCfE9IkhFisLQWw7bzftlZMGHjFKtE1RLQFaHlQKA23zCDBt610MVpLUgawIUsRYkASM67nZbjqcrtSaMVKSa6WxzfpYKtYCPM7vtQCmVnAVKGpa1RTipAlUyKQW8PmN7hbLt2fv87cz+oyYmRw6tB7bdCqRMONcsmuSAG+4QzJQakCbSj4bzOVBKRogKRbLZbNAUQvBE36TTW9exTlODjZnmz3x+Or77wgQ5Zk7HU9OllNKqE2thGOCw/8AwWowNvL6cSLV9plcBNXi0Mcy54E8XljnjpAECl9NKyhmlIBPbQLp2/+rupAiKUG0YT2W/6clkop9JGZwZmKaJKDLKDkgkMa7/5r3mr+ZyNS8zQoHJlnnJHO4Uw8YRg2f1C/UI203XEOO5sqyeTmSEtGQ0a0gsPqC1Q2oLoh2iBZKcMtVIMjD5RJwnOiXxsbBET9d1eJ/JOaN1QalM50aM2SKkpKZEyJ7bNIFqHawcK2v1/PTTD4RUmVbPfLthlEQgEUVArsRbZl4nlvVGERCsQVcIUqCFRVRINXGbFkKcSTHjOocxEll7csxk0WIy9b1giGiknjUlrPKN5gSAQAjBuhaojeTSAmECIQopJkrKaG1RoiPFlsWttU1OjR6RQrXybIjE0HKlt9sNhMCHtmlyXStng6AOjZhG8U34KSVKZWSFwVmKESgZub975HyayEUhlcO5nookxibsBEnwqZExc+sc1NKExko0t1BOmVIThUx6x6p2qmCM5TYvUJrfjFqZ54Wub+CTkpsAMpeElK59jyXRD937hTUTY6Lr5TucoQIVIQRWD4yDIy6FomDYd1jVk0LAWccw9AhV0KpRhFIulJKQMqPVwNB3LY9eYsN/qzbVUUpgpGqviZSRClIB172Xj30THmot8LEVKYVoCoBcNTFFVJXUKhuRME3sNhu0VVAEurOEsDJ0TY7qc6BQyEmwrpmoCtVWvA8Y1fDHSmpyjK3/JS1aSqQ2lFzwS0aW2mR8ayL5zGgFfa8wuvVFSikoW1BSUGND+rtONXx1qc0S32lSTGgh0VKwLIFsF+Ia0cLSbwY6q7lMz82RlloVOeSV0ViMrOQKUioG3RFkZQoFX0KjXCn5LqCtAG1KSqSzCmU0UjQQjRSClCI+VVIs2I1DACkl1uDRpbTpdkqsoYmdXX/HmiYy7dIWo6frXHt41dzKxn5lc9DkJEmxSW2RlhAmfHj3+OiKMxohE6l4fIiEEBBKICqIIlvJ1kCoS8vJ2/Y7l0bjhpG+79GlIKgUVYi0B6Usme1hi0IjlaS8d4M2xiLXAjXjnGa3dxzuelJMzD6QsmczJkpNGNlK6WJr2G4HIgs5ZHQ1dMJyfTsRwoJ+l2cabclRIUgoIaAmppui2zm00eSqGe3I7brgqyTQUbPkflchawSCGCqHux22c/TDSMiVVFN7QMh3v1uMzJeJuw8dJRa0VWx2A1llhDR0tsmi52WGCp1xWGObU0pFlNNk00Mu5CQJeaWaSD92RN/jy8RaPKHQaITZsN+OTHVFSgOyowiNX1c+734i+ZXJX7jNM+s10e0VpXhSCmzHj/S2wy83qmzTzhTbwYz8vjGUPVo32EtnDEZbpMiYziKkbB8IVFw/cF0audIaTa6ZnCLoJhrPJWO1QVXVTlW1IgUoJxs9L0QQhXE3MC8BoTS2M83T897L0bYJ6J01TG9X5qtv3rzUADkfwgEfr2QVwWSUaRsISYOn5JK5LStDl6G2zo5UjkQi5BUpbCOpWljXSE4ZasDnmXm9Yawj5JXsM9o5fPKoQVIVyCpwdkQZTaFF/aQWrFNmcxgRukmzhVGE7N9nKJVY
ClIpBO+9WSmIZK4+Uoyi1PYZoYUkEd/l0c0LBxDjgqSBkkrOhOqhGoTU/0pEVcZS1owShc6AtpIUM9kHhJSo4qgpUWuklkKJEb9UJBCTB9Gz2YyMg2H1CxvXnIHer+TiSUW3DnTfg1xZfKDG9jMWorL4FsGStM6X1Y4YCylO+HWh5Iwbe4xcOZ8yqw9oIxCs1JygtP6QQqCFpLyfMVJqXWIhoNZAQYO0xCTxcWLQ7fVaywxF07se27WM2uW0InXrcmulML2jHzpu80SVBRCssbZ+TAqt995ZzCAISfyrFJr6Lpkv7bNVCoPtXIvwroEUBSW9C2xzRmqLkBapLaXqNpClPSNTrGSTMdZSqyDGQqmtu5drRpR2SblcG2RIoN8x7IWxH/A+IGV7zisl8bF1gVPOxBDQZovTGh88OUe0UixhoVBRnUF3Bqnfu1VUlARtNLfjTC0Gqx3aSLKKKNfeiykUYlnRRgCGKt+J0qWgtURqiVYdVjWvlpaGKtvlqpSKVJBrICaPQOBce/+nAFbJ9txXiphWqjSULMm5gZhiWUAKpDaQVkxXSLGghMU5S1gCMUX6wVGEoNTMbZ6bKDy9f3bUyn7jmp8vN9myM5JSEutSMVLT27YMuF4mnBvoOk0V4NfIsraNkpQCgcJ0EmMMSipSqkxz64oV3Z4RqlVWmWNgDRkfMkYDNaOaS74pSXTHEiLUgkBCaUmFUnxTPZTWR6tJIeqCAqxWpPD+fsgNwpdy/jfvNX8Vl6v6jmCc5pV5rQjhMM40X04tpOmGEh3H0wmhNJvthmk5IbTi4fET8xz5/vzM5ZZQ1nG7KDpj6OyeOQrmKJu/qSYut5m0XpHEliEtmkGPBN8K7ohMTCuPDz9zmySVDDIjbGWapnYBNBpqKw/P/pnbPHM6nTieTzw+PnI9XZBSo5UhpoJfZ0gFWSpybSVyLxZqaZuHUjJPz0dqLe3N0jmsszz9srLZvh/Oc2qxD9t6UzkrggczrPigsNZgjCGGyDJVhs2GKhJrmNHGEqNnXSI1C7bDwLjZcDwfybQVsVISHxJaiPcD5kKtGasM6xpY/Mq0zAxD3/oCtRJ95Pv37zg1kPKKNhI3jlglWG6RwW1QUnK5PfPlxz8ilWBZLLWCGxyny4VcIjFUyioIa6XvFWFpDo9Yz2w3d2htSLGQYibESDf+y5ugUmWmFMv1fIHcsKKd6ehMh1IFUQslqeZgYEKbPcYYhPRYA1p1lJiam2PsuD/cc7u27k/fGwazx6gFgsUqR+8Gtps7fv3lmcO9ZrMznE4rrtOt1D1nck788GVLzY7NpgMy5+sN8T6llkJQS6aKyDh02JJbAfTmOdztUTJxOV6hCu4e7ihhZppnrNVYY/BegMpI1R4eOVfm2xUlM0iNdqZNkd5eOAxbBmnQRYKIhCCouCYtxZNCZvNwQChDSfVdZgpxzdhO01lDjoV1WihlJYeVsAb6zrHfthy8EIrpCjFeMX2hiNqiOsWQqYSSESg6MyD1Slp1o/HkyHrzWI6EJbDb3XN/9wDA28szokRKTYS5gpMY19EHhYyZgmDoOxbn8UiKtLidYr6u7cGlNUZrTsdKDB5rm4dKKYlfBbm2v2+XRtckpiKy1owvmfKO451CYo0JKRSHu46vv/4zVTVnV+/6tjc0Bqs6eqEp85FKQQrXlAkqcpszUwgN+ZplI//tHJWAIONDA24Mfc9tjcTUiuPH6RVhIqbrGbsNx6dXxrstWndYbVHJU2PCG0UqUJIklUSn2oHO3wI+zoxjZKcVXie6UfCx33D3aY/sKqe3lWleQVbeTq0r51THYEc+/r5HDorL05n1GhjkgLGVf/z7/05Wkmo04zji10AKHWPv2G5GlunGWiOP95/ZH3qm+dwgDKWg1ICRI+fbyrpZ2GzusFqzzpW7D1umlHCHHUkrXl6f0WJBCI1QbYKbckPNKwmqaqZpZUpXnOvp9Uj2heeX7++yxy2S5mYpecL0HUW1/+8ablS78Hr+xtZsuN/u+Pp85HjxBC852APjtsdUR5gyuQpSbsRLvwZ23ZbnKXKemtuKVTJ+2ePTRF4WjBmgCrQ1iJoQqnC7eXASv65kAv04YnTHul7pnGO0jqenJ4YPe3xaKNmjBWzHDefrCla04UrOTRNhOrTtmhNQdTinyLLBZ4yVJFmQymI73TbMveP5+J2Hhw3DtmuIdsAI6AZJKYLvT2cuL2eSa2V7iWiQndQxrzPVBkwH1lqmY2S7a/GcZY1tUIlrMI4sMLLn9fyCkgUjFJkGS0hVcrpeqfVG0RkfK7vxjqyOBLE2X4/rSdKDKhil2N9/5O38go8Nad2pSixgnCGL9rqwtsOHQkJQc6HEwqZ3LHEm1CYQLTkgxQa9sZToqSmiaodQlfPllWlKHE83xq1gma8M/WOj+obEvCSGweCcQyo4XU+M4w5bBKMrICUptqEkNSOLbpv2tX2eUAo5JOZr5DdfHil5JUbBdjPy8eMD03QjprYdut2OCFsJeUHZke044t/jdGX2hBBJORKqwlDQ2tC7gcPugefnJ0oOTW2hFPd3d9xOf8HPhZgkfa8pLPi10tseqw21ZkQaielfIqMVN2zwPoBKCKXJNCVJrrYBL0qh5JXd+HMbwK0BRGi+qz4TgsCaDfvDHVUWfFDvw7HK9dqeLwL17j5aON0iVTSsvy6adaosSwYRm8hZCEpWKJnIsbZo2mCodWaaK2p4YBw37A4dx9sVJKTcKIu1tEROrRnvCz4IqjAY1yiGuWZihueXJ3abjzg7NlDa9cqPn35C1nZwt9YghGKe3lAWlGo+tuDbwCYX2eiDVnBdb22jHwNiXSkYpDVtsKgN2kqmySNKh7aOzbZn1Jo5eKy2yJJIutFhjWhaAKqmpOYPHMaevjdYIbisE1SDUQ2lHkpg3DpSuhJTE7kPY9fUG1OEzuAGRz/seX79lfW2EnzBL4lhsJyvrzg34EYJIbXnlK9YrXDWoLsWad7f78lYrvOZX7694HpF8JkUC0JWeqdZ50bUtcax2XVcL2dAgYFOK/zSKh+2e/8hi7apjHmlFtHI4VJTdeB6mZhukKtnXQWFhZQiUisO/aZVFcoKQmOdxd8iziru7zb4EDm+nOn1hhiuaCFRVZB9YffgmOYbOcR2XpQOiUWrBWTGaonRClHauTPm3C5m/8afv4rO1X/7f/7rf7l76IhRonTP7r4n5sLx7YIQ8PPffOTt9YLqCiGtTNPE6eiBDp9a6fDl9YzSA92gSdfCckxcp0xSipAWBAvVT1y+nXi8e0Q5zXWZeXk70nUdnz9/ptbEsizMy8ow3PF6unCdT0xLWzFKucPajugr0y1y/2GLlh2vxyvLmuj7PZfTyrevT6TYMszT7EklMQwbeuvolMFah1UbKJpaJcZ0DP2OzXDXLi4yU8WE0mCNI+fKui5UAZvNyLouxJDp3R43/v+2PxV011Dht/lICDNaGZw5tM1CrOQYmdcL3759Yw0LsXpCbQLL+/0j1+uFy+3CtC6IDJthw/F4YfWBL19+4nZbmGdPKaDeiTkltK8FihTbNKy
3dwQfWfyEdpJc4TrfqLVtqb5/f2kPu5JZ/crldEFLw2Fzz+oDS1hQRjGOe7QVDdVeBdvNAyl7UokoVXFG8f3tQiGzGTp226H9rLcbQpqbuK4AwtA71SKdJXHYt1iVNQ5RK0oUavV0tUOi2PRb7vePfPn0ieenV3bbO4zVnC6vmM4x+wXrFBXB81Pg0+ct272mHyzjsOGnnz9Scsb7hRA9Sgu2u5F1Xck5UkqilMg8t46VQCOk4HKdeHk+tZW567CdRBjIOSOKJKyVy+XCDz8+knLrH9ZUMFowpwXdd3S9RSl4fn1j57aIKkm+skyZjdvzejszxwVhFD/c70lK8fSXF95+PaKyYHc3EkLAuebQOV+uXOYTPheqVHRdz4fHLbVaQsjvcJaJfjOi1YacDalIhFFoZTFSIQtkH1tEdexRVpJSaSSk5YrEcn/Ys9v3fH/5FaUMiEYzBIPddRQVCEuLKGIt3X6D6APbR8v9px2dGpC1bc5az8qR3uECm+2W/X7Pbn/g7e3M+XaCHBmMJlOwvePqr/i0ImXl9ekVqRUhCyqWYRggB7RuD1OpG63NKMO4HTGdJWRIQhOuClN6BmvYHtrhMZUFUys7N/CHP/6u0UtrbV1QEkpkoL5fVCK9LUzxyrSsrKEQYgV880i9XyKm05nX8xM1a9bLjRw8n3/4yLQGQsosJbHUyFLb4eThvgmsz+cj9x/v+ac/fcevFdd1bHcFY3qoik45eucwPTw/vTQxbFGMdwMv6Y2X5YbPlVo01o5cTk+UsiBFxWjH/X7EOcHf/ac/8PNvfiDmib//xz/z088/Y41GhMRD/8Cgtvzw8bd0bsccIm/TiU6PXK8Tt9uEqIoff/qRt+Mrt9uVUgtfvvyEkpJN35N84vn7GW0Mu/2AEoLkI9frwrIsbAaHrJqwwtv1xLeXE+tyxqjAfqdYr4G3f7yylTt63bMsNzb9yF0/cD9s6FB8/aevfNju+bA9sHEOSWWnO+a4MEVPTAK84X/7w2cm7ylJ0OkeM27JWfJyemNNC7qD6yUzdDv6ztBZQ4zwdnzh85cP7MaRmgrHtze6zcg8e5LPaARKSm6zxxqNkZIcAyllOjegrQMMYansHxSlKmrVaK24LhlJ4v5uYLsZOF0CCcu//91PdL0i5cBvv/yWZZqI2oNqna9fvj5x/9ttm6SnjA+eT1/uOZ7fmPNMwOPXTC2S3f1I15sGa3BtK6Vlx+i2DL3Fh6mR7mplXVv0zweP1CCNQihLSIrODRhj6ayl1ozsKnS04UxsInbT1fZ8UQa7PSBkQtpIzJUUBPUWUQKcciQfWeYb23Hgx58/tR6a96zTlS2OGhcO/ZaPu484OXI8fSfWwLzO3JYbWsHDw9/w+OEHtpuBUmaeX3+l7w+kVFnmmeAj280e2UHIiet5RZSeh8c9sXqWuLIGwW9/8xtkLXTS0mlHqpmfP39GmiYxpghyyPh8eicTQigXXl4vLO/EYe8vnM8vpALWdhglySWwElAGOjeScuWf//RPdHYgpXbJ2W+3fHz4xJ/+xy+YTtCPCiErx+PC4EaEkG1ImcD75lcyuqMfBh4e94QQ2ex7+t5SxMptfWKz61CiBdLHsWedBat/T96UhZgmcp0wasvQ79htOiieaZ4opfmiUtRN+ZAV6R0jjwz4BM/PF87XK8oWfvzpM9aMTMvMZTpTpeDLD59x3YhzHUoL5vmKn3qC9yhVeXi84/6DodZASOkdVgHaNref+JftDhU7NEiV1JoQ24HZmV2DXgkwskJUzNcZSsUYQ66BH758RCpJiA29/sc//u+cTmeWxRN8oBBYpqm9rq0DoQghIJVql89Oo21l28NmL5FdINSFGBQ5FEQWdMaw240sa0YIjdYSrSrUSNWF87KihMIKjZ8nQvF0pjL0mnHsiXFmt9sihQHaVlKaynVZiCKzSrh5WFaPEBmtI30XSSucv4PW4IaMdZXkO8btgDHv/47qGA4WTMWnwOoLWvVM8wmQKG1Q6n249I6abxLmwHRdm2dPSwSRp+dvmL5ne9jSj22Tf5nWJpfOiUpmtxmYrjM5pyaBpmI7x+oDVgk2nWWjHMfXE0KJFqn0hcvrib/7uz+y3R+wzoESfHt+Y/fwAWN7Qiy8vVyI2bPdd+hOI5TiYfdA1fDyfOZ0nAlrZL+/R0jbNpsycXfY8vyn41+3RPi//d//9b/8/m+/gNIsPryDCy6s68w8X3l5fmZdPK7fcj7fOB6Pzdw+3/D+RkwL0lQ2my3/83/8mcKE28P+04636ca8tpJfIVNE5fHLI0WpdilJkv12h5/XNuEskio0ISfmecKH5jpS9Nzf71EKUo74deX5+ysvLydSqmhj0ca0SWbwbZMkFfPqETqRQyDmQBCRJU/cH1ppFST77Qf6vsfp5m0QpSIqDN2GDCxhZfEzrht4enolpvBeSGyrbWNdw5gDIcDw7rUxVlOJXM9Hgs9Y3Qg7S1hRCIb9liLam8PqnpIE8zIRUqIWSX73Yiit6bqeWmFZYotmlkJMuZVX/cptCvg1AYJSJTk1BP4cPEvInKa1TVxCYJ4XUkxMkyfltjUbB4cSkvUSEbqyOfTcP3xiXuP7ZbJleaWQVNFW3p3rGMeBS5zZH+5wnUUIWHNAGEsVoFTzaGkp0Rqij4TVE8OM0yPRJ7Zjz+PDjt1moKPjw/2e/W6LEppfvv5K7waMhSpWYl3QVnO5tljhft/z8NgjhCXnghC0Iuf5jNaVlCFnKCWTU24xghTJJaGNRBqNUpbgPdPtAqoNDA53G0wneDu/8PJ2REvbyE5K0HWVVNvF1EiFM20buKTMbn+g6xx+Xklese/vUMKSM8y3wDBa+rsONxhKzSyXF87PE+WyoGJGUdHOcv/wkVQKb5c3lrIy5Uy/2dFtRrrR0A0JfzPstpqHR83DI7y9ZkLKCKHet4MZN2o6Z9G2IbJF6bl72LCunut1YokLIVWWtSKtQRh4eXvi/vEe13eNnhgyKwukFql1nWIzDPzydcaHlVIkJUvm2w1JE2XWIqEqhr5nt9vSu4EQMl+//srlemQ73NObAQl4Zv7860wRppGWlIS1YLY7ut7heovGYFVl3DpiLlxv6zs8ILapZyrk0uhqp1+f6aRi6BXKzCjTctyDHtj3d2w2I2HOeO/b6zonVr9QCFSR0Fri7BatNDXl9ns3rRx9Oc+cThfejm+cryeiqLjNyDC09/vx/MKwM9QSkRWMNNhqEUtGCk1FoawDblzfJqzUbFzPbnOHrIYcFnIO+BQ5+kQsHU479oNh6yq//PKVvm+0Uqg8P11wXc9mPFCq5jYvDG7g9Xvk6y+v/OXrX/Drme34kdenI7fjmbx6FJXHxx/49vTGsgYO+wN3h5GP4yNOWYZuYDPe8/J9pcjAZntgt30k+sp8PjJfrqQQcdaCajGVGjPJB263mQ/3n1GlUPNKrTNVJ6KxFBRhFqzHzLYf2D86hnHP5vDAb/72d8hee2a47gAAIABJREFU8nh4pFc9JRRWZn74/W94OU9cpiuw4ISmjpLLO/FuOw50g+B0fGu4Zq
M5X2+4vkdqR62C2zwx9lseHx+Y/Y1pmTHa0XUCpcAvhfXWHtqyi4Q1I4rG6aFJsnf7d0x4xtgt0gaOp5WSFVoZjqfvCA2r9y3mZ3q0GZEi4bShlsLr7Tu2N3zY/dB0Cv7K3/z8W97eXkhS0A3vlNeystlv0VrQOcXdh3uyvDBsDMYZqtRsxnvG0RFqE2r6NSGMxl8uHMaR++1IzZ7X12c60yOyIXpIMXJ3v6Om1oPNBZSBvnfE0HrDSkVMV8hBkGMh58zqC9Y5hCygBEl0bDaalD2xtH5HLzp66wgxs2ZPUJnSVW7XI0Z1KG1Y/EK/NRhnuZw90znzh9//J7xf8GWlUOhcx+awx3aWl+cnnr5943b12H4kxsLgDNttjw+Rp6dvTJeEkT3393eEWOmsY+hHtLJcT1c+f3lES0UukGtlt9/i1xuC3JDtsuPjxw+s69zgNdk3obnekotsctVSGAbD6fjGbndAScs8eVQvMdri58Q8LVADskru9wfu7w501vD3/99/Z9hYQLD6xPm20tmekkrzTKq20bm7O+BMQtSVGCem6YSke++UgTE9UgiIgu24Ybfdtu28idw/jAiZuN2upNwost5HJJWx6zm+Tkhh8Utm9ZnuPZaqtcQahRCC57eX5h6iybdLgtPxyDBmbF/QtvnIQgqoLpFKYJkD81nz5cseZ98j6iXx7es3lLK0T94W/+z7gRAjxnZst1tO50uLboqBHAW3y+W9huDo7MDoRlw30inHZhzYbDqsg1pXbvMVJTuGYctud8BayTyfkUo2aXNJWKXQ0lAS+DUQY8KYdi4UObPeblyOHuMcxjm6fmS/uWPXG+LiSWEFkdjt7oGC0RKrNEY6TssF0ymclRiZqRQ6t8Fomi9Ma5CK+eKxZgNV88uvT5wvV2x3QFuDMpV+6OgdkAxGbNi4A8ZmYoVu6BDK4JeEMrJBxdLEEuZ2Ic6CXBMpp1bRMBaJxTnXum4lkIImxMB7Ahe/3rieF0LwKCPY7wdOlytFmeaMKxFRBT5krNEoUVGysNv3bMaRD48bxk3XovipbSFLLdTSNmU+JlIVLFNgua2MrmM7Wn799Zm31xO1FA6HsUWoU4FQMFQedvuGo4+gRcfjlw/c1qktP5RFC8swbuk3jlojkNhuR379h5e/bqBFpU0RECBVfc+3ivdDkiAhiPFEegYhNbvtgZQDq/coOVBJ5LyQ0o3tsGEcHUY74iLYmhE9aIRaKaVQGXg9TcTU8v6HzR4jLMe315bn1AoEnM+ndnjVFmd79ts7ck6sS2aeV5b1hpAd0DPPhdXP2H4lyoxzqskQo6eikKVSaV2YmguhJm7TTD86lOpIueDnRGcrlUgQK9fp1grPqjQBac3vvY8eqSpKgRCC4AsxzW07UwXOOUJILW+b2yS8yWcdUrZX+GF3R02xRRJroSIwxjHNEzl7JBItNT4mbN/ypZWCEG2SKaVECY2g4tf4Hknomgy2FoQQ752X1hkohbZRUIYUEn5ZiSGy+oi2Gucs2ih8iK3oOYxop5jXiZCvGGXQukfL5jhLArIoSFVbz0rz/r1USm6bsBRocsScoVaMrBitUAqkqHRaMLo2qY+hbReGcUftA9N1wcRC13coDdrkdjFOHp8CpjOMuw4hwQdPZyzreqPQTN/texdMS3P45FoIfm3izqIb6agk5hDQxiBrpGSa6LNXFK1Z84pPDbxhtCWmFWEMRmt6qbneJoZhRAhBjA01roWFLFpR+Da3S7psrxOjVbPXExE1I6pElY51kVjtUKNtXZuxpxTB8eVMSDMhL0gDO9OjhSSuiXkJTDnyn//j79hsjyBfmJYr9x8UIdrmaMmS83VGaZjSjZKaELkzilrlO1lRYTpNIZFixOeFkC39OLL4hhTPKWGdJiSFrJYUF3JNiNxxGBxmGMhFsJwTRV6IEZTo3r0m0HUaISQppXcQTURKgJa3FlRS9Gx3n5CmtAmehIe7e6JRKA1KtPeWUgrjNCpJylRZlpntIIkhUavAKoWsibs7xeAMQ99TtcCQiaWihG4x2be2ham5/UxMZ0EYbFdRuTlfTqeJw/2BUNuDSQmIqRWcq1zQRqGNxfvAx36DSLBmDaZSlKXrLLVIgq9M88JgKlO8okXX0MtCMnY9WhmM1K1zmgoxibZB6DvW68RoHdtNK1znsrA/3KPM2ErNSvLp8X9R9yY7li1pet2y3nZzzvEmIm7cmz1JCAJJCIQEPYVGeiMNCL2bJmpBAoKqsjKZlXkzGnc/3W6s18A8a6YalyIQM/doPPycbfb/37fWAWM23FwQWna5cis8fhzZ05UEaPUBUSHsa++paMuaIAnJ+f6G0ZZpGgi3G2tNlAIpN9YtMAyatI60qHoHL0VmO6K9oVSIsZBCxukOdjFS8ng8crl+4zcff0JpwRYD6Wsh64VQEoMaOMwP6KkyPx8RqyK3iBYjb+crPz587h6oknEHz3m/gAGPwwtJbJW83TFaYWeL0pnzZcc5Q9OS8l5OTzW9b84FsnUq63V5I+aIlBpr+/tZraE7xRaY5ol9WxCt9S2pU7jBEWNi33dCKBxPB4RWGCOxRmOtZhgVUnhqCaSSaUJzmD5Q9oqoglIyUggOs6GEnX3ZuN8DX16/s6dMrGCc5fBw5PnDRq0GP3m0FeyxggKrJS1pajKM00Qte6eTNoHzltIknz58ZHIDZNhvmcE/gOiDR2c0OQWs9hQtkSKibGPdK8fhxNbuLLkLUnM03eHX6G4fNAJDq41SMkkEsoLL5Ya1A7MfmcXMmnfWurPnjUJikpZCYU0R2QzjdGQjk9ed2gRWG/7y8heKkAz+iFKBkCLSKZY1kWpEmIL3julhptWCRBD3yH5vuEFzNB1AdL+/UoXittTe30wZrSyv368Yq9hSZAkLWlqsFe+OqYK0fehI05TWhdkhJsb5offHa0KqinOOnF9YlrVDjZpCN4EoGtUKRlS0FRjV+1ilFCq8d3w1uQaaUEzzAw+niVZ3jOr1i33NzPOBtL12H1IuXSZMeu9jCUTLxLAwec+yXtm3HSkcx1N3M8a9koLonW6lsYNiGLvsN6WCNDBPI0NrXO8BYz2n04F9j7ydLzQUtWacAy2gJVAYamm9c6Mcx4NBqo7+r7K7oD58mvB2RBiQNGLccXom7v1SoFW/vJXCP/Wva+ngrPUeOmlVeo7TgT0seOMZ/Yizln1beDg+Ut+7SymDtSe8lj3mSkZIzf0eMHoGejJG6yO3nLHWYxAoU8k1YrTpwtqcyLFg7YEYG6VlrJMcTt17SGkYaZnsQE0gq6aUjVh6JBIK3o7UUrlvGyElfjw8QVPkXAmh4KeZmiS1VFotPBxm9hyw1lFrJN8TyxZQZkciQDYqGjfMjKeld9sBY0bsULneKrlqlOmd8bgVJKVrJOaJw3jg7XIlxkSsqQu5Q0aq/nvTKq0ZxlH1eo1V3W0nescut0KMkVIr3mru9ws5923i7RI5HEZyLqRUWO+NlOF0nAile1H3DAVFTr3fqJVgGMZ3/kIfPoRY+fDhhBIaabqn85c/fuByvXBZVowfm
Y8HUilctwVpC4eTRTfLdPAUERCi0apgXcI/e6/5F3G5AkglIWWXiO1bx5+22vqvBiFEct6ZpxPWGcoWMVojhekvmgxxWznOR4z1PaIWGqfTiLQ9zpSKIkbJ5RqQKJxSOK0pEeKWMeodfUlh2+/dZi0NUijGcebL9z+x7tu7p6cwjSNSDn3bUyJVS5roUAyaoKIw2iNEpLa+lZI0Su2I2Pmo0Eay3iI5FkrLQCLmndu6UZtmGiqQ0VohlWCcHDRJb7Tmd8t0P5xLoUD0Qq4xkEtHchrdy/qtZoSSDHYm5Bt7KhQBQihounfDZEIrC0hy7VCHWjvsQyKoKSO1ResuXS05Mwz+XUQqaDVjfC/YNiRkTRWgTfcKxdwPQyWXdwhHB1OUBqlkZj9gjKaUyvnyhlSpT2uMwag+LS8oEKLbw2PCaEUtvetD7basFCsKQcuNVjLG6X4Y0opaWoc1AM5Z0rYTQ2HwAucs5/NKk4Lp4PDegAyELRBS6gLh3C+FJWX2raKloJRAk/J9StYFwyVmnFUI0foquVl4L9OGHGkq4gXoplFCo60D0x8Ye4iQBUZZrNakvPbNj6IX1ovsnyclRbY+AW66T4RLz/17ZTpIowlyjb34rHshtuVGixLqxOQnpM0YLRgmz7IVtusGMiFt/38ataU2wbZl0jXwcDryu18OVNE7h1uLPDxKlk1TdogrxJDwHrZ1p5SGVY5xaOz7TgOMtWAbuWxo20BUSotoq3qhdQ201gWuuklklcTcJ9laVA4ncNPA/RZZtkDzGyI7mixoVO9+lYZS726jHPFOd8qm6pdz3TRWOYwbSO1OExWpNIfjkde4QCk0IWglI40n1YwwMBwMLWRqqrTSS7dOKVqOPD5MeDeitSMlRYw3ZJVIJWnAel9wk0JrjSgKKTsBTqpGyd2VdrsHPn36Bd5EcguU0v0+XnuyjBir8INn3TcmO7LnTtgybqQphXQO0bq8s6VAlolcF1RODO5AK51Y2WojxsiWE1I6apNY5dDGI8qClf1BJ4VgXwXz9Ehtqn+MMMT3C6myAekUAksMidPJEG6JikTqgZpWOpdG0pQkV7gsd5b9htG97L4tN9ZwxdkRKQy5dLT79SI7yEAVWi04MyCtocR+YW6tdUFqFRhlGB9Gvvzf/xn3428RRrKERFwFbSi0llEWxsFTdcH4I3Hd2PYr59fM28vCg9spKRDzRhOS8+3MqCec1KjcOIcdUQKTO6CdZU8LWwiMk6cPRBtCSXItNCJKNbQcEKJwXc5oafF+xBiDbII17aScybWDWggNIwVGC7QVWKfZto2YumNJGIVQjnFqOKfQqjHOnpr1+/OyUFrDaUOSlpYTpTa0dv2Cs99Je09kfH19I7/3Z0QFITSDn8ghMw4WM2rKPVCExbp+AGqhIkRFKNmTHq12zH71TG5EvA/1lmVHTQOlVZTocdrBWaoQCB3RujKOgn3bGN1Iy5Goe+woJICKkp2cZ7XrYvRSqaUidCHHPqBQSIzS1AZFdMBLK5X2TmTMufyTmPgwHwnlxnK9d6qkyfzp+z8wGYdTBiMbWVS0MdzK1l/r3jL4CTsZqIK4JmIq0BSH08zzcCRsO9/OLxjvuW2JVvoFS2nLy9uN0+PAXgN7jdjShzSNfghstbBuoUuCRZdKh5BxR804G0raoIQ+/W+yC1VFT660veFsp981EylEvB2AHqEUog9cu7xaYJxjHCb8pFGtA79qhpLAOYesfdgbi6K2nvhwfkRZSROJGAKHwRPiRk473hw5nSaW+0qKfRgtZJdwD4PDuw6uUsq996k0lcb5vGO8ZnIjaavEpaKdp9bYty9CQzVYZTrhWBqslmidqFWgpeznCtU4TBrRJFoYBJXGjvUDMfTOk/rbuTJEau0wsJxzR7afr4w647zvFNec8W5Aa4UQBakrxikqirIJamgM44QfHEu4EPNOa4370r1jShmEhFIUCINUiiYESgoklkql1l6hUMri/YFUFmquVNl1G7H0i7QSCkPvM8rWCZW1VfawY0aNrJBjIoRApuCNphRDiDvbHvDjiNaKFCJSVB5PM0tQHewSoaRGihlERehKabBH1R2qXrPF/tzRTFQyIVWE1FijkFISWkY2ideOcRjxRqOFYI89oaNdP78rrTsls1ScG9GmdYef6vqI/vWAlCKtFiZlcE5xu/SBn0Cy3APeObQRvXsfCrlUHh4conhIiprBO8O6XVD0P9c7B6ILr7VULLFQi+iC4ZoxSvL89MSy9P6/lj1Bc99u7HllMBZnNboYkAIoSNEQrRG2/x9croRohBTRquOZS4ldtqobgkLOmbh7Pn9+plW4nhfOl1cenya25UprEiEcMUVOJ0UIK0jJOM1gKufbDaV7ZGvbC2iDHRykyHK7UreBwc6Mg0DZSst97ae1I5fMtm89VvHyJ3i3Zh8Oj30TkAp+AqRB6wmWr4SwMw0z43ig1AlEYL9GrDOMs2MIEmkG1rhR9js1G6xThO1O3Ho8URRFyitLzQyDYT7OOOdZlxspSmgGbQxKLxjr/0m8GeKNFKb+olAwDiNpk1xuL4xDxwnvqfD67YyeB5qToBT7/R1/ajvVRaD7g761Dl8omYKkhoQQGqMtVhmEtjjf19BSOZRSCLVRssBnTyq6P7xFIu+VuGVSbFjrOMyWWDOlwbZn9ph4Ogn2eOO6LJzfzjx9OpBzJRIBgZTv15fWoBZqqXjviLF0jLWUTMMR5wZEzoR17UXMcWC0lhYUsUTSnjmXCw8n+gMBSdhfmedf4sfE4eR4eBxJKbFsV0Jcuj/JDVxviVx3KO39DV/iB0dtliYrlY3vr1e8sUgjUKIXUefBse6K67Kxx8TxecJbj5fHLtRtG/e4s7VOmrLeMCjPvgWOxxONnZw31jXzfPqBWjJGacxo2cONsid4nxAaYzgNR7ZtYws7e9zJLfLxp9+Q9sh1XdnvG849MDlFihu1FlIp7NvGNMwo50AltnjDCRDFoVphniX/4//wb0np7/jjH/7C21tA6hPu+U6QllQlKYKoI06diCiqSHij8Gbg2+s33DDghwGvDfdb5HiYGEZFa4F1vWDUI+Mwsa2By+vG6bE7ijKCqg16dqzlG1YJkBu1vJH2wmGYCNtOyTvNQVthHgdaSShReHicyHHDDYpBOxwePz/zdz9/Jasr42CQ5ogUmnDfemxEdTS80hNf3r5iDprPvzqhk+Yf/s8/YvzA6DWzryxbY3QfenxKQNgLl+837GwxStOa7nje2aPVibAmtj0gZ8e2Fvalf46QGetG/Mlyvr9wWV94enpAHzRrXBGyT+X2+4KXhvv2xra8Mn+aEFqy7HecHRgPM8I/8/Llr7QWkS2xLxlVDCfduF1u3JYd4R3T6QNOK1KD25qJWbLGnar6Zm5dMlo4rIYPpxGtDP/Hf/o7Ho+P4CV7gj1kNDtCF96uG6IJZn9hmj0zR9b9TswBaxx/+NPvSWEnpsZfvr5Ak2xhZbCNwzjy+DBwuy2kuKDdhHYjpcImEuFyZVk2tiXx6dMv0FLR0o4QiuP0gRYGam5s642X
txeSBOdOOJHxSlFDQNUHti+CuAf2/cyfvq6keOB2foGyEdLG+apQU0WeAk0q7qnyfdn5PGmM6oO4fY34WRFqFx/X2od5tYIxBTsIrBp4ff1KzJrx9IF5PmFUYrskwi5QcmScFXu8MHhozSJkj05J1emoxgwYrxmOmpweGe0KtctKnRt5u20IBFY7hHbkvBK3HVFKd8e7kbw3tnRFqcbxOLPEDsyw9YJg5eXbCyI2TocCKlIKTIcjlxt4b1jXK9fzG9Sdw+ED9X2insvGcXzifr+/R/521vSKDEeaFMS8U+rKh9OPFDZS2Gg0ZneEeoMaeiReWFKGUCrohkUxCMc8TazhhRxB6t4rTbeFHz99gmqIe+br5Wd++OEDD25mM4ItdMT27XyBCmoa8eOBn5zj72+BvRZC2bmHK3V8IAVLy41aG4OZuPlrjwPZgcP0wMvtr5TURahKGZ4/TEwPinkaUHeJ3x8orbCl8ztwSCGp7EHga6WZglYW30ZCzHhjUTRCCCgcKSXQjSZk9xbeF54+PZLEzn5fud12BANKehCF0ja2s+Dw+RMPDz3OtYbE8fCB79+u1FJRSqO0JlNw44C1hmF0fP/+HSsGlMhIWXFDPzgeHx5wwwF1j7x+3zkeTzx/fKKIyNfzX2h4WhtQKqBkY3CGnATny5VhUPzw44G3t+9IHIqBUixbzUzHD8R4J+Uruey9j6N39uXWzx5RYuexbzlRWGlws0Oyv2/VRqx33Nbv6GIxrZ+rsrgR9w2jocmhe+TsSEo73vcUkhAVbQ2v5ytW2/eBgGbwlpoDra6IKpBNMtkTgx1J8U6UGx8+nrjfLwxDp1VqG9FG0T2Y/eKfS2G539FGMk0OrQRfv98YJseeEzEnUs0cjjPn1wuDHpndyOnxA0JZBpGp+Z2GqBRb1Xz69EDYV15fviOaQWnLOB+JzbGtmWkcuLyc+9DWgB06QTXWlZDvbOGKuMSutDGq4/01eKFZ94BRYCdPQjIcZ7awsi+ZvCykXWJcQeRCjolbqeTbRiwNP0xo1UmGCsHkHN5rjKx8+/IX9h14H3hQNFprrFPsayLHxsfnIyUvlFZJKfL9didFRbOFfV3xxvLL3/ySkje8PdLqSs47oMg5YcyMlBJt7pTaWJfA6AYGM7IT+PD8zJ//klnL1iO3VhLzxjBNfFIDhzGineGHDx85v7xwv934nq5YNTB6R66Ny+VGqt/wM1gMZRMs5xWxVB6eHEZLqv1bD/r/+8e/iM7Vf/yf/+P/9OHzxLYvXG9v/QF5HNmWG/sWMHJgHmZqM7y+fed+e+Xx4YCRDoXoWVTTKXcSSVwTJRaabOinE29Ln8rJVFhf39jWOwqJyCBa93G4qSMoQ6osW8UPI7n0eKK3lmm0/Onnn8m0Hslwnc8f445WDmcP+GHmcg9s90ZOjVYS1mYkA0u8sec7Md8oKfOLn37k4TRjtWZb7lzOb9xvgRgFUg18+vj8LqQcUcKSQuX8dkfUiXma8V4T4g7VIVUjpcoeGkYatvsLVoPWvSj69ftfeXw44QcPTbBdA59+PPFwMlihkUnibOMwPjOOBm2gkFHe8Xa7YUeHGwdySvz4y98wHg1mbAhbEda+FztXGj1TXKsgiUBqhdw6aGOe6fhkMzLMT2AUHz8cOx6+iG5HTwFpTMf96ozRBqcHTsdHkJIlrtzSK1Il9iWii+Y0Hyn5FSMFVENrBqMEqiVUq5wOlk+fDohiOR4OPD6MfHg+8fHjM/N4oNXM44Pn8w8PfHz+zMvrd7yTTNOEkI7/8vPvSXXH+EcKiq9vf0Yri4yJ4zxzOM7cwsqeKn5UpLJwv505uImn00QJhbQmWhJs977hctbj7MRyv3M69QtWrbCHRC0BRYDYceatJfwoua8r93VnDQ0hZw4n1fsU1uP1zLcvN/7rf/87trhxvl7JOXOYn7muZ5Rr+FnTquAXP/yC62Vh3wNSKX748SeO04nzJXG+R1LrJCLnj30l3yL/6ncOb0+sqfLho+A//DvJ69f/jf/1P79yuxb8MPH51x/IWfO//y8rl0vETYofPv/Eff/GXi+42XF6+sSXb9/QtuDtQBOKy7ZSkuZ00hgtKAn2tc981nVhWVb2FHC/M4Rz4cM08MPTATN6vl662JYcsKoSY+LXn/8VogpEExit+Tf/1TPn1ztSWrR2xLCRamEQDmtmtJ8IZafUijahb7PbjMiCLYse6xKV0TusFRg0Iij2t8j3v7wihUYpRYmN9V6YZsHr619JaUcrwcPTzLeXLzw9HZmG8R1f3Ah76NJDGtoLKivPpwfKexT0YZ4wBo4/jBivoSrCWvEPhj1GSssYX8FrQJJqoqlGJRK3xnqplKWR74kvf/wzhQVF97zFLaCNRHgN1mKnA4+nH4jbyuOHCT9IStm5vN2ReJwZMUajVcV5xYdPnxBGc1sXXt++82/+7a9JSyRtG7WuFKEJUVBEASk7zVRn8iYowpCE5nZZeHm94w8zUkjCbQWj+PMf/4qR8n0K2gj3jR9+eMS7Rs5XYr6xhkrYAlYbPj49cxwmXr7+Ge9HpDR8+fJX/pv/8Gu+fvvGsvcI5TAIyIkaKqJKlHE8f3zk7fy9b/qUIa8bdlKEdWMLDaEHfvmrXzIMhvP1wv28IYPCTUN/5lhPa3C5ncl7Jcf3LX8LpO3ONDikUIhsaJvi9euF4fSMcY3JS3774Zf89c8/v5NQd0JYWPfC4+FALAmhFNP4yCAs59vXjiU30PYFiuAeAhiJmSz3LfB6P4OVZN3Y2s5gZ7bbjpKWKhRvtxvOVq7LQjUSNw9s8YbUV0SLSARK9Cn0dTtzvb1xv54pe2F2M+TCcl9Zw446DZz3bzwePI/zASsmokjczpHPH5/49e9+QP84cRwPeC1oMnNPK9//cCXKrXdx7IHJfQazclkXlrjTVGM6NpZbnyrPw8RhPrBeVwqJoiUYg5KGtmVq1Ehj0V4jW+SHj78ihExJBS0E09BpZAKPNhbrNYvKnF93SlNo7ZjtgRwTlUKUlWAESmRyaohWGJ3nx48/8uUfv/QYldMMJ4ceDSkvfD1fud4DWmi8ixSdGcdjl5S3RgsRZ4/EXIg5MvgRKjxMD0zDhNWKJi240hHXoaDGgSW+saTA7XajpsZvfvOvef3TN/Z4R1nB4/MTg5eYJ0O0ga0F0ub4+Q8/89e/nrnviaAabbY8PDz1tEVprLeAFo55dEgh8G7kVz/+lofDiZ+/fieGzOPhxH//3/53ODcSUuV8Xnj9vnCYD3jniEWQq0Q7wTXcuYdEExYlPbMbuN0XkugbCikDWzgDifW+sV4josD9tr4PDxp2VKhBoIpEpsYoNc/HnvpBbOzpzh535ukJM8G2R7y1PD8+ERbFNEEMV0peMFogqOQMNUpaEkgqMawY16itsq2R6+XO4/HxnQhdOR5PTG7m/PaNmhNUyfeXlSokW76QckExYYTBuczturLe965CcApvNJRKjpkqJKIutNKR3aEkruvGh6cfOlpdNlC2Ax6WnZwyrQC5+z2t75j98zmg/ch0UIR6J5WAHyZyuPc
...[base64-encoded PNG data omitted: matplotlib figure output]...",
+      "text/plain": [
+       "<Figure ...>"
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7GrWIJywLV-V", + "colab_type": "text" + }, + "source": [ + "## Train a detector on customized dataset\n", + "\n", + "To train a new detector, there are usually three things to do:\n", + "1. Support a new dataset\n", + "2. Modify the config\n", + "3. Train a new detector\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "E73y5Lru-wBx", + "colab_type": "text" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "There are three ways to support a new dataset in MMDetection: \n", + " 1. reorganize the dataset into COCO format.\n", + " 2. reorganize the dataset into a middle format.\n", + " 3. implement a new dataset.\n", + "\n", + "Usually we recommend to use the first two methods which are usually easier than the third.\n", + "\n", + "In this tutorial, we gives an example that converting the data into the format of existing datasets like COCO, VOC, etc. Other methods and more advanced usages can be found in the [doc](https://mmdetection.readthedocs.io/en/latest/tutorials/new_dataset.html#).\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [KITTI](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d). We select the first 75 images and their annotations from the 3D object detection dataset (it is the same dataset as the 2D object detection dataset but has 3D annotations). We convert the original images from PNG to JPEG format with 80% quality to reduce the size of dataset." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "rHnw5Q_nARXq", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 224 + }, + "outputId": "5ecddc66-cb01-467c-b05b-3f1fb1f0a39f" + }, + "source": [ + "# download, decompress the data\n", + "!wget https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/data/kitti_tiny.zip\n", + "!unzip kitti_tiny.zip > /dev/null" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--2020-07-13 14:48:40-- https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/data/kitti_tiny.zip\n", + "Resolving open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)... 52.219.56.23\n", + "Connecting to open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)|52.219.56.23|:443... connected.\n", + "HTTP request sent, awaiting response... 
+  {
+   "cell_type": "code",
+   "metadata": {
+    "id": "rHnw5Q_nARXq",
+    "colab_type": "code",
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 224
+    },
+    "outputId": "5ecddc66-cb01-467c-b05b-3f1fb1f0a39f"
+   },
+   "source": [
+    "# Download and decompress the data\n",
+    "!wget https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/data/kitti_tiny.zip\n",
+    "!unzip kitti_tiny.zip > /dev/null"
+   ],
+   "execution_count": 8,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "text": [
+      "--2020-07-13 14:48:40-- https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/data/kitti_tiny.zip\n",
+      "Resolving open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)... 52.219.56.23\n",
+      "Connecting to open-mmlab.s3.ap-northeast-2.amazonaws.com (open-mmlab.s3.ap-northeast-2.amazonaws.com)|52.219.56.23|:443... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 6918271 (6.6M) [application/zip]\n",
+      "Saving to: ‘kitti_tiny.zip’\n",
+      "\n",
+      "kitti_tiny.zip      100%[===================>]   6.60M  8.02MB/s    in 0.8s    \n",
+      "\n",
+      "2020-07-13 14:48:41 (8.02 MB/s) - ‘kitti_tiny.zip’ saved [6918271/6918271]\n",
+      "\n"
+     ],
+     "name": "stdout"
+    }
+   ]
+  },
+  {
+   "cell_type": "code",
+   "metadata": {
+    "id": "Wuwxw1oZRtVZ",
+    "colab_type": "code",
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 1000
+    },
+    "outputId": "b02de373-4e5c-4d63-cb4e-7ef103e44814"
+   },
+   "source": [
+    "# Check the directory structure of the tiny dataset\n",
+    "\n",
+    "# Install tree first\n",
+    "!apt-get -q install tree\n",
+    "!tree kitti_tiny"
+   ],
+   "execution_count": 9,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "text": [
+      "Reading package lists...\n",
+      "Building dependency tree...\n",
+      "Reading state information...\n",
+      "The following package was automatically installed and is no longer required:\n",
+      "  libnvidia-common-440\n",
+      "Use 'apt autoremove' to remove it.\n",
+      "The following NEW packages will be installed:\n",
+      "  tree\n",
+      "0 upgraded, 1 newly installed, 0 to remove and 33 not upgraded.\n",
+      "Need to get 40.7 kB of archives.\n",
+      "After this operation, 105 kB of additional disk space will be used.\n",
+      "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n",
+      "Fetched 40.7 kB in 1s (46.7 kB/s)\n",
+      "Selecting previously unselected package tree.\n",
+      "(Reading database ... 144379 files and directories currently installed.)\n",
+      "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n",
+      "Unpacking tree (1.7.0-5) ...\n",
+      "Setting up tree (1.7.0-5) ...\n",
+      "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n",
+      "kitti_tiny\n",
+      "├── training\n",
+      "│   ├── image_2\n",
+      "│   │   ├── 000000.jpeg\n",
+      "│   │   ├── 000001.jpeg\n",
+      "│   │   ├── 000002.jpeg\n",
+      "│   │   ├── 000003.jpeg\n",
+      "│   │   ├── 000004.jpeg\n",
+      "│   │   ├── 000005.jpeg\n",
+      "│   │   ├── 000006.jpeg\n",
+      "│   │   ├── 000007.jpeg\n",
+      "│   │   ├── 000008.jpeg\n",
+      "│   │   ├── 000009.jpeg\n",
+      "│   │   ├── 000010.jpeg\n",
+      "│   │   ├── 000011.jpeg\n",
+      "│   │   ├── 000012.jpeg\n",
+      "│   │   ├── 000013.jpeg\n",
+      "│   │   ├── 000014.jpeg\n",
+      "│   │   ├── 000015.jpeg\n",
+      "│   │   ├── 000016.jpeg\n",
+      "│   │   ├── 000017.jpeg\n",
+      "│   │   ├── 000018.jpeg\n",
+      "│   │   ├── 000019.jpeg\n",
+      "│   │   ├── 000020.jpeg\n",
+      "│   │   ├── 000021.jpeg\n",
+      "│   │   ├── 000022.jpeg\n",
+      "│   │   ├── 000023.jpeg\n",
+      "│   │   ├── 000024.jpeg\n",
+      "│   │   ├── 000025.jpeg\n",
+      "│   │   ├── 000026.jpeg\n",
+      "│   │   ├── 000027.jpeg\n",
+      "│   │   ├── 000028.jpeg\n",
+      "│   │   ├── 000029.jpeg\n",
+      "│   │   ├── 000030.jpeg\n",
+      "│   │   ├── 000031.jpeg\n",
+      "│   │   ├── 000032.jpeg\n",
+      "│   │   ├── 000033.jpeg\n",
+      "│   │   ├── 000034.jpeg\n",
+      "│   │   ├── 000035.jpeg\n",
+      "│   │   ├── 000036.jpeg\n",
+      "│   │   ├── 000037.jpeg\n",
+      "│   │   ├── 000038.jpeg\n",
+      "│   │   ├── 000039.jpeg\n",
+      "│   │   ├── 000040.jpeg\n",
+      "│   │   ├── 000041.jpeg\n",
+      "│   │   ├── 000042.jpeg\n",
+      "│   │   ├── 000043.jpeg\n",
+      "│   │   ├── 000044.jpeg\n",
+      "│   │   ├── 000045.jpeg\n",
+      "│   │   ├── 000046.jpeg\n",
+      "│   │   ├── 000047.jpeg\n",
+      "│   │   ├── 000048.jpeg\n",
+      "│   │   ├── 000049.jpeg\n",
+      "│   │   ├── 000050.jpeg\n",
+      "│   │   ├── 000051.jpeg\n",
+      "│   │   ├── 000052.jpeg\n",
+      "│   │   ├── 000053.jpeg\n",
+      "│   │   ├── 000054.jpeg\n",
+      "│   │   ├── 000055.jpeg\n",
+      "│   │   ├── 000056.jpeg\n",
+      "│   │   ├── 000057.jpeg\n",
+      "│   │   ├── 000058.jpeg\n",
+      "│   │   ├── 000059.jpeg\n",
+      "│   │   ├── 000060.jpeg\n",
+      "│   │   ├── 000061.jpeg\n",
+      "│   │   ├── 000062.jpeg\n",
+      "│   │   ├── 000063.jpeg\n",
+      "│   │   ├── 000064.jpeg\n",
+      "│   │   ├── 000065.jpeg\n",
+      "│   │   ├── 000066.jpeg\n",
+      "│   │   ├── 000067.jpeg\n",
+      "│   │   ├── 000068.jpeg\n",
+      "│   │   ├── 000069.jpeg\n",
+      "│   │   ├── 000070.jpeg\n",
+      "│   │   ├── 000071.jpeg\n",
+      "│   │   ├── 000072.jpeg\n",
+      "│   │   ├── 000073.jpeg\n",
+      "│   │   └── 000074.jpeg\n",
+      "│   └── label_2\n",
+      "│       ├── 000000.txt\n",
+      "│       ├── 000001.txt\n",
+      "│       ├── 000002.txt\n",
+      "│       ├── 000003.txt\n",
+      "│       ├── 000004.txt\n",
+      "│       ├── 000005.txt\n",
+      "│       ├── 000006.txt\n",
+      "│       ├── 000007.txt\n",
+      "│       ├── 000008.txt\n",
+      "│       ├── 000009.txt\n",
+      "│       ├── 000010.txt\n",
+      "│       ├── 000011.txt\n",
+      "│       ├── 000012.txt\n",
+      "│       ├── 000013.txt\n",
+      "│       ├── 000014.txt\n",
+      "│       ├── 000015.txt\n",
+      "│       ├── 000016.txt\n",
+      "│       ├── 000017.txt\n",
+      "│       ├── 000018.txt\n",
+      "│       ├── 000019.txt\n",
+      "│       ├── 000020.txt\n",
+      "│       ├── 000021.txt\n",
+      "│       ├── 000022.txt\n",
+      "│       ├── 000023.txt\n",
+      "│       ├── 000024.txt\n",
+      "│       ├── 000025.txt\n",
+      "│       ├── 000026.txt\n",
+      "│       ├── 000027.txt\n",
+      "│       ├── 000028.txt\n",
+      "│       ├── 000029.txt\n",
+      "│       ├── 000030.txt\n",
+      "│       ├── 000031.txt\n",
+      "│       ├── 000032.txt\n",
+      "│       ├── 000033.txt\n",
+      "│       ├── 000034.txt\n",
+      "│       ├── 000035.txt\n",
+      "│       ├── 000036.txt\n",
+      "│       ├── 000037.txt\n",
+      "│       ├── 000038.txt\n",
+      "│       ├── 000039.txt\n",
+      "│       ├── 000040.txt\n",
+      "│       ├── 000041.txt\n",
+      "│       ├── 000042.txt\n",
+      "│       ├── 000043.txt\n",
+      "│       ├── 000044.txt\n",
+      "│       ├── 000045.txt\n",
+      "│       ├── 000046.txt\n",
+      "│       ├── 000047.txt\n",
+      "│       ├── 000048.txt\n",
+      "│       ├── 000049.txt\n",
+      "│       ├── 000050.txt\n",
+      "│       ├── 000051.txt\n",
+      "│       ├── 000052.txt\n",
+      "│       ├── 000053.txt\n",
+      "│       ├── 000054.txt\n",
+      "│       ├── 000055.txt\n",
+      "│       ├── 000056.txt\n",
+      "│       ├── 000057.txt\n",
+      "│       ├── 000058.txt\n",
+      "│       ├── 000059.txt\n",
+      "│       ├── 000060.txt\n",
+      "│       ├── 000061.txt\n",
+      "│       ├── 000062.txt\n",
+      "│       ├── 000063.txt\n",
+      "│       ├── 000064.txt\n",
+      "│       ├── 000065.txt\n",
+      "│       ├── 000066.txt\n",
+      "│       ├── 000067.txt\n",
+      "│       ├── 000068.txt\n",
+      "│       ├── 000069.txt\n",
+      "│       ├── 000070.txt\n",
+      "│       ├── 000071.txt\n",
+      "│       ├── 000072.txt\n",
+      "│       ├── 000073.txt\n",
+      "│       └── 000074.txt\n",
+      "├── train.txt\n",
+      "└── val.txt\n",
+      "\n",
+      "3 directories, 152 files\n"
+     ],
+     "name": "stdout"
+    }
+   ]
+  },
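+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "kitti-label-sketch-md",
+    "colab_type": "text"
+   },
+   "source": [
+    "Each `label_2/*.txt` file holds one object per line in the standard KITTI layout: the class name in field 0 and the 2D box `(x1, y1, x2, y2)` in fields 4-7, with the remaining fields describing truncation, occlusion and the 3D box. The sketch below shows roughly how one label file can be turned into middle-format arrays; the helper name `parse_kitti_label` and the class list are our own illustration, not code from this tutorial:\n",
+    "\n",
+    "```python\n",
+    "import numpy as np\n",
+    "\n",
+    "# Classes we keep; anything else (e.g. 'DontCare') is skipped.\n",
+    "CLASSES = ('Car', 'Pedestrian', 'Cyclist')\n",
+    "cat2label = {name: i for i, name in enumerate(CLASSES)}\n",
+    "\n",
+    "def parse_kitti_label(path):\n",
+    "    bboxes, labels = [], []\n",
+    "    with open(path) as f:\n",
+    "        for line in f:\n",
+    "            fields = line.split()\n",
+    "            if fields[0] not in cat2label:\n",
+    "                continue  # skip 'DontCare', 'Misc', ...\n",
+    "            bboxes.append([float(v) for v in fields[4:8]])\n",
+    "            labels.append(cat2label[fields[0]])\n",
+    "    # Middle-format 'ann' arrays: float32 (n, 4) boxes, int64 (n,) labels\n",
+    "    return dict(bboxes=np.array(bboxes, dtype=np.float32).reshape(-1, 4),\n",
+    "                labels=np.array(labels, dtype=np.int64))\n",
+    "\n",
+    "ann = parse_kitti_label('kitti_tiny/training/label_2/000000.txt')\n",
+    "print(ann['bboxes'].shape, ann['labels'])\n",
+    "```"
+   ]
+  },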
"iVBORw0KGgoAAAANSUhEUgAAA20AAAEfCAYAAADShy4pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy92bMtyXXe91uZWcMez3zOnadGz0ADDRAgQIIiKZkUHWGbCioomn5zOEIRkv0kv/hP8Itf/aCQ7AjLYVEkLTIsBi2SYnAEAWJGg81u9Dzde8989jlnDzXk4Ies2mff7tskGEQDDXp/Ed333r1rV2VlZWWub61vrZQQAkssscQSSyyxxBJLLLHEEkt8OKF+0A1YYoklllhiiSWWWGKJJZZY4v2xJG1LLLHEEkssscQSSyyxxBIfYixJ2xJLLLHEEkssscQSSyyxxIcYS9K2xBJLLLHEEkssscQSSyzxIcaStC2xxBJLLLHEEkssscQSS3yIsSRtSyyxxBJLLLHEEkssscQSH2J8YKRNRH5ORL4jIq+IyP/0QV1niSWWWGKJJZZYYoklllji7zLkg9inTUQ08BLwM8A7wFeAXw4h/OX3/GJLLLHEEkssscQSSyyxxBJ/h/FBRdo+A7wSQngthFABvwL8/Ad0rSWWWGKJJZZYYoklllhiib+zMB/Qea8Cby/8+x3gRxcPEJF/CvxTAG3Mp4Yrw/hFEEQApDkyPPCnzD9vjhFog4VhfuziMQuRxBCIJw8LX4V3HRq/D8HhnCV4h7SfBxBRiNagFAGJ1wwe8OAD3nlA0FqjtEaU4L0HwGiFKIUA3jsQUCIEH3DOEwjxd0rjnMM6T5IkaKUX7tPHuxN5sNkLfTPvuRDmhy3GU8P79FAI8Tcx+hrmH4YQ8N7hnIfgMaJIs4yAUFtLIKAEtNZorUmSDJ0koDSi1LyftVIX53OeqqoJIcR+knhL4SGNjI9NLu6JxXu/aGsIIAS89zhb46wDCQhgjCExCVrreC4B7wM+eKx1VFVF8G5hqMR2KhFEFEopRKR5Lg4fPN7H55XnOcYYRAQRicc240jm7ZN2CC3c2uJT4GIgizxwjxcjtnmesnCih0Euzj8/pO0/3z7f+Pt2jHh/8W4o70hSEy+hNEGn6CzjYHcPX5YoCQQB2zxDrQz9/oBur4NJNMFbXF1RlyXeB6wPZKJIs5R8MCBNOxACSkNNjS1rlKRMpyXT6RhjNFmWobWmritCCHQ7HdIsRSnV9E8cB9ZDUU6p65qqqHBlSZZqVlZWUSYjKFBaIah3zSsXY+piwMX/O+85Pzvj9OiAoigZ9nJWBz2UNu0bTxCJHq/5Aw14ArPScnI6wbnA1vomeW4wRuFCoLQBpxKKyYSBCuTdBAf4IIzPT+mnOb1ej6AE7y3T6YRpUZOmGW42pagslXWE4BAJRJ+bYJ3HNXObb+cqedfYQsWGSiBNU3rdHsOVVXrdLqiLqbE96xJ/NxHe9bf5PNSMY+88o+MzRqcn1HVJsDX9TofhoEdZV5S2onYWH+KcEdctmjlFyLOM9fUNer0+k+mENMvodrvxrZHFWSlAaN5iCfi6wtma2lqSrItJ03fNjs376R3WWkRAiUJpDaJ4z1za/KSuK5RSnJ6ecX4+xugUbQy9Xk5iBLBYH5hOZ5STMbPK40NA49EizfmB5u9KKbQxZFlOt9sjTbM4Jz3Qzoe3ZaHDASimjuPDA5yb4UKcU0UFtARSUaxvXSLJu3+j5yo+UM8KxtMx1locnjRPyUyCK2umxRSvNcPhBr1uh8a4ma9Xf/0MEAiLN+Hh/GxMOZtgvcWpQGIUqTE4G+dDgkXwiE4Yrm/RyzOkWXuUekhffSB4cLEMczvzwWPe+/TacRog1OCg9oZaFJULFLNAXZWkuaKXp9RFSVkEbEhI8wRtPMGXZKmCSjOZWmolJLlmvQepCLNScTzxkMGwJwx0fB6t9fBhmY8fZm58v57e/9/wta997TCEsPWw7z4o0vbXIoTwL4F/CbCxuRl+7r/8LwhBIKhIjNoJXgKIJwQXDb1m5PjQmMQhoHSCJxpbc4NURaMshEgWIvGweOtQwnyyCD4SrhACwQviFcHXWDtmfHpIMT5FB1BegVVknT5ZfwWbZ1SisOIIdobxJX5aMDufoSWhP1wlGwzweJSCXq9Hv5uhlBBwzGYTgq8xWlNMC8ppRa/XZ7i6gklSjkenjE7P6fYHrG9skuUdnHWIr0kSCChQgkoM3gfq2iIIGoUWhQTwwSIS5otqaBYfHwLS3j/RYKtswM8JmsVWFd57gnNYW1HMZkwmE9x0ykaScPXmHYablzidzdjdv89sek6ihTTLEJ2xeekq1+48xmB1g36vT2oUeaIjgQ2glOHsbMzhySknJydYVzMY9Oh0ujhnIyFKDNZalNZobQDB1hblBYXgJeDx+OAIweO9Q0sgAWxZMZ2es7v7Nrau8LWjk+fsbG6xublJfziMBnrwFLbm9OSEk6MDRifHWOvw3jOdTChmM7Iko9frk2c5Sim8gtPphOPjE6bTKUmScunSJW7euMXa2hrD4QpKKYJ3pNqj8CBCUAobAi4ERBRaKSQIznq8qwneNuRVo1WKUmbBIHAEHCJE8usjOWzJ5SKpBebGBUrhQkvWItEMkXvjrKUsa1SAwXCF/uYGxfkp5b23GGx1CUlKpfrIzg3WH32c//V//l84e+lFOkmNSxTHZzPeeeOAbrbGZ3/8x3n2089w+eoaxWyf0d477L76CuW0ZO+05FZvyO3bt3j8p36SmzeeQtcOs+o4DAfsvb7LQK7w7W+/zp9/84tsbq7zyK2brA6HnBwdMptNeOKpJ7h95zbD1ZVIkGxJYhJGBbz05osc7+9hRwVvf+ubTI7v8fFnP8UzP/bTDK5ukuVdUpNhtJmT9rbPHiBuErDiOZ3N+IPf/R1+69/8K1558WV+5lNP8PM/+WmylTVSbamD4HRKJhZdKzSaoB0TLM+/tce/+3++yNGh51/8s3/O5z75EdKs4HAy5s//8m3c5qPcff4FPteZ8dFPXuegqpl6xRf/5Pf4zI0n+dHPfI5Caqw955vf+ia//cdf5/qVOzy9kvCf/vQbvH10ztn5Pkli8eQgOedFzeH5mDIonE7wohAVnQhx9AiBDE9FnkEnN/zY5z7Pf//P/0ceefQOQQlBAk4iIUwlmTtK5L2WzRI/hIh+s2iUz52bAtKsuaEOFJOK3//tP+bX/93/zVvvvMT48D4ff+QWn//Ex3D1hO+8+RIjO+OkKhidT5lOa0JQVMFTlBWPfuRxfukXf4mnn/4EV65cZVaWrG5v0Ov3CSrgm6lMY5HgCEFjxaCDpTh4m7PDe9zdP2bn0U+wc+U6yXzsBcCDeOpixmx8SqebY7IE0QbIiKaMLHq4qKua57/9PNtbW7zwwnf44pf+nJ//R7/MlZuXSDqGhCmqmrL31iH/9l/977z857/HO3Q4Fs/l1YyhyhCEpJsxXF9lY2eblfUNnvrYx3nmmU+ysb4DQeN9wGjVENOmye/2QC
5avBJJw2vPnfNr/+Zfc3zyHGdViesliKlY0RU3+33+yX/3L9i89fQDpPBh76MHakAH0NOavede4E+/9Wccnx4yDSVXnrrNje4qey+9xguvv0y1OuRn/rP/lk996kmUrhE0EjQiFpEq9qfXBAEvjWsoxGcQJOBRBBTKQyg9f/Q7X+DFb32J4+KA08yzvd3h1vYlTg5rXnhtD18ckakp3c0r/Oe//M/4+J3bqMrhfSBNo2M7BFB/C3ay6LxtWPYFoQgBcA1B9YSgAb3wSx//9D6uqQ0t9d7iZzXWC6fjM8zsiOHwCkduwJ+8OuHrz+0yPRFKZjzyyRU+++gl3N4Jb7xc8+K9Dt0rXVY2pty5CVeGivO/1Hzhm3u8bio+8RN3+OVnO6x5y1f+IvAbX52QP93hZz/Z4TM9T+ISrAZHJHY/sFl4YegG4nhoia3A3B5/oH3LJeNvDRF58/2++6BI213g+sK/rzWfvS8E1bwq0pA3mocvRBeUJhqureOrJXXqwlAVIbSTmlxE3oJvPXtt5KS5aAwtvc8gk0jsmhdGKUVojRhpjGAVLvxTTdQGIUaXBNrhniQJaZpGQhR8E8Vz1FXNzM+wlaXb6TEYDsiyDOvi5OG9x9Y1SmJbghK0NojyhACegGoiJUYbQkNafQjRSyiKgI+fExCJE6S0BK7tAwGjm7lNFHiDFbDWUQMqaLRJEBUnOiNCphPyvIvurdJf32Z8fsLB3j3ORieIFFSV5Wj/hOFwjWtXr3Ll6iXy9VWM0dR4ZrZCdw3b/W066z1Gh8cU4xlFcUqeZ2SdHKUkEg+grmsCkJgEFSKpVyJxwhUf+7Mucc6ilSbp5PSN4mY3o5hO2d/bYzqecHhywvlkwmAwZGNzg7WNNfqmT6+TsbmxwuloxGg04nR0itZCmhhsbZlMz5lOxnR7XTr9PqsrQ4bDIaPRKXt7e7z++mucjs64efMmjz/+ON1uL0aiPJhEo0THsSQB5aM3N/gYLTKqjchGQh3HdXQszIezasd5/C1NlA/AORdJmtYXTgvAO0fDkJsx5xeivgnGpGitmJyfc3p2gu7nZHmK72TYEFCicD5gp1Mm43PS1FCUM/qdHNe8s0qaSKqLEWmlW5oQ31HVtBGlo2PFOVywBBzUNh4fAtbPMNoBHkIbAbWISOPRd3EcO4/H4xyIVszqKYGUlbXLTOp9PvLkE7z03IxvPPc8ob/Cp1Z+gjztxSiCj/ffRk1p3usLoyhGOEUMIQg+CNb7GGEWEPGo4DBEhxHBN84hBTr6Rn2whOCwdYkPlvW1FXxw6GnJ89/4Amf5q6zplFqb+M5hCUowOiAGxGiM1rhQsbq+zc7lm+wenPD5O0/x+FNP8fYXvkzlfFwtNVjvUVmCrhKqaYlODElq8OhoozTTmxJPsJbgPJOzMfffeZO6OIMQEOsRHfB+SmI0kDxsQlzihxgX0a0HDVuIH83Kkt///T/kN3/rN3nr/quMj/b56CO3+MTTT3IyOuatu68xCxXntuDobExZeZxTlIXFJ4GPPPoov/RLv8Stm7c5H59T2Zq1jQ26vR6ouEY5F9C6ndDi2qqad04ZQ5qmJEnjWFHvjlnFSLHSCZ3+CiZLo0M3LHy9YLSHEDCJ4drVq3T7PbIs49KVy2zf2cYMMhQB7VISelzZ2eTq5qO8k/wJV9ZWYTYmUQrRKcOVVYbrq2zubPHkxz7KE08/zfbOVTqdXnTySnNPId5TSx1iNL5pTYjvX2xZa+PELkhTTWo0SUhIOjk2sgV0ksc197twmsj8v7gu4GJkK86llkQJcVr2ZFlCSDRJ2jyC+RN4cEz8dddbNJuMAmOEJFEYFRBJ0aaP1hXidWSVypMmgUTZGENqbBpZGA7fL0RBiZ/H1Vo9SnRkB1xdM56c4+oaO3HoLCfv9RkO1pnWOd/6zj7f+saLdIoEbI+ZDkhiGKwkbJgNkqnjrcMTjk/eorNiuHP9FsnknOPZmGk5orPeZXUtoBOFnWmmxTmKCf00oatBOQMWtIAoT6uo+EGhtXGFuV5j/vkS3398UKTtK8CjInKbSNb+a+C/ef/Dm+EQQusMnJOlecA6hBjSbl8waYmbmsvYAIK6IHE+RAMw0EYZPNJ6kIkELGghBPeAsdvCN7LAgGpkUy5+tnBsCGEe6o9GOPMolvcOkyZR4qgVUR1osbYhbXUdJSgeVFdhGoJSliXT6RSlhDzPSZIEaciZqOY8DRG9uG6UjYUQe8wHj9ZC62t3xOhaSwZ98Bftlzjx+lY6J42UcyF6o7SDxgBPlMHX0VOa5AMQoWdShqvrnJ8ccnK0z/n5mKPd+xSnZ7jpOSeH99na3uTS9av0hgNMapjVFbXz5IOMq51rlOcVp6NTzsdnFGVFmqakeYYxqjHeo8EujWSxlQr6Rg6mlI6kvCHNWhRohdIpV5Ocyfics9GI0dmYWVFR1RU+WFbX1kiNQasOWZayurrCwf4B9+7dIzWGoiiw1lJVNdPZBBsc+WCAMQnD4ZAkSTg6OmI6nfKd73yHuq65efMmKyur+BBwQWHSaEQ37gWEgLMe52uMKIyOBEead0AEQvOMWvnSonRVoRb+fYGWgIQQiY5ztvGWXkSuQ4iLgaBIswRCl6qynIyO6ecZaWJASyPZCZyenvDqV/+cYjbGJIairnGqiVYRvbDSnJuGGDnv8RCjwToOMFEKvMMHG0lE8BiJ8iNnp5jEo7REH42o5rUPkZCKQjVjQLRgTEZQhjRPSNM+u4f3mU0mrPe6XL7+CN/+9jd59bWXePKTP8LWdjLvl0haA1qbef+1RC4EGseMgaAJXnBesD6gjG5krz6+K2Kj0UFASQDVLLISMEZQCupyhjaCctAxglRnvPziG1xb3SKsPo73Ub6ENoDHBY8NoNMM6oT+cJPHnnyGP/7DL4BJeepjH+PLL7zM6eQYV48RCdjgkEZOasoa6yw6JE1EVqNV4zAgGmh1XaDxvPnGa/zBf/odLl+6xHB9DQkerXzjIIuR4HYcxXGzXKJ/2BEjCgsO0hCl+XXt+JM/+jN+9Vd/hbtvv8Xx8T4fu3mFJ29d5/TkgIP9e0yqGXUinM8qitJRlAFvFd4rbt++yc/+w5/l1u3b1LXn2tXrUfXSSO9aR6JR6r0Sq8YibGXlupEgtt/FCGFrNiqUSVAmAaI0U6nGGYbngnRcXGV9YwPRQlVXHBwe8db9XR7p3SRVLs49QaE7glpd47A+R9uEnbxHXxLyrUv019a5cv0qjz/1BM88+wlW19Yb2ZqOjlQPzgeMuqA9DV26aIVEB6sQ4ztz6bwIidYoLWQqxacZIoEERdodoJP0Pe/doqri3d0oAQQX5YjBRYVHcAiepCFVIXgSozFJIE6xF0RN5mb5AuHkge584HqtaWaUoJWKCqbgAY3oHqIMiaQEUYgKJMZhpJyTth8c4liRQDNGY5StriomkzEheIqioN/rM9jcQCWR6NqR8PVvn/LN14/Y2VzjM1tXef7VGefjY6x2iKrIcZiioC5H0J2wur7FSseQVBlGTbG+YHNrk831BBGFC8K0rAhMyHSHh
B540wygqNJpbbgfGFqCLu/6Owtke8nkvm/4QEhbCMGKyP8A/A5xnvrfQgjP/1W/EdEX6WbzPCKItM3F6BuLOuToCW8nv/kk4D3eOdpg3VwGRWsqX3gbW7HI+99IQ44WAmdVXSHWgkkacuRQ3qPidDk/rwuBhIC1NdYqfEgJQSiriqKYMpvNKIsZdV0hCBMzJYSYC+ca8rQyHDIcDpAmuiKicN42M7SKoekQCZqzLsrtaAhOY4xKI4dpIyyxrwUVZJ6HEPvNRsFAQ3pdY3x773AEgkRJQQiCKwvqqiKgSNIeNYFOmhPqKWtaGPT7lMWE89GYk6MRxydHnJydcH//PvcPdtnY2WJ1c4PuYIDJUjQWPPS6PQb9AdPplNPTEePpmGI6I80STGIiGQ4elJ4/X8KF/FNrgzEa7x22toCgkxylA1nWpdPr0+n0OB0dU0wn7B8eMDo9YWtrk52dHTq9GN1Ls5zNrS36/T4nJyccHR5g64q6rjk/O2c6mzEra7rdLv3BgPWVFQa9PsdHR0wmU1575RXu373LRz/6MS5fuYbJMpQyMQ+uyUeSNlqoHsyNaqXBsph7SZgHhdvxrJTCOTd/ngvv3gMSySCN/CP4JpLafKch+BidNEZQOqEUiYQozSAVplWFU5a6trz62mtMJyNWVgfU9TRKPFtnio/OERWicyRGnZkT6QAxN04MGkFUJDxKKTKdIdrg65rMNDKVEKOIJjEoHQlJS0JFBKM1qAQnAuKxdeD+/SN233qZRy9vY7I+/dUNTKLBSVwATRuF9ijRiGra3RD/1tkhQUfpbRBciIuqDdFIQ6I3W6mAgsaoiqQVBVoLWZ6gdfRvhFATVADrSbXhydvXuHcw5mzvLvtHmzinEZ2TpBq8UFpwKsrD894qZlyTdqE7XKMGKlvx+JNPcD4+YXRUgig6SSSvHqHKK85nJaEoiOabihFOpQnaIFqhVIJScHY24Vd//dcoq4qPPv00/X5OwHLt5m0u3XiUJMub6dS/K2dniR9GzMVjwlzR4h2UU8uXv/Q1/v2v/hr333qD85NDbl3e4PGbVzk/2mfvcI+QCOdVwdmk4qyoKGuwVsDH6NhTT3+MG7dvc3B8zPbWJXauXEJEk+TZPE1BHuIUnTeMaPxprdHGRPk3D4pt4gfNe9h+K4oL339LkxaiEgGkiYLdunWLF59/iTe+8BxD69i8tU7Z6ZKmBm/h2E5YXevR21hnY+UW2hr05W2efvYTPP7EY1y+eoU0ywDmDuJ5uxWAm9NF3fYvi41vVuIQvxEEk0CWa5JUYbzg0gSdGPqJYri6iUny7/rhqqYteA/eIsGTaCE3BpMI/X6H1ZU+WgLGKNKkRs+J7kIz30UQ5H3+tfh5ItCIkhDvCF7w0iGgEK/AOTCexDhSNUFhEUloc4zlvRf6YNEGBxr1RlkWFGVBmqbYqqLX67G2uoYoAz7B4vECZRF46e6Ywc42P/3sGrcq4Y3XRpR2Qm+4waCXs+HhKLEYcayvZVy9tEpHa7KQgBOSTsL2VpftfoIOseMmRYXWNZ0MtJKLkJa6sE8/LFxobpU8GAZf4vuIDyynLYTw28Bvf7fHiyh0492PHrRACG3xANVECKQhdAuTcmtwNVZtaBJ2fBPVUI0EIzTSBa00cxGDj97+IJHUBB/JTGwQC5EthQTFXJrWvFi+NVaJUr15tK81XpsoWFEWUUYhMCumqGYh8z6QZx16vR4gTGZTbG3pDfoMhwPyTpdOJ2tyy2LBDudbiWhss9ZxiWglX74hkBeE9YKktsStPbbtO+9d9L2K4JtoonMW56LuPFrgjUQM0FpFCZfzWO8Rk1K7ijzr0O1muHJCWeSsrm6wurXF7u4uZ+dnnE0nzN4pOT47xbzxBpcu73D9xnWGgxWU6mFMJOWDfpdOZjifdpgVBWU5oywLANI0bcZLjGSKRHlhZR3Ou+j9DCBao0SRGI2rLd45VJLRX11jMBwyGh1xsLdLVZUcHZ8wnkxYXVtldX2Nbien0+3Egg29HsNhn+PjI46PjxmI0HEwnRUUxYyqqsiynH6/z9bWFll2zt7eLqPRCS+++ALOCx957Ak63T6T6Zi6qgBPmiTR3AghSkBD1NArI/MoR2vRxPHejvMLMtca04vvQBs5ap+vKIk5jrTywDg2nbXNb2MEVhOND+/rxgRSKJMSPZI1o6M99u69w9XeICYf+ABBUCgIHvHRk64JceEJoSlEI9GREqJjBhflrEpHb6IrJRKjeoLSDhfcXFoUaD3wF+9WNEoUtavBGBKl6HX6GJ3yzr37pHiublynO1wh7xiMSlAoyrKIEesmetdKL9vIMwJKaWzTtzoxoA0WjRODFYXoJIr6pel78YRGguy8A00jqdV4H2VAyjtAYcTw5J1HOD31fOPseV588RWe/cynkO1NKlujMARSPAbro5RZmS6OApXkiFacjU65c+cWb77+EuVkhEhG3h2gTULtPYNOh0lZ4VzAuhDlzVVNWVvOq5K01yXJuoTgURgOT075td/49/zH3/1dbt68QbeT88yzz/Lzv3iFtTRbGHvv7+Ff4ocHDsE15EEB1nq+/JWv86v/17/l1Rdf4Hj3Ltd2Nnn69g3Ojg8Y7e9Riccq4XQ2ZeYDs7LGuQSjM9a3LmGShE9/+kdZXV2nKCrybhedJpHgtNF4iUVD/F9l6TVreZIk7+MkaNfl1ohtbYN3k8FFFUwzfgVu3LzOz/79f8DXfucr/Pb/+ZusPrXGY598hkfvPIMPCWflGZ/5/CfZvvk0a70nePuVXdafvM7n/t7n6HRzCO36GBU4c5kngJ8RpGB0cESWd+kMVlGSoMQ080PrMm6oW5NGkSZCp5OR54baCb6Tk3RTNgY9rt+4TdbtfdfPVhEiMZaAiG/m9Sa6p4XBoE+x0o88N3i0qgELpPPIX3um+UL/UCwUyQogDlId701JQHuHSEKQHBssuCjXDN7hXYXUYyQ0x4QPdj5pnZzRsd/eYjQMXVVRldU8tSDNcvJOh+5gOHdGNh45lFZMS8f93YJRUfHER65wbUchr82oixF5TzEcZnSNRllPNS6YVQVXh+tc2RjQEYXxGYGETq/H1kqfvhYSL9Q1UbFkBKMCidF4D6KhDA4lgfQD7aV399lfHcRoIW3OZjsU3s39l/jA8AMrRPJetC63+JL5pkDIPDGDqEF2/sEiInHwXMgwQojRh0ixpJnjW99ikwcVYp5XnGikWUyay0tj9NMYvdJ6Oxp5oTEYYwhao1wUSkhznagO8wRxeGIhDaXj9ay1WO8wxqC1wtoZBOh0uqytreN94PT0FNtMInmekRjTkNCmypJ3sdiG0lhrmc1m6CYSocSAitW/fDPRRMPUR6LaEDpr7dxQDd6jtCYxOubOqVh9riWhMRTehng8wdV4W2FVgmiFtRXUFSbNSJIM8RVFVaHQpJ0BXgKdLOX6oE9RFIxOTzgfnbG/f0ivk1NNZpzc3+fK5ctsXbrJYDVWtlMqygX7nZyVfo/aWiazaeyfssJrSLNYtSu46GE1oqjxsZCnVvOKnIkxMQ+wITai
o+dvc+sSg8GQ0fExJ6MTjkdnlNZycnrO6sqQzc118ixDKc3q6jqdTof+YMDR4SHnkyk6MUwmE2bTgqoqKcuCPO/S7XbY2NjAOc/Z6SkvvPACs7Lk1u3bDFeGmMTgrJ0Xg9HKUNW2kXo2Epc5IZ2PygcWuChvvChE0pLxxeqVc5ms8zgcah7BUw3n8hdShyYa2zoelI4RJiUGCYqUQCinnBzuMrCWtNvBOkh00hRFaaqHKsEo1eQaxnbG/ErmUUHlY4EcrzTaabw32GCwrkaUwzqLC428snGQzO8PLsatKEKowXkSJXSznDzvMaksZAlJr48yoIiFAmJl1hid1LqVSl4Q3ZYYqsasNdogJsFJghVDZ7BG6QOpyajqGT4BvEd8jKq7YAlJlHgXsxm2KrG2xlUVxhi6nVVuXbtOOYXTd0442rvPvXsn9LsruMSTJR2CpNggpNh7HtcAACAASURBVEpwFkRSTNJDmQxtDAfH+3SGm1y+tMPx3l1cLZgQyLRipd9hZ2MdbQxKJ9QuYCtLUVQUteW1owMORmdYL5gkw2tD5WacjmeUNtA/KzDZGsqsLDgNWBK1vzMQaoTd0YivfOWrKC8Up1P+39/4D7z90kvMjo+4trXJxx97FG0tByfHTOqSSsG4rhlXNbPK4kOU3P7Ip3+U2488zhtvvclnPvdZ0ixhOpuxtrZBkqegGuHd3FoOD6hRFprVBM2iysQ01ZOb6fA9xz5YOTh+KIT3HAcxyiYhEBXImjuP3WElrPD6Wy/zrTe+ym/86/+Dzc0bTMl58/Vv8RO/8FH08DrPPP5TDDfeIrvUIevlhNBIvAONFDoqPry3BAnMzvd59fkv8dxz3+bTn/0JHv/EZ6IjS6Rx9i406mLSJUmFvGPIOxmTwjJYXeXOk4/xicce4+pg/YHKke369T6P9qJDGuWANI4lJRIrJ+cxiiciaK3QugIshIYSSKM6aNv7sM5f6PXWpgo+kDbqB6MUxvsoPU9yUCVKHJmOuemagLIFgpsrRz5oxOEX+87ZGusqfJPbmyQpWZ6jjIl9tkhW2wVZRfuunAUOT2esbQ/Z3pSooqos59NzkrWM1Z6mg4DXOG8IWc7W2gpbnZTUg68V00qhJGEtzRgqhXFQ1FCWnkQCXa1JVbRHg5qLOD/4TmqwOL4ekMVLXMdbG3rJzH6w+JCQtoBzNa1xOi8UIGHBUxKHb1OqIEbJGmMuyilaiUTrFVZR5jSPOizIGtqoHBdGiTSedkHNJxOldVwSmnC6Uqop4x/zx2JumaAu5uEoG2sMQWtrUh1z2pIkQYWYszOdTiiKAqU0WRZz1oqixDbRj1bXH4KjmJU452IxkzxWn6xtzWxWMp1MyJMMIwqd6iYn5yIC05Iwz0XOn4jMCV9wjizNUHkWP28KPrR3YxKNuEggbOWj9IGAV0JRl5TjEd28SydRaKPB14gPKJ2CNigDSZriipI8ydju9llfr1hb2+Bwb4+jgyNmp2eU4wmHByOG61tsbG6wvbVN3umQqJhb1836bK6tM15ZZX//gMOTU8bjMYN+n16niwA1Ho3C6KRJLoqEXxqZnszHiCI0pNVkXdY2DZ3egLPTEScnh8ymFVVlGU8mDHs9tne26HY6ceFLMrqdHkfHx5yPJ/PnNJvOKIqCoiio6z6dTpfLly81ZabPeO65b3JwuMejjz3K5vYWK6uriA9UZRGjpy7M88PaBaaNCLU+iRi5lcbD/GC+0QNv0mKeW2jyOVvS10aOm5POiZq6KC08H7s+zGVN/U7G7etXeOfFFzk9OWE9S9EmYzKeEUIg0Rpva4KzMa+hJY6impy9ZksBr0jFQIj5W/iE4Ax1MNTeAzZKfX0kb+14ja+7x7tYiAQTMDpuuyDWYYKlm2vyvEfayTF5Rt7rkyqQIGiBpNNBBMqyjo6P5tl5315HzautimpInDZ40UyqQNpbRSea8uweSdLFScD6EnGe4BUuWLzT+BAoijLKQl1sc+0DHk2eJty+cpnjp5/k5No1di5dZ6pSkDJWEfXRGWGSQFk7QlBonYIorLMcjw45v7fL1e0dnvnoRzk/m3F0dIJONCv9LqLVnLQJsZiKiEZMQrbaQ7/2JgdH52gjaJORJilpbkjTjNppbMjY3L5Otzd4wEhs8wqX+GGHcHo+5Vd+5de599Y7dLTm5P593HjMpdUhz3z0KZLg2d+9z9TWTAmclQWzEMviR7kb/PRP/RQ//wu/yKR2dNb6bF++hFKB1aBI0rTZdqLF4rYnsQ0PaRbKmKaQEvN86wV+0/5zHmlb/CxaBnrhGjx4TSUxupMqtp7YZO32GjeevsZj3/gGL7/4Cr/3pS/Q2YTty5cI6zcYm8ClR2+iuk3V3zbw0l7T2sbBVPGdF/6Cv3zui7z24lcRMfyoALZGkvyBiFT7l9A4gaW5b2M0SaLJUDz22GN88jM/xlZvQN7mqTc//xvlljbzfLstUaw7HLckCY1zTSgAyzxYKYs5gQuNfiAaJg/+LRCjdviYc4zCANoYgjYxjz5YEqNilc8gaGfjGvPX38X3BCHEKskhVNGJ5m1DYtOmyJnM7yM0zoLFew04CML5cc3u4THbt6+xMQRqOD0tOJ6W5Jd6DBLBBKCE89JSp4aN1SErWjAWphbOa0eSp2x2E3pKoRxYF5hNJwxSw2avRw4oiVFQhW/ykQ0PfW++5311McbmCgvABh+3qgrzOOxSefEDxIeCtAWi9z16eRarCsWJN+BjDgpRjiWiLhTsrQSQdvJuJ8YYMfPeN8UpGo9BbVE6ErK4X1jcL21RcjkP+rUSwkZGKY2e3i9sLeB9JGkKuUiibqIVFyeLUUMRoapqZtMC5xyDXp8sy6it5Xwy5nwyptPpxGIPRRH3DmuidN570iylOxjgBGazGYKQGIN3cV8pRNBpSprl1K5mPD6nLGcx/J8kdPIO3U4nFkApymYPOEVZVvjGgyiAa7ZGsNZS1zXW1lRlibMVIbhY4rkuOHz7dcLBIVtXrrGyMqTbycmSHBs0SsUKXcF78qxL8FGuZUwHo3M2NnY4Oz7g+GCPWVUyO7jP0ckB+7sD9jc3uXTpClvbO2g6VNbibU1uEq5s77C2vsnB0RHT8ZjR7ITEGLI0i5EfnWC9ixGiVBP1B/7iGQsoSSKZV5osyTBph053wOrqKocHB0wmE6aTKZPxjOmsYGN9jdXVIb1enyzL6XS6jCcT9vf243NXmqqqKIuKyfgcW9d0uz0219dZWVlh//CQ3ft3OT074er16zz2+ONsbW3RHwwYj8fUzqJFkTTFMB70UDP/Mzou9EW0OPh5CfvFypGLZE4p0xQAuSCG0SCP7o+5YiQElGm3i2gK9aCItQQdt69d5uyjT/GXX/k2+3v79IdrsUw0QnCu2RfPzh0pF3vheUx0WCJBMI0FFCufaoQMS0LlY8QsShgvomvtu7Yo+VQiaLGIs6jKY3xJpgNaGZAEKxZJE1wlUQJjLRL0fCuHoohSyTy/yBm5WLBipE10NDSCStg7PuNr336RH/+Jz1PPTqlsgVMeTR3b2P7nLiRZdVV
zPp5QVTWdjsajyDLNxqDDIzevMUVx6eoNXpkc4nAEX1KUU7RRaFyzNYmh2+mjVUKWp2xurPHam9/i5pUdbt+8Qe0Ck7JiNDomSxO2t7dJs5yqtkzGMyaTgrpyoIRPPvUIlzfW+Oo3XqCwwuUbN+kMe0gilJXj/KxG6Q6ra1uk6YdiWVjiewwNlKcV48NzJicjahWoyjGp8Tz98SdQibB3/4DTcsZJWXJelRTBxYqGQdHNcz77qc/wT/7xP2br6hWS4ZBrj97EJLHaqFZtwa4mp1MuMs4kXNCr9yAw3z/SmLjX58OElO0eifMftWeMbK49aCGE4xd+7Ill38Fkhp0b19ncucInP3vGcOcKf/yH/wF/MOPlt1/gylMr3Lh8nSyN+ygaFs4ZArau2Nt9ky9/6Qt887mvs3v/TUJdYJKcaSmIdMBH4tK2SR6wBQLtbqNJqtFGMF64fuMGg94qdXDkoltq912hrWMdyaCKhZ+MijaVinOZSjN0kuKdxboJMdLWnsFdRNkW+vc9TyG0z7PpE+8wBIwYlBhSrcjzFJ0aHBYlsSiKq6PjVz9QoaXJLWtVPR8AB4g5hwpRSSz0Jh6RmPsf2rEjQqxO/iBBDQS8KKhheqSYWcXtyymrWnATzxuHM45KYS3p0BdFGiBYz7lzqJUul7dTekJMAwgwU55uP2MliZE6EahtQFGz2euwrjVdKgwjcFOMpCBDkOz7Et16WNEbL2DnDv+AJpLTJV/7weHDsTo3OWBxHKjGIIU56ZG2VHlMrIcLQnXhD2i9bu301UYmFi/TGH9NpaP22q08N0SWdxGtaHXNDfmTIEgbJQjtshKJnxc/z79p97KwrkbZ2DLVFBgoZpGMJUlCr9dDa01RFJRl2cgic3wITKZTCJ40Tel0OsxmM8bnY3xDzKy16CDYOhKqsqhJshQdAtZ6Sls2UrDYX5PJhGJWxIiFtfPonfOesiw5n81IsoQ8y6jrirIo8M7irMU1pM2WxbzinSSK3dfv8ubuASvrm1y7cYOrV65y6fJ1usMMX4MRyBrSELQ0xRkCtUrA12xducrqxhrj0xEnR7ucjY6pZhNsXTAenXLvnXe4ceM2OztXUCYluChbSxAube9QDAaMjk6YTaYUtiDrdBrPaCurCyQiTV5BQ+ZF4ubowTbFWgJKolFslJClOaenI0ajY6qq5PxswmQ8YTbbYHsrGrRpmrGiNVmasr6+xu7uHgcHB+RZJMTT6ZTR6IR+r0+n22Fna53JbMbJ2Rmvv/4qZ+Nzbt+6zUceeYTaOcqqitX/YCG/SuaE5aJapp7LE9p911qp63zTcHggKhILgGh8E6NuScV8q4wQ940LIW4OH8TjrMIyz4VGgqebJDx25w5prfj2y69wcnrGWn+Dbt4hVHER987i6hrn7PwaEB0rSiJpk2aD9rivohB8Qu01tQsYomQ05lS6B3L0vPMLhDN6cMWXaJeQKE9qBBUU2iSgY56g1DFX0HmLxLo0iAjT6ZR+v09ZRsdFC60VWsVtEpQGlZhYJAXNG+/c59rdfR69cpnR8S6CQ4UqHifS7C0YJafOBWazgqPDI958801uP3IDrw258nQ6hsuXtzgNiiCevYP7HI3vUk/O2Nj0WFehkyYXsMnxydKUbp5x5/YNXnjpZfCO/b37jKYFVoSNzTVcXbF7uMudO4+wurlKlnYQDKPTc46ODnHVhJvbq2TPPkPhE3Zu3KSz0mNczri3e0A5O0CCoSptO5HOISILZuZ7zWYWDn+YB325vn+wWHwi8tAvmlWyhHsv3yWxiq5OydJAttLj+pUdVrdWuf/m25wXY87KGaOqZGIt3giVdQzyHp969kf4hf/qH7G2MsTbml6/y/rVraZacqQlrlF7hL+BiirmfntMkrC2tk7a6TwYUFtw4Lbkp13r21lqTglbbjW/eR+9KVrinzIjVApRayhj6G+t8/f+4c+xe/w2L796l+cPZ7y1d07vJz/PtVuP00bwQsNW6umEL33hD/nTL/wRz7/wDYwRzs7HlGXg8pUN0s4mmD5t4bQwJ2xtCf6FaKAIRiuM0mgDZ+MzCl8xUDl1WUFwmCydy93bvnpYhKPlHvHEClGaQNyjM/4XcE2BKCHgXUUkag/273f5xLiIUMW5PTGRFBknJInGpAZtFEbHed/T7C0qSVyjH+yKvxXCu/6cd0MbIVUS+0Sa6rjNPnPNgkK7xZQ09kF7soDEWGQF4xPIOn02hgk5MDmreHVvwsTnXEp75GiUJ6p0bAm9nGG/KRbnY/S4VJa0o8kEDI4QDCaBO49cZqXb5fIwJauO8Wdv4KoRyfAqknUgiev0/O1ugxThwX9/V29c2+nzQ98dSb3Y9zWEwKwsObUlAVjp9RGJEVTdrO1/k0v/cOD9BuXfxIHS/uLdqoB3n+Nhbpn3iL3fgw8HaWsQ2sktxMRlUY1xSXyxovQwlkBt9yQLIk3RkQC+7YQLiZiIv3gxG6MwhJioHw9oZBVNnltMEm433AZBNbp4wbvQVF1sCpM0EUEXHDKPXEQj0zlHrqNe3NpoCFXOU8wKQoAkSdFJgvW+KbZRkiUpiUmwtcU7T5IkkXgkhk6nh/dgqxrvPLYomZUVhY6eyX5/SK/Xo/aeWTGjdpZev0eWx724RqMRs9mMqq7xzlHZGjdxFNMZ1jmyXjfmOtUWX1h0gDzvYIOjKGfUrm68VnE1dt6hEoXHs7u/y8nxCa+/9BobWztcu3mHS9dusrGyAlowiSHJckQprLfxeTQy08Qk9JOU4doK5fiMo8Mjjo6OOT45pdftMxqdsrnxDjs7l9jZ2SHvdDE6xSMMul3Wh6tMphP29w+YlQXBKdJOHvMBvUPpWE1SNwtoQHC1a7y6Cd4FvHUEEUzWQbRhI81YW9/g9PSE48MDprMZe/uHnJ6eMxj02NraIOvk5N0uaZaTphnDfp+D/QOKIpZxLqsqVgitSrIsZWUwIO/kHI9O2L/3DuPRCWcnR9y8eYvBYBj3z3GxYIf3YR6lbCPFWgtKfJMbHQjB0hYmcd4hVqG0arajaDfQDfOoGd4RfENG0M3my/FdESD4aGSo1pPqo0wwhEBwHuNjJbInHvsIq1ubfOu5F3jnzXv0syG9YR/ny0gArQcnMQrXSiSJ73LQNRaLkIDT8V0LU7SvmLlAkAQLWOew3s/fe2/B2QChzVV0UTjkNUF5El3QTS1GKfAJvo5E0RMo7Qxra8TFSpqn4zNef+M1rly6xMb6erTlEFzwZCpDh4Ailr9XGpTydIYbrF36CH/x3AtsDn+E3somk9P7cb5p2hRckycbojIgzRLcrOSVl1+hO0zZ2NmmbPZfHGz0kKAopiX33nqVL3zpD9gKwqUrT1M7yJOMLHOE8wpchUhJbVKuX7vN9uUb0BngR2dMzkeUCGenZ2ijAMtbu/dJOkO2eh1uXLnK5Rs32L78OJOjIw4OTlDJAJUP2bh6jdWtTd7eu8f+0TFCRVWe8OYbL3I8ukKWJ4BCSdyjMUnSC0fXPIIhCzbfxfKzqKhqoyMPUrslvmcIF3/E97nZfypEQiMhEp
rgPS/8xUt8+Yt/Rr+TM+tkXLm8wdbGgPW1Pod7+4REUSo4txWTsmJaV1ijUabDnSee4vN//yfJBh3EKLJughigWZ9jgKvdHzQ2KEZ92vapOeWCNorUFOdQGpV1UHlKVwzBaFxzrodDaN0Iiyb7g/ZoACy2jtJjTUYxmXG49xreGa7dGqKSaKxnq33S7R1evf9t7u1N+dTVLtXJKeamR0ssJtZUUuJsdMx//K3f4o03X6aqC85txWg8AUm5cfMRdi5fI4RmuxIig70o0hAu8pS8R1wV89TR+HrK3Tff4tEnnmRvdJeD1+/xyCOPs769tRDkawnbu8y6cLGlS1DgtUFEk2iFwYCYeQVEhTTKB00kL+25FitxLjy4dw21NsD2IEkUtBFEaWprcKJQ4tDao4whoQ8kJLqP6G587i1R/B6FbFpKP+efi+2V+Tcg0fGOxK14VNNzF3NWoCYWHdaAD0IVYFKWDDqGoRZMDbu757x5MGHmA52OIc1i381czdRbcmPIdbRNxYOfWvJqxtpqSj+xaEkRFVhbDXz22VUy7VGz+0y//kUmr3wD3xU2P/VZkmtDhBxCJ7ZOLtb1xbuMUcMwv8f41cPM//Cu79TFx40z2Nqaophx7/47vHX3LndHJwzX1/jEM59ge22T0BA2s3j6uX74h3WODw/5e9tXf11qQLiY54LM341AdHi3zs25ArD5lW5shYWNzN7TkofhQ0HafAiU1pIo3VS6a27NRVIWJFa98t6Tqihf8j4gSsWy/42EkRBzmOJ+SR6h2YS2mTwRFYt1hGgMhmb2EcBo3WxkGEuyexdQSjchaktbyjw4BU5FYzJALR6v4rWCrXGupq4tM6VJOynaJBilCd5TTAusdeTdLr1+H2lIG9qglY6kTZsofRSF9+Csw7kSbRICGu0hEaGoa+piBkmCzjK0jpGO4D1YSydLSZOk2QssNIYpFFVFVZTMxhMS0XTSjMFwgDdRsxxmJaqs475iKlb2c0lcSAOgRZOYmAvkCdTeEyShrAK+HjM9G3O8e5f9t1/myrUbbGxdYm1tnU6vi0kSRBmMEXzQUbZhorwQm9FN++jeGt31c85OjhmdHHO6e5f9w/vsH7zD/v4229s7bG5dJ+/+f+y995MlWXbf97km7TPlXbvp7tnx67ALgICAJUUsAMFGKEL6EwWJgoBQUASChEhYAgsQJLGLHdvT3pd/9fzLzGv0w735qnrczmKXoQG4N6amqsvky5d5895zzvd7vt8eKEnVzMnKjGs3r7KoawbDIaPxGGNrkkSDysK1tBaBINUJVliMD6IlUipEqrA+qGYqnRJ/wOraBmXZYTA44fT4mNl0zqKqWNQN61sbrK6skGSaVdWnSBMyrTg+PmFR1aG3S8nYHzcnNYZ+v8ulnS3G4wmj0Yh7t28xPhtw8+bLXL58lazI0UmCMQ3ONnhiI3n0b8MbWu0uT4NUQagmSOAH8ZAQBUWbB++QviHxAA4pfNykgo2GtRYhoz+g9AgX5rjxoX9RCA9CoqSiqwtE11HXNTc6JRv9Pt8rv8/9e4+YVQbLBrPphKZeBaswDRgLVihqmWKlw6o5tWqQZHibkiSepj5jNbU8sYo67eCUwvnQVxmooBpQWBuFI6PwjPQpUhfYZICfn1EmFXkqSVVJITo4VWE7ioWdRQSsVXv0vHfrHc4Gh/zsN76JBPKySy0AnSAFaG/IlEcrR+IW1C5F9V6iEI/4zn/9gH/+C29RZhJXBWXKxjVoKck8dLOEpAg0nKS2uMbywYfv82UN62vhdYSEUoExFWvKkc8co0YwnzYo2QkIpJiihKNIHElaMU+6rKqcrLfLNCnJ5HMy6ZkvDNXMM28MQoyp8g5mnpIsKjreUJYpIrtCZ32HYmWPO0+eYXVOJRzpSg95loH0aF1TLZ7xJ3/8u9x99p9YXVsjSzoUeY9Le1f5H37+F+j3epGKHotaSJyQqI9sai1dVOBR/mJq9+ML0n4yPmFz9x4vgk+XxyOcwrsE7yT37z7lX/3u7/H9D9+m3y1ZXe/xla++SbdIefTgHouq4eRsyGg+gyQBr9BSszCelbVdrn7pTUSv4MxOuX7pdda3N5jWnqaRZBkgWpvmj55SVG4WUXDWA0TmACxpk6Td8IjGY+iPzRPBcg5d+E5Y50xA9X3ouTPGMBkdobRlPhmTl326/Q7S99jceRmts1Bw1AbpXDDylppRI5D5Nj/zC7/G9Utd8iT4wflYPBbOY5oG39RIA6nImdceJVLSUuPcnKaegTMgdLAsajOCFplAxOTWoe0YJT2NSxC2ws0XPL/9Pvv33qWZplzau4IQOy+820+6+y3C6IQDDU6naCVJAWE8zisWTYWUUKQpzjZgCyCBJQE0XRar22v9SWTWkASJc6KREIE+pywq01j6qKxEC4ukxosU4Xtoq+mmm+h8C4QONgniBwXDP8wIFgvSx5CP80QscCdCHJgioJmDaHBJgqQLLljA4AMdw+KZI8hRWARzCZUY00PTNaGo+uRoQoNGScNqT5MlCoNj5B2V82zqlAwwyiMT0IuKV3sJ23spZbnAoVHeoJshnelj5oN7LN57m+SD26xPKuqtHuLmNlzbIiTXezhktH6yKPT5rcKBkDH8J6KEPqKgAgg2WIGZFtdgEeHwyCZrGsNweMbp6QmPHz7g6OiA2XxMYywLJN7BgwePgw5Ap4sgeg4u88R/CpCbv/DBhc/t2vNpf3Pxc/zKtzOy7ZmQ4BUWYlOFIMcjnQne0g6E1MHu6wfskV+IpG0+n3Pv9h3WVtfod7rkaR7U26QMSJqI5tBRPEGo6NWkNE1jox9ZCMAD7m7PudIQr6J4sQQce8xaMYYINxBk/cNjjg+BbzCDDPQpIc/l00MFJ/yetQ2uafA29L5572maiizT1DFRwnvyMqfslCRJio1cZ6k0KklpoqSycYLZdIKQkrIo8B4W80WgU+SaytYYHJ1+F6kU80WFG53R7a0gpAw/b0CZZOnXVtU1TZQ5dxFlUkJinWW+mJMkBZJAz3LOkCUZXilsHXqCTGNxNgRhQTwrSPhaa7AIJBoTzZzrasp4PODho4esrW9z5eo1tnd22NjepNdfJclStI73F9BphpMK6T1pktMpe2xubDGbjNh/9oyTkyNOz8aMx1OOTwas7p+ws3uF3b0dsqIk9CooiqIgy3M6nQ7jyZj5fM64mZDoJPQ1+tZTLFQcW1PulkfhncMSRCiUTBCJJs1Ssjyl1+syPDvj7GzA9OCI8XzOZG3C5toK3bIgSRJ2d3fp91c4Oj5BDQZM5nO0tujG0DQ1o9GQLEvpdTuURc5gcMbzZ8+YTebM5zV7Vy6zvrGG0jokbl6QKB1Pz0VRlfAhlaIxBlBoJdvCepjukcLrcQgnsC6I/LR9ne1CvRQleWEEERGiOiVCgBRBWr8sSZKEpmnYWF3l53/uZ9lYW+fv/u4dHjy4T5Zn5J2ctbVeVHkMC5mL9BDnLS4iAV60dNWAnBsKHHnowUNEn8BQnGmtHAQhIFOEar61Hq/AexMSssi5UdG/yRJ6TYk00JZKa
pqGv/nr77C7vsGNG68AkqTbpVXcbIs5LfHKesFgNGd7dZVH9+5y7/5DXtkJbADrPEImwaKkWiClYHt7k/1HY8bDM+aLTaoR3L97j/SVV+l0c5QOQVu3LLl59So3r73EvTuPefL4MXVd0VHB+9B7g/SWRHlQmiTNyJKURkqUgE6n5Nor1ymzNUbzmsHJfd5+ts/DJ/tsb/e4vNIjTTNG4ymn1ZS1tU3WtzYYVg3zesbjJw+p6wU6kVgbhAlGpwPe/ZtjQJCokl53jRs3XuWnX/06Iu0i9HkAioxqevjzNVYQFVBbtTG7nFcf9YD6yfjRx3JJiHWJ0PetEMZjZRD2OH5ywO/+X7/PX/2Xv2YwOeHN118l62QsmorT0yNOzkYMBkPGswWV9dSNRWUpdt5Qdrq8+dbX+ObP/DzXb2wim5qiu4LIUlLhqC4y7D7HebZL0AsJiI979zIv8ed/8bEDi4/96yL6ZBrL44cPefTgFm99+RU6/T5Z0UcoRVoq0AFhCihkmJvWOYRQPH+yT775Bt31dfJOAiLEAReTl7ZHKtFBpEhri/Kestvl+HTIaDxh99K5FcvF998mQuHJcCjvQq+v0CQ6IVEZTV2Hgp3IQqxxHld/jhF7t1shsljut5FVlKUZvW6PuZiiVbRzac2lRRuYnqPonzjEhbsTMwQvIU0UmZOIOgSVWjqEN3ghKHurXLn8JV796uusb18Cof6bxPYffljX+AAAIABJREFUD61bGa5I83fBnobBPubkLou1Dp2db4DotikfGEmqLFo0KBv8YOsFuBoqIajxHJzUyLzL1csFTWXYUn26NjARKhOUwXtZQ5I4vG5ohKS87Hjz+jWSdc9CDUn8HHV2jH/vXfyd77E4+BDRjNCzEIvZhUEMniEWT/CFBZHj6UdkxgD6woP30STbf+y5kEqE/TgCHNYqrDOMJ2POhkccHDzh4cP7jMYT6oUhTUqKrIfMJEIYvPVMp9Ogf9BpX+Wf2vikQtHnGRcSNheKyuEptyDcEoBCKJTQCBHSLtt4BKHtZzGrOTo95cPb9zg8PPrMV/tCJG3WNDx++Ijj5wd0yw7dbpciLynKkrIs0VkWEziPU+eJq1IKqQTOxAdTyhh8E6sQ8lMh4rCetX105/5VIuj2gwgSsc4FtSMZ/ay0BiVD0CkhBLdtw68gCiWE6rJpKiYTGwNAuexh88JTR/EGJyFRmrzsMRmPmQ5HKCGpG4MxBu8IiJH3FEUBUlBbG5CxVKN0gnCGSb1ANTlpntPgGI3OmM6noboiJV4EpSoI8HeWJXSKgmZRM5qMKLQl1RrnDbWpMFOHdhnWBNESUzV4E/uKcFhTUdcLmmaB9QZJMA9XeJSWNN5RTyfM5xVHR4d0Oh2uXL3KlWtX2b10mW6vT1rkIEAJHaT5CT16HkmS5PRXNJ1On529y5weH3N8fMT+8Rmj6YLD40P2D7a5fPkKO7uX6K+sIlQIotf6K/R6XSbTKaPxmMV8zmJRo6RGpGopGCNEq7AFoHBe4p3hvMcyJA1eSrr9PmW3Q94pOT46YTpdsJjuc3R4xO7WBrvbm2RJQl4WrG2s0e11OR4MwusvKmZzS1U1ceGr6HZ77Ozs0usuGA4nvPP2uxyfnnL5ymWuXbtKlmbM6xmLpiLVSVD8EkEcpK0baq0iHC9AOKTUMT+JMiJCxmTnggrZ0mQ9Piuw9BvEh41etJVx5+Kmp17w+AvegJ5Op+SrX/0aWmd897vf571336aqp7zx1uvkhUYpAvrdbipChHhfOYTyCC0QTiOSDsgaTxKSZXFhAxI+ChOEZ7JtJrcuJICNsCyqOoRWUgazexnM4b3xYCGROvhEOUeqNVcuXeb2e+/x13/1HVZ766xvaUgakiTDqrDFKylRnKvJPn72jH4lWN3cZDqrqBaaLJGkZcl0tghrQwZJbdBKILxhMh5wePicnc4Ox4cDeuU+V69dJk0VDY5EJ7x880vMf77BVN/hcP85p8cHrG5cQ8W+Wy09mQ5ritaSVAeRB6lCYjs8G2I6GisS0jxnc3ObuwdPGI1nLBaWIl+hzjMePrzNaDYn6fR4enyCT3J60YuxWsyZjMZ4UwehlsaiVUqSJui8JOt00WUGWuBkDIGEDNnZkrsVbxcXEjki26Gdb/+Yi7BfwLEsbrf5jRcIr0F4HA7hJQf7h/xv/+fv8P/+6b9nthizd2mHb33rF/nzP/1jbt+9S6pC3+NoPGM8ngOSNOvRNGO8lmzt7fDaG2/wpS+9yo2XtpFNQ94JiGuaCnSLmH3aOcbPEk9r3xNi/fYnF/ps27fj4t6sPm+SHzsuBehUsb23S7ebsbG3EX07ZbhGMpxRS1iSXod/OUev7PHazVc59CVWeyyfXGLwPlRzlNZImaCSoJjsRIet3eusbe6EBMhHImQMqts1MJAe4g1z4Tp45zHWoZKURWWoY+IcWjfaG/xiEvgJl2CZ47bKkW1Ar5QOSokqIU1zrDAhpggH/cxDhiN8fIgLX0il0EqQCE+uBYWWZEqwstJj86e+xhs3Xmbvyi69jT5euU842o8+lonZR07YE6ad8iFp82cjZv/5PzN78JfIr9xErN7E5znBkDxFOBHUMFUNLg97pnHMpWOA43unjtt3D0lMQeNynJGsdnISIXA1jEcNWZ6xupbjhMCiUNKhVw1r/gxbzfHmAHN0n+a928jvv4c8PWRVz0HVeFeDaBBujqiPoHmCy+cgu4QJo+NcimZuLzx97TyJPAfvggeviPMwUvVm8ylP95/z8OFjnjx9xtnZGVmW0e10EKJAp56sKEizFIfHWYF1nrppgHN7+39ay7n4lK8/6d8Xh1/+2EPQECBKvPnoYUtEOkQogjsPjXHUkzEnz5/x/PkBH354h/dv3eHWnXtMxtPPPNMvRNKGB4xhXhuq6YzTkxO0TsjSjCzP6XZ69FdX6K/0yHolWuvQXGvtEva+uPgEHn+LrH0c8lzyTeMFdzbAx0Gzw2GtAWuAYPItCZVjb4n4TFAnDIlh9LuKLxHWS4XSCmMbvHBkaYaSwRDY4XGNQEYYVChJ42wwFE9SdAymEyHRiQuiGQRvFSFCVVBGFKQ2BkfYQDKpqI2BpkFoRZqnmLoO/VFRClwrjfDB/Ni5oA4ZyuKwmM+wqUZaH5Qnx3P8NCR7SilSpamExFsT+tqw4cPZKFkP3gcDVetDcC6lAG+p5lMW8ymz6ZgnTx6xvbPD7t4lrlx7iZXVFcqiRPkUoYOxqncqqG9KTZJK8rxHt7fO1t41BqcnnB495fjokNHojMHZKc+fP2dza5udnUtsbu/EJEaxurpK0e3Q1A1HB4cMT4c4YymKgHQGJElGTzAXXlOopQm1jabQOs1jtaRhbWOLXn+dk+NTjg72mU4qnplDRsMhW5sb9HtdiqIIlgV5TtntMBwOUaNAQ6zrYO0wm80o8g6dTo8sKzkdDHn27Dmng1POBmdcvnyZbtkhSxMEkqY2WAFpkqB18B7TSoXZGPve2nkul8IlIGMC57wPi/bF4FqeS/u2CLJwvCAAEvzNotJq/J7WGpQI52Qt
r7/xGp1elw8//BDvGo6P9tna2VjSM9ptpU3apASnHD6YuyFUgRMN3kq8k0tT2PaRDo3zLX1VRbpvqPB6IWmsR6qEJE2RKjTCYwR1E3AHZ4lJqSNVCZsbG1y9dJn79+/xnb/8K/75v/w2KzrD5WFxlUKRqkBVxhN89LRmOB2zs3WNsiuQskZqRX99C6umzBfjWPTR4CypFqyulMxmY06OFDurlzk6OKVTdNja3kCpcF4ba5t8+a23qCvDs6cHZElI+IRUKAWZcnR08L9DQK4FMtWYLCPNUhZ1hfMTKisYDU+oREKnu4r2c+oGRuOKlbVNnjw/YP7gMbvXXuL+4yeknRW+VJQURUGv06Xb7TE6PUHKhCwrQWhmxtJXms76OqqT4bRodXCX9yf0C7Uz6pOX9mWg/pPxYx6hHcBHpEQ4EQSBlAAlGByc8vu/9/v8yXf+gtlixGq/w2/8T7+CIBQv9vf3wUK1qPBWIETGm6+9Rd00vH/7PZoK+qtrrK2skyYFvW6XhDZZDEqMcrmvfoaj1DKu8fhIDoIgRuFcsPFQOgnn4T1ZXsR97/OO2JkkQj5UlB063TK2O/glstO23Sx7zFxr7yNRaG5ee5n5cIpMgnhREjb+F9+KCwGxSjRCJegkQXmNUD0uX3uF/tomxrkA6NH2c8oLbJ+YDPqghK0QKCHJspw0zZnOKxon0VmBuiCS9APHRy5+a/djI9XfC0nVNDSmQedqKVAWznIZEH3i+OiPXvg1KdBpQpYnqIUhlZ5UeNa6Xa594xJb/Q2KJIvgvCM0QsqPHuVHGoLze3sxfXHxZgtBVGMR2JMJ5u5j8gcPkD2FePMYka+FeK/xoKOYGR4vPcIJjIOh89w5HvD33znF14avX3+V2eKMLG9IOhU+tahGsret+KUr18kvl+R4lBGIyQnc/y7V6TPM9Aw33sdPnpCfDZGLCT5b0PgZWgmUq8DOEN0eYm2OF09xLJBcQtAJdE40F+PZ0CmpYhHNhtahdl8HGtMwHJ5xcnzM8fExB4fPeX74lMZZpEjZ2tkjz7pMJnOSLCcVofe9cnVQLhfJUshGKx2us3/xDn5Wgv+FHJ8KE37KPvaxb/tP+DrMGxNBIyUkWoY1w1vHZDph/+iYx0+f8fjJU+6+/11uv/f3nJ4OGIzG1I3FxALOZ40vTNImWml+Ed6krWsq62iqmmo2p1oEw1qltuiv5CgpMM7SelmF6l1s0BQXKlu+Terc+YtxnrBBkDX1Ini7eecwTY1rgry9EATqU2v2iw39SpE5JlzbAC1w1sZEUuBNjXcWa5ulnLHSCVqniEUVKJE6jdVEEYRRfDAA9sYuF8o2gDYmyqpjcCI8mHiBS7PoLxdgb+MrrHdR0UljbRRsaQxNY2JiGRqQZ7Np+LcKNMm6qs83fS8DPc36oCpobHhohUCnaViok4Q0UYG5WweqnkdgYv+Y9hJr64D6aEVdzVksZpydnvL08SMePXzApUuXuXz5Misra3R7K+RZQaIThAzKVx6Bl56k6IIuyMsu29t9To/2OT4+5tmTJxwfHjMcjjDGUpQlRacb6TYClYRg/kp6hV63y9HzA87OBuR5TlF0SLIMKUJfZW1MQDikjGbmEhl9w6QIoil1XYMxbG7tsbGxxcnRPk+fPuRseMZkFrzjdnd2orhIxkayTqfTod/vMxyeMZlMmM/nWOtYLCrq2tEpe2xtblLMp5wNh3x460P2nz3n+vXrXNrbo8hztFTBNwxJY0FIRWVcpO1G83RrISLCLyj9SR90uPHLhN+2XUfL5vYQRITYR7BErkWYgyr21TVNE5MvQV5kCNHQNJbr169y+fIuzlvqeoGxFYmS1Jw/OzFvChVADNaD8R7rNdZmWOupK0MaBQLCoxtRTyWC2IYI5+WEoPYWocLXoqVXCY9SYeFz1uIjpTwgU4HunCYJN2/cpJnOuXPrQ/r9Nf7Zt/4FeadLliZIqUlVQioV0gdKZm0t2WpJVnZQ2mBcg6sNjVf0NneZH1iqeoYFsjQB2/DyjWt0d0s+uH+XfrJOIjRPHz9nfW2NIisCOicl/W6PN159le3NLbqdDLzFG0fTLPCmIsGRqAQvBZmCTpYwLTIYB7EgQYK3NiStToNokCKl11tnURnM6ZjT4YwHjx+jO2sMxwvWsxVm0wrvJErl7O5eZXNtG6JPY2Mcz58fkTjBatkjFTpSuVjOLE9AZNsekvb7Lb7mUXghL2r7/WT8OIeA5dV2mrZOKZzn5GjE7/0/f8Af/oc/4uRon9WNPr/5W79BKuD+7dsI48lUxngywdWh0NlJC1KZkZclWdFB1mN2L11ib2+PPM1RsecsgKcCRKBAi/PqyicGQ+f3PcwO8Ji65vTkjMFgQLfTY3NjO1DwnSNJsiBE9rnythd7r2SMZ4MioPoIXOZxy5VPEGg7gsl4wnS6YKPXo+sFqLBOftQTLrxcWD91ohFJiqoqhJMU5Spp3ouBgccJ24rKxxFLG54YAYooZNR+BAXG2tY4Ea2NZOvHeaGA9UlCJC+8w/b32r8XiGgfYKMCoZSx+/CHlG9sk6LlX4XgCJ0kJGnoUyuShBQotOTS7g7K6+CtKQRIFTthz4/34xqflEC0gikSEYrsViBqTzITJMOUZt/AaIjondEcDzFTSPeuIfsFjoyFgtSGwqd0K0wGDYPZMV995RKFmbOePuXltyxFfo/GOFTSZe+yxWNwTYY6rGExg4MP8Lf+EjncJ6stLBZoMQFX42hYJDVGW7y14GtEV6Ff6yPe6DLvTZEkKBaheEnojTqf2CEmvZjCITzGBurjYDDg0aNHPHpwn7Ozs6AanqYkeYdEaYoip7uSU9UzKjdCCIVzClcLPAqlSoytsMJhjYkAhvgEddf/HsdHEzePijZJCIetFwwGx5wc7vP86VNu37nD2+++x4NHjzk8OmS+mGBshXOe2liSNKOxQYPis8YXImkTAhKplkGiFC2FTQUjRJ2RSAXO09Q2BKdRBlcrcY4e+DiZvFwuLi2qFib5eaDaBqftMuQJSSA2JC+L6RgtPGmiwTkaa0NPjjUs7Qm8wHkROOPRRLexoZ+ushahw5HrKiaXKggrKKVjpUvGvq7gB9ImC01dh4XcBqRCSdmK1IYFwTWEgp9C6xqlE2Sk/CklcN5inWNZ4PN+2V8kRehjci4kVq0tgHcG42zwaxOhAumiapaMSaf1HgMURReRpWidUBR5UM+0LqI6wUPPWIejJiEmQHVAD4Kao2E8HDIejdl/+pSH9zbZu3yFl67dZHNzi6LTIcvz4C2jNIH6JkBYvFckusPVl15ifWOT/WcHDAZnIUB3nrqpSUzDeDZEpwlJnqGSBCUEW+vrdLOc4dmQo+NjDg/36fR6lN0OSkkSqYK5uFKhZ8FZnHVoEeh5xnmQiiRTeBPmzPbOLkUn5+joOePRkMHgGZPJjI31DTY3N8mLnDT1rK2tUZYFg8GAcaRMNrVlsWiYziZkaUanLCmLguFoxPDsjNu3bnF2esq1a9fY2tpCer28l4kMj67zHhF9kUQ0o/dxj8S3ML1vO8nQSgU
E0UbELD4LUrZ+hCyTsranzTgb545Aa720FBCCoHqJo6kNeZ5QFD3m8ymjkQnJfqQa413sPw2JnxMOmcpYYFEkSRlkqL0MCajzMSZogycfk71gUB/QUEtj7FIUyPpA27WuxkX0uq7Cuc/nM1DhmGvrqzx9AFevXOX2B3f53nf/no2dy3x9ZQ1XRjRaROqXszSmZmFqnCpA65gYJszNlPfu3GH7yssUq5uMZyOUSpHeoYRjpdflZ3/uZ2i04PndQ7wN5/7k8ROuXbtCp1viPRRFzt6lHXorPfJUh95YocM9szVFpMGiFKkSZMKySDQOyLMMfIqyTVBATXMQFUla0F9dZ+/SVR6PRjRWkWZ9rFMImSFlRpZ20Tqj29Vk1wq0kJRlRlNXHBwfczqaMGtqptUcg6MJdaKoghXDbx8+JyKusAIa70JC7hxZrOiny4U4Fs1+IkjyYxlLpknsyWmMZz5e8Id/+O/4gz/8Aw6PDtjb2uCXf/3blFnKrbffY3A6ZDGaYKYLlCUkTevbXL50BSUVT548pW48K6sbfPuXvs3OxgZFliwVCgMwE6r6IQHRy8LM0pjXh+Lr0n7Et4VTh7OG4eCUuqrZ2tyiu7JKmuZkbTIj5A81P4S4EAPEavfyz5cwkUMKG1EJG3psbQESRmcjRsMxt56fMO0nYCp8JpYqkOdIYXitvMhJkgSZZzCdUnRK9i5d5pVXXwlncDGz+aRsIn7tXSjEemfxLqgBd/o9qkmK8AlJkoWCkzx/7U+fCOcn2RiL1glCSJI0wwFpljOynnndUBSB0nghOPqMi/viebfBerisgZlAojBAVpSI0ZRESPplJyiJSpZUTb884EdP/ce/JnzsbQWJcWxjGNQJplpHztbZbipSO2B+sM9sVLGxUSD8Dk4U1AhS4egngi/v9Dh6NOL1zV2+dbPEDt5B7x2gM4HdL1HlDURnBcwpp7f+jubkkLXTBoZHJM1D5OIhWs5AdvFFjqkbtHXIxpKroKptUlis5qTXV9FvbDDtNFTC0GMNgcb5IBjjvAzotBR41+BFhRcJdWOYTEYcHOzz6NEjDg+PmM3mAb3OMja3tiN7yGOco9dfYWNzjcHZgKPjI4QIDCchQiHU2rCvSpXgXY2OcetFRHM5xCdP+S/qWKrNn1dDwvcvFCCXQ4Se/DDdA736ot6jjc+u857RpOHZ82OePn7Mk0cPeef73+Xh/Xucnp4wn8+ZLhbUTRAMTFFIUYR103uaWsS4/rOv4BckaRPkeXZuTC0lUrZJW0Kig1dRmiRhDYl+UkFwJMrH+payE0rBrVxwq6Lj25qL8LE/J1QkfKQ4CqHBm9AP5AXeOhpnlokfkcroYuXfeY8XEufjh4WmcQHQUCHFaupABfE60s0aA94gfFCrTFSCs47aNZCFAHrZl2d9TNZEFNGI66cN/i0ChRAaIz1CmCBfj4vvBYR0GG9DZSsiFm3QtHS388FE2BpL4hUi0TTCs7ALIFT+hHVBVdAHTrNRnsp4yk6OQyFlQllkyExiG0dTNzTGhOq6B+MMwoXXk8ovVcGEUHhrmYwm1FXN0dEJ9+7cY3Nrm+s3b3L5ylU63S5ZHtAwH9U8QSJUhkDS6WkuX80oyx5aabIsRQrJoqq4d/8eJ4MTXrp2lct7l+iUJUpIOkVOlmrKTs7JyYDBeEQzrMjynKIoSJMkICu1JU1TkiTFWRcFV2xAo7xHJhKvGqyDstNlS+7R768wGY2YTcY8e3bAeDRjfWOdbq9Dnqd0Ol3yPGdtbY3j42AhIKXCGMd8McUvPFmes7bSp1PkDE5PefToIaenp1y5coWXrr3E1tYWaZIGNFO0dNxgwbCMU5Z9a4RnQWu0kEvvM+dd8CRTMiZVwV8wFDTaHjdi3184qo3qU+3XSiuUEqAgQyFwWNtQVw5ragQ+9mGpSI0JiZg1YAw4aclSQaolMvZ1kim8E/how4C1gUoZ7TRazRjngziJ0hKh0kDLdQKtE2rb4KQJFEspsF7SGIuMaKHWCVIGlc9MKDrdLsPxnOFwjHfBbiBRAiVAx0KJtQ0yVZBKahfsCKz3yDRhPl7wd+9+wDe/8TV6a5sMqymJkmRa4U3DxsYmX/+pn2J28jc8339Op1dy/+FDjDW8/PINdBIoVkWvR1YWCK1w1sQSk0BgCWmxwAlBmkhKJRkpjcqC7YQSJU42GFNQyQykIM1T0jxjdW2dqihJ0hJjBmR5l7VVRZ6XZGlBUZQYbWh0TpkXbG6vczYb8HRyykw7Kr/gwekzPnj2mP5qH+tC8aZFAYqsYLPXRwixdLSaVzVHg9MYfys2VldRWYb+gRHiT8Y/aHiPFcFHtJrP+cM/+iN+/1//Ho/v3+XGl17iV37zlzGLGd/9T3/LbDhjOplzNhhijWNne48vf/mrXL92nW//8rdZWenz53/yH6n/4N9w+8mHrK+s8rW3XiHLVFQ4a4uhIiYzn35P2znSBsyosH7U1YKiLFld30IlKbS6k6Klzf2wYZ9Y/slSgMy3eUxMaP0CRGgHkLaiqSWoPHiJZhk6zbj34TPEWkpiaoRLlv3OoUAbjh/806DodJg2hpX1dW68+lV++7f/V17+0mV4AUkS5+d34VP4uhU3E0GN2UqKsqDT7zId5FiTxIS33S9/iGsBKB2LuFqHoqXOsAQqPSjUBS/PGD597rE08ZCRDuYdKiswk4Sm9qx0VlnprraGDqFcGNFC1SqrfH51lc99VhffD1xAJT2gJVjP2XzGO+MFE9th+GDM1+/d5ad2eyzGA+ZnU1x1hHbrSFeSJg6pa3Qn47XrikJukqlTNtP3OKn/A9V8H8kOa9s/Rz/fAt3B+xFF0UO757jhiHQ2RLgznJ/j9YIGhTECqUB5gbASmgqRSbKrG4gbfeSNVdgrkdKTIpFo8Cn4DEQaevSi6N5sNmY0OeXodMjTp884OT5mOBrRNDVl2WF1dRWldfD/dZ7JdIL0nrXVDlvb6wwGI44OB+AKnJdYI5eFUanBOYMxEpkkJGkWkDbvL8xtv/z/i5I9/wiGFOfIzkcqKm0hIQwRxQpbrYP4OwSQYDQacXR0yLOnz3jv/du8884tHj18wHBwymQyxsdWpFB0DvMxaHgHNlk4l/hJn7esfNr4wiRtiVaBKhipTlKGhSVk96EpXwiBEuq8aVleeCgJZWDhW4GJYLLcrg8Xq7zxVcGHIFUGVROUj9z3rMDXNYvZOBhkx6A2eMa1ktYimlYKLJLaeMbzBdW8wvoq7FPRlynLsiVNU0sdpFJ18EtJkwyZFYhS4bxjPl8EL7a2lzied0ATXLBEEBIhNFJohEjCBwIlwbgG7wNC5H2o4JlonByWULGckEJKTGWRqNBsWhlqLXBJCBNdZRHWkulwP5wwVNYxnNb41LCoHcZ6TGNJhCDTCZlOqI1h0SywTiCJ1E5rMdZgTBA5UFpEtU+JNR7vK4ZDw3gyYv/gGbt7l7h0+Qp7ly6zvr6OVIokzUISoCXOhV40IQPNtFX2RAAKJvMJDx7eZzo45undu1y5fJkrV66wurKKlopsbYVer2RtssrZeM
RoNGF4VpHnOXlRkCZpaBB3JqI6QeVQJwqpBTiPs+CNQ5GQuow8z+j3Vzk7HTAdjphOF4wnT9ja3mRjY41utyDPC5RSZFlGWQZ60GJekySK6XTCYj7B24a8KNjYWKcsC0ajCXfu3GY4POPmzZe5/tINirKLaRoas4j0xSggEhPsttnd4nC+IUkicujOTeDbEvKS8hbRZxmFa1pEWkoZVBujJ2C4FiEpkkIEOwjvEI1D4lGifTRdTN5NfAGFdxpBgpbgTI3EkKicRAUFJUVQEfWurW75iJoZjAs9dD4eW8sEK4HYmyJlQMY8Jji7y/BsNs6RSYmNQkOLumL/8IDN3jobG5ukRRVESLxbRmhKCvJEk2rJ2t42X/mpL2MOP0QkwXMuTRK8dmzu7fL01hPeuXWXX3zrJunpQdi6bEM9X6Ck4srVK3ztG1/hz//0z3j6/AlXrlzi6OSYTq8kzTO6aQqJQgpHY2p0ltHMah49fUxXVGghkCql9qHClyrwUmKBRbUAJ5jNTfB0zJLgiRcFQKQWlJ2SPM9wzpJnKf2VVbxSSAU6USitcd6xMA3HZwOmzZCFqZAJoDz/9Xt/y7OjZ5TdHrUxyEST5wXOWb7+la/ym7/0bbKiDPPEW2azIf/xL/6UB4+fIITiZ77503zrZ3+WlbKgNW39b1Fd/+9ttLU4EMHCpLH8yZ/8Mf/63/w+z/YfcfnSFr/6K/+S/YNn3HnvfarRnGpcM5nMUCpldX2NX/21X+MXv/UtLl+6RKffQTjPr/7WL3Pn5ITDf3vM6PSUIpHkuUSo1sih3Uni/5eBz/nn0N8pl+uQ957FaIqXNVnZiQVadf634rzK3Y7POzWWfyd8RPXDoibbi4TDiwbXjGmqBdXwlFmTUO6t0FUJTV0zqyvm3tOra5jPSFZ6L55LPFaWpmRZxnQxo9df4X/89V/nK1//JhurmzH2i32GPqw/sVx5nkgKoohZQB6l8GGft54iz+iWJWmSUFn5D8o9xohHAAAgAElEQVRpggVCYIFYH3odpdLBNinSWtt1/AfRI9uw6aOCLBKx3B+cs6gkIe+scG2lz/VXUq7eeJk0LYO2QAuetjRQWruAi9T8H23ER4BW+bKdk9ILrGifE48QnmxjjY1vfgN37QaT2QC9vsbg+IB5PedkNKA43mdn7xWkgNKHdhIyx8qu563tBYgBonpAf7vB2xzyHN1r8OoOtVCI7ozyzRJ/6Rr+0jZi0MMdzVkMgt2TcAJnHVI1zDBkhYKsQF5dRb1xGbFbQF+ANBTe4pbCIwLpW69Dh7EVzx8/5u/f/i88fPqA+QIQkjJP6a+sUBRFVEsORbbZvKJpGoz1dLodtjbXOdzfZ//gBHyC1hnWBwst4S2tlUYAKQSucdSNCboJsCzKtyn8P6zY8v/fWK5b8AIaH96Bi3548ftOEIQGfWyfCvYIjx8/5MNbH/LBrQ+49eEtnj99xmQyCa1CMfEwVYX37gKLkIhKCqQMBXOx9KRun6t/BD1tgkC7utifIpVa8rJb9EzQCizIZW+Opw1SLxwt0tmC8IKLi4uPPxMExO1crMEuZf0FSjhkVqCdQ3rPYhoEBogmpSHXC8cI3QQS42A6rxhNFtRVhbEe60A6HQRI6iAkorVEK8jzlJVOl9Vuj5euXOG1r77K1TeuUTWG9959jwf3H1LXgQIpXBQeXyafMYBtPJCQJgVpVrCytkpR5BhXUzcL5vMxZjGlmk2YTiZU80XglgN5npPlGePZlMYaFtOa3JV0VzdINnpMXc3x2VGARJqas+GA+WKGiwjE3cZSHJ9xNpziXKBzeoIZsxaKRClElmOcoTE10ktEpPOF/jyHtR4pg+KhUlFgxRkSEiaTIXduj3j08D6bm5u8dP06W1tbrK2v0+v3w0KZpEid4GxNmpdkSYJOAsoglaAoc/I85fTwgOHBAcOjQw6fPmHv0iUuXblKb6WPEJY806zpPnmWMZ9WWGOp5zW2DvRBrTVSKJxwtKqGYZ+xIARpnqFs7K90jlRJNta3WO9vMBlNODo55GwwYjKesLa+wvb2RjQ8V2xubsSkbMzp6QlCltR1TVVVTCYNaZLR7wdhk+FwyP7RAePplMFwzEvXbrKxuU6n26OpF9T1jCRRsbgR5qiINCOHj3YMgVqstV6ibt57EqXx+CCGI9QyaXPOYY1bbtA+VkeDmmP8+/h6UokgouNaPzii4bTFYWISIZEiJVUliAbcAoTF1QbpNJkKXk9aqVhJDEEBBJqniwhXoGgG5K4xQY5YCo11FusdxhuEUjgpMd6DkshUh3uoPEmeohNNbQxl0adAk2RpVF0LyJq3DUp4hDcYU/Hml99g/gwmC0Njg2LqwlTIYpXLL93g9p3b3L4nyVKNbRps3USBI48uNG9++XVmszHf+c53OD07QSWSJ8+eUnQK0iIj90lQqo1qn95K7t5/wNnjW7xy5WXWLUzmFU1do4tu6G2QKpixW8dsPmM2GSHSMhptW6QOSVniBUWm0Rqaes765hpZt4uXlspMg3diHvtRzQI5ndEzHn96hsoycut4/t4H0SJDsrW9jeyvUosa9eUvsVIqtJuHddx5erlAuDknh08wxnG/3+Pnvv5VfJH/JEn7MQ8nQjFCOfiLv/0Ov/N//+/cufUeuzs7/Pb/8ttIKTh++ITB4Izx2RS3CEJZG9tbfOObP82v/8+/xaUrO7ja4rHIVKC1IM9LXn/ldW5euUoqgny7cA4v86WPkBCKi41nF++tkHIZDHnrWIzGHJ8esr67jlRJ/AG09hEvpA8/0hRpX5RlTOBtTTU95vjgLicnp7izU0Sxyd72q3TShF63pNvv89Y3vsH1lZTNsvOp6JO1lm6nQ+0ayvUN3njzTTbWV1DUEdlTCC6IbVxIKH17UEkw1Y4F1uDlZJG4oFqr1LKQdvFxOS90fMLbjllla2kSKvugZEKis9A+oTVaJ8t9bXlyP+h6fwSIOGdVCmSasLm7y8rmJmW/g0yilcBHDx0LbRgHUuE5Z3H86GtC9CXFo+I1F/G7SzwoFvH61y9zXUnc/Qcw7yPVgjvv36OZCE4XM8pFw7Z0CNGAmYFK8XKGl/t4ewchnoB8gO6McX6OFfex8hAXXPEwucc5hd4u8Os7yCpHDXZJDjOS0RFyZHFTh/NnUIDcW0dudWG7jygVKEMQeBMIF/ZzVIPA4EUTfiYM1aLi3v0PefLkMVqnrK9voBNFqoN2ghQKY11oU3EuqpGHoq0fL7g/PcI0liLtsaimVM0AlehgBq5ynFU4lyBkitKA8nS7Pcq8WOpXywvz4h8lh+JjfM520YgetQSWjzWwmBsO9/c5PDzk4cMHvPvuOzx6HGx6zs4GTKbTUMCWGoeG1mdZBoV027ZHSbUUjcu0QYkmtF1FYMBFJtRnjS9G0iaCmAIqPshKhkUGcR4ot6CkDwuT0hopghHiR6u3eB/gXaVwXkS/J5arsFvCuyF7liJMQ+ktwimUTJBphssLqvkMb0OwGpo8VRQ9CPCmFwFtmlc1TWMRSqOVRFqBq8CYMAkSHShgSmkSlZBIR
SYVG70+L1+7yutvvY4D6vmUxWzKfFZHzyodKpaROuIBYw117RAkpGlJUXa5cuUK/dU+HsOimjAeneGrKdPBKSeHR4xHIyAo2uR5TlbkDEZnVE2DFikd2+fVN77CN37pF7C55N/++3/H/rPHlFrz+OF97j+6x3A25vR0xOxsCvIYp8PkDApNAtMYrDdByEjHzSfNsSqaNpsAE6tE411UI7Sh9yhJZaAgOrPctCpvePpkysnxPiurq2xvb/PS9RsUGzt0V9bp5gXCQ9MEWwUT1R4TpVnf3OTmyzeZ7e8zODpiNhnxZD7l9PSY5/vP2LtyhfWtTbJuBzBMpyNmw4pur0+Z5RhjWMznIUFUGi31sgrirEX40KvlXKCYZnmGNw5TNyiVkKYpeRpM1IejAWdnp+zvHzCdjtncWmdlpY8gVFfLMqfTKTg6OmQ2m6O1wphg+TCfm4C6bW2QTjLG4wm3br3P2WDC9RsvcfPGNcoiwfuEQBeO9F8vlghxW9iQSkbUyi6DKgFtj3xIktoFo0Xa8Ch1vkw4Z1EqzknC+3cRwWoTKSH8Ui1MLOu14J3EWQFORmkvi07BNjNODoecDmqapoKiXIqreO8xtsFdEB2iXRO8AxtN0iPFKvRzWrRSgboTEz3rLF6GKpnznqLbJfEJOk1C76hSJGkwVhfC452hXsxpmorHTx7yZ3/xZ/yz13YDuojC+gbrLQ8fPsSmW3T7q5wMRuxulUgCI6BpDHVjSFWOzj1vfvkNzkZnvPPOOwgVEv2Hjx+TlyV7eR6a+rUEG4oZSM3t23cofMr1X0zw0pEkKUmSEPSBwrqkdSvpHW3TnQlX3lsaU2GlAgx5JmmaKffu38IqSZYXqDTFuFBkSpI0eEeZBbPpFDScjQZBqTRJmS8qvJAMhwmNqbGy5v23/5a/6CWslx0SqfHeczQY8OG732N0vE+SFpzsP2U4OGWt3w/04uW6/5FgrV2+W9DmU/aLT9xrP+UPPuNHP3B8NBBpWQ8fP+AnHd3/gJ//w87kRR3OsIvVjeXtt/+e/+Nf/Q63P3yX1c0+v/Zbv0yZK97+/rssTAXOo3XKjIbLly7xq7/xm3zrX/xz1nc3qZxHaVASPEGROE0zdrf2uHH9OjL6IIYPyzKKXyJsF2SPYrDTov0BdbYoIdhY36TodgnIW0DiwjMdEQRx/l7b/f6Tr5x/8asXGtjaj8C0aaZznj++xbNH32f/4C7VomIN6O8oKh8q6kprym6Xmy+/ztrsFLlooOOje/D5kdtXyfIMPU9ojGEynbDjLdYtECKJIh8Rn/LRVl60aETbgdgGh0GlL9EK5xVSeJQMCsEujerGfD406mNzte2RJ7COrHM0xmK9R0V163ByP2y4fR6pC0AoRd7vkcf7ZxuHUpE0ew51BsNxEwqb6BDj/bhHuKqtJH2MF53DtsV6AUkqKMqUlAW5XiAmDYtxw6hyDATszxs2Z2P6pSSTg5CscYjgGO/2EXIBymCFxpMifIOoz0hJgBIvSrxyCE5pZIUtpyTllGQ3R9htxELgFwKSLdAWcgUarJ/gVSiKITRKJEFUDwhCI00odFKDN1i3oN8vuXRpj3lt8WIVpTzOzqirilam2QHWOJwLvY7zxQIpala7JYmSVI0h0QWJLENbS/DkiUJ7Fi9rrJMURUm320VrHVPkj8JUn0WU/vz37/zOfeSbF//xsW1DvPArn34m4uOHWy4dodc2UCANdT1jeHbG0dEBz58dcffWQz54/xYHBweMRkMm0xF13VDXVYhNjMXgsb5BRo9p4jPoCc+0VCqyYEL9wlkd85MQk1kXem39D+AqfyGSNh/jONnCBBFNI6qPWSRCBHd6cMFHrUWe4u8G/w8f/2uRtRicErioOBAy9N8QaRN4HzxLpAjVDSPOpcjj+YQNyIUqDjHYUC7SuAyNWQS0yREqWDIY+/rEQm0xTTBGthpMImlcUItMkXS8ZCPLKRRUWiGyBHSCVjEglU0wnhYS7dUS3VAyCDgopfFCgkoQSRbVhXJ8lmEE+HSG8IbZYozPOqRph2rW0GlGFAoKlfLyS69y/cpNBqOa2fiE1155lW987VX+cngExvPaq6+xs9vF43n/nTscPzvlrKpYNDXO1CQWEulxicK297FuwrXTIlb8FGkaEFJnLTaKU3gbqhlNY0nTyL1u5d2jKuZiuqCpjhkcj9h/csTulWtcvX4DeWmPsiyCkbQRCBK0LgFNmpesbe2yXq6ys/cSo+GQ/efPOBtPmdU1+0dH/x9zb/pk2XVd+f3OcMc35pxZA1CFiSAFECBFWqIkSgq1ojW4/aHDYUc47L/M/tQd0Q63/wBHuyWqZXZLFCeQIAEQQAE1Tzm+ebjDOccfzrnvvSwMJNhqBm9EApWVWffd4Qx7r732WrQ6LY6uHrJ/sMd2p0s36qKjjN52H6kU0/mM49MTJpMhOhJkrdR73QnnExArMHWFqH0/iZKRVx7ToRqsFJlSqDij3ekzHJ4zmQx5/PCUyWjO9nafTrtNGidk2zlpkjIajxgOR8znC5bLJfP5nKquiRJvGp5lOdPJjKdPHzIYnjIcnPLCzefZ299GK025LDB1EPEQjQDNmtCkpac3uqbPs4F0YUURMNaGYEOEiravXvvmfUJSFsKP5lxhl9Y6xtR1mDse/1RWIZzCSTBUWLtA4TBWYGpBqmF2/D4/+sHb3Lh+iEsNtagCnVFiSv9+TRUq6tbfl5KglKWWGZVxCF0iSwtVhJIJ0pR+ERQEcRaJNYqyEgidU9VgVAzKK0Ta0uBihQiggZbOVx2o+el3v0t//gbXbz6PVI6yglrEPHh8zGA55vnr18k6bQSOOEmwkebRbMLp+YTrWz2cq+h2unztq28wG425e/c+qU4ZiAGPWk/odrdIshglKpSOQMdcObzK9s417jy64MajJ+wcdVgKkKnGCEesNHESMS8spXNULiJyjpQlqUhJrIAaau09JxOtcU5w66M7PD49IctTkjQF51hWFUrFJHGKcuDqikgJWmnmJYilYuZK5vMZy8WCKFIoCad37/DDv/3PJFpha+vXpEhS2xKnNFUd4aqc09MRzz3nAmC2ufh/2obwbJXjEyEpl/KnZvPd2PXdM/8m8Cw+dx+69Pnh3zQ0Gdmcw7kwrlcTZvNDVxchVmrFzd//kkz0mWtdn3VdMvJN6j7BCbsdxho+vPsx//u/+b/4wY9+xFY34ff+4L/DGMNPf/ADBhcDTgdL5jNDUcPRzRv86//pf+Sv/+ov6XRbDXS5AkhxnqkgpAcvRBRhIy+G5emMMtSQGrwdWNHcHM56lFrgQg+bAAu6nXsGBp5uJTb20mZ12nw4z75Swuc0SdD6DYFX04NGjMVbvnia4uDRKd//znf5+P5PWdQTDg8O0K0uk/OSrXmFyYL1iRTITkYc76PznlcbRAT7IFZrmQ+kFUYpSlPx8O5d5oMBdx7c5ZWvvMmXX3k1tO6Fytoq6zM4NnrkkB63CnhsGmXE2qCFIVIaEzuEqoCgynxpQnzKCBG+yqesJpaaRGZMVY6KBLGQSOcBFe//qJE2
0O6cZdWjJprRsPkZn17WW9VHGwGU8KUSuS5YuLD/OK88qHArAPqfv+LuWRorsZxwQXKjkttUCFMVk84qthB0sxZPFpLhcsLALSmevMveKKKdb2PFYyp3DyOOSfC2FK7KKYt9hhN4enJCOR9ytJVzdHCAzq4jCgMh2dPxHCtHVGKCVSmxk6gkDfTHpqZTI4lw+D59KzTGCSoEWkRI9hC8hBCHWJf637GCYrmkLJZoqYgjTVlZXG1QgIx820BR+4qrNQ5Te0EtoRxZKyHLFRJHkuUYq6lq8A433krK24hYamtxWiMjD0SrEC6sUu5LY+YLHBtDuBnal+f8xj5hN9ZOsY7z/eF90Uz4dxpwxgXbIBOWCem/ACcFVvrfR/rCb21gsXScno04Ozvm5MlDzo4f8vDuxzy8f5vB2YDpZMFyUWBsTVX7Nc7YGussVV1T4/UolBC+OKMVQmsQXjSttjWuLqmtWQnBSRH5fESAlKEw4DYzyU8/fiuSNvDrbiO7L4SvMCEkTiosCiM1Tobqmwsy4ha/MUg8VUO5UC3wSZZHvNcvXKjQ69GsT+EHUlp8cb0G5bwSnQUrpfdKMcabRgvpS5xKIKRDYqmLOfP5GFNXxIF6YK3FCQPaG3NT+usyVmKEvxfjvFphYiz9WBMrx1xZCiUow2bhSWgWG1QTpbGr1cfhe36avj8h5LrXTgpqKVmqhKTV5fqVI04n59weDNDacqXVoScswhQ4B8eP7rOYjZgXjseDu5yPP+bxyQmJgrIWZGlKnu6yv7PFdtzl4vqMmRTcP33K3Y9uU16MPSVDWgoFiYjIglz6MnCgm41d4G0NlPBWCxaPLDgriWVEJCMQPhFYNdxqb2sQ6xRRa+x4gV6U2MWcgpqiWKJVRlWx6plSOvHV0kqjVYud9jZZf5fx8Jzh4JTBxRmz+YS6mDM+O2F394jDgxdoZYmPjpSiv9WjouLR4ynj+QSD71vTKkHpFGkktgasIFYxxtS+QiK8bYIHDhSR1ERxTJzktNp9ZpMRs8mU5fyUrV5Bv9clb+X0un2yPKfV6jAcDhkOhyilKZYly+WSqqrIsoztnT5RHDEajrn10QecnT7lK1/+MjdvPk8UxQjnq5aNsuRKwKOx1ZANPdgDEqbpg3NeHckDA37xsEiEM80s9b1z0pvNW1uvAjDnvBKpk6ECHc5hjUFZH+w5CU7VCLEApzyPHkcaSZ7fjXhbDyhFxtGN5zBFiYwEy6IgjlIk2ifGwqs7+uDRIYXBqhbLeoFQBbGMkXVGpGNUOcW4EmMqMBGgEVZhag265dcDnSEjDywI56BWVNRY6ei0M/IswbqaXZ1QDWo4KCE3QIIkJk63MfMFUsYgK5wwJEmC0ZpH8xn3Hp6S5zn7h32UUmz3t/jm17/JeDDh7OkZkU45Pb4gyx5w88WbxMIghEXHOV9++VUmx+e88/MPePz4Cf2jPgWCha18nGoNWZZgsoQtneKUxEWOxM2RFaROEQuNFd5g3JuFx+T5FjvbiiiS4CpMXVEZQV1WUGuyuO0ZOnWFkgIlImonSaMMLORJRLeVEssIJbVHlrWnvmIEkarRccVkMSfJD7F0OTufUVvH2h7KBVT0kxvUZkj+SfLcZu12o1DwKXvdJh68rgd9saM5x2VfomDD4rzKqBBqAxXfvIv1Nf96h7v05anOUPrchPnScOv2R/z7//v/5Hv/9H2yVs43vvUNsiTn7ocfMZ9cMLoYMx2ASjp86ZXn+Vf/+n/gj7/9h6uEDTy6LkNlyAuCgHAlKgIjBDJPQtIiQ9uYCA9e+nXCCazxCLXvezPUpkIIiZK+X1OE8yOasDqEZxuPRjzzbj/tafhnv6qjXHpOAgPWA4dOWDQSWQoWgxnV3FDrHBV3uXtaMK7PeWVa4nZCb7QSFMIxj3KIU3++YMrtLVkdUlhcVVMuDYvaYLTgzge3+C937nFuLbsvvEktNcoZEAZL5ANJ54AaKxLW6aqnr1nhVXAjoVGiwtmlF/OyJYYliCTco2blObbKisL1IagFaKsQVhNJhXIRRS2otUAZSywTwLedxDLz8ZUT4IyPmy4lzf78/lpV83LCj9wl1T0nw9tshAPsGsizIUBN0hSlo1DtasaA+PWnxTPHuiq7loBZ/VcIVMPACj3fQkGmI+qJxbQkpbXMqyEqnfLcc4fs9h9i7IcYOUYwJ6JEVgI3VoweT/jw/VPe+3DE45MRe/sddv/8G4ijF6juO4aDc8bFOaI1Z/9mStbbQpL7GE6AT6usb64REkkCLkWQg0sQZChinFMIEePEC8BrWNfFEeMsDC5GPL7/lNFwgjdnByFrlHXYymCUw8QKIy0iEoAJvVVeVj5tpagIEiURMqYq/XypCs+EklJhQouPRWIk1M6itfIxMG5jHMpL7+DXPTZXugbGIcQkrpErVs1bDfM9gGRNLB/wXB/DCIuta5/jicgXbBR4FXBDYS3j6ZIPPrrLR7fvc3w24P6DRzx59JDx+QluPsMtppSzMXVd4bQvu7uwETgERghPFY90APNBunWxydZ2xRJqBAyd86qfkZagfGIshUCH/tcGXP6847ciaVsFkBC82jZS7ybDCn+vtUY3pUZ8o60JDV+bm6qQXpWuqSrIwBW3QVnvMp/a0yed8HRJoRQYiREglPJN/6FnSTmvouikw0mBKUuq4BQfJwlZ6ql1lAXOCozzXlFNr1AjuS9kCGCURSpHHHv+soq8IIvnDMvVhgE+mG7UJMFhQkIURVHwkPN9fkoqL+CyKNlu9Xj1led47fe+zt/99Bf8/d/9gKcXj1noBWlH0N3ZYTaf8vRiRhQ5iicFH9+9y/ZBl/3DmwwHM2aTc7qtLtVScPVoj5dvvEqhFF+3Fe+9/z7v/fxd7t27z2K5wNaeriPwvi1WWKRSqx4qKRqcMVgZCIG1wtMsgoKjExZjTFAR1SAjlIyI44wsycjTjFaWkSYJpampqgqtMqyxzKYzEpFiraWqSu8rFylMbWl12qSJZntnm+l4n8HFGRfn55ycXXB8OuL4dMju7h57h0fsHOyxmBnK5QyspZW06WQdrDUs5yVSmeCdpjCmYrFY+LElpeeUo7xiFY3MNMR5TJxt0enlTMdDppMxZ8MBk9mYXr/Pzm6fdrtNu9Wm1WrRanlj7vF4TDSfU5Yly/mUYj6n1WrTOtxjMplwfn7KP31vxMnxCTdu3ODw8JA0b1GWJdZWCOyqGbau61BB84nbZm9bI0Iiw5hzYU6tG8Y3w6nLQakQDU0Zmr7TsId7hcvNeU0j5++vJ9Kal168ybf/4Pf5+x/8gHtpyl//xV9z8eQcFStkpSjqisoY7xNXQxIleO65AKF8k32AUq1dA8BNX6sIlQRjDFXtabSJjr03SlFSVzV1VROnsQ82tSKKvKiHrWqkECRxEhRk64AWO9IkJY4tkY68P1y1REmJUN7MdjAcMhr12dlvE+cpYgaHh/v8yR//EX/zt9/h5OQpebvF3Tt3SdKEK9f6KBGjIsmVq4f8/re+QbfVJb92k7OTY+rKIEXEcmEwTlJbR1EumM0q5sslsYw
QgR9vnF85AtmAujQUy4osa6MTr06Gqz0Sbr0ZeVU5iqJEaC+YpJXDUzATZJx5tUtqIqVIdIRD4VTlZZCF8lYdbkmSKJ/URBlZe4887/iwUAiPwH+OCIFgo3r2KSH86ue/9Ghoe78cvVwd7tlv17WuFYu4IWAF0OmynVgzTzb//891bJzLwYcf3uHf/tt/xz/+43+ilS/5l3/2V7STLg8+vkU9P2E2njEZ1eiox8HVq/zJn36bP/+zb5OnGc5atFS4RrhrlU74B6CkQgvVdGZtfPaz/w/JtwjBRmjWL5aFt+1JolV/yOVHsZkgfOodbvxMfOK/TQJ4+Xft6jeaf6l1RJa3SVttVKzY6m9zdnIfnSREWq+qL0ma45RmMquwUqDkpyThzve0lWVJWRlU7lkFk8kEm7VWZthNL9VnDVFfKPRwgJAgtfKgsJJoFfqY3GY99fOOtXGRCAraSoFSPr6II0kriUhVRKQiL9+uE5CxT8JltAKBL79nPuW78Hcb83atsuewoZdOBq9TEfqfEWK17z9bAf/nOi5d++ZFh+tbxR4SynROdVCwkAtU7JjWZ0TRiDd/94ibr/Zop1MUFcpEmDojsh14PKV65ynq4ZRrZ0v20XC9xda1A5L5lOJnH5Luvcjucy/BsM+jR3dIu3voqAXRDB3PgDEwRYoyjNQIQQ/BFthdhNgD9oA+hCQOUnzCLnFoprMZjx+cMhjOmRVgnKKwDqEjwIK0VKamLGrP9rKWxXTBcrZA6ggVKaRTJFFCO0vQOmVZWNx0iXU1UhrmyzKA5n6MqkgTaRXiNUI183Lf1bOrwq93fEp8IbydYs1aJkShPBzivK8dMjBuEVRCY4SgRiJ0Ru28/VZpFhTDMcPTBzy9/S6nx/e5/fA+t27f5fHJOcvKsFx4pfPIKWRp6UYJvX6bSipmUrEoQ5sGPhnz9gv+z3VdU9WVT86soayKkHsEGrSUoV81UKglwbYs5DVSbazDn3/8ViRt/hABKZErb5dmY2+yemvtWrbcetqTsRYnG85y2Hac7+fw+Zlbne/TEjYhvBeFa2gazmfPTiof7SiFU77hH+kX7rooKOsCoyVRHLPd7zMxYCsbZOIjjABTCqysccIQ7DJJtApIdUKrkxFlCiJ8udw353hqGQQ5dtboVCilWmt99i/WNgmbipB+gAgyqVgMR9wtxvRuHHD16hVevvE8L/V22W0r3rn3DvdGJ+BSopGm25YkbU1R1pw9XaDkOcaUDM7G6DqnFWvyTHLt6gEf3b7HfMePlmEAACAASURBVD7hxvPXmNVLzospk0dPSKzALCtKW+KwyEj6knWjpBL89RB+EDfvQEmvqJhlGRaP+AjZ3HOMCDx8IdfqonmeE0vHfFkFoReNEIKqKpjMxlwMzuhHOVGSo5VAa4VRKVWp0Dt7tDt92p1tTs9OKZYLjk+ecHZxwvH5Y/YvDjm6do2yLNFS0Gtvs7dziEQxng8ZjE+Zz2fE2m+GTsNiNtvYuAM1QzuEWiORAkkcJfTjbaI0ZjIeMZ/OeHJ6wmQ24ejwkO3tPkmUsL21TSvLydOUwXDIZDKmKAqssRTLOUII2u2MJN5nOBzz4Ue3ODs/54UXX+TFF16k0+kgiCiLGYtFudpEN6X8V89fqRB8haS6qbiFeePnyarugFSCZvnGrU3YG+NrJ4JQT4DP1si4X8Sc9ZQNjCXSMdtbO7x04yaVqfnxj97i+//wXf74j/6U+/ceUs5riKX3EEQQae2VR2XohxEi9DM6L0lvrUdUBQjhvema+a+0RmqFMRajVpndSqGShhqtPU3SCCiNwQpPOxFS+gRFSWxVYerai+g0dGrrn1Nd1xyfHPPe+7/g6mGXyuwiS4dxNU44nr95g9e/+jo/fftnDC7O6e/scu/eXVqtF8iPDrxokDPs7/SpX36O48ryve/8f2yLitf2j3BGo+PcK6rWFfPFkOHogq7s+i3VOio8ki+F8Im7NfT6Pfafv+kRVFOjJSjpLUqcU1ghmS+mCFugXIUUntGwqC3LomI0GjC+OEUJQ5ImRFGMkRVWSNK4QyvtomWJjmvysuT0wnHzxot86ZVXiLReJ9C/wvYuVgj5Zv1Fhjgs1GVWid1mpaD5+af9+Vc7NvDcVYWH8L21UIfCkXqWGhQqH+LZv38mAfniR0gerQeLJvOSt3/yNm/98MfEIuabv/s1UtHm1jsfU83PMMsZg4sFKu7Q2m2xd+05trZ3aMUgRYUUfp3yFezmI1xzA2ilgtKx3xP9sw7PWYTgWzRtBmJ1e56eXaCkJNbxuiJjvdrs5ccgVnf2ybqkw5OY/DqzkiG8FPO7lVgkQXjs0vN2IJUmzVvEaYpMIg4ODhlezKmiHpGWSOk/O05SnNTMqwKjgsrbpbfl/+RVmVlVk6I4IkszRJYRaf9MPTDx+WICq1MG8Erie4KjyPfE68CQ+FWPVSVYgI4lcSJQqg5j1D80rWLanR36OwfIJPeJm9AeNP0UcP/zErYG6FuB4qz3CRVFq5hrdTTA+n+DXrZfdmzKtwsBKgO5Y1iMhiiRYkSFFDGZ3qGbHBJxgRBDYIFaZLh3Tqk/eog6n9NeVrSpqAVU1ZL5nTGLuymit83ZqxXbndfYfenL9K6+jhCZBzBljWSKsyc4ewGiRooESBAiB9oI0QHaQBvnMnARDe1XEMq9QpDqlL2tAybjOTq21MZQV160rioNykBpHbUUtLo9tBTMBjOE1aQqIxYJwioEETiNc37fl6ECXiwL6rIkir0wl8DhlCKLk0Bv3nieG3Pki8NTm2t6c55PB7uEWNtouI3f9CJIcrVSGCwwI8agqgpXWcrRlFsf3uad929x+ugpy6fHmLNTJsMpDwrBqKxxsoVwSzLhSBLHjWt7XN3b4mirT6Yk90/nfPh0yflgwmKxoAqCLsvlEuus70ULPfc4r/IppVx52jb5i1LP5jbKJ6LOBSX49a7zecdvTdImRGAkr1aqIEKyamy2QUnOreheQkpskCdt+m8CSOVv3TXy5j5RaKo362SwEToB8AmREwJhBdJaZBTjlMYKCQqiOCJKMiaLguFkQimg1WqRphl0DKaoSZKEytSoWqNqh5G+R06i/GIsJVvdNjeu7nPjaJdev41MJU45P2FM5elcQfXSe9IJkGvZ/0Ylr+m588bSNtDGQiDuQPlslMI6fvre+zy8GKGtYbfT5a//4k/5dvTn/M1P/4Hv/9OPGZ5fYEYVqZHIKCOhzdMnj9Da4ExKVZUsF5bpZEq1MEwGE/YO9/ndP/sT+i9c5fbFUx6fHJPUCqfARIKiLrHWUhQFEDanYLjq8MHgOmmQXrwjTalttUrCnfCefUpFPuCONDqJQzUSEJIoilZiGVmWIRKoR4bT02NmFra6fXrdPn6RVCRphjEWHTkO8x793SOm4wFnxw9YLGecD885H53x6OkD9veO2N85ZHdriwgJVtJrdclyxXwxZ3gxZDwee4N14RM1G+hCwtmQ94fE2ljvOWYFTijSdpc4a9HqzBkOB0yGQ2bT2/T7Pfb39uh020RtRRRp2p0Wo2HO2d
kZ47Gnozrnq61xknB0dMhwNGEwHPLWWz/h9PScL33pVa5ePaDT6ZKmKbPZbC1UAit65NoQ160342Z5FF7QB0JfTwBRfKM7q7kncD7EEqzGrRf8D4IljZJrqAJ5oClsSlISRzHtJOHFK1eIvlbzgx//hHK55Ft/9MfMihlWGr8oC4ExFmdqVOxw2q8VzonQABxoDCGwsnWFsMZvgE6htSbSMVL53lMhJTqKvFmuFE33kKfRBwK/Uz6Rc8r/vLbWSx8L2N7aZlSN/f2aQL3GP7P5YsGdu3c5PX+JypScPHpKp9UhDfSrN772BlIrfvKTt/27MIYHd+/TTnN6Wz1wFTqCq1f3mB9PGA7OqBZzFrOC+cxQOslssWQ2nlMtFyglybI0qJ36vlojaiQCawucq9jZ7tM/ukZhHHXoW5MIv3nL0I/rCjAFwlbgamrjKI2gqgyDiw6PXI2sl3RbLd/srjSVdZRlRTG6oJUoUueoakMctzk8OmR7O/dtw9ashJw+cy/4zIqu//4T1Zhm0d84A5/53a93NChvyMt9oOBWsIUPMy4F+s2nroEKnqnJ/WrH+uqttYhAXZpP5xzsbvOn3/4XRNL32t754B6jiwJhSqajJVHUobPXpW4t2Lq6xc/fe4s//OZN9ncPA0AZ+YSsKac5fCIm1kmb34+bHz57NAGG/53R+QWD0Tl7B7u02o1kvoAGe3z2tQpCxrWZfK/uduOXNn7smhM5v0daA1I/M5zcSgNDKuV7gdsdyFO63R55mqC7PdI0xq9NDisElRVUBsrwefqZ54+TFIWXgfd+j/7tGxtikNX6cenSP/2Vhj46n4f6vjLnvHKgDcj95TH9OYcLJDUbYiPtkLpGSl8Rj5VAq4ijo+vsbB2ydbCNTju4kAj8qgkbXE6ALiVtWqHjQOXcYFSsmYnyWUzlN3Y8Wxk0laWcVVRFxayC3b3n2Tu8zsHOEVqmSHcb5xa4+UPKH5whfnSCm05QYbwZlVFELWy+g27v0j66Adu7jHc6iHgf3BZR3AHVwVkVmA4VyOsIMQkX0gOhQShWonhNFt+AIrJCWAVO+/FUG6SxdFottrf3mFeW6WRCkub0ul1SIaGuGM4muFjQ2+pRF0uGx+cszRxpBWZZYuKUKrYsrBfeKMqKxbJkWZRenVgK0jiiv91j/+CA7u4eeZqw2++ihWAtSf9f8RKfma/imbVk9TvOIVyBxoDQOPzzEKsCC1QCptYyXlbIeUm6mDG/f5f5k8cY64iE5NWdQ7723JfZSbp054Jb797l//jOjxktT6jrEVnbcuW5NklW0c4s03LGex/fYjkdc3pRcb7MmM4WFKXvk0eEcSUAIVDKCyM2FHMZErQGSGmYRXJzHrh1r6q/1YYZ8/nP9bckaQs3KuUlhGY91xq/NY/2NV4HQgmk9YlMI7AoXLjpwEf351n37zTfy40qT6PS5FxA84VEJr73xxo/pJRzZEmGkhG1kCwx2KpkMpkwMZYYSR5nRFFEbY0vgyqHlA4rnZfVVpJMa/a3tnjlhZu89Pw+eSzIejk60VDwiTEb7h5n8cGqrTHCet+0kIiGX1ofQagBCd3tHZ67ccj1jqT/8W0++IefcO/WLf5LGnHzm1/hjdfeRIuY+9k9pKk5HTzi+OSYhYaspUnTiEjBfHHhk8Z6iSvh5edeoL21RRJHfOn1L7P93T1kJJHGB+M6jjxQ5CxlWa6e+2bSYLGrd6CVXlNY8ck1odImVYQMypsqVD2b72tXo6SilWUksd+ERWgEXZYLHnx8l53eFteuXafd6ZJl3rBbqshL1ztBIiK0ium2O0xnQ84GJ4wnF4wGA+plyfDsgunFmIO9q2z19kjTmEilpIkmCQ26s+mc4XiCkBop/bmV1CgvAhh6x4xPLrRAC4mnpVk6cYs4bVO0OwzPzzk5PWcymbC3t8vOTp80S0mzhDxNSeOIYZ4xHI4piorFsmC5XOKcoNvpkCQpF+dDbn18m+F4ypeGN3n9Ky+ztdWnqirKslwlZs27aOacQAS6akjqGlBgs9LmPCJcGwPWeEpOoLk28HQTptqmau2aAKtJ3Dy4oKT3d7PGgHMkcUQvTdnf2uYbX3uDf/zhD3EK3vzaN6CCqgq0DeGNwau6wCmHdypVyCimDs+06aGUAl9Bc9GlqrrSUeg/DKiY8malxtRgTKh6C6/KWBIaiL1ZvfdUgTROeHHviDo5x1VLhPQ0Bx17ylGapuStnIZ+dHx6zGQ65frV67SyFu045o03vsrZ2Tl3bt8hSxPOjk950unRbh+iMglliRKSTivlxZvP88E/fo8H9x4iZURRzDCTGfPxzBuRC3DWeBVTV7MwhX9GgHCe0hvFAmNLisr3MlXWoYXvi3UC34ukmnsUPrBzfmwoPPXXOd+r2NADvVWBlxG3TlDXNbNpwaI2TOaWyXSBqQUicStE1ycgzwRyYfA0nkoEuu3KV2K1zj2DRl6KbcXmEvprH6vaT8jGXNiDrPDJ2mRZM1ks6UUJ7VgjtQiKa/4KLoPRBtcIkzTo+eclrc/kn2uynb8zpWB/b4u//Is/4Q9+71u8/dZt3nvvDl/96iEfffBjhOvz2D1k92CP/rVtop2Mb/3pt/n+d/4fhKt8YmYczpm1ybJrPst/uBTSe6KyoePoNp4LjpXRrIXZZMLgYkhvZ5ssa3kxACGwpvYjR4eEdePBbAb0jcBLcyn+8Myb9V+E6h41UOHMElMViKiH1MkqB1zl8CFQUlHkPz+oHe/t7nH91d+h32sh8L06BIBJKN8Daj+Rp4Yg0UqMgdoYEiHDHuKrSFEUfX6ytr4L/yYbL1l8T53vDzSYqqRaelW6UDD8xL999pD4x+ewGAy19VXyONYk2oOau/uHSBkh4lB7E8/MlC9YLrnE2GAdY63jLBneczO4/P9/k7Yfn6gMGsdiWPHg3XMuzpe8/vUvc+0br9Lq7wdBmhG4KUwfMn7vMeW79+jPHLiSAoOJWhTRDsnR6+Sv/SFi/xrq8ACXROzRRogUZIQTAZiSAXR3GkEHyP2zEEkATlYyQ4Gq6zz8GUqnQjbzxPjfsQYhDXv7e+g853kBnVZOu+UFpEaTKQ9PHiNiSdZKOXv6lK2tNibPkCgWywINlMsSUxmcg+WyZLEscUKQpQm7u9u88sorHF7Z9+0qyu/vHgjxLTpfqAy8cVw2rW7ekV/P3Wptb8bS5iT0lXRB8Be0YEpHWVuIFZPFjHuPHlNPNFtWc6BucO2Fm8S9HN1PII0QKkYsHdXdC5bv3mNYTRlVU5yuWVQlxfGCshphqwXVfEY1XyCNwxio3NQnYg2FWfnij9xgLhnnWS2N+M1mYWh1/+E/jWZDM+F8LG+pKi928nnHb0nSBkI0nE65nvxhoouQpHgzSrlaJK0xSCVRTVWORu1OegTOiWAwaFcVhWerbEAISmRICJ3fV6VEI+n0t0izDIxBqwhhHd1IIdKYpCoZnJ8zGJ4zK2ui3f1V1UdKE6ho3nxSeIV4Uqno5zkHe9tce+4Kaa4gVcyrgqKuMHhpfE2Tyq77ijz60vT/BeraRuAtA
griF3sv3lxFinhrhys3D7j2wk3SScHbf/MP/Ie//TvEW//E1rUtOlnM3uEOb775dQ4O97h77xY//PF3efutWwyHkjS9oJMnOLxHR21qdDBUfve9d1lstfxzV+seLuv8RHcIksQjcMaYVR+eY43YNNe+KidbgdRqjb+EsSC1QkVRoK/6Xsa1WbSnqiolsRi/AUrB8fFjHt25zcnxU/YPjtjdPaC3tUve6pJmbW+rIBRRnCFURN7qsrW9x3w5YjIZcHpyzHg4ZnQ+4OG9e+zvHnD03FW2D7ZptVpYY1G72xwcHPD06SmPHz2hLrx1gJQSLWNqUwdl0RhBU+n1SbgNIh9xqomUJE0z8nGH87NTHj15ymg8Znd3m73dHVqdHmmW0+n2SVtnTMZTJpMZRVH4PouyJo5TDg4OmExmzMYT3vn5z5hPLnjzzTfIc+8D19AhNxM38JRIFSg+1tqQVPn+UOvLosFPBG9rEQJwZ6z/AhDSj38VgVQUZe2pA9aghOd7y6bfzfhmeqUkUhi00ORJyu7WNkIpvvm1r/Pz9z+gWJT89V/99yRpQm1q0ijBGp9I2cj3UM6WJXVonLau6Y8FrMXUFdgEGZQkhfD+bab01UpnLFopv54IiXGOKCQnKoqoqrmnoRhf7SvrCkRCXVbc+eADdp57heVsjKiXKxSuUZFN09SPyygGobl7/wH7B0dExpJGCa1Om2996/ewdcXTxw85PDrg1se3iaM+1184QCsf3PV7Xd746ms8fffnnJweM497WK3ptrscdXYZL+Y8HQ8xziGNRUSSoq4o6wXGWkw1JY4cVTnFLlJG8xKERAvod7vBnEEhJJi6pFxOEdYE/0wvP74sS+omyZd+LJwOLljaOdZJhMhRLoK6oNXWdHd2WRrh1XMD5NgsuZ+gT20cvnoiV8Fe4wvmrBf+8Oudr1yvFX79Jrm5Fv7aRwgsrHMYAfP5gmK5RFhNt5MjlOLp0zOOz8/Y7/S4urNNr5/T9GWDw9mQPAoHVJhqgXUpUdK6BB7+kgvZ+LNYIbDOeXkIrSU7Owl/8Gcv8Lt/dJ3lsubf/ZtTXn3xKzx6+JDnnr9KZ/cmb7074s6thL3tl8izFpgAljUN/p96Gb6hfvPzm+t2ziKko64LD9ZYQVlVHF45Im1lNNU6AKkDmhqea5P8Xgr8XFNp2wzSNpO15u8NsARRYIsRo+FThsNz+oev0e1dpTH8XbUy4Ne3NE3odNpUScT+4RW+8uob6O4ehZQYU2BNSbEsSJxEyVDVEBsPpvmjBVsbhJAr31cV9kEpZVCE27jkX/Juo8irxQklsEFGrPFtK8sFy+WCTnvjnM2jeubcYvMaA73bSeH3G+fBZCU0xIlfw/HxwyZJWXxiHHz6DTw7Zi/rD3z276znxm/u2EwQjPHx2Gg44v5Hx2h7yEsvf5nnX3mdbLeLoQKmqGoKF1OqDy8o3y7IlxmyDQvZZZn16Nx4k9bLfwD9lxG969g4p9aSpbGYBeSxRivhNQ+c9X3DEIZwBHhg2YkmMF8DMl5dVGHdGkStAIRBywpBhYgseSch1xk7+9tYYylnM6anxzw+fsK7t28zLmZcv/kcr7/xGlf3X8O98graCWbTOdPF0vd0G0lR1hRFCXi2Ur/fY3d/hzRNiFPtQcCN99bUAqVobC2a6//8wf5soray7rk032EzcVt10whwIqW0EQ5JMV9iFgu6WUY1W1AsZnT3t7je1hx96TlAExmFMAJhYW5qHtYLTocDzs8GXLz3IRc/+jG3f/ELTucn2KSgtAW2rphcVB5YrwWilgjbQjjlYxNdEcd61aNJyDOamMg5R6yTsEZ/MsdYK2yHvvqqxlR1sCOznkFnKqq69ID45xy/FUmbD7qDmbZYr3qf1olgrUchZJDudC70kwSK1grpcW4lftAkbp+1mUvhVfI85iHwfTIOIkmkY2IdY6rKJ5DOEglIBaAVRZaziCcUVSOjDihPfdBaU2sL1iBxaCmJpCTXmm6esbXdJ+lokm6CjSKM82bBdqNK4IIwhHFga7+wW+F7VnzlxiPWTa+SW21cXp2xlpLJvEQOxgi7oN3LefHLryDSDg+HZ5yf3efw2hFZO+f+ySlpr8vNV19m51qKFS3G5xWL5Qlnp/eZLsb0Wtu4asjP338fmcQsEsWFtJw8eIyrvEKSD7JCYOXWdFQXaChRFHlKVkOVkw2SsxZSsYGiJwKFbVX1wfPiZajMSZp3D4RqljEOJwleNI7pZMLHs1s8fvSYvb1DDo+us7WzR39rl26nS5zkaOWbfb3PWEy7vUPe6tDp9BicnTMeDDk+e8JwdMb5+IT98yMOjo7o9frEUYTDKwCdnjxlMVvw8osvEQuoXI2IvDCMDobxznmJeSUVMtKUZYmUChknxLGnfyZZynjkxUruPXjEdLbg6tER7U6L7tYOcZ4zm84ZDAaMhiOm0zllUVGVJTKRHB7skcTX0ApP5ZzPL1Wcm2S3SZQbA/qmZ7RBSBuJdk+T9Mjpikpp18pNIsDcASfD92BGCBUFFLaZ1b5RXuBW3iRKRWgd0Wq1qbVC6AgnFUJppI54591f8J3/9z/yv/4v/xsCqOoapSJwktpaahsWRKGCDG+gZ9rgveLsClRuEkshFErJYAkCpq78HA/32tynJGw6LqD2wn/55M7w+NEjHg+WXD3ao9fS2MqtAJYm0ZUqQoiI2bxkNJ5x/8Ejrl65iuopEqXZ2dnhtd/5CtPRiJPTE66lObfv3CHvaQ4P+hDEIjrtNts7fabzMfePLyjzlNHFGW0Dra0O2JrJcMxiMsPIxDdJV0uM8BRRrSTD4ZDzpxdMSr8+9DotokiRZ60gGCMxxZIP33uPxXzKzZsvsnd4zfeWSg+kWOeCCI8Ogj+Gsi59BdRZlK1IrU/elV6Pm2ZVXy/8fDKQEyAaX8Cwngb18NW62IjcgK+cfoJ65f8xq7O7zUH6qx9SCkxtGY7GDC4GXD+4ipYSqQU3ru9z9cousROkWnm5bGEC0OgbzYV1OFNTFSNuffgOu0evsHuQrcbYZ4uxrK/dNQ9lA5F1wfKmEQ3SKkJlMbfuvM/946ck+T5f+9qbvP7VFyiN5OcfDjH1nG9+/U3SrEMjf+0T4uCltrqMEGQgAiX0k9cnhMDZGow3sZeRorfVX79n59Z92eGUjTiHl9sVFKVhMZ/T7nSD0fN6MHiBsfW1NJVHh8OUSxbTE85P7vDk8ccsyjkv5Fdo9a4hxbovtyHRxpGi3cpJpjGFM6Aikv4WVnq1t0hLnLFEQqHDPFMeiWKdtG4g5U5gjK/Y+J6foEgo1nTH1U1/7uErMF6Nzq1ZCM6ghaCdpb6H6FcYs5vhsxMCpzVCJzgUkggpg5n5Kgh3l97spf+vTvbrgR6fSNg+52e/iaOZYypQ/Lu9Hl9643Xcqw6ZKFQ7osBLhaVOYUcLzMdPsB8f01+AinvMRYrZvk7vK39E9OIb0LuClTmIxNMfDUSlxIkCI50fh37T5NIYFmJjbGieFUiyrrHqWZOuK2dxwqKEQagKoQSidhT1kkVhmQ3GTM+PKUdn
zMuSl166QdbrkHVzkliTpzEoyXIyw5iKTien2+0TJ+2wX4fkXXo7Jhmqg34Y+3WoWQckn/YOv/g7XSUyzV3aMOdpEjW5ijuK0rJYFshIIYRjNBhhlwVtvUfWycnaiZ+mUqEx2HqMWc4pxjPOTy94+6M7/PjufT46fsqTx0+onj4lvRggiooyjnGuRlKDtOBqZLAZ0pFGRTJ87/c6P8f9GiGkXK1RK8ZAiFMdNvgQBzDbEYB7H6cY4/1iTR1Wd+nHqWkKVb9knvxWJG1weTCs0KTw1fSrefNeT8uRDbXAuVCxESuAbJ3Zr8v1m1TIprLQfK6SGunAWD95ZXghxvi+FalivKyxxdYFQngkPZZeDj+KIgoWHvkVwqs06RppAsgoG/l6iJVCOTBVRRRFdHe3SPoZlYRkXqJ07L3XbBEASBE40b6/zzPLfHJS1T5rF+BLtyGQJGzsQkgwglTntKMWZ6cn5O2UN//k99l/4SvcffyYn/6n/8iNXovo2g0+eHDM8sOPGI7vMpuOeHp6ztXDm3zl8FV+9rP/zE9/+jYPH16wlXU47Y1Iu20qrTmfTZg8OkVZcEpC5L30lNRBPKJ5rw6lvCqflr6y0agZqpB4evNG27D8g3qkV2l0wqsa+kpK+AposbUOW3uj7kZuvvFYioI/1WI+5/7dOzx5/ITe1g5Xjq5xeHREf2uHdqtNlra8caqO/WcZSaebkWd9trbGnJ8+ZTIacnZ+xtnggrv37rK7t8fh4RFb2ztcnD/l3u0POT8542i7x9W9LWZ1yWJeEscJ7X6fSEfUtaUU3ptOKW+KLIA6DHwrJN3+Ft1en/lsyunJCSdnQ2azgn6vR3+7R6eXs7Wbk7XbdLo9Ls4uWMwWVFVNnubs7uzQbXdI8xih/BwxxqstrpU5ZQiYoKqqlcJXMy+E8JYZVfP7ai3oE9hrgcXhJ6p1jtpYamNXksGmsRxgYy6HhcmGnkvCOZROMMZ5A9TUATIoRWlu377DWz/8IX/1l/+KuvZ9nUJ5up7SFUJrhNI4Z/ziLwhUTE8bXHtAhi/pcUMpguFlYxDu+dX+eoLSa0PfDPHVKsmta4OUiuF4TK+TYRKFotmXRTDLdCAUVSWZzgre+8UthIioSkPyckLc7aIlXLt6xGtfeZXv/ej7PD09wZqE23fu0ckiWu0eIEjznG6vw2j0iJPBgGLWIt7aYzwa8/jxfZaxwKLIdIIWitl8zsXFKbiKWEVYA0VpODsbM8dvjJGKcGik8qqZWMt8POL4wQOm0wl723v0tkusSlYbijEWTO3XJ6mpa8NkOsPZGuVieu00JPkiVNtZ5yGfsx+tVgrr/1RbH7QIqRrI1cNqDm/lItSqiGOtQ8mG9r75gc1XoNr9SkGGWCWMsZZcPdzn6v4+URi7xkCiFZFSaEA5T3HDGVa0nmYPkyAjQSvX5Hn6CfS1mWvPfvynPRcbRFyGoxFKSHq9Hq6GSEpKKygWlq29Pq/97pd4+eWb5C1NbB1/+S9v8OT4nNe/fBUlK1ZVNCeCH+YGohJa/o11mPX0XM/z87C2XAAAIABJREFUsJ+OBxcsZgN2Dw+RMghMKAHWehXmJuxb5YPhGQVRrvHwnMlkRt5qr29yI1H71LKMhUe37/GjH/w9D++/TxxXHF7dZ74sME4E+4XLw0wKQZ7G5GnEshZkrR6IyM9/QegxtuQ6RtUCjSKyoBqvlGf6EJ1rgkxPKVZKhX0qGOn+knJSA6qun4g3+I6SyO/rdYWU0Gm3PN3yMx/GM+e1hHVMUKOwMqHV2aPf36LV6oNs/KyaEDyIyzSR8irY2vzELyDg85vPx36lYxMc8XQ2SdzJcF2wlNRihkOjXIKoc1huszyX6BqErim6RyQv/iHRjd+Dg9+BuEUlBUIYpKu8vQMxiYZIx9hAgxZu/ZRZQU/eCsI/rAgRRDSaSrNqPOWaMSYEWkpqV2PqElPPmAwGzErHuIqorCZTEbv7e+x86QZCR0yWJY+On/Dw/gNOTo853N3jpevPs93dYu+gzaagnZTKU0IF68QjXK7D4r0mBatdLSScYvWLG8/5C76XZtj57Ti42Bu/NhTLmpOzAUnSIm/lvhhgS/JWRvvqnm/LsD4+ny1LxqMFZ9M5J8MBH/3kewwe3mZy/IjZ4IynF2ecL5ZMjKMSCmVhUi/QCFyBVxeXCggq5VoSxxFpEpNEEUp6X7xFWXvVeOf7ZIWQq52lri117cVIaltT1eUKGG8qhqv2LBfskayX+1crw22HVhGRjJGR4vjJk898dr81Sdvlsn8Y8E20JzZ6LLiMLMFataihSDaVl2f5zFLKVQ9Yk8BJ4UU7sHivGghBJCihsMYiLJ4LjkVLkFbibI2ra7RSnkZmHUVRkNa19whTEii9XwQWRfgs6506YqVJ8owkz0DpsKH5jF7ryDehGt/g3ASaUgucWSOwxhpMSFSaEqtbLbdgi5JyOefRrXtE0nLlYJ9E1Oikw6ktqfKMK1ev0SkLxiaiVgl3Hj/i3Xd/xC9+9hF1tSRPf0g330WIGYPBjOVozoW5gCSCNPL+KxbqqiKRihoQWiAK58U2N7gdm4mz1hobEogGCWvUdaSSyOBTJ4T0an7hiwaJaJ6J9EIlMvQmKSVxmgbwhaBS6PDNosZY6nLJ+clTRudnPHpwh4PDI/b29+lv9+j1tknSDknWRsqEOgiMtLsJWdZmNhsyHg0ZDgeMhl6E5MmTJ2z1t1CAwhAJR4TlaKdPHTmeXJwyHo+Yjkq2t3fp9zoUpWAxX+Kcr8BWtcE6jQgqorX1qodJ3uHwKKHbm3N+dsbDRycMxhMOruyys7PN1s4u29u7tLIOp8cnuNqQZxmtPEertdVoQ0eT0gu+1PWaN73qcROsLQBESLJDJa6Zo+uX+czkXU1M/7waqnMTVTcKUMIFG466xgoRegA9kKKjmNp6f6XcefqWKWqu7OzRyzvs7ex4awXhPcOM9SbeQvmeFOPWwQbCd6msEWzfO2ewq/kCoXIb1g3dCKOEjUkFlVKfsPlzWROot8bX5pUQpHFCrHS4Hh88KSWpjFf38sIbmu3tA7K8xy8++AitIl64+YIHKawhT1O+9MrLzKo53//JO2jVQaWCB3cSXnwxg1bOfLHg0ZMH/PE3v0b7fMp/+Me36Hf7PL9/hSVzxsqyXBrEdk2CTxoX8xmRsNhaslw4bB3RyrdQQlPWFcYoRqM55dJ7+GSRJEJyuLvHst0hi1NiFbG0BKN2Vgl3Vdfey8cKqjpEGkFAqVmL/Nz2OdfnbeyXhlZYM6zx/XDzsuJiMCSJI1564TqREBRlye3bd9k/PKLdbqM3VOl8MrQpmd6c/Qso14UEoNlHhGjEHjz4JvHUSbnyZ3NAHRJbie+9AOEsypXs72+RtltsUjh/tYfhr9vHS/7ftFs5cgWVOEIuRJrEGEriVkWnq8FZtKt4bi9iN9snlRLnvAKqC7KuHruwrPZc1+wteOR3Nen9zrKYzaiWc4Sz7Ozu+n4wKXzLiWuqTwE1F6FXPNyQbJ6RdbTyhF5/K9Ds18HiOnELCElTmXACZwR
PHp3xi59/zPDimOeu73JxMufhg1Ou3nREWq4qBIL1eZT0yXcryZBRghPKJ3oxJNLDf7HSRAhacYIKAOsn34sPuIRYM1uEDG0ZwisgX35vnzKsCEhX8zpFEFOoKsCSJDGtPKPGryFN2rQKcD9jyAjlViNdpTkHV27QynOyLCdptTyoGRLRFXThSwDe0iSOPQVi9Q5+8yqP/62OTdC+2dssDkGJdoV/HjZFmAjiA9TWC1ycv8vOczdJX/8XyCvfwiVHINrgPCZNk/QKi5MW19DynQprRvO+VqE9UAHVxuhUbKqyNvQ5gZ+fZel7rSazAZlY0o6sFyLJO3Q6+xiZkkhBMRrw8P59Tk4u+NkHH/Ojt9/ibDJge2eLP/z93+fa3lX0TkaS5iDWBu9CbABHG9DKpXEWCiLr+fTFs/PPYhRY57CVQwfQulgULJcVadYmz1pEcUScCLKkjZQO4ypmixHlck6xWPD08Qk//vkHvHPnIR89OWUwW1BfTHCTEW55TmRmKCqsMz6xkpIqinBxBEqRW0WsY6I4Q0W+hUEpTaQ1cRyYODiqusROp35dNMbHmMig4m5YLkuKovSxkzCrYsGaItnYjfmh4NXtfT9clESrGEsqhY5jlNZ89O4vPvN5/lclbUKIu8AETzavnXPfEEJsA/8euAHcBf5n59zgl5+M1Q02NMkVQhsqbasXb1fpK0IE1BVPT5B4qK0p9W5W1TaRzeZc1nrKYxOqCUJwEs4phUAqP80M3l2eUCZVIWGIo4j/n703eZIsu878fnd4g48x51yVmTWgqlBAASBBNqRuqUm1mYaFxI1EM620kFn/C+q1Vv0vqHfaaCGTqdUtazMZ1RJBGkgjCQIoVKGGrKzMyiky5tHD3d9w7z1a3PvcI6oKBNkwtdpIOVCWGZHhL56/O53zne98nwImkwlYw1CvpKndcdQjndOYmFzkNmPYH9AfjaAsCDjA4EOIlScSTdCSyrYpIEp9d11SKennffef96C6Kogi04YyGDIHk/0jXrn9KlvXNjl3ij/5+fu0jee9a5usTStkvMU1YyhGirfl73Pnxjs8e/Y5Ozsv2T85RGqDc/Ggcg4aHwiNQ1eOrI4N7SbTVKGNht8pSP9l7NwvN2h2tMhuDuhEXxFiVUR3VDYVzVB9KjV3VQ2TJNclLdDOV8+3Aa1sosolw0Wt0Urw4jk52uf87IQXzx+xeX2Nreu3WF29yer6Tfr9NYqih7VF9OIIhqJcYTMvo4fa+Rl7+/uc7+7imgqjNE09Z3NjhWvrq+BblBU2NkeUpebs9JzTs32EhuFwTG5LLi5miDhC08QKFVG5TQg4EZQ22KJgZDL6/SHz6Zzj00O2d3Y4ODpia32dV27fZmNjneBbmtmczFgyo1B4gksUxrQWQoqOFKT+tOXXneqjTo223nt86uXq6D9CojIn+kIcgEjTjWhhxOU664nLVAetIn3XuRQkpJNt0eeiLUFlKANl2ceiUS4wKEouZhVrK6v44MnLItIqdTwoEUlzxhJkjpfkUxaWIifdkUSiIEiIYgJdAueTZK/q9haiAqTROn4vrbWmaenR4l3ktltrUS5W3iN1u6sa2yi97Nq0Z1iG403+/n/wu/zv//J/4/NHj/nBb3+fk5MT+pklt5bReMR7773H8WTO55+9RBvhKYHc9rn21goSAtPZFNGBb777Jp9uH/D222/x3rUbmB6c2cD+wQn13gTlhUrNuX7tGsM8Y/+VFlPeYLB2g2GxylQUR0f7hHaO94rWQRBHqDwyr3n9tTcB4aKquZjOaLHMm4bpdBopuN7TuhbvfKTVumgY6xfiFomZQNSJUepy1SVt+FdeS7JQ6EAza2Iv0vkFz7e3scbw2t07eKW4mE754IMP+c28T1H0k23BVWVJodvbwyUUp/v1qYy0SBDSPV0KZES65J9YgZUYaC/i2ksiagohJKrhItlDQAeCOGaTM/KRS89iMRuXe+ClW7taV1n+bMSgArYoFiACSiMesjxQzWfMJzW+mdOGU3LTwyoFbs64PyTRSGLwoBSk6nL8HfrK74sOHjExJSHxbVWzv7vL6njEyvo60KaKtYBWC1QZuDLWXVgY9xePtpbeaABkX1XGlzg2KnEt4gPvrqQQZymyMYP+FuPxLebzGefnTQQAJRVkuXTTEoVygm+wWY+qrnn64S+YB8drb7/LqBcRfkM0XF4v85jDall8gC6fRKV4oTufwtJ6qKOXLd+xrLFcHslur1NEOnoHKGkNnTBMUeTo0AFtvzpQ7hLeIFHFcXPrxmJKK0jtCpHR4iX2y4uPexqho44n5PrfMDD/d/XVgfXdSymFlghOaPpED7QujxZkYPFb18ny36F4dQW19jrYa2AMDXE8tUhK3LL0uOKzM12V7SvPUBN72fK0PmSxiYjyiDgktHE/bVu8dzRVTV01CAW9fMjacIAyDaYcMm+EBw8f89mzXZ4/e8beFw84fvaI6aSmDpYGz9vf/ja//1//V3zr7bcYFAVWZ8uEAS6Nd3d/S7A/TtAYdy5FoRJwewUEW7yDL8+ZK0YpXSxPnHfexWr/rA5MpxeMhgP6vQyUYTgsMDYjyzNCCBwenXF4NuNkPuPZ80c8fPA++ztfcH64y9nRMadnF8y8Yi6GJkDe1lhpURpqA8oXGMkoyz5DY6IncqnQ1jPoZWRZibYlSmVoUyzXvPYEaRHfEvBYm/pd0VE0SGLSLeJwbUtd1zEmyVT0wlM6xQ9msUdHi6IYP0GG0mDyJL6oYsKW5SX/Nsy1f1dEDi99/U+A/0tE/qlS6p+kr/+7v+oCqQMkmVurhWx3nDQRxQtBYrCIAdFdRTkiuIk6Exff8u9iFbF61QmbxEzZpmZCL8lRRy03VK3UYoJCvA0lauHDEFJEEc2hFXmRY3sF5Jbp2QQ1m1H0hthMY5WKquEebICgDLXWWFswsL2oxKairH0QRW40JjiM9QTjcSEiubnXSTuo2/AV1hhyG4hAZ2wtjkGCwStFKyDaEDLDnW/c5M5r1ygKwVRCnuX0PZz4GrOyRlEabo48J0HT6JvcrAve+/73aPTv8JcPHvCv//Uf4U4q2rbi4uKEdjIhtxlONFYbciM0dYVGyHCxz8zmuEXyuxxlScm2tiY2XHZSoF2iqi2t95HOIRBSEIyKyJZOwZ8ToRXBK8GpQKsEn5IHcS2qrTESyPKcSseKKSFgVKQFdQeqAKFpadqK6cU5x/unrKwecP3mCRtbN1gZr5EXOVlmMFahtBCUQeV9+kPFhleccIDJchSKufccHOzwB3/4Q97/6GNkWOJ7+SJphxj8D4YDyqIfN3FRsV/RuXiPBoQQ6V5pDlmt0SHS+UYDzWTuOT3e5fx4j7Y659Xbr2CUw4cKJ4qWGhQUGoo2oJL/oQDBLRFh1+qEgQiODGV7eN9ilSWIR/kQpaSJ67IlMJvOGBV9Cq3xBKzWoAXnGpTW+CAgLfgWLYGgNR6FE0nMtxTUKJNM4UFbi3eevMzwLTHYVtAjGvWu9QcMR0MUHnF1rGhZg1YWozw6eJTL8AFEQ+3b5LHo8aGNXj
bW4EObArlIZ1BJ/VAjGCVYFQhKE2zBNM/QhcXgUD4qJGo0WabQtkEFyKzCeFK1tIcyDZkVyn5Ofdag2xZDQ/ANrWT01l7h1qtvUx09R7dzzo7OmeUlm5s3MWQMyiG//d57uPOKl3sv2RjkPH76BeM7t1krS1bWrvHDP/8ZxfgJG2XON+7dZG08hkwxKC23bt6indT4xvFi7wXXNq9jleL27Zp8ZUo1WGNS3qOqDX3nYPKMzVFO0dtCmdgDZ1c2eeONN8jznM8efMpkOsEkMROcj/uxBwmK0HpUq+iZAqMzcptBcIjPcU5RO09DjdewhHC+SlPsQK6IL8T9XQOl1dxcH/Hb3/4m0+kM7T1ZbjHa8O1vvcutm1v0egmxluXFBI13gbaq0cFHv8ge4HOUcSA1kKMkUdBipB53UbVMYxbs7kWQxSIzyXRiPqhUTQo5OiHtomtqqck0hNYzOz5l5WYKi5VK6qosvOhUOueEuL81qKRgKMkqISoaBqJQTkMUCtIBdBC0KE5mnvOjC3YePeOVu69hVzRWBVRWLJ9z2o9joL4M1r48HkFCUn6NH9e7wHQ2Y2V1jZW1VS5XwXyQRTIbz9J4/iY9GcTFB6qURVESa+mRLiYmR1CokIBRAaMDqqORpYRNFIiJ4iv9vGTeG6A3rtOvHf3hClala6QqqKjY4iBF9FUMPsqYI4anz5/jlfDmW9/CkzH3Aa0ca1nGqEhVgZDUj1XcUKKeRMAZkMxgxJOH1B+cWQRLoU0KfyNLQKdkPj5as+iaR0saa8hNTk8V5LkmUzm+9TS+xWQlRheg4j1frbIpLtM2O8pl15u4UBwMId6/UvHcpEmWQekMVBqdRUVNlUDqv0X5GsCVClv6zqVF3SVTAdGLjJ/85i3KO1tQWkT1QGWLuQ2denk3Bp1cWqRHg0KURbqkXQJKPEpFeqp0BQaJ7QNn51OevXiG947MKDbXxoxGQ0yoqSfn9Ne28D3Nw90dnn/+gN0XL9g/POXjz59wMplR1zXiKpRvaFsgA68Dq5tv8sr9b1EMyvhRJc4FxWUQZAGTXf1SpWejEtNJkt+qAolSeTGZQ6d/N/gQ13pAET99fB6iLLUoqnmMRZhWuMkRxXBErzeAQR8pMmqrCWTU02Oqg0Pm0wueP9/lL37yCx48fsnB2YTzsyNm8zPadkZbz6ED4kVhdEaJwqqYYGV5js0Liqwk0wWZzqKNiVUUpUEbjzYeER0fGhmB2HakdNz0vEu2VMFhpCWjR9CeNmtwBPKgyRsovVA5RZV5dL/E2DLGbSZbFCIiaJWec9Jy6NZ0BM89PkSD7l/V+/n/Bj3y94DfSX//H4Ef8iuSNqVAlCyUyeL60ZfyeUE6pouH4AUrHStbFhUC1UlDB2IvVdr3leq2UjAmZrodJczY+FBDooVZZRZ5Rtdg3NE7jFIom+GlRdro15IVlnJQYgcF7nzCbDanX87p93sgsZwtLqCDxPKsMSiTU6h8sZBDWvZllpEpjxKHp01qlTFYV4oY+KSKiVeRDpLbWM8yyfdFrCK0sRDfSENTzXn/s/c5cXfYXBlTtMK1zev8R99/j4N2hq5awvkE4w/piWaqxsxQNO2M3qDke+9+G+ctw6IgVxlffP6Qh59+wPHRGfOZJ3hoqorWxNK/1elAsBmQlCzVJQQ2jaju5L4vTVClOtRBL/rSun6W+KMq9SfFpD5KMyucEhyOJrioHBladAgUWpEXBXlRYoyNMuWJC+1DpK12KJCKzC7Oj06ZnEw42NulPxhy59VXWVldY219jeFwGM2zrcUHjQ+aQTnEjxryXs6snjP3jqOLC5r6BScnE7yxCa0WtA6pt6xFG5XUGWNARBBKpfBNhRcXqbAS+xusismbRacYQkFho9S/Dzz5sGDQi+p1rWujUIuNc0vbDKNiCd4kKpKOkoRJ4jg+b2MtWepbqto5NrOIdxTGUJqMzY1r9McrTKqGR8+e0dcZb9y9x+bWKjaTGIjoqPQqeBCPxmFUoAmB2nta51M12YNy5Mam568IWiFGkNbRyb3rIqMwA0yRx7WWZ1itUSEaVDeuJYSMYT/n5ERhJK4FpaJUf3Wpgo5EE+mgDXkWRWHaTrbe+4jGBxcTjmBptWVuTQrQUg8XJAXDgMKD97TNHAkFWinaUCIyoSw0/WGfC9eiXaC0ASWOVue0us/m9bucN+cMrGEym/J0/wjbW2PUH5Dbgju3bvHv/+B7/PCPJ7zc2ebO3YxPPvgp65ub/Kf/6D/jT/70z3ix/ZzbmxsUqkZywRYZIUQ13WxccHpaMRoN6OdDxqvrKDNmeLTDp2fC9umM86ZH5gOFO8OGdbTS2CwmuS5fx26+wmDQozg85bR2aGko8wzp95j0erhaURZ9xoOaPC/o91pC8BS5pcgz+v0BeT5A22pJGVnQ8C7t/csdfgk8oZYHXfqp6+ur+PGQzGiURJnrV1+5Tb9fpJ8I6SoGv8i6LMaUSDulbs7RNmBMhgSP0g1RDKC7icCigyclK0qW9SdJNDZJUtxRnLADpRLoGAyEqFwc8hbRGsFhaBkaQbU1Ku8cEGNl0IfEFAlCaD0XZ+c8e77HXOc0csrqas7ZyZzD/YC2fWo1ZS5nzILQtCPm5y1Fovw++vwRmYLdL55ydNqyvmJBOboUVLpEYjEEX64ILNDLZQ9r+ojW6rj/abPoB+ueVae4uiDzpRhWI4Qm0M5arLXY0iCqTMFri8LjU2XN4lKCoQlykfpel/1uouK+N14ZsDLqMzE1rI7puYyVtQ2MUtiQ6H0kOpwG0ZEar3VGWfSwWYHNc1wzR4vCSwQ5wZG1DtsTMAUxIE99SCgICsHTEnAqoIOjQINWtEqRKYvVi04xlDKRDrpI8pe0NJGYsQWETFkyon1NLyspix55MUg9pjmKxPX/8jhd/mt3qHZIgIn976KjiIWEeCYGCYBB2RySgFcEQ7963b8NuZu6Eluk5D99LUoW1u2kREanZK4Yjxbv62ydlboaLF+NqztacQSbRCl8EvuxJNReIpsq1f3R4plMLjg4OsbmfVYGYwZlhg6Oo91D/vSP/m9+/Gc/4kIrwuqY+WzO5OiEZjJDmhRn0X0mwalE11Y1ToHOSkxWRMAChSiPwoFYlLLpfR0dVlKcfPkzJXEvCahkwC0qCryhDIGYAIYQY5PY1hE/nBJwdYvCIaXhZFZzfDZjazjg2kgjRY6SmtYJp+cVTx5PeL6/y/HOF+x+9pccP/2U8+MDzi8aTqce7w3OC613tCHgUnXb2owsK+jnOb28JLMZNjORVpll0ZM1z9FK4UPc35Ui0ZoNEirwktaFS/3n0TIjgroJTBNBQgt+hM4yyAKiPbqBwlqUKrgQQzkuGa4NycnQOuoz6ISgRMZf0lvwPilVL5M272P/u3iH+wr94Orr103aBPgDFTkW/4OI/DPguoh0XXS7wPWve6NS6h8D/xhiMKZimr7sr7lyeC+59cYuVRIj/cYvGnC1il1pkqoyMYdb9lFdufEv82w7vvNl/nP6ucWP6CiMEBO8+L08zxgMh4zHYy5OL6imFZPJO
RBQJlLLJAQk6GQOHheZ9yHKBweL6NgwnZksSlr7OIH0JTVN0SrKjLK8T23MQo5ddXTQxXYbMJnn/PyETz7Y5id//gHvvvUmr9xZZ/98n+tvvMKrmxt8+JMHOJWxsTpmZe6YN54dWp7NGsr9XW6u3uTe5jWy61tcH6/xrddf4+NXNnn+fAebD/nk04d8+tkDtLa0bROrbBJR7g68u/yMv/4VP1uSr0uspQ4R7gQhdKxuKp1YdeqrZ1n3pXS9WwW9ssT1G1wTqy1R7lpiH16IgidIVEl00lX1iDL6bcvFbEa/32dza4ubt26xubGBKXpUrcNVFb3MktsorKJEcHWDeMf61grfeuNt1osezcUF3rcoHQNLYzVeXDpBYjDVNa2ihDa0BBxGp0b5lGx2tA7nA5Vy+IEj5YM41yJBcAL4gHeO1ntmumCqQbQQfEXTtEltNc7HtnWRcmUNZfBkoSXoSB1t6xpX12RYbl6/RTkcQ56ze3SMm1UcvtzhjW+8xltv36fs5TQuiut0qq3dMHYmk0p31ICYKHnvo8l1ok0t11mXaAnaJpndNKbBL5MymxlmTYMh0Cst1fwMFRq0SOqJ1IRW4ZoQhSpksffQIaSdgmnrWpo28tJDMMv76O6dpNjqfHqPR7xLKFmiNau4Kds8Iy9iAhxSYC+JFlEWPfqDIVVe0PrAydkFXzzbhnyFe7fucOP6Gp6G9du3eOe73+VHP/oTDo+O8D6QGcO9u/e4/Xu/x9HBIRdnp/SLHjp9tiAKLUIIDlfPWRsPMSpg+yW3t24wnmwyfXrMg/P5IskJQXASYmUz+eqhFaJjD6nJbeyX8UKWW1xukHSweXxSvPVcTGf44BB6ZJnFSaRGZpnF2svUuy9vCpfW7SIClSvfgUg/jfOgOwcs/eGAjunXRWMd4zEASkPeyyDvYX2LVjMkRBaAkCo8KjaEq045WBKQplItY7n9pzvr7l8W8Y4EF6kU1kX6UdDUlASlsXKG7jVk9gI3OaDojYlN76prF4mAnGs42nvJZ598xCcf/RxtWlbWetQb6/ziw4fUdc7W9VfYOz5i2kxpRDGvFVFUA7Ttk6EoS8Xjh5/ynckFihsRDRedhA2uPNqvHYfuNej1GfZrrLnEajHmEm39UsIaEqVf6WTDoEGDd57jw0OKPCfvjxcsrJgjxwjP6PQcVQ1S4VzN/Ow5w5Xr2GJwZYLEM0+RZYa8sBRlwc3RFndu30q2PyxobnHAhOAanPilx5K12CzD+OZKkbFpaqpqzniUL5PENOqXR7xj24TgFy0bIQWEWnUiOF3q1l3/0lzuPj8RFUhWkYgSbFbQOmiDZdRfRalseYO/6qW666S4JUS/txCimJpJok1KGVDLgP//C0XHfxdeC8yBq3HEYqQWZ0X3jkvj+bVXSoBTeoNW3byRKM4jsRQVlVdBKU+/n7G2NuDk9Jz3f/Zjvnj0OXs7L6gvztl9/gVtNWVuNafbgChyNPklS6yQhK6CIsWGAeU9VlusOcWoCkWRqsQRuFHxUgRZ6D3RIVfq8jNIi0mJgHMkbW/wNvrkCigjqTjpCKHi/GQOVY/1YR83r9BFwPZKxsMC27OUOnB4+pzz7c84Pz7mixfH/PjBcx68OOTg+Iz6/Bh9cQQXpxCEuTPMvUVTY21AJaG6MivJeyVFUZBlOWWWUxYxaYOlKNDluL+rZnXxvTapbUQrgheCb1PRICoGO+/SnhL71yTrEYwlyw1ianJrKEWhQsCEkl6Wg3GR+WNMqupFE3AJIQkWRa/a2Muv05kbYxCX4qEk1/RXvn7dpO0fiMi2Uuoa8H8qpT69/I+YWjUmAAAgAElEQVQiIimh+8orJXj/DKDX7wlCVFJRNjVJR7pKpMqoRf+SSZtvt1ZSt0xcZypufp2ClwRJ/FPpfueVJK7LcDs5WKVi9SZurjEp6oK6pUplTKasMbhUZu6XJVvrG7h5w2E4Yl7NaJqKLDc03pN7i2i7SGKW5Lx00JHUY9CxsV1iEud9QCuzqEp5wgKBjlz6pa0Bsgx3OhA4hEDTOoIPVJMpk7MLLm6M+eDnP8X9+Ee8ef8tXjw/ZG3zOu++eYeetYx6CtvPOW3njCYzirJiKx/w9OSC3WfbvHNtnXt3bnLv7qsMV7a4dvsWvY0hk+kpL7efcbS/T6iiyWBHhVwgn1xCvC73F8LCe60LGhWx4hqD5ojhKol8cqOWlNZO0Q86MRNL4+qY9GpNr99DWkebNbRVDCraVtN6h3KONvhF1c0HofVcWvSGtm2p53Mm5+fs7+6ysbHJ2uY18l4P7T2h38NaRU6WbAMEfKDILGsrY5RA21oEjY56GejcpOpOQOkod+3aFhFH2cvRydTPmphYKYmtjSqA1VksapH687qgIsJCeElGrcTES+mA93WcQ0JSJ0vmkCEmdyhFluWg+3hVUgcfN8U848P3P+D08JTf/M53ufXKXc7rhk8+/5xeXnDn5g1W1oaLa+dZfikO6mZiEshJ1fCOFmCUjoqVwWN1gQQIzi3EQLrNTJJojdEm+cjZ6M1oFG3tKe2Qk/YlP/mLH/Lo00d84413sKYmNwV16KODjeqw1iYxIx3V8pKaZrQEWCo9LZRMlVn8Tro9QyWkLqFnC8U4Dz4EtIobrg9RQTMQKditD6AtolqMTh53WmOyknkrPH72kkmt6RcDVjfGiBUaY3jz3Xc5nVzw85/+jMJmnB8fM19dY9gfslqU3Lz/Gtoaghdc0y4kx89OTpC2wZQFWgkmVUCHq6usXyi0eopQA5GGHLTB4dGiEQ1taGmSpHTrG7w4xDcIDpc4/tE0NqrfeSRVUhsyZzF1hVeQ1xUKA6rbO/864gZfn9R96fCAtIf7NMuMWu5+ybs9ATItVnsMHpgS/AWhWUNnm7G/U1VxT1/UiRKoQKL6XQrsL6eTHf0pBt8BUQ0tc6yqEDXE+biORab446dMD56SyV2KcWQhiAHxKoIpeEzekpcTvHvK+mCPjbzl1tYteoMGe6cgL1fwoUKOTxhfW6dpPTu7O1QCre1z0c45PptxvP0E/Iy6rqJ0PRqC+hofrr/6Vdd1BBUXz7ujh339mEXaeaJJBhf77RC0Fcphjs7jmusO6mZeIc2UYpyjFMzP9jg+eMSL4x1o9nn3vX/IsLh96Z4FJNKZy9IytD3W1ke8/cabrKxuXfqxpQ1J97WkTKnzEs2spU37AbBQpM6yzoeJBYDGlUt1wjpxv5MEJAmkPUlfSQS//klBtw6UCthMYXKFygwmL+gN1rh+Y8xgsI6xxddf5OtewSNaEVpH0zaJnqWwNkN3SkBXErW/68naX/f1y5K1r3ulHlMCXjpZOw8hAqoShNpDVc85O3zB/t4eP/v5L/jwo0/Y2dunrirm0wsGRY7yDUYJTdPGGCGdN8FH4ZjOZ9WRKvdBkeOgjWdMVT3CuX2QmzG+FI1SUaE0SMu8VUwuWuqmBgJ5kdHrFbGi2InCoYjm81US0eihlcE1PgHgLd4d8ezJT8myGRtbNwh+HbJNstxTVTWPPv2Uj54d8+Jgys72Li8ffsrR80e08wOq
dsLEZTSqh2sV1HNKFynwoHE6J+sPKXPPsNT0Bn3yssTmOTbLYx8pOmkUJD2Ly8OmJAKJIssYhEjrDJLiI6UIEggCVglRSKYrDgRCaGhQNKaHLvtkWSDLCmxmGGaGXMZMvKU5UzRygcKhk6+01lEMSgCll/HPIh+5VJDxCG3wV70ef8nr10raRGQ7/bmvlPrnwG8De0qpmyKyo5S6Cez/da/XqXR9adddoFcRDE4IlbbJbiZN2hRwdQFkqu0ulCW76lQncX45ces2dZWoHp0RnvlStU2p2H+mlcLGKBMfIDMZo8EQt7UFojg5OqWaV7SuRVQ8JHKTY7JIzcpzG024bTR2NioFtkkC3WiNGL0wP5WUl3aIZ/e5An7RaBsWyaVe3q+UaFVirKYsPEUWqCRwcFGz+/AZ7YHjvJry6ZOHHL+8z8bGKnW/5GzaMs5y5vMaJ45CFGWtOJ5W7JzscWdcElRge+c52SDjG996i83rGzx8+IA//eEfs/PFC6zWiL80dosEbvk8Lw38ohohKkGGC/EAtUAnO/W2rkdcqWV3jARJb4nIsPepJ6qXo1xJazWNiop0xmpMC1oLOIHEXfahsyDQ8bDTASUKY2NVt60bqosZBwfHjFdWWV9dxQL9fhnnXRCsVmRWo0Wo51Ma1ZCtZgxGY4rCJqVtwROoG8fZ+YTpdAa5Zm2wwsrqWuzBJCZtzjn2d/cI3tHPB8waz8rKmH6/wCgSqOCi+IeCTkLaBYdIiD5ETYNI7JGLviGRC67TWAiS/Kd61E5xOp1hNOn39JhmU/r9Aaurq/QE8n4fJcLqeESea3xoo4iFi4h26JDFFNnETSsFwxIwxqKsjRXlkOg7aSwRWYAocgkp00lJraPjB6/QGIKbMSx7vPbqKzx++IDJ6Q7v3L8LjYo0RrE0TUSxgnP4dNgtENAEN3am4dHOoPPLUYtxRUiVwkThTftLt1VJQjwFhfOB1ntcshCRpB5V13NMPohjIkLrhdv3XmPt4Qt+8fFnvP7qfb7xzuuxZ8vkaBHefucdTo+OefnsBb0s5+mTJ9y98wpZlkWUVaKyrguxb6KZzZjOp+TWUjVz8p5FWRZUY6PAtxeIN0hoEGJPkmvneGJvYlA1IdRUTcu8mlBVUzQtEto4r9I8q9ua2rWIUdgipw2exgf6WiemVojoPh1T4qp8+uU9/ksbAl8Oq7o9+urZoNL/4zxLIXhkI6SQQyuQtsFXFxg9pQ0zRAoKq5IoT5sAhh6IXmgxLLBG1dVbLlUxYBkgJIAFcRg8hjOODvaYNzfZuKUweg6ZYuPaBn6sIL+IiZrkzBqJPZFWQTBQK9ykJpvPuTFwDJt9CHNujTS9YcYvPvqcd1+9wY0bN/joww8Zrmt2JnMOvTA9qdnZPmB+dkxwFbN5dfXR/nVz5vTkXdPEqvYlNHBhiivdteI4aaWiOB7ggmNyckJv0Kfs99jY2oiBbJdUK4VgaJuGMK/Iep5qPuXHf/zHPPnifaZqxmt3+gTXXL2hlIZrA0WRYZXGWsPqykqS2o99Qt06jb1jCp1FWfPO0gMEaw1ZZpPFSdz/BoM+w2Efpds4/tIl5kv6oIRA0zSLM/eyWrXRJqHp6We/Uqm58mHoKjPaKpSNQVtQhsFwncGoH+dDhxT8dTKMVBYOCcaweb6gt8YocJlQSgqY/s5W2RaVp1/1ugyDd++5uv/Ipb+rJO7Rsb4U4J1DXMtkMuHJ0xd89NkjXr58zs7TBxzs7zOrauo20LokcGMM86YhU9CmZIJUySWd2111N54t3fEUFj3o4gPHp485vdhmZeU6VkVjahegahom1TH7hxV7e2dMLs5p24ayzBmvDCiKjPF4xHDYoyxLctWymp+gQqCuS44OGz75+Am7ewesr/XZ2Cx4+PADxquat23G5Gyfn/7FDidHR+wcnPDh5y95sH3G4bmnmnvyEDCuAneGcxUtOdiAUZaegTJRHfN+D+kNkKJPqQM9A3lRYLM8VsdSxTuOTaQyhgQedeKDy4pkWHxvYQclASceIaBCBHO9j9oHXa4QcFGQxJSEYsxosMbGyPLO23d47d5drvXH9JshH//smH/1Rx9RzR5h7ASlprHIFJazY+Ef3c0WpS8llNGjzfsYj/yVysL8GkmbUmoAaBGZpL//x8B/D/xL4L8B/mn681/8ymulkHzRnNeJRcRYPj4AidurNl3f01J1TpnISV7WryKNogsUYYmSLRK19LW1Ngl8LKW/Q6qqqfClipDWi9KOEhPlnDUE5cmsZTweRwpU2WM2nVI3NQFFGTKKECkxXa9AVzpdJGUQKWLSPY/kOSSXeupspLmIdJMgpOpjDERZ0DtBicaQcfPabdbLgg0jrF3b5EVTM2lgdXyL126/weOdz3ix/4SPf9HSlp4X80Pqec7b1++zUQo31lYwbcNK/wbXv/UelRwwP9xldW2LW6vrhONTJqFm7fomN+bnDMaD2E+oiap7V8aZK+PxNZMqUeP0soJ0iSKazu0YjMXJh+hLVDyWybhOcu15niFFpJ1qIp/YWkVjY2VOuQbjFLrVCzqqQkUpa6WwOi6RSF/NMVrj2wYlQq/IMUpRVxVmahaTNbSO4BrEt5gcxuMRRZnHcpkGj8cqw1l1waeffsoXT59RFn1euf4q9+/eo2kbTs9OGI9GXL9xg6fPdrg4u+Dm5k0O9g+599o9hhvjuPn42M+4tblOXdVUTRMPa2Po9YbMPVRtAyKMBkOssbi6IvhArygoinzRu2lCw9gohs7x7OUO23u7eKWifIRRiI7jsba5jvIOixAk0oQkxMp29HmAziNNpPMwiTS6+WzGfDajPxqSWxOpHYlqVGSWKMAYN1vVBUZpnXbrEGJSbrTBaDAIt29s8YPvf4fPP3vMz37857z9+jcpM0NTNUiI+0NXScuMTp5e0PH5XeK2e+cS0ndpzoosE7cFbNIpx6kFioeKCm6S7A58EDwKk+coo2nmM/q9hjLTqSqnqFp44+1v8eTZAcfHp0jjF/fo2paN9Q1+43u/weTklL2DfZx4Wt+wtr5BbzQmL/tpr1MURc5kNsUjFKMe3ntqcbS+woYSEYvyDppzmqkjzE8J8zknJ8c0tk7zQMhWK05PtsisZm/vBdvbT1ChxSrBSvKWKQqUiclZ3bQ0zuMEWh8r1yZ4nGshy9K+Fjc6WYQ53b66/PviiX9dPPV1gLcCJYKWDuAJsbLnPM43ZLlFJUW3ULe8fPGI5/uPeO/7/yVF6hNeKhTamLRJEjySSD3uVFHjbSqW1LIU8Kd+PYNF+4bJzgMOnh5x/+3/hJwZvj2gnsx48fSAG99tsGaK0oYghv4ww4hCe2hOKx6//xnbHz9jQMPqzTHnB3uY/pTByg1c3ZChuXv3Gr49ZZhPuPP6G0x/8ZQHT054sX1C3Wp8UJydT7mYV7TE3m6tomFo54f0tXvvpbUFEbyI67hLThPwsiw1LmxBlO9WheJk/5jzszNeHa9GqmRmY3ClNVp5fFvRiiW3BlWWGJPh68AXD5/z/NlL+teGaCnw7dedDzFps5mGNs6dtnUo35A
ocQIlbKmGtGBOpnzWKr9xfZaPPvH8Z+SMzk/B72PppaCyJzinky7ryPqnShvOddDfJYcMFvVOBRzkFaoZMUkWV4neCRIf5IG3gRLIGcC+0N1nkSpVhqdsF2mEwmzGbTcN69RaIQMkED2jmcVKRKkWhBM4UsCQnZQqQYq2Uh1k2SC60NJVUAD6RE1GwyGURpwjgL/exSakBincNaM9etsPaiX/EXbf9/e9q2FxKxQ2A7/n0FeLbwut342BeSNiHEPwT+IUCSpPOMvK561GpSoSLjYqUjKk3VCP580EQcqwZJvCeo0cSKl7/wXLPWIuQF8iaiH4evmwBZRByIyEG9UIj5cby4zdGL+H8X6QoIsHXSJiQORWkcRWWYzcpgYVBpSuX5/5h7s2fJruy877f3PkPO9+adpxqAKszdALrR6m6STVKiSClEW5TDVMgh+8HhcPjJ/h/86r9BL35xhBwKWwzaQVE21SQbbHQ30QAaDaCAQs1Vt+485Zxn2IMf9j6Zt4podkvygzKAuHXznsw8ec4e1vq+tb7v5OicNJU0GpI4wru1A0L57N8nFD5p8x4gFXPljaeVjIkUSOnLLNAKEdVDo22GdHBtbY2Xrmzx44tjxicnbK6v07AOPRjjygynI+ywoB9LzsoLusvrrGwsku8/5vZ7f0GtGLL48tdIVzp84+tf4231Fsoobn1wi2JY+p4y6SVaXWj+9InO7MbP7nMFagk8slHR7FKaqpBgZvjov19YhEN5hHVyJqtqrMUaPDoXREFkSBjmSdvstjPbUC8lb5dN1/14DAtYFdAE6X8bTryqORfSl6/aIN1aliXnF+c0D2vEO9t01xfRegdrHO1Olys3XsJJODs74mDvsS8dMWOSJGFr5yqNMPlLCyvdFUphKYZjms0O9XqddmuBRnMBoSK08d27URyTJjUajRbNumWx1SVNmmxcfZmlrWvIWKISKM2Ep3sP+X//7R/z3rvfp3d0QiOuY6xDRilx4m0uyqIkUjG1OMYWBR/8+McIbfkv//CfcvPll3FSEKctFlc2GI/6jHoFzhYoKYJ/okNJiRICrbVnIpk32vpafYW2bsY6KSGopzFaG4a9CxrNFp3tDgcHR2R5wVJnge7ri5z3ehwe7dPv9zg/6TMcTOn3Riy02+jFDkUtQemSTj1lodVECM/CprWUtbVVarWU87Nzzk5O+MGf/zlPX3iRt99+m253mSRN6XTaFBbqzRqvvvwKn3x4i/7g3BueSgnSb2JaFwitEU4jnAUZ+7XFOWpJggKyyZQnTx6TLbVZX1ul2WwwGg05OLAURcHq6ipSSYyxJPhr5YRXuHzp5k1azTbTLOf99z/kJz9+n+svvECns4iSkrLM6J3scdLPeHD/PlpPSGIHaJy0lNZikJRWgIrJLMikxbWrN6n3znmaP2X34imRVDQShc6m1NdWOTw6YDIeU6/XfGKpIobjMfWGZLG7RGehg7NgSChNRtrueLPdwYTT/oDM5ayurVKWOWcXPeoLLWQxobtYY6mRkE9KTBLz0WdfMullNPM62ytddh/e5f7pMdo0GZ7l3Nh5lbv3HvH/vPcBv/9b77C9VOPlN1/i4zv3+OSzT9leXWPcO6V0io+e3GKhvsR0sMuiaeFcxjgb4oSh266RFfDXX/ycVKS82O3yeneR39q5wujWlOOzY3KTM6WgEDnbO1f44c9/yFE2Zmm1xSiz1NtLLHc3OTy6zfi8z60P/po0P8O11zntDXj9xlV+452XOHrwAY+KCcY4egdjlhKFE5at7Q2s9MRsd2WV0kCBJu4sspxEXL92jd44Y+l8yOHhEUdHQ5pxwo0XX+Tu03MSKTg7OSWpd9m6do1YT+mPcoajjGYkMU4yyQoatYTxOOPg4AQnQi9K8Iv6RazRV1WhEIAUL2hFFacQRYpISGZboHg2XXq+Z+4yKO37QILQRygKFBVTYL0djrXGI/KyUoqsBE3mlRrVxi/c5Q+pPuirHtX67XBo7/MoYkrnKARMKOl0mqxuryMihcER/fv4I1z6aL+7/c2EzYZrW4UeRvhjpQXpQmAnytn3Ei7CihTPlRsPProIgcXYAikM1sDZUDOalJw8vMvg8Db758d8duo4iF5ia2WL9e01lqKnXF/d4GbnBmZ5lStNSMUEYU3YDC0uTihJUEREztHb3eenP/+In96+zZ27tzk/OyabDEiURDpHLL3nVpkXZFnGOM8psBCubRRXbIKYXR9rDNY5Mu2FzazxMunOmsC+enVhZytWMrz0UiJUefT6fF/ORMGkJAT5LiQHgZVz0ntuSkUUxURR4tm5cDOkkGihvzKmg/mcqMoFBfNkxj0nt//MezgXkvDqZOep/TPQwvOve+555+Ykg49P/XvOmLZAJggpZvPsMnGQZRllWQZaeS4koiIPiEdR5P1rQyJ7uQ9u9t2q7y4q1W87A2Rc0JGYj/3LZEbFhlfVVVVFE9hATkvrq+UqxUUXmDdVWmTpuWUlvYcuTkCpcXqKy0tEkiBMiUgTXBThiBGyhpXBssL6lhoVzkxrgxSSWqNJlMSU0xEY7ceH8uNeWt83qKXwPrHSBoZYBfIo4EcOnLMIp3yHqHOzGS/wlXChEBPn5uIulZCdkD551VpTliVaV2CWgF8wFqvHf7QQiXPOiV9mLPDVr/sXwL8AaDZbrmKxqsbFyoTvcsCtAm1c/V7dUOych8N6kz0rRehPmlPAlwdkhfgb42Vaq4FflXLNnhO+Btda6wMy88x3mF3g6ti5F4avPbaeapupGAoZUZQWayVCRDhraNSbbG9cZWOrS7MuiJRGCUvpCn8uAYtD+kRNCj9pIil8027iFyLhv35AWjyyJPD1w1IZWvU6L65u8/d+6zu4s3M+/PGPqcUpN154hekw59Ht26hiTKcGOSMmo3M2FjrUllo8vajx4e1T/urTP0W23qW+kLL1wgY717fpDYcoZxDkOKGDt1YJVuJcNDM4pfI8m/Gm7pn7G6Z5WOPmi6WcobYOrK9tN2GC+wk0L8Mx2ps1CtwM3QhvFO5t9UnPMW2XWCBX3VuqRfPSucxXK4+qKDVDf6skrtFosLiwyNnZOVGtCR3FNHNEUYfNrTXipE5hC3Z2+rz2yuu8+/1/y+nRId21bWIZc7Z/gCs1ca1B0skpnGE4ybH4sTMeF6ysR7QXlrEIr5wWAAlTGt/KLlMaaYN6ow1RCrHDSc2gd873/92f8t673ycb9EgAM808K2s1JAlx7MVYpJTBQ0YirOKjn75PPYn5p//sn9PZ3sFiqTc6dFfWySYjpkWOEL6XrWqs9cM+KDHiQhlkBsKSpglRktKoNymKknq9Tl5MqKUxRWkQAhppwub6GrtP95lmOc1mi621NTrNJmfn5xydHPufe8fkCxmxlAidMlGS6TSn3agjlCBKYr/mFwXNZpMkillotjjY2+fxo0ecHh/z6iuv8/Jrb9NqJahYEKcpG+sbLC4s0HOOLMvIi4IoAaIgd2wNGI1AEgVAQeHLdoRzrHS7XNnZoX92yKNHD9jY2KDdbtPva0ajERcXF2xubtJu
tynyEiEFcRKjgE6riV1bxTrB4f4hF+c99vf2MdpQr9f54Cc/4s6920T1Bca5wE779E/2KCcDakogqtJuhPdrTFIGoxFOWa6tb7KY1DnZ2yXLJmyvLlPmJdlkyGDQJ8umIIS/L40GtVodGRkvN7+xznAwpOyNqTdScJokjugurzIaFxRacnYxxEm/EU8mQ1r1GsZYjs77bK0tMzw5pewPWUybdBopb3/zBm+8tc3EHlO6Fj/64C637z9mf3BGs6Z4WvRBWtx0xGg0JC1r9M52+cf/xe/zyaNH/Pijz6jlj/nt16+y2oi5ON0na9bI0ybHw1NGkx7XlpZY21lnpVbDxJKzYsTSUp0XWERtLbPciJD2goPeLWRnxEp3CT2Br914kSdPPmN6/hPaMqYZpQwnmi8entErjpjmEx58+SWd5iK983OMcTQaCW9/820GxweQjUhriihNmeQFd+7dwwrL1asbuERQljkH5/dxwmHlmPF0H1k22FleorXY5lqyQDnu83R/n1Z3nc7yJqa3x+rmBs5dkFhDUk5xzqGd72NyQgXQz6+fxhii6Ku3ey9c9eye5rShmPkpzcv+aklCXCU0wmcYVbhyORh9NiLwwKjAYE3BdDxByYi00fFbdhHsQYRERrFfq2VI2JybM3KXk8pnHoG5my3t4tLzzx3pSg/MIpnkGYMx5Nr3HsZJEB8xzILHX/aotpMZryccM3nNOSQ5S+VECLm0gMiFJNYKnIgxIkZSBnZNofH99lHo2nKAySf0Du8yOrzHwf6QP/mkx63zOnUKXkgPeXNH8o+/+Tr9+ht0VrZ4ba3JSu0FqFvipMUIhbFQygaxKJCmBFOgSdEyQVqBHk15/4c/5X/71/8H9w/3SWsxrXZCmiRgC4wpmYwz8kxjjcCUvhdMxnGIOS04EeTkSzDBvNl425YSb2LsjPes9X1GAinme6kOzJV63kOXKtYKIUJI2mYxharMlisw1/unSRWh4jj4dSkigReycBJdlvzix5wxnpEFISa4PMRdqOypxM2qu+2Coews7hDM4trqdbOE9Dm2r6pOuUwiVD+ddUjpQjwrcaGdREoZrqXAVUmdg1L7HnYqFqwigS55t1VtPlEUzf4XQqDiKBAmEqXmCpOXz7n6XZtKQdH5pBqfdDrcrDLOWeHB9uqehaTNOItQEuEskbZE1hJL5f3z8IlQJbxijMNkpU/2dIls1IjiCKMLXORZwTgKpZwOjNZkeYk1fu9cWFgkXmxTTHtM85xMC0yeU5VaVrNWWYI4igpFtgHDryoP5uHgzOJIWhkqvnyMYMKaGsl57mG0pgjzwhurM2M/f1k29R+atB1VZY9CiE3gODy/B1y5dNxOeO5vffhAOWSa4nKyhkfnrL/x1Re2xoCL/Wbj1UE80m399iED+yFwM3akoltVpLxBsvR1rz5D9lN+lhhcQguknAfsAhnqzy9NNvELJFjD5PCiJSpsQv77WedLjq5sb7OwsMJyd4u11W26yw2y6TmHR4+wJqeWpIGt8uqVzgZmMCCOSlaNobMtY5aEWGMRTtBIm9gIpqMDr+KT5yS1lHfeeQdXaj764Y9Ij07YiGI204Slmy9w7a3XGOQDHnz0EWUxJnOKVDtWOg2GEWhhKIszDh5d8PDOx5z2hgxPe9hSIm0daZQPZkVQyRGeShZCBFNwC8Hjwxjr/cSCIWUwdpstDD5JDSWUzhEp6b14hC8VrYwJER41EkqirQnMnb/3FSI1A2aFV3yKYzULZGaLsKgWPDFvCK1Qo3mK9+yCbe1MYakyrHQ4VldXeNg/YzIYYqaOlZVVbCnoDYY45dmUes2xvbOFxDAuCg6Ojxhf9FlstRhNc8qDI3KrKayhLEfEMviOxHXW17dJ0zpSRkHByoMEsYqQKNKk7hO6SGEosbbkwYO73Ln9OcV0QiwFSeprvW3h0JSQZwhRn/mrRSHhcmEx/tkHP2VlZZXf+YM/pLm0RKlzut0VBhenDIc9MH5sS22DIXxAlKW/fkWRc/vLW4zHY65cvcpid5lGswFAnmeMxiOPYElFNs0YDoZY5+h22qyt1Ohd9MjzjIVWmzSps9Rd4fTslIveBQvtNlvrW6SRpJ565TDPilkQiigSSJkjpaVej6mnDRq1Jv1ej8ODA3728c85OB3xzjdfp7tSR5o6uAQRrmlZGo+4Cc9uijDmrCv9uCsLhPLrkwLqSUqjVuM73/oW49E5n3zyMXt7eywuLtLtdonjmF6vh9aaxcVFlpZWqNUThPRzW0poNerU6g1eeeVlQDAaT3DOMZ1MONs/Zn/3IVp4AZw8yykGfVoSXDZF6BKrDTrPiWyJQGB0xpN7DxDCYtFc2epitOYbb32Ts/Met+89IE4iVKTIsoIoihmPJz4JQHByekZabxApRadVQzDF2YzhcIJ0EZ12C2N80C8igy0LbFEwEI6zsylXa01Wrq2TXutw860GX7vxIu3RmK12g/FwwsOnx1zbSHhjfY3Th08xyvLKtau8vr7O8cFDWlHM1doKK6s3+OzOfaZPh4wOLNl0iY0XbyIXrvLRrVtcjLcY5AYzUgyPj2jkkps3rrC2tUa9bpGdGru7Pb79n7/JRm/CXq8gGmrK/gPOdj8nG/UZTHP2Hl7wtddv4GxJOR5y/fobDPsFh8cXHPc1utbExQnZYEArGuNsjTJdIlMJT47OSaKU9atL5EKDKDm5OCSt19BEnPV6TMaOiCmHhxfsXNlmeXkVKZo8uH1Alk353V//No/PCj7+6/dYWlxE1xqkSZ1SSrSe4ISl011EGkuelbhpjnUOrb1/kXI+AVGSYBD7rDLZ84nQvLdcIlVCWm/4XUVYhJPUUol85njwYhuSCsOcsRLOB9bGBElsmzMeevGltFWvFs8Q7F9K+v62XOmr/mZFBWvOWZjqN+cV/aaTCaenh9TEKZ1Om7GOufXoESd9gTYTnE4R1vhAWAqE/VXVI8PXYL5D+KIw6ZM/xAxZdzhKEfqkDCjrjautjChE7Fk3bUCUlMrLzZOdUY4vOD7O+LPPz/jkyTnjo3tsjL7kxbUF3lh7jVa3i1zc4htXFnhnu01nuQXtNmUSE2GxbgnhJJGr0QAiAVMkViTEWF/ZEtotsI7xoEc/1ySNRSJziNKOcjSlNBNKPfXCREKAjHBCop3DaF9m7YFD7ff/SpTK6CD84PuYtJJYCSKU0wmCPH5lXizA4EvNC23wvUnSr6tKhWqXCiYQs7gKBRj3zD4eGpS87Yz0Hn5SSZTwXnTWeMn3Z8qDA2llq3hrNtbdM2N/xp5QJS1Val4lNmCtjzV8LlqRBd6P+Jnx8xzj9gxbNSMGvICbP0XPUDspkCEZqMb/bE7PcwpUSOJsiFN9DlmRFs+W481ir4ptCzZaVUIXx/EzNgKzvwWgsko2cRYnvMWOfQYgl+G2+XkrQ5JXnZO1zkuLSAmixKoYZxXaWv98db+lxDhHoS3kJVJmpEmDSPh+NL8GBSVHazBaUzGWWpcsdOqsLW1xMRhycjFCALEtsS4QBc4Dy0pF/rqjZuI0QrhZm6qKwrwPYJNU0gu
vGYs2JVIK4jieVfrNyCJtg8hL6IUM65X4JSz/f2jS9n8B/y3wv4Sff3zp+f9JCPG/4wVI+r9qP9sMTQCPnlg7K62TjhnNqgL6ZrT2KVGFyjkLNgSK0mvJVCba4vJkmPl9BQbAVT04z57H5Ye186SsUqB8vqftMnJSMTLOOd9MbeUMMLTOeTrUWd7eZEcAACAASURBVKSMcKZElznj0RAhpwiRESkZULYg6hBuprcN8z897AbO+MTFGoO1Yo7yCYeTkMYRCMfICYppxqPhY/7qgw954x9t8M2vf407H33M3S8+R7SX+O1f+w4777yGu7bE4/0jDu49JR2XTKOIm1dSdm5s8un5U/YeHVLmEbVUgLY0VIR0OqBmvnxTOJ+4uEC3+EA8dH9XTP6MVvcohZABNWLeZ3FZDMwv0QIlHBIvdOGv07MHOSzaeeWeOWldvUPFsnnbhBkqxLyP0ViL03gcqEq2Q9ZnrfWIeKjDVv7mY62ZlUhGUcTS0hIvv3aDdj7k3Y8+5GT/lLx5itWCwXiCEc4H50Jzdv6Ug8NHFDplMtYBxPB9nMZCUQa21Xk5WVWh6FaEhmO/gIvgc4QQaG2DJHaEUAZrSybjAXu7uxR5TrvRZFr6zVZFwjfAOk2RZwCkSRqUW53ffJxHoMbjER9+8D6bL77CN7773VmpQKPVJk5rOO0X6Krp2bOpIITDGoMxZai1n/DZp5/SaHW4ceMlut1lz1KpiOl0Ql5McA6vRGg0zkGcRKyvr9AfDOkPJkRxjU6a0mq12drcoixzmrUm7XaDVrNGq9XGOq9oKbX320vSOkrG/pYaD6jUG02arQ79/pCj0xN+8O4PePW1q9xQb5CJFtZ6hSfnFw2c9Kxx2N8Bjyxba5DGr1GJ8kIrkZC0W02uXlml02ny859/woMHD5hOp6yvr6OUYjAYMhgMGY3GLC93WV1d9vLNDpI4wpqSqzubLHSaHB4d8/jxE4yeUo9alDpHk2BcRK4UubDEGGSZoacTb30hHCvdBiWGIhuSDfqU+ZhWt8HLr75AJBVX1hfZf/qQPB9RloY4irGJN+mWTjIYjnFCoE2PvChIk5i1pTo1WTAYTjk+7SNkDaFSIhWxtLxCp11jf/8JvV5GPswpc8H+aMoP3/+E1956GTkxPLr7gDe2l3hy/IiziymDseSokRHF8M03X2LjH36P9ZUlsotTmvkqX3/zm/Rzybs/+ZSzSc6/+pN/x6BsMC0T+uqEnx2cEQnHcX9Ee32b0+MRrqhhrSXtrjEoM9aXFxgd9Vhf3qKVOobZEfcPL4hOc/7qx5YoaVLPGh6Aii74s3d/TLfbRReKw/MvGY2HFMay2Jb8V//1f0NtYYM/+aM/orf3BcPBlChtYkROlk94+bU3eOH6OrEe8fDLW0yKIVd3tjg+7ZNlBpNblIXxULGysoCxdaRs8tt/73XaCTRbNYYPdsmzCVIIvv2Nb/Do6T7DQR+H5fr1Ld56/WvsXezy+OFj3GDKUhM2t7aDPH9IJZxDqL89GJjtY84Hu1XZj9/3fPLhgxlma7gIaz2uUgf2f6j6aob9Ab3BObVaQpxIukvLfn+M0rCvqXnw/dUn9BVPzitcLi3687cIFZ3OWs5Ozrn95V3ee+89Pvv5B/zBP/o6v/Hrv8bRRca9219wfAGvfv1VVhdbyNCT61sifqWGttmOUlWO+N40DxY7qfye7cJ+RcEEBSKi5QyiyABBnkQMgUU7gek+4+GUD04THg4yooNPeCG7j8hzzvYleXKDja99j2+ufY/rtSnry6ukay9gFlbpxJKGtWAMztvWYZykpEEkIlzp0MWI/mTMmTb0egOKkxO6NcHytcD2T3Lu3b3L8WhIZ3WNzt4TBoNzTGSZliMMGiMcpTUUuvDAtxXYwviEyflyt5kKtPUqJi7Ec4ggImLdM7tylVgV2gTmTPreQ+n7kbw9RNi7qx5MybzaSXg2y7pQamftjACAIPRAkMQXvs2CYHAtpY8h50RBSFwqdozLCdVzvZqXfpFCXCrEcvP4RlwGjAEnZnHi5feZAf6X+9mqAV3NrSoKEnImZ69m4Og8cfN9+WJeHSQkMoCNs3428TeTxecZNP+LTxTLUqNUSZ4XVGbgSZJSq6Wz76Iug9fWhljAza7p7IN9dunZMyQyikLbiR8XCZKosOhCU1gHUlGr1UjTGknkvfysFGgJJpLYWIGSiCQmSWNfLeQCauQcOvbq5KX2ZtdlnnF2MqZsRLQWF+l01xienZKdH2ONQKQ1hJj60t2Zfcil+4UHZKrbYq0HycpSk5c5hc596a/TRJHCBFJC6xIQs3JKhG8LkkJ40sm6r17yLj1+Fcn/f4kXHVkRQjwF/md8svavhBD/PfAY+Gfh8H+Dl/u/h5f8/+9+2fuH877EdFyWhvDsASFkklL4bL6aBdXm4XydqKv62MR8Ejz//ass93IdcoUBXHaLr+hgUQXss8k/n5ZV2YirfHDCYkT1XtWCYkNiNUNDHNPxkN55j/3HTxCiQbuzwrXrG2xuLaKURyaFjBBOhbwh1GZbibTCJxyW4B/hmzArFSsfaAsKo4mMpl1vcCZTojhGxJKffP4FFypmudsllhArC1FKe2WJxaWUp4MnHOzdx2UjanKBkZbUmku8/OZbXFv7Nd77/k85fPKEb739Gk+ePODP/+ovMU5jKfCKZQohDQKJJZrXmjPb06kadKWr7pWc9zHMrvj8uovwu/DceGjAdnOrCBcCaelZ1NJ5durZ3fyZUXdpEXn2mKpE0kvX+sR7tvKGYKQqJ4jjyNdTB3bVm2Iq9vb22D95wjk59+7fo3/a5/OLMWnaoNZsU1iNjCMWWi0mo4zJKMNKiKTCRjLMBV/WEblKCSm0qIcNRTqBQqKEAusTNWt9Ba9DEKUpSS2mMFPAMOydc7h/QIRiobWI0oJxOUaXxjcYW0muNdZ6KWWVyuD9YpGRQEiHtIK9/af8xff/jNWNdbZ3NrCupFavU280GA1yz2rPLm2FvHkBgCj2CW2aJjx5ssvx8SHHx8e024vEcUK702ZjY50krVMUxczcdDodMxoNaLVaLCwsktTqDEa5Z/WEV3eq1Wpo7ZhOSxBeaa5RT1AyJGyRRMjYm9YLr4TnZEwUS7qrddqLyyyurLP7+A4ff/wxe6enLO28xGQ8nq0ZKvKy17ZiJISblQNhHbosSGLvvzKzbHee4V/fWOc7tW+zsLDAvXv3ePjwIUtLK6ytrRHHMefnF4xHQ/LpmPX1dZrNBqUuabSanhVOOqSpopYqRsMByhTgQMUNhKoxHo/on59giylJmlBPI4R0pLWI69c26GdTytISC8FoOKS70kTbjNWlVZ48usvJySEn56eUNiJOG0gpmEwyqpCgKApGkzGjyZgkiVjrXKHdqdM77xMJxzifsLjURIiIs9NT0miV61de4ChOOTw6J881E6V4dNbHfvGYa1s7bL72KncOjzk920XomOmwwUUyJkoyNrt1BsUx5viUtfoC9bTBx7fuc5AZ7pyfsm8nmMQyHoyxozGTss+112+ysLHIUbbP3tGXNK
INlpcXKDPB+/fusv1ig+HhPm9u3eCFqys8/vIOu4cHTHKBzR3v/fwTvvn1b/Dmyzfo5QWilrN/ekxvULK5ucUk36O+1KJVX2R14zqRUrQT+MN/8vvcfV/wxa2HfPngMbW6ZakGB4++YHtNsdZtcdRqsL69TVpvkCQZBktNRfSOT8inCYcHOSqNUAlYfUA2Oef9W7foDzPSuEUUpxw8us/j+/cpsh4vvnSV7c0uUSJZ3VxhOp1wPN6l3qhx9fo10iTBGIeMhAc0ZcWi/QoPqSi1pSz1M4j9fJEMS6ZzYIuwVsc4FEZDWZQkaQLOUK/VWFxaIIqkR6hnlSHi2Yh2vjTPPsL/+1mfMz/fQrGSqILY2XIDQiCcYDya8PDhQ57uPmapu8Af/JM/4Hu/fo3V1VVUM8MlKdO8zitvvEK7lRCrBC+lP1/nf2Ey+dzpVvuZnV0YX4ao/BIBNkfKKc41Q7HjBGdOKCcxT4oFvuj1qe3/kPXBpxjR4q96NziMV9i0Kd0SrkUFv/fmDeTrvwlr26w0YlakpSa8rUoel16yXjmckpRWeesQbSizkmH/lP39ffafPubp4SMeXgw4ORugz/vUXcbyVot2M0FMSk73x4xki1q9Tq0RcdErmIwzsnLq91XjPWRLa0KgLRHG9+dVCbQKiFbVx3PpVs139IoFqZIiIZBxhHUSbS1Ly8u0Wwknh4c47Xc1q+ciGcBMHboaMBXg6xMFz3xVbREOMQOGPT7rgoLlnFVyQZegYpueNzoWs4HJnFEO4LoMPfcuJHt+LFbqyX7cz0o73WUGbV5uWDEx8w8Us7E1H4pVnxkYa9DWS/uLcO0rTzYbBNOq+NNVPwPoYS99zvMJZPXTAyyVcX1IDEXV0hK+q/Ogb2EK5KVy0Eo7ValolqT5s1ezf6uQuAkBVvpSQim8LYi0GiMlMlLE9ZRao06S1n0lnVJEceyFcKJoxrpFiWcDnXWUxrfHqChGCoUpvT8bFqSTWF3SOxswHE/orKyzurJCtNDg4mJIv4SyzBBCEMUxSH8NqivmqnXMgtGOUpcURUFRlGjnK71iFeOcREVBJEWCqkzTS+utqpyPJVU1brWe5RC/6PGrqEf+81/wp7//Fcc64H/8Ze/51Z9TCffDbEcQl1CNMEGM1jhjvQ9HOMYKF6RgPfLnEYb5Qv88C3YZzagatVUUzWr7q8SuStwuv4cNE7J6HjdPMp9h3wL6UZFifkFx4CyNWkI9XiBVgiIrGY011uaUekqWRahI43uyfP+aN3v0bhShZ9kHUuGD1Sy5BG/uYXBCE9dj6tayGMUIkdJst3jx+gs8PD/l1ofvU48S1rY3WX/rZZ7cfsyPPvkRfXHAcW+fTx88JDqBKxsNUDEjZ8iNZXl5mZfeepHT/i5n42Nq7YhGp45TEiMjBCnSRQGxtDjM3BA9XHMrQnoZNm8ZJkOVLCFU8OXw11eGxa9K94R1CK+R6lWfgrmlscFaVDoKW1LY8itTtstNtvJyouhcWNB9sqJU5D3c/uZgnQENEJqA8XK4xlrG4zF379xl7/gJWVRjWkLkIuqug5k6dg8PGWUZhbG0mx0GZyU6axPVAemRQlwozXVi1qfopA8LEDLI01aJW2DbHLjgkSKk7w+R0iEpsLbk7OSQ/vkFkYgQcQOaUIw1ZTYBFRLRUuCMRZcFOo6JlB/FSiqMM16tC8f9u19y5/PPWFlqk9Z8qUSSpmGOeAZLikoDzYYyDI0QwisyRoqNzZLGcMTZ2QUXF+doban1mhgH6+trLK+sYHTJ2dkZvmvC0OufYW3J4vImi8uLjEd+YV1aWmY0Hnq7A23QoynaGFpFnVrsTecjlQalJnyyLS0i8uU9UknqjRZJrUG78Qq9XoeHh4fcfvwjDvaOkUUR1NAinAx+PhWrXvXf4oPJiuWPo8jX5GvPRFhriZOYN954nW63y8OHDzk+PuHJk10WF7u02y1whqPDQ0bDIdtXtuh0Opgy8yii06SpYG2tw/p6h9iLaoFKUHHNm28XV0kiR1bkGCFJ2m1ebr7CpL/E7ukxn352h4ten/FoynA8xEVbxI2U0mimRUncaGAzTZrEgKQoph4McN6GwAmBsYYkSWi3Oyg00vlS3PPBBaPxmPZCl0Jrnu4dsba8jBJ1YlVDiQl5lrO2vohKF1nZeoelnb/DF/feY/P1K+wfPOHx8QnZzw549c1t2q9vsRwPaJmCre0dHroznvzsMae9KdaBqklurq2gmyl3P93jxo2bsLBE0VqgvrRMZ6phNGF1MWEaR0Rpk0lfczbJuHazwcdffMb9h+c8Oh1ykuUY12G12WL//Cm/9911vrl5lYUPFf3zQ44Od7m+nbC09TI//OxLbn77O9x47T+jnvT5+Qd/ycN7j+iaPRpKkVCg9BQzsDib8uj259jVJRrNLnJgebp3wnQyZW2py0vXrvA0tuwfHHA+OODr3/oO5xentBYb3HzjBpPRBb2TE57sDYijiN/8zrc4P3jMw/6URBqKyTmnFxGL213O+qdM8ynnvYzbt2/zxrc1SRp6iNW/j++YD7iMC6Xszs3W36+AQbE6Q0rJtN9nOMopckOzvUittkxncTGob1gvNOJAyQQfAAUEW8zZu/m7Pn+6FdTn8L6oYZ9H4PsnqpjBH+qso7VQ49u/9g7f/vV3PPujAHEKSNZbddZ3boCte31GZQOh4UOiqlTvl16xZ+IrLx9u8YqvVgiwEAkLFDgzYjQoORrBw6PPSQ4/wpkmf/m4wVOxxautjLVOnW63xbd2rjNRda4v7XCt/ndYbihIG5jaIrkI/nih/MxpvIiRKTifHnM0uODpwYTjgzGnewecHTzi4uSI87MRg+GQrOzRyzPKElQOkc5xtwfgpiitsGVKmTSIazH1yDEsR/T6A8+ElQY3K/Uh7PE+0DSuwqktJaBk5FtRLiUn/qqG6ifnwv5tPdNiLaUtIUlZ29zk7//e77K63OFP/82fcPh4l3xUegl36W1YpJBE+LXdi9bMdQgQ+H05JDBCSpSKUSpGVsmcCDoGs4Q/fIEKgKdiPZ5lWOb3fs6WiYA/hC37UhWRT7okVfbuk0T7N9/tUqI4Z79m0WVVvkpVhunBeR/LGiAKn38ptgkxwixhvHQPdKgYqsDn52Oiy+WPyCgkpfPjgNkxQqhQZmpQoZRSVlVTs7i0iudDl63wXswi+Ka5GQHiNQmM9c9HUYRzliRV1FopUT0FpUBGyCgijnyfohe+88CUc5ZJNmE8HgOCeqNBFMWhOssnuI00IcvGYAzZZML08JBiPGJzocnq+hodVefs9AlROg4VC8+nShVbWN1riYoikloKsgnCay8UZTarMosumaXneckkK4LEv/Gq0c7iTKWE+4sf/9FCJP9/PGYDT8ylcWeIhqj+XinS+dpZFcUQaqN9wsczDFtV2iHFfCEH//eq9K6aIMYFeVBXeVi4+Yczfx3MERFwoXegaiS0s4levYevgp1ZawZVKMfK8hKvvnyD7c1NJsMRt754SH9UEtdARdb7QyQJrvQyuMaEBIVQGhlq+KvSi+oaulAao
HWJ1gXTfML05JRJ7jiYZNTzMeejISML5WDA8soqb3/tDa69tMkn6z/lk88+ZfjuLi+s7vDCxgsMXB9qmqkZ8uj4lPXPOrzR7bC6fZUrN1/i4x+/S6Mes7K5QXTvIS7319A6n6hJV5V2VpvwJdERd6lw8TL6Fo67BOU+cycE3i9EzuaMeKZBtaorhksL3nykXbpW83/D/HcZxp61wbQ7ONYTPqtihKvkzvcOMmN/lRTU6zXiJPZytnmOMILR0EvXIySRMETKkBclF+envkRKKbyyWgj8A4rtgqCH/57Bn08olFCzPLZKIEV1nULCiZKUpoDIMBkP2d97Sp7l3pRWCVxkiZMUlZToUoPxjc2VopHMM4QE66IZ+ljdQ11mfPzB+1y/usnNV26QRBHtVpvh4Jx8VMwQSinCwi49uBDHMUmSYIyhUW8wmWasr69jjGM8ybjo97n95Zfs7u6yurpCkkQkcUy9XvOmooVhmo3Ijw9Im0vUaw2MtmRFRhwlwezU+8AMJxl5WVJXILTF2lAWIyTGCUQUI8I5OkDGMUkU00wdraaEWo0n739GlmUkxpAXBdpookQE/zZ/Tfz3kxjjlSWrfSqJIiIlKXWOMZpavUZZ+vLQ5eVl2u0Oe3v77O7ucnZ6ymQyYXV5ESUSzs/PcM6ysblOs92g2WyAAKm8DYQUXg5dKYG23t8mjiTNWoNaGjHJJuTOQS3m2tUdRLHO4sU59x8foB2srK0j4ohBlvF4/4BIKLSMKCyAJMsyb0EX5P2zovA2B+FeWmM4PT1H12KkTDFOU281KZ1H4ItQLnJ4dEErrdFKGuQqxzQME3K6ssZ01GJ15U3e/J3vMo5O+Nd/9L9ylO2R9xz21j7djYij5gmJyvmid8bDL3uM+wnDzDAsp5T5hJX2OpHs8qS2z9OTB/zWb7xGf9RndPCYJWLihTauqVlZXqYj1vjs7IAJ8N69zxHTkuviChuNJQ5OH9FotWit3OBscI/33/0BS5uvMixq3Hj9JcrY0NeOf/i9f8Ag3eZkbDk4u8Py1Q7r16+TO4XeH9A/GzAqJQuNJrWFdcpI0sslnYmid9rn6VGPNIl9EETB4oJg5Z0rtB5NWVxf4+W3rvLll5pufZHD8wt6x7skpsD7ngkOdh9xvPuYZrsFOudg9yErKmZ0MCKtJ9hWnWY9otlsIRGUpSFKFJUowFeth1+VzAmB97+qUPLnX3LpiXwy5OHtLyhyx8bmNdY2rxKljWf2XQ/YBXQe33vrGQjf+/MshfH8h7n538M+5wKKLoWAyAGGPC+4f+8B1jpeefU1IhxGG5QKvc66gAhK7cA60jjGlBYVB9BLejDUBHZEffXX9V8rnM88TplLzVd9L8ZoilyjxyMGp3cY9O7z488e8ahXY7HR4aZyvPPyMr/59S6P5CYvbrzEzWaftCbYaN9AKA8SWRSZEEjrkKYkUTCykGUFejRgcH7BnYM+52cXHO895PDkgL3jEy76F+TjC8xkQJEXTEtJqUFajZQajEDp1Cc1QjLNNVlhMCXoyQibODrdBabOi2Epq5AyDsJvDi+i4nvvKmNvLzhhQ/wVFAHdfAD5cj2f3Pie51Bl45wPxgHjLP3RkE8+/4x6Krno93xXRaQQoQRTXLoXAfF9hrETovp5KXGrYkEZYj98YielmiU7UqowDioQl2fOfUYrXCIALrNUs8gxfNYszhHMvmeV7latS242P54tT5wLpVX/zz9hljiLEP96s8NLSZcv1ZxXk833KoQjjhJ894d8JnGrzqESVZNSYd38mFlM5tzsOB87VXGKmR87S9ifvf+C+WcJ59nMinD3ybdlzsx52wupDCoCFQtcJHEKtLNYp5mMJuTjKc5Y2gtNavWUehKjC0VZanSee8VTJRCxZ1l9uTJMsowC0IWhnI4peorNnau01lpsX7nCC6sF7YWOT+SfvxPOeckN6+9LFMekqgYipTQZxji0MR5ox/dsJnFKrd4gSizGjdHlhCIv0EUBBJbxub7j5x//SSRtILCuQgIF1a3yIiIV+a2wRvpm3RA8G6AMkvCxiAJlOZ9c3suLS2JOXpq92gCdDIaenpycJW1SejETFcQYfDub32CQke8fw7M8M8dzvH8YIZESCJSLZgmlwrcx1qOUmkpZ6a7wymuvoq1Bttr0BjmlziiyIYOLE8qi8CIQoYG8tHhZYCRWeFbJ4Qetc6Ge3IIpjfd+sBrHlClj+r0JdmqYTIfs2im11S6Lmx2cNYz7IyYXJbLRJpcRk2HBtRtLrHeXKE9KtDWoSHCsB7z/4Y8oFXRvvsT48By0wUQRJoogSUjQCOtpeatKJAqsQhCFvkLjJYVDB4DEgbAIUUnIzldh77EhZ7hShTBp6++hqJDMyB+TRpJUQIogJgZRw4nUJ4Sh3r1Cw6p/u8tGnSL0bVxeJp1FiHmT73y2+vNVThIToVE4IanjoMwYj3pQCPRkTDaakGeabDIlTRKmeYmRklazRa1ZYzjOGEuNdjl1oZBC4aRvwnbOYq1GmsAiOxE2GUGiYiIh8CPPX0NnDE57NixOE+IoxTnQWtPv9zk5OcU5PGOiPfqulCRJYpQEmxmEBaV8MlrmuVcmDWNQxjEOME5TixW7j+9y+7NbXN25hlOCOGkS15uMJn2v8hmur9EWnMEZX14pncQ6KIzl8PgUqx2dVodOu81Ct8t4NOL87Jz7dx+glGSh02bnyg7LywvE7TZ5NmUyyemPDygbbVZW1rDGMplOsaYkSmJsuK95abGZwJYTxrmhVo+pNxPqcc0rcOHN2j0K55MSFdfJbEajuUin0eGEU4STlERkVlI3FiUMRhs0IUGWICMH0vufTp1GmAHG1DxH6AzZtEAlMaXWOGGJpOWVG1fZXl/i8ZOn3H/4mCdPd1lYaLPY7XDeH9CfDNna2qK71KXZbKBURJI2yadTBNb77yjAeQUqbSWFcRiPZqGnOTYvqaVtNtaapLU61GNsvUYpHB9+dId2ven95FC40lLkBmcNcZqQxDFJrcZC0uHo+IRCG29HoQ37RyfUdzZImnWm/R4qTSjykrPzM6yBTqNDNvGiMWsL60zzHBLFVE/JXUEmh/z0s78mPlzi0elP+fCD9xieDGjblJ3lVbYW2jw6PSKKBNl0wt2DIZPTmKW1FazJ2Vm5xtFowv7ne6TtOr/1u99hpdvgk09+xOnZKS++9Xf53nd/jx98+hcsdlsssI7UU/Qg5+ioTznNWN+5hmvUabSb1GvQnzo2ljZRtRFPj8ecXTxlbecam9deYWV1m09+dp/JRUmcJJze/4jmK7/D9tfe5Pd+9w84ufMh/+e//GPu3NujEaXoYoSUCWYqabev0mjB7u4DJpnEakdto8Orr77E0lJE0p6QtBa5//Bz9vYPKRrbfHLrHqaccm1zk/Nen0F/wMMvH7Dc6dBcbLPWXcR0F7l1/z55U7DYXmNxfYftjRXPolIjilSQdi9BJSFUgZlK76xfxgWQM7DGTnjBJzcPFv2e6vfYIi9w1pKkMdYmLKzusLS8Tr296He7yohJ+gRGSO9I5v9TGOG7
lpSQ/lhBkPw2CBfPo4Agm27x61+eaSIhwShwviQrSSKsijjq9/i///xdBqMR/8OV63QWE6TQNK1BZiXalUwmBaf9CUsLq5ip4c6tx3zt7TdQTd8Do4T1qqvCB6tVx50OW0AEOGNnIgRc6tUqZUKpLdnFLnawy4M793l8mHPUNzgz4u2XF/jON97kFbvM6tpLrMXQFhN2Wg1eqC2itUXYFjJyxBKEklgh0Sjy0jIdjCjGPQbDPodnfR4/2WPv0T16p8fsngwYDzOmvT5FmTMuRuRmgnAl0hpwAu0k2kjQBilKUhkTpSlpLfX7XxxjtGU60ZisIEb4VozSCyuZsvAMCsKbWOO9WJ2U83I/AQgvSFGxbr67YL6H+haJqoJDzOxgJCC0IUaiB2Nuf/AzhPS9SFZrXOmB/YqlEs7NCKi5/VG1efsxDIRz8QIgfhxXHloKpBepckIFmhixQQAAIABJREFUtUU7J1GcncWTMtgYcdmLTVT97j4xLIUldxqNZ1Z8eZzDGUOE78EXVcVR1eNPVcApZonBzCfWelbZhVLgyrPYvyKUGuNmgIwTgPLCIS4wbErFNBrekN6ERE9IQRTPivdnAPSMFAuJtGchJVJEM+Aa5iRKVaV0GaD1LCizhFlJGWyYZq9mJi4YFC9d8NKzeJBaElThBb6XXzmE8oyu0TmuLNHGUuYagaLIC7I8998LjZIdVOIBTZOXOF1ihCGqJ0ijEVmGKCJiYagnaSgrjpAyxsSCk36f88JxZXsTudjhom4xsqQBYa5LsCkCn0tYUSIjr4hdliXD6ZCinFCWOcYWgPVVSpEXMVO1GBVH1NMEW2h0nnlRnNA/dxko+KrHfzJJGyLGT6gZLRMmhcQ5iQ0Ty3ti+gFuJb7WzkGEwJeSecl/grKkDQvJnDafMz3GVvXIeLQlQB8OjZR+8nrvGMFMUlj4BlnnBEIpFBbjNAKv1lj5yEmhMC7GDzvjX24EGCgzQ55pDo5OGOkpB4MzisJRi2PiWh0VpVjtQnIgIBL+HKSnm33/qUUo65MePPLubf0iIhUTRxHWwTRx5MrQtobSQt1qdjox3Rs3ODm44NNPPuPOl7scln0W2uvYWsm7d3/OKg3apWL12jVi4VjZ2WIjSrj185/ROjjl7ZtXuf69X+e9+z/j08+/YJqVxGVEpBXTpMCI0Jge0DeBRNjgCu+klyAXNphwS2RlshcEOqo+OL/MBElbJzHWL1AGhxFglQRrUVgSY1G5htyiS0mhfUAgcVQaLTKwsFXtuVD+/KpFpsrvK+Rt9pidXlWiAxhIREQhJIWziCxD2pI4EfTOT5heXCBLR1QY6k4gC4OYFoCgKByi3oCyxNgcJ7VfuMOi7PEB73PmYF73HU4nDohMIUFIPw7iSHiUJmxkfmR477tJlpPnhV9w8QhjFCkqyZNOqwUIyvE4gJECW2psUSLjGCFVEF/xp5iXQ7Ca+3fuc/aNIcsba8QxxGkdIz167qSb9TBSoVKFxhmH98+RFNpxfnpO73xAo16ns9Sh1WpzZecK00lG76LPcDDg7u27HLWbLC0vEseKNGnQSesU2Zjjp49Y2dig065hR4bRZEy91SSWEWVpcbLGJLOMyzHJ1FAvEhZsi1a9SaoSP62dJVGgIkGhHXGjgyoFC60FmmmdIi8wqoYWCVqPkTpDpQlGxR6TtxYVe9Ws3MBYF4h8wNAInhzu02xLGs02pfbjVimFcCWptCSdOp3XX6bZavHwyROOTo6ZFgUbG+s4A4+f7DOeFGxubtJoNChKi3AxMpY4W1UJ+E2zNAJXOkrj16xanJCiKEuBRZBlU1Qz4Wg0YHSeM50UlK2UyWhApBxbq2ucnY7JsozVlTXiNGYwGpHUYo90OjFTwy2NYe/ogFqjQdqokdSb2P6A897QW084vEHvqEe71abZWQAzwLiSSTHirz//Mz5+8AMSeZXhcJ/J+BCpU+I0Y3u9iR1PmOxP+O6vfYuj8zGn3Q6nT47Jd0+IRcm3fvsfILuG/cd/zs1XX2H9jRe5/ektnjw9o2+b/HT3/6PuPZ4lS7Izv5+LK0I+/V7qrMoSXVVdrTUw3QCmG0NiCGIGs6CRs6BxjFv+H1zSuOCCK244CkYOZowEBjSgiUFPA41pWV0qq7IqtXj6hY64wgUX7jcisrrR62aYZVnWExk37nU/fs73fec7AybuHQ4HR/yjr/4un3/jN9h/+ir/6l/8a47vDsmlZZQsAnuoSxbWsb0J+zev8uWvv8zu5Vv82b/71/zsrfc4m2m+87uvMp2d8NPvf4+XP/dFNBN2uh1ef/V1Eq9p3fgsWvwJ27ng2laHPHMcXN5FqYw8CaDewVaHJNvg6PCE6WzBxv4el29s8eDpXf76R29z58EpnfYuk+Ipk7MRk/kCa9tMpqHg6Le7LJygujhnuJGgkgRqg7UZh4cjhlLw+PEpr18J40Eas+rGmj/qC5bQVDP7SUbpfbNXiSqCxWweIfCQqFa1YTqeYauabqeNyDK621fobl/meUquOUubv0ZEPg4HVj6CabiQoC4B+VUMDgewwJmK09MT3nv/A54+OeY7f//b7PX7pBpUUeBKzYPTc777kx/yo5+9D0rxz//4/8In0BOO13f2MRfn5J2Ma7deYVBbWnkPrbs8fHiPnWuXOGgfoADhPFIYHBbvg9zbCqh9iP3KsIzDpa3JKoMzFZPxjB/fOefw5Jhk/C5v7o056OXsvPlpsmtfxaFpV0d0ej0WsgdCowRM6wRrLLoqybOcJNnEeseiNhSVYTQdc35+zuPHT3lw/x6HT54wGg45Pz1nNBwxn04oFnNcXeKsobYGIcOsORfbSZxxcd82LSEWJwULX1HVQxZmQauVk2dtWh1Nb0Pg/S7eWExVU9g5lSOyV5bK1EuWSsgArEvPcrRPU0A1S6k5tQIzBc6ucrLauii/tmEGoEqhsghvSdOEug6Gc42OxDUr1/vQGhC1iMIGlUs0NwggsSPO5A0lQZNbhPEz0TDDR4MSseyYX7JnqmF6xOrabeyfX/bhE2WDzuKkp/Q1n/vSF9nY2OLDDz/i/PQsSHO9J/GCVIDQksqaaHsf7puXsjEFD/sh1oPCiTBLbLklGobPAmJ1HSLuXxFs8zHhZE/TbClTtD6yPkqghYqO3S4ycA2Ttnx0MR9+PmdeXsba11atQuGiGydwIUIa7pt1Efd3EEY7hAuyVScFzYRjJUOOrxCgQCRBahxAyRppNLau8YUltVDXJdQ1OIPMNJg6FEFYrK2grvHWYnXIz/EVqixIhEZmCp/qeJ0SYzWV91hbY0cDFvMCoz2dL1zn1uYrfDrZxBaWSkg6Wocc0jjKcox3I0bjEwazM0ozpaoLcFWII1iECq7rxofYonVC3sqRLqxV5QVlWUTg4VdXbb8mRdtqESwZlybzjIfJipxkaR/b2MhH8A+5NnyzkSiua3Cfa/CMr5Vtf3MhYVkFC3UTZByxZ8XHU01GZGjJAUV7+EYCIkRAomrhMSIkaTZqpw1Q1objs3MmtqCWjvF8jrOCarFANTa5NAhPLNTw2ChBaNi
oMHTcxmAaLj5Q/QolE1LZQtSaclHiypJUSeqi4u6H92gddkmTFsPjCcKfcenWNT594yZSVfzN+yc8vfMeB+kWB1cvsZiOuf/xPdjeh8JSTsZsd3t8+be/SX5jix99eAdTVUiriDwoeLEyo2hCTkRUV31/hI1OpOkbCUOEuhr5QbgXcnlvAaSSQeJnm8bjiCIJlv/Gqkfy+de6FKCRR66eZRwtEaWJATCOfY1E6aUXEAeeKxX675RWtPI2KknJO116eUonbzGblwzHUybjaZjrluWAxAiBcQ6pFdQiOmz6JVPbLMYgNYhrlEZ667A2bH6hAruD8BhXY2yNQJIrSZaHYdkFJdPplLIsIaJfQiu8aVy5IMtz2h6Kug4/F9HjqizRaUIna1E7i9QaEUBu8ILDZ894//13+c3db+G8I8tyEp1QN/9G3FPPB34R7lmSoNMUIQVVXWOMYbKYkiQJ/d4mGxub7O3vsn+wx/DinOHwgpOTI9rtnL3dfa5evcH+/h6z+YLzszPa3R7dbo9QFNfUmMCsi9iU7A3SCkbjObNZwc6GZ297hzRPoo6+iR8yWvdq0jQhSxMqgtRBSEGSBHAEglGQjWh7OLNCRmej4cdsOuPdn73NxbOHvPL6G+wfXAnGKdojsVEaFtjznZ0tNne2GQ6HfPjhh9y/e4+9vT263S6nxyfMJlN2d3eRUtJq5ZhEImNfgtIKL+Jgc2cDQikVJBqRaKQXpFqRKhkcrARoldDrtpnNF4yrikQ5NjZC/9rW9jb9fh8vPPNijnOWfr/HoqhQMmU6my0tuefDEUmek+Tt6PBVUdiCyWQSXPScZ3+34LOff5PKVPzNOz9H+YxuBjYrGB8/ZD4f4UWCqDtku9sskj0eXwx55eUX+A8//C4vv/wlcpUguxnlZM7Ozi43br7Gs0f3uRjUDLuCv3j4Uw4fPuFi2qOoWtw/esrZU8F3fu/zfOVrn+Onf/s9NnZu8LnPf5F/c+8JrW6PbqrpSsWZaJPs7HLtRc2iOuZf/PPbfPmrv4EUm8ymcHZ0xr/5o39FnkjwC+5+8EO+9c0v8d2/+BPyVsaljUv4qsTYBe22ptVO8b6g1Wmxf+kq77zzIXWtKIsuBknWT9i/fJ3DwwcMBh9w584JJ0dzhmeCc7tgq5dSWkFpHCcnJ2gdJLLdTpfpeMB8csG8GPPKizc56HU4stDLLjEfC2azIfVOhfezAJwA+Bz8SpK1HvOabarWkHe0jiqTwDa42jBdzEEq0kyTddqkWRoj7Cqx++UvsQz/1ju0r1E+AR+kaBXBRVHbLCgzpGPmHY+Pz3n44Uc8u3eXwfkRPoFPffbTtLYzYI5fTFGLBeejOf/+3/4pD58cYg5P2b12jZ/82V8yHg7JleK7GGRdcW1/h//6v/pDZkrS2tgm2d/hybzEfHSX37q8Q1cptFd4myATh2CGpIXxgZ3MKJF1QeVTTmcFh+fHJMNjsmLEoKxBXuGNT73MZnqZFy47kjxlYXcZ+B1sbTBizPmioPDhzFJK0e9vkCVtbF0zGA0Yjcecnpzw9Nkh9x8/5vj4hIuLCwYXF4yGIxazGXVVU5dlADadDw53LihuLB7rSsxaqiN8UAw1ff/eW2QsWqytMKYKsw+zlE67Rbsd9rFOQ2IvotwOH56fUmppEBVqIb/Mnz7ppr1qJ1m7HhXc9JQM5llJItnZ2MQ5x2y2wBPUICYMuaWpARsAoDH9CqCDRfhV3tAAD2EIt6VxXGys2FcAvoyjlJqiJ+ZablVgLk3oPrG0n5NDLnMZj3Q1n7p1ky995g0m0zl3XYX2wZytpRVbrRadLMF7GMxL6jhuwHiPdYTiUzRHb7jWZaPH2n2VUi6NsdavtSEzmi3XyPallEum2nobCEYRwfSlLPT5frX1z+ic/4WvffLnwr0LeYpUxOKyYftWeimcD+MlnER5cM5gWXIvFIsFeE+NRDiHLQp0YnFeYZzHOENlLN46WjKLhjIVwjo0ElOUFAqUy4MhmrGYqgyMap6QiAA+OGuwlac0UAlHTWixsL7GUmOdpK49haz43ODr3LI5zgumgzkeyDc2kFn4nKaYMR+dUS+GuKrEWIM1JjC1ciXbDfuwGSTuSdOMlkiD94DSqNmMqixw5pf4KKy9fm2KtmUz8dImKmZBy+ItbiBYywVjcu/XvhsX3idnHSx74qJuer0goOmJW0P7mp9p9N4N3bwydhFrWuJffFnvsBJqGZLBYL4hcVIxL2ueHZ7iz8/IN9qIRC8/y3PaZh9dLiMDtYofEaH0q4BirUWYpgcOnBVQZWx199m61WN4dMRsPGYyX+CsYjqfYNwQLSR5Kjl7epv3zu6TJAl5Itjv7ZMWKYcPzpjLkk6SsdPv8pVvfp7CCT6+/S4iXbBIS7IsDy4+IqBvSgRmdNU0+/xLa42zHisDU9A4hCKIAxjd8n6vv1YFWfh7M1bBxYguhH/uZ37JW//C65cV8qvvsZICfHKsQESunXDBCAeP8Y7aORZVTafXZUNqcgvZVsn87j2KooI0zL9CK4x3oILVrTEGoROaM66ZR7jUzIs1Fyl8/KyBYfPR8MVHJNHFYCGjxbeznslkRlXXsbE43q/Yr2d9uO9JlpLleRiE6kPybmK/plIqrOHGKVV5tEyYz2c8eniPL8w+T9JLSXSKVimVF6tG5disvBytIQIQ4glBuHYuSEeaZKCuOTo+4uzsnH6/T6/bJcszXnr5ZWbTMbPZhNOzM07Ph3R7ffobG+R5iyRN0aVmb3uL0WTKdDZHSLC+wkmH0lFa4zXWSM7OJrhas73ZpdPWJJlajgyhWZJKBHAAR3BGDfKSLGlhnUJ4hfMWJVO8r4HQjG3rCT72zsxGU4bS8bH/iOHFjK3tbXa3+3gf5hZKGY2RBGz2++xub7G10efdd9/l4cOHbG9ts7O7w2LmeDSdMJ1OMcbQamckaRKasXWYP+QRoRdJSrKsRafXZVYWbHQ3EQou7e1y68WbnA5GXIxm1EZSVQbnPEW1wA7HbG3usLW9idaKKo5oUFJw5fIlTk/PWRR1kNSKkPQUiwUqyWjlLWazAlNWMSxJ0jRFCMV0PiPvtrClhiJhNJvz+a++xh/8l7/Ff/rbuzy6/5AP3r5Nku+ze/MV6G/xzu2PmIwUeecK/c1Ntnan7M0kbnKNaur4f773Ax6fHJO0M45v32YmCvx4n47NMeoM4QYIc8LF0PB0sMP+za/x+rUrPL7zgL3WFpc33+TpvROcfUR7f4Nv/M63ef/tH/DuWz/gyuUd/vZ7f8Z4ZhhP5rQSzebmNtN5icLiC8Od957ybPhDnj56zKs3XkEUhjsfvsNsPuH4zFGbBc/Ozun273FyNmR78xKD8Rw31qR5zqOnx/zLf/kAb2acnJ5Q1RlG9KlMzUldMp0vaOU5aaLpdjooMjyO0hh8kjO3ntok1POaWVlw/doLLC4mFLMJidxBkOEQWC+QwgZGuAlsDTjaAKKxJyYcZw5fG7yQoXdcSISSpElCkubhDF0ey78aFW7isCBak8c8vBThLVPnSYXDW8l8WjMcz7j/6D
0e3r3N2ekAg+fFV17li9/4KleuX6fXztBOwOIcS0klJ7z18Xs8ffaM12/c4v69J7z24qu8+9a7vHDpFSbVgrFd0OrleCdYzNqMvKA9S9mRCcbVPLr/FvVXrpP3NhC0MEmCEZZWPcV7waKUnI7PyMtjipNThouEYbIFLcWBlOxd3eHm1asUdodOq480FmGmTKclM9oM6xKbWEy3hfYZPdkD71kUCx49fczTx084OTnl6PCQo8NDLs7POb+4YDafM53OqKoKUxtMXSN86NNb0TI+KIu8DUyKgCRN6fd60c2ugtjL62K/r5DPg6FShvyhKktMXTEej9Fak+c57XYbmSgy0SbLUpz3zGbhmuo6DCqXcQ2sm4190rBiJSmMsd8Zgpsj6Cxh/8olkiTl9OSM0XCMMZ6qrNFSIEQjUg2tH83aFUsT/wjmx5PQ49fc8QMT5+P7CikDq+NX1/XcdbIqZsK7NOdtOCubHi5j1iV/IU/QwOTigv/7//xjZvMS5yqU8KRCst3vcnVvl0wKxrMFhZeIqqY2ccyE9WFeWWT+nG9aOhRay+dcJZe9afEVAO+Vkciyr1+KqHZqjjNBI4lscuUm320Adu+fnxP2vKPALxZuzXNtQPMArIfYYFww4/hkMBBCoGPHaDARFCHVF9BqtZAIUiHBOirfqN3ivRESq0OOi7UkIvJ2tcMWFRZD7WqUs8EEx5iw7o3AOxNk2kKGlpHxlOGsZI6h8pbapAhpMHaBkClWKA5swkZ/k262QY4k2epTVZZ6VkJpGT044eMPbjManIEpwddLYOCT6yMs2+jPAWHGXpqQSkVbSITW6HlKVZT8qtevSdEWeCTBivkImyTS2sAKc4gBQckgF/TBZU84ViiFDsO1VxT9ClH4JBIUEvLVz4QAFJsjm+Rt/Tp9CHRCeIxZMUYNWmFMGCQpYretl8HdMkwMCyGmrhzzWYkrHUYKZKLQqSLvdJAiSP+cMwiIvV3Ndcd7IjzB2D04zTQNtFJIZLQmBUU73+bqtZf56hdeQ1MxHQ04Pj7n6dEFh2cXnFwcc3L6mMV8SD2vkaMK72pGypL7DL1Q+KpimCsSIRheTLh39wHtbpvTo2ecDJ5wlJRMZwVKKJwMU+1ZFhrNE/NrBXmDCjWujS4ycE1AWWe/1hZ789yjTLB51s41c+qj46dbOVXapeX/37Hq1taCEA2iF59pDIQN5tEEr6WLo/A4ERqtrTMY7ynrMjwVIXh0eIyrDVl3A6Mzks0dtIsyj+mUoioBh7Grxt0G2AvrLMyBWxXmflnACyGiI1vTkBwOMikDg+UtUS/vMc5irWM2m4cholJivAEp0Fqjo6ucEBKtBGmWBROKugw5gXPLA1onOshVl7CnB+E4Pj7k8NkzXnrjJVKXkiYt5gxjUQi1MaBDELc+DlH1YQ/LKDOw1oEJRRJekCQp3nkGgwEXF+dsbPRIkqtIpbh0+SqVMQwGI0bjEeeDC3q9fuz3kwxqQ6fTRXfboVj1i1A4xl495yBRCc6Fwm02W7Cz00boTVqpQierpnWpRNDGO7Oc/2OMwSUJre4m+IT5bEbVuNGiEVKxqByzomI68zx+9ISOus7mpuC9d26ztbfL/v421w622e63QYT+wlYrC5msExzs7dH72tfY3tzk448/5tmTJ8vRAJPRmPl8FvoS1tBVF8EhlSShPynO0dnd3ePS9i4bW5ukiUZ4x3g8Zj6vmc5rTO1J0hY6TbB1RaMGn86nKK3I0oS6roMRjhRMpmOcCzMge/0OQmmMdxhjabfbDAYjhNAhPXCCeVFxdHrG3fsP6G1toYTGmYrZ6II/+eN/yx/8439GN23RlRscHgtuvPoZfv7BjxB1l40rW8yGp/z4B++SZF3Onx1iyh550uX2vQ94OJ7w7a98Br845cdvv4OZthC6ZvvmVf7ht/6Q7/3pv+MnP/sxrT/a5Jvf+DwfDZ/w5P4hly73uLyXITq73L/3hMloyNs//ymnDw/BaC7vX8OWc47ORiwKQSrh/PCIUmYkCjZaGf12j8mizXs//SnDJ4fU0znTyYAkSykqR7uzydHZGfcP7yFEwqw0lLWjtjnpbIPJHMrpmJYS6LSNNTowT9YghMarnLSVga3Y2uwzn5QMBgOm8yIkOVmLw9MZqREM3Jjy3m0kLXZ2N0h0B0FGM43CU8ckVzfBb+1UE0FCZGt0koaENstI8w6+ATmlJG/l8WyMIUmuxdBGRbEWX5cCkGW8DctbSIeiAKeZTCuOD0958uQ+jx4/5NHDJ7TyPl964xa/+/WvsX/9Ci5JsTrDosAHKfJsXiMrjy8cZ89GPHjvPu6kwE3mHHRavDcf8Lk3X+FH7/6IF69tMSkGJIVlMjtnYBK2jQm9NmbB2ZP3GT15hauvfZ56oXg0rRkUE3pnb+GqlHunJdN6wuUNw7XeBq+8/BLnsovqtrjWAWFHGASpc0xHUwonIWthVUKet9iVMC3mnJyPOD895uLZGU8ePebw8JCzk8CkDc8HFIsFVVlijcFUdWDNTE2iNd4TirZ4I4UPoPEnLfXb7TZvfvazfOHLX+HDDz/k52/9nOFgtJxtppTEi5U7tiAMmW5YpfXhv0IIkjSltpbaBCCy3WrR6XQoiiIUb2VFXVeB2foliqZPOnYLIbHOhIHNKpQSRVlw9/59Ll+5ym9861tMJ3PufPQxz54cUs/L+Pn8skwTzVgouVy9eKJ00UUfgyYsylUx18gI8WKNqQtAr5Shh0uyapcAEEvCoMn71hkmsRzMrZUmFQmTi1EAroOTGIkU9Ds5u1ubdFo5dbnAmBrr3BIQXdsxS0azkWMGEiK2KzS5SXNf42+tG4ksgVII80Q/wSk0hEaYV0xTZ0Fzj4gjFETMeZwP/WVr0SKcNW5llrJ2XU3a15gLOmfjGCSJkjLKMBXSB0WWVCr24TVzIEE4h/TheSJXZYD1ApRCJClSe6ppgY/Oxs5azMKQaImlDqogIdDGLa+JWCsoKcA5XO0oFiUlhgqDcR6tHIrADoZ1Y7AerJA4BEmmSDKJVWG+XN7pkWedEFcjuRNyjJAHIxtB7wokb56DFaFFRKQyMIBKkiYpVbrgV71+TYq28FoVbM1mWiuahKdxlWkoX+JClqxkHzKiDJ6V4ySsiqr1GW3N19cLhGVx6GWU3oX5Zys0ptEyPy+9bIqRZgGLeE0yUnjh8oMGNtUJ+9t7dLf7tDY71MKAglTCbDSk9GHBEZ2XbMyRhWzMWhpUY2XjqmSkwmmKIYmQGcYpOptb7B50wFleqiRVrZhXhkU149n5Pd5572f8/G8+JD1fcPVgg0lWYYqS8nzGZG7wVYm1gtOTIdPBBJ1Y2tLhEs8hBZOiXBY8ltCz5AiGLmLtWpsI0cxR8b65bzIWnKuiRK4FyEZWuURxWLFgwUzEx6bZGOAky/+Pj2aZTfiI+n5Sny1iFegazToxSDdRz4cg1qzLJsgve0MI7kHWe8bzAuFgPFlQTkpmtaFcFKE/zXtaeYt2K6eoSubj0XLdNzKLgPQ1fXV+xQA3e154PAbrTfz8LlrtEj9/45Ioq
GvDolywKBYgBEmaIVzoEgAbilMpSZMsuBulNUmSYKqaptm6cZPMW62AUi4ztvB+o8E5Tx4/5NanXiRRGanOEYREw7oa4wzKhf0qlCRVCmMBKZaHl/BEiSixb9OtZh5Gvf10NuPk+Jh+v0+n06HX77G9s81sNmcwGPDxxx+RqJQsy7h69Rq3br1Ev9vl2fkZ87pC+gQtFViPq/xSOjmfl9SupLIll3e36bbzMArAmOUh55zF2BqlFToJwz13bryE0C2GRycMBwOMWQTJqpQYmeCShIWdc3Ryxm67w97BDWbzivHjZ1wMBhTzMa+98iL9bguhJKpxW3UBPd/o9fn062/Q7/X4+KOPOTkKnx3vo4tosNQO4zFkRFclOtFhsCnBpj9VisH5BRcXZ5xNLjg/O8PUNe12m6KaR+TYhjlaSjMYDplMxiSJ5vr1azhjuDg/ZzqbIVQSpB9S4RDMFlVAqK3l6Pgk7stw9Cslqa3DOYkxitvvf8SnXnuRrKPJasO92+/z2//g27xx69OcPhpy++37XL9+i299+xuIXDA9vkthD/nonXu89OIXOT0v8FPFYHyOTi+4fv0AtZhhjCHZ2IDNNp4JJ6fPOD8yyPeecT4pkWbM0Ud/w3dP3icRu3zw8B22LxsKxmz3d3jpxgEf3nvM7Z/+AJEKknaf9995xEae8tqrX+OFWy/x9N57fO+v/5J5WdDPQw/1ZJyQCsFme5NLewfCbY3wAAAgAElEQVS4Xskoy+nkLY6Pj5guPJM5FHVwX827Aq9UmHFVT5gV4MuC1naf3e1LDIYTJouSYmGYVnN6vYzZbMHNq/u08ozh2YjFIrh61s6Tq5xJ5cilRLTb1FJh5hX9jT5BKBd2uSQkozScxCdAMW8to+GQ6XTG3v4eebuD94LagyMmuhAttWOsXJp7LcP6WgJIjGOruCtiGLcKfOGYnh3x8OFDvvfDd3h8OGNzs81Xvvgi3/n2b7DVv06n3QZZgrdoBMp7jHAYKUF5lDOI0YxsOuemgE/vtWm3Ki5/9jpXWwUv7Xr2uxNe2qrY26h4Nj1BScH42VtMs12sfZ3aGBaLmsf3nvKD//f71GcLnj0r+ckzg20pPrdzzpsvvsGtK5foHHyKvb0Wvqpp5Xto32IhHLPFhEwk1Eh8GWbSDRcV50XJcDxidnTM8MlTTg+fcu/wAYcnh4wHQxaLBcV8wXw6A+/wJrR8OGOXY2ysD0BRIkWI2XWFimBikwdIXDyHmvMI0jSllechWY5Fd3A6DvbifplfNW6I638XS2Cqcbp13jNfLLDOMU1DfAVo5S3yJKWua4qqoqrDjLumTwoCg9cwWs06aJic2lTL83u6WHA+GIJSvPnFz3P91i3e+fl7fPzuHYr5gtl8FmToUhLaQkL7SzP+yQsZP1ezLn1gmtbW43qugQvDohvWQ2kdJPTNL0Q2srHBWy9OPpnzrVo6UrRqBVDUe5QKRXKatRFaczEeMxuPKWrLvISyMktDDxsLyUaqKeJzbkBsKaMp23PM1mpvNXngUuoIQXm1luMs1UKyMfbxzS8uf25ZDC8ZkJWD5nrBCM+3oqwXjUIEZQr4pbP28tp8eCa+KbCjVNBUNoDs1obzxYXRTtga5S2yAltWkKb4VKISTd7rImdlMDgzBoUnSSS19CyswbqaBIVOgsmM9MFhQRKJFhfAD6lj450LX1NSIKyJZkMpNUHC6lQsliXodjDZ6xzscvXmK2x9MOJ4cpfK+CXYHm6jjIaINvCca2sHKfAivLfUCk0WjZn+bqIBfl2KNtEgC83coxWyEb4tntv4zjtsVeNVLJQIyWXQ8AqMCEYWzQJq0KNGHtkUcw0q0bgBrtg3D1F+KESgcdeT/FDIuYhwKILVfrD+D9cdmgul8yjnkcbhaxsGAAtFW2f0shab3R7dXo/WdpeSmnI2wZQLZhDQqPh+jRbbsSoqgCXdjxAYEyavO78qLIzx6DSnFhIXnbyUzsl8ihaKnB5uW/CsGjD/+UMS3+GFz3yJV7/xZVpphp2WzKYzDoenDA4fMB8cUi4mlPOKajZjupjj545xUWOMReCCI6f1JFJTGRMT0Ubb3hS+wa2sYcWEEEvkhrjxZbiRSyQsaOtZPsPQiBsOF6zDRYei5vMrKZfPtDmYiEyp83658Jug6b3DWcFS7toU9k0hTpg9xtrvKZ2EgySuWQ/oJKXV7XF+fMJwPqfwkiqGClOV2NoyLYpgeazUGosa0J2lU5QPA5mbwnEVfMMfpRXWO6ReOUcJKaiNQaoMRJgZp+OQ3KKomooPkPGgMHGeW7Q5lgKlFXmrRVVUVGW1lIMU8zmdTgeZ6OXdVFIivcCYgkcP77KYfwWVp7TSDlnaovAXSCWjTXw8SGLhLRDUVb2c3ZYkGle7cE9ic7mL7plaavJWKxQizjMcjji/uKDdadNutTjYP2Djxk2ctYyHI6bTKfc//ojTo0O2t3fYvnaNre4mp6cnZErTStt4Z8EFSaYTgqKqeHZ8SrVYcPXSJdrdFg8ePqYsa4qiDLKaBmSQkllZcfveQ/Yu3WT7xst0L1UMhmeMhqdMpmMGVc3UWioZPlsz0uDOnXtk/S5b232E8pTFnHYr5cb162zv7JFpjTM1aaoBh9aSmzeuc/XKFe7fv8edO3eYzyaAwNV2bU365bNMsqD1r+oK0WqxGE/Y6G+iEkVeZwjvqauK6aSkrCRCaNIsw9pgOoOAolhQ1RUnJyfUZUmxWFBWBpRGSY0XkrKsKOYFrXa+TPQaAAkUQiQIKWl1E4ppzXg4ZzId8KUvfJZ3fv6fGI/g61/4JtevvcTv/94ue/tXmC1qnjy9zdnZR7z3w5+ifcHN9uf46md/j3//vf+D3/z653j/zjFSC77x5dfxf/63yPMhWatLp51Sjifc7OwyPp/w5Pt/ymbaJukoqukFD48HFOkHVH7CRtFiPqgZTktaYoN+L6MYPsXKfbobeyzOTsnSNr/9W9/h81/8An/8RyOkgkwpptMz/KzGj2pE2kZJxfB8xsHOJq1LLbRKObuYcXo2QMk+eRrMAZTIQFtU6vHSU8yHOFFSGMf5hWexEFiT4iqPd/WymE50gkSQpRmt3MbeVUtZGZK2QLbh2muvc2nrVd7667c5G55THtQYNyaTIfH0JkWIgPxGZASAs5NT8ryF94KDS5fCnCGCcZRMMoxbpoSxdouganNGP8elrV6h+zrOTiL0iM8GRzx++hH/8Yf3OB4vuHSpxde/8Sn+2+tv0OtfRuUJXopgRCUsFokkDT1LHrQDaUAUJdXhY0Z3fog/vUv/4jG/+cIpdZ6jkzbq5G95qTfjwdvfQ57NWNx7xvbcMqsq7t/7D+g3vogZT8G3uPuw5IfvnvDR3T/mxy//gH/0X/w+f/APvsTerVdoUdIzGSrbpFaOsQzzU23pg8mAdMydYuAShtM55fEpo5Njbj+4x92TY46OnlGdnVKenzErS06MoahKVGWi23OYz4QPxUdIImmQxdjL7ElV6JEXPqiRhBfRXU9AnIflgn87xhju3r3LvKxCH3NRxPM1AmBSYV29
ek4+SAwbeZuSYpkjhRlcKzA6yCdrJpNJAPrSlDxJSZIEnSbUJqMsS6qqWhYVOs5faxJ6F+eyefyyaPARGDPeMlnMMd7R397mm7/927zxymf46MM73L79PsPROcaU4VzTSTwLJVonkS1MAlgRG6Qa90e5NoM3HpTRiEvHETE+9CHFgtZ5F/HMpi3GPQfQr1ptIiTSsG5oLAnBjRK8N9TWczacMpzMEYThydY4jE8iGBJN1kRUGjUMmrXE42bpMtnE+VUus7qvQoTrbPIXJRVOhdaR59m8cP166Xy5xpYvwWu/BKuFCmZuzfdCP72Na+n5UQFxcUaGLbb5xELFmVCMGePimCyDdxZjLcY5jA0AtBIi9GQbgxKCTp7TT8L9dFWNMAad5aRJEgyWlMRJT6IFykUjNuWoogouTzVpllJKqKoCX5Z0asilJFUJSZJQSYNwFq00vq7DuiHEOCcFha+x3mKWJIQj0RoRR6oZn+NkD/QGQua4ekxd1+ANjjr4KSpwElIbirdgkBjmMDtrISr+hIwqwl/x+vUo2mjYK4A4zR6x7O/BB0cV5wKVHwiAkL0KWBpGOBE2ZMhLGwUvq2DB89T9Un+93AhNYSEQqCVasGK2oEn8GzetZkzACk1YoRoKj44BVSCQzmHrmroo4qwxz3QyQrQlMldLg5VVoIzvG9HrJZLUwCHRLdNZu7yehpL23mJtmA9VO0eNRgiLQlD7YBRfUvNsfM7PP37InaczdqeaW9fnvNbqcOmFq2Q+ODR92nso57hqjrMFZV0xO73g2bND3jl9ytnP/pp7Jz+hmk5wIvQu4MLdV1qAbYrMtUPeC1Y9b+vBYK2AY1XQCEKxZa1bMlJhBphYFq4eF1ymYxBzzi4ZsuVKWzJ2wbRh9a2GzeMTQShe83PgR1hnUmoSpajlymmqqquA4JYli7JA5B26nS5lWTIv5wRJa2gMF0QnLYIkIXxmS8OoyQbkNitEbNXfxrLBGATGhLlv09kcJS29DRBCBWv9qgpSDL/K11by1HjACYlUannAJUlCpcOoBm9dGP7oAsMTzr2AEKVS4oxjPBwwGozYvXwJJXRwGIzs6bJwjs/NOR8KNmOW60EQCtHQgxMOdCElWmqUVrE3KjKxXpDowAYWHmbTKYvFgo2NDfb2d9nZ3mI6nTEej3j8ZMbYGF7/3GfJs4TR6Smqv4kSGUpkWCNBB9TLesFksuDj2X2uXL1MXXvmiwIfr6mubej/SgVSZ1zMSiZPj/BJKFI3Lt1g4+ASz549Yv7uB1ipcCoU1SpRoCSFMZweH3M6HtDqZCjpKZ+OGZyPuXnzJi+99CJ5kiAiENMATnme8eabb3JwcMDZ2VmQQhrwNkiZjDHUpgpKAimo64rFYkHQWzsuLgagYFrPmEznTGczilpSVAIhUypR4WxNkim0khjrEc4xHk/DAFuV4DwUi5Kitgil41BTWBQVAr/sg7HWU1aWurYIpdja3mPKjNF4zryYcmV7k9P9HS7OTyjOCpzz7Oxv85//3rcpXc2Hx/cYXZxjzqZ89sUbfPEzX2V3+4DBcI7IJ3zhy1vMp8ds7PUR21e5upczGD/j3rMnnHhBkvZ58VbKtLjHyy/s8fJXvs5P3r7D0Z2PGc4+ppu2mT9NucgM3StXWNDn8HxMN92ks7lJUQ+BKZVLKcsxf/4Xf879R4/Y2T+g5SwjbxHFgla6i1UaoQV53mc4mnJ6fMzW5jbzuaGuJUiFkiHB29rapKwrziZndDsJpsyQuWRrY5PRaU1tLbP5jCzvYmqJtJYkESzmC3Y3txFK0+31EaqiHk0wRcVcGmS3h081u1f2een1l3n44RBr65i4soyxq9gVz0kPrU6XbrcbFCtS4iMijNJkrV4YESHWen9EjKcxUW3A+vVXtONaxlJbW+5/eIe/+t5fMrVzXnrxFf7+73yTy1eukOcqnpk6xImobfM+DOC1cc9LAdJ6vDH48ZDJkyNOn1wwOxowOTxnuJgwzeboZMbWFiS6TdpWqLYnmy/wi5JEGlAFw+kD6uocaUv2+m2+9uWv81vfeIPf/MLLvPTKy8iNDWZSIekhFhZR19QOzlyNIqFbWtx0xJOLUz568oT7jw85eXrC6NEDxhfPOJqfcmbnOFOSz0raCwNO410474IRQ5xN2yg7nF/m40sJWzz/QwuyCOQR0UhqTdXjvUUlKVJJnAlFlXn8OMSFCIrJmB81Y27Wn9k6++r9qihpztmGvvKsZHHNeV0WBYhQADWzvdrt9motuGCDXlXV8yqnJpGJfgLOirUEPvQ6Szy7B5fo9TfZv3TAnY8+5NHD+4zHA6yrkYnCeUFVG5QKMTNRikSAFwopw5mi04xOHvru1z+5kEGeJ+JAaGQASJukugGMG9JASb0kAdZ72hpwN3hYJyx7zJ3EYEO/Wix0guOGDkYga7loOCObzbSWbHgR2ar1vUvIK2Pvf5hT27BsodB28cxmuQ9ZqpfW32KdoVsvSm0j74s/2zz7UHgFRlJF0qC5o55g+OVcGJCtlSZNU1KdUCwWTCczqiIU3Q3D6r3HsVLENZ4l3lpQKpAXXqJjDiy8QHtP4gn9jonCaEmFC2ZmtYcs9O3reG3Shz0kTDAvlC6M8nCmpioL5qKmFAYtEnTMxTwh/jipgmmcaDI3ixAm+O3GmsX6BOdTHCnOy/j5PDiHcSZ8TuvRMuyFuq6RaY2sa5QOzJoSQZnmTDAH+lWvX4+iLVIFYdGsLeamOzFKsZrEWqrAkDkpIjIVCzIX+owsDQoglhIr4BPUdniF2QiEJFFAUwx5HyncZUHXFH1hVa3T5c//WVHYgkb21/TjuWhcYSjrBaXNWdQlF09GwdrUVNiyDEyKCjIusZasO5rNJVEyJOU+Fm4h/gVmIhQrFiUciQwOSs4nwTFKgKfCIahcwdOTEz6495jZtKQ3tcwuJlSzMDeiFpCIsIh1qwOtFDC0cWztX+Hqa5/jWjXl3Vzw9Ed3KM4nWOlJCNS3iPeCX0LpL5874bkqpdFJgo9Of877pSdNYJsaqcNqgzdNEz4yow1D67ylmTztm4jD6r2W6JALPxHitcC7teAV/wTg6ROZiQfhJYlMSKRGywQV6DGmkwn3FwsyndLb2GBeW7QOfYyCwJ6FSxex8BRoqfEyWsiGqmT52WD13su0K16TdZbSWnIbgrc1ntm0QCmHEBoV57QVRblkQXw8fPAhoQ4zXmJPpNZBc64VOk3QZbTXjRJGaw1S5DGDUjhTB4248iwWM4aDC/YuXUHrnESl8e43PRd+KfsDQnOwdSSxF9M5h/DB3rexkg4Es0QridZ6BTISbOe1Emz0+2xvbnK4WDAZjZiMRnQ7XXb3drl69TJFVTE1gsW8IFWKrc02wsyZz4c4qxFJi3a/D1rinaA2gSW4d+8hRblAKR0apeMfY0Mvq1QJt155naPzKXcfHrKzvYu6gE43pd3bRiUtKhMa51udLiJVJK2M1mafw2eHjMoFxxdDTN1iMbqgmFU4F2fSXT3AeUdZFksm2Plgy97ptFH6IDD8KGREC1WUPznvUFJS1wW
mDkxqolOG4ymj2Zjb9z6k3e2SjGdM5gu8D8Nyw9xJi7cCmeUIWSOFpa4s3V6Pfm+T6WxBbWYxYRAYE62xVehNNbVhbuckSUaaZDg8ZRnmx21u9el1ahbjU06PBuz299jqHHH7vTs8+vghL7x+A5VB6hyfevEFbl65Sf0P/zEbOiPrdfEFXL9xk7PiiJ2dHD89xaQ5byYbtPWYjz74EX/1/feZT87Y2YZ/8t/9M05GR/zVn/yv7J895dtf/Qo3fv/bvH/0A54+Ouejnx3Sa2/S6W/y6N4AZ9oYrSgpOLiyARs9LgY108mM2sLG7j7y6GN0Oafb0xjdwZqC/uY1rr94i3/6T/+Q//l/+h8ZzyZUZQ0kIe7Wwe4ZZ3H1jPlsyMXZGYt5D1t7trpbzMYp1hpGwxFZO6EsjhFGoHVG3soYDYacHV9gami1NqhKQyYFWZ5S1TXj8zlnb/2MJx89YUv3uXppk8u7KVq0kF5QlwXC13il0WkLISRBCSbI210cKji+4WMMlDg0Sd4hydsx8qxAy+f//5czbU0h0IzoyXTCwf4N/rO/9w0uHWQBVTYpHo2THkQVZqLJMIIDG0YVKOFDn7RzXJxf8P5bb1NeXHCQpzwt4Pi8ZiO7RXenR3ezHeYzaYnPFNiCcXqE1WOqbMy4njHvSVxnm7Jc0EsM/8N//0/I25rN7TaKYNYyd4qp8CTeI4oKM5pzWEz5+fCIpw8eM79/xMWDJzw8ecbpbMi0qPALD9MJ2AVVUlCrkmI+w1YeX2ukT6gJPe5LrLLJE5Yh3i/dD4X3y1iHbwBoEQsKolNgc7dl6DlGL3MNY+rQJxxfzxVivwTID+d0lBn6AAQ1/eJhFlo4k1bKkHgGxd+v63r59SRJaLVaJEmYD9b0NJVlGfKA2oRcJfamyVBRRAVOAOZDvzfU9QwtFDdffondywdcvneFZ08f8/jxA6qyoNfvkaYBJExUitYCIRxSNfN2g5lOp7dBohKw4KXAy6hMgCXoKxqLey/CWSUIQCYrIH4doH9+HwQWU0QV0fIlmlEB4e9S6kg0PL+flkqzT+4hIqngWfZsy9ie0ziCSiF+sectAiurwsIvz2PnXJi/6D0mFt+rM95G8mBpV4JpCs74zH+Z2czy+2JlDNfK87AGRFizodi18fpiYQgI4ZdLsmHmG/WSlDL0UvvAUvqiCC7J2uCcQMaRG87XeBmZa6eXZjumLPHWYFOJyALInspY2HsbJMjSYoVHxTxQNGtAKpzSWKFpxsn7ZS0QjJtC3r3ao1I2JjEaZx3SB0VT6J8N+6Q2NdoYpLJI75BRgip9yF29+/+Je2TTBwF+zZY4VLtE1G25AWIQb1gCLWVgKuKCsr5h0VYaX63DR12XMDYIiRSsbcL4Xs11faKTM5I3y3+7YQgb2eISifKxcTbkqsGMRIhQtGkoXEFPW3SaMBoOmA+LgCBIhfQuFKK+YZtCN7kTHu/CmG4h4kwL0TgohqlcYXcHp7tcSVrakyBQLglz3XxFIiyVN4xGZzy8/5jjpyewGNEXXS53cjaTDNUUzM/BMmHqBGiCo5NCpR1ckuOkjtatYfG7Fcy7fDWozi8+/CDfcNaCioW5EEtZwIqNEc993qYR20c2QsaN2jzjWN2wjjKvehPX2dcQRK1dIYiNiQZKPvfeDbAgfGim1UKF9Seh1+1x7dpVup0udVkxm82QiyIcSHhSvWpUDex47J3zAYlRUj3f0N1IWoRcumI1eyWwa46irGhbh1AaqYIML+CyoaC3xlFUZegbExIhogTRuZUzqggHgtJhpptUCp0kKK0wdb3cN1VV0yGuOx1MSRAepQXOVgwvBpiqRqskzodpkoUoiWn61EQoLPI0Y6O/gSlqytmCsrQ07l0QJVbehyIxPspw6AUjIC0lqdS085z9vd3glBav9ezshCzPyfIWKu3irGNzo4/qSWajAa0k4+T4Au8NVaXxBKRayYRWmlK7EqkU3lr6/T6LwQUQR3tYcMaRJC1eunWN05NzRqMJxXxKa6zJc4kzIL0CG4abGyyqlbB3+RIni4LZfIpTitF0xtMHj5ltbbOztc9wOCJJoKpKsixjc3MzJFs4alMxL+ZUVRX66rI2SZ7itab2Ie75WIChFVmakuc5vW6PqzdfpLKGDx98jHWevYMDCnPKeBJYMhX7Ceu6xhUlKknIVYtqPqcoaiQaaxwCRZqlmJhcKZ0gcThbLdFdKRM2N3rs5H1mi5qdvX2UK+kmkouTgrt3j7m612evu8v9xw/43/+3/4Xf+f3f4cvf+jpZlpOKBJ059G4fUaUhDLUtWcdzwD6J95D2QGb0UUCbrY3X2Np+gV7nCZPZET9660Ou3voav/ud/4aOvUAPn1C3dvjml77O99WPuZjPsHOBsgY/mdBJu6hum1tv3KAcnzEZzBkvNH/x53/Fb3zzmzw9PeN8fEE7maJTQ1F7WolkMhmyqGouhkNGsxGVLel1O9RFcPTzzgaZmzWcHc+pWdBptYAOMik52H+BzXyHD97/68A2GIvwBVvtTTY3erR6OU8On+HRtPIexaJkPlvQSlMQjnpeoNotWr2c6fER7U7N9v4NXn3xBRJ0OFFVKBSUUngkzoVktjaWo8NTLl8+WAJVQmo8wcQmSXOETJ6Lm/EUhJjERH7gF0o30XzPhX7Vay++yP6lq2TdNlZVFN4gtCD1oD3h3xNx9qp3eGVDIldbzHjGe2+/z3f/6j/y09vvc/PmFX73732V9gvXuNqWbLS3aPV2EInCKsF4MccKyKuSBT2m7TH6luRT+5fZvHGLnZdvsXflMtsbHWSvhRM1RhqchbqqeToecDgf8fTRR9gHE84enXF3cMjtwV0WgwF6ZDHDYFZixByvDLWVJAuPtg5de9rSwcwjrGThNFZLqtQH9c0yJ4ugY6OeiModvF/e22XhRsg1PI2JRvx6zG+ka2T8Id74KgzCVkphpcPSWMSv604+cQyzApwhJJEi9vmI2LrgV7RPzHH8GiMTfrcoCsqyREQGLonKgSRJQpIr5FIRZHzIKYQPDFZZlpiqRiZpKEa0ojaWqqgRSnH9hRfYO9jn4PIBF+fnYXwJHo2M6YbDuwrnK0TUOEX6MM7/apihdabSrxoQhFgrGsIIAEckAdbkkOFcW1/vAhFHL4R5OGHETHNjvffhcxNbgOIvNz1sv4xQWGew4hUvSfJm54klmRbcohvgOhSYhqoOzFhRVdFdNPRKCuvCoO+Ys64buDWAS0MW+DVviV92nc3X12DyCIQHCa1WSRiNJKO6J95TvwZIw1q62NTOjdFOZMpcXeMXAms9XmqMdQhjcHWFwyBTSeISfKKQOvSwJTYUtVprSDNypUhLj/ThOQkJQofZyzaqkcLaCOvUikB4CB+Kd9mo5poN40BYi/QGhYmMmcTaBvePhXNUHi1rhHj/pXMRoIk5rTPLZ/h3vX49ira4UYL2OXxJRq32qmiToX9DrpyIGmpYRMcdHwNgo7QM6P1qA64bk6wGCcZ0uCnyaIqFRtYl14JcQKME0PxHKYlzK9p9HYWxPtrAe4PBo6
WgFi441WhBKVxAqvKUel5Q1zVWGLQLFbj6BLgZVBWhsT92SeK9xLlQ9CiaPr3AMmprSYVHOYlwUUroHd5XCF9zcXHKg3uPGJ2OSWcl+/0tXj3Y5aCfk0qH9TKaO4b760S6HBCokGgZhvmWMSDZKHUQcWELokTRxUPeNwFoSZnQzGlzPrg9CiGXKE9T5PmlOYgIQyIbzbSP97s53Hx4dksJQNSFe9+gWTEW0ayxdRZ01QO5lKc2T/W5INUgcStGViBIleLlW7d48cZ1ZvM5b/3sZ0ynU9JUU1sbmVGHqe3SjSmAnoHyd9YvP2fDOjfIWpzOF6XBwYxHqzT2U64OTWMstQ3IvtQarVMsJUVRhYN8BTfEBO15NyylghSxkSNqnVCJ0M+FtZiqCsWelGHOjpZ4W6N0inOOs9NTZtMZeTcnzTK0TqJJhQvzopbXGp6pUpLtrS367S7lbMF0smBSVBTlPOjP48DVpq8OEfrDhFBgaqSHPNVhZpmpaWVZTIDCe9Wm5vDZU/4/6t7k2bLrOvP77eY0t31dvmyRSIAASVAqkpIYkiipOslyNa6BosKuie0aeey/xREeOMIDDzyoqAgPqkJ2yWpsqdRQIkUJACl26BJIINuXr3+3Od1uPFj7nHsTgEv2jLoZmS9fd++5e++z91rf+tb3eTPl8PAmViEI+6xklBdYo/EUtMpwfHHBumqYZTP2d3eZzEdMs5Kmrrn3yquMtKYsRvigUq+NxShDno+pl0fkasz+zTmPHz/g5OgK3zhMhMKKeacHVGbZvX7I7PyCLjgyWzIf7fAg3ufxkyNeunHK+9qzrg9ZLhd473n1lVe4ceM6o/EE7x2r9ZrziwtQBp+XFPMdRkUOQdJdMVeX3gBjNFHDRM2lD8BorpYrVquKG3fvslh1rNcnooKVZ3gvnpG1C4zLsdCGUDRNi2tFBKAoShwq9eSkQz9KYG6MSWCLwjtHURbk4zk7O3OMv2SceXL7Ep/8ZA2VpxgX3KopFt4AACAASURBVN7f5fHz9/m9Pzjj6fkjfu0f/lNu33oVizAgYgnR10QTaGKkVBOcr7A2g2BodMSqMbde/jpvfO0f8eT5Q4zz/Ivf+Id87eu/yg/f0nz7B7/HwY05H797xK2nDa+9/gWe7jzmz3/wE9T5PuP8gH/53/xrnl+uiG5FcX3O137zNZ5eaP7j7/wB//bf/i/Y+ZzgReTFljOcs0x1wXmleOfdd/gf/scfcXF6RG4jNhdBqOmsxDsHdLjOEfwIq6ZkI0U0geUCzs4fseIZq5WjLKfkRY7Ru9w93GFVXXF1fs6N69cZT/dYLmouzpaMi5L5dIxBzorWNTQXgZHaYb+4TuEL7t69TjZxRNWhM41iTIhaqgwIS8ujuFiuuRagF1vutzpRPVY0naPtvCg+6i0xg7STRD4VbKWHj9I3o3ohDAX5TOONh5gzIgcdCLR4MhQZYl8iPpPOZywXFe+9+x7/8f/+ff7qu9+iqZYQHQ/bY/64esrERoyv6GJEY2gXFa2CVisODq4zzscc3rvH137tG9z9whe4sX+DXE0JmaWJHct6yWq14pPzUz45PuX5w+dcPnnO8fNjHj7/mJOLh7jn4NcZ5/6KtX1O1laopSV0E8gVZVZhqMijwZMTvAGTkSmLMUrOzGhpY4fza3wUifP+fNx4marhfOzbLHrZsT64jX1Fagu8lFhoMwFG68153SsbWotOLQreO/Hq+tSjP0e3A/IYBTTrq359QB56i6QEaccUVKutfvv+99u2JXhPXhTkeZ6UCVMPOUHYgkqBtRTTCTb106kQ8CpQtx25zQguJvPhQFaW3Hn5HnsH16jXKxaLK67OzvFtR/AdXVfhfA14TGZFl1xl5OUUnalB4Mv7MFSWtjMw1ccGfRwRSbFoL4AHyqnhPfbURpO62nrBvL5uFlICEFP/Wp+Ub8cfva3KZypXfJbN1b/upz/XWyVU2Ys1VSsqn6uqEqqslfMxTwyq4bZVQ8mCPvpKYt/pGrbWyafWyHA9aZ32Kpt93N2fCyTKfUQNMapKAL3ZCs5MAntJathGS7MSCVjXzotHburND5klJqZQHjVBaZwRsagixXvBiom3dFunQo/VaCttWDrFkNEj9Ma+KokihAwdDTol/b0CqcTSGhUdOrQYWrHB2bqnpcdbqMNRx2GuQ4qndEgFigQsCPPv/w1WkcdPRdImCz5JY0aGcrUoQ4akDimIftzqkelbckKSvR9Qkp6OxebG6qsv1tpPUfXS5hf6kfZb1Y6e07wZRGPS5JIES6JCI4bWYpa75RCf7nij5BCMCrzWrJXm3EeWlxUxs+SzGfrGBN02+LrGr9dYJw2RVoXNoahTxY4AWNA5UeVgcjwiqSolcoNSOVZ7TLR4JwimTnxekITxvGu5WrfYtWLSWYrRmPndm+TjEjWoUvUJm9zSOkKBeLAZEHNfG/B0ItagUtU0VUfF0FNLkpOOIFmpnqhDqjCnErjykhSr1BxNkARQ9QsbCeK1qAchQmKoAFkwxGhotYEsw4RI1rlEzdtKpL0b5PB7HzNpOFaJZrDpF0AzrIX+UNNaY9FScldRpF+1YNrT8YzR3oT3HtynNpHK1XROKCck82MffOKBRzm8lEr5d2pxjduoq5exSL2Muj8IosEHhVKGIsuxGogdRnusloqrySw+QhMjy7aWpCEN/ZA2qyiG0MPG2tNJLJ11KNsrQlkIka6WakpUEhBopUTII7MYXXB+dUndrJnsjIUiYHK0LfGBRMcQoMFaC400th+fHKGiYlKMObx5jWvG0FRrquWS9dWCql4no1Shu4zGFt86EbsJETpP3XWsm5ZMaToVyYKsS6Vljcb1glHXECrFuvY0naIYT5gdZCyuVqi6IgsVOyNNcEvOV546FuyVinKyT56NKe5qRnnkvFaMdueUpuXs5EMe/PU7TMrXyUeWZvKEo5NP2J3tUZSJheQrVCgInewLoSgJdoJtLxl3Y67dewVz8IirJ/fJo+Fw54C9g0OenV3w/NkzaBtoasbX77I/LXh4/z5v/+B9VDbH64AtDLPJiMyK0fa4LOQQTEHH/u4+N2/e5ubdlymmExrneH56ytPTC6qqY1xMcK2na2pa10HUWCuV2OVqQVdXlHmOyTJ0Zglai6eeUoS6oalrZtOxoMqJao6K+NDigqD9zjdCdTOW8c4BrTnhYr3m5tig2nO+dOcW2e6IdbXk3fsPmJfXmU+nYMEr6ACzdVQZU8peoiFTAYVHhY5v/MJdbhz+C9ZXK165N8eq5xSF4+jxU5brlr3yOsEteftbb3Fy9BS9tJS2ZDIpyI3jn/+Tb5Ltet56+/d4cHYfw3XQV+SqoTk7R1nP1XqNMQ7vJ8SxGMDWq5pYR7SXynRbdbSNo/VtEpHSOC+JbAyaWBkwhtA4nj58QmhX5GbMZLrLZHZI1zrqCG3QrCvHfC+nsIZFrMjyTuwljKUY79HpitOTJ2RZy3QClxfn/GT5nD/aa3npa/+I6TgQQiZ7rQLoZddBGUUxGg+UPK0QsQsVOTs944OPHvD6V94gGNlbr
VKpSX9zuvZA/3BCDhWEZJcSAyb1z4nMNwl4EgsCAautKMymvtLo4NGHT/jOn/4pv/+7/4HT40dEV5EbaNZLLpeat599QogNIXo6VeC8xURFMZ1w/fYtbl+7y5ff+Bm+/ku/yO71G7QELhdrzo4fcbmuODo75slHD3n87ClPjo44OTnj/PKCs8UZarVGr1asafC1QsWMTrUEsyYmw2Zl2+SNmCXUXWGKHKd7yw3pJVfKo7UiV2BVBjHgnR+sXgSslr0+xr5ylgBP1Uvyy/P7KIGr0MlTgDGAiwLoBS/nSogdpPNKR4mERZzNEL0EpD0QSj8niG9kJDGStEIZWSdKx8TOUMKIQYJd02fqKdZx3gEKm2fyJa2wxmA1GBWxWoSSIFXylCJakVA3kwm2zIgWPCK8kuea6Dq885ggYxGIIs9eFMxyoWLqEDl/fsK6rvG+IxLQRtO2HREtnqiKIRP2PctKS9XLoBOFcaNkEAEVDEYbscbRhoBUB0Oi90WJXtBakVmFKmxqz9CEaIYKFCrxoGJPg9SbpE2Rety3qlVh027TX0xfiZWiZx+vKvqE34cN0wvDAHYq+kRVPhojFE7v45Cs9c8/PKcg5sMY9eD35wmy9Pe7FqW49H6RHkElZQ/lRenUZApSS4RRSYNAibev5G298FwAHSkyjTb980vSp53AplolVl4CAXQErwRExEks5RD0I/gALWSpDy/kGmeELZW1nkIZnAp4A44IqiHS4tyEtiuIUUM6a4hGZsp4sDltgNYLIAJxoEgGPDZmRJV0JhBxQgFCRLhP9CfSRhoiuIhv/y7QI6OkuD3VO30xIU/pWAjJPyR9RZN6nlQQTwYtdL2hgJka+z77UlsUxiT6YbRIZm/9FLDxoRjQxxhTdi0Vg+i99JEk+dJUw0glZbkhtIqiiCPvgtp1VCHwfLHGN458OmOvnHFweMDYGrqLC9b+CBXWiNIAMDDHVQrgHREJylXURGXJigJjDW0X8D7iXcRHobtobdIB2rMPpeLX5QadZxgXmWYlxd4u+toelDkBcFHoUNKxEjFANqA5KlUdOxQdMXZoHQlKpyB7MxZgGBxAkil13PJt67nBKkpNWdMHGL0KWYBkxqgAqy1aZ6mx2qPaAA6cg7UPNCGivKeIIjHbz4tsPClrUSkxj70qZNr8tpAt1SNZfQNzn4Snnw5agkqUIbM5o3JMPplgJmPy2ZhsXEDr0SaDpkFboR6GnrcN8hr9cMaUJCs9bNDDjq2QqjGpoVgbob/Y5MniW7quAhwg3PS2a2mVo+paXKKumZia3unfclqnRoL1LLM0rRFKlTWiFtkEWQNJYUtpLSh62jB9lHG7Wl6xWFyxd7gr1TBlBAEzFqLCBU/nHRabhIMi66airTsu/SUoGE/H7EymXN/dxezsEghcrBbo3NC0DUWmUypv8I0X0ZTxCL9coH2gVVEOd+dxmRhNj3RHc3FCyC1qVNDGSB0N0/k+wSt823AwK7j10h1CNDw4vuT05Bh3sWDnGmTFAePxjKY+53vvPeBkeY2ffX2XsDrmW3/2Nj/39w75xje/zrPlW5ycHzEZz/GxQStNZoyMVQzkFnRp0UVBoTRzU3Dj5svMX7rD6uoTdqYTvvrGz9DONB/+4Z/y+JNHjLuGPHievX/Ea7d3OT0+4fJsSaM01kSM7jj1olRldJTgykugqJVhNt/B2pzpwT7FzoyTi0uyckK1XBNDTIdYoOl8os2J2XvnGoosJysLnHPo6Knrji4GTJaLalf0FLlhXOaU5UR8Db2n6TzGZMQYaNqaqq7IbCAzBd4UFLsjmtWC1rVk+YixKcjzEWU5pQ0GJVKmw2EQldD6Sm2GAFnuk4gOQvnNDXzl1Zd56dqOqOetVqyu3uX/+O1/w9XTh3zy3odcv/VlHsVnVCdPmY1v8Zvf/A3uf/whR48fMMuWqHjKn/3ldzlbH/H4wce89Sf/jqzzZC4nCyW+CzTJLHZSTlhWF1R1ZHG2oMk0s+kIbSzLqzVd1+KJcv8ohVMKT5s8knK6RuGbgG8aMuXZ3dGMJmK70QbP0cUVmY500XB+viR6z3Sck2eK49MrytmYg+t3KdrIUeuBU+puSbdqUMFx/9GEunPMkP3N43Fdg1YWbcvhnF3XFS4ECkwCJ0US7Uc//h5feO0Vbt+5Awn8kHNXDftwH092Ss5jk85yTb/Hp2QhIgGgkz1NGdmjhKppaDpH0zacXVzxznsf8d47D/net/+IuHxEu7hkHGq6phKFRZcsFMoJrZ4T8pzReMbt6ze4desWr7x6j9dee407d24zn884r1p+9J03+eThxzx+9ITHj094+PQZi8UKt6xYVWtcVdEuV8SR4UTX7KMZL+rEkgnEuExVwECrAFr522lqLVUETUSpdgiYBwp/qNlOa30IkpBsgYMxCUr050Kkp0ImvD9NVq9q7dmIlW3o/v1LpNgnSoJGTBXxIP1Evcx6f6ZIMK+TaXXPaElJfOoJk6/3Z5FURXpJ+v71c5slPzlRdy7KUoLvF5gc0s7hg5PYJlVefBBGUuhaqq7BEQSIDAHnRXkbF4QCirA8Wh+kwu8dmkCWWWyeUYxGNC2sqxZCkKqilj4rZfsEAFTQQ/8U6RwmBIlLElgv8YImeKHJEb1oJnifkj4G0TNSnKiNVGui2o454lBF6hUegxel7aA3IG0aqC2qJhBElGcDj5Cub0tlfWjpeZEtNLCIwqbvrS8AxBRbETcJ4AsAdUrY+p/t5xA2Vb7P9LRFUYz1MSW+6Vtapcg8rYG+XcT2FVpAK2kBkuGIeCXxl44SZwUr51kMokjr25agDUolfzktoJBLMaVNRQxtDCoTMSiCS607Dq8UbQxiF9BKjN4YJ3FP9Bgl+UbXaZq6Z/oI0OSDqPlG3YGGJkAbg/QHRr/pLYyw7UWspKKR8lk1TOemMpkq2e7vQKWtRyI0cmT0bxg2ZdeN2bBQAnt4cJtjq1JQ/akK8/A8w/PFjYdb6Bf0pzK8Tel5w13uFWG2n8OH8ALiMCASWhHd5g7pN0ObuL3Be4wWydlHHz5g+TTj1v4+Iw1Z5/Bti0283KgUw1uWmD5dUxySU+nBkqAtFZXoguPk8pTjkyNGN3bICysNOcqjcUzHGQfXZoymGcpritkEOyoIiFx43XkyWzLKJFXrOePDCaH6JKrvbIjDxxCd9NZ8zlykAUEwpc1GJJOYJrKnrcqAyusGQQu9k0RZx5QMayWVqtTf5rz426jPPvtn6Af/fx5Dsq4ETbFW1KQiIn9vjRibe+9ZVxWr5RKjMmxWEFLfnbEG3/nPH5ZhAPqHHj7vk8bNprqhVxhrsDZjPB5z7do1tBlR5pnQCtJiCV48YUxaM33lsU+bFQg90mQY26KSEpjWRrjw/XpPCmC95PL2/dZ2HYvlEiLJuNvSp8wKGExgtSYvSorRBLShcw0qyOHiFwtcVbFSllFeMBqN2NmZYYucqBVdXbG4uKRd1TjrBaVVQr/0TmrQQQk6HDQ4AnmuKQqLLzI6KxTbtm0ZH1xjbHIuT8/Yme+T25LGB64dXCPPLHZd
sKpqro4fMjaae9dmgOKTp09RnPGlL99jd3/OZCfj2o0pZ92EplFARsSgdIExCI1UeYz15FFhTEekJoQ1RREYlRBjjdIdxaQQFDAonh6d8MreiMP9PR4+e4JbnRDqimgMRVGmA8BidI7RUXqnkKZyFcWjbTTapekcp+dXPLv/ERf1mo5ehRVW1ZroIkEpdGZkjWnp5YtA5wQJjG26n9N95p2TINLDYlFhtCWzCuegLMconaOUwXeRet1QWgcTg1Kend05F7HDm4YGxUXdMloXFF7TtZF1LX1hICpouTGybmN/3yQxKFSiKecolZOrCfuTQ/TdQ5T2ENZ84xe+wdOnY77/4/d4evKAq+PnvDy5zn/3r/97ysMb/Pbv/huePPkev/2//6/oP79NefAKnap49viKl+68QXN5glt31KtI3TUEq/E+YqPQhC9Xl7jgKExJ07aUhZW9SSezXm2p20Z6fBLl0zno2ohSDbs7Jffu3KbMMzBjzhYtRjdAYL6zQ9c6qtWKk9NLprOS8WTEwf4+P//zX+WVV7/Cw2cXvP/h+7hOjGAzIyI0y9UisUs2wYNRht5uxWgRR7BagDTF5q9zLV9+/VV29m8wms6G/WHA2IZdAxGOIomG9MFpTIExwtDwSn5OWY/WHSFaQixZrRpOz4/5wY9+zE9+8j4fPXjIydkJo5ElcorN1hQjh9eR3Ja4VoHPqCko9m9x/dY9Xv3yz3L31owv3rvG/v4BEDk+PubP/+xPOT4+5uj5M46OnnF1dclquUJhaJxjNB5jvCejYTq2PD1bEmxBR4MZ7ZBbBe2aDagoqtZCLeyD1f7MY1OBjJEsS7TwLYAYGPbtzEqlwTlH6HuNEwhKGmc9JDwvUiG3e/K3PxozZG1SBQ8GrUOKa3RSoU1S+2yCyr7W2bemREI6fyPRB5zzg9daWZZD4tUrDA+qkYmJMgiYKKG2KWM2AiTp/PDBo1JlROKbRNftPF3rRCFQ2y1tgDR2um9XiPTlML/Vj9WrVBZFgc00dVPRhU3FsR+/qKS1Y3Peft5pvJW8RKmiiEx7qpj0UvgJe+7nNk3AMBfbFMZN/1gcPPOU3vSL9d7DiUGZ1tbGUmD70bcH9SrUij4h+/zIQhIjlRLHjcjbCz/zKSDg8763nbAN18wmvlbxU8D39rhsgjBAicBXig1CVGkfEljIJ29eg8IrhdcGbTMB2BMQr1NiaVREGbWhYaLQHowSdVDRlekJj308rQaxwg340OcdZtjDgoO2alP7Qa910QOKacyjI0ZHUF4ov0FE2wjCkhoCci2xR1CA0cOOy5a4jAi3fe4UDo+fiqStX1A9OkE/0WETtsrgKHpuMUMQyzDgITiZlPAiX1iecoMS9JvOkHzFCN5v/YygIds3yzZX+4Vr30ZJ2EIhhqRKDUhMXy5WIMaaq4oOxeLslGfVipPZlLs3Drm5P2diM5RyOJWaNpUWOl5fdUnBskq7RoxOqHV4tIkoFah8w/1P7nO6XPJ0ccIXX3+ZO9dmZNaTWcXIaiaTjJgFQqahMJwuLukefMT58pKLyyWlGfHSzdvcu3ObosxF2Ub3yZBcg0n0DgiC16qeQhPYppZuxqhPbuWfzcYmAhpSgt6sjSHJi3GouG76zvr1osQLyVqUBhecUDVU/xrwOZfytz42qNP2ZcgbkETN0EESDZEf8M5R1zXVuqLIwTpH6BzB+WGDH0CJrcsannv7dT/nWnpz0uH3o9ApFKIuqM0o8cVTnyMRksqU6YVzIFUq+8RNJ90bqU5ZawcaaW92H33EdaKw1q/rAdFTUkFcLVeIKMwWkhwc0pcq1y2sxkDUBrSlI/G+dcDojma9omkdlTbkZUG+LMkmI6JSjIsxN67fhBBpGpEPXtcrbG7wbUsMou5oTEJHrSLXGTs7M9RkRK2hcp5nT5+xOLtiVpa0rWN3Z4+yGKdkp8QaTcigOltj80jbdjw+PaM6OaMcK9YVPL84x7Ujfvzemzi9ptipcN7SuYgPkc5FutbQaYOxuahWkcRVEvVVh0CGStYgaVPXmt2DG5TjXbQtWdQ1Hzx4gLu+w43dHRywuDonsyXBuWTiCjE4jJE9czIeY4sCY0fkKqJtxIVzLq5W2HGJtjlKBVwjlaOsyMlHBY0LLNe1iMZERVaO8J3cc5PJlCzPWNcV3bpCJVp413rW6zopmWlmtkQrjXeRq6slF6cX+AIOikOKvYwiz8hHBTGLnFYNK7dgx1oOdcFPfvQBb33rTf7Vf/lbYtaeG8kuVAIukMBJNlWzibmUBAzKKPYO9kAHYiz55W/+Im+/teJHjz9mNJvx5PEF71c1/9O/+/c0bc3Hj36AyluOn77H6HyBvZxydn7Obmb5L379N1iefsJffvvP0Zkl4mlDS1bOqF2Ja1Z4DHW7kOR8VDCbzQjB0XUtddOAChg0eZbj2jXONbSdA2WYji13b+7z2qs3KDKLi5bmwRGdC4RoqaoaFQ3WjonRQbRYUzIa5xxc2+XmzV0urtbMxmMuLySxsIUkYgGHUr2gFWgs2iJgWHQYlRGBu3euM8pN8sySSLhaLFicnnDn9h2MFsZGCCHRyV/cRDUwxg8JBzriJD3FEdEqkkeFDdC6luerZ3z00RkffXDM++/d5+Gj+6zXK+oqUo6m3Dq8SeuPuLxa41tLXhxixwWj0Q7Xb91lsrPLq699kZt374hh8cUZZ8fP+N733uTJk6ccHx1xcXHGOtmA9D5leSZU8vmoYDTbZ74zZ3V2xifPrsh0Rgw1TR3YvXnIb/2zf8nDb/8NZz95G4VL+7+W89j3570gqLJni+jMsCdvBbX9//t4ROtIOcopixFd11FVldixuIAXbX9hloQo63ur73j7HHjxtfrq2SYe2QhtqQGk6+lxejjLUveSElVh0QZIiWOIWGsS+yJjMpmwt7e38TvbeoQt5cH+LNdaU5gMqzRtiLikLulDwPVnUaqsxAheR6KTaqDY4IrgRDKvQpolgqgQEpJjUEpC0vy0bcd6vSLEgqLMKMuS0EhVU2s9yPRnJkPZTCo5Sio4RmdYm7zRtLTcRCXxJl6qXz1lr098vBNpe4MhBIYEVxJDhvc7nO9sn+9JRToFsUoJaBrpE4wwxBv9vifTvkmeZP7lmfsQszd27iu99LGSFjsDEsAfw+dnBp/XrzbEV+rF197+GZXGSgys5fbQeiM+0idIyqfkFSXgYOwBf5XmQ2KOkBCigMYrQ1Carm976OczBEAYeiaJ2TmtRCMgaTGoBFOEvuVGqU2rjzEDy06sNCQOMipilSGgCZ2lWjWo4NFREq2EbqAQyxux1loRaEVgx7skjicU44jE7cpootFErYlGibG2EnZgjGoj1Pa3PH4qkrYU/dHLWMjm0S/U1Kemki8EBqUMPcc1qp5esK2vt11jkcd2pa3fRAWR2iBhn3dZ8OIi/dseQ7/cgDiqIYFXQHCepqpZuSVHlwuitUysYWQtXb1mvbjEjTNCkZFlUn0Uf9M+4BXETypqMQl4dCkxTOMQPS44YnBU6zXPjn7Cjz/8kNe++DK/8rUv86Uv3CE
r2iqSvmizlqYWWKGgJXyyXbzQ3e97RNy52790gRoSeECDqibcXgInG1YTavOT494/HTcx48+JS+9/TOk5TB+UypyYG84zgSYqKu25yrlwuYvMkrhMqrjEwgXeZIV3WNzvq6qDSz4zN09RJvfe3rvPPdf8VmG4l+RCEukSkJgq5sjfKOEBNj8My1wjY1qhLDk5QSgxsnrUUgZ/gJhESKBucV/Zg4Pp1hakvvRgwpF6KR48MF2rasuqdoBb0LbLuebrthGHrSsKRLhg/Ob7BGs+009XbkC3XF62+8wl/95XeZ3bqFXcy5Wm/wIdJ7eHZxQ2Utl8trzk5f4vjsZbwPvPLKS9y/e8bThw+5WG3xeGbVgtD0xHHFEBJmdDg8UdWkaOlGiH1EGYeKjuQ8rRZ67zgEwkHFwfFdTu8YHj/6DO8iV5sNlxc/ICVo2wV37t3n5PiI2XyBOlecnBzhLlecWMM//oO/w9sPP+HR83Nmoea1xQkXoaeuLcZ6hmFLGw64d2i5Pddcffwx3/vL7/HBkyXH6pRTe4wbOkKs6G3LpjdsB03bJwwVTTPDuzVXy8T3//jP+ODpNc+fnWNONbd+802WK8vf/sY3+NpvvckHn675zr9IrJcrkgo0CfQYmAWD32xYXV6yOt8yrxoqC5tk8GNPSDCzCw4WM+a1wi87hu2al+/eJmD44JMHXK22KGugrkluQA2eeaupmpZUVRwczPjia6d85aU3uP3KfV5988sEJZNzs7eXlaNR7T/oKZ+be3E7rTFUWqNSIASHURHvPCmMpCDTtda0YpPthVFxdXnFH//xH/PRz34mU4cIStdcX655+3s/pN/2vPnlN3C9Z7Pp6LYDdd2CH6lrQ60TMXhG56kbmRDMZjNsBE+kKeg9KdMh1c6QIZ/PJa+paMtjjJnBYjIgFfAhcf78gnFwHB5es1zK8wIxN2cC6PWjpxtXUuxrDTHQaMTVNimgRWf82FiN95p2NpPQaB+IThomeb07+YYWqB9jrDS+YcDFUSYsJpKiZMiJUVjAaC0T1bxfBQI+qAw6mrxHylRjdJ6oZZKVgs8a3QjJTNMMnRtdmYEnlNFSCyQBg6pKSsS+7+n7fF2MyYDynq4/nxc2F6YhpZx7x5TL5UuRLNjjzpwMj/Weuq44ms1w3tENXc62DQyjw6WEthXJypk7hJ5uCJQMt1CiU+ilCXIjSst+DhncVXItctcl9yJlil2SulBOMFHIl7nRi/6Hu/VVgM/iiiCntUYhZjd2b38vzoUUhlTa6cnKue0zeFAatJADm0sTUv5dCrtpltUGneMaUm7MS85nOcInU448hdTZpKe8q3350P5EsWwQOexh+h7l//djL8rXa4UAlJPMPg8U9mqN0oyiytRzL/svv3aZRjpUlIlvyrVrSkIhjTHKe8jaTfHfi7sGan8Qo0rzuEcRDRGd5P6a/C5TYnICjzFLdHK9twvklmcmZJC1mMWhldRY+Zrv+yZO1zmR9agj4zCKoi9TOl1liUPNuDXoiSH4yz9+LZq2aYrwN7zYlBeArqTwimURa8SVpazQgvQoNT0wkBs8zVSgKZWyOJC8aPLPIU0oiDgFygIJIWR++ucL2hcbkLJArLWE6MRitmlkUXuZ6omzjs7UV5kQLWYzDg8PWSzmVFUFZCpjhmamZjHsIVcpTTlwEvK5c7kJQUbu5eEMMU05LsWxqPDMhRonSFcRPaeYD+aYiFYyZkpTZ3TCWmnerNWEuGugVL5X3gVS9EQlBh1hb9qxf/12C0C2PfL2R7Y/t0KgF0SwLHg3TpMhUsJqTVtVVFrcKlP0+HHAJTncvYJx7PCjY7leYv1Il8DZSg6E0f8CQjhNfctGk6msaq+wUSRpnLSi26xYXV+J+1y3oduOWBUkJDYqdL5XYZTjcT5rCX0PwXH75IjrkxM2vZjVpJRAS+MvoZmykWgyNXhqguPkujRmNymjBb2qrMVhSE7hMkUA9jaWvInuAezT+i3rch9Nmyaj+fOSz5O0znpS0QtUVoCDECNhdAzjyNZ7nl5csjxfUgWF22y5d3pEfeeUk/kBL929y7OLa9A19dmtfLCOjK7P1/KG2ewQYxtiTGJd3dQMTige4+BYzFvGfuSjDz/ihz98h5dfuo9CELxxHLhZLqcDJoRA1w94H1ksJvePbAedXmjiUBpjK2zT4JNQeI010ojNTnHOYNsZ1WyO6baiL0mGEIXyW89mVLOGuF3Te8/WOY6NYT6fE2pDs5hhX3mFB+seFwNjCvR+pBs6UghUymZt2JzF0SkHx6ckAr3vMURsRggPZjNcHAnRo2JgPYhQOyZNNZtxdihuZp896/B2QdSHzOoFdRh48O473DluMbVmvRywtQYsSRnW3YaToxPmixlf/vJXOT++wjnHf/1f/ZfcOzngf/yn/wNf//rL/OAnP+Xh06eoZBmDZdMlOr8hKoWdQTKWMAbidkswoMNISI7gPX3XUUXLpnPcrDsWi5aTW3dYrzeMmw03Vzc8fXrJF1+LDEPk3r3bfP3r32B5c81vffU+f/7ttzk0mjfPznjlYMbl0wE7bDgNI4+eP+DW6atoNUpDlAb0uGJ5sWX+2ivcXA8MvaJHca43pOjAK5SqSXpG10f6bcfge3RMGK3Zdolv/eU7nG8c9w4bGm0Y1lt836HjS9w5PeWTz24Yhy4XpYmqqpi1CmrLyd0v8tpXXucrv/EbvP3D73O5vIYBVGXRlWF2cMjpwZzu5oKLB59SETN4CFVlGceBeXPInTu3wY14teILr9zl9OyUw9u3mS1a/vP/9I+4iC1Pr685PD2dYhnM3rO+X1DsITpEEr1zoAxx8Dx/9JgnTx5x/nhJd7XkzTdn3Cxv+Pl77/L86ROCOcZ72Zt1lor+5bf/Ne+99y4mT5iishm0q3CD40c/eJfNSp0gBgAAIABJREFUquPLX/oK682A9/JcR0a8cyidsEAVI8oFNl1HN3rGGKU4NkL/j4UKmMHKUvx6X4xD1NS0ydvTBDzjGKfnPITA+fmlTHu0oW1nqAzGuRDxOqLrGcrWAvAp0akGFSAYYjYWsUajshxA6UwzS3nikmmQSoHWOei3UKFSphIHOfO10igr57LKZ2KIDjf2YO2ko1Na3KJjUoRUHIOjFPAxEVIgavK0bTcP0C+ctarU1vJ5kjMtKVnrBXD1mYZmrckBwZq6rl4AtEVflZs27+XMzw1SJAecg2S9GiM04xDxTii12hoWbQujYrPdoog4JxO5qGUiGYNoflJK9H6cwOtcoouGKSmsItMF0+Qsao2emBZS9zD9v2jYinlFAfbL+9qB5BNbZUJxS2+iJN4na6WVloZXaz2BhPmbZalF1l6lbPCS69kCHJePFOPnAs3zJBBpvqpaoh/0XhNWKIpl0mWyuYVSenI4tUZPNY4tDqB7jeGL9VAU+vDnGrbP1wjl2sZsohLLhqKyDGN6U8L+0UbnabEWynPwBDcyDj3ej4xezK2wDaZqci0Up/rfy3g7049Fp+czgCDTsSBDmRjx+ecW9pzSOY5A/5IaXpf6PFJM2kqG8/4w
SSumSd3ebGnXxpSpX/m+5X+iNN4xepSPBKtFR6oTRM3nXs4vfPxaNG2wmx4V446pU0YoADpTBnfDt90EKuYuuNiyTyYJ7JAArcUS34uDxIR6KJXD9PL30nsTi9KE7H98Xoj6+YVcDg+lds5R+4gKKQlNRe0y5ipjmTUtla0m1EibPGpXeYMvWUgpP8RqN3GkNBepoDkpN2oxZ+gIambykS1WwnKtQ5ImQMP0fsuvHUppRFCbR95tXTFrKpZjxzB2kB8kpeUgqqxQ2MZiYV4QonLnpmu417ADSlJFy5UGJAgy+v1paKGFqqlp00ClNZXWeKWI3uFGEXhuhp6mrkUvZwKbvocQ2PjIYAxbl7OkQpQpRzalKHa05ABE2Rt21rrFCjr4keAG/DiwWS3ZrDeSReV6jA7UtpZmK29eGgHb3NAzqw2r6ytu377Fl1//Ij/5+SeMyu/QuYwMRURfEdUuEnQCEkzRamSXsXzA+hCmOALhdYNOO3qxUBrTZF4zIXBK5YnsXuOWN6Vi/z/Z38ZIyn6++0AA5VdGkz0whCBhlS5hk6LVls31DecXz/HjCLkgyLcYayxN3ZAOj+k2G/p+QKlIt+3ou45m1sjkNHqsUrz68j3u3T7jna6TrBWdaK3GJlAxsl6tpjVtclaPUlknmw9RASkiIYyMbsBGMfeJCIWqH0ac90JDrhpO7n2RtNRsVtfcf/0NNgc3XD78FDDMF7d44/XXWQ4eQs96HOjWW5rG443GJNj2HVXbcHJ8zLO6IimwTYPyRvIhU8IqeW6Pjs6oZnMeP7/g4LCSWA2lST4yaxtIgfV6KTSrELEaxiAUw3Ze47slh7dPOLvzMo/PO5KZicvJsCWOHV1QrPuObnQsMFRGvodtWghS0HSbLccHR3zw85/z8LMHPPukx2jP3/u7/w7trOH/+D+/ydHhCdXtO8wP5vR9x9XNNSmKbmgMjug9PiVMHBHzhJFuHbFBYdtDVpuOpMCoRD96bDMH1dCPiW4IXFyusPUFP3vvA1Bi+KOBjz7+kB+98zbV9YpTFzmdW5oKTL8irC6w2qGAKvWE4Zqmsczbinsv3+bZ5prtKnDrzgFqsyL5Nc5ZHjz6gJ9/dIqLaz78+BOUtYw+8sGnD1lvzunWI2l2QsUZ73zvr7nuW77+5h3wEaMChkRb1XTjQF1VvPrmKyyvN9TW8mR5wds//g71wuLXnq6/IinHdgh0z5+wWm8wYcStL7HJkVJiOwZIhvliwcHBAQeHh1xYTVCRy+UVq2HDHRKzTcPhwZx//id/wTf/vz/hlP+CL331D9D2RdrNfkGxYxLkP9OiW9Vac+tYJpAPPnjEp599zGzxhxwcn/H4/JqPHh9ytRX6Hwkqo7m6fM5fv/1dNqsVFs92s2UMiehTPtMMo3M8e3ZNUz/l8mrFrG5kupESMXqs3gFCo3d0Xcdy0+V9UIO1oPRedpfeWf7nvTHGODk8OudQSuHUOO1/pfAsn3s8VVXRNg0xBYIfSSnbnKMz3Vn2zeAdipFgqzyhKeZOcmaVbEzRtGXb85wblqJo8GNKRC8XvaoaFJEUPWRHO6kb5Fao3GgZI/CdJWYBLkKTU6K1TjFXk8Sp0BStskYZ+RKb646kdvokrZWEXGdAu6lr2qYBZL+vqipPf9wvFOz7Z/p+rRNCELkHu0mRhqwhlCYtAt4lVPD0bqQeRpFa5MlpEDenzDKRex/z9DOV9auZGqpd8xJwwWNtVrJpkxlY2e04n2+SRcmUj5WmpihmY7udId6u7su12X7DisrN3R4rhkRdG5SqcM7n7yWZcWSJRYyRaMyeO2Z6YTJmrc2fh+lexViongIEqxQxeRBg9C5QGnYxGGQabswP6nRuT4Dsi/dy/89irrP2Pz5fBxea504DKCCCgE06m54pCsUwhSg+AlWe9KbI0HV06zXOD6QUsomLfI3O9FmlxFwm5ty+wjIr/YPkD8prNEq8AVIGvGRIp7JJeq7t92SEEiCf3WUzEF1qmWLHP/UE8gWi48tr3Wc93tTITT3MbosVUEGgEpM3XJ1EC6vza+TfCk2bUlOBWrrZydY389WhTHB8XlSlgFbTBlm0NWWkrSgOkrJQi8hRLNOzzo09h6HSBOjd4VZGxoUbv3859x+u8gCWTasseK1k0ZSRdyyZaTHuGoL85nZc4lxQE1BmjxdcejSYcjeKu2GUqjwvajMhNsWuvBTfSisMRrQ2aTc9idFPVINCrSvj5zI5iUlhtWVeN1TaCOLn5OtMZWGAqqqwtkIhVImkS/Pnpns03St2086Usg6r6ImQa+RilAlIkkPRVjW2rvFOppYxiBOZNWIVHOpaqBEqm8sosRnW2lDNZCLS+8AQHBerNT4bsqTpOpWGeteokK9/iCHz+fMmpBKr5ZL33/8xF+fnrG622ewDrEkQI24csNpitEWprOnJNJ2x7wX3HUbOTk6ozMPc6EwjLRnRZ8Ai+Kw70zsEsVgLK+2njbXoJQsNQKiRhhhGdNaqKS2kALnU+6hiPrTKprO/We+hakUkrPKfa6VRsdAvZM34IBPWkOSg9j6iwq45TCFwMGv5yuuv8fOPP2XwiWHw+BTQVlGZisXJGX0zB6XxIVHXFYvFjEonXHLoOHJ2dpd/9+/8IV9+84skv+ELr9zGjYH+juPxo3NGH18o3KqqYpHNAmIM6GxGIs+0m/YIbcQZM6KJylDliVpSipAUR0dnrLo1BwcLvv6N3+X6wWO+++QxIVq0tdw9uw3LFd4brLvFRlmag8Oc7TUyONH3tQcNtqrAaKLWVO0cITMnUnBSeCYYA7ikWW07Zo1CR6h0lUPcA24ccONA8KCNUEN98JwezEi+Z7tasulqfNLYqoWU2NxccnX5hLWDjW6IaA6qxEv3TtHR0zSWxXzG0ycXPPjkI776G1+DOPK//S//Eyb1fOmNV/jks/d5+OgjZrOK2cEp86NbmKrGbDesu0RKHm0sUfUkLfRPbTIiXAld1Q1b6m5Ns50z+JG+k8y5+6/cpesdz6/WvP39n9IsTlkNnvc//pS7pwcYLIcHx1ytr5mdHPH3vvo7nATL0VFLM69pU2BO5N6dI8blmto42oXl9d98g/tvfpGvff1rfPDZX2NoiSRmTYXvRmwKHB0dcnBwyAfv/Yyf/fTHoA191KwcqOjQ44pxY5jVX2BWL/jpX73D//PHG37w/R/gq5ax7wmjRyfDm298iS/cu8f33/4rlusVdfsGyq/50hfuolXg4ulnDNGw6nuGTc/q4hITHI3yqDCKJbSuSLalrmoAnjx5gmlq7r7xRfrVFdEYrjerbMykeP3VV7l7dMSBMRjvmZykf0lNIGJ9OR8CEVdcj7Xh9uktaqv56OgzPkI0ZQfzOeIA6vE+kfoOoxIuOL77r/+M93/yI4gOrSVL1PmA1RbRkkMksO4Gfvb+BzTVjLsnZ7SV4vxJj3MD3TjQVA1qdOhcF7RNA8YStWa1HUSP5gJ1Vb9wFu+Hae/H8OjMxADJEizTBZEjCB2t6zratqWqDFVVU+uKxfEZ694TTcUwipmRT5GjkyPu3T4
BEl4wTSprqOsapRRnt2+jtaUfPY+fPN+dwxmINVqo3rbK7JMUhKmQz/VEwlgrJipKM29bQGiGJHkPtqqI2wGra8Dis25H7l3Ofp01zGcNNtM6NT5XhwpTyd43DAMFVlVaU9c1Jk+KSuEvIcO1ZJxWole11k6NzDg6YoKmath2PTHJnl9yYrWpcpauAGfSaCbZK4wmacOm28r5WdV5T/a0s4ZkKoKyuCRFd4xZb5idB0vVoJVFpZ3mO6SAj9LQ2apBUctkMgZi8NOaj2Sq+TSzk+sjbpniqV0otgXuKFEPWktOX4jifGuyWZ7WYKzKQeGWFLNzZf6++7q2fev+wuqSaytwQEqGKscrODcQQm42VT6ncwMhruJyhmtlcxNYwHzI1LQ8nVa7Omnv55d1KuwtAQnK65zW8N7vEwiCyiD6LtM4xERxrC4ynRQibhixM4sC3Dgybjf0mzWkMMlcdJK1gxVdpjGWpBV123C7vs3oxeilyIukEUsvPPsh7PYA7+KUryjymmyUouRnpTxIMdrIhDjLhApQX3qUphLWmxjA7eompSVGYP+efh7ImKKzcnNu1a55S6lAEf8WNG0vvsEytfrFf5PyTFJrLcYFuSEq49FSPKaYsrtOyuNPYBp5w87xZleTTqhBuUlk3VqML7xGv/f5/sRh5yy066u11iQ5o0SEqBXE/MCzQyfkX5ORhKyVSiobrKTceAlVUmf0JBWObijTtEKVzA9oQRALIpPSZBGbhz4oI/EDVsnhoRQTja40DNZaqrrCeXHM0dpQm4ba1CQnr9NoQ4hjRiO9aGhMlbVtdtIRlLllmZpOjfJ0M3ZDmjT50WvJf0qgtGE+OyAlzTgGXEhoAyHA0Duq3ByhA0qbbDsPm37E+EC7WBBDovOR7ejZdgPVfJHDn3eoSEHg8g2dGt7dOsybbnBcXDzj3Z/+BDeMVKYmuIy01AalZWQfTaKywl1OGaEJ3kueTIKh63IAthxuKPXCZdmHasp6lenQDriY1p7K6yubgIQQGAcnm3p+U7qM/PO/lfe7m3SrHF9R3v+LLyY/n3lzC0rttpkM3e9vOwqNGIcqlDZoSz5EhTK2vVwxdj1nR0c8fPyMbrVhNpvRNA2zxTF3bp/RdR2r9Zp+9Iyjl4OwGHHEwO/+9t/i9373GxydHhJDx3p5yXK55PXffZO/+Pb3ePzsgogUIgU8+f+pe5Mny7L7vu9zpju8IefKGruru9ENNAACJCiCEAeJEo0QLYmWIxQ21/LG/4AX1p/grbxxhBcO22E5bIXkSbRFWxxEgSQ4giSIGeip5sqsrMx8793xTF6cc19mNSHKDm/oG5Gd3dVZ+aZzz/n9ft9Jxmw7H6aBxpXpUdyKyAW9DVxu2uTOWC9SAxcFSmp25nP8oeKTb90i/uRn+fX/7Zf5g6/8Fl4UnF+ucH2PUaB0gZzNU7EtYIghafNCQESoipKyKrHRM0ZJO8IwJIes6EecDWy6lsb2SOUwykI0lEVNWc6QStMOAza7k01rRakUJh4iKG2S4QCwXl8AFlPMcMHSdg19EIzSE5G0g8XuJSTT9hanTHK46lo2q5cQRk5OnxFDy/5hyR9+7as8fHQCIhkClNIQKGj6Nc4nP93gbJpea8noXUqDQNIFEEXK3mv7huHZY+r5LkVZ04yOl6uWboxEKdj0nsfPXmC9B10iixnjKKgXB7CYM9s95AtvHdCfrLgIK4JWvPPmW/zI25/n/ScfcbPdsDfb5xNvLbn073ERLbeOj5lriW1HQh9YFBXlsuDo6A7373+Wb3/vGSePPkAGyc1bt1mNkWEz8CPvvEGh4PT5Y95++12ODm/wp9//Ad/73vd4+uSE/Xt38G5EyURVLpRm8+KUg6pgt5qjvGJPC/pxZF8q7pSCRy8aaifonSMWM6q6xjYXiBCw3jPYiLORIAxjdovEKHypQRlUYWhHB7Hlcr1hd2+f12/dZq8s8xYxFZx/viiI174HBDYPzyaGQWkKqroiSkE3jtQ+JnNYdzVtDm6kXZ3z9Q+/x8XZCceH+2iZjADKsmQ+W3Bx3mCKAhttiqyQmr3FDnfvv8HQXCKF5eL8GcGN6XcL2Nnb49Z8zsVqzeOnJwxjMsPQpkC7q9cUEm1kO3iESFXVBB/wfnP1KrNDRMzmZMGHxDbwHjuODH0HsUDrlLG5s5gzDJf0fYfKAbtGSMqyTpb1QlCWJd7aTCVMrIfU+BhcFCgtGUdHCGn/mlUlRVFc5etpg1MpgkAbk/W3UFc1hU4a8bqep709gvCexWwBUuaGMbl9WgIxeiKJRWAKw/7hDWZ1wTiMaBWRcSRkxsBk6tT2PUVVMpvNt6jabDbHu2SaMckvQrbWljknbdpTkwtmn4wlJp3ZtWw7IQRFUaT1JUIepEqU1pRlsa1PpFJp2OlSnieA1IZyNgNVsmp7rE1Nn9IapVPsQrSOwBXSFvHJVVQkFAwpUbqAaHJGHZkip6hmM/YP9hlmhhg9UqQhZsgND5OxW5aIqEyjtWNCgqSU9MNA09pEiywk2ogtDTVR93Kjmd1ZE+qW6sYGkcy5MjI6gQcTWhPze5WoqVdrOGbqn4xb1dnVVyShxdn5eGJcJWrhlPeqrgb+XGu+cqO+bcpeYaxd5RROX1doY2rcJn3YhFIOw4h1Id+H6TtFMvEhONzQ4+2IyEPgQBreh8wUQ7gt4MLkgC4ldV1jTJYxRJHNk3xmBGWH7Bi2DvwhS6KSJo1rKF16X5SUGJGYb0GACn7LigtcxSKkYX68qnFyvRiJ2fzGb/sFyO9zHgxEYq4pJyCHRA2FjPrKVwLXf9j1l6JpA8ivZbsQtk45QjLRGLeLaAttXyssISMkKaTReZ8bIfkKipCa64/ZmIp88FxDFraL8ToyFOMrC/Z6fsd1dG7LAZ9usHyFCU4V1zvS6fen55UaxfT4Ov/+NKmYRI7XohCuPS9iOnQSPepq0Wyb4VdeAxmhzLQSOd2gMQdMp800rWgYMqUpSpkWuydZS4c0WTBFRWgHhJCM1jFcNlSmYDZbIIopfDO5PE50iVcbgek/pk1t+nwmxDTdkBKDKQxHy118VFycvSD0PcRAPzgkPToFhSQRdohJE6RKZFkQVMHZxYpV2zKGSDsMLKs6TYfiD59vfJzvnN7qCDJNHqvSsJhVNM5C8ElAS6Z8yKRfTNSJuLXcFQiqqqbUColECclisaSqK1brdouuxe2Eh20jjrgaIHjvET7RCFT+zMkb1jQx9N7TbBp8tg6eaJphi7KRN9npkMgv8tq7MW3J1/98ixxfm7ale21qfKdNSaCEAj9pGhQuRlwMqQgMgeePH1PXC+baYJYL9vb3qMqCstTczE2b1gnxElKitdhmwfWdpe0GrPV4F9hsOo5v3uVzn/8C3sGdu88YHKybdrv2RKbSzucLhtExDAnJIG/MwaeMooBkGB0vLzYMY2C+rAgxvYvBBzbnZzz58BFny4pYwuMnTxlsxIaCdoiMXUMQDlVWRKWRZUnXdziRLO195+mahhgC+wf7eCE4v2xZrTZIFMt5jdERZQIuWC7OG7QWzM
pIZSRWQWstsWlYjY627xPFMufKCRlZzhbMqxkBGF2kaVacv3iCVJIg5vTOsxlGLIKgkj7G4RmtSy6XzZp+cIx9wJiBBw8/4vzyjNm8oKxrnp+d8fjpM3Z2Dmn6Fj9KXLGL1J6LzQbnHVpZgvfZjU/hhEEag64K1DgQvKPUBTvzHTaXDR7B7dff4Obtu1RFwerynBuH+xwc7KPqGqMkP/bFL6Giw8nA65/6HLP9GXu7N/n2H/8BXeM4WZ+zc7jLO/c/xxe++EWq7xgePfkQrZe4AMsbx3SVZmkM+yKy8Za+s8wW+7AoadYd7z86wUfJ/Xd+lA++83XaUSDLHW7MK+4cH/Hy8XuMXcc3vvFdPvvpkqMbh5w+fEIIAnJu48HeLperFR++/z5sGvbmM3aP9jk4OObu8V0ePXxCc3LKvcUOwtY8eH6OteBLlfYNkfRFUmtUFAxjSFqq4FEislgesX94hxePH4AokQrGsefy/Jx/58t/j0/ef51df5Gz8yxo9eeGMNM+J0Rq6xyRcaJO54aoMIbZbAZCsOl6Fss0/Seo9NnKETd09JtLzk+fMS8VCocfHVVR4LXOqLvntXuvEYTn2clzbhzd4q/99F9nXtZ88IPvUocFSt/AKEXbtLRNgw+BO3fu8iM/eoNf+43f5PmLlzjrGa1HyvT43l0NTZVSlGWJMYaDgwOcc1jrGO2wHYymojfteWKqL3JR5p1NLm7IhFo5RxxaQtulwlomhOj5yRlCSW4eH0H0dJsWoyV1PlNSDZAatZ3dHdq2o2vXFCqFGTfNBu+SNk0Klc6sGNEOjEnFd1XOUDLZ/RcmaZHGwRGCZVYvsd7i7JrgNghkQiayaDkV7Ylet246+r5jb2dJjGl2LIXAA5fNmsv1iqXcpVosk0FSgCiu3Divzi9HURQUZZGQxdzghQjGm22DNv0drTVFURBCoKoqnHepuM2GJs45ohAZ3dSUWhOso9ls8NIR8MyXc+bLPUI09C6tIVUUGSEFO/YpPzQmkzEFhEyZVMpjTIUy5XbgFvOhGmOKc/HRU1YlxoCzPTG4rO93IFKwPGRzihCSRl0q5LwmRsHQD7RdSz/06EIzLxcsljVKZnZX/jspmmCSHxiEkAzDgB0tHWxZRyIPXX2OAwgxZHOxaw1GdhhUmVHlnQUlto1yytaN20F5jJPTuiLYH0a/jFsWyvT5TfXD5Cg61UKTnv06TVYIgYwCFSVBeaJOtMrejozNiLWWSEJex2EEPFYIZGGIziLzaxyGlG3n8nNSWuMFDGPL6EaEVihlkGoKY89mXekTSvV/2jmZMMKYy5er3Wyq5aYG+epnUszTVd08NdBCJBM5RXagnqqiXDdMxnFImdZOmEz1Uq3lQorSinHrhZ5r6Ygk5SuqSDICMv8/CNcGMgVs238xUSQnyCODZVtXl+g9IdvgIq+aqGnMf2WPOhWacUsPmeD8eO3w2kK+17rc643Y9DPXjUhe0apdu16BkVO3tJ3k55/YIl6paw9T15anCyEFKHP1+DINEZNgNRt1XL1PV6YrkynJVnN0/UVePfpVgZ+nDmT9U0JDNEon1z1dFCip6WwK6Byt5eGHj3n68Dn+lkKbkhBS/slWCG5t0h3kx043ud5Ob5LTZ2ourqsJk1OMzBMVtreeEJmjHRzWBjrbgzSU9YLRB0LfMwwpD6wuDQhBN4wQwagSpTW2G5C642y1YtU2mCq5nVnv8e4V0dz/o0tmo5Dj42M+9cl3eO9732dsLV5FnAv4kKhvUekUdBN8Wt95FmWtpdCJmuKcRytDXdfbz3FyAJ22mK2LVb4RJnHxNBXTkxg55slQiNjgGPqBzaZJrmKZzpGa4JCnPkDIBcy0Vnl1SLFFQK/9+cepEfHafZumjyJb+UY0GhlTPIEXELQgKMlid8Eb91/n/e98l8vLE8pqTl3X7FQVSiWOtxsG7NAhiZR1yXxRY4PFURJRFLOade+IekY3wtMXlxzdusdyf5+zswvMbIdudK+sfkEKed3Z2aFpO4YhUWzTWyuIHrwLjNbT9Z5NOxCEpijnKF2jTc3YD/zeV/4VX/3Kb+MqjasKzj54Qlkvef7ikk07sphVuP4S54Y0tSbQuTEdnLClY0khuHPnDo+fPOHPvvldhrbFWp9s1YcGKTRHx3u0zyx93yOHSN97ehMoZxUuRs5enrPuB6Q0WGdTaSAj0Sa0bB0CshREN1DrNITpbUMsDDY7gGqR6CxCGVyUOKFwUYILRKHonGP14pSilBwcH1EtZqw3a85ePsW6FaOFoAObYSBaRztuKNSA1GPKe9Q188U+pakxZYkoDLFrGd3A4c4ux7s7nDx9xvzGbV576xNUxiCINKsV87pGqjTpH4NnjII4es4lsLvHcueQudjhw0enrKLg5OySSlbE24pVPzKfVxwV8J2PPiK+FHzq4ICnTcNn3nqLd24c8KR7QVAFQVZcOossNM5IgqrYiMjT1QWtjMwPNHdu3WRnr+Lx957hbMfFqufBoxO6ocv5TOm+6JuW6BWv330N255yerlid37AnU/+CH/7P/gP+dzdO/zxH/0+T188pxzm2Krng2cXIAz9YOn6ESMi0af9QJsK4UcIySFWK7h/701+6m/8Lf6Xf/KPcf1AVRcImXSNPnpu371FeNEQZaLa/0VX8k1NVDMXk653+itCKYqixPrAquvZd4HRCZyHYB1CKWR0dOsLbLtGErDdhuADQ98yjDYh1KVGGMF63XD39df40k/+NMdHtxmalnuvv8bpE09ZCaK1NE2HVJqyqNjd3eON+/f59Lvv8vBXfx0fFTb29IOlMIaQqVzTMMmYRBsuimK7NxKhMDqzeURynctDmBivhpjeO4RXyd0wjjSrCxgaagKmLBgjdN7T28BoI90QEdGy3rSJhqgNvXMIlbVdwbG7t6SsC6Tw1EYTA7SbBh8jpigzoUdlPVxKo0/0PUWMEm8DfvSoQlEVNQhFYcqkowvghz7p3r2fKDsEEelax9OnNtmhS0U9q1nOStargXFsKasqacSkygMCmS350+CmMpq6rum6jnEckFLn8y9Ry1OAdOLqOedfsTWPU+OaWUlbmppzCU2cm7S2EJgiOWZHqVIknEgByVpLFssZZV3SDxOimxAuGxKK650l2DE9f1LaahZxg1CYoqIoaqLQ+QtCHLc0ysmcTUtDDJbRDTn/yyNY9/j0AAAgAElEQVSNAlI+cKKJpgxPkV9/0zRcXq5ZNy1N75CjQOtAWaW1HnFbB0ghDRKVB7kTVTJd0xD2+pma/tzjQqJxSilTbeWTxlUJgcmDcO9crqn8dkifGo1Eqyx82Mpc0nvuX2m8rl/Tf08NXLB+S/n13m/rjevNHQAuooKAQiNKQVEWRAGjvzKiEyIZ0znbIN2AnC1QeWBErludy+ADOc4gie6RevJqiOAdSpkEOCCS6/m1WvyqBr8CbX74npeMCH0ISJ8bUEVyb82tQ4Sc15g1j+GqmfPZ9Mj7FLEk1AQyAWFqogUmu9xPiFoUIpnrxJRXrEkaT12YrWPrv+n6S9O0pStbegJTC5wam
gyabq1Z47aJiyI1PVrrLVI1TRBiDutLSE+GN0XMMD/bJjASsj2syDVsuFawXt0AHxfZfpzje10APRW2W23RVNyyrYuvkDwpcmh4QiYmoWx63iSnndwEiRgzfe36u3R1RZI2bNL+sX3umSowvbZtajvbn/ExN1G5OZ20S0obbt25TQSGl567+/fYWewzP5rzo5/7MVYfrjjZbIj+qvGd3iutNVapNBmKWSPIRGHJcyfxcfQwZ9EIkaczKXxToLDjyHroEUJQVTMKIXGqIY7Jjjj4kLn3ic8dhCRESe89VVWgyxnKJrTT50nIhDL+sGv7HnKF5gohMlUiZE1ZikXw2qOlROLoxzHp+HyWqIvkmKSlASJd12H7gaLQtF3Dphvouj6hfvLKBngbgDk1WPk5TQfjhOpKaRN1Y/scIeQsnKEfc1OSKQTEvCGJbTeWcVauqKDXhcbbBXvtM8r5PTJk3V96jiG7lE7PbewH1heXeOsS7zxGrHeM3tJ0PVonwxHtFYVS9H1Ht0lOh/OdBbMi5RQpY5Leq9D40aLLktFrxiD56OkJ/9Mv/wrr9Tnf/NafsljWFIWm60a6tifYkUVV5Hs0bbzWpgZoej8ikenGixGcDXRtT9O2qXg2JbP5IkVZyFRQ7c5Kbt/YpzjcRx8dMd55k+Z0xYuXa0bbYbqW1eU5opox291BCJ20GiLpUrz3tG3H5eUlBwf7bNqGlxcbvLVUeJSSjGOLjQZTGnRh8G1HUBIhDVFoLtYbBu8gTz+HMWmMgvNEC2HwVFrRCViiEdHhxzU2epTeQ5aa2d5u2ht9dv5SJbqoEMpQLRaIKLEigNGsV5fIITC4gd5atKqoZEnTppy6ICRjcKlQtw1FMVCWAVMv0dUNlssD5vUusqgIRnOUIxaWhaHEMTMVYb5L2/WMQzIPCR7WXU/SYiSLbqkl2pQMMYKsGBrP6umKQRcMRaSsd9E9rC87Hjx7zgJHHSyx71LAu43ceO0NwrcfUnvLnlYoWdMxYz0O/PhP/CzHn/5JHp9d0l+eoWe3eeczP8Zs/xb9OPLWO+9yZAT/5H/955yeXXBx8T0eXT5gHBxaQtO0LOYzlvNd7t65Q7NRnDnLg26AF6fcevI+v/u7X+Mrv/m/E9yGfVuzUSD3l8QmoCSUxYw4dti+xXqHDwPaFFlv4VEyoIXieO+YZbFD4y4JY0CEwIuX5zx+8pjDG/vsH+0RpYfoQJTTSfvKPpdO2FzEANY7Jv+3tO/JhGwog48SHwWjjzgficEz9h3RW7rNiqHZUBiB9QmBsTmbURWGoi54fvqU+XLJ53708+wfHbBqN0Tr8ET2Dg6wvcKPA1IZbG4KJk357du3uHP7Nh8+fobWBcq++jquD5WmgnLroKdVCuQW4NwkaZAfOyfFlsngggAZ2awuEd5Sa4VWUBUVM10wyIIXm5amG1FoQog4H5IxSD7gtFGMqwHjDUVp2N/doRCSpm3QWjGvakxRU1Y1ZTVDao1UOp+LkdIYrA1oVaBnJo02pcYUNd5HpFDs7uyhZmmP7/sBG1Jm1ZjNn2zb42LAFCWrTUtdLWj6tO8sl7skkxKNUCm71oVIP1iMstTFjHo2Yy9Ghj6hdWVZUpaGtmtT0eoTrdoGjwb0lHOGSBlaWbtlg09RMbn4D7lJGa1lsCNGJ7MQbQxCKfwYqfTUZKefDd4xDj2x6zI6lVZsiJEgJZYhndUiIlVgUAFdGgbrUNoQg04bSj7rtDZMkpQwrTOn6YceZ0dUVAQx+RYYhEhItXOBrhu4vFzRNh3jkM43FyyD63Guxpt0b6QjUyJjcvSeZAve263N//UG6rqLpFQSJdS2Ad7m6E0FbIzJcMMYMFPcz1SPpppXa01ZCaSzybgHwzZwemKUcYVST9+NMVkvPyZ683y2bd6SVO3KBt97T7QB6cFKR1TZEj8j2FOdYmKxRRSdHRj6REfEh1QT2fSeWO9SFINIgI42JvtzBIbRMowOMm1bCY2WCqMUBE/0DsIVs2giUH+8WI4yrZO4rX9yfXO9iY1pT5mykgOeID1ikmE5h7PJBRkT8+cFApXWcM6v1FOub4yIkCjAk0TCR4eNERfjdg/+i66/VE3b9QIx6c8EIuHc2zdd5A+ceGVAwrXCNqEUIt/oCZ66bsahtCKEDEGTEJMYJ5gzQbpTLwXZQWZ7ZZ6yzBtSnqgmlFCByLkuIm1+MYbkKBOuWYinX5PXz2ToK9IURiYNilLJ6SnBqXkSmIPAoxCEkH9/dr30GeL3uTn0Im4zYbZzkJg6XJnVq8lqNVEwEz9dovICTu4hV26J77x9ny9/+ed59vwp3/rDb3N3tsv9dz/Jjc+/RvX6fR7+2Xd58fwx1qdmIHhwDHg/IkOiAShVEkKywFYliXaSQx6FSPEHPoRE08KghaJSaYK07hu6GNidLalkiShneAJ4izIVcrZAho4wdBTRUWmZeN5RUOiK58PAmR0o9Yy37r1JO4ycNWvW/QahDD4OBDVmjnPKshFC4rPTl7hGA5AiQvTp4PcODww+aXUkyXEvRpfCbrMF7fb3RHGFignBuvNorxGjQHUDnYsIFIGrz1IJINs4i4yG+hhBqkwTDoQwEqLHWZ83+3S4KCkxJjXMaboYttz3KKf7Iy8PkT5vGa7uI8GkMc3OpXlShMy5JXnTTtZoDmKiKjkrGe0ICKx1dMOIy7koIjiiS4L1J2cveXHynJejRyLwXcs4WubeYyPUyyVRG/zoaMcBjKIZIx5NHB1BBIKPvLx4zld+6zdo+56m65j3C8q6oCoLLpuGmRIQTEKRsiV3iMm4w5hEy9FG4WUgaI0UGqyltwPduMIOHbqoEXEkjBvW7YKmqlmg2Ds4olwsqKslXRHohxPcxRlCwrpds2obKqUZbZ9CpMnaiJhE0d0w8OzkOU3X0rYtbnQEa5EmZS4563BCoIWk1tCIZKiyGXqWyxlGJ73FMAz0w0BRzvFhoB1aKANRWKTZwWnB+08fMXRrTKWRPvDGvSPe/fSnMVonI7oQCHag6zZYB348BOH44OFj1peX7B0e8sftBjt0WOdpe0tB5M7RDocHh/TFgkdtpHMS11pYt4gd8HsVd27c5XB+m6KcIVWBLmosIHKEi+tb/ODY3z0gVoZ2WIEp0UVN0DrtgVJB1qzossB7Rze0xM6ivOCpF9z/whf57oPvcxwE9sUll02HeviY1/YCOjR84dOv8Vtf/zrtC00pZvzy7/w+v/P9B2zWgrE5w6vndLLiZttzNKw4qjzPT1aM5yve//oPqHYveeczn+LNN97mzZ/5MX7w9JRf+9WvcjlepkGQFMRCsu5aFvtz1GLOe48f0fUXvOw3dMOGs299ncEIirHg97/9HkeHCx53G0yt2L+xg9A9XTTM5kdIWfHy7AWuPQW/YlYoiArvE5Ly9NkT/vW//OfYZo3RFqUGCmCF4satBQcHc5w6JCoFwuWhXDoPpuJgmv7muxlBwPZdGv7lQaZEUhnBrFBosQCpGaPFBY/zLe3lhrPTZzSrFcFaEIZN1+VxSKJlKaPY3a2p6pKbt28Rx47T
xx9RmhlutMjg2Fku8IWk7xr2dw8IwfPy5SkhjPh+zd0bS37xb/0c/81//0/ZjC6hFn4Kn05DqOuNW4xXk3Rnc+5lTG66PmtkVEysBWQKbk71hE4GEsZg3YgpSrxODo0379yhXO5x0TmGk1O8bYluwAiTHSY9hEjjArPaMIw9rvfsLQ7y/p0GespIDo4O2MnGPVMhPZkbaK3ouj7F2GTbeu89Umt0SIwWpSW78zkqFIgY2IhIP44MPpklBamT+593WA8Xq4b5PLmx+qAZBo+UDu8dRkW0TJWJcw4bAtEoMIr5zoKiNEgpmNUVQkDftsTgUUoQhCaOaTishUDJpI0syprBuoxoiRw5ExOaFRXFrIAxn+VBoCMUpaLUhlFotKjAa4g6NW4xoqJHBI8I6bwVIkUiRJ/MTkKEMQZ8EHgXuVhBM444mTL+gkjPWaMQwSG9x49jOud1QVFLQpA4t6FpepquwVqLMiWmSFrHup4hdM18qQk0BBoGN2BEpFSJtaDyYDREv2WqIBzeR9arhnG0VJVhb3dOXV9p/lO0kcWNDhc1LvisJYwkp/6AQBGjT5pR4RK7TEhKFAUTQh6J3jEMXQofH8fsuOjRGhIVMNXCU0M0eRp4HA6HFIJZKVgsDHv7S5Y7+2hTgkxmdi4beIRpYBsgjIHetRSziAotOzNJaEtcH6kEKFPho0CEMUVqBIeznmawWwfn6KfBcEAYgReR0VuQligiIdhUSGuQMuJsi/RwZAylnhBKz+AkfVCMFLQxEqJEAwGHCANSl9iYHVrT6sjxKCnKQ8QRsCghMSENCYIKeJmGBdE7fLQELKUKFIs5yBQNFMnh4EJSGJOM+UJgs2koRosKiqHrsa5HGZFkPLqkmNU/vEHK11+epk2kYnFCFtIgYbKilykYcaJMZsqcyG8MsM0lk3BtisArgsrkFjdN1uKWH0wUBMeVxWiaO6eGLqNCaXA2URynSZ7Ywv9RyDSFkXkFZBpBgjTi9u/A1TQz066zTkYnp0glUSaASJalzqUQSinTDeF8xPuMGsrkZIiIOG/T4vfZ+j1T57ROELoyCqUSTz9kh7Ck2pmMKVSSDwbQObPNB0dVaI4PdtnfqfiTr73Hez/4Gn7/Ht95+Hn8UUH/8oRxWKW/qxIypqJCq0CSxQmEUGhVIIRGkgMYRUSISVcgMyoa8UJhypqlrNgtaiKecHnKy9UKomG3WtKOA2OwID1SjATXEEaB0ZLdcsb+rMTEgPIRI0uGQXLiPHYULKob3Lt3i0GCFY7gW95/9iEX3bdSjEKauW0/9wCpaRISSQo/DsGhU5oDQicEaHSOGF0SwzuHhqRB8QE/UYxIzkkhBqROlNGU4xPxdkChkYhk1CGvHJtkRt+iSFqEKGL6Dkid4Hyl8jQqmiQeHx1CQ1UWVLMapSU4n0S5MVE8Qi5sQm7AtNTJNCIm1UGM/qpZixORNaHfbhxTDphWoFS2sY4YJSkLgxIpA0UpTT2bJ4pq9OjoMFIipWbVWR6cnsPOPhHB+vQF9WyGFfDkxQvWQ8+TF2fossTGyOtvv8PnX/8ETT/w8MlDum4N3lIWM0LwdM0Kj0HqGh8E63WH1gLpPHYYQSWbcBs8CokdRxb1gju3bvHRk8dcrNcMMWIHy9BsGMYNvU30xohkdfECgeMDNdB1h9w6srxct3C5YX56yeMfvI/rOuQwoOcl7CygX3G53iCDoSwq7Djmhm1g7JOea7CW5vkzuqZBEdEy7QnOg3USaSTSO2ZaMK8Um3Zk9B4bHQrNFEofQqAoDYjkkheEBSVpxhUiwGazYlZIZmWFjILPvv0af//v/A3efOs+0Y54OyKFZehe8ujBObdvHBFDz//11d+muzjjBw8afq9bIzwIU0IUVEoy8z3/0S/9e3zrvOG/+hdfhaLE+IbaKxgcLYbXP/EWv/izP89yuSQKRT+OnL0859nzU7QpWO7sECN0fc/F+hJpDEVVo3SNRyFUyehT3o7Umm7oWV1esFvdYHcxp64K/E7N5fOPmEnF4c07bFTB3ETUcM6tvVs8eHjO4WKXfe3ZR2BP1tz/9Gfxv/kNVm2HqzxajRjjefjhNzl/+V3+5s/9NGJnjYmnPPnwgsXODfbnjrOTN3njjc8y9A2ubxCAwTBql4rCIDg533D6coWUkm5osXJECslcCfaiZt02FKKka+Gib6lcQk1eu3dMUDNUfY+dg/s8ff6EMD5hphuK4Bhazxg0TgvGccWTh9/EqJKDe3u8+9Yxx4tjdl//JLN5SfQWt/NJrFhRkVzYpvM2W+Bud7xJ06WIlCLlpPnMyJYRChkowsCi2CWSDJ9SsePpujWnJ88SWqEMRJkztVKyRHJ9K5jXyRRFOAdDT2kK6DfEYUw0vPWag709lrM5wVlQ4MLAZnPOMNRoArcO5nz2nfv86bcfMAJRTHlg0+uaNOpp/yEEhNDETCVM9cFkR56HsFsdGIliHkVyIhQGGy0uQu8cVW3olSC4kW7oid4icMQwIHzBGH0qhr2ldxLtEtLUdQ03Do7p/EjEY6Nl9BbrHcgrgw8tdc5njYyDZxxGmralrEpKXRJVMlCQmoTA+MnRMKAyE6c2mmpes2PmWFXRWk/TNgkNjY51OzI6gVI1zkak8AQ74t1ICAPazFKtJRVBSzAKGTXKe6q6oiwMztrUsOX33EeRGoBIsk2PycCjqGscI67rGYNDWIvwjiAsQnmKmURqBa1H+vQVg0Vn+ie+oG8jne1phx5nLYZAJSKlTtEuxETnFSINI/ts6OGFQShDRCOUzjKatNpDri2j89iuZ3OxolzUBAqULAgy4KPFDpauCbSDRReSqjZZY+yYzecEBN53eC8h+oTElgVayvR5ZPmJjyExh0Qk4OnykE6IOfXuElOU09IlhEDbJF1pqUoQCuvG7AycDN+kSHl6ZWFQSmQ0M2nzZcx+jyKlEhI90Y0ENxC9IxJwXjBagVGalFmcGTci3atBpFip2WLG/aMjlJTs7O1zfPseg4s8O33J+abBxiRTQkhkVaKqikIZylCzcxDYNYbNo0uGS0MfI3Fc0Q8Glx8syqSjHobA4ATWkkxbfDL0CMHjB4csXM69S9mrQkQWixmLmUFLwf7uDe7dusXbN47YnxVIGehHy6oLnG0EF4NmNUQ2fcuLs2c8e/ge9rKDIJCywhMwpYRgiWRnU2GRyqFUQAtNgUDlpi31J2lwIESkrBWmFMx3FjhdpiB4oeiGAakNVVUikUTnicJAiBResYkXdINEF0k3rMuaoq7+wlbpL0/T9kOuiT43MbYmfc1W6HqNO70tKCcaG2yRuOuON1fWpgkQnbJD0uO9qmEL4TpFjHzDTyOTiSomM3Q/xRBM+RTX3HVepQxfjTYnJlqGZJVMqEjiPCe4PgSRUbvc8GXHK6Em4XRuMIXIKJfHW7c1IxFCJAfHIJNINIKNE1KS6Zq5gYLE64/RM+WgBR/48IOPAM9733+PddPyYfuI/+Gf/c+Uv1awGhre/957ROuRMVm4SqXSRpxt5b13jKPNIm6YLGQFCeGTIjnMJTM0zbxesKP
nLJUh4hjEHm3f025aDvcXLBdzmrHF2oEYFc5L3Ai1MihVIKRGy0BRSKLQ1BTs6oJm1XDy8gWXAYqq4sbukrt370Ml+ODJe8Sh21IyfUbYtpVAFBmNiCid6QAiTSSHYcxGHymHp6gqDAIvVXJuTJBYbpjI6w6ij0QZCT4yDAN1WQNhG/gY85qUJOpqjBPtVOchQjbsmSz0t+M8tvTE6/bXE39aXPu5LTWQTF0VYWvwM1GP47RW4yTGTjpAZy0uUz9ROukxdKJRGZ00UInukv8fV5mDMYI2FaaaMa6bPIQxFIXGjyObzQbnLP3QMVvucHzrDvO65sbRES5G5vOaRw8/4vLFKTomRE86z6yeo3xCJlUI3D2+xeXpizTBsyPDONC2HUZqooe9nchbn3iLx6cnOOfZbBrOtECZSIwjXdcy2hEdNF3TpkPDj7TNJVq/ybpZ016uWNYzzl6+IIwjb7/zNn/lp77ER6fPePQbv8rL1RluOEdLjdEKkbyCk4HDdMD6nD4kE23Xu6Qpcd6neA2RtYvGgOixzjJah5rcIY2GrmfoB4RUeUrfMZ/NgMCNg31+6oufZ3X+ghfPHjGsR2oJYWixmxWl0SlTSCkiu+gZLA7vUmnP3/33b3Py+EP+6D//7/AkjcAwDGkdu5H79+/zpb/207z/f/4r2uYSpMKEAaUjRal5ub4kmJGf+fIXWS6W9HZk07RIqWiaHlOU1PM5L87OOHt5gTZLjm/exhQmaWOFSKicElhnQUNnW77+R3/IFw7v8vrNWxRlyXp1Tv/h1/l3v/wljg73+IOv/Cb37hzz9rufwvaXPP3GV2gv1/y9X/xFPvszn8csLPtVwbOf+ykevHeCawe0HNHzXcr9Y77zwTeo5iNVjJS3NeNFoBtP+ehhwz/7x4/49tfu8+F3voXOe4O1ARMkYXDIoJktFxgladsG38Od194geofsemadZXV2yk4c8aMDJRldxKqaL/zkX+Xm4RJdHfDa25/BzBSzoqN98Zz2bMOjByecXJ7TiYaL1Tmnj3c4e9ZRzgqKegGi4rvffcDqr36e97/9LW79xGu8c3OZT8ar+BiRSZIipn1NqrSWKqk4ms8xMhWYAQ3CUdaKe3ePuXV8wIinlIFSkFyCTcXOzj7zYsazx08Y+m47nIwiJmt5ZKJMOkcMiq4dGLszJDCvK5xVdG3Dk2bD/ddfY7aY0fYt8/kCOzT0Y4/0DucCn373XX7w0QtWQ5P2rqx/mXa0kM9sqTRCxmt66mkAy194JVOGdL/GzPbRUuCGnmePHhCjpBs9TTdQ1wppAlgBVTLxElLRbjYUSKJz/NzP/yy79RFf/de/nc/1mOmUPrk8miJRrshOydlCwVrP+fkFi50lo/fM5zOsGxFRUBUF1rmU8RYj4zgQpGE2n7HYP0JUSzYWRNMSRHZVdC1GSZxSjDZR1rRKdM71ekM1m7HQi6Q/1MnwQWtNtJ5+6LF9T5U1N965pKeKKYickBAbKQKJmyHzq8iHTYiJahhDjjSKSVcoBTlClH4YCHbAqYT7jm5kPfZ0LgWdqxAo8FQCaiHR0adVbAqCEoxAkIYxgBMSj8bIAhk1GokW6UxNsqQ05h8Gy8vzS3Tfc3DjCGMkTduxblrcaHPNmfSC1o7ZmTytrXazSXo/O2YaZTo4p5iEZEICkwtkjMlJUsRIcA47DFhboQq1lfI4n+46oSWmKFCqQI6CEBzBp1qvLBSFVhQmoTceh0dgVWQkIKJPtSlcNXIBdEggQEreuKqVpZj07mCMpihLFjs7HB0d8ObxDZSQNO3AkwcPOXlxyfm6YdMP2OCSqY3RqKqgD47SFkituNVXFHuOQhhKJdMw0nnsMDAKiNJTxpRZKsaIGsGPAucjY8hUQZl0nm4ckFiE1mhp2FnucLC/y+3bR9w4OuDo8AarTc+fbdZoPySXVCUIRcG4X+HFHCVLdmRgKe/xqe5dTn73D/jge49pnaK1FsuYfJrk1Hek6CeRS3Yfkp9EYrZdZS0XWjGragIdtutxMiDkQFQKby1RmcSACyGZwAwjIkScTwYyMbqEUAaIXuPtx6zzP3b9W5s2IcRrwH8L3MzP/b+MMf4jIcQB8D8CbwAfAr8UYzwXqSP6R8DfAVrgH8QYv/Zve5w//7jTcspXLjTj9R+K+c9/yAZ8vdma/v3jhiHb/zd9QNdElVNzd910AbgyO4kQRKJl+JB1cFuEJm7pnfnVwFVryOQmOFmVwpWu68+LQtM/0oQh50jkr4RoB6JP9DKjFFqq5GDo/NZG1jkHThCCxIeIJeKDI8aQpk9yev5Zr+VtRhcDXee5uHjJBx+8xzD2+NHjY8/v/8mf4sWYJqkBlKwT/Y2QslMSjp/pp6nQl0JmxDA1jCFPBmM+3VXIAZEih5H6JBBezuYsZjNWmxUxBKpC4lEEJxi9oDBzal1QCoc2gYAjiIgVgWZscTYwIwk/vfC0oWd90SA3G+4f7XF8eMju/j6b5wNRXOXSifw8MjCYaARxQnPTBjzaxK0WKrl/1UpTFBUBQSU03ng66/DO5entFZVX6ASjh0wXnDJZhPghyKyfio3UAMZrN0L6rGI+gOPW9CX6FEBpXeKJS+9RiRe5XafTRCHpLvNaUokiJKRIQuDgk/g3r0eZRef9MDCOIMpkWCNNicXR9T3umq2yNiIPBcKU8bmddu/u7jMOkb7rOTg8ZllIxqFhNq8xxnB4dMjO/h7KFBijqaqCgGB35026VcvmxTmL2Q5N2DAvZ9y4dYeyntG1a5p1z+F8CW3P6dkZbhgw2lAYw9ANyJhozYv5kr7vcc7Rdi2VkfR9jTESo4ukQwwJtS90soW2dsT5kZOTZ5w9fc6yqvGd5ejoiNuvv85rn/gExY0Dll/7I54+OyfIlKEUrUPJSFUmq+wYHCJGnB0ZxpHCJKQs+lTQOZ8E2Gm4kqbfPkbarqcbBoxJBaIxBUppnLMJ6M9mDDHvWTvzGZ/91Cf55tc3XBAJSvDgg/f51V8ZmNUld27f5u7du5SLOevB8+hZSz07ZndH0QrJ7sEt2t7T9kPKlSkyahEjOzs7qEJzeX7G6uyE0TdUbc+t1474u3//F/iv/49/yg+++S2ef/g+qyqZ7cxmc8r5kvm85uL8kvff+z5Kal4/vslifx8hB4S0Wf/n8HkS7IVj06x5/P53qWzD7vJdimKOVJr5zhGLvTvs3bzP0fEBe3ff5+D+MfN7N1g/HfHC8Obb7/Ijf/3LmN2SKDuO3jjgl/7jH4c4S+eLG/nON7/JV3/nt/nku3N0dRMTF/zEz/4CzSrwrT/5NjOjOT3Z8Csf/CbPLgbM7hFFYWhWl0QviVpjhaA+PuTo8IiL83PEi+e8e3vO2/fu8vzJGXWt+U6zZm3HFCJeCOpCcHs/8BOfPUaJwG/8zq+DOeHnf+HnqIygqWq6WZFoYbOBdewoFjXz2T7LnYFVf8p7HzwmtieY1+2+ShgAACAASURBVN+m3cCTh2tufMaiQpGKZymuHZdX/64yCiUCFEIR2wbGDmmWCJH0cKaAH/8rP8pyMeeysSjnUM
EhokTpkjc/8UnGpuXRoyeMWe8bXCpmdxcLlrt7HB3fwVnLcu+QSmuid9ghuUSGcaSuCsZ+4PL8JeNQo4yiqmYEN2BHz3JW0fcjbdehTcqxkioFX0/W6JPT4ZXTc2LBTLlar5gs/RuvnAGqUtB79A6lJIVWxGAJARZlCT4NcUShUVpjZhVFYehjoDQGvGN3ueBzn/k0f/J730BEULJAa48QNpnWCIWPJGdIa/MwY0PfWvoumQtJqRlHz9HxktBseHn2kkJJhq5nNptR13NclARlQBfoskpF42BxLqEWKaPNUxmBU5kqmKl7IOhHy+g8NkR6m/LNRBSJdh+TgUjbt9Rl0oG54FPzgSCBWzkuJvgt5S41Gp5pTi4ROVxbZJ11ouE7b7FegEi5o9qAs5amH/EkDXAIAR0jGkEhRBoYpCVN1AqrFGiTqG2ywMgCrzS6LKmKGQUq1RtS4bVB5rxSITTOQ7NqMLMZSmkuVmvaZoPKGa9VWWKdS86HMjloRu+zXjNT+gQ4IbAenAdVJP1bnKQxIuTaUWJ0ClD3PjAMI5qk3RMynbuqKDBKYlSRmGBeMZ/NcIXBjQPGpAiAxI66Qq2cEDgBhlfLYpEbtxATkp6eR3YlJyG8QoLWhqMbNzg+Pqac1UgBly/XXF5ecvLiJf1gGW1IUT/ZSE5KQWkiRaaFau+wwbOodrm5U+OC4lIlh0jvPW6wOAVCgxERFQLCgbSgvcCHrHnLsQsmJHTWsGB/ecDh4S5vv/0GN49vcfGy4fTJGd/71gMuNxd0piZqRfQDkoBSBh80o5MpB9YN7O4v+NRrt7i9u896vuLpeUepdFrH1qJ0IMRU56e2IzdrXJM7RZHqVJGcX5WQjP1I2w6MUSUUX6bYDaEUvcrO1UEw9D1uSHIcO44IFdFCQj+g9Ijk/zvS5oD/JMb4NSHEEvgjIcS/BP4B8Gsxxv9MCPEPgX8I/KfA3wbeyV9fAv6L/P3/1ZWao5yZFAM5O48JHQtb84Op8UrXK9+vNV7T13Wb/umKk/0vXCEWXEP0rv1sQody6yXktUfc/jYSN9hfQ96mAvxj3WV89bFipsrlDiE/4FUjNyF/qQUU6aCNQIg463KcQf5/ebLmfUgZayPIkBCblK01OVCmx3U+86Kl3uaYJDOPtLFcXKzz61DJ9cZNYsy0EWkzNdpZX6BSBlbMlvSJepqakbh93WL7JaNAC5Wg6dFhY4eTBl0IdK2oy4p1uAAfUuCwFiwWC0IQRBeIfqSSgUUdWVaRmfEE3+PbnrXbQIjMqpq6qghVQe97nBvxw8ju7pw37r3ODy4vE2k0N0aJXkCeSuevMK0jUuipNhwe3aRfN5z2luAjLopEPNUGLQ0yDDiXHEJz9iLaGExVJhTB+6yJnKbEWek4TXnyoTo5bE4GJeKVtRG2QwfnHMoltziTndO2XyEvrO384hqSLFPDmvRvqSCfhMDgt59Z0solK2chBRRFchxVSTBtbXK3mhy3ZA5Ujdv1ltbl/v4h9XKP09M1bbfm7u17HCxLvB2A1PDv7u2xs7fHi5cvefbiJZ0XGFNQ1XOen56xbjpu37xFWQdkM3J8fIPZfMHzJ5aXTzc8ffhwi7BLDPv7+2yalsePHgEwm81S4xxiRk9TEZhC4kUySVEGH+SWiuLGgdFEms2Ks5MTnj58yKUqKIThaO+AEAXf/8H7iFmFiJK6mlOqdACK4KkLRWU0l+cvcH1HN6twzubGebKIEPiMTfqYEALrfELktWEYB9abNllca4P1Ae+SAcFsXrxiiCSlpGtbHj54wI3DY44P9hjbNZUWdL3j5OQF3/r29wGBKCt6WdH0JX/4+39GbRyD9hi/5oOPPkoCdRESfSZ6KlMitOQ3fuVf8J1vfxO7WYGGGEek8ezsznn7rbdYKMO//srvEYOAPE1uN8klcxwt/dBzfHyT45s30WXg5cVZctYzNUEIhNKoomC0Iy56mm7D/btv8M3vvMesOmVnd4/lcgezOOJ8Y+ndKU/OzhGFI6qROHpu/N+0vdmvbdt95/UZ3WxWs5uz9znnntu4i68dO3Hs2EklIQUERVBFUyqVaMRLVCBK9UKBeOYv4AkJIfFQgICiIiGgSiSVSiRKJIbQOB1xJXYS+957bnP6bnerm3O0PPzGXHtfl5MYCebR0W7WXnuttdecY/x+v2/3Q1+iPX2DFxdbrh4+YlCRpy9ecbWOzGcnlAgvXz7j8cOP2KqO22/e5f77z1mtN+y2ic4d8MW3vwTZE+Oa84sZr4YHHCw7bp2ecOv0gMF7MoIIphi4urpCKc1mteXJ45FPnt5mMVvQdQsOZ0vyqcHrjG01pgTOX17wj37tf2ZcXfH1b/wed37n9/ngw/eZ9Qo9GvyF5uLcczGcs+EVY4mQj1AcEHKBbDCmZfSRnBx377zFvOvqOjaVyNNedr2rodjnGW3XOz569z733ngdO7tdr9tMSpFuOUNpRfAeUsYURDuUNIcHS8psQdvPBeUvgsS/8cab/MRf+ElObr/GfH7Ce++8gx+2OK1x2sp5HhORgp137KLn5asX3Fa3KYNifjBnvlwS80hIYgDx/gcfVLSXygKY1HPXRiK5nvtKGdGqofaGYH/uUcT6H6dRJe8Ha23X0rYNMWba3tGawspvWSyXKN/TLnuapmEAZn1PU+BqvePsxUuePX5MHAPaWlIsqGIAaSblnBHL/zEGXp2fk3yBrFForq7WQg+s7pCXl5dYowmjJ6ZC2y9Q2uBD4PHTJzx7+RLVzBiSNGMpjKgccDqwuhiJPmKUES0dEY2l6MIYMjYEQs7ETG2GizAmrMZZi/eeUoOPc6VlU6zsjSlRkujWjCQ/U2rTNnE6itZkJFrFYNBWkaw0EK5t6RcLfPZc7QZiVtimoVUtIXhUiBiDoHUlQ0VSs1aoxtLPl7hmwbw/pDm4RarGX4dHJzS2pcQBnWWPlhOnOlZjKNShlBETDGU0afQ01tK4Fm0MYwhi1OP9vkGVAb1Ga0cpWfJf3YaDxYymsaDSNbJba4mpbooxMuwG8KCMpu072adtlf8oKCERo2fSmYvGXRoFoxRFZYwyZKXQaKwyGGWlhijXQERRkHUdliOFhVGKppvRty3L+YL5fMFyuaDres4uLnj+7BkX52f40VOUxtqGmIro7IowvrRSqKxoVMKoKE2nbfnal3+Uf+Vrn+J3f/O7nD17yHPOyEWhQsIUcFZjKMJgU1koqUrao0LCGo1pLMtuxr2TI958/Q2ODo+FkaYt73z7Xd57733OLi7YbQdcO2N+fEdo9UpMtyTv0VNCRqVMY8E4h9sljl3LZ157nXF8ytOrM3DinJpTuSbSUevxCXKDPbgRkiCuwkBV5Fjw40CUkQVUN/yiNLGuQ1oZdM4kP+KTJ5NxVtamkgo5ZLnu/4zjz23aSilPgCf185VS6o+BN4C/Cvxc/bH/Bvg60rT9VeDvFFkZv6GUOlJK3au/5wc+pibnpjnJzQZM8sxURYr0fgOaPvvehfkmYvZxyqM8zqRvu+ketf9ZpvfrphvkNcIBN52qak6Kup7ycWOb3
N9njySq69dWC3Z9Y/Nh6vQnJ8wkfH2szEknlyFTC9MUoiBrZcrykq9LEiQrFyoHWS7cKQCwZNkMdNWmyWtRhJAqVUNJunzJmAYa26JLomTJX9JZgUpyuWUrItWcaWpzOGkA4XqapLJMKbTWaDROWdBWBlIxVfv8RCrVbUcprLE0jZN8C93gbE/JUkib4ol5IJeMbTtUaeiLw42F/tCxaE/p5reI8znnrFm/fMUHjx5yp3uDN2+/xtPFB+QiFvxFTeHf11ENkwXshAKHECkFXrv3Or11+NWOy1dnXG4GYi6MKRGVwodEjhlnTPVekcbVGCdUuMKN5jzvQ9Dl3NEV3VLVeKYaAxjRM07I3YT+7s/6OnDQNzJVputpMt2R8/vGNSKPLo+hlaBMACHiw8fjIybarWmdmIXU61QrVSeAwj3P0+ZaFz9xBs3kAtvtwMvLK7Y7z3breee9D2VRNUomzlaeuzJaJpLB0/Y91hhyVozDjjxu+aN33yGnyGbnOf/t/wvrWhqtUMHz8OEjlgdLUBrnGmzb0HWtBHwa0cJeXV7duJZrXIjSpJQIXlyjtJYJsNEK7wM5KrL3OBQmZI4XC5b9gpdPn/Py2Qu2IXJw+4TdaksOCZ8jXdvQz1tOD5eU6FldKFJOhBBJGaHFqChGRlXTFhMyqFAQcgZtsE0jnxuDtg6UIWfRtFrb8qlPfYrHTx7z4uUrrLXElPGp8MnPvM1f/JmfQuXA+uKckjytMwzDwDiOeD+yi5lVUIyxRw0DsyYQbKRny+/+3jsQr/jKj/8I27Dhm//4W3z68z/MnXv3ePzyJT54Zn3L4D1tD7dfO+bFq2f8+I99Bb9LvPf4DKsNF+fnQsVqO/puzsHBkvn8iLE4zjYjTdEk27NZD8RtIGXDGBKj3xJi5vT2XXZDwxNdeJq+Q4mZ5eKIppmhCrz37ndYXz7l/jvf5O6djsOl4e7pm/wzP/tX+OCjx/zmL/0SLy9esYuDIOW5MO4CrupWozIc3b3D/XffZXt5gV9t8dvIcnnCdjdQbKa4CLrw+u1j5q6jLYnWKAYtmTxjipRUMEUiJY77BebggD948IKjpufH7r7FJ1475Wi7wSugsbimwxjH5TqidtDaI1r7OsP2iGE30BaD3xay6ulnB9jmLnYmDAofHM6esGgsjTng7Z/4Kid3bvPJe/cwpwdC0TLC4Jv2BmoxbpQmJzB1QvXq/Ir37n/El37Ss8QQMZSsWO08j5++YHbyaV6eX7IdPOOYCT5REgxjpHVaqLrGUILH2oZPvPUJPvvZH2JxeMrR8Zs8ffKS1eWazsAQR3KU4soPAVWC0OnHwK3jW7iuleeqRFebUsE2bV2nYzUWk4FgJdxdE3CUQWuL0tWRUdv9fnvTgff7HtUoKNlK66JSpkqmdUboZznQtwbV9BwcH5E2Dtu3dF3H0DSkIdB0PRrN13/j6wyXgfVqoNSBjHMtORfOzs7JKObzGdbKgOWtT3wSh+PyfMXjp0/Yrrcoq3j69BnaKPqupW06VC6CzudIU/Vnfuvx44602xHR4jroR5xKqKZQosOPHq1a2naG0i3ZyyBmN3q086TqUK2UxmpDqZRtYyR+Z7cb0EZRsiGnGh/DVDMJocyaqhtUULJo3hKi15YYZiU6aqsILjP4hPcjjTqgXxxwmA0XYYPWjlgSlCzNnlaEHIFEsQqlM1GJLOHgzl3U/Ajv5qh+ScyyDwkCJ+9+qg2TmM+4mqEmGWJKaxbLJY11nIdA8OyZL3u3bwq77ZbogkhScqqDaFnDN+utNFk5cXR4wJTztq/4St1tSyH6yDpKJINrHUULJVLsEWSv1FkGoeOwExSzFMk1U0pox6VITqEy2KRwxeAw4v5aa4KkC8GAR8zv5o1j3vXM53MOlgcsF0u6psWPnrNX5+x2T7i8vGQ3DPgUKVoYHb5Q3RBE32/2EVkBlQ2uiONvQHP39BY//Ik3+PDgBa15ilYSJ9MrQyLiSsapBEaRXSaUTDJAgoXpODw44PbpLe7ducObb9xh1hueP3vJ/e9+yKMHz1mvN8TsJaswatqmwSmFIVGyBHKrItpFrTK5RMYI907v8NWf+lneXF3w7W9+lycv1lxsV2zy7rrAq5FFU3kltEhB26QHkPPXSD6FSF1iPT/qkFprqkHS5Ak5DccLGIXKshLkIoZqWStyUpT0/2FOm1LqU8CPA78F3L3RiD1F6JMgDd2DG3d7WL/3saZNKfU3gb8JwomHiaxx7fqklfCGhYCrqllFnu6/zyOZ0DZKFXxSL44bZg7TffaONDcatAJoMzkxXudlGGPIpZqc1J/XN5q170e1mJqTj+vnpg2kPp8Ky0/PzdrrDDNBb5SEUJabzyHW2w3jWF0Ec6I1HWbK6ELyp6wxxOq+k2rIuNHinLE32ChyUtkbtMxcF6T9VKcBbcSBSXA9LdQrU703i6l/E5nYZDXRHWCyN98HLBtdG4RKh0S+zvsF1KKyksmruH8QxhFtC9vtQAgeFDhraWwjTZHP5BRxtqPtFpAGSkyMMTJExcH8kFnuML1m2bcc6EOOu1Ps4SGHJ57HjeNis8K//xEnb5xyuDhkPWzRSmx+S56mz7q21/I6dKWUgqCfT5895fz5Cy6uNlytxAExFqFKCA1E1cV+svIV56UQUrVGluDoUmTKU5gQYSqqLJYxeT/1VHt74Fhz5oqRQYD3XuiMKZK1UFqgitcRbUfMuaJ4qqJ6dWEvGaNkE1geLjk5PcH7wOOnz1hvdlAKVpnqDEktpK4bHa0NRtWAU60ZwxQ9gUQ4JFnspaEvrDZbXrw6J+bC8ekdPv+5z+PmLUZrduMgNNUkmsHJsnsYB6aMvLaxlLAl+i3b3RpzfsXFasvm1RW3Dw/BB3qthYJUnTfH0ROjWGinJJl66/WGcRjFNVJpxsGz2+7oenF0M1osuDvn6BqHVi1dY3HacPf4hKF/xT/9tZ/m9dfu8Wu//r/w8MMHfHax5P533mV3vkLFgm4M3nucLlxdXjJs1ygFJ6e3OTo6JKTE+uFjNAaUIaW8n9gJVSmgtEWRabsZShuKtlxWtG1as0IYASmw9gMp4xii4uv/x+/y3vsPaJ1h1jYsF3MWszl910m4bHOIdoo37h2Ty4KezEFfaA4a3HjGr/zyr9NZw1/++X+O3//27/Heu+/xtZ/4Kf6lv/Yvom3h9Ie+yLfv/2c8e3nBvbsn/Ft/46/zhR/5IhcjDFsxI3j29BkPHzzg7eWSL37xi8y7Gda6/ZqptaadNaCV6FOxXK1GVuuBy8s1L56fcXJ6wu07d/BhQ7+IDOMOQwtJ8+Uf+QKPPvqIP/rmb3Nrrijxgq5J3Ltzm6bVaDuyPDDo9oDMEdvtjnG3YWclCLnRDauLNe/91n1ev3uXtmi8MrzxmU/SdnPuv3+ffjkXcYhdcPD2MSjN5eW5UMtLrtbp4vCYskLbhm9/5485/aFTvvyVL3Bnfsid+ZJP/+ibxBRxXY/tWoIXg5zOZNYXT7j/7Bk/+TM/y8/9C3+ZxaGmhDU2KYxe0iyOcAc9trXCoFCWRic6rVC6
ITWKWQ60riHYjCGQMFyNQlt8+fIVL58/5/XX7nHn5ERoVsh6t/GBpy+v2I2aOYYxKy4vNvzi//A/8fjJc/Qv/ybGzInrLZcvn7MritF7tts143YjtOngiSnx2mu3+cwPfYbtZoM2M/p5RmmhZZjGonXDELbiVjgOrNaXkiulDWdnrzg6OcGnSNtagh9pjEYby/HxMajHqCx72eR+Ow1Ci4KmaYm50Fhd88Vqduv3Y9DcGAhrXTXlJTL6AdM21boc5rM5KSaca9gOuz3bwBiN61twFtc0uKal7Syz+ZLkb/Hq8gXH87t0rnC5umK12WIbyZma5APDdou1hjiOXJ69Ig2Jly/O8GEUCuEog5+UIo3RLGct57sNVhdMDhhVMGXkeO6IyeBjwfY9hYzfQa46cFUU81mHHxV91++ztJTK9F2PdQ152EoNUBQxRDQZ70cZnurqPG0dfhinHR9jdA3JFlpYUTXzThlSCaKV1uKunXPGaEccItoJoyeUQtP34Bqa+SG3miUM56y2O0qCGA2qKFY+4JW05yVHMKIzv9t1mPkMZhJNka3C+0hMAZ8D2RTIYgxS6nDWlGvkhCKaq5IibWOZz1rG5Mk3bN3l9UgNQPUX0Kjq3pwrq0Qx+sCr8wsKhflsJhq3XAg+EL08hqoGciXJ3lsQt+CmbXCuPkaS2rR1lmEr+6miMGTRM7eNE4OLXNARTKUYGq1quLYmpkJQ4HVB9R0nJ4e8PjvgpJ/jmgatDefn59x/9z7DMArLRDu8F6fLoLToLEvBaENWla5eG5EMWDKoJCwMElm35JKZAa1uRFaiRJpjKbQl02oxURtURM01pdX0bU+rNZ+88xqfvnuPu8fHRBV5/8FD3v3ORzx99IQwbChEcdAtM1ALUvGEsiah0aqV+IdS4xaMGCJ5BRc5sz084NNf+ypvP3nE2YNXHM2f4s4sOlmMlqFDzqp+rGOeLJIiZZzkwRVZa2JMTBInmPSZGWsNbeewdrq+xbtAGACZYhLTiZjzRFNVlJSIfuDPOn7gpk0ptQD+HvAflFKubmqvSilF/VkJdt/nKKX8beBvA/SzednTG25MwIoSN5xaXu7pDzcRM4o46U1f68Je93PjuV83Jt+ja7tJL7u5aN88bua9sC/kbxThZRKb5v3/CcJQlcM5NZyTs2UuIpwWY4P6uGri48f9GynPpwZspiTNktLVxljc/ZTW+zlXrjEDcd9o1tuq3bvWWpwrlUwRtNYSUKjEfRHYWwvHKjR2rsXVhVUZJXapRUHU9TmKhqzIHI2cjRTEQezzrxPmpVmY3jc1nejVFtFqRzFun4kxGb/sm5D6e1onOVJXm4Grqx3b7Om6lq7RtG6GMqFSFHtign55h75bYtaKw/kt2oNjQtoyv7VBNw4TPFcvz+nbHh9CnRRO51mp79Xk4innkDWG0Y+8d/89vv2tb/Hi8VNULFBt97M2JC2mIWIaIyXDRFNUWqbGWVOb8oL4F5s6C9ZyruTa5Kp6zqnr6U/JktFijOgsKELZ3IfJIgMHKp1y0q1N5jh7nHcaYCDID0rR9R2LxYLNbrdvEBU3kOf69f5aq89rGhSEIO8hpRrnoAFxmQwhstuN9CmjtCGmTN939MslyShs27FcHBCr9bGdldrsgmtHcedyBqUzKTjCaPEq0kRxJytjwCsxflk0LWgpHnIqYK+v90lbuV5vODw6ZgyRQmE3DKxWa5ReyBQ5T82yYblYoHWHddB0PUpZwhh59OFD7p3e5a3X3+IsDmijefH0KcPVCusaUoz4cUsYMqM19I1oFI6Pj1kcHPL0+QtCERvqXGQ9l+GFGNFoY2pTDM1s0gQCKdM0ju12U5H1zKNHj9hut9drn9bsAtz/6AmPnz6ncYZZ0zKf9cxmc5yVjKa27bFO0bQGYw45ahvuHncsT5cs9Zpx8Mz7OX7whDGIJfpswYDh1vERn/18y+HBEY8ePWV5MOdLX/kSn/zkZ8jM0CWxuXjJ7+wuaMopP/3TP80bn/iE6CdRdQgp9GvU9SS+vkQKhe1mzYMHH3F8fMit02NyOQbnyEiofIyZRWf57Gff4o27B+zOn1HiCqcDpIgzA2/daXnrjS/QzY/R7RG5GK42F6zWFwy7kWGbef9Pvsv55W/w9pc/z+zwgNV2y+Nnz+Gg54ff/klODg8liyp6/vCP/oRPfPoT/PNf+ktMvkXzxZKm6QhJ4QOcX654/z/9T7h1+5Rf+IW/QW96SgCVDdZZKfpR4kSYI4qBP/nWNxjC/8jVJvP2F36YW6cLTBmkOCo92Wh8HSIKeRkcMphJKJIKuFBAJ0rZUtC8895D/sH/+g2+/OUv82Nf/FFc8waLWtSjFDGLdcFrb77Gz//Lf4V2ecKQCkMpDKFwdrHhyZOXrMIlrjnApgRhRyyybuQUWV1dELwnZaGEHR0d4ceR5093nF+O+DSjkGkbJ+6QVSvWz2ZYrdmNI+N2oLOGftihr66wXUvTH7NYHhLCIE7SpuqF6p5CmaqDGzNtbYStUP+Xuu7xfSqV79WST/u9rs6OKYu2KRdBrqwS17udD9hesu+MtRRT43Osw7YNbb8gjAd0amS2OKbEzG4cmKOYHxzSdTNmi0UNqY4YwG8Djx88wyqHztA6g8lJKGwHS6HmkbE50pJpSsKUACFispgpYC2XcYtOA4vlkmQzu/VAdA4963HtktXViNaG+WJBUXC5uaCf9Wgn2lhrDWTYbnYYnevkrWCMou1aNjt5zqo2xEYpjLXCqEDVpq2QMhjboqxD5SzrRhbHB5W1OOBmj+tmtPMluu0xTUfTGOIyYbTjfLuG0YNSNL0jh5GUA1HBEAZsAbfZEF88ozQrsuvRbU/IBT+OnBz0ZLWUWtFU7lCi0g1B5Vpl+sjm6gpVEmHYEcZR2EQ1pkdVypxRRj7X4tysYe8Era2qyK4i1iZRY8k5sh0Cw2YHqdQAbwEXrDYycNemJuhk0EJvNHX/pchQvHFWZCcxMw4jKUVKkqloiVkQHy0FyxQ3pa1QG7/0F77KV378y7jLHa8+eMCDR494/uIFF1eXeB9QVAM5lUm5riWUfZNWSvVtqAhbrnVJVrVe15B0IhtFVppGCSvLOocxmrFoVC5YVUT/1hgCYKzm1q0D7ty5yyfu3OOt4xO2z17yB7/7f/Phs+e8WF2yWW8gJIwyovtSCnQiq1111O6xJWJzIWtHUfLelFTwWcALZSxDKVzsRpEqOEvrZGiolcYnpBkvZY/STfuQIHYi7UhFzISKkgHFMHpCEkmKdqoCG8IAcM7K7w6ZEIuca/p6LbrRqQDSKP5Zxw/UtCmlHNKw/WIp5e/Xbz+baI9KqXvA8/r9R8BbN+7+Zv3eD3xcC4irN2I1h1DVVXGPaiFom1Fq/3012f3f+F1/2kRtfzvXxatW15x/ye1S+9vkDjcpjWKsIVly143JdGJT5HdMgcO5FuATve5G2Qy1gSpZ9GWFgjFOCu00iYXlhIkpo2wWfVpKhBRplFAtUhau+RgDPgZKEVqhNJeAqjk0Woo+5yyNdWJkYA2b7RpZmHUNbRZ0L+d
MjhkDOJXQukGZlhgzo8/4EjB2angVYZQJlzaV/jVCCJ6mkQZ0/w7udX9KLPCNJSUvoYW6EMPIoALDMBJTIqTEsE3MlnNOjhZYs+FqvWYYt4xDZtE7TO9IxbALjm0uqL4nZUtrLf38CNo5eecxumE2W9KROVu9ZDFfitg4RvLEK5gGBIjNTK7n2Wp1l8JqqwAAIABJREFUxZOXz/id3/89Xrx4iS0amxXWtqhKC1EUlBEbf5EpqnoeC5IcUhYE1jpM1WBQpgbJ7P8uQitVEgOgFEZRXaYiBhG9xhhJKWH0RItNe3Qp165vjzJP1w6lXlOpNoUKjLzXo/dsdjt2w7CPBpCuTO2bfNS0nHHduCFFTohBMk8K+DFJninSQMSQGIZRqGmAMppu1oEWy/ddCGhj6+3ynK1WOKPww4447ihI3k5jEyVJUPbgR3yKom8wlp2PlHGFPT7ETDlON6bxIQTOzs54/ROf5Of+2Z/jV371H5J2IzFFtrsds3lP03Q0TUf0gmgapZjNeopOZGPxBXxMnJ2dc3F5xZ3XX+PhO3/MxXpFKolPf/qTJGX47oPHkBM5Rwa/ozULUIoQE0OIDCETMYQgNOWSoaAls9EKFx4l50CViuCsJQSP1poQ8z6E+PLiHG0ajNbEnEipYDrJomr7nq51dNYyPzhguTjAx8RqveFiswYVUcqj1ZquFN7NW4KJ9PmS5y+vuFyP/P1f+lXW/pKdL3z9//wtXqxecutkSfCaMCb6puXy8pxf+ZVf4VOfepvl7B79TPPs+ftcXl7wIz/6o7z2qXuEPIi9+kSXVgqlHKW0MnWslCNtRffQH3YsdzOi3oHr6K2DMlbdhkZZBSVhbMJ1iYN7xyhzKBVZFnLU4VsnYqOWHDQnUBR39SEoIYqU2PKFN0+43Q78pX/1r6GPjtjmyH/xd/4rZrOOf/Nf/9fotMWGQljvePjwCW+++Sl+4mf+IqQAFMQq1ZATKNPy4YcPibEQVIeZHWFwtI2ihIhCizFDLqAaim3RSiPjrsTl1StC9gxxoNFBKI0qkrLG54BKBVeqCUMROlA0DTmv0bmyHGzk4uyKX/xv/y5/95d+jbc/+zb//t/69/jc25/j8sUrgg8yVKnZXyl4dtuR3/jG77CxSy53I+PVC3QSTWz0nqIGhtFj8ihDqqqh9OOOXCJaFRaLBZTCRx98SNs0ZH3O8/MNuhRKCXWQOdJYTYy1OERJblLMrFdrfEw0sxmua7CHC4IPaBLauD3roShVKeZU1KdAUfuGbaL33yyPbh43tcE3B8ICVCvJK8tQsCQ0PmuhZ2fF4EeOF3OMNRAFKUlJGpO27zBNj+2W2DIyJoXfDhQFrnH7BsBqzWLW44eBEgOHXcv89FRcG4uwJ0Y/CuLSaLIyhDFStmsWBhi3DCXQtQZrlBgwWM181jOERBgHFAmrNK7vcQcHzBa3iPGCkhXzxRLdGMY8koIg+rO+FU2eE1q4yr4WtiIREXOrIEYexkkGXkUpjLV0vYG2Z+Ml/Wi+WNAYiSiw3RyrC2W4IvlAcUJ9DQmcNiRl8VFJdlxnaIpBjxlUxBjLYjmneIfWhaQSl7srMpGLyxWvzq9Ec28cSWuMayho3rpzC8Vt2aOsrKGSGyoeqqoUGq3RORN2A9mPpHEgjGLK1mhpnnRltwjrp5Hr0ci+7KwlRL8/zXKGcfRiFGVBFUXGokxDDLsKAsn5Kfl2Nxo2EF0beY/EliI2/33XizNZ9TIIwct10WgxmVOTLv96IO/HkdnRnNPjW1itef74MR9897u8Oj9n5z0hRNBVPqPErTcUyYQVoKBAzrVJqxnCXLsrUtFLlMJ2DjubUaxBVRmTQoMuiIF4wTYO1TuWJ6fcO7lFtzzk9t3X2G62nD97zm/+/rd5+fAxV2dnDBjG+hhGW9HQFmmMshLfzGQ0xdQhf4GsNEULyqirw1Ipmaw0Q8xcbbekGMSBs15nmcqMqkPESo6S5k1Vs8D65qYkyKgxlkRhDEGac6NRpmCM6A+tknxLpY3Qe1NGIYN0VRR729Q61M5Zk9L3X6em4wdxj1TAfwn8cSnlP75x0y8Dfx34j+rHX7rx/b+llPrvEAOSyx9EzzbB0zfHYFOjstfh/BMNmHycCukJwdk3fTc+n36P2RfN19q4AnttmFLqY3b5+2dUmzumIrg2XjdRPIBcJFuiVFrbNKUv+4ZtKnQ/7hQ5IVulOIHSlTSFqTrV1FcsbYNSKGtFO6AFdRNr5Uo/q81FrJMcbQwmqUqPpO5fCtSkQXL0/Vzs4NkBMqFQymKtGKmUIvo4cqZrimwOriE4CERC8hgn9M4SDXFEDCWMr+LOii4q+7E3V2lV0SFxv5t4hynLNCOmgCdIMY6giX6MoD3t3LJYzGm6hu2wZbfZMAbhmZckTVF2C6BFjZnD2QluccRKF3Yxk0Oh+ESz7GlnC8AyjiNps5H3MWWUnVwUZbNKdar0+Mkj3n/wEWdn5zjXojMYCVETsa0WXdfNLCTZ1eTrVJ07dRE0rm01OnpUJcXJQEDvz1WU6MAQhke14v8eim69nyB0ggJbI8W61gq0uB2pMpmdXFOHZcBQaZNGFm8AU8+verbL+3XtCoS0szcu2IoW55TAXNNQcpZZla52y95Httstox9FJ2nFGn0IA9o2Qvmt0RklRrq253DZE1pDjp6YE4vlEfOZJYQthcguBGb9guglQiD6zBAjcZHBCRpgSl2gs9Bfry7XNK7h9PRUhgbKS6EUJOhTVYcoSiIHOf9bZ4la8/Tigl0pBK243Gw532xwhzPOdyt49CEKmM17QoLlfMbqcmTYbumdpuRE8BIuunrxigePn5LQGAfk6kxljcRh1DzKUrOlqBtnyjLYMfpa/2oqkm6MJqTEOHq6PnIyX+AqvVNV4fxuO5LTSjKMasBw0bqi6Q05lWolrdkMUOwM3RTWY4FmQdKeZ2eX8O592oeO3s2leYyZ8/MLfvkf/EO++IWv0JkT3Mzg9ZrZbMazq9/mf/u9b9J2HU3X0HQN1jmssWjtUGqBUU0dAOn9dFkZRddZbt8+Jp0nOhM5SFtMESQkA2MIPHn8mKuLc05PT7l1fIwqGdPMCdmQWGNMQiUrxWxMaH1FKRckP1J8w4sPH/P0vXd45/e/SX96h3UIXLz/kG1n+aPf/h0aZVEJGApH81vsLka+8evfgBIoZBl8KUPwmWIartYDd2+/wcmdN/nuex8wj4lw/oqwuiLnzDYmvFIk1YJumZnIq/t/zDisefbiPt/47d/EtpF5Y2iyZdYeoruGUQ+YXNCjIbieJnmMskQ3Zz73zFMn6NN8x9nDV9z/zn12Zxc8un+fD975Lq3SXF2uePLwkbjMjoEYAn4cyeOGTdI8OBso2mLGS5qw5vzZMwbdUVOH0DpjjeiXc4mkJCiQVnD79IRZ21FiQjkYdluSuWLetcxmDQe9xZSWsNsQwkiU7AqMdlglRkalgI+Ri6sVlETfWEou1TGx52zl65523ZSVwo2YncmtWU1A0fc9vr++bRpyyRqdlWEMhVQMKYHPGp8kkFkZRdh5Rp
Ig5MpiXUtRDuNmtPqIPFp8WqO1Eh2p9xwcFGLfo0ph3G7Yra+wqnAwX8raqhXWOPrOklJk2K4lv2oYiSHQOYePEfQBZt7hlCGM0hA0XUfUiVjEHdC0HUklcap0jrbrib7QtB2JxLzvKbngrOHwYC56R+OYz+bkqBh2FopohdbrVR0SOSlWtakmKhHrtAwrbYsuhdY6jk9uM2s7jEIaWV0Yzp+zfvWE6DOg8TGhQ0SNkRh3pN2GuYmMaWSIa2LaYmjYXnl0TKLr6yyHbUfRhegD3g91AJTJZMLg0a7BpoDO8tpzrrEDRbT8JUX5nyMlglIJp6TGSQpCqSwuJSZcBej6jlk/Z9wOBC/nYGsNGkPIiVREApMQKYDTDoWtbJZQh9QSkC1In9QCKhWICa0KzooecPQB70dKzhjnqjOwJdb9SE16Og3BGoLVtMaIbkqLYYlTMlQ+e/yUhsLF46ecv3rFdhhR1YE4gTQX1UkxIo2MoVRZUIGSRSMvGCUKqct1ZUtlwDQW07ckpSUWKCZB4ktE4IiCV/DJL36Rn/mnfhadFZcvL3n80UPuv3ufZ8+esl6tiMGzywVPIiuLSl2t1aMEiluDwlGKRbuC7gvZOZLtyLZHsLUgAdkJSomkLHrIkFM1TDFoZ2RQWqRGUmWK2FK1uSoiDqq9a6kLjKD5UtdkxBXVKQ1EdAGnoTHikKkQRmDSiliBplQjuIrKUHQFM8y+9vrTjh8EaftZ4BeAP1RKfbN+7z9EmrX/Xin17wAfAv9Gve1XEbv/dxHL/3/7B3iMfcMFE0Xxmm61p9gVsTPXtbDPVaA/BQ0r4CaV66b+baJGTjTIm8fUrH2vNu3mz+0bx6o52nPoyxTSXZGZfSZH+djjTTlrOeeahVZNR7QYTRijawNZ6sU83XdCotjbqHMDWUSxp0fWaLX6X1CdCVrPOaNy2QssgT2Kaa0TKDxloWElg6pxBvLcK9VSPHwhZ1IYScWJ0LkkopJpZI5QfCZ5TS4B3SZy1vVvMTXDkymGmHE429G0HdY1FC2xBcI1Kti2w6aC1rE23BqlLTmBHwO2tcIB7xyL+YwcImE3st5suNolmGuc1dxqDpi5JWPRbHLEx0KDI++2xFnB9b1k37Qd292u5uckVFaVCiEXacyZ7cUFz5895ezqHKMt2ohrlK5BrqBxTUspiRzyjaanahwnmiJGogCswVpLowwp1Mwbrhs8pdgHXlPfX631x4LjpcCp56KuTUkSmqyEaabaDNahxPTvxrADpBlESSPdtE2NPpim1pPL6vV1uh+U5CzuVSrvkb4MxEkDoKbmVSgzIURCCIQoH7WWQPdZ6wgxsrva0PQ91lhK9BjlMCUxDFtWl+dsdjv88TFp2ZPyiNWW1rWoLM5oqtFY7cREIGdilOeolCCFpVI9YhI0+fLyghQDSoNGNGW73QBZEWMgF9F0rFaXFAZGldhqh3GOT37uszRj5mK3Bi3mBd/94H0Ws55eOxrX0TaWixQIfuT20QmzWc9stsA6x6vnr7habzk6uV2zfmRD19X5K6sbcSQK0YbUibFVhnG3ZRzHPTU2FSl4Y4q4iqIfLOcYbXBGGtCJbuODoDUFcdjLZIq2ZET8HyNsYyJuPDQLSlO42A50psFnwxAzviiST6icRBdYYBhGXp2d8/jRMzoVSDlxeGvGq7Di4vK7+wyiTJGJshaDA2NN3cRk+qiL5GSVDF3X87nPfZ7bJ6csDw54/OADLp9/hFKCTo45EEvmarthtd6ijaVte0pRNLrFaQN6Q8oDSs0YNg05Q0gXpHhOjonkW/K45fLsIb/2R89xboZxHR8+eojrLB8+KXWd1eSsmTUN/YNnbLZXaCXc+YIMp5RtGMZIUQ2271k/fsbf+8//aw66BSZJ7EECNsETqe6luTDXcPXgA7YrzeZq4A+/+S26uaHXLXmr6eyC7tDRHESODnpmdkFuR8bgSUHjzZbLVxts6LC2gXZLz4yf+upP84//4Nv8zFe/xo99/gtoremOjli4lu12y4sXL1hfrditVrIHW8vp6YmYHGw8zic+/5nPkLolyTakFMh+g9WaYTdydvaKcbsmJ48zmq5pCSGIWVQqNMaS4iAOuyMEZWg6R7uY03YtTdfhc2azE8qXTwFbmQeb1RpHwS7nOGO5desWp6envDjfMKS039M+tl/fWKP2soXvc9wcvN4cpGYKsVLQXUVs1rsR0zhyLCRlUY0ML4UBI24uxjZ4E0QmoAxZO7SeYXXHuFmzutwRU2A+X4hZgYLgA37nq4bJsLq8IqRA27UorWjblrYXg6UcPFED0aJKYd73pNkMZR0xesaYBYmxhoRhiB5dEvOuxSihK2rj6GdzvBZzkbZt6LqOrAqL+RxMw3xe1wzr0G5ODAN+3OL9jtGPdP2Mpp1RMKSs2W42hJjIReGcwzUNrdZYJQ2iOPIalO3QMdI1PZuiGbYjWRtc29G0M2zT0jnJLiv+XCawNqJcQmVP3CXaUkhhIO80xmmocSwHi4acPT4MNF2D63pU29Nbg5rcKpWqZhCgEnuNnoqZ6AesybjGoHVDzorsBeGW+lKGlvP5nMODIy7LxT7YWmIVJk2/ous6utkM6xqcbYihVM+EXEPur9leokOOhNGLyVXUsp46R06Z6MMeYIgxQjYSZK+v67fRGoJVRKMIGtCyVjnrcMFTxsBwvuICxfriEo3Cak2oNSlZQsrtVCfU2tJUvVXaDwapWcJT2VBlSUVVrRd4YJcSY8zkXGmlaqJXgp7PCV3H4xfnsB7ZPbvg8Xc+YHh1id5liodtyKSmg2IosVCIkJKAU0bMkbKOJBXQpgFj2Y1QdomoPUpbGgo6ZWzO9FnRUbAxodJ1Du8ePcuFXOUICqmjpve7qWZuU/3QWKFZ5lzq30khcc0FlQq6FHEdVYLelpLQCWwBW5QEr09VeJmG4LUu/lMYAdPxg7hH/u/8abwC+Pnv8/MF+Hf/vN/7Tz7Q9acfQ64q20GoTfUHp2auSAFz7cYDKOFcg3CqJySgVIhpss2fkCxTucelwtDTRSfPQ+4tWSRZQol1qbQO2ZwpGaO0uC0lmXoYjARZK3Fj1EahDbURm+xfpbiX9iphTcbaSpEswieWH5TsM1VUtfJXOG1wFRmcNEQpBrFmLjDp6Kw2tK5BF4GIbTW3KFpRlOgpmqat0xUoJWGsoTWdFLQ1f0TVLA+jZcHLWcSXJY/4QqVJaEpSxFDFvHVRyvFaG4YRF56cDKVYSrYo5WibGcrNicritGPR9AyxEJIHY7BK4SigvaCKpuA6i3Ey5StJ4ZoO18/JbabpItiey/WaHIRa13YzsnUMQQSgvW4p/ZLtZstuF1nePWIdryi66hJq3IRCHJwKEvwdvOfq/JLdeocpTi7UKE5Ok0smZMgy2dt308AU3l5KVV/UBomUSSVSTEPTzdHW41wnVAJbpz1KYQyEJCGaRVVElxaQ7K5c0cmQAqZoUpTsvRglq80WIXhOF5uCinzVBloZCnp/HqusKpwvtF15HVpcwGrO3jTXkFtEiSSuYZByoJBqLoxMPiPiZ
BWru56KYJUVN1LdSuZSHJi1DV03o+hC23WYtmUXEqUYrGrxmwte+edYdYdI4mq7Y7MO+DFUinRhGEdszowxolOm84m+N5j6GlPJKJ0YxxW73SV5WIHSEuicZEhimdzjEju/5dVlZrUz+BwZY8IWxVE7I8w0D1dnHDS3mC2OePrqiqsYWb52h0Ihhw1aK2bzOdo1mKaj7xecXV5xcbmi7TqU1jTOUiVGFdmf0H5dkVQ5H/Y/oytFxkdx+7QNZImQmN7l+WLBrdPbMnjI1yYlkynDNAgpGSIG62TVCTvPNmzZDFtBk7tA0yb8NkARp66ZscxoaFyDLob5vBdL81IoCMpom0Tbtww+slqtBSFMinEIWGfJuQBJ6C06o80NSEQZNBptGsKw4/mzZ6RY6BdHXGxGHr+4wKhE1za4VihMTdboAKuLFUOX6GYLQh5prcZY8KEQy5aSg+gTcqTQYaxYZRfjOOg6WUeNJSuF7RpiTmyHAZQYPcWY+Na773B6fMDd1+6ijExqXdvK790GNBajFQ/f+4Dl3LKc9wztDqMcTT/SzmZiWnTDAAsNV1GxiUK97bs5bduhskG1Co/CbwbCesOjB8+wpqHp53TW0DQd7fyA1knx0PdVd9yA6xoOZg2zzkoAsjZoZ5lpTdu3dF3LuNtx77U7XG02PH51Qd5JeK49OkKlnpyFpeBToWksqrU4mZCyXl2x222IwaObBqUlhL1rG1J1/2tToY2gchDEqjTY1tEoOOwsL+MONawEbW9amq6h7Tq0AAekmjlp257br53wnfffw6Qsg8F6uUw6brQmlojKWpz09HWR/L1D3Jta91KKDA+ysEOEPpWAQkyFYgy6aUm50Mw6Aj1bb8jKgjZE3THowM5HbJPJtqX4TMaA7vBBMrkW8wOatiVlTcwdY8pkM+e1T71JiAOX56+wTuHHgaEg1GGrsK5BNY2s0yiM1USdCQQZMJo5MQoVzwNDiISwJZbEwcGSjCNnGfilNJLTgKvMGucaMi1Kd+imk7gNZyEpVHZYGsaww6Dp+55usUSbVjLctpcyazSmGjYYyEEo4dsL7HyOUoIUGTLEiCta1udGM5sJZTz5nWRZKs2YLCk4smooJhBSojOajkJbCjoHLHVYpYqs1Vbxahw5W29ZdD3z+QFqdkhxPRZLYz3EglJC6XSNrdm3sso2rcV1DqUyjTMwOqlpcpamWWu0UWBhfrTEWoPfjQwbQaVjlsiArAyu62krjTImj0oeTZLYpX0tBYVMygpRKiiGmImpZT7T4jiPonVWGB9ZkG2tNLHSvrVxon9TwpigQCmSIZxyqSHVFmWs3D9GoYhWjX5i0qkpQpZcPlWL4IiVdsLUga3WBABthQmevaDRKeOSpYyJ1I+syZyVJRrHXIn20qLxqkAKPHj3HcrVmuNmTlmNKILkzOqC0dA2hl2K6JwEcLhxDafKHJMl2mKzIQ+Jq3hBioqYG4ptMLYwU4GZLljbYmkYksbbnqALRbXoACbW90E3lKLQjCg1klU1yYsjA1H07FiaZIhZCWulRAxZaJo+osgYp1CWvVwr5ULK0tQKDdOCEgdU+eOKMzh6kg796cf/K/fI/7+Oia13/dU1t3x6BdN8f1podU2cnzRoGmnq9IR2TaEzH3ugikrccJWMKQksXU+GyaF/b0pSP1fVeVELoRc1IWhTUZ+kkNJoyYhQYV94TUicqhfJPmMLql5JssdEqyJF1cQdziWhSkJPyEsq8lgpCSe7PjehHMsUR3ImVG0OwGpN4xSNdiIMLmKUYY0T3VxFUVIJhOwFCUpUSpsRBC1HpkDkgtA3Ra8nDkwoVx2eEjklTM30EHol7MpWbFEVkj+SMyVr2r7nYLGk6cUFc9Y0dCh0jmxDEf550JC2VcS9xfkZEU2vDF0/R2sHxQBWggqtZtku0LMtu90Oqy1RCX3AGUODYmYaYtezaxvGcccRmqPjW6yHNeerS1DiWCjZd1nQv5y5urhkdbWSaVmuDTJUpEmEuNK/JZwuhDQZSsg5eRP9LbkiyrlQUsRjcM7ROkfTSl7LhFBiNLk2Gs5aoUuKDY2I40MmlkRKgZQ8mqaic4hd8jTVu7EkTOj0dN0J81wExFRKr1V1MoniY6uJ2vcT19dwET1bztUaukwa1MSk80zI3wkl2Ux+5zFo5v2cWbcg5EzW0qw2tkU3ho3f4XPBZIWzPW0Ds24jWXtZkbBstolcDDHKZFyVgg8jaFOduWQSPhkClVKwTpNy4OmjBxwsZxzMGq62g0wLlSbHJEYxKRLiyBhGygC7ILSPPHqsNQzO0fUdyjhoHF9460cYxsLzF+d082OePn1ISju6fobqC9Z19P2S7Rh48vQ5OWeWR0d1WCA6hBtLISB04VKKNJx5Qvel4e+6jju370DJ7IZdFUzrSjfUkhmWkpwrk6PtZHJkZA0lR0DhjBGN4O6MzWaF9zuUEm2DbQ3Hxx0bk8RFLkV6q+i1w2RpEru+xTQNYcgEr0E3FAc4GHYDPnkmywhbi2GlDFCqZqBQokx+5XklGUSESNPPOb88x3YdZ1cX3L53j+fPHjBuVgyXVzRWfGZKzrQFsm1ICdIQRbhPkCiKknCdQeFROmOVQuOwylKMwtuWUBrGcctqXPP/MPdmTZJk2X3f767usWXW1jM9A1AkSMIAYpEEPYEymklv4keniWY0GgURyxAiBpjpnq6uJZeIcPe7HT2c6x5ZPQPwdcIsrbOrsjLTPa7fe875byUvpHal5sz7774h+Ej0EWs8y+WJZbAg73QPNI4lNxDDGAYkNyRlnr//DGnkMO5ZStWhwqXSLmecHwiDOt5hLYsUzq1gx8C8THx8/wE/7HVNbudSo7VCKalTzJ9YA9UPhwPv3r1jv99DXRjDyC/ev+c//N//gU8f3/Nf/+L/4W/+7M/4/T/8Y5a0dDTJMB52xOjZ7UfGuzvu3v2Y56cz33/4nvP5QsqQcqGZQhwUjXbe0srEx48fef/+PdfrVdfz6cTd3R0pZe7v79VlUYQyXbg+fEJKIgfL1Qqv7o/dNdnhWyHUrIVh9bi04K1gvKM5Q0IjFeIw8vbdK/Y7zzJ3XaPoYBDTJRNGh62tiwZu+Ze/rmFb64L1w3mP7bVGroXcjUhCGNUy3jiMtxQCnx4Tpe4xRo0LHi6ZqRgenp9JpUGDNCW8reQkIJ6UJ3JNHIejNkvjnnAYeJ5m/uHDBT8a3HjCDhHvJi7PD+SWGaPFBIeJQZEdY0jSwFY0082D0ZiPhkM84DzL3DClcHAOTOxnuJDLxLxc8DFinSXujvjxiFjVhBUjiPWkKXO5ZjzgrVImfVDdsVQDtdBawVqvodylMj8/sywJ6z3Xp0pNE8fTiWDU6l9a1eFTMwxhhDjwNF2Zzo9MAscQOexf48QzxqPmg5ULtrVOWRO8A2eaNm9WiDbQPLy+35MuVx6vZ+z9O6rzShOVXqtV6WHTjRA8h8NILhdlQLzaEXceGyxxLuS5UqaFPM+qeUdYcmIvjd1xz+l4oC6Fj+8/cb7O2LogCEuuXK4T7GBwDm8FbxrVCmHcUVOG
olrQZoQiFWk6HI7ed4mAI5eiZ3HfL0WaasJwavhV1OdALHgsAdVAN0U7dLCNNnBzygw5v4gVUoqgA2UKdYM0KbJNDv1+jzXgUISwNl1TYRh1QJvU8EZSxYjDBLBNKd+P9ohzkb2DwWjsVCLjWkWmK/V6Zs4VSRlxmbC3RHEa+5JNd0FfQwa6Xq83uatW1eGxzSKpUdqCND1HSzPMJVNtppZGzJUSI0YcF/Tcnqsy9gZviMaxZEstgpXSaywH4rFSKU7I3uJEaepOTCeEqa6+5oUia11uSVawxuOD68ZiVl3CjdBQOnGTgkGd2ysgxuN+YIT4w9dvRdP2j71Ww4aNpdjhSP1UWI0a1hyX9VI1JBc1bmCFn/X1Uie0GTMYyz/mfdlWWuT2K3xJmWyt9dwPNeFoKyrIzd6zNfmy4IUvKubVQnadkKyhhRu9YyW5dg5srTdHRiMrkni/MWh6AAAgAElEQVSjY25Tw37vrHVgVQTZqsK5rZme4WaxrWq2UFo0OLNT/NZOWarmhNRa1ajCqVip0IMa+0HXBJxV8eUQInEY8MEjUsgts6QZehGPUWQRU3h+/oT3DVvvCccjwTl2UR2YLmmmlIVSFWVLOZNzxUdHLYbrNbEbNXtLgFar5lUFz/FwwHvbNyC08DRr89I0C8Ub0rwwzxdev33DcX9AWqXWirVs6KUAz8/PfPj4gXmatjdvXX/69avLqNnopGuzLi8GAes6vK2/TpfNSYW9zivNZVmIMehDXivNriY73ZznRVzDape+ZuGJaMZd6TlU/V/dqJBmRXAU813XOr0QWL/3ek2bYQm3pWxefI7czCRcp25SNHPpJeUYePFc9I/auttkJXrLw/m5F6IWFy2n0wkxghPwVThfE6UaajXMy5ov6InBcDqcuPaoRNVwqt7JWKWmzCmRc9b3AENeEt/+8hv2v/fPOYw7ni5zp7917Wp/z0otiihndTzT7MPWNWaGGJW+5L3j88NHzudHvBVKnnl8/IxI0pBjUWrosixcrxOXy4XhdAKzahy+HDT9puJyKzp16sNxv+fduzfkZebbb79lWjSrUGpVfK6pFsZgN1qt9Aab6jCublNVabAsV85Pj8zLhLEaMSGtdZOFrJq30gjBEaJFTNXtzFrGUQc1tmshxSjdy/SVFoPbrtV7rwM4e4t42dZX3wdpqvVNudOuRhhi5P379/zRH/0RP/2f/gU/++u/YioTuYkKy8NAGCOMwnWp5AZYx924A9OY04SLEZFCyWmjvteaqLkypUwWKDWxLDPGNMZhB3HQ4UP/nbt0XZ18a+1W/6t9syIZpekA63f/2T9jdxxxXpGvKkLoeVfGVWzKuBDxMRCt0FLhMO6IPjJPE65CqapVNkZNFKB1ZzvbA491rT49PfPp0yfu7++5v7/nRz/6EcuSeHh4YF4SKWWcV+tqZy3eKDWZWilNmRLee1wt7PcjP/3J10zTwuV64fPDI9dp7qZQ+kzM1zPffvsdz+dL11lG9vsjzkeOw45xd1BKV218/vzI+199g2lVMxlN5bvvIjF4TqcTKRVc6JpWqxbYaZ5pziKlMLx+xWHcUVOiNSFYpwwYVo3Nbf/btOStKSW8FkTapnVfh2Ivn6svBrpdjlDQ/cRY1Ynu90fG44GnecE4R4wDrVnioLlnrTq8GzkdrRonFG1uS8pM04xQKDXxfH4kDgHnFAHbH7+ifvsrLunM4Dw4y/lyIS8Tz09PSE28uj9yPI4YEXJOKg8RqBhcE0Jr5KRmWoIFZxFbqFJo3fQH22hkSkuksjClK0G0qTWdVofRSCFrVaeUcmWeE9Fpth/WseTCUgtVHKaqAYv3PZqpNq7XiYYQjGG+XskpMcbIcLzHhYCEgPGe0JHLuVSu52emp2dySTSvg+DqLZ7G0KN3aFX3XedVs2cFI5WdUVlHsIb7caRYw4epYvKCKTNW1GFZWtmKSj2LtGHZ7XaM48AQ1ZQNC7tdJHooLjJhqdMEreJ80HuLNkTWWfbHPYWGrYpWFWmkZVEji2FQcYBzhBgYdjuKU8qiauKUUSVdM7bb7djFQUOzU2aVyDR19MI733XZyojKuVCt6h7bqtfr9aN1VpHStc5o6jxemz4xarKhZ4nxHmOEUm5u1HEcCM4SnT5lOvT3+KhNG4snT9DSQi5NaeJZyHPicp25LIlcWieIaJB8E7okR2hOn0nvPdlkrDEEp1m/a2zEKheyDYq0zcdhq1v6h+m6b9EiFzFau84lI9bQnOoFlzTxGBLPkrnWhdwWUp0QiVjxHfgBkUgjdvDldv8a0v0muot7r/NX6UnKFecqzgv07E6xQV151hpY1iqqT8D/RxBbf/1WN23SN+Mu4Vj/9NY4vdDU8AK9ku5a99KFct3E189fZrFp7tbaxN0KNf0ZX27q689dX20tSK3BeAe563maFnRmPRTMi6JVpFsi235IrDov3YBbk47wQIfnELG3cGd7uyErcpBSAvtlU1pKIXVnoJYaOelCrwaqMZji8VkRPpxQpXQbWsHZsN5uRXi6OYuIGp1Yq9MBsR1tMkrd9MERnGcYBmKMWO+oNRPmK2v+jUhBUJ3QdW58937i8dMveTzueffqNW/u7tl3nUNujeuSEckY20glcb7MWHcgeM3sUZmdYRgixkLKGhS85FkfjgYildoyKS+IszRTsUEIo+H6nLhcHthfBxwQvSdXpfhl0eLg+XrlV+/f83y54ozZzBzWg/5Gs7mtUTXS0KneirbeXq1/XWe7NTUByTmpg5c0vLPc3d/hYgChH5aqSVvXo+0FMN5tdFn9ndTEonU63IuHalvbao6yPmcdxu/o7Kq7TDn3rLf1OVuRY8MWVvfigHC2h4P2ZvZlHpKxtg9e9KBRO30NsV2WhWWeSMvEh+++5c3r1zg5cn1+4vIps9TCYbfj7fFeN3MXGXd7rB8prXA6veJyOVPCwizQclYBNRqVgVVq6XWemJd5y61rTZivEy0XWipdYxj6s6gUF7o7WCkVsYloB9XWjn5D+mtr5JRZUmJJGR8M3kcu1wdyWbBGncSkZKrX5jzE2BsYv/HrrbVdWP7lDm5evLfrmgvjwDBE9uOgCFkvvGop+DAQvFpDOwyHYWAYB10rKBIXY9h0r8551ew9PXF+nAjGMB6P+KDhxbVWPj88klLC+0CzjVdv7tgdBnKZaaLZZNM8UeqaNRRZ82eWeaakuTvZGujPpTXqdtdEEQlBu7fWGQfS8yq918MuLaqdOpxekXLid3/vX/P//f0/kC8XmqilsgSHaYbUM4aygZoyzhSchWmppDbjHKy+y46uXwiBvR80sqNlLHcYow1d6L/DusdO16mjxlbpSdZu+VRNKjnn7Rn7yU++xsTA58dHVj2zNAHnEWtpRmg1UZYC1vDm/p5/9+f/FusjwWpzK7UpimQMpRVazUhr+Ob7WabGLWCY50Rrj0zTwsPDU8/iU33fqzdv+b3f+1e8fvMWFwa1bC+VljMlLdRSeDpfEHnqDBCvFPwaeff2NTlXnp6emL3jF7/4Bb/4+7/n/fuP1ALORlprPD6e+Ye
2QKnuvHj3z7zdfk+ypnc5JRHU/v31Od435feDfNynrLjDrrCz4OlJbr3WdfmlR6zMZ07MBOFxmyFOFmyOZVNXAEMffn8cmz9tB3ZmfRcUba8Sx9C+69BcEBdaD8/rgd+5xW4A9QUM82+/2fUsrI79pHShzP2tZEmUXYlSFNCx1TEhiraWBZjAKQ11rFZVDf02z0TUlhf9r4GrD+cWHa5ExUMOQO3jdigtNpwsUrMSWmeGJyE1RPXtRcBe0xdpHmOr3s46pG3qJAsV3DYLV7hS75iYonhVkNiTBNRDXDcQ5cZAfU1VfBdXXqLKL62ZaVQh2/a11XlnWRM6c2cfk2hY/TVhAno01qrxhbHIKBo8NxSphghNHszeFcE7fz0PnRT3/G//Q//y/85Ac/5z/95a8Qd8oAOrrHwGlXtWfWJ1Kc5D71TiyJ++JZc+XDyytffDZRXaGESokdeqC3wEBIRt2i0KLlrzaR1fmh6olpYppOct5vmX/s9YdRtGEH82jUkcPGWQDJONfJZWPqCbzMqxob2TEGP4+N1cXYwtzRGGiMIdOW6LphANF0Q/ohO6kDIctbFr36AZm24HCUhB0bcfeFv6M6FrzsIJSAVymtQYhSbXtPKY1l25gvCdfa0PKWqkYNzgsq2BrpfALnqWXFdOAhikFCb4Igyiy3NhamOEE17anp9B4U2XPiDtQqrXfO5wtPF3HoylkMTUTdsAfht8EPfQauCxreFHE01Eqf+EDYe68s942Tn6FV7qWStEcLHyjOM80zX82JpTSxqVaHvOb2PgIpbOqevIbv2zQf0TV7Xo8Hzp5o2MsSVSsgDKF6e6gcCz8L/q03opf5XDJTD01uTQ8uiKowZFXYLieJaKXTbOZYbwTnSJM0gRe7Zv3HOfBOmul/97vfsW6ZdcvDntnWmrE8bxmxB2nE4X68lY6M9e397tTCHviP+8Pu0T6I/lH+uLNij+yzcwcGTQ8Qu3bvdQaZxYgQVAYY5Xq8zMMyliJNnqSJ1279rJImdcZEtf7eO6IXBzbc8bPLtYW4H+iOQN4eUdRcCznX8SxLNbCo0i9FASiTXXd6Ldzvd0rOTEFm0tn9sOLTDvIYo8xcdI9zj1rb5duGiMYYOZ1mALZtGQmMOCqqzBlNKBV170hsKK2MOX0gBjLd6cw8H7gvKylGPvvsPb/61d+xZXg+n7jfb9SWJVmlDSbG7ldpleB3k5kpRebTzPlylr6E4MlFZuWVTZvMna1TJ1LIUpimE6dJrtk7R6kCrLkoTNWWVyBhU1XmeeJ8PokBTu84P4GLNBdZbldiOvH5Z+/4xa9+yb//9/8H/+Jf/gu8j3z38Uauja+/+ZboBKWPUndIgl60Z2706MjICqeyWOcdKYicMW8L3kcu55ll2VR2KJK9p+eLSPdKxceoTNXCeU4qlbT9GIhRrNy990yzsFpbrkyn82AUo12Dk/EQKUa2daVU24cH8MUKH+15vN1uYy+WIsBHjHFIvM7niwzeVYMcr2dKmuIA9wwhFxdTYbxTilo4y9/1JkYnFtdMSmlrodQsRZbmPsICHgdScwBmNKaWiowmMWBFWBrpeUmI0kRmqi3rSs5SsKbBinUYbLcMJgdhHkytkHwgpiTgmYfL+UwuRcc5qBNtrRKHW9V127nfbnIvtXf2eP7b87Ui15J1+bqjlo1cZP/GEHA0tWGP5LKxlZVtKfgELjhe7jem04Wfvf+C8+WCAIZhJJC9d0J6jMfO7bNmrbgzgyXzkBNmpA4g3OLiAO00LuHAq3HUVvYeyVEQgD4n1aiGJj2j0+4uaaBWN8b3UMT5EHbjLJTK0L5xj6hZvPP0Iq7bIYrk1Wl+GI7XquvEcpdhDDO+Jm6nXa/L4ZT52c97AwY6Ot7GeZyT3kpjW42NFJWMfH6a3LvoHEnByJ1VFgC9tUauIrXsXcZ9bOtd47IOWndCMPjgxFVTWfug//POqSKmKmCuAMEhD7Ic9uFPy6/NQVf7tYOLauCVSGqnjzfZJvQguZfXIqjmwlYKVXPosmlffXCEKag6JI/na9dg7VHyIPLeW61uyJaLTYd/B+je02i4JgB5i46aHOfPnvlv/+2/5Y/+yz/h17/6jo+3G02dQtM04dtM7ZWyecqWmackjHgtnN9d+Mk//SNa6/ziF7/kw7ffkRssdSMkjzsn4iz+BW3bCF3GjuRV1E29I312vquqT4rhoAVnwxOnM+l0Ytsyuf3/pGizlyFdhn69/VoHrfgHyYjlGIJOWXB61MLLvnOK8EBrFnRUCObBTB1ab6zryv1+Y11u3F5fxfo+BLUZ39Geh43G44GIPyR530NxDkUPgtw6vMwCa40tF273O8/vzoLGVwuygiT2LsYstGMvYNMNVwRBbFoW6OHr6JRSuV5v4kbl0WTAE6eTut11MUPo4ip4Op2YpiSOfcXL4WfITNPfq26eIlNVa+UY8URxPuzCLVsDcgielGYulyfm6SQFKZleVu73hbZlnDZOp2mGFDidzzxfLriYWEvldl1Y14VaiyCiKv/roueQ9dFMvc7+TP4/rcHH5/RWQtBqeXjmx595i9rF6Ekx7cjbIemwwwD/WFwqxDAQq7HWzYHF6bPXTSGJS6DVSrkvYpDQZB11dqR70kLIigG7T05hXUHGQMcUKbXfx6ErFsdmsW/9J/J6KxU+srAim9oLkGOf29v7fWTjBlup123FxzzPTEmkS2kSRimmpAUGUoAFk2iY7ECRUGeyLUm4Zb/sn0QFlZSSR3IqgIOXPgVN7IStn+QneqNVR+sHhNBDiDqEtGdas0RXHmErhY/ffYtDh/8aqusD0xQOh9feKG9Fq8klvZcEeb+3e3+SMYQm5Wy9SQ+G7lFDpX04SEsQY4WUEiFGcJ7SHd1HLs/viGkmX++cz2e8d9zvN86Xz+i9cV9ugxkLQYwydIECwn6lGJnmidPpxPO7J56fn5lPM7ls3Jc767qQ14z0V3iN0boula1ovUAz2S20opbfiPvdYK9xlJJZ10ULf3VAI1IrNJf45tsXfvLTylc/+jE/+sFX/OI//0f+2R/9jNN8IqYTDsf5+R23j99BK5Qua1Ekto0Y3eHeaUwvDeebDKeWvxSGp2fmc+Czz5748KGybSvrKtLTEB2lbjrCQ/ba/X7nrHLEdd3wypYNyWcVZ1UfhUmxvdV6o6zCsD0/PbFtMkohpaSMLYOBtWte15XT6cS7d+94eXkZTLQxusa+LctCSsL6vb5caa3z7t0Tu6FOG6M0rDfF0ONaxRr+8u6JyXt+88tfYpLAdVtlDWft/9L9byzbW2ZF1sduRY+dwVR6kXNeWARjcoQNA2SYckgjB2gdYQOcuL+20ASgRcCfvWiG1gurJmLei8wxKaPto0pVSx5JaNBRNt6ZqmDvcTPIWJgZ683qI9p3tLdfwSXnVDFURSqbdIzHer0rs5hHzmHAiMXjEcNHLrQDx8c/BdDZi8q3clrn3JirJu6N5huwm8Q5h0o+O6021rLq2WJyRieOlAdwbwB5+nuODQ1F21GasWX2hW7tMF2BuI7rBwXJ4RzRi5fr
h4eWBYnvu4mOXY/lTMasdez37D2BNsew9yZnS9PcVGPpDmpbX7aY5Ymzq7n6Pp6F1oNqmcrcu4IXj+zjUA9U7VVvjZI7eVvp9U6v4vBoM1+D03E9ToBzMUfZFTJGevTeR24hCgCRJHrn8DEKuxYF0LBh4dajKSCbo1eJy7VkyraR1/0ae+90p+OymqgzfEhMej9iSKPPT+4hOiu5jzw3H9RCW9ilwADNeWKYmBS8kp+Hn/7kR/yzP/45rcPLx1euL1c+fPc1OS+E5AhuIpcV1i69ec7jamb2gc/fv+PzH36F8xO3tbFtsNxfWUvhMs98/uMvef/5V3zx5Vf45mhL4fZ65+XDleUmOe22rgLO5ZVSViGiaLQqsab5hAsTLjoI+xiAT73+wIo2C2XH/943ijFxhmyYBtxe3hLJkUBr0tsMxdF37Y/JuPc77Ss9J4XX1ysfP37Her/RchkRvuv7PQSyTxVt3on5wKFoOzIKR4ZBgomli+IkuRUJWLVWpkma9GtxI3A4p85ZKe2BKHi1BTZEX+Z02ZDkrj8b48Sk/UFpCoSY8OFE613RPLher6yLoPTrloWZUySN4crZR5LkvXQiig28JfYBvNNkrpMmSUhPJ5FtpDiNA7TWhdt1o4RAcZVeG+cpkfFgvee1k5Ln6elMCImn9kRSVO/15YXb63VYf3d9mt9bYW8SgLd/f3ymx+/ZtfuiiDr+nH3/W7kfMFyYnFPLZjzVVbWk135A+QFZz5pNOMSquGoQ9c7QvqbEg/pMaiHosWKsgjJV8ySN0cMx0TlBvziwos7WvpaHzj30AfZPfSY9fEN4NISx97QD+YjkAYdhs/5RrvLmAD9KIo/SY+uruVwuMndM2bPeRZ625m1IOGSqRRd4t2VoYse7N+RLn5YkfrKknRPnKVniclg7tzcIy9/Lb/AuEvwsxYkTKWCz54fZ1as5iCsKXHScMkPr/c5yvz9EuxQjISZh3TlKF92Qstl9LaWSUlSWbWeLrQfO7rskUeau6Ubvzi6tFrmYsD3iiDb5KNp6hMlyDqYkBhm1CisgMtvMNAXW7cbLy7fUuoKT3lh0jUisEoYgRMc8Tzw/n3l+fuJ8FoZo2zJ5K7juiCFxdM+SRFAs02O0vguZXQgoIxLGfKG8FaqarkiTu8gIbf07VykVQvLkcud6/ciX9Uv+yQ9/yD/8/a/42//41/zL/+bf8PT8Oa8fv+X5/ReUvNK2hV6zrne5p7WJYUgIkxZvOseqCevlnTC83sm62NZXQoTTLMzUtlVeX195er4Qp4ltW/HN8fR0pq531nUjxonnp3eU0rjfxQl2Umarlo0wnfE+EmNHjvMykspSpLE/pZmU0pA2mqGNJTvGsskg2sS2bQ99WIIr+RGnJaFVIKV3YTX6zqKvanw0Cm1dz6VW1nVlOl8GmGoJdAiBUmVIekxa5DQPGCsVRuEyz4l9Btw+DLr3viPbAxStimxrxlA6MSXmWUYMCeMjvV4SR4Uh8CgwWpv2nkrhIHEsDQbO+yhNRh5imjifZmIQNUBME0HdUZvG5t6aSDO1AKpVEn5xJFUTNXSWJW6f9Yq8v0+OvK7EELk8PXN7uVHLdjBxEQY958zLy4vsPSfJ9gDT3PH+ABSN6X7cu96lp71pHhGCOLCaQ6Q7RC7LRXp4BC+932WJj+fN/jKWztn54UwaameNjqCJUrDaXDiRLepnUFmcd5Dz3kNvI2S8nmnO2D39vUcVlMXbsaaGAseYyDaKdTnrrIA8AJejZ52H9zVgp1anctjO9XZT9019LvYeTow1bKB3H2vfzhP1JPBJP2Pcc+AQqIjRWCuZkjeN6TKXMXgpNIcrYm07iKA5U3darOlwcZEW2/nhxW/CicrGejd9V8VZyeJoWcVbwCmoP6WJHpVAccJINgUTQeZDxmgGVrpIkPO31EagjnXT1exn5Gi9D4CgO0fujft6g3oF54iXic9/8iX/5Mc/glL55d/8Dd/8wzd899v/m9fX31Lqgoue3rOMdcjiq5C8I3nHeZ54vsySh0xnPn//FR+fruRVxkA47zg9X7i8f+b8/nMu0xPRJ1rutAqtOvKSqaWQt4X7/ZX7/ZVtvbFeXymbgIo+ON1bN/xpl3x/6vUHVLSZK+C+0CXtFgpR/lJzsCOyqQvWHP3GgX9ElTpq4CEyk8ceLG0cr9oITGVdF67XK7fbjVY2fGPMITkm8p9iWkYg4PsmDCAopyUQ9r1SAzWq7/huUifH6+sr83ea6MaZ0RwbVMOdInGayRhKbrp6BqoDgiAK4qmNyU16rcS2N+B8INcqfTheDn2TInRQVEOTUaBTD5T/Y2FzlMbRPURJYM/nmfefPXM6TXSVMBpiXGsntwrzRPKRtTSRdeWCrxAmR/CdvGyUDrMWKMbohSBMnCQF98MB4UZheXxOn3odGaLjZzj2Xx2f45Ed+sfes/U9YfTG6NJlbpZ7lJAFlYQ62NFZ78ZiNv176w1pAnZjgKjvMh/KULIHyWE3+3qUMZOiDkVTATF3UZT72ItwZMyO98T6vj61H46fya7DCg7YZYfHQs2+31A/ew9D7a14G4ho66MZHpM2dy3YtGizZurQC66L258Vbca0CWDi1ArZUdWcJcRIcBMoM4UWZFXRZBwil9O29+b6MCExW/9dCmzJayWvK9frleV2U/twRa97x8f9/hhr0XsfDJvFDpHqBimiigAyhgzbnpSBxnodaHxRAEeS5Ai4YThk1xdwiroKIrpVmOOE84Fl3cQ1zXu+/fobWSsRGY6dFxwV76XHVtau2M97Hwmu6sEsw6U/fvyO6+2VWjq35T7iQc67257Jy6XIl/Ur7oSCXrcm7KGDMSBdbKKt53ikQpIgOCeutJp8OQIfvvuOy+W3/PRnP+Of//yP+Ku//it+8rOf8/79V7y+vJLSieend9yQ3siujn7JR0qWRFJMQZD4ulN9ItfyCpS4RveNZXnh6fJegcdKbbCsK6lH4iyJ2LJunOPMVlfut40cxQlxnmfW7U5KJ5z3XK93So3UFrWnz0kvcJC5b6+3qxQk7Hv5+E/vfeyt1poUkE9PyuRuY/3dbnedE3ge+1zONkkmDTSwvWlxoNbK9XodjEXeMi/Xr7lNr9SyJ/MGRNRS9ZmrZXY/mogdbIjfxIxju4MxszvjHBVUEbBCDBSsV8YAV+2tVKmxQ/aHsHACxgQXaL7TahHprXcqE4vklmkl0ww0CZ4UTgo+6T6tlewKy9bUXCvQnHYyd0QOjAA3xkw67TnKNVNr1168pMoW+OLLrwgu8O3XX7OsH+SeKdBEPzBmrtHcnvN4dhm7xOC9fwh2x17vHZ1DvnQAup3e6zDGDz0WfMfXWxUJWjgKqGYM1vfPakAKSSSuhlHIH4vOPt5Z5s7ujFRDwCnhs4XBFqbLYMo3QOQnzjLvdjmlfNmgdfs+KzZ3xcJOJBggqYYqbl+jp9Np/L7aBKT2WqT11sy/Sc6TuIN1cq06L1Xfy3roYory/fNMp0oP7v2KZB798I+MJMltV7+4GIb6YppnYY9NuaJAvo1Xwu5Fh67AzrI
sLMudta2kkJhTYoqJ6JN+TgEgnA80HLVm2kF5M6nxiqw9xn72vWoryX5PwdQXfozfck5Gh4SuEmwauWaojdA73/32N/zZ9X/n+l3h9vXGr3/5D7y8/pbSMjFL/3SvWeYt94YLmTR5fF1Yrx9Zbnd8v3BKFy7piQ810F2h5MJyu/PBvzCdPseFC8lDmCZcj0Qibm4EHPTK56a66RXWTK+FnDe++fZrfvfNb+lT4t30Jb/+1d/y+15/QEXb48setL26Wh/T3wQB2+h+T/qMqpWvuRFsLEDLxtLkSyn+3gXBKT2zrCv3RQ4q1x6ZNbu2twzb43WjCeDeG2SL89gcO9g5ugRo/Wy165DPGhQtMrZCEoDeOttWmUslndVV0Xd8F8lG000+pUkQ8mKHaFPHNUfRIanbllXu7sl6+EY9eGKMeoDuphFiFHBG8m33vUBl96VWOQhbFRTxfD5zmmegDemMfJ+wBsU1/DxDaKTc6ATKVokucJrOTOezjHdQ22u7r9uW6W0/7MehMpCYTxdUv2/NHaV9b4u2fcDzY/Lz+wp5W7lmEhGCNIRLT5YcQh3/5ue6MkU7S1WqzK8LXr6GAhpm04vKalsXkxh5X4bctXeh/FsXNMrGawz0ar8DSIOvmiwok2ejL6zgs3X6cF8OBZt9nrfMmwEVx3tre+EIYtjPe+8FFVfNvyUzTZnI4KL2N8i1g6MYG35I3qhiFGAHT7D+E0sKuyF4juCTMDnap2RyQns2sublvld2EMfRJQZ5M8/wwC7nccFRtsL1dhWGzTmV3TCQZUtOR1Gtv/OtiYvcH5FLVnWwMxmbDAA3kMX2iaxbQfL8sIvfn7od/o4UExR9fgoKnOYzHcf9fufp6YL3gW+//Zbz8zOtFZblhnMNc6aNXuzXd1mYOEHWUliWzv0+7iRbrsIq6BDukndXS1EJcLiffXy23osme9Kv01qT9K53ekWRajU00fXRFEp23pNLZprEkOl+u3K/vfLF+/c8nU/8zX/4j/yrf/OnTNOJJS+cL08s9xfW9SYmLa4Sg5g1Ze31idFpkts0k2vULm6C5pLZnJpeuUxMnnfpmetNhoznsvHZ/Bnn05lWMo7OPJ1pbaHUQurS4xaSY9tWnBcnxtKkYPXIoGAr0OlO14xjWzO1NpL2cLoDmLiu65Acm+X2NM2jALOv5Zzl97qgLpJdARZwseED5FwV2Dwwza2xbUVBl8jp6Unk707HSfRK3laW5c7lIgzctm170fCJ+G0zEIMaLD3kBM5irhWQksDJcwg4l/BBwBgDF3wIlJaHIkfA0UIIIrneWh5nS2uNLW+02klBWLWASLJaN6t3kUaWChFjnQLT5M00fRTKb1UGtl+2bRP2PwSSd7ggA8drreoeGEhp5rPPv+B+u7JtLzK+qMnOiini3d6nP9wje6MvUjSKGVjYi9zRerGfPbhdzu4d+HlSUEdHiGiOpjymnFt6Pok83YyFDgyeFmCf6mV+++pNRE52f0Lov6c4FDWKAVvWG2iulfY7jBFqb3KzAf773cVRPrPEED/6gSXPMDbb2Wd1auaFMcBGLDQFynSWrq1VaV0fsT/E+LAvrSpqKu/1VtTCMAjzXph8M3eiNBrSO+08uFZ3K/ouK0+RTRrSmylMmvRqppTEzTim4YnQescpwEvrMgqgdlrOMkpo26RfLWdy3ugRXGtQKsVlAlp0hkCcZuIk6itPhCYGJb05WjUKVo3n5LAmeKdqo4P/gFEHVbwPZDSPmXoEgovE5Ki9MF9mvnh+R3658rtvbpSb4/W3Gy+/+y23+o3EZFXE0Cq9bCRXyL0Tm4KSH77jY/4Fvf+WngP3lyuhwjQlzjHRa+Pluw+k9I7uE6enZ05TFBmwFu6tSesBzoEP0D2BiGtQubFVaD7w/suv+OoHX/J//m//6/f2gr3+YIq2gX2MjSh0tyTQmliAHtr1sMk1XHQ0HNqQbXkPoX3bKNY6VQOBJZSSaFV931Iy23KnbIskeyq/cIqm7Jf56YLNHTYXnaH5dk7Zt2YOjV3ZQR3YaRvfVZqD6jtq2E+tEGuDKkG8OdjKxrKtpHpRhFmvqXVKtuZVYytEgxxDxEVJflopGoC0KdVFgvOEqAGgyf2MPjGfEtMs7lkOQR5LlgPezA9w3mK1RFoxmJV+Adn9ci8clNwoNWtSDK1XSitMXZqM319OpOcnXl9vuDgzP73DxUS5ZzHgqKoPb1UHLYoz2LYutF7wwSyYx5PZ/3AmY3BDYiFOTnLdYoAghf6eRFvQloRM6hZbdx2TLfTOw7q0AlxK3c6mxiC5FGIQm2Q1hZSBqV6lDh2cb3TrZa+O4r0k0R0cMnMs2ODuLkxxbo3gugy/1ATEaSHmXZB+Nd1TpdZx61sAACAASURBVDZd200TIBnSbkUZve/3rTtNnjXxiTpcG+tteOyNEzbKDlfbe3Zw79vc7lnvhSG1QuRHItGIOm5j2vuvjDXoKm+0vaibbJrEkKJbQY3D+YnewyhMxCFNEOeqjfZBE5fdNEV7AGw+jjpbmGuaUwReZEkKEMjO0iRB95UeNNu68PLxhXVZqKWQkvSlzSnhFRypreFiHHNnct7AwTyLa6QYNkBvYspirmMh2Ew5aE33oxVszsCeIMyB94e5bBL/RGLZcUSci3TfcD0CkSkG5pQo2w3nVmI84XxjKytfnr+ilY2yrZIMNCmMqva25SYza+SeZ5G65FXjkiSN4/lVKbhzLnifJNb6oG6pQeOJfpbQwAWIKmNrjZ4FkMBLoYA3h1GnCaY0OPTuiMFpUeT54sv3dBe4329cnp750U9+yt/+8tf8V9dveH9OtDVwz4E4P/PyesX1TgB62YhemEOR53nMETd4QaZDiMQgUqPgHXNKKuGBl/tK6YEWHNU18rJx+3ijL4XT6cSGxLAwTUzRs20LJWdOp4ktd/omIx9aBR8VzOsAKqXCMyVxqvVd2IJsMkbdF9450jxRswB4IUi/3v1239mpWocT6e70l0ch93r9QPddJPzO0UplWa+kFHl+ehrybO+EBehlxQGlZaqTc6KWgquFRCe4Aj2DKzJnNKSD8kP6cLbNpMJJwL2Crl2nSgPpJfbKRtNNTSPx1jkDs6oFLTrSs2hxs9eGj50YA6d4pvdGLpk0Sw92ocgMqLLSe9XWAKfFquQpJRdq2aDvEvDTHPHBy3MzRtZMqhSUsv6/KptdYmDwrK1R8kZwfqz12itVe8RwO7smwJ2M2KitMk+BlCYxE2tZ48g+YiTo4OcYk8z3036lEOS9RBoqny/FROg2a83YLoEfQzjkSC7Qexznn/XpjuJY2VabJ2g5lNc8y+EgHF2ZDSD7BGCqhQmYcY0aWTWRztsM0lYbxTlataLuUSJ5LBxNpcBBYrvPK9Nn4KQwleArLTF4hx4LApKP9MM+m6ikRk6q/zhJeCUvdE7kn76LKYmeta03yUsVCDByI3hVTTm399d1wEd6ExduelOfBU93iTAnaZOJUZ69qq9MCg0CNnqnLTvbxqZjhLZF4r6oZhQoD47aAZ+IaR6jTIL3UqjFcDDI89QeqE
2UQ6UVoo+Mu6/Al9OzuVu/p3pVNP2d0k7SQYtln8STIncxIbqcz4Tm+fjbD2xrodwb1+/u5HolTQEfBfippZK3wtYK1TVKd2xNyuFYHXXNvF6v5HuhbJlaV3IrpAkunz1T88aHX/8dpWx88eOf4p/FbI7uiS7tPhFDyusGkPy6VL75cOV634jzPJQRv+/1h1G0OTSCySZzWtVbVBNrZ6fIrRsoB/SRdDSjTzUxlQRnR/SrultJ46QWYs6kI43apW9r21ZKXvFdNos1x45S0orDTzAJR+RofH0wEKKnzps65YxgIdiC7+B9o3i1Vg2OtRY+Xm8EF/ni6R2nOJO9o7imtuKFkjMhJUEtkSbX1hwQCH4ixhO9F0rW69BA4dhRJdn4YivrVc5jcivrJwLUbaew5CKNwXZAOEX3uxYCVXo7zDCgFri6K845pln62FqTBLg2maU2O6G6y7oyTzOfvXsiBc+Gp0ZPc4EwRciVGKA2GXAsdYI05JaatRAtepA6nJnL2JIJFnL1CGhatKHMqBkIOCu0D7ICrwCCs6LjEVV7WIBjIYiG23kZY1CrsFaDXbIm5VpxVQXC3hG9Z46BmAIhOEGpdB6OSKsCuQrjoJwKeC/DhPveBhhQtFLR3dHz0bQgwZqCxfWr89Yx0uSi/uHzddilgrqQrdaTxNDjNWku24b3xlwpkBADvgdFEmU94zxpmqW3RhFAr8Wq9J/JgeQQxFqkafIknTKIuF0qbYCOIck7qMJobBfQZHfPDMog925SWTf28S4hQg/mqA6lbUhLnQuKdsrvWreN+/XK7foq7ocxUBSierpcABT8qGr6YzJoG0or4xVk0LT0VwjiLs8wREdKZtxRlFWww3wHtITB8tpD6kbcAnTwtiZOzkwNZLjzPM0EH7itL8zJcT55Xl6+pdO5nE/kbZOkogjbI0y+9NbWJix+IODUFr1h91MS5jFLCUGAvRbDdJOiuuEWV6vY6wu7K0lUimHYbROkvyK3RtE+HJs96FW2KmiUDPuu5cZvfrMS4gTe8dnnX/HlD37Er3/9G37x1/8X//q/+++5XwMv106cnzg/f0HZbgQykcocIM0Jm5327vmJy1lmucXBdpfhPBxaGRLVUjZupeLCzPnpjGuOcs9sOUN2zO/PuOAgQKFRqNKj0jMpeFI404og8TFJQBJ2purszRWv5i/zNFFb4fV6Z5pnzs9P0KHkDN0xp5MUy67jEMtqMTrRUQF5w7nd2TDGxP1edERAJNcsTpG69y6nmRQTvRRK2YgxcZ4TpTluy0ZMkfkyj0mVp2niaZrwrdK3Bd+lSd95N0CRWhqhQ/CJ3DKlAi4yTWd8mlmzMFGAAI9B1kX06QC0CtizgyoWSzvmwOdixLs4QMF1y5gCQ/ZOw4VIClF791ac67is1+v9KMKqSRVx0lNeOs5H5vnMNEVO84UO5C2zbcJQ1iIsER2mmGiuaTwTpD7GMIYVP80ntvWuoMTO3ARnoKuAjHsuIn0zkjjuZ5bDCfuk7QoxRZovasggjH/X3MlURxLrVLroJLG3XKfouehx9JGr7V1wLlrcsX4qY08fe3M/1ZIAPDBnu6yygypwTJUkAI8nqeGUAKKVFCeIYfTrdVNnKKDRQaXuTsfb6OdR05hmZ4NdT99j17GQbK0N+aswgeh6hm6fwT7XfptG3tudGwW/PF9pnzkylvtDFNa/9TZaJsQgJZDmhOkt7ezt/iTFlYJM3km/nevaptFVmVEat/uLzqJcKdoTm0tWcFkl/a1Ruxj9+DjTw0RzgToMSxhnUnBOADcvQOzIN7yj67/3NmjJ4fjpnTqDWk6uZ0TrO2vaSsEy9uDFdOr+KgO885bJ60bpd3pYaVX7er0UnD04WnSAp3pPxXOvDm4Zv11F4pgKvWdaF8b1+voqDrU4cnnlQ9mkOC9qqOUjxSeK5jcOiQ3BB5KboMK2VZn52B15q1xf7/xjrz+Mom28LNHYC6MhVdOvmKxn9IO19vB9wFjQlnC9taOHHf2Qrxdy3ViXhevrleV2F9eigZ5/LxXX99j/ZiQVb/7+GHisr+h7EjpNjmXyuxaRrXFfFpkV9HKjfr7xgy++xJ/Ouln2oCEMVtehhVYQOrxPBH+i+0IPTQoYZIG6Jmm9DT81C+a3Qccm1Zv22Q+NtVMpliRBw4Go5BFUfQ+Y/WuulduyspasMiKdBdIr7z9/z7v37yh541u1xF7u8gyqD7goTl7RBwoFnAwL1k+vxfY2DoF9xIPcy5F8y+RCQgxcTuIOd3+90g42y/b8rNh/++97Iu8f1tynZIEAHkV+cn74fnu9HTAqfRCKnKU4no8V0LbO7H63Lr08IU5jfQ6Awa6p9gcU9ij/MEbwuKYf9fPH0RQ7kmms8/G+HV92zzpd7HoNeVTQo6jBgUlDQhBZxnyaH4xIHp6JrvUQJA4Yun2UW5rBz/GaHt0U+8PfD0dQPZx3w4rH3p9jbJGkxYvLY1f5tCK5Kew9EFvOXF9fuL6+jusyJH2apofD1+75uq76vfKMWkfZbLunJvFu4BoxpiGldIpOH0Etk5LV2kTudXiOtsf3dacFsI9aRDum+YT3gVqaGMBMJ373n/+BFGdO81kUCQME0aGm7OvM0OOOo6spgs0VE8mLWMHLMxOE16SScn8qsBuyGEBn8+mMNaxNEGjn3F7sD8DOjCikQ6bVTtOeuBAn1jVT2m+I6cKXX/2In/z4R/zVX/w5P/v5z0d/R+uN0w9+yHp/IfTMefKcUyBOkRADy/0uxg2tsy6rmAG0NiSHrVbOOvNnaw0fIpPzrNvGaX4ivvNc25VeOrdtpV4dYfLMUZx70zThqxN0u8NlMkaj68DkMNyNt21lniZK2bgvC753tRUXZn7bNmgijerNkr/dlU6a7k/Utsf8vbdsB0Is2MQ46R6DXvfB7iVnmXWUpKidp0nmMXUD+jQhMyCoibTQ3B9bRxNxLUSQAdnzlNi2TPCOpAz8tq243rlczjgn5808JYFklaGQM9gKGYW1tLi32Y/WV9+76tewdddG0er97hKLxtJp3ll668mlyveatLmWSm1ikLMsd2HeTmdSSpzPT/TetT9oZV1X6aEtlZJlfE2MiRBOuC49MinNwqBbQaVxyRi7UiveR+Z5wvpCDWzbiwk5a5yHVgsUkfCWvnGvlXuIwxF0OM5aEaH3RWSURxdaLai8H7mAGcbIte5n2lu5/Oi91vd4GO1wOAvG79WYLDC1/17s/5REv1bpf05BDB/2/vo6hkJXZOSRKDDk3PIK2vp+ZP668oiWc7zJ7fT1tu3ibTZ5zA2OeYc559pneXsuj3t9yNmOn9s5xGwumJmJOJLmHsHFUaw1NeAKXlsQSmVbZZ0u9xdVdohzqvRtRxo72OqCZwqB0+VCjDMlF67XG9KbHQne6TzBIM66XkcHBBkjYMXrHveVBZUbTzoWwod/3t7vRh8Ag8yV3N3F7XPhJD7VXMlbpVvOFaSXvauSD2+zNgt1u40B6JjCpjWWZaV/+5EQ1b9g2bivKy9f/4YQEtM0M08z0ySArEmQ0zThwhPbu
vH68WuW60fuy4377Tu++d0/fHIN2esPpmjrikY0Or6ppGygN3txIsMsHwu6/T36wz+wJ9xvk+mHar3X0QRtA3dbq3vBpsFNf/Lhuo8b5vieb+n2t4n/8XqrHgTVrs/JZ22IQcfSHR9fX5jTxMmLgYIdYqOvSA+irvI6OrQKtSKuRU3kO6KAFho5BE9UZ6BSC61LY6Xok8tIAkEQohiEWTDWstUymBKTfUrMUKmWonsyI0oawHt35NI0ca9075jmC6fLszRlXhfu333Hum7ymZzQ+qLb7wQiPhRh1TSo2mFnCJsFu/G8VCLmOnQnB9v5LNbVKQgTVGtnq5Wsm/v3FWPHoPK4fvtD4HyQbsAnf+b4c2Ktm0ZhFpIekjHoIF/5jF0T8aY2w4OpsCCHoHZNJRvWsHs86Jxz4/c8gh6MYuE4e/AISJghCE5mNI1G5kMh+L0CR0GJ3q1IkMci5hqzMkyTyp8egY63hbT9t3ee7vffcSy47PvfFp87CvvozHj8uiVc9rO/7xAGKaYeWXNB/npTM6OXV5ZlAdBhx+Vwr/sDEGLrSoYe18GeWbEGTpOaOqyZY4qH9Xh02bPntu8B+x3Hz/X4fBhr14dAb47gZWhqzlVGdDy9Y10qy1L48Y9+TGuQtSDBSZzc0V8zY5H5PrV4WvfUruGpcbCsP+yzoWJQ57+xrvcC07tA8OFh7R6f40jADs/bijYnmaWwpL4PA4hSNl5eP/Du3Wf8+Cc/5ld//3f8xV//Nf/jv/tTcutsOfN0nrlfT2yvH+h14X5baDdxYCu1si0r3nsu5xMlFzWGCmoCJUnlNE30bVOH386yrlyXzHS6MF9mrq83GmJOMjExn2acC2zrInMCfaDlwu1+k/mDPmFzzEKUHrFSRDa+NZlLFRUsCCEQYlRWTO7Xuq40G8gdPFsp9C6yydb389UMbgwYs7EB27Y9xrrudFad9LgJCh1Y10ynilxUB39bMbiuG7lUlWhHYXuL9Do2/d2jx7NWXOi0srHdryy3V6bzheCFVYwmpewNmsrkgJptnqrFi0pvInvrQYCvViS2Si+NONrZ+kra83O0G7de05Izt/vCPM8KSLqx4szd1UnDzXANHvPY8NTYRjEYY+R89jKIuck8znVb1AVQ2LPeO1stXLmzLKuoJfoObI/4hzBz8zzTcdQsUkszHBkGSLZPMfOiDn03lpH9UYS99bvJyzRN4+yi7TO+yohFNgZIYzC7vf4x6bb3szPpGJ/t/h/PhE8m7n3fy8eYYa+HnxdSbkg+eweMcdIKrTtPb7pPHjwNBAwWGXwf5y+0B3dKeQZ6ToEafjlt0GvEaKBdY/dMcDtl0dmv91DIHs/Wtz2Bb89fe5VcH++Hq4RJAbXuVeUgQNiWM8t9Eat+A5ys8LYh7KaGCzIvcD6dOJ/PTPNETBG6Z72v0HTGbRVpf84NFu2p9YEQxezEOTfY45QmyU+VjZKHx0i/j2CqxYTj5816JgcdtZFzVqfjOnJjMVfxUCTPrbUIaKl5irmORj0vnfdEF4VNHNJUTwgzsKk7twCj1a/UslJuLyPPEZl8Yp7P0i8YIqfTmdPpHTk3Pr58YF1e2JYbuEZKBwb1E68/mKLNgp0zpAAUvXCKjO0U9j+WDO+b2fpl+vc3+OHnBBFfuS6vwu5s2egADMHZS5f99TY5PL6O1/T2z7fJvCQReu212k7HNm5Qrfp9WXi933Cns2hwa2OesiIHgnR4L4VVDIGsB0TwHhelb0JbPGVzxCCSvZJZ141lvR2u31h6a+aX4LJtamigUki7T3s/hbpXcugn0uJjIN+atJXayE16Bz5+vPJ6u0vP1WaN7HKNIUau28p3H+5sW+dyupBmaE1o6B72IcRSp4iMTm6rJsj2oUDnv3lOJ3Fj67Vwb4WtLFpUPA6X3ot/K+GVJVCXo30MxVEue5w0oyzqmyLeXtYH41UCAIKEnc8XppMEdrw2kNusAQMrnL2fe1jbpu139pY690T6JITx2XQGU4gRqkidam/iduR2B7GjDfJbFPPtPjjuw2Phk8um7yPryTun9uJSFJgxCHR229/9fd4alIjsRKLv26Lr+BwsibDPYQmDfd/bIu8tivl2nz7seSd9HiZHsWJg2za2ZdEZj8sDYCRFqjxTmZklcxBPpxO9d+5qUDLPs/RKtargiRZ5uZKzSjqDVwZDikcBBm0g7n7/7Bn4kB4+3zEhOxaRAD2IQc58PoPzLOvK0/OFebrw9//wa+Z05svPf8h6W7m+vrIti/YH9mF73UYDfh/AW8dR256IucGudQV0dJ3XQu+7wU3X02+fmaXX3xq9l8Fg2mF9XCv2OYVpVBa0Zul5QgY/JxfoPlBzJkbHZ59/xs//+X/BX/zlX7NumR/9+Mf85je/4fV6pa2buCkuV3rNNKQfN8YoDIY6FKYpaEK8O4K+3l614FS3xNqZemfJhY078XImXGb6VtleV8zp8/LuhJvEBCGETnSevK5igDJPak8vhQC0sZ7WZZFibhb2OmtvzzzPeBzLsnBf7jx/+azfv7O8rZncT4qwEIV5atqPWJuMVgjRy5iKWlmXBdfh6flMDIF1XVnVcdQ5x/16Zz49q4Re5+sNkMbjtf8PHK10XHOqJIDkpe9zud1GjO514+U88947ptMJF6DkBboMspaeKU8plbyK+ZUMAbOkvoMOTzbWHpUhyniMfczGEdCxOGAMmvRCm8xrB1+mSZicUist73nJNM08PT1JnC0ybkGKXz9YLSvC5kniZC7SP960QN9K5eXjC9/87htlThs+CrsnMn+Y0qSsoNOh2Vk48ejHLCwDnLyeE711aEWs4aP0MpvMSHrARU5to08svk0pDXMRf4g/D7H8GFM1x6gyyBUzuzK31cGyWHzSRFrknWoWoqzYkDg6K8psHhoPYxTkexV8PICvTiWtw3m3N7wy+SI1bcObwAp+G6/jnAFFTvvT97PqkRHUnCgEtGlBchSdndn0jLLPo+8AD//Nw3u/jXNvVWcWNwcLLhm29JZuBVMwGOC2F946NmgSYLt2Ybvocp3eO5nZFrVfN+5yfOn9F5f09+/fCzDZBICzFpZeC6XBmjNdlSX2PKc0CWASwj7LLoWxXgdo+6ZgHXuSKLPOkNEHRwUWvasHwj5mRIpSBrHgvNcxJo3sA1EZNOe/z+zJvRYGe5xNQfZRShBSlHaovHG93Xj9+OHg9JxwPumZWMcc0Zh2SfLve/1hFG26mF3fkSLbJAfFpCblPCzKY+IFe1JXq8nkeFjQR/rYNiBOCqZtXam5aG7+/chzvBS57D1R/BSD9paB+9TXJHl5TDadFh1C4wZ8irpxFNU6vG/r1kvS95+TG0jwjtNJ2LXWoJVGyYVSV9ZV+r+cHlBTmvQ97d6ZfGJHvlE2tGug7b2PYHpkLlJIg40REEvRi27GF4JmTpPMfrrfFmHcYmCikYLMnqu16IBpmKdJDVu6zrepGiy9oiUm0WgP62FneRT9c04OWUVa6Q0ZmrpQ1KDmuL7cQ+Gq8lPNQ+29j8/4KMPovY9ezONaAXmuVggN+REy
EDsFT5omYppYlruaowiTI8XwYa1rQuxdOFyP06J7Txi7BiM7xESb3/aZN58Igsd1a893SFic9I18Srph/30s/OSfqL1ugRRFNuVDHIjaESU+IqzH/bWjtm7Y9B+LouOfQ0La9j6JT+3btwfA8fWwL998rTlF5hVkaaVwv995ffnItq4jOc8lq7RpL4ABTqfTYOAeB7uiiV+lFrX3LzrnqTdFueMo7OSw1CSkysF5RJmsp88OFruGgUge5G8AReVp03ymNRC7cTGp+PDhI+f5HefzEx++lV6HVstIDKSAlYK6ez9m/vkQ1cLcaXIm9s+WnNoekZve8B5a26W80u9y3G8S8/onMsTjejgOzu0aQ30IzPNESpFpPjFNJ2nY91F6ful8+YMf4v/Df+LP//zP+Xd/+qcCntWm+yziwiSshbQBynVqsllLJer+Llqg5FJZcqXklafTiSkEKJXoHNF1Xu+vpOg5Pz1zZ8Ul+QzX65U0RU5PZ1ovyPRoib9l3ViXlTQHoo/0vlt4R5XTxRRU/aCDroMmNLUyTTPv3r1jCpMqTDYFDcSwwnkpagQ8aAo6pF1CnPZ9e7lcOJ9OLLc79/udKSUulzP3O9xuVy7nM5fLhXUTFYvMJWvKFnpqK6okUBCqN1zT3lOV9pWSlaFD2Mya+fjhW0orPL37jKenZwE01egipVkALj3Log80qlj4B3MJ3AEAFLQrZRuJVSllDBm3OGEywV0aqCz9AEEk1q7reog1+7rctm18TYAr7Yvt4sbZ1RzKeR334+B0kj44IUUaa7rx7fI7rtcr27oxJU3GmzC/A+hoBbITsUmTpune/TC7wIFT5YaccRXBHaSHTz6bPGUZu2HnQCWXPpjHZZFC2XsvvaW299yhhUXvVdfkWWLdXjx1NJ/wDOBXjDzUuEEoK43/8v9e++rwnjhk4WHs/6PqxQrv2vtDDvW9+K4Os15VSWb1uJ81kRA7sA5A2DlVv2h+Amgx+Cj5s5e17ogS5tG12vINex2BKHsd89jfV9RZj+BRNSPPRaSNvR/+3v50qsKJkuQI0xRHfuC8J83TyAV2YSg4dYgoGh8mdX2WZ1epNVF1Zmrp0PU5VVWX9N7GObf2dTCcPvpRGIpDcvxkC4Vdvz13cbzdHs4DW5f2snPBZlTmrC0MmuOIbMAJW+52tY7dc9vb5hAavKeWxrpmZqTtKJwSp6qKgpzH+u1t0xxF4lrwAUKk5t+DeujrD6No0wdnW3Fv6FQLY6fWtSNx3hOco6vVkU3bEZAdbR8o/aFirrUM96WYklqQavNv1yqyq+wORF7n9kr/+5X3PuD2U5rbt4j98e88exARlEcWUHaOKUTpDXKoy08Spx9FKLpzcoVZGYZaKXUhlxvLfcV1JwVbqSpTEDtV7+0+WUPwo5T0+6g8e2LcGj4F5um8D04+MEsjIdZ/z0XmebTeeP/+c+b5RFb9sE67YgbctlK2hUbDJ8/lNDNNjt6kt6a0O9tV5oLcHSNRgX74dz08Qa9J3JW8c8xpIjhpvs5FHPpCDOLWyB7k9rkhb/u23PeDxaFYO5rSeMQe8rgOBjqp8hJAA5EgLDGJ1bRTZooBdFmRgBZxfi+exyG1F48O6LVR+2Nzt8nHjhLJYzA77i8ruo6FkPd+SICOe2Cs26MTmBcXN5M4yb1RZk2Tejm40UOChyLibWE7ZFr5MOH8sIds3x2Zl3meH9he+6zHPTpkxrU+IOp2UBzBodakL7L2iqPidN29vHzgfr0d7p3EkSklYtI5W6v0q1wuF87nMznnwbodi8fehd11zlNKoxRxZJymqFKtqAiglujWf2NJb93vXYzyPFJKDyyyrfEj4CIdLZ75dCZNEx8/vop5UIyseeN+u/GjH/yU2/VFLJ5bEYRZbbD3+CA9td05Kk0ONKeObuFReuucI01R15EUdcHvJjCyr+Jw0bS4HqPK79wuibTPdDxYbf2mJPODWitMKap1vRYGpbHkG19//Rumy4UpzfzxH/8xf/VXf82f/Mmf8NWXX1LWlbLB6XxhBfK2QC/4ustaHeL66X3g+XIZw4ibJuEhOnIu0MRxUGYKdWKUfjWXItOc6NmxLuK0+d13H7jUJ95/9kSrUshF75nPF+7LKkWGAmeSQC989u6Z5+d30Du365VpmjifLqx5o+TCtqycTifut4V7u+r6RuV7kkAVZXljjA+SQAMZLBnN9c6y3piniY48h9d14XQSxjjEBFSutxutB1KK1JKZYlD1jPy+ZbnTWhH2smR6sb7YRqkbwyXVO5ZVWGkfijBVpVBr5t2799IW0GS49+k0Uau4K7fWiMHpYHaR07Uuag7o1F6Youy3LefB+tsZaDPtRo/asVfeR3orbKUxTVGkxMoimJuo9wHfFGAN+2gBi0fCerQBxFV1gZ7miXlK2v8HLjjC+cxtngSgUWfL5qIWe/7hXBHn2CJ7JQQ2NNPqauFOE0bJqQzW99GK4rUP1p73sbiwlgTvPR4x12mt4WJQubuaXWku5724CKK99cde3sfcbR/8/qk4f1RBSByICmY+Fl9HAM/+PsYoRSqPoPrxDNv7Ax0B6YVvTc3rlHGiNVEi2M8jhj1SeCsI15CcTQsxY4vQUqeonNP7MIofHzwx7jlXq1WKoQM4ucfElzvcFQAAIABJREFUx1425yz+2n0K9qTHdR7X7VuAuVUBmfqhb7HUNuahSuEE5Kygm7OHIrGDvTd2DKn3NjJIe1ODsmbdq0lJgFlJmgMY30fRtdJaGWfksix7bnU4x+2zeOdIMSro/1isGottEthZf17u3d66IY7Dqv7QGXtBky67z97bPNSG97uksTvoeNYsCj5xYs2kNIlD+yR5nXeO4Jq6AhcZz+DANYdr+zP71OsPo2jDKvXDayBB0otkjNQx8bSNfpTRvC2U3qLjn2K/xG6vS0/G/8Pcm/TIliXpYZ+d4d7r7hHxhqwpu6olqtkTFwQL6gYaBLTiH9CSggCtBPAH6A9IW620kSCBO2nFBQEttRQgcMsNCQIsVWVnVWVlVndVZeZ7EeF+hzOYFmZ27nF/kdmtXXki8DIiPPxO59j0ffYZA3vbOjQoljPsP+vDQP6Fa7qprrxUxW+b8facICdQtQpYvDSoD5czjuxAk85B0gCtsI0t2NWmzucnzOdH5FwbmuFIKihcCwprYzYAZlWV6wyDDbQE9iSAnMMQRwxxACm0awaY9Bo9rNFYBusWpRNs64Lz8xOOxxPAMi8FYJxO96jBI5cMlxKQizoa3g0DJNEMzsPXgsIT1m3B0+MjAKWqsCRnmgEAcDpg2el1i1mJIYJgc9eUjng8giGB0mWepaqoCFRQ4RUxorYyzGi6G2MJ/Z32EBmyoM/eKGh9wu6c08CBWtWqVqGbmdqi/JwV7GQwiVGXpN03Pr8di/S4zgntKCs/35zOlhMKWAIK7xtFw72wP3pnYWuhdCj2/nz8/iz6a9Peqzb7pnPkosa+Q+lZe1vs73skrHfW9rvb91pfKvM+36x39B8gdi8UUPokplUUb+5HLUrbCqIid34WenUppTmNlDO
gCGOPplkSySzV+ForDocDnFMOfhaVQam4Wt+gIK3ee8RB7mfR6+zPlbBX9Q11LYVlRlSXCJswSm8zSddSHA549fotUinYcsbD4Q6n0xG//ptfwXnC8TTCOeByfgZnUR00qhLYene7/o7GJCiWk6nsPMC6B4uCaJbwhXho9DJxzBG1cuuhsv9nlv1kqGofAPfOnNWWOO/BW5VgkLdWsPIxiER+SZIUDQ7f/8738Muf/wK/+PTn+PGP/wmGYcAyz2DnQSFivZxR8gZCbsOivdMkO0lPxzROalMdQhiQIYOYi6KELjg4ZgzOY2PGdjnjdHqFYRrBpWLdKuA9LpcZzhHu7o44HO5kzAI5jKPDvC7gWjQJrRjHoT1TEbk4oNSK8/kiRb/gASJdewWopRUCwBB0xinqUqHJuNFnWZEhSbRkvzhc5gtK3jDGCFPznefUFG+ftw15yyAXsC6EdT0D0Op6TXh8eo9xFJXLUjNADKaMyjKztLDHFEYwKtZNkLpxHMEsFDsqBZfLBZd5QSmM4/GEcZzw/ukdapUxIiLbv2FLW0McrChm9kW+xF4Uvp6t2hcv7ee2lyx47OMLK1RZMp1zVmXggKD9h4ZUGcOFCIhxwDSNcDSITyZgSyvqIujIGAcQi6AJuCDGIONgFPEXF8EYGsq8IZCHjzrYuEJ8Gwmitm5bQ8As+TcBIRtqHzRgr1WSa4uJANJiaFdArHqfEgG6JyUZFEVh0wiQ8M6KaK7dA0MdpVvkOrG6TVps78u/e4HVZsPZWCPAikD7s7bYTtrNxB/to6E0/ivXvq0vYvfnIIUXD5vJasXjHgnbE0P5iyu7TdcIjh1LBC8+ZG3Zdfb2zY4t57ffK+ufI2OGQKfDsrSnVAi6a/ug9wmSrOr5r3o9kHmw0lMdRIzJC1VfUKMCT9LvZj6pWvxA5qudTJezxNMBhIDRYmS9xzlPMGplSvtXX2y9Kp5DGCwxRrDmCJYICktNw0RGi1eN3QLI71wIGEPAMErPHVjm3OZcrtgqFmvKrFBBkU21WyM1lCLI42U57/GiU/Q8AFyT7jndR12R4Ztevx9JW3PYaBREAhqEDSsB6u9uN7C9rn+OLni5rjLcqkkaHMoWvKB/mNweMEgaUNFO60Ma2W3Vpl1i994+4XvpvfbqN3ByhKfzM9JWcHfacHf/CuPhKAsScn9yyW3jp5QbR5hQILNq6t7XBAj9BVUNF2vF/rrpF0BryGywdAgopapkNDSZdtII7j1QxTkuyyoBaMqKBIocNIFRkwS0UTeFg4P3A1AYPo7wBPA2y6bQ9cBchP7lCff3JxABJSXM8yyBokWE/aKCoELizyRoC16SNu8I43SQ5LgyuMqGPxwPraJi11xKwePjIy6XSxs9YdXDa8N5vf6gDsQoDzt6JOiS9dvUWuH8oEqKO43Oxgq0SqVeWiqbqOTp94ZEgaGJl6p1QVgxlav2SvmGQNn6srEOQSvC5nB6pOI2uO/X/W3iYIa0VWa9U6647hFDezSQ7xMzCeSu980tzQWAqsztAgn9XrlF/oDbMQZ7cna7/16iXNjvemphzhkhOizbiucuYZP7Imbbhl3Xyi2oH8ex9RxdLpdGvbJrEMQmN1piVkEG56T/MUaRT05p0UTuxo5AVOpqZe0XDO331qNzzTTYnZAUXkQxMgwDvvryEQzC/cMdxnHAr3/9Oe7uT3CeMV/eI6UZJSc4CEJq1C1mbg4TJMqh0UVFi5WSS6R0Glvf0otlCdrrN28wxEGpz3IP5nm5el7WS0C0B3IWPN8Gd6UW5DUhew+UKtV2pbwoMQtUCx4fCzIHfPfjI+7vH/Dx9z/GJz/9KX748cc43Z3w1VdfYUsZYZiAOGJbL+CUJIEBAZ5ALoBRMC8biCRpMToo4FEg75fWi9oC7qECVBn5csE03eN0HEVOXxPb56cZokg2wHlWmqHH4TBJX6FzOJ1Ows5YRWrfqkC1VKkcQ2xd0PuUUkLNN2I4mhjv+37fOzmntm5szW4lIecVOVfMF1JEnXGZz5IEloRtXRF8bKN4UtowHUKzL8/P73G56POLHuPrOxUQqCLVnTMyC02z1gpXPZgETaPCWFPG0/mMbUsIcYTzAZd1luRlGOCD7CdyUJERUast2cZ1yMiMrAJkWi++YhuM49hQeEOamo3AXvk3zKeUTQevR0RVmq153+PktAgTA0TUSNY5sIK5IoYKEYqAFOlqQS1Z1txW8PT4tfQJ5YTpOAjxs7Ki+Qt8FOQNlcGuAF56mKdBxGhszmMMnSR878tQAZ2J6pw8q0a/DeHaftaqCanXYnAW5IkkGM3Zw20ryDtEH69k3HsWhDlOaaUoIFgvNHVjZKoGy9S8PEiKrzbD156K+Y/dbiidlfeRUPKw0VBxo6dC90rzb3oedlBr9bAWEZvB5VsLSy9O1qOJirBaoqvn6nxo+61WbWOwzwOaPWstCvraE8leKMO3Y1u/P7MWfplhnIq2eBUgkN/tbTbMkLFEdb/eygykDW4V2mJYYytsOyfCJ6jUetIsdgMqoEw1JmvVqILK6bozYTlmGW8gawwIQVh1t8mwXbvtKVZ0Mul59gjuZZ7hNvXvDASSO9DYJo4Q4qBU/ps4WNeF3W9Z8xqXEcBmJ3RklKwLgDUOkyKl9CWXLCJ+GwpIi44xhg/21De9fj+SNuyxNt18D6DN7bEgF9gDRWBftO39Dea8lggFroMU2yDbuu2y9tiTBOLrEyHYwv0wUO1f/WJqf/tCoPvSuV/dEz1GKQVMItW7rQUlM0Ae9w+vUGqFdxVZZ/Zs64rL5YLnpydcHs9Yz9LQTuSFqw4HRwMc1DhptRMvBN096gDo4ExI87RU2CKgoYAjCN1wnRWRyiKdynIfue4qmQ93J1QmUfcKAVQL1m1DgYPPFdE5sJPEkGqFJ4j0PUuVkLyirh28//JLpJtZSvtSxfQR03jEOAwgTwi1yKDXIo57mqb2TCxRNdUwQ3HMkdw+21taliRHu/SRrEdDbu3Z27/SE+FDgNPBplb9M0fREkKS8mB0O1e+Uf2qqeu5tndYEQarQJVSMM+zNuHuQh1WQWyNu7BALbf73IK7Uq+u335uNISWzHoPF0TYot83lrTac5LrZDim1uN52391xft3BOv5tt/b70xVqq+y9ZVJe/X0CVtDV8gMXT9jWxPOOeSScT7POJ+fsS5LoynXkmV/QqiMBGDNSRnWdHWPTazAKGfN8fDe4MzVlA6rDDD2Iqywbas0M5PQN2x99WMNeuf+UpJ2ez8M8Z0OR6RckSvj7uEep7s7nC/vsW0LHh6+j5QXPF+eAehsLqV72Jbw1aHSte2TQe6yIh2ZoxR1uThI/9P9wx1iDBgPJ4QwiB17flalPVHnkqBZaMByH1PrT+rXoK2JNgaE94oxVYDbjCgpAm1pxZYzKoCnc8bdw/cxxAkf/+AH+ORnP8Nff/LX+Kt/+ld49eYN/vY3f4sMgOIAkKjiggRVl+HdATGKsua8LK2fU9Y1AeRQoPLrGrwRM4g9Joq4nM8454pXr97g/uGEp/OMlDJyrricZ4APGO
OAShk5rwiDx+l06vqkBF0jcljXDduyYpwmjOOAyzJjXTdhHMQIIiCxqm1iV49NeZO5ZbQrRt4GS5IcV2xpBsCYxhGokqyldUVKm9o/oc5xFcpYrgxQFXETSEBXakUugC+CSrkYRUF3ELpoyhnOSfUeTFjWFeu2wRVRO21zWH3AdJgwTCO2vIlC8CBJdEXRvjCxwTVLMbMJNVTprausYjIhtt408wOGnOWcsSxSRIjDIKqe+pnmD8bDQSiSLPQqch4xSu++qUfKfZWNI0iNidRk5HSG9HWizTYjncOYOMF7wqvXD0jrijWviDHi/nCA12KR+R7SYkqpGZwqyrZhGEcEH4T+BvHN5BxciCDn5JkXKS64bh/3MYIhiDJXriIqDd4D6i8tgHLIWdERkgJO8Nc9gtYqQM0fkOYSLS1r66+UIvPrdJ+LPwC8399ndq5nE+y+omgRo4vDIAnrbdImit4qyEP7ufQ+w161Su84EWnQ79Wv7T5KzoWbamLPFLPvJVnjVnzFTbJgPra333KtUiA2ZHH3CfIRcqyqRfuiMaD6OddSuHbfW9xhNE6tZHgiFZoqQuHNGRsZE47AVRJcT9rL7iyBxY4G70HQVQx9CxjYNRBdJ2l9ge4KpVQwYUvbfl/1mkotyCVr0s7ImrB7VbTl7nlKXaDABxkxEsjJmuHS1pfFgdZPXiuDWGaAysqXcTOO93UoH1zArMqk2ge9bRk5X66Q/W96/X4kbV3Ows33W2Zr6BRaptve22XSPZy7/27fvH2S1Fe113XGvMzIaRNovvbVkQqDBxwgQ36BHS7ma15w79j61y0M/m2ZdJ+IAioVbtLiJMpny7xgiGesyyJol1ZAC6oECucL1mVFToxapZESUF69mUUydE2+mITuJTPCqM0rsVJt1UqOc4B3Q6sO1Vyw5Q0prWAumsCJkRPDIzz2nDKCDzjeneS6vAyQdgxwLaiyo5u4ROEN87zAVY8IaYoOwwFjDGBf4KJcM2lm3dNc9E62+8ilopKIHYQ4YJwmuOABEpqASMxqUN+cEqn88gEhRGxpw5dfRhjEDe5paPuzu910Dg63rx5t3RMEtMpQKQWVgcHLANBiVBa7JhO+YFZpXOOQkwTL3D8/oED2xz4DbO/LsCShlAJvTd5EH6zr3pgmHWBp6J/JP99SEfcKpQkWiLG2Bm0pUcldghUw2z6Rz7djAlbJtn4DSfrsvOT9tNNWHAHsr3oce+SlL7rcPpv2gXo+dv1SCRe7tMwzHp/eAcwyCNy51oPQ+vdIgpeSC0KUPWPPwPa2FUX2ZJm1cpj0+iSgjHp/qypNXTu33rbInZaA2p5lwDgKPex8PoMZrUkcwJV9dM5jiAMeHy9wBNydjlhzwhe//gKnuxMe7u+lelgTtm0GKQokBa3dQYJIECSvQ1SZAaVSEwTtiMHBDR6vHh5wdy9CFrkULPMFj9sTnp4eZR6XD4qEyJFMua2UPZghkgGv27ZdIcP7fpPKPVjUF5uTJ1ITr3aLgLSuePfVV3i4u8f96R4/+tEf4pef/RL/4I/+U7x6/QZffv01lm1FHCfEcZSeaBMPUMQhxiCFrCKJgnNBgpng4CHjRUrNIB0y69mLxkjOOE0j1pJwvjxhOt7jdJzw9DwDJPMNl5kQ3FHksb2grnASfGzbCuc84niAI8K2bjoncmsqsqLOt/cADoMIj3SkMKzbBubSkOFtS03d1Nb6sixIaUMqm6IpCesivcYywFxokqXIUHRBRmujz9noEgA4HCZ5vkSAYxQUnSHnMU4TjuEOIUhfI+CwrUnXhgRu8zqj1IpxkDEJcZAC2rotcEHWRy0V21ogMxYP4ArkzEhJ0FrvHZyP0v/W+XErfNyiHI0Gjh1hRrOLQj+1V+uTBVBLFrqhC6hcda9X9ckWbBftw5Oqp4cgAblWlJSwLTOenh+xzBdwLvBxF2ZIacM8zzie7hFVYMxB1PTSusqaL0kLhMCWqvZtoQkw5FJAzIh+V9297e/u+0ill0ttpxX4NJl2RErBEz9dSwW73ddZsaq3zXsytQtsif/Yi+K9vSZH2FJthYdGlyZV/zP7KFsd3u8J4n487v4TOxaMKgpLBvcxLS25bM/d70kSG0qLZqeY9yJtCNrWoOIbgKp+6n+1lit7anL0rrXU9QyVndpp9nz3bfq7CnjfsYCK9Iq1pBd7iwejGx9kx2YTWkETCbQ2HIWCAQZSldVGTNqbqv1bXhNK+QBUeBGssbVR9mTZk1N7Sm3siKc9Nrjuj3dtf+Wctaizj9QQxFg0C4IWXCqUFrpt8m8pWHJGqhY7iJCQ8xF304RpOgJaLJRKcZ+876ik3A5u7Ccb9ShbWIsmRTQziBm1kiaKilJ6DwaQ8nUf5+3r9yNpgwzPJIMWq8m2SlCqmgUNWpU1ZBthp/aYCEOtuUvWNKln27CyuGVTb1iXGSVtoKo3G7L4rowwADH1+gBoR08MJZENbVvdKjN7oHjLibZXXxXvEzYhs4jBcSGggehsqngJl8sz5ssTJjoCpEmSq4iecBgHcMoiRJGLWhFpiPdkAzV9S3q8QYvMcFxAuphzKbJxdJ0SRB0SpM+pCq2DWNA2iV2q9tkxMhGoSn/ANEw4Hu4wL4sYY+ewpVWqf8MJHD3iEFFQcb6ckciDEoN5QQwZAzMKOWD08GHEGCXwrKWKclsxTrz2YYDguKKiauLgEIYJFR6ZncDVRlonee4xBhyGiGmMiMFpUFewlCTz/EAQUTULPyUYAO+iLo6MI21O+lrh9MVqUmWQC3DwGEPE6D28AzYkZE5AdXDwCBRUTZKaZHoLPIkAJ8/XmpuJGVCKUfUBqHsTMpcKsn1UKiqZsI3SBhQpq7Xu83bMTldBP30MiCFK0GDJqiYPUkDWPSsXLJxx4Vpo8iZrsFWhrOeyN7jOqUpyliIEFLHUnkWxCaaaWMAoEjjAaa+V0P76WUC9kIIF9WAga/M4LMCqVRT39L7WUnGZz3h6ftQAYRcv8d7DR+HBMxFWFadwOjhcb4EmTaPaBxGmSEkokbUqndW2rJfhqDEGmU2oiq9GfQveSzFFHW7eNkEDnNgpUofpYPtAB4O20ETKNpWFYzCNE4YQkLczXr15BfIZDIcvv3rE69ffAWMUWXVAPq+rWjpN1OxeWMHEaHmOqNkx7wjjEHF3HHGYAgZi1O2CtCVwISBVRNuXrXCUQVrN9FQB77AVKVpJ1VzGIVjsD1JRIc1FuGTp6XEqIuO8Vo35quei1AWP73+Dx8sb3A3fxXd++If45a+/wF9/8jP8xX/+Y7x5fcJvvi7YKmE4vMK8CIoQvAOjwqGAHWOcZK3GwSEMAdEfAEeoPCLnATklRWazrNDopM+NGNUx1vU9mDfEccLdyWOeN0HcEuPpKeNwOOL+bkKaL1hSwng4AhyQE+OcF0kcQ4AfIub1gmXbMAwi919ywbrMOg9QUPh5nlFykfWhBQPnPMDSe5qTSnqrwARrvOacyGVvpSCVKmpzavekz0jNhhPEiGsBpwTUClfFRrDu2+CBu7sjCAHLInMJvYs4HO7EFjGQtgQfB
pzGA0RQq4KxIcYJ93cPADkEH7HpQGCv87TStuGyzjge7+CcQyosAZJStw7TIP2oJh5TM4Y4YBoDAIeakxarJBBuvW5FNyus2FrhSHpitk3aA6ygNwSJIZZtRQWBQoBTW5DXFaVk6QknQhwjUpHetW2TxFgUaQucA1wAzpdnYbNkQU3Pz8/wwSPGCesm3m06HDAOEREMcEGuBSknbCkLBTVnpeZVOM8AHDxXBOcwhNAo3hUsvfNZZNvjEBG9R+aKrLa6aLJnMc9ahX4qo4eEDeLV94DQxhBxqa1nyHwns/idGAdlFUDjHEOkdp9qlEShumqQ7yXD8eRl7hogAQoJ2iE2XmNOTT5uER4bCSF/Kj31/Xt7cIBQ0J2SrHNLcHxH+yMvrSqqNilxLWPwDoVkri7I6XgkoRBaRmXJHBTlMb8viVmV5EzkbFvCR+BWkCd4tXGEXLMoeorxhI2UQN2Lf4CoAIPMX8AEPLv1bsu/6qxOvS+sPb6VUSrgyp7ggIq0wrgsegsmHV89EIK0ddQqA+JJ5urJ6UhCtaWsqK4EF1WT06rUU1NVlsKcAA1WWGapMILioLP6gEKMrQoNu9SKVCo+/vh7+JM//TNszzP+5pdfIKUZDBFVMjVNBst9h30VOEhi6Z3MNE5ZbTz5VkCXdSHPyTmdEWiLp9NxeOn1e5O0CeOrYWydTLpSGViSB3Hc+0VZlZrUuTckpPXGEAiuVVhqTaIWVpJuAKXxVQlw+yqPVZUsHmbsFZs9+eoXLXBd9b6mlPTVXzt3O07/L8xogZG5gGxILgtaxCxDWS/zBeu2YJjEYBWu2JYZ63LGOs+oOWsFkVqVT+hzIk+9N2AyvFd6Y9qQSxVBlqp0KmbduOKQmBaFufek1AwZyp5YyowdRWOczN+RIaqxJTfSkO2QucJ5QpxGrJcZc85wMQK1ajVkReKKh4d7+GnCxlVn1khfnIhxGPVQK3PKfXfQ2XQxYpwmDNMEoCIX6UkYhoDDOGIaJqFiosCjwtWMSpaIMVxQ0Y5KTRzHnj2z9C055wQ4IkOLdlTnped+VRGDF9p3qfCsg3EDITsSmgarcCQZ7XCnWFQVBSB9byq7tCxBBDMAqaT2tBGjRgKm2KfOoBtUCshaIE0UHZE6833GmnO9C9X80UnFjLQAQYqsuc7gSae3rk0yGXqruAImaCO9WSzKewAoRMAFDENECCT0Wa3meZU79y6AyMO70AaYGt1rXddG+5ICjxZIFLCtVaTBAUloXQXAFU9PT3h+fgYBCD5c2QtZS9L0LT0duaFDdt7Sa2aUHW4U1dbHWNAq7M4RfDD1x4o2Y0irq06DDqfBCjPajBuzh1ylrzFti1b0Gc4N2B2aFaAI3keM0wHL5QKighgY40h49/gOlQn39x+BeUDOi6iucqem5kTBlvX/qaMpkq0jfe6OuAWgRQdBT+MIlIIC6V3iWsClYNsW1CrN4czAplRJ6RGDzr/q6Uvi3feht3J5IXh4WHEMzem64MXW8U6/IVexbc94PL9HuHuF6e4OH//Bj/Cbv/0M77/+Eq9fvcacGb/+8hF30wnhvmCZn3F/GoGaMIyKIiqL2gqQIcp6ycmBqgeoYisZJRWluctz20oGUBE8UMoClxiHwwkEj3MtSHkDc5Rkf004jBO2ZcP58YxhmlAywwfgMs/wkRCnAQUJeRUluDAETNM9iETQY91kttkwDNg2WSPH00lsZC5YV1E/e3iI+/50UhyN04Dz/IxhkCp2GhOSzg0zIYJtEapkBVBIKIjDpKl8yfCORASmShIxxAHHwyscDoTLZZZnZ4N4K6N4hg8mBFJAteDh4YQYBrVfHlwZQxgQfZSkoEph73g84HR/AjggXRYQEYZxAJCxLGeUXDAMBxB5jINvhWRAjin7x8PmCxKR2OFaQEr5S0lkzWXwfFWGh6g9U00gVAxBgtMEwpqyFFpCBKn9Jw0Ex2HEOERsm6jnheCA6hC9w9u3b6UYxxVgKfJ5kr+5v7vHGEcUCsip4pwuOATgMEbEIQrqqoyLSdGXbUu6P0W8ikjsYMm7arD01NIVndZ+1r9M6t57L1TJLcm9jhGOQkO6XSvUWzBbW2LMWmhmzipKAqgH1OIsS39QrfA5gcnBk7RxDA6I1h9EDqkK4sVaFKyl7lgJ7e0Nt/RyYO9lAvAB2ui915liRktE59OlsCW+tU/aCAONaqsEkSZnx3TgAIBDQ6KZutixOBWR2RkH0OfQYuguRgX25FKSRL0WJ4q83huSBgS9/7jqv2NkLbyYlyO8zBZjK5YJSqLgSMdcY+7m3RUwX6s0A2iy+z27RoJTPYZ9sY7poL2VgmtFTdrqVIrGRADp0HqLV2Th6hpzDuyF+RURsaaEzMDdMOIf/umf4o//7M/wxaef4ze/+hI+jCglI5XSCqpW/GTI2nIkNn7bCnyQYfbHYWzIO18uQqMmhYFY7QvXhuG5Pit+4fV7krRZoiUPzjiie6DL7fdWDbVNfa3mIpXr/WVJlSVOexDaFouzz/12ef72aXTb2LoH3te9ML1ctS7auvfY3NKyrvjZbJxotyMcemxRJJSAfZrGXVyCGXlbscwXXM4XLMssM8IYsCGpdjsIJI24RYKjWivCAFStCGj55upeWpWL2YaM7veof/VJagW3njawSNxKxdB6jkTePsQAklkGSuFYMA0DDvd32JYZZVvBOWO+LCLG4BwoepkFNE14wlOr0F09L5ZkQRJ2qfrE6CVodA7HaUIIB4QogS5XgBXCLixSyORFMjkEmWpPWgG7TlH2431guG/O53Zd3SZ01ZqJawWxQ3AOgTyqNsy76OFjQCUxpj1Vx+gCtVYsywJRMvSKxpS29vo1aT1AzgmSZ5TEClUAq0qVJBVxCXt1KPidQilrs6NT6vt79NgS6r1vbO8dcF5m9tg59ecysIqLAAAgAElEQVTVZHp1nxymA+IwwfkgUtgaIDtHnYy7IN9F0ddt2/tWTbHtpVdQ5UdR1VP02xG2dcHlclG6W9TZMrk9v148ppdHl5/t/bh2Teb8jTbVftaSDkKIkpRa8BQt8CNdN3Vfb3I91Pr5dMk3Zw3Ivo2KTvcFp/b8tFr9/PQsQ069x3Q44Nc/+VmjCpe0Im0r1nnRPrXanPjtV+/4vaKPDBXl8ILOVBCeLyuWNSGtK+b5gsyyx1IWtVZyHqX1sElfjFOf4W18hNrwbUtK/SRN3HR9yvjvLhgg5NzPMNqDB0cSeJ+fn3G4P+N4eoWPP/4Yv/nVp/jlLz/DX/zVH+C7fsR5LRhrxTS9xfMj4ThG1LoieMjgbX3GW5axDs7N+qwqSu6pYQUoFdVDZ21Bqu/OoRahI8ciyJrzA56ezshFKEDv3y94+/oOr169xvvHJwTvMY0DKirKIk3vMTIO0xGJMtKchLLkGSICFMBVe+vautgHc8vsId/2KwCVvgemw4hxGqSKb7RlcpiGgyArLLQlB49pOsKFgC2bfQSYq9BmWcRPxkNofVKVBREZxwgXggq1CTphYy8AYFk21JowjAeQ8yKjzoZmB0zTiFoL1nUBKXvhcrmA
UCR5YUAGtDNcFIQjREUqlGbtnGvqrzsdW3rRrGBZSxFqlu7FnDPCMDbbBljBVMbQbEkSWxoGDFF6WgOJmi9YZ5QKvIkmXKHMiiqnibv7V5imE8IQMU0jjmPAcQhArnj/eMHGToqbtYLLhnVbsc0MGo8IYdAePeurMp/PKJkxzzOYK6bDCKOktQIt7/3evS0zW202MKXU3me+zjmHIUbkXK5UAHu/dOXDnfWxmQCF0IDNTzURCe+Qa4X1kBk1uxWdwU19nODBjq6O3YqfV+011ie2+6KeSm5Fnr4v2M79JTaNfS/WyOt+ktmyJqBABklDBaxq0elneyuOfab1VjabRh6oWUGJXTyoPz/7mdlo0sKfZuBSAHQMx66hiZ53O95fe3+v9s++Ps7tte//Xt9Pewbbtu33iagxY/rz/ab765yTxN17kGoQMAvT4fpE0JJ5dlJUIwbIewyDQ2Dg7Xe/j+gCPv3pX+OLX3yObV32P2738LpdizT5tdm3nHeV0fv7OxyPR8QYsSwz1nVRJgIL6ttyGwbo29Oy35Ok7ZoeCOBqI/f9NcJT3k/beMOyiK573Bq6VRi7UpDmxuqcoEpvlswAHwb+t68PkLirRXtd3eivz35/+5m9yMO3HY9AkkgEwul0wul0hCNgmRdc5mecz2c8Pj5i21alEGkViwGq4sm4BXkWXGpjZUVnOBlcpApkAaSegPzj9iT0qiJy8zOpJHgZK8AMIAnF7yQ8e0k4ZEjwOA7gQFjXBaVuGIIMcg2OUHxAcIQnesTT8xmUVsSjCIZcLheIilro1kNXE9LvC1eE4HA6jHh9f8AQHZwn1JpE5StvqCwUWOdlRp4nGQhc4ODYKUVDkCMrHPTP/cUv+WVzCua4ekMqa8O1xNhrwz2xBEiFWAICre7mnJFR2ufaK6XUaGn9fbBkzZroTTbdko3Wg2cjE4T0pAZYXAx5qbK2BmiIkIDrrtWqrZWw04NrgVPJf7vuflCtBQTDMCDGQXtWgMPh0GYw2j2y9wKS3Aotq5OTZ8aqkt9py1i3hLSJuFCppckF90IrV/uNCFln0JjOSUoJOW14fnrEuq44TCNiCEibUNWsp8aolj1ytgtiFL2fdPXzfvjuuq4axBj1ymGXrZYgcIqjzlrTgOjWPtDuCG9fhaug5X4fZ2CSSxUAhQDyvp3H3cNrTCqk8NVXX+LjH/whnAOWJEgK1yKKYWxjHK73fz82wWwG6yw5dh6FgXlNWOdZgilVUss5Kx2cmo0CidgESERtALHh2VTWSHoWYhAq7G7zRdqfiIAg9O0+MJRgvLT77Ww4MAGoFXlbcHl6jyEOuDse8L0ffIxfffE3+PPLgtdvv4P/hDx++6vP4DwwHkaZXbde4Kkip02QTdlCyIoaOkVCnQtgWFEiIKOgkp4/Cc2BWHo3axXl2tMdYTycwBU4X2aUInMmv3pf8eb1G7x6/QbLIj2TYuuOQou8LLi7v8N0f8QWVuRkw50FNTOBjefnZwzDKPtwikhpxbIkIDjEOLT+KOmbA8ZxwLatmMZjCyC3usGoWrXKQO/T4V6EO9YNo997wZZ1aappFz6D4UA+IgYCUIC8SDHHEWJ0jRJsNC3nHEI8gpWOnytpT5oUauanZ7z2Hs5LolMLsOWEysAYo8zDg7BW0rZgHD1oCNi2Bd4Nwgpg36S4nXMYx+kqgWMWhorIie+oTKvqywYVxLgwPAqiE3rzVgvKuoL0fTZcPOoaLCoEMgwR4/GAXKr0E1uCFAKO0xFxGPD2ew94dTri9TRhe7pgu/wCLguJbi0ZYSA48lg26XfzXtE9v9uu1idEAafTCdu2ygiGss9oM1Eus919AgGgFcX6QNx8jBUOE0T46zZBuv1Msx1FkY2+uC7+XpIN50RV0tBXUpoekRS2We1fBQlVnQR5tefYJwN93Cnnldv19YU565nrr6H36b1fsetrdpdZZ7ZZIaRhfjCDIe+1v9/ted9nbr3kLdatWRULSRJBu59ot2q3fVUKIMQ38Wz3N3YvxmG8SpzsHvRMFSsC9mMX5HuLI/vY+iYx747f30cR6NheTDqJ6CreAbSdwxHCMMCXAqQko44swdQxK+245DWmSjLKiQHnPV7d3+Ef/cmfgyjgJ//x/8VXv/sdyrK1eNli4g8jdTSFTTvPWvd5c6aO7Jw8N27jKOyaWf//Q0Cgf/2eJG3XC6Kvzn/wzm6D94iB/G5P3OxVa5aZUtaUBW4bXxSndJG98ARuF9Zttm8/v32/JYS3C90M2G0F+jb4h1bSbY6YAV4SfFYUKkhpxfv3j7pxMi6qBpi1oZy0ot2aYwxyVfoBilQ5ZT84ZM72VyJYxDvXmVHB1bVr8tgTvL4Cc3svCAGsoh0g3YRpQ9hkuLCwYyqWZZPBsoNy3gmYRgmOz4oYORdxPN2BGXjeLoB3GMYR0YddOrderx+TlRWKnlCFHVfkdUFZBZ5nztLzE73A8M1Yal8hM3KtyGmvCIEEDf22157o7uImvfG+rRC145L0KQ0xgrzSCtmkfm1wOiOjyBDuzsD1aFpDNxQNscr0S9WvNuewGjRv1SwZqBl8aANTYxQ0xqEidLK4llAZ/SAGSe5yLZiOp6vkJsbYhDBsT0gip9RM3R/btrVh1BZUWNJZclF6oqqB6XDKdU1gFgUrZnEamUURrr9WO+f+/6XAUUFskt3S93M5P2NZFhChjZcQNHQfl9B/rlExdscmtKbbqqdVppn7xukC57wGUNTx81VIYUvNfomb3Onh0nuyBwdWqKpVaIXO2ZgHCW5ZHXaFoBLeCZVJKvcThmHAZ59/hnEY8PDwgJoLShIKtSeCJ1zt/35N9c5dzW5bW4I+ZdQswXVEhPMiypBKFjK7AxhKNXbSf9uVYjTAYB3fAUGimZG3vZ/OQeTFiQjBE5wTqto+PF1Gojgn9FuTISfvELmCvEPNKzxJEeGHP/wRfvub3+LTT3+Bf/z6Le4PE95Fj/fvv8LgCefLGWVbgJIhcXcFkQxvPowT+Djp2oiCMlZgS5LMYBPapwVLJmBko0pSznh8/x53FRinI3LJuMwrQowolXG+XPDq1WtMk8zPKxnCYFABl2XdgKgjYKiCcoX31Br4Zb15hDAAcJjnBcPgcTwedC/JGrV9Xrm0AB3wCC7CsUfNjJI3HA8HwMn4CUeMGEd4H7EsK4hkPAzUngYfcH//CtuWRExkHIS2FaDFkdShNg6sw8xdDAjOo8KDVVlYUClhdfgQkLK0U6xbwrycMRw9pnGCdx7rtohIBFX4IAXRmpP0uQWH6IXqXFXdkihiXZdGIZ2mSVhBzimquo+QcF1AztiLts57eC89XJKA1uZLiISWnrgi+gAfvIiKLAlDjIjDBASPeVnBDCkyOoAdYWNgI4igFylbKW/YUsGaE1KuCKEoUR0aSArrRQReDkrtFsRmmiac7o5ImuBbgtOzAozlYfbb0DUTFZGRJ7syrgXk67rPl+z9R4/omX1kKxwSNfsln1Vbcm7JlAu+CUARhCZZ0aFJDBU9c/r7Pfa6je8safO6BsSWGdoriLIwhuzva4c
G7l998bRdLxvSDDGMlqyRxaeS6IhvlML3bVtNf+9a/7CiZV5p++bfmn/vbLT3HlTp6n0EiT0sTqlywR/Ep3txvxP+0nNvKqFXiZhcZztO53e/CTEz39HHTL1/6d9nr+ocXGsrCUIBrbUVbWstcNYED2X+OC99/yqSFcgjrwU//+TnuDzPeHp8Bgo3tUx7DpVzS7Ts1OUe7vE7eE/2jYUk+yMpki50VecIgUw8DW000je9/s6kjYgmAP8PgFHf/6+Z+b8nov8MwL8C8BGAfwvgv2HmjYhGAP8HgL8A8CWAf87MP//WY0DQBV2+DWOVjFNWt/R4QDelawbCFsW+OOy8oUFPO4h+tvycuft87Jv2NmF8KXGzn18t+A+C4Z161lejbjePfW5vKFpw0gJ/3fBqfJgrLpcLtm3D09MoPTxe5qNJ3wajloyqoiBgyJw0nSkBCBpiM5PgSANDUtCRmpIg057QykKUBdwb25euZTeGPR1P7tWyXDCNE2IctPlV6QHKSS6pgKYDxmHAGYyUpbrvnQRAIyLWZZVRDeuOGjW+NnYDyaCm7FZzwnx5guNN+qCItfHaa4XJicqRJoGlVMyXC+atYMmMZd50uHZbNlfXfbtOrtC27j23lTci0qBUni8RYRpHhMGBfcXTNuO8bdiWgjGMiGFAKbXRS4ioqbyZM90NI7fKtlVIZSjtTYLL0qxsaJp3MhQ7RJFnJu8QhogYRHY+QPoEeypjP1zWJJy3lGRuFV2v8zZig/mq4lkrmmNflhm1Cq1VgllZy6RqmYR+7p04uJxZEF4VngFIChJUrq+1cz63lT4b5LnMM9KmyIX2zwj1ROTXvfbsWFBpQYcFKzaDLYTY3mvJqDm7fS6bFaKkR2QYjGollTcTljFqcrgSTKidFPk+NPu2wDUMg3L9WZEQKWYZpcR5j3XNOJ3uEMcJRA6//e1v8eb1GxynCWkTamROGwjK63c7JdyOeYUy2BrTWoDZN5GerpDBryIDz5VQSfqXSN+79yVYFRoaWApS7nVd2LOMUZI4Qw0sqBtGj+AlkJOh0qpkp0msqc9KH3FFzQU5PaNUoLz5CDgccP/wGvev3uKTT3+Bf/BHf4TT8YCHVw94/+4rUIgIo/Qf1W3BEIPQh4kQ4wAfPQi7LH2t0kxvlVfnZMSCzGdyrYfTDE0MASkX5JQwHRiHw6SjHxIAwrJu8M/PGOOAcZBxCdu6YjiMGIYJ27qirBmnwwExRkzTAOaK9+/fCTUvBNzfPyD4iFnRuuPxAUQOl/OCWmWO3vEogfz7x3coRZ+1G2G0oOAChmlQATFFEXMGWBQTQ0BDIGMUMROb8+ljgI8S/F8uj3BgjOPUAv+UNnB12uclTItlWbGlijgdILQy6feqvLW2AceCxE7TiLs7oVWnDZKUVxHbOt2NAFWsa8U4jUqtTghhwDRNaqtIZ/uh2VPnyq4eBKd+VgpRpalfC0Uy+ACyItowwA8ygmFeZrEZqrRcSwU8KZoZpZjKrGrJHvf399i2jHleQOQw0gFLLjixQ2LCmhnPlxWXJQFhgIseW5mxzAkAg92ohQpBvwRd3yl1tSh65Uil9KVgZyj8riGwF6KNPdEXpsV+leajGsOEa7PzfQJoCVuPuhXe+4CtWGb2zPorvXdKOVa1SI3LyRGSUdIlIBQkW2fTEu1jjMxXGnJvqN5VW47GCAYOSFJmBdmdIt/b3h6Juo79OrZXs5N7okLKSgDtSVlfILuakdclMWbHRFhJEUdnQEFpM+WAa+SNu5MgQEVXVEmxo1pe2fSbf4lI2TE259Xal0p3Pww0oQ8+w/6/T1Bvk96X7m/v4zwBrOM3LC+oRRSXtyTjceQ+eRRRRRMEjlWMjwEujL/5/AtcnhcMcUQuSUdmCABCKnIH1KtnaIkaYMjz/vxz3gu81g+bdNas904ZDgEpyXzjb3v9fZC2FcA/Y+ZnIooA/g0R/V8A/jsA/xMz/ysi+t8A/LcA/lf992tm/mMi+q8A/I8A/vnfdRBby1VvgiwkahUJag9Lq1NdkGB/a89eHujOs7bNyNbHxaXdbKf0EKZ9iv1LlYXbQNt+17/n+u/kYfQbtd+8/QIEdtTwg9+r42Zm4WnTjgIAFdvKgsroDAkpBynJLct7pFhLANUmfyvlKEHRTO2Q9hsN2zdSuJP3gqTXDmVPVL8teWNmFG04FXRllGB2Eznr4/EoFDfoUMVSUZIEJuuydIFyViUegcMnPzWjPRNJacLteVQzjI7gSZqkPTHGaUDwSm9kEj6z3grova1FRDykByrhfFkwrxmgDsWQB3OVuBmFpD9+vz567ra9+mcOrs2xH49H/MEPPsbxbsLXl3f4xa8/x7wl+GAcdgkCnL9GeMyY9+iNKaoKtWenOViQb2jPMIji4OE44uH+XgfSBqnWk0MBayFEaGSBAlB3qoh9riVg5ty3lLHpIFdLruzGmWyx/Z04nOuEg5mx3RQ69mdge2UPAORtDkL68yASiV+462KLnXdPebEFn7atQ9e0WOJItlYt0g95dwCTb0mbJWwAWlJmlWgiNPnvfo/3+8Tsh1caqlEiATH2IAso9iohV27y/xIoXdsZq1AXXbfZKKVEOiuLtPIfEYdRVLNywvF4RAgBz8/P2LYN3/veA8AVXAtK3uBgQjbizPug4nYfOKcVRIIi9t0sIxI025HQBZ13gPbsmZ2qVXrmHHQUCVFbRwQj8+60WzsXo6IsywICMIwRBJGcl7Vi+1Z8hSDYSvtBFWn1UuHIIy8z8jEBLuIHH/8I/+E//Ht89ukv8Jd/+U9A9AbnecE6X3D/6i3ScoaLEdFBilA6MFlEaTY8X54lISNCrQQ4j7TpqIfCyEXoc4LWmBkTBVCAUErC89MjHl69wZs3r/C7r95h3aQf6ny5IPkVr+7vcX+6w5dffYlSAgaliNWcsCwXEE2IccS6yvo6HA67Qh88jN5hfnSaJqzrdrVXBFlJWJYF0StdrVa0eYW1mnqR2CzvEQaPw2FEqQXLPAMksz5DlERF5O+zVvyln69U4Y1M0wEAq9qd+HKZR6mMjyKJrHMeKWUcj0eNB7R3rzIOh1F8fyFsS0YuDuM4YBhGWMTmYwRRACezQbntI6I9oTHkR48g98yh7VEfhFJvvpq8FoiqiLtsKYsaca2iWFkr1rxhjCNCEMGXUjMYFYOOqdlSxjyvqBAmA6aDJs0BwQ8q8OHBboAbD6iZkBFQCEA8iJpiTYAWUq233OYIJh1mDrYeV50rR1ZEl01jSbQxIMz+lFJaMa5vATDWgX2GH0ITZrLiSo+mmE0UBook4syS7Ntn9+9jQxwVSWMdMZQ73yiCZbtU/K0P7hNFsyFZmQC352Xv6xMG8VXXvWTXtnj3lQSZuXvNFLMeb/my41WpULXz7X3jbcxo90yuj8Bwe4JmLRd2vJrR+0Fm1lmJe4JFRAhECDE2NfX+mu1eXYMQImjVo2n2d/34LSmOXsfDL93n68/GB2uxX1fOibDVtpmvNSq9vM8EgrxzgAtg0VMFc5Z3VdEOqGVByVVUdlPWHETGglS2sWAfnm
eL9yxeR48IXqO43gfU6sBVmAan4wNOpzus6wJHz3j/7nf4ptffmbSxnNWzfhv1iwH8MwD/tf78fwfwP0CStv9S/x8A/jWA/5mIiG/vfn8M4IOH9NLDBIQiBHcd+PQwqlTICnrlxv7QJvfPkAB3mkakZUUu1xDs7Tm89LpN2l7a1P37biHu/nVFm7PklBU7oi6AIUWONJktpSADcL6a1Wp0ulaZ7g9F+n13nqzHgFIjdUqoGExLFDXAqlrC6Dfl7bX2vzO4PUaP16+F3niG0DvnGSqMIIqMuWSsmwwyRa3Y5gtKSljWGcOglDpPmIYjTjji/dMjWFWsADPAvBs7hgyI1dlzABQml4QHEGPGPoIpIKWCbcu4nM9Ytg1EQSg3EjLuRkCfT5+s3/LZ2/1QylX/u/75c3cvTTL/cDjg4x98jDdvH/Bmfo3zMuOyrvDEmFVuPQRBv/peAauAWjJm52Xn2A+iNJqiIXSHw4RpGhCjUyhf5HPnWeipmXf6Za0Mp0qX0ptQ2j3Rq0TOO7e9gNowU+dcQxMtabOqpvMehkte79nrIoc4GEWEg9ekSta1I6+fRy3wtCLQnhhdVyn7oCOtM+Z53pMsmJhJBtWCQamdRA5rup51ZWvAkjZz/lbJ7qXwbx2YPRPfknEJ2iRwuV4zZi/svu/3a3fAFmTqH6KvMLDO1CHn4XzAqzevMcQRj4+PTdhoGkf89JOfACAcDhPStukzlcSLnNBQTBDD9l8f2Nt5eJJZczULSgmtVBaliBE5VC9If1HVryZqUwqGYcAYgySOzBjHiNPpiOBkFIIpyVmyty4LtmUBwFiWFbmIii6XJFLnNsssDHruUv1s99cLfc+7AqoZaZ1RUwbGCW+/+328ev0ZPv3kE/zjP/+HOD68xZvv/gE+/+WnGKc7bOuKyzqD0wrHUv1nddi5bqJ+qbRaHyIqsqjegqRww6JsV6hoAuFUWVKogVsu2NYZl4tQWMdxQGECmRIpEqYYMQ4Rb968wbun9yiFcTgdAR+wzk949+4rlHKHcRxxOBwa+uv9ALAkYA8PD1iWGcyEw3RqSMo8y0D5oKMCapFeQk+EZcsoKWG4O+F4PGBeFizrKsi4B1JakUkq1n7wih5mrGtqaEjJImAlrIpBKLPrisrA8XBAdA45iQS+I8Y4jAjRYcuMdVkb2iABnNA3RaTIYl/GV1//DuARoAG+OJymA7btAriKaTqBq6CyeVuwbULlE5tBiroLcm7iP0Ro4jqy/LVfski/ZK0Vec2IoSI6Gf9RVA1yiAEuCF2P6yT0SJtN6AIKF2yFgVQgg9ujjHApkkQP3sHXgiExYhYVaO8c4jRhYI9aHVJOqMwYhgOGcAAUATHE2YasG/rG1ZQJE3Lemt24FYc6Ho/Nfklvufi8viD4km/yijRb0G7FFqPbmm+yQF98hVFsgyKOe/99jJL0LvMM9h6egibLrLRwQmSJaXISeq7rhnvb9VjxrT/3WtH8yEt+qNljLliWWeyb3gNjm/S22+wbcQUXE9iyoN5GFlWx5ayz2tzuz3vk7iUQ4dYG2+s2VnXwUoTrE0+WolGfGOeSVYX8OsbtEc8+3rs9Vp+o9KwMO7/eD9r9uX19E6DRH8ueCcipqJmoYaa0aiGIEQiAGwQIyBlMypaB5AVZ50hqZAFA1wCKKnjWLlnDzTXv/as9eiq/A3AT1+zFTGkhuiwJuYp67Zav//729ffqaSM5o38L4I8B/C8APgHwjiVFBYBfAfih/v8PAXymJ5iJ6D2EQvnNqSP2i7p9IP2DqoXBweBVdDfhQ8qhBGp70tYHSPvG25O4fuHsn/vBfXjxZ7cbWb7fqyF/1+fcJqhOPYxVZfozYf1PPIXhuVUEaJi1viLAgoar7f5yF5zYoESZfyHHId7hXdbvCbQnfYw2TuMlA9YHxnZMU3CSYdVOJtJDgr5cNq2KCL1iWTfEIWI6joKIeqErHo4HDRJImoi5YIgjhhClqR8ySgBd5YqcqOH54HXezG40RE1KNlqqwLIkMCfwJjLtuRQAQZAUF0CUkcteBbaZVr1BskD89plyrTpA8fp527+9wctZ5g9xrdjWFduywDHw3e98hDkl/PZ3XyMFoSxacm+f0yt49V/g0HrPxnFsVc9eCU2CNpnXYwNwLQDOOcv1dpUlZigFsa3o7rnvg3OtwBCi9MnoUlUUhRsyZGM97Jhm5Oy2XSe6muA5HViMiqLBrqBKRZNrSXZaxesbii8N8dFK3NPTEwBFt1joL6VkoGZ4TXKdI6zbhnXdFKHY1dGMsm2BhzzXAmML9sey7+25SR+ZRy5JqVm+JdovnTvXfc/dOhL52Y0Akq1drlKFZUZ0HtN4QFYH/urVfVPg/PJ3X+Kj734k8ynBSNuKmpPMQIIWyCq187Bj9dRIQGwWaaLoWLq1uBQdMCq0XOkvlSJAHAIGFVxh8jgdD7g7HRo13BGk/y5vWNZLQ3dtrmROGUaHL0WcMXOFg/TX9ffT+nikQuylCBAjyAdRgMwAaw+CAyGOE77z0ffw6U/+HT795Kf4R3/5X4DiAfAHPJ3fIcYDtvyIbd7gIawOKDKeKwFOUNSiY0PM7yj+r/fimjXAatdtHmWhinm+gAgyv2w44vn9o+x5Zrx/9w7f+egtjqcTyHmsOcO5AFLxkFozzucziAh3d3dgZszzgnWR4sL9/YPU65LQw6SnaafECY1NEj0Rf5E+NuYs4xLWWZEPYVk4FxpSeNGG/tPpiPFwQswZ83wBasU4RGCQgeBrFlZIDBGIkOG0mREHAnTgMoFgs+TWbcHT+2cQHO7u7xCnCeu2YVtnMAqGwSPGA8gxghdEf5iOwv6IAZcFyFuBD7qvsFOZe9tj1K9cso4UgaBGNcNVr5RQZYhUsaO5ZKxKZa+hIrjQfDqR9LflkuFcBDmHOIi9zJC5ggRCqqx9cFUHCFcQKrgkRIy4q4z7yjhwQVZhrVQy2A0IQ0TNAMqKlBNAsu6PxyNs7Ij3e08xwRKEglx24SZLvo7H4we28zZ2smTXErLme2sV0a++SN19Vh/whxBQs7ADcskdFd+OWRraWaqI70CZIK5yQ7NikOIOoYBzlnut1MeGKF2pY4o4hFA4g66za2VL8/Xt/Flmb9a6KyD3iOAtQkWW7Juvwh74s/ZUS68VN6TIXreUweYDIL1bpbPDoY0FEpSoKEUyENqIK4g9MaEAACAASURBVIsVyDk4S9jUF5dSULQFpafdvwRmSExCfX1Q/bo5cv1HdUl6US77TPOZ/Xrq1863Jc6AxSUShxAxUlqxrgvAeo2wNREh6gyaSOek90dVYUmYOjLfuaLyzpqRfMMuyFSx2xXjhSjjKpaxc44xwqkiecob5vksbJ4Xco/+9fdK2lh4MD8motcA/k8Af/73+btvexHRvwDwLwBBW/qHe50I7VVUtC/C9Z0yGuXNxXY36raqXTkjZ+mbKZsMobZj31z7B+fUb6CX3rcjMdeqmC9VCPrAvy1I2o9h9Mh2nMooJJLwgjComIEXtrYnCQaEmoIuYdhvoMxT2pOxWmUKvUjPynut7
6+LHuxXLSDvr7mH+ltSgopSknCAXUUuUl30QSF3dljmTarRFRjjiLdv3mI6jpiXi3CIAQxThHNCp9tyQkCEp4BpnHA8HHE5L0g5wyZdyLMSdS2rPMWoc2dKQSkerI5oTglz2kCFMJEMKhYEQlCEnLPcYxdak3NW2o8lr/0zvF0TvXPu783tugEkkJ+0x0d69gYgAh999BE4RJRKKPlL5CKy5+iqXbd9TFYtI2iPY0ddMyqZJRnWA1cLkLfckg5yeyN3Xz+S73NbVgS0eXwWKMv6ZxTIUGS5R3vDcjX6mXNKI4ZWuW7vy/7a76+seyahRUnfCmuhosI56wmo2tTLoMZ+/BAhXtcVy7JgnmfM8wXTNGKIURxgYZSURFVSE4mSJanmek2vsaBGBFtiS176Jn7Zb9e2hohUwS+I4y1C1whhp7reBkVGEbR5lrvN2Z14E1nyHpozNQQakGBonCbUWnG5zPAh4ng64e7uHp998QvknPH61SuQXsN8viCtCwIVFCSxwS5ere1belDbH84BKnnunSGdDHBRSp0KjWgRSRKwDOKKnFbMF0lcuFadNZixpa3NEDJ0mJk1gFPBGwpgFiTBqy2w3j8CYRhGSexUMMb2dc4bUBLyVvD0/mtwOOHOn8Au4u13voMvv7jHz37yE3z8Jz+GP7zGw9vv4YvHR0H2wwB2Dsu2wiutmCtkvTJAXmYp1i0Lsqr+jJiVWgjUzNg47cEfkfTA6bDjLWWh0gxHjNMJNRdcHjO4JsQYcTmfARDCOGJNGeenM4aBcJykd+L5+UmVTktTbX2/PGOeVwzDCO+zStUT5nlGCLH1aMYYcb48y9+GEVQLximCUZDmDTF61KoojaIGzAUUBsTxBN42VA5YliyUSkTkKvspBKHKH6cJhSJySoIuVbEpOamQDmRES64ZzhMO4wi+q9i2rOsnY4weXHWWG1UQy+w0Hzy4Ot3nB6SSUKsMD99W6bfiUsA1tV4ru24JzOtVcYZZKPxwBMekSPvcaJ3jOCKYEi6TUBmdUF0doaMUMkIQwZFSGMF75Cr9sqXKmBpBgBKCc8jbhsv8CJ5n3N1dkNMF5zFg2RI4zSDyYJb+W15XBMcYQsBStiuUTa4BjdIuQaupGXdokiYePaoWgtD5e9TtFjkxu2A+soLhfWyfab6qp1kLNZxQWe4Tc4VqsWpMuI9VEf8h9PV6Qy80xDPnDYGAITh4mzPXxglU5Jwa4mhBj1D5d1/RF6PMLjcaPBGGIQIInf29ZntdJTps/kn6pMx2sxbV7N6L2bxWnjZWSP+5ZiOsKCcVdoCq/W2XMDCjqlZB1cSwSAapxWxS1oqc63Q6tWdlPu42ia1VaINVC2DXftZ1sWPVuYJSCO7jpn0/fejv+ntqz+Ol5M27KPehFGHWsjHQlLqboC0Io8xUqyw0ZG1PGVRojXXsAsGjoghNuzuOFQ5srbTn+y35Vh93SA950CKirOmc5ZxNUO+bXv+/1COZ+R0R/d8A/imA10QUWNC2HwH4XN/2OYA/BPArIgoAXkEESW4/618C+JcAcDicuDnxl/NUcbgKE1uzqX7QzcboNkrVZnvsm8U5h8q5m7PUHadLPvoHdFsBAq7pP9cbcq94WyJ5m5h9w71tm885J6qFuZkpaFlGK2JOmyeLNlzuML8jkt+VClaqBro7y8zaP6DDe50OG9QEiRzJnDhIIFFtY8vFf5Bs2mf7buPt10goKcEFh5SBd+/fq7MSIxLDhBCcJJvO4/Xr13AOeHx8rxXKDUxGBZFjL8sCXz1mv+BwOGiSK9X6tnK0Uu103RAZZaCiMLDkBC5AASFxRYXT4ZdStS1VFCPhBGkDGa2y6IcbenN9vbfPl3Atnfttr91gifNcthXzOsIxYSnS6/CD738fy7zi6fksQ3mbamC4Uum6qsoRAbRLz9fKKhMsDsEQGXEaAdGQB004nVbESr8+PcFLqe7F9Rzi0ERShOpqBQO05Mz2jw/+qrfC+k1NLKM5JOz70HsPhgTATumRQGc/yLUEpWowvBvZ6z1ZSsGyLBrEVBwOB6XSyvNNKaFqUFG5ypBbDV5ijKhcZL8QIZek17M34lu13lB3SZL3WY323C2RKGVTRETnzSly2L/fXt4HOK1f3dpPS+KcU5nukttzBu/FHEe7MMfDwwOGcQSD8fmvfoWPPnqLMUZsmxa3NHgqNcFRhY8RVdUJb4tWt5XZUgu41IaMV6EG6GBeB0dKQ82iaEfe68gSQlpX5G1B8AE5yaDmYYgYYoTXCuqgKDKR0FFtjAT0jjjvtZ9ZihjbmlBqUbVRVeXNgqAUMAoqqP5/zL3LkixZdp73rX1x94jIzHNOnepqEAJAE0TSKMlEowbiG+gNJDPN9DJ6DQ5kmsg0k0xjaaiZZCJBEGiSYAMEwK6uy7lkZkS475sGa213zzhZaMhkMuswO5VZkXHxy76s9a9//X8xUYjEzEC4+4owHjmdjrx588B3f/1L/vzPf8kf/mf/REVKHr7i8cP3TKc3tFrIabbx0ag5QwiUqoFzzhroay+SJthqxKzn3HuMaCBRBTNUztyCUVF7ivP5mVOcOJ2OSM0sz4+UlPESeX4+E2tlOh5xwVPKlUYjRE3Ol2Xm0+NnpmFiGCfGcVS0X+CaFnxDwaph4DBNa7AeB6X4DeOorQp5IaVqvmhv8CGQjVJMU98z5xo5LVQGVS5FuFwXHMIY1c4hGUjmfWA6HDhf1atsmEaoVf0BS7L5uY3fWhvDOHJ357lc5nUPr6ip+hDv1vUKIrU4mgRaqaS2MKdZASQfcC4CjuucaFmrgohaBahnWyCGkVwqNVfECdPxQEmelBO5VGKIDEPUZAn111SVRk+aFSzu0V23LXFeewlTzjivcz6XQuvG3M6EIVqjJgNVrxc+fPctn9JMGiLfDwFCgMPEdTog071Wjp2jZkFyojlVzyymdtjXsRjDtjaXvr/1KjAW0GolsCcq85yoJTEOleCjViTqxrLQPa+oYNiy4LznME1aee8m11YtE0s4EBDrzy05U5uzMaFxm0gXa4q0pnO6CJbg1pWGrxcXFdDCaduH9dw1VHxJ6kYlTClrEM0mlqFrWCRoAZJm3oq1VYtFt3Xedxp7q2ucpJU860E3P8mXvlzb3q+AkcUKmCgGXfTkZaJy2//Xf25xTvcKVND8RVxiG0CxNppmz9XW1j3ex2i5qlayuzF4X9P3zJKepAMqWreLEffH15/fKmt1nQP7GPo1pct97L3fU14rsNRdXBZCwFtV+Hx+Yp5n7d0PzixzNrVX+wRUAyIgXtePar1/2rLw8rj0OsgKiq9xV90fk4LX/W3Oe2KIhCHgvLIKSs0gDR+cVfj/PyZtIvIzIFnCdgD+S1Rc5H8H/itUQfK/Bf5ne8v/Yv//f9jf/7f2U5nKeqmanalylKl94NtZ9z9XQdrukNfsWtZgRENw9eQotaP+LxGPniErxapSeYmC3wZIe5RIaV1aZdAB9NKXQh+KdIgJFew/Z9+gq6fwUuEN+5RgQcu+ebXTF3Rio1LMQHXKRRZMshpdZDW50IfbTaZa
qvKUvcc5cLUZbdEELsTb4qc0wEpb+41KrS8sCLTwKaS6u4aGdPkmBOdJuXDNF8bDRBwHzsuV6/nKYUw8HI5E74mjJ3NlPs98/PSRWhvjQY1bL5eLGqEKzNcrrgbGWHBxQMzHR6h4hKwKznq+SRdfP0Xr18jMJu1aawVxugl3/rYoylpaU7SjNvN5q9SaNFj1FR9QifEeOBgiGH3Ei6wbn1hwUXHah+hUKhsRUmnEXiW17+ty5Nc58XxZuLuH6ByuBJwURu/5w//wD/jlv/t3/Oq77xGvMu+tVVJJWmXyYguwSsoGpz0vzkeGwa80CJyjmoHnlsDp3V4r1IaE4UV1PPam77WuZqV93qi0fffLa3jfK7a62Wqj85bgOq9qoE1Mdc0JHr9+bmsbzabPP2fghQDS54VXsZlWtbJHN1WuivI711QYgpf9rzlnzufzWkEopajUNpqASRMGH6giZipeVdGyFIZxWCuEAszLFUcjiKOkhd6zsQqwBA3qyq7/r1al1KhSZDPk3TEEpeT09aGZepqIWyWVh2heZj3QEQ1gtJJWd31QKq3uaoOqa0gGoh8ZDwdciMxLxvvINB1wwK+//Usuz4/83W/+QPtZa6aVREPV8gKhk6hwsVdJN3NvEVmrkD3oFRoSBJxVXl8AZPqvN/2LNEpVWtqSFqbDqFRn7ximO1t3VKnMGbh0Oh3X4xhcJOWsDIqqiUAqFSeRJWcWk5Ff0Wuv96YHm1UUZ1DgyqnJ8eUj8/OvOI3f4Gvg9/7u7/Grb/+af/Ov/4i/9w//AXdx5Gfvf5enj1dwC/GQkfMj8/NnJqeKltRGcOCkMURPasXmrd5LnCPVLvXtqYsqFnoL3jBKu4gwGIW+lCuXp+/46s077o6RD2fBx6jG5FSyNPwYmI4D1+vCZZnpind4ZRZ8en7iTmA8HgijJpTzUjgejjRgPE3kpCImx9MdOM94GsmlsaSZssxM48AQRrxTkYm0ZPKcGcbIEAMpzQQXSLnbdQzgCk0aSRpuiqRSSangGpTnR1wtRCqkpp5KzrHM2vd0PB44TINWuJrnulQu15kqjcEHmhNN/AsgWlWO3nOZgVxVQt+AFq3SCzivwb9VuZs7aPzlG5a5UHGIBFJWVeOA18QswrIk8rIwTPcMQ2AYPPOSyKXhKoQw4MbA5TIzLwtxGMg4UvWkXInBMw5Re9xaIzSdP8uycG2ZECNxnGAYqEsizQmuF2Q+s8wNj1OQ8XTH8PsnLq5QypWhecIYKEF9FmVetO+niVLx28JlVj+sbg0yjgOuFlxNq4ARrRH8wDgdaA2OU6dbNz59emJZNrBE12uhVcfhcMc0bYF2bmcuy0UTH6/XryJqV6CGH0pzHAK+eUKsNAoKs1pEUhZbPyDPC9l5vFOPv9x0DxuC9tJGr59TU6JmjWdyq+S8ULL2IoZhNMsIoTYoWRkPwS+M0VvvvYHlVd/vMTshE1qSBtJMybKaXVPTz+xJaamZmpWY57yqW+Od9rNYdUtaQ6rZfjitxr1WLNjHkhoTyuq75k0Je1M67MC7JTX2nBid0eFwtZpo1eax6Zz2q/bvWPekNbFqa9KitPtdu5ElohovdEaEtqzUKi/2Ytiqlz0x7X3e/XHLJnqtp0+kqRgNlVL0b+PhCM5xdWeWtJCuus9Gt4Gd4gRXO2VVRUfo8U+nq6KdblqJqxYDYji+4OkVOt3vNG1RBke1fca5iB8OTIcjLSeag1YWXeed9s2G8DcD/H+bStvfAf57UcKtA/6n1tr/KiJ/DPyPIvLfAf8X8E/t9f8U+B9E5F8DPwL/zd/iO+xmGTosdiXaVp1fU6LWej1xfe/+5remqUa96VWTfgPAXN/Nh8N5qmia9xK1+DK73yhdWwn/lprWH3sUYF/luz3nPRLRn6ut4XoiB+b5ohNZcWB9rvdElILKBbe2Di7VYdgmazV0tsPt/XqKiMlSxzWZrYZYNOm1263a9ioI8Arq0c/JwmvEeYbpwHQ8EqcDrf2oapGl4BxcLs+kVohh4HA4crlcuV6TaaFEnNNFM95NjHHiMB5oNK7LhfEw0qgqL1/NZNEC9yEOKlPvnPbfNPCiyGa1ynmXfNWA0vqqXOdG9+bhTViiSwH3xU0pBoVqPQ2aoPWBu1VJpCccNwBBF6Do62qpleuSWRarpnUT6wb3dyfePNzz8fFJA4KsHHUxaqJec21sFunqkNqvsFZxd9W1DgLUqjZ19Pso2/jYKqq7RXtFtNYhQGttVWDdKof6olWZCpSaYGbhuN5rKevrvGw89z1t+MX164utiNpbFD3o0nTS9PmuyJYgTSnF3quB9MV8DW/XjqfPz6owKhCiXjtv0uDaf9mIQ6cEqsDO4+MTtVatVNjYd07Zq96uea5Gv2v9EstG23Visspb5f52PpVSVkpvn1etbVTlru7a1wCbgErlsv4x5wLNOVwVQhiIw8QwHPj0+Xt+9vUdMUTu7k782b/6Ew7TxDhMpnrY+0dM0t173bysiFpb1aQWVjS5NlXf8kHHQHCyVU4NSU8p23k6U3X11ofhaBRKLlYJGBjH0SoYg1b2va5/eV5IOXE+z5SSV4pX/6nfs6hFQxjX9bUfa7MbIuLVF1F0Yxa7foiAE7yHdH2i5HuCPzBMBx6++ppvv/trvv2rf8t/9Pf+EZenwnS44/n5B0KIHE4n3ZhLD6gtkTI6cr82tW70ptZ6D0sjDFFBnOt1nbfee2MPGOLeKnW58PGHxGE8cX9/x/n5TE6FMHiu88zy48L9mzuGGLjktPYl+RAYRs/zc+Xx6Yna1HvMO8dB7pjnjLeYsqTC4XhiOpyYl0xNmTBo1cQPJ3KtfH6aefvuhJcRygUfwUevtMHQQDykZKwZVQsWwapc6tXXclLYtVVczSp2RCbXQqpaBTiYofDnz4/E6BlHVTuNrTEvidrElPIaqQhU8K1p33NznA53jNNEqY3lolUg7xziHSXNxGGk1UBaKuPoCBFC9AouVUGKEMKBEDuo+Iw4ZRhEuz/LPKsgiLOev5Kp4qwH1nFd1EtOPIRhBKeempfrjHMzUxwUVEb7uHNVVeO8CI7AhPULt6YVNAHBEaqjZqWKi6gKJ7N+rkTPdLrDi/rfzfOslVcXGYdRQYNclFXSGt4JnoBaXimw0Cnw9KSgbSJKfUy2pmyOXunuGgQ9QA7DkZAcOZvnWi0Mo7fqmaxUNGlKg9Vev0qr2uubk9JKY4gIwrXOtm+rkE2InsM42BrUAbqCQ60zCmqJ5D2roFXfd9sKvlsBoanVRCnWbiBWMbNiQIwDTmyfK8UAxW4FoABksz25iYGbJSNVE+EiqmDeRd6kr00VXGsEYOjMHjaK6j7GeqnJsMUaahWzWTB0T9Pt0wzwtjXYh0DonoFr3GZJ5LarrN/ZHy/2UDaRlD019paJ1j/nNta+jbtfxE499mgvGW6637oX46+/tj+8DwzTgSZqN5Sy0mMR3Uu2fsl169ySNjqHpVsPabxUS6Vfrf5PqlNbhVYplmy7ZsA/KGvhcELEaT/2krVGhceJVQp/w+Nvox75z4D//JXn/wz
F9O14yzZrM2No3ZIGizzK+89mV1LqwFummasHErwkt7hjCk0HsxLZOsBx8IQ0fvLCcr6sZd77m9PXE8jiIiVCrHI2J/UeD8cKYLntM40ntLjgvLfGWeZs7zwuW6cJ4Waq444+mdw5pKTYJ+LnOmLAslR+k3deI16HygGoiT2ESUCqWK8vp4HEUnIDj6UYqXXpNtjGGZwZoXRH/g/t7x8PiWvnsg+G9yOHTc3p64uTlIi0ToGfsjvuspWZhVfd9xGAdKLpKILlGK5bnI2r9ehQUQLF0fqFnptHp/U0pY41cV5ucF3B/2+AuRtEneWZWmJn82OhKaLBWqcF13IhgroPMsIWm/2xC3QpOkRjfAFabUYPx50vacJmiMeWLIKsmiIjVFVCobNa89mqO6tRs1Z6Nd7XvSqlI56lr5bJv4HmFpiEFL2vab3P77b7//3ur+PnD8QeP4dOx49vkiOb2vCKSYno3N+qFr/8/+85zC1WsSUcrqUbIiP7uxz3XzcvPe41sCh1JbMTom7b60w072rlYPX1O5KpuZUdqV0azMWi9y5Lu52JKzFoxI/1FQb4+At1ZRADEAbQpWcv8h+CBogI6jD/5JUNhEWErJpAJpXliWTda4ZFVzLGqUrQt+mq8a1AaqcdRFktvG6C05k3ISGoPRXrzS5r2gcet6MVUDMhklGVuVy1V0sdRmj6A+VjGJMe31uh7MXQgka1bVrHbAiQT3IBtpGEQZNEs/Q84i07xohdIU6RGpVainWWm2rcPRUqlZ5oS3bb5IIaMlc21jlOB9U70C1rUYQtAKbl6T1xij9LCs815Un76neFONJhVO5ZTrE/GNdf3bTfRin9g0AYjnFbVaq6q4qg+XAdo+gZMK8zAoErUQ/MDL1++Q3JX3P/lzEN/hN/7e57Hmyuv3LaFLXO8TdfGULMG+9GwKBbSuRQt0jsgoN6oKbAfI5hm30X6fI/h7RsCT/WK3n+z31FLKqg4KrMWdVDJWleEEvBJDX2MMi1Yte0WhS3qOAm/G8i0JiTHy+LAwDj3jOJBzJcWFkiveddJrU0VyPeeFWiK1siadouSq80eFAuZ5Xj8n50xToZXousH9mXmeOD/cc7wJzClxuHnJeZl4/xMf5yv/7Hf5ype/wje//g1e/WTHeNPz2b/0S/yjv/t54vQhQ/CUlOiHUdRZS5LeVmf5f9t711jbsvQ86/nGZc65LvtyTt26u8rtbjvd7nQcJ7asxBCEoiRKDFgxP6JgFIRJghASEgGBUEx+RPzIDwQigIBIKFdQlASZABYigGWC4xjb8b3bacd2Y7ftbnd3ddU5+7Iu8zIu/PjGmGudU1WNWorrnHTNV6o6e6+9zj5zzTnmmN/lfd9PsrolqhNAYYOURLVp/NzxOCWXOnz56ULo+fNmTtyAHEuVW6fmnQWDgnOm6JkGpqalWa0x1rLdbhmGgf1+zziOXFxc0DUtNzdvqka1UMKnMOleWe4F52yxnU8Yw9xxmaaBKTTl3GqH8/r6eg7oYwoc+z1Df8Ra6LqWnLUjlBOl+yRIzhrkRnXoy3lCBySfuuSbzYbtdls+vxZ9hrLXWpdomparKz93byrdehwn+iFgTOZw0AB+s9myXm8w1rLPe3a7u/Lv1ZECFiOOxgjTFLXwoScXjME5g0gzu0XrT+zZvtXM918IUQtVZWarOp4m4jAyHI/kxhFHIU6OaVLDkBh0fICeCz1Xw3BUTZEYnNdkD4mENBQ9Xe3a6PNR3V0txhRWg6Q5OajU7rZVM5lpGrm8vJwLCWqCM86F2PMicb22VedLo/eUzour+i3HMBznfRTUydH5tc5GKzFeyBNhjBhR5kZKanpmrI7CqLP7nLPE6HQ+4DiSUqBpm9P+korhUojlmX3mX5DRMTchMIaB1WatSXyhtMcYzsymCluAXMrMOs8zxDAnr5qMpXmfM1icVM8BmQuqxmhB2qFze+uaDPF8PzQzg6SZE/aVXscsM81T9+5Q/m4ghok0aYfZlhlo6giRNcnPGW8bvDNs1h3rVYNzSkPvujVivDpRpoyxmYvLC6bxwOFwQ7QyM2/adcv6+gEhC/eHnv2xL+MzlPLcrDrIkf44cDxkehJTDLofocdkncN3WjSKMZCxqrVLE95a1tsV1w8e6PXM4IzVRKvvub37IncH7eyaMmNwtVrj7h1vvHHHarVm3a1oVh1+07JZr7WDjZBN5uJiS06RVePIUdf5objBeq861pqopUn1/uda1DHEIkE5ddTPn7nvhOciactkcjnQ+lA5p7DNPLungoQSbde/MQdfT1Zz35rQ1U5KxZwg8iRFMdeg9SwR019TtVIZqtMiJ61LTtWcoiZX1RhDzv6rx6yVwFrdfPKYTnOoTh04ecfN7pyWpUlOTWaeTObOu3z14XOeRJ5rec47DECxPT1RakADmTnJ5dzBsgyHNCcaZErpSb1aOeecJXVQNt65k6bvr0G5Sknk7LLIfP0rReR8LhuZ05g5OSV1yNkoBKPHqkYdZqY7SplbZq3e2N45nG/xTVcshzXhbIo+TbJSCskZ6yxN0yFii6tk6ahQRhrkOI+yULGz0g6maSpVcd3gQ3nA6lBPDW6csxgHJpXzGnVYSgnF1MmuJp5G+ehKOdbHhVoQa5W7JrtzvhqL1XhJeEXUAtxY9UEahoH+sFcL6ZxpvD441OrZ6Fy6cp/FMGKsJcSEjBNgij5D5oqTdY7OFuv7UvqMMczJq/fKsa80mZoIRtH7Ioaq+TRPJA56zykdofL9TwUUCCWQqi5ZztnysK6vmfn3nPaL+jA0hHBy3zvvZM5rmicLILVzXIPjczravOeUpE2/Lis/q9OnVv8t/dATcuTF65dZbS4xbcf7XnuNn/mZn+BXfukXefjSAwRhDHv2uzvaKBp8BJ2Vpq6aYS6EiA7zKgY6p454LRzU70+On9VQ6aR3q0kynLr49X2VSnh+Pmo3SN0O1/O5EVH9q/eOqkV1pfscy+ycnLNSlowp86JOlsu5PC9Oe0k95wNdcS47v5ZVN1mTsBgiEeZihHOOYTwl8ymjGrN5Dzzd70YMakiUsVYnw0xDz253R9OsSUzsdp5mdcFq0/Bgc83rn32DH/77P8zv/6MvQex4/6sf4mO/9WN84sd+kK5ZlSDasN5ccNjflzlNhhxKkCx1lqC6szpfjBJCKAOPlcIagsoP7JkR19MJdO362KJlsU88g4trKnnuZo9D4LDfM4bi6JvTHKh0bUvXtAx9T5g0eRTg9vYx4zhhjEoEdN/SAN9Z/axt43jw4JLD/sju/p7Ly2ulLk9xNi6aphFCZr/bYUxmu91ibUuYlE2jJhcUGnikbVeQM1PQgE2McNzdMww9vvFsL7as1mouMIwj/XBkdzxgLfRmwBnVZlUDmN1uz5tvPKLxnqsHFxiTlN7ZT+x2O6o5jSYhganMrIpSinnesl6tWW8bphA1Uc6lu2INzhpN2kpnOEwnKqu19sx4KeO8xyBMw6hrz2inYuqPxEkTOeMMdrDzMyyMI0M/EkHNRWx10HVgErv9nmO/w7hM26xo247gPGmU
QoMXYhrphwOrVUtKgWN/gMzMbhlGHbhtrGGaxrKXmNJ1X5VnRVLHSYH1ao21uq/U+Y1WRqw9rdlKXez7Hu/9vHcgTtkSontlirEMSD+jcde9x0Em0hetXIuOYOpWDfmos+OGccBYh3MNTZnlKWiiY43KSaxVZkwq+15IE2GaCEGfg+vVisZ77WpOUxmlAyGGWRuINVjj5g75aqXzwqagdG/JmRBVrxtCmEcKUOMyyTpmpMRQ5510kdNzqLKC6nPKYua1pfupJpopxXlthTgRxoExq/Si7Tra1VoTM2vIOXJxseHqcov3Tg18YsbYBp8yx35imCYdyB48U7+HqUeSFuPGKdDYSLe+4PLBixhX9ucUOd7dctjdkeMIF2tiekDMmX4MjKNq8XWG2kScBrx3SqlEo3PjLduLDQ8ePuDi8hIxpsx9FdabSy4vH9A4Ybe74+b2VucFjgNhGtmsVrhuTZoG9kPP4xvIRWPftZ6LzZqL7QbX1QJ1ols51muHHxLDpOMuhn6YO26kRDN4mt4XI52JNmWGYt42juPMEDnPTd4Oz0XSRj5RjJAnqYPwJK2I/JSr4JyUvDUReGtidgoazhf0/Iv0UOZE7TzhmTME0c5FrkPLEFKqfy9rxavM8RB3ckc7/xz1OJ74bJwMDc6r1qcOwcmu/ukq+NMP4Po7K33r/Pw9/aCeu3nGztqzpwPO83OoD+cnE6y5M1g6I3MyZ9Qq9/x4zwNY89S/Vz/P0z+rwmkofPiSgdXu3hxblOBbUazTz9zl9IIWbVM5Jlv0AdZ4vO+UE446UxrJSNZ5KM4qr7ptdJAjxQ5fH5elwlmMCqh03CzEQh+LMTKlWK6fVnKznM5ziInDOFFNMio9iZwJU9FezEG20Tk8ISFR3ZWq1jLESEyhNKkjGZ21pgJdIBvtKDo/nzNdA5FqfKVyKu08SnVWzFrp7Iee+7sbJCWcWKxIsfWv5x/iNBKCatPWXYc41SFZ68mpdC6idiFEMuvNCmOqDbtSZfq+n4P4+qCp7ozG1KBGaZZhivN5q7rWej8CWhCa15QpXZSJKaSSCGqnrmma02wwRDvIJWmL8cl7qyaO9d58+674qctUZ7bVAsy59uu8QCJoRzMnzahzLjuPWBI6pHocJ1brFdcPHxBSw2uvfgzxhh/6f/43vB354Ps/zJA8U7yH3JOnRC4D3nUGx6k4Y4wtneBSHJGz9Vc629WU560d+rcajJx3L05sBnkiEazV9Hr+avV9TmIrnQ9KsqrXo+6ztUij17hSfc/3lVMHsxaP6po472LXdWat5XA4zNerdlDPK/n1uMXYs/cp9T7XomL5L+dICAnEaZciTNpBskI/9Dx8+VXG+3tefvFlPvWPfo5P/uzP8k9/5x9Apms2Fx3f8PGP8okf/3uM00TbdIQxoDrGRMoBK6rdcsaSRYpO59SNzpnZ0n8YejabC7rOk0J6oij6dtcopUTIGSeuvGcuT85/Rw03LG3bqFFDSnSrbqbGxhCYhoH97p7dbseqa1ivVvO4gM1G7b/1egSOhwPeWrrNCu8tKU149gXfbgAAIABJREFUb+maluNxKBQ0h4glpYBzuo/HOJFTZAoTb7wxQLY0zYa2WWPMaYREJpTuFbSrNTGpLnl/vGeMIxfrLe1KizW+bbDGYRtLu7bc3T3m9de/hHOeFx88ZNV2+tywauDgrFKufCOQhWmMhCngy/DpysrQ+0o7IdYZVk3HanuBd22hcpc9aRrUEGKaMCSlieUnZRM5qzlHjRFSVrKm8w5vDRbIYWIaBp275gw2W2JS+l8Y3WzyM/QDYUozVR+nyblvHAlPiD2ZQEzqIOpadWQ0RhPjmCpFTue46QiP6i+gRajt9gKgaCOVqlgku4yjmr1UGq92stxcKNzv94gIXdsi5XrGGE/SA6NmZGIdTgzD0DOMPeTS6UuqWROyFiNjZkoTKU9qjBUjTXGarNo6vW8GQkzYQqnNWSnrJy2hJq9D3+saaNR0REpHEmuV0uycFj9mOr/ao/XjAKD6O9eSinX88XBgQIuMjW903E/SpC5zcqbWBEv39Jx19zZAluKySSoF/6kY9ITZkbnxnuocfJKmTIX9kmgbT9d6QvBMMeKw6jdhlKa/ulDasneGzarDm6xGOvt7Yo4MUyRgQRqcbxmGEZMzgsNIw2q9xvuGwxDYHwbuhlvsYVJTHiM0Vrher9k6uL95RLYovRzDEDKp6IZT0v18OB447Hf0h4MWDAxkEm3nsK523Tp80zL1gb4PmDRhGsfV6opXXniFKQXudzf0wxFnJiwHCDp/9jAZelnrDLsYaRvLpmt44cEl61XDxXaF91ta62lbr8eaM2E14b3jeHSQEl3XMq0mxmHg2PfECGNhbRyPxycSty+H5yNpY4669Y+UTsEKxeK+PEBiSnMQd95Fy/nJtqKpw49r1Xp2aCyBvgFSdSvUjkoN/p+mEuqhlc6bfkdN9Ao5jxpE1GPJnPRn9Xh0QGwNdM6SI1Rflc8+dA30K6Xg9FCtnaJzmqMpre8nz8lbkqAySuE8uZsDWTl14WpF67w7ULuR2p0pgXRxsSLXbtxJB6hdrlPwdd65PM0aOkuoOFX4zan9o//OmaFKXSaJrFPlOae5aodJc50SMGc7J3mGk+sZRcivFLuSmAQVtSqt0WCcduoab/FWH/RxGolTJttUKpsni3y17tffFWIgTpEkToWxMRKD2jGLEQ3AysrPWee7TVmIaJcxxqhj0FAqo7WqpdP0LJcEMIFEwCJo8D0GtaV13pbELRURLHMCm3MkJ+Gk+yw9jVyMM4whowmDDvUFjDrF3d/fceyPeMB4r8njNMyJizUGX+Z/pTgxTpk06XpvfCZntfyvCYFSlzXw1o7I/olu1PmYjNrZknJNKPeIJkBSNr2qiSoBPFYph8VQwTo3rxWdG6ddWV/dXQXapsFax263LzqMJ/epnKDOFzu/Z4wxnELnan2tna1KBdOqZvNEB1tE5geyM6qBTAIYpQhZ3+A7HRXRDzp0/YWrl2maLdsHV3zjt3wD/+Dv/zC/9Auf4/0PXsG3gdREHj26xWalh1G7MqBr3NSqrJk7enZOlMJM3xSROdF5guXA+d6a5yTv3Iyl/ql7SJoLBOfJW93TaiKkgWgsQ+LbmYpY19d8r9ffdbYv6KD20+iUp0cEDOOIdyf9bMqJadRu2jhNpRtbrNVz1fiVa1fMAOpzZu7kSnEZPq//lX2pugJrp3ygazv2fc/+sEMCvPqB1/jFX/wUr3/hi/zqr32Wj12/xmEY+S0f+6188EMf4rP/7y/jjFebeqd62KlXZzJjSyc910JQXUtKlbTWstlsGIpBizGuuPSKlpmeupZKq2wYxrGMNSi7gmhFvibKRpSFEMu1N9ayv9+RSLRdS9M2ZOeYxpFjGRTclEB/v98xxVGDf2cZxzDPgtzv9yCJ7XaNiCb1RgwxBO7u92w2F2w36sLonMVYGEehaR8wHPc8evNNYhRWzaYUdDLGCutuDUTu7h/T90euHjq61ZYQBi42W9arFjGZN9/8EiEkVt2Gtl2
r8Q8J6xqurh6wXq9pvVfdvQht1+KtYehHbm4e0/d7mrbDGF/0TRvWqw22VPOHvifGif1+xzBqQWp3v6dt1UAmF3fSMKkpRQiDak8NzBKMupadI6Nfz87StmqtUXMGmDtUKWVMKRR57wsdWGlzq04t4H1bNHJDz6HvadqOzcUG32yw0mCMumMej31Jsjy+MWWGXqHAN5oM9MehFOBKt2x2P4am8WU+nMYBqkNuWK835XmQ53twHAaVTZQOsTUWVzSYXdPpMx6Km98eceU9+mjAWn2AVg18isLhsOf+/hasGnB0rddC7KwXK4mjdWrPj2EYdXarFe1UppgIxdDDWk3o+r4HSVhRt+imbSFnxvGoSZ0vLCGElsxhHOmHnt1+T+M8rW9KV07lQDEl+tiT+h5xTpOB9QY7jqoJG3tS0O66tWpAlDDYVJlcmWgiTgwxBXKIHMOeodCnG1/cNEsRapq065iyznSsa2XVdlxfXWOaRoud5bPvSzFGUqSxBmeFaQwcxl3RxgtZHFujY0GcabCxIw46A3calZ68WW1IxoFRrRlRmSbGCmvnWT98SCRiGkfCcugn+klpzyElVqsL/MNr1FBn4Pb+Vlk6YaTpPBfbNU23ZtVt8b7DmxZnG473dzz+4q8SpkicOlzb0jmDk1YpoCGTQyKNiTAmgtUYWF1YDTlGbh4/oj96+mPDMBy43G5puivarqVtOwRhvVlz2O9L0KqFiGEYOBz3jGNkSokpBPp+KB23QWOuL4PnJGl7smo70/TKQzSTSQJGNPiY31cokznrw9tIpSilUncAKJ0Va9RuvDzsZw1CNqX6oU5Zar93lvxkZuKjKXSpjAa1egx1M1VOufJtteuRcp4fSkYyJOWQi9VKgYiozXZSm+j5XMxflenv8WR4oA/ejApeqjbOQi4Up6TCeCnswZMxSqrNJ2pdRjtrVgNLecraOytNSeYZQNXlLZNRIXMSVF8hpmir1NlRTAn8imgzZd2IyPrv5jnA0epjpUfq4FU7C7Br5e1EgyxrI1cq7Fk3dO6c1mRa14TFFIdRZn2dPvD0oWZLF0+y/szZhqZ1eO+wtuqRUqHv1m6vQVK1V05MKRNi5jAc1SpbhBiSUhww82BctbIun4OSXUodaptIUsYfzNegXCkRlM6fZ3vgahEs5Vo6o7z9ENUtCSmjClIqe3gtLUQkB8g9NVwXAWeFbGDKELFESkeVhCQVrY/TRI6ZrtvSgAZHkskixCkwDkcaA+uuo206xqRDyK3VwHYaAlOvVdXWGWg8MceS7BjGKdAPB2JJUtuuoW1URJxzJidbBPCBULp5egtrlSSFfNLq5bK+it7NWO0S5zL7TMcVGAyCs2W4cDKINYRA6SY8qd+qhjihdGB8HX0w021BR9kUF68ENiltTnnrSsfUurjMCWjMOkBVvPLuidrRicYSRAferjpPjIk4DTS+4/LqRYLteN+Hvx4R4ad+6AeJceDqAy8yWaBPWll2gak/YMhYlMany06TfJCZmq40mxPFWDtGp66b7itnlv5Fqxk1AtMA6myGY+1IWmsJKaJDddVl0YjFu2JaX8Yy1IJQfxzp2hW1Vl8deo0TNptVqSFoV9JZi2u9DuMdetpuBVmtosnlmI0BPInEFM/10tCPIz5TZhTW7UOwDkxWjWpGcxcz73VnyToGY06aZAMYcWSx+u+LYQoDx/0O71vyOHG4f4NV+wB3ueHypSve+OLn+NSP/CRf97XfxIBh8+IFH/kd38xnPv0Zpv0Rb0A6XyrIgssWmyMxT7oHz8WxqqEs4xhEGPtAfxjIrV4n65zqr4ubKll10tbpbKNs1NTHeo8pHRM1dlBDohNySZwnVhtPP+zphw7vL1U/5z1Nt9J93zgO+x3DOLHdbjAZbt58E+u8Gnk4TbKGY0/bNnTdCmccjW3w2xVkTZqPvVbSEw5TnDpNVuv89WbDMI1M6UAaAzlZxtjR+K4UYVZM455+dwvhCDg650EsMU2kMNKPe/xmReuNuv6lQNduSrCduLu95e72MeuuZb1qCdNAJrBZdyiN1dNtthirmp43724pNxQ5CkY8Dy4fkNLE/WHP4dgTYsJbTxgG6rgISGSJqiFKGYk6LysD0QpTDljvVENkO73eooVBI5BCJBTTj6ZRi3/n7OyC2K7OBtWT2W4usN7jXMsqZdz+QD+MDMdI59c0riEEpb4a60oyHWl8w8V6gy2MItU1ee6nHWGMdN0KMky97gkxKV0xpMhuvy9FI42vmknI2c90/RwnUjjStQ3ONVpAsJYJmFKdD5lw1pJyZhh6XLL4bs1qtdUkMWsX+ubmsdIdDVhv2W6vqOM6JBlSgH6cOOZYioqTUj/DEWsavNMEve1arFOHYUOn+0gqTJiYiDmQMow5E0Y1BnFilREyTWAbXLvCekfXavH1mI7EcSJkwbbqEGkb7TaCUvhDHAlBzWdW6xU5NazSmmnstbtX4iLKE13ptXa2nTfUZoLqJ0MM7I8j5FF1Xs7TNg3tesVExHrLerPhYr1Wh8sLHUx9fXlJYxvAkELi9vaWx49u6AUutpc0q2suL96Hk0gY7uh3j5H9FxnvE9mvyf6CmFsmVkzjkRSPOr6jc2WviTRdQwqBu92eHHRwvBhPu251NqnzdESG/kiKE2HqyRZWXcPmsuPq8ooYE0M4ME5HDoc9/f6IXEJ33bLdVldRR16vmIZADg0ENK4XQ0xZtfXO03UXtNuWybVInHAlOkpZyGJIQeh7SHng5uZI5E1Wmwe88tJrXF9d0XQdTadFvTQ5cnLkHNnvHzGMR0KCMSZC1mdnf7wnjj1fDs9N0laTnxMH1xab99rJqp0hfQ30gZ7zkxzQ+qA/db/yHLTO6dcZZU6kuPOJtvL1WXT6N2vwnGEO3LTaWivrzInDKZgpv9famSuuD0fttJhsEVc1K5mUJ3Ke5s9YP4lSUU5akZxP3QGt4ktJOtUSf052q5YNZi1P/ayUY8y141e6XyIqOZ+r3Ul1PTU5gjqbLYKZyiVQYxhrmlJxy/M1mY8XHdxsnW5I6oQYy7GhkS0ZEVucq54cPv62FM1KKyxXlLNAcj6HUrqCqBHFKUErAfzs9GnmNdA6FQJrYhoxTg05QrFvri6VOZZ+SsrElAkp682X1BFqNj4RRypdFAyQtGqdU6YaCFDoeuSE96r9qa5tgA7inIsUZX2ljGtbWt/grJqV5Bi1s2mq66AOWz0F6KIJNRGTVVBsjWAkleQQMpEYE8FoAp1yZhoGxv09MUwl+HUksVivIvRE1vl0RWsTYyaME+I0QUKEZDJpCkxT6XSXB52xkSSJGIWQhSGMIBnn9XoO48A0qVGF1DYtpxmAtnY+Yirz3+Jc8ajakzrPzxotyBhrodgfR8DbMqJApFS0TXHlCqf1P4vgjepTJu26xKSag2IoSxSIhkJDzbgEjajVej/qkOTGazW5FhjEKPVE712HxAgpk8UiVqkWvlGnymnoiVPglZdfYLXeYjcXfO3Xfz2f/MRP88uf+iQvPHxIe7nWsQ2HEWJmiAfstMfm4oqaNMkmnXQ/M0
NBTrpgkar9VfexaYpzl3BmOpT7zIgGVPUurfdsLTTVMRjWWS1aoQOCXRk8b532j0VKgkjVjmhnJ4slFRMCsSApa2IrhSI0ew5nrNHuctXezjRQRA0FSsKs11pm3aYxJ5dMLcyVJ4gxs8NmSpmm0eJVdb5LubAtCgVL0CS9TpaTrPrBYehJIdN5z353Q+Mv6WPgfV/zAT7xiV/l0z/7c7z5e77ACx/9ILmxfOybP86P/sD/zeHzv4FfNbofee2MSo762wWSRL0XpBp0neihktURdyrUK+vVdS1Vi1jRvazupdTPa4SxOAPmnLEz5VjKPVc6jVYgR0yOeC/c725V0zOV56P1iBMOxwPDFFit17S+oT/qTLKu68gx4JqG46B0scePblmtJ7bdlmEIRCYMlszEsT8wxonNWodbGzE463HF4OHQ37M/7gBhe3EBtHjpMGLYbq7Zrtfc33+BN17/AleXL9J2W6aoMz+dMVxfXZBSYHd/hx9GEkZNozqv5uEJmrbThKnxrLwjxYB1Lc0qc3u/Y3fosT6qEUOZiWhEaKxjs9pwtenIacQ0HuuPOv9vLJTCSTuMgYBrPaZpccbSGIOJysaZSEQjWnxG9UzOamHQl/1fi2WJmECSpfUe16zIRjgej+XejmozP47s91rsaJoVxioNzbmOHCbub450bZ5H7lhXXCHHQH+8wfuGy8sLmsYSpxFnPeuu45AGwhhUm1oKaRZhympAZZ26Tm63Wy7WG92HYlS3S2MJAkZS6YJHDseBpJWT0t3Se7Dxlq5p2G4v1KIfdTsNCSQb7u/uuL27BQO+dXhR/bkzLcbAMB7pj3uGIXA8RqxrsBaaTgtMOUamqJ3RYRxYrVfa6SyJq2qFdf8cp1ELuKWL2XmLd4IpMWhCh3WPaaI1GSfCdrUucYVAoalPCaZUzW7UBOjYa9HUF2dHnY3XFdMu1XVPIZd9UnciKDFPrro+jelCivRjKaqX502IiaZrubi8pt1uuNxuudpsdVB3B23ruFhvWLcdDocRx8OrK66vH3Jzt+PRzS03r7+JbYQH2w0P1isuu2vCcU/fH5RmKLcE45GmwzcGF7T7P449OfSklOjF4IwWUnLWRokRGKaR3WFPjIm2afDWYmmKrELZA3EU7e6Xa73abrnabFBlgGM4HLiZJozTeH91fY0fEmEfmPqgGruNZ7XtIE2MuwPjfg92RLqB63XL1brBS2aMiZvdwP1ux4Sw3mxoG08yE8fpEeMg3N3ecXFl2F7oQPKufYizW0QyzSrRHx1TyozJIk2LCAyHW4b9HV8Oz0nS9s62l+c0utPXp6DnLb+pdInO8rjZRa7qXp5o3cCc4NSkUUoAJyIkk3QSfPn3qbSQM27z+TGeG4TU96dCTzqnRhLL0ERJJTCKc6BeP0eMJ3eZJ6mQxZIYR86avFVRqj6wywiBKlqt56kmKVmrCmRDFfvnSjKv59mcO8jVLpa+L4YRDQ4yYCCHs/fqZ2E2JgERNbVQJ7+ygcgpaapJ2ynpPI1FOE/a5mtTrGhzPqX055qi6iwpYmanSe+Vx69W3Kfkv35txdL4FmfVgn6aEoRILA9LdVLUwE0t5NU8J8ZMKGYiqVRDM3r+rC1JfaGDZtHIvh5DfsKgxuo64PSAFBEdtlmTtVIZN8nAmeNeTrXvW5J0OTuvJYG3tWOKGgZUzYdImge+CwYvCXCEnHRwbK8dOaW6tEzZMEYYBnXZG3NEvKOxOmDVi4ZYqdx3U5p0RlA2GPGYMjB3Xt9pYgix6DIy3jez3iIVEZO1hhTikx3npB0u1blMs66pFhlqt7HOeIwxqhlNSfRC6cDklDTpzpqEW+f1AZuUeiKlu6GJYpq1bdY57b5k1S7qydekoNL2Zv1cuY8715RCRLHUL90QjKEsqVPV1Bis93TdCu8axv7A8dDjmxUXF1dk4JWXXySOB37kh3+InOD973ufjmRIwv6wZww6LN5x0mEh1aDldE8/vb+ealq1QKVullUDZ23RWJRiA6XDVM//+fy8uo87qzbiOjS4dOzLfpxSLBV2Q85nJjYxIVap0NU1N6WERDXNyWcawdMcOSl//7Qf1EJQjFFnFRZKfNXm1r9f9+/6Z6Wnn+8/FbPxUh1PUPdn0tnIjBNtfxwGMrDebjjc32hHHuH9L7/CzxvLb/zaZ/nsZ36N93/0wwzDyCuvfoCv/9g38MnXvwBWu7HGmUKtjhhJJMmEYlxD1vOazvZNUj7TI6rOLVK6hka04FMMTax1SBG1JoBiJmSNnfdycp7nvNViRiqd2cZ51I8l07QNRlzRcWbeGHTkR3VUq7O5qm29aolWOGfZHQ48fvQIuRS6tiOj+irbrTUoPoTS5fYza7M6MYY0sT/syVnwTUsKqiMd+iNGMo0XfNMhtsE1aqogYcI3hpgGxAiPHj0GDE50Vl9MgSmGUp9QPfD9/Z5+v+Nqs6VrdHD7NAaO/YBYy9p7Nuv13J3OKRNHTZzv7u4Yhh33/ZEshtV6Q7P1mJggBvrxyP1xxzGMhW7uMNbTuGrBP5ElMc+JLaZOjVfqoBHhUNZrdSNU44+Otm3ZbrccD3uOxwPOOQ2AvUewGNuULqGaTvlVhzfaiXGNL0WXk77ueDwSY+Bw3EH2CIkBnYu4WjVMU2AcK52yoWksJkJEtX51DxmngEE1XN55nLFY8VjrVYt1PChjxBRGiVRXRDVc0SacugIO44AkQbIhJ9gfDsSUVTvWtsoCypBjIA4T+35PjBlnO9abFSKqg2pag7hIDrk4QBuGoFrDY68JhhWLFYO3yswxkvAIGEcd+uyJ2KyshmRagqgp1zD0SHneNb7FGNUs56w63jrEWkh4JzjXlgJ8nvXA1pTCkAhN07LyDc4rOyuEk5GXM8p4qDFPymq6hbNItDqTLQd2+zt24ci1iaxah7VbttsN68tO6aOiRewYdEYbCA8fXHP94CEvvXTgzds3+I3XP8ejmy9x/9jzcHvN1foFHmw/ACYzhp5dv2M33BOnhMfqYPDiM2AEnBMaZ5myYYpjje5oGkfXrTjsjxyPe6ZJu4hd09KUOZwpB8Q25BgYdntC7DXmzw6kwbm2uKImQg5k79i2GzCR47jj0RtvEN8IrLcdDx9ccbnZMMQDt/sDU3/g7o2BVgIvPrjkxZdf4YUXHtBuIocyBiZOI8kmxCX64y1hvOfm5kjbBlbtmnX7Ek1zpQYpL214cL1misKQDGMWpinQWkvu1nw5PBdJW9VQAU88ZN8uOZr7K3L6u/XvnX9//vW52Pr85xUpZZKc6G/mLBFQfd3JnezppOxpbdb593pjQTbghNmRMJckLpO0UltsuE/dOtCbM85GFrOGon6mpF0sPUZNjEQ0SJ8TklKlyOhnyHM0URwnOD9uUwKnOmVeHwZxrmjXAMjhTVfOldHuAKY4y+lxU47DW0umJibl2FNJbEWrtnWeEWV4JzCf33o+awB4OrdSHtb5qc/wdPJcE8PzbugpsS5Xf07+YoBpnEg5aKAdJkI+uXaSRSujSRM0LQZURlU1TbGzFmaKaTb3SDVQBu0MGt0AK
w1YRLSMWTJnY4x2XHIZd1HPtxGw6H8o/aN+9nocel5NWVMlucnMGptUHoB1PeQ6BwsNBkwW+sOB47HHFOG/L+5TxKrjzASKDTLoCARjMClhko60TFNiKK5/625L022QpMNwjYEpHpGp0GxxxaFPAyRrDMbraIQUIiHlMpdKadD68NGA8bzAo7TnPAdZJwMMnZFULkMZ1eD1XGX0XKfEzFoV7UxVY44QIjlR7LY1qdRxCmVd1XOfMxT6nMtCHCZ1jvMOcaY4XJ0ZeGQVyMfSss9na9d5T9uonfE46KzGV199iXa1oltv+OhHv45PfuLH+Yc//VO8/OAh282WIQuHg5pomCSYyeKMIeRJg5uzveuJ83a2hp/ez87/PNfaVpzr2aqeMMbzwlo+szVW+l4MkWlSYbzeSxlXTGDsEzRtijuiJh4q9lfHUu2mqUGJVpX9W/bjylaYwoSxp+69Uv2qe6Qm1/XeqJ+l2jXXQb7VQKXqMWMxNpBKq9cG5rx/q6m3fv5pGrnf3XPRXWKtpz/2dNaw8o7X3v8BPv/6DZ/6qZ/ho9/029m8vMWsN/zO3/2t/NQP/SA3+3s2zRbnHbltCMOgn93qXmKMm7vw6cyBTHKaK/MhTLjc0rQrUohzESSlUOh/lD3G4mxmnMbZpEVpYOnU6c91XqfS/lftqujgDP2xR2Ti6voBzhqiJNabTSmWZKZRDTSqfq4fBjabDVdX1xqUj4Fpitzf3+nw7LbRjlLWZ0rXdWXu24Q1jv7YM8cDYtWlM8PQqyZOUl1/I7eHPZHI+vIhyXj6SYs+2nU3OsJkinStJWWDb1rdZ1QJStOqC+LxOBKM4Wrb4PwKMY525bkSLTxNYSIMJ1fUXF47DAOeiDOJxnti1sS59Q2ERCSzvbzErTt8f+AwDKSkZGrjO7rWIDGQp4GEdge7pmPlHRfbNW3riCmzOqw4HI86+Ns3T8y2XLUNm+2a/f09tzePyTGw6Tq6bo31Lca1HPvAFDNt22nSNKkRjt4nsei/HCItGU8MkWE4aKwDqv+yOoS7aS3OC6uVp+taQpjoQyJkTdJyTBz2B0BI0RCjdtiUsQPDsed4HPBtVxJuV2IKwVrRolFKxZ5/5HA84o2n9aqHbboV1ntsozPG1GVVnxvHode4UyziLNvNFUpsiRiX52fLYX9gmCbGEDDO0HadFhdNMeISwQk6jkWEVDrdcRwYhj2EQddoswa/BmPwpjAwDkd2cV/O1Upnj+Uy5qhsY7VwZKTKQcrrxWhIDboM1nVlGLhqvHPZ//oxlMJLkQ4YSKLjD7xRGzPrDMZbkheOhzu+NA0QasH1UtdxKp37cZq1siGOxEyhhzc8XH0tr+8/z5s3X+DNmy/SrSzXV5es2w2r9pJ195B18wpTf8fu7os8fvwmMSUur6+4fvCAtnFYY4h9hDSRkmC5II1bHbuUQUjqxBqVDj32O2KciCnh7QWr1UY7kOL1ug1o3Np0xaxtgAzblePh5Qp3sWbXGhoGHt3dsXt8Q39/x3j1gIeX13zNB18iOosJPYeb19nvdoQQ2Dx8ifXlQx688CIhJaahZ4gH7vY7bm5eJ40DXTdxdeVxeWI3CinteeOR8OZdx8svvoRvt5hmi/ct1rZk58lTx5fDc5K0nRbm+X/nwXt5p/5XEg7hPKCvFdJqyPEUbbIEYue2+k//3rlKn2tQrq3v+iDXGORk/12TwaddEM9/ppKiXKhiZw90vaPqJ5oD+Prvx5CKlXmeK3z6GaxuVlVTp20URGw5llOXhawV6Wo+oFWcNAeizJVpKfbOp+S2JkM1kDl/PZeLpt05nf1UdRG2dPxo8vnDAAAL+0lEQVQqIlHTx5IMmkIFeEvSVlr659f/6QR7/l5q0sksqodqZKL0pDnwNWVjz2XIclRaXP035wA6T5DHshoiOjctkCk0Meq1L0FkVroMWQ0LVENDCd5cSb2K0UfpytUOXE3qQOb/G4PSDPP80eaEgJLYn6PqvpRGrO2QqskUU2iXujRmK3/JosPlRQNtKdlszrowNblLjFELDpQH9DQNpEmZFBHLlAxE1XVNITAlfSh4EVwREosRJoqTXJoYZERSo7lmEqzNasmc9UEQU+kiijBOI1G0O0NW6odSdk86xywUSsg41x9O7qFadlEthy/3S1nv84iH032s92+edVlSr4PRfUSTjYQxbtYL5ax6wXq9Kvu1FlakaE6VyZdxjSbzMQbMfDj6mVPSDu3sPFMKNK7QNqdpZOh1QPPF5RXGOV565WXGcc/P/OSPMB7vefjK1+g6CYnDrsda1T6RDTGoM5g4MxcIZg3x3N6jVCXRbiOnrlMt5NRu1XlyU69PNuquhmhXN5FLh1sT71yNc5Jo1Tlrl6xtmjI4vOpYy+Bgpw6tImVWW9kzz11UczZFQ2tK51OprlmrWLNDbIpK+7VPzNIrWlDniPF8TMmTe15NNs3ZeqnmOFU/DZDOKPdqkFCLRfUZFTn2R1ZxTdO29OPEZmtIU+CVF1/ms7/6Op/5xV/i8Rc+z/s/9NvZhcBHfttHeP8HX+OLv/4Zhmmi61Z0XcfYD0xj0E59GVUyFxmBOl9QZ0BqQDYcjgy3d2y2ar6UkzpQ1o4iWdSKu+xv1jrGqYeknQYqeyHrc3ceDaB3yDzGxVghhsA49ByCnnffNGy3W/qDBju2rIsxBMZpQo49TTfMA5Bb15b1knHe6n0GSNKhthOlc1BNnyYtYLrG0rYrDv3A0E90nY5lcYXmHlPPFDLX1y8g4jXhJjGFgZgD/TDRdWuc7xjGwDAc58p8IunolElnCLZNi3UNKVtyEpxruLxoGMPIo5tHOifTT2rAUwprXbemMQFvExvniFWPHjPDOLK/34GBMQfGFHBdh8HgTEO32tA0DVtnuZLMbr9jOPZMKsBlPN7pDCnrMdbjy4iE3f2e29t7EOhWK66vL9V8o2vYbjeMx6Oeg/HIbn+g6Tb4djNXmFMSQtCiA6IuhM6Z4sw4YazBesM0auE5xsSx7xGxeNcUk6hMCMLQq8HUlA3GtYXWr/u16vUhBiGWe12MKaYlwqpdY9tWu/45q406JWFDLeurU6dB2QC28zSdFtekfJ7joPrS4bjHOuHy6ooshnESbu/vydliTaZNhpgmxsOBGCJiVUOocVyRehhl8jTO0lhHioMmUkGNv8J4JI4HchgRAZcF7xqsaeY1LmKwpYAUQiCLlIRXO6fOZojqOKhGSo6ua4gx4rzHezd3//bHA8jIeqM6TJ3zls/2LmX/OGcRr8YrrXesmoa2a3HrFrfuaNdrSJlxP/D4jS/x6EtfRIUm0LarQq2disGW0LZqwIadIB956eUtD1/6OP2Y2Pcjb9zeIWnkcnXDqy9mXnnxik37gKnfcnN7x+PbG45jz93NY66vrsjO4QTwOoB+GHbkDM3VBZeXHe0gxDgxxa40LkqRMBV99DRwN474zuJLIjxNwjAGpmkkZ5VmxJsd+XjPZbOmEXjlwQWr1vN417HbH/nS629w++ZjLh6+SG48Po/YcMCkkXgM3H6uZ7M78ODF97FabbTI
ZDzr1QoTGyYDOQ2E445RhM32ksurC8Q5xAdu7m5I+YA0e4zfYG3Dpm3p3IovB3m7ztO7DRH5ErAH3njWx7LgucOLLOtiwdtjWRsL3gnL2ljwTljWxoK3w7IuFrwT3u218bU555fe7gfPRdIGICI/kXP+1md9HAueLyzrYsE7YVkbC94Jy9pY8E5Y1saCt8OyLha8E56ntWH+/9+yYMGCBQsWLFiwYMGCBQueFZakbcGCBQsWLFiwYMGCBQueYzxPSdt/+6wPYMFziWVdLHgnLGtjwTthWRsL3gnL2ljwdljWxYJ3wnOzNp4bTduCBQsWLFiwYMGCBQsWLHgrnqdO24IFCxYsWLBgwYIFCxYseApL0rZgwYIFCxYsWLBgwYIFzzGeedImIt8uIr8gIp8WkT/9rI9nwbsLEfkaEfm7IvIpEfmHIvKnyusPReT7ReSXyp8PyusiIv9lWS+fEJFvebafYMFvJkTEishPi8j/Wr7/sIj8WLn+f0tEmvJ6W77/dPn5h57lcS/4zYWIXIvI94rIPxKRnxeRf2rZMxYAiMi/W54lPycif0NEumXfeG9CRP6yiLwuIj939tpXvE+IyHeX9/+SiHz3s/gsC/7x4h3Wxn9SnimfEJH/SUSuz372PWVt/IKI/KGz19/VHOaZJm0iYoH/GvjngI8D/7KIfPxZHtOCdx0B+Pdyzh8Hvg34t8oa+NPAD+ScPwL8QPkedK18pPz3bwB/4d0/5AXvIv4U8PNn3//HwJ/POf8W4DHwJ8vrfxJ4XF7/8+V9C7568V8A/3vO+WPA70DXyLJnvMchIq8C/zbwrTnnbwQs8F0s+8Z7FX8V+PanXvuK9gkReQj8WeB3A78L+LM10VvwTzT+Km9dG98PfGPO+ZuAXwS+B6DEpN8F/Lbyd/6bUlB+13OYZ91p+13Ap3POv5xzHoG/CXznMz6mBe8ics6fzzn/VPn6Hg2+XkXXwV8rb/trwL9Yvv5O4L/Lih8FrkXk/e/yYS94FyAirwH/AvAXy/cC/D7ge8tbnl4Xdb18L/D7y/sXfJVBRK6Afxb4SwA55zHnfMOyZyxQOGAlIg5YA59n2Tfek8g5/z3g0VMvf6X7xB8Cvj/n/Cjn/BgN7J8O9hf8E4a3Wxs55/8z5xzKtz8KvFa+/k7gb+ach5zzrwCfRvOXdz2HedZJ26vAr599/9ny2oL3IAo15ZuBHwNeyTl/vvzoC8Ar5etlzbx38J8D/wGQyvcvADdnm+r5tZ/XRfn5bXn/gq8+fBj4EvBXCnX2L4rIhmXPeM8j5/w54D8Ffg1N1m6Bn2TZNxac8JXuE8v+8d7EnwD+Tvn6uVkbzzppW7AAABHZAv8j8O/knO/Of5Z1LsUym+I9BBH5DuD1nPNPPutjWfDcwQHfAvyFnPM3A3tOFCdg2TPeqyi0te9EE/sPABuWrsiCd8CyTyx4O4jIn0GlO3/9WR/L03jWSdvngK85+/618tqC9xBExKMJ21/POf/t8vIXK4Wp/Pl6eX1ZM+8N/B7gD4vIZ1DKwe9DdUzXhfYET177eV2Un18Bb76bB7zgXcNngc/mnH+sfP+9aBK37BkL/gDwKznnL+WcJ+Bvo3vJsm8sqPhK94ll/3gPQUT+NeA7gD+WT4Osn5u18ayTth8HPlKcnRpU6Pd9z/iYFryLKPqBvwT8fM75Pzv70fcB1aXpu4H/5ez1f7U4PX0bcHtGdVjwVYKc8/fknF/LOX8I3Rf+r5zzHwP+LvBHytueXhd1vfyR8v6lgvpViJzzF4BfF5FvKC/9fuBTLHvGAqVFfpuIrMuzpa6NZd9YUPGV7hP/B/AHReRB6eT+wfLagq8yiMi3o5KMP5xzPpz96PuA7ypusx9GzWr+Ac8gh5FnvT+JyD+Palcs8Jdzzn/umR7QgncVIvLPAD8EfJKTduk/RHVt/wPwQeBXgT+ac35UHsT/FUp5OQB/POf8E+/6gS941yAivxf493PO3yEiX4d23h4CPw38KznnQUQ64L9HNZGPgO/KOf/yszrmBb+5EJHfiRrUNMAvA38cLUIue8Z7HCLyHwH/Ekpv+mngX0d1Jsu+8R6DiPwN4PcCLwJfRF0g/2e+wn1CRP4EGpcA/Lmc8195Nz/Hgn/8eIe18T1Ay6nb/qM553+zvP/PoDq3gMp4/k55/V3NYZ550rZgwYIFCxYsWLBgwYIFC94Zz5oeuWDBggULFixYsGDBggULvgyWpG3BggULFixYsGDBggULnmMsSduCBQsWLFiwYMGCBQsWPMdYkrYFCxYsWLBgwYIFCxYseI6xJG0LFixYsGDBggULFixY8BxjSdoWLFiwYMGCBQsWLFiw4DnGkrQtWLBgwYIFCxYsWLBgwXOM/w8j4AfhhpUfPgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PMZvtSIl71qi", + "colab_type": "text" + }, + "source": [ + "After downloading the data, we need to implement a function to convert the kitti annotation format into the middle format. In this tutorial we choose to convert them in **`load_annotations`** function in a newly implemented **`KittiTinyDataset`**.\n", + "\n", + "Let's take a loot at the annotation txt file.\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "n7rwalnPd6e1", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "515b3017-8d4c-4f08-e28a-7f4bd4c204d3" + }, + "source": [ + "# Check the label of a single image\n", + "!cat kitti_tiny/training/label_2/000000.txt" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QA1pFg-FeO3l", + "colab_type": "text" + }, + "source": [ + "According to the KITTI's documentation, the first column indicates the class of the object, and the 5th to 8th columns indicates the bboxes. We need to read annotations of each image and convert them into middle format MMDetection accept is as below:\n", + "\n", + "```python\n", + "[\n", + " {\n", + " 'filename': 'a.jpg',\n", + " 'width': 1280,\n", + " 'height': 720,\n", + " 'ann': {\n", + " 'bboxes': (n, 4),\n", + " 'labels': (n, ),\n", + " 'bboxes_ignore': (k, 4), (optional field)\n", + " 'labels_ignore': (k, 4) (optional field)\n", + " }\n", + " },\n", + " ...\n", + "]\n", + "```" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "GdSaB2ad0EdX", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import copy\n", + "import os.path as osp\n", + "\n", + "import mmcv\n", + "import numpy as np\n", + "\n", + "from mmdet.datasets.builder import DATASETS\n", + "from mmdet.datasets.custom import CustomDataset\n", + "\n", + "@DATASETS.register_module()\n", + "class KittiTinyDataset(CustomDataset):\n", + "\n", + " CLASSES = ('Car', 'Pedestrian', 'Cyclist')\n", + "\n", + " def load_annotations(self, ann_file):\n", + " cat2label = {k: i for i, k in enumerate(self.CLASSES)}\n", + " # load image list from file\n", + " image_list = mmcv.list_from_file(self.ann_file)\n", + " \n", + " data_infos = []\n", + " # convert annotations to middle format\n", + " for image_id in image_list:\n", + " filename = f'{self.img_prefix}/{image_id}.jpeg'\n", + " image = mmcv.imread(filename)\n", + " height, width = image.shape[:2]\n", + " \n", + " data_info = dict(filename=f'{image_id}.jpeg', width=width, height=height)\n", + " \n", + " # load annotations\n", + " label_prefix = self.img_prefix.replace('image_2', 'label_2')\n", + " lines = mmcv.list_from_file(osp.join(label_prefix, f'{image_id}.txt'))\n", + " \n", + " content = [line.strip().split(' ') for line in lines]\n", + " bbox_names = [x[0] for x in content]\n", + " bboxes = [[float(info) for info in x[4:8]] for x in content]\n", + " \n", + " gt_bboxes = []\n", + " gt_labels = []\n", + " gt_bboxes_ignore = []\n", + " gt_labels_ignore = []\n", + " \n", + " # filter 'DontCare'\n", + " for bbox_name, bbox in zip(bbox_names, bboxes):\n", + " if bbox_name in cat2label:\n", + " gt_labels.append(cat2label[bbox_name])\n", + " gt_bboxes.append(bbox)\n", + " else:\n", + " 
+ "                    # anything not in CLASSES (e.g. 'DontCare') is kept only as an ignore region\n", + "                    gt_labels_ignore.append(-1)\n", + "                    gt_bboxes_ignore.append(bbox)\n", + "\n", + "            data_anno = dict(\n", + "                bboxes=np.array(gt_bboxes, dtype=np.float32).reshape(-1, 4),\n", + "                labels=np.array(gt_labels, dtype=np.long),\n", + "                bboxes_ignore=np.array(gt_bboxes_ignore,\n", + "                                       dtype=np.float32).reshape(-1, 4),\n", + "                labels_ignore=np.array(gt_labels_ignore, dtype=np.long))\n", + "\n", + "            data_info.update(ann=data_anno)\n", + "            data_infos.append(data_info)\n", + "\n", + "        return data_infos" + ], + "execution_count": 12, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PwqJOpBe-bMj", + "colab_type": "text" + }, + "source": [ + "### Modify the config\n", + "\n", + "In the next step, we need to modify the config for training.\n", + "To speed things up, we finetune a detector from a pre-trained one instead of training from scratch." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "hamZrlnH-YDD", + "colab_type": "code", + "colab": {} + }, + "source": [ + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py')" + ], + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HntziLGq-92Z", + "colab_type": "text" + }, + "source": [ + "Given a config that trains a Faster R-CNN on the COCO dataset, we need to modify some values to use it for training Faster R-CNN on the KITTI dataset." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pUbwD8uV0PR8", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "2d313ca5-754e-4f89-e392-3f6e9b664f01" + }, + "source": [ + "from mmdet.apis import set_random_seed\n", + "\n", + "# Modify dataset type and path\n", + "cfg.dataset_type = 'KittiTinyDataset'\n", + "cfg.data_root = 'kitti_tiny/'\n", + "\n", + "cfg.data.test.type = 'KittiTinyDataset'\n", + "cfg.data.test.data_root = 'kitti_tiny/'\n", + "cfg.data.test.ann_file = 'train.txt'\n", + "cfg.data.test.img_prefix = 'training/image_2'\n", + "\n", + "cfg.data.train.type = 'KittiTinyDataset'\n", + "cfg.data.train.data_root = 'kitti_tiny/'\n", + "cfg.data.train.ann_file = 'train.txt'\n", + "cfg.data.train.img_prefix = 'training/image_2'\n", + "\n", + "cfg.data.val.type = 'KittiTinyDataset'\n", + "cfg.data.val.data_root = 'kitti_tiny/'\n", + "cfg.data.val.ann_file = 'val.txt'\n", + "cfg.data.val.img_prefix = 'training/image_2'\n", + "\n", + "# Modify the number of classes of the model in the box head\n", + "cfg.model.roi_head.bbox_head.num_classes = 3\n", + "# We can still use the pre-trained Mask RCNN model though we do not need to\n", + "# use the mask branch\n", + "cfg.load_from = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "\n", + "# Set up the working dir to save files and logs.\n", + "cfg.work_dir = './tutorial_exps'\n", + "\n", + "# The original learning rate (LR) is set for 8-GPU training.\n", + "# We divide it by 8 since we only use one GPU.\n", + "cfg.optimizer.lr = 0.02 / 8\n", + "cfg.lr_config.warmup = None\n", + "cfg.log_config.interval = 10\n", + "\n", + "# Change the evaluation metric since we use a customized dataset.\n", + "cfg.evaluation.metric = 'mAP'\n", + "# We can set the evaluation interval to run evaluation less often\n", + "cfg.evaluation.interval = 12\n", + "# We can set the checkpoint saving interval to reduce the storage cost\n", + "cfg.checkpoint_config.interval = 12\n", + "\n",
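+ "# Note: set_random_seed seeds the Python, NumPy and PyTorch RNGs; with\n", + "# deterministic=False, cuDNN autotuning stays enabled, so repeated runs\n", + "# can still differ slightly.\n", + "# Set the seed so that the results are more 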
reproducible\n", + "cfg.seed = 0\n", + "set_random_seed(0, deterministic=False)\n", + "cfg.gpu_ids = range(1)\n", + "\n", + "\n", + "# We can initialize the logger for training and have a look\n", + "# at the final config used for training\n", + "print(f'Config:\\n{cfg.pretty_text}')\n" + ], + "execution_count": 14, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='FasterRCNN',\n", + " pretrained='open-mmlab://detectron2/resnet50_caffe',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " depth=50,\n", + " num_stages=4,\n", + " out_indices=(0, 1, 2, 3),\n", + " frozen_stages=1,\n", + " norm_cfg=dict(type='BN', requires_grad=False),\n", + " norm_eval=True,\n", + " style='caffe'),\n", + " neck=dict(\n", + " type='FPN',\n", + " in_channels=[256, 512, 1024, 2048],\n", + " out_channels=256,\n", + " num_outs=5),\n", + " rpn_head=dict(\n", + " type='RPNHead',\n", + " in_channels=256,\n", + " feat_channels=256,\n", + " anchor_generator=dict(\n", + " type='AnchorGenerator',\n", + " scales=[8],\n", + " ratios=[0.5, 1.0, 2.0],\n", + " strides=[4, 8, 16, 32, 64]),\n", + " bbox_coder=dict(\n", + " type='DeltaXYWHBBoxCoder',\n", + " target_means=[0.0, 0.0, 0.0, 0.0],\n", + " target_stds=[1.0, 1.0, 1.0, 1.0]),\n", + " loss_cls=dict(\n", + " type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),\n", + " loss_bbox=dict(type='L1Loss', loss_weight=1.0)),\n", + " roi_head=dict(\n", + " type='StandardRoIHead',\n", + " bbox_roi_extractor=dict(\n", + " type='SingleRoIExtractor',\n", + " roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),\n", + " out_channels=256,\n", + " featmap_strides=[4, 8, 16, 32]),\n", + " bbox_head=dict(\n", + " type='Shared2FCBBoxHead',\n", + " in_channels=256,\n", + " fc_out_channels=1024,\n", + " roi_feat_size=7,\n", + " num_classes=3,\n", + " bbox_coder=dict(\n", + " type='DeltaXYWHBBoxCoder',\n", + " target_means=[0.0, 0.0, 0.0, 0.0],\n", + " target_stds=[0.1, 0.1, 0.2, 0.2]),\n", + " reg_class_agnostic=False,\n", + " loss_cls=dict(\n", + " type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),\n", + " loss_bbox=dict(type='L1Loss', loss_weight=1.0))))\n", + "train_cfg = dict(\n", + " rpn=dict(\n", + " assigner=dict(\n", + " type='MaxIoUAssigner',\n", + " pos_iou_thr=0.7,\n", + " neg_iou_thr=0.3,\n", + " min_pos_iou=0.3,\n", + " match_low_quality=True,\n", + " ignore_iof_thr=-1),\n", + " sampler=dict(\n", + " type='RandomSampler',\n", + " num=256,\n", + " pos_fraction=0.5,\n", + " neg_pos_ub=-1,\n", + " add_gt_as_proposals=False),\n", + " allowed_border=-1,\n", + " pos_weight=-1,\n", + " debug=False),\n", + " rpn_proposal=dict(\n", + " nms_across_levels=False,\n", + " nms_pre=2000,\n", + " nms_post=1000,\n", + " max_num=1000,\n", + " nms_thr=0.7,\n", + " min_bbox_size=0),\n", + " rcnn=dict(\n", + " assigner=dict(\n", + " type='MaxIoUAssigner',\n", + " pos_iou_thr=0.5,\n", + " neg_iou_thr=0.5,\n", + " min_pos_iou=0.5,\n", + " match_low_quality=False,\n", + " ignore_iof_thr=-1),\n", + " sampler=dict(\n", + " type='RandomSampler',\n", + " num=512,\n", + " pos_fraction=0.25,\n", + " neg_pos_ub=-1,\n", + " add_gt_as_proposals=True),\n", + " pos_weight=-1,\n", + " debug=False))\n", + "test_cfg = dict(\n", + " rpn=dict(\n", + " nms_across_levels=False,\n", + " nms_pre=1000,\n", + " nms_post=1000,\n", + " max_num=1000,\n", + " nms_thr=0.7,\n", + " min_bbox_size=0),\n", + " rcnn=dict(\n", + " score_thr=0.05,\n", + " nms=dict(type='nms', iou_threshold=0.5),\n", + " max_per_img=100))\n", + "dataset_type = 
'KittiTinyDataset'\n", + "data_root = 'kitti_tiny/'\n", + "img_norm_cfg = dict(\n", + " mean=[103.53, 116.28, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)\n", + "train_pipeline = [\n", + " dict(type='LoadImageFromFile'),\n", + " dict(type='LoadAnnotations', with_bbox=True),\n", + " dict(\n", + " type='Resize',\n", + " img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),\n", + " (1333, 768), (1333, 800)],\n", + " multiscale_mode='value',\n", + " keep_ratio=True),\n", + " dict(type='RandomFlip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='DefaultFormatBundle'),\n", + " dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', keys=['img']),\n", + " dict(type='Collect', keys=['img'])\n", + " ])\n", + "]\n", + "data = dict(\n", + " samples_per_gpu=2,\n", + " workers_per_gpu=2,\n", + " train=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='train.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(type='LoadAnnotations', with_bbox=True),\n", + " dict(\n", + " type='Resize',\n", + " img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),\n", + " (1333, 768), (1333, 800)],\n", + " multiscale_mode='value',\n", + " keep_ratio=True),\n", + " dict(type='RandomFlip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='DefaultFormatBundle'),\n", + " dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])\n", + " ],\n", + " data_root='kitti_tiny/'),\n", + " val=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='val.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', keys=['img']),\n", + " dict(type='Collect', keys=['img'])\n", + " ])\n", + " ],\n", + " data_root='kitti_tiny/'),\n", + " test=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='train.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', 
keys=['img']),\n", + "            dict(type='Collect', keys=['img'])\n", + "        ])\n", + "    ],\n", + "    data_root='kitti_tiny/'))\n", + "evaluation = dict(interval=12, metric='mAP')\n", + "optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=None)\n", + "lr_config = dict(\n", + "    policy='step',\n", + "    warmup=None,\n", + "    warmup_iters=500,\n", + "    warmup_ratio=0.001,\n", + "    step=[8, 11])\n", + "total_epochs = 12\n", + "checkpoint_config = dict(interval=12)\n", + "log_config = dict(interval=10, hooks=[dict(type='TextLoggerHook')])\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "work_dir = './tutorial_exps'\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "111W_oZV_3wa", + "colab_type": "text" + }, + "source": [ + "### Train a new detector\n", + "\n", + "Finally, let's initialize the dataset and detector, then train a new detector!" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "7WBWHu010PN3", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 987, + "referenced_widgets": [ + "f774902af44d49cd9cd6049f9e1d9527", + "9748115624d645b7bca53c0170d13f20", + "ee35e58fb3794e68815a25d2798399ee", + "a3110b0aab414ba3b6fcbfcb68884e17", + "0cfe88e3a7224d988638ed69169d091d", + "ad4b96f52e8345fca66b853b74f77e09", + "8327d294e11c4ed0b1002f58122c16bb", + "fb500fbfc668479ca7ee264a69fa3b6e" + ] + }, + "outputId": "781e4b18-a9ac-4a55-d2be-3e11a11dbe28" + }, + "source": [ + "from mmdet.datasets import build_dataset\n", + "from mmdet.models import build_detector\n", + "from mmdet.apis import train_detector\n", + "\n", + "\n", + "# Build dataset\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# Build the detector\n", + "model = build_detector(\n", + "    cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)\n", + "# Add an attribute for visualization convenience\n", + "model.CLASSES = datasets[0].CLASSES\n", + "\n", + "# Create work_dir\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_detector(model, datasets, cfg, distributed=False, validate=True)" + ], + "execution_count": 15, + "outputs": [ + { + "output_type": "stream", + "text": [ + "2020-07-13 14:49:11,873 - mmdet - INFO - load model from: open-mmlab://detectron2/resnet50_caffe\n", + "Downloading: \"https://open-mmlab.s3.ap-northeast-2.amazonaws.com/pretrain/third_party/resnet50_msra-5891d200.pth\" to /root/.cache/torch/checkpoints/resnet50_msra-5891d200.pth\n" + ], + "name": "stderr" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "f774902af44d49cd9cd6049f9e1d9527", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, max=94284731.0), HTML(value='')))" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "2020-07-13 14:49:16,377 - mmdet - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "unexpected key in source state_dict: conv1.bias\n", + "\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "2020-07-13 14:49:16,729 
- mmdet - INFO - load checkpoint from checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth\n", + "2020-07-13 14:49:16,864 - mmdet - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for roi_head.bbox_head.fc_cls.weight: copying a param with shape torch.Size([81, 1024]) from checkpoint, the shape in current model is torch.Size([4, 1024]).\n", + "size mismatch for roi_head.bbox_head.fc_cls.bias: copying a param with shape torch.Size([81]) from checkpoint, the shape in current model is torch.Size([4]).\n", + "size mismatch for roi_head.bbox_head.fc_reg.weight: copying a param with shape torch.Size([320, 1024]) from checkpoint, the shape in current model is torch.Size([12, 1024]).\n", + "size mismatch for roi_head.bbox_head.fc_reg.bias: copying a param with shape torch.Size([320]) from checkpoint, the shape in current model is torch.Size([12]).\n", + "unexpected key in source state_dict: roi_head.mask_head.convs.0.conv.weight, roi_head.mask_head.convs.0.conv.bias, roi_head.mask_head.convs.1.conv.weight, roi_head.mask_head.convs.1.conv.bias, roi_head.mask_head.convs.2.conv.weight, roi_head.mask_head.convs.2.conv.bias, roi_head.mask_head.convs.3.conv.weight, roi_head.mask_head.convs.3.conv.bias, roi_head.mask_head.upsample.weight, roi_head.mask_head.upsample.bias, roi_head.mask_head.conv_logits.weight, roi_head.mask_head.conv_logits.bias\n", + "\n", + "2020-07-13 14:49:16,868 - mmdet - INFO - Start running, host: root@d8563d7b87d2, work_dir: /content/mmdetection/tutorial_exps\n", + "2020-07-13 14:49:16,868 - mmdet - INFO - workflow: [('train', 1)], max: 12 epochs\n", + "2020-07-13 14:49:22,475 - mmdet - INFO - Epoch [1][10/25]\tlr: 2.500e-03, eta: 0:02:38, time: 0.546, data_time: 0.224, memory: 2133, loss_rpn_cls: 0.0286, loss_rpn_bbox: 0.0177, loss_cls: 0.5966, acc: 80.5273, loss_bbox: 0.3858, loss: 1.0287\n", + "2020-07-13 14:49:25,783 - mmdet - INFO - Epoch [1][20/25]\tlr: 2.500e-03, eta: 0:02:02, time: 0.331, data_time: 0.020, memory: 2133, loss_rpn_cls: 0.0214, loss_rpn_bbox: 0.0122, loss_cls: 0.1742, acc: 93.9746, loss_bbox: 0.2994, loss: 0.5072\n", + "2020-07-13 14:49:32,934 - mmdet - INFO - Epoch [2][10/25]\tlr: 2.500e-03, eta: 0:01:47, time: 0.544, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0178, loss_rpn_bbox: 0.0148, loss_cls: 0.1506, acc: 94.9414, loss_bbox: 0.2814, loss: 0.4645\n", + "2020-07-13 14:49:36,280 - mmdet - INFO - Epoch [2][20/25]\tlr: 2.500e-03, eta: 0:01:39, time: 0.335, data_time: 0.020, memory: 2133, loss_rpn_cls: 0.0113, loss_rpn_bbox: 0.0127, loss_cls: 0.1255, acc: 95.5176, loss_bbox: 0.1956, loss: 0.3452\n", + "2020-07-13 14:49:43,471 - mmdet - INFO - Epoch [3][10/25]\tlr: 2.500e-03, eta: 0:01:32, time: 0.545, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0074, loss_rpn_bbox: 0.0107, loss_cls: 0.0982, acc: 96.4355, loss_bbox: 0.1552, loss: 0.2715\n", + "2020-07-13 14:49:46,863 - mmdet - INFO - Epoch [3][20/25]\tlr: 2.500e-03, eta: 0:01:26, time: 0.339, data_time: 0.020, memory: 2133, loss_rpn_cls: 0.0072, loss_rpn_bbox: 0.0147, loss_cls: 0.1446, acc: 94.6289, loss_bbox: 0.2507, loss: 0.4171\n", + "2020-07-13 14:49:54,181 - mmdet - INFO - Epoch [4][10/25]\tlr: 2.500e-03, eta: 0:01:20, time: 0.555, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0080, loss_rpn_bbox: 0.0141, loss_cls: 0.1119, acc: 95.6055, loss_bbox: 0.2180, loss: 0.3521\n", + "2020-07-13 14:49:57,636 - mmdet - INFO - Epoch [4][20/25]\tlr: 2.500e-03, eta: 0:01:16, time: 0.345, data_time: 
0.020, memory: 2133, loss_rpn_cls: 0.0057, loss_rpn_bbox: 0.0123, loss_cls: 0.1228, acc: 95.4004, loss_bbox: 0.2106, loss: 0.3514\n", + "2020-07-13 14:50:04,959 - mmdet - INFO - Epoch [5][10/25]\tlr: 2.500e-03, eta: 0:01:10, time: 0.556, data_time: 0.224, memory: 2133, loss_rpn_cls: 0.0033, loss_rpn_bbox: 0.0106, loss_cls: 0.1083, acc: 95.8594, loss_bbox: 0.2095, loss: 0.3316\n", + "2020-07-13 14:50:08,441 - mmdet - INFO - Epoch [5][20/25]\tlr: 2.500e-03, eta: 0:01:06, time: 0.348, data_time: 0.020, memory: 2133, loss_rpn_cls: 0.0041, loss_rpn_bbox: 0.0109, loss_cls: 0.0932, acc: 96.6211, loss_bbox: 0.1926, loss: 0.3009\n", + "2020-07-13 14:50:15,852 - mmdet - INFO - Epoch [6][10/25]\tlr: 2.500e-03, eta: 0:01:01, time: 0.561, data_time: 0.224, memory: 2133, loss_rpn_cls: 0.0029, loss_rpn_bbox: 0.0087, loss_cls: 0.0832, acc: 97.0312, loss_bbox: 0.1773, loss: 0.2722\n", + "2020-07-13 14:50:19,384 - mmdet - INFO - Epoch [6][20/25]\tlr: 2.500e-03, eta: 0:00:57, time: 0.353, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0039, loss_rpn_bbox: 0.0108, loss_cls: 0.0837, acc: 96.6699, loss_bbox: 0.1745, loss: 0.2729\n", + "2020-07-13 14:50:26,835 - mmdet - INFO - Epoch [7][10/25]\tlr: 2.500e-03, eta: 0:00:51, time: 0.562, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0030, loss_rpn_bbox: 0.0093, loss_cls: 0.0791, acc: 96.9922, loss_bbox: 0.1656, loss: 0.2570\n", + "2020-07-13 14:50:30,411 - mmdet - INFO - Epoch [7][20/25]\tlr: 2.500e-03, eta: 0:00:48, time: 0.358, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0020, loss_rpn_bbox: 0.0116, loss_cls: 0.0842, acc: 96.7188, loss_bbox: 0.1731, loss: 0.2709\n", + "2020-07-13 14:50:37,948 - mmdet - INFO - Epoch [8][10/25]\tlr: 2.500e-03, eta: 0:00:42, time: 0.569, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0021, loss_rpn_bbox: 0.0088, loss_cls: 0.0709, acc: 96.9922, loss_bbox: 0.1352, loss: 0.2171\n", + "2020-07-13 14:50:41,592 - mmdet - INFO - Epoch [8][20/25]\tlr: 2.500e-03, eta: 0:00:38, time: 0.364, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0010, loss_rpn_bbox: 0.0079, loss_cls: 0.0719, acc: 97.4219, loss_bbox: 0.1627, loss: 0.2436\n", + "2020-07-13 14:50:49,231 - mmdet - INFO - Epoch [9][10/25]\tlr: 2.500e-04, eta: 0:00:33, time: 0.576, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0035, loss_rpn_bbox: 0.0083, loss_cls: 0.0657, acc: 97.5488, loss_bbox: 0.1311, loss: 0.2086\n", + "2020-07-13 14:50:52,936 - mmdet - INFO - Epoch [9][20/25]\tlr: 2.500e-04, eta: 0:00:29, time: 0.370, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0010, loss_rpn_bbox: 0.0065, loss_cls: 0.0521, acc: 98.1055, loss_bbox: 0.1059, loss: 0.1655\n", + "2020-07-13 14:51:00,662 - mmdet - INFO - Epoch [10][10/25]\tlr: 2.500e-04, eta: 0:00:24, time: 0.581, data_time: 0.225, memory: 2133, loss_rpn_cls: 0.0028, loss_rpn_bbox: 0.0083, loss_cls: 0.0665, acc: 97.5488, loss_bbox: 0.1275, loss: 0.2050\n", + "2020-07-13 14:51:04,433 - mmdet - INFO - Epoch [10][20/25]\tlr: 2.500e-04, eta: 0:00:20, time: 0.377, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0011, loss_rpn_bbox: 0.0057, loss_cls: 0.0563, acc: 97.9297, loss_bbox: 0.1247, loss: 0.1879\n", + "2020-07-13 14:51:12,192 - mmdet - INFO - Epoch [11][10/25]\tlr: 2.500e-04, eta: 0:00:14, time: 0.581, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0035, loss_rpn_bbox: 0.0065, loss_cls: 0.0655, acc: 97.5586, loss_bbox: 0.1241, loss: 0.1997\n", + "2020-07-13 14:51:15,946 - mmdet - INFO - Epoch [11][20/25]\tlr: 2.500e-04, eta: 0:00:11, time: 0.375, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0018, loss_rpn_bbox: 
0.0073, loss_cls: 0.0574, acc: 97.9297, loss_bbox: 0.1300, loss: 0.1965\n", + "2020-07-13 14:51:23,636 - mmdet - INFO - Epoch [12][10/25]\tlr: 2.500e-05, eta: 0:00:05, time: 0.578, data_time: 0.224, memory: 2134, loss_rpn_cls: 0.0009, loss_rpn_bbox: 0.0061, loss_cls: 0.0549, acc: 97.8418, loss_bbox: 0.1236, loss: 0.1855\n", + "2020-07-13 14:51:27,312 - mmdet - INFO - Epoch [12][20/25]\tlr: 2.500e-05, eta: 0:00:01, time: 0.368, data_time: 0.022, memory: 2134, loss_rpn_cls: 0.0011, loss_rpn_bbox: 0.0054, loss_cls: 0.0486, acc: 98.3887, loss_bbox: 0.0922, loss: 0.1473\n", + "2020-07-13 14:51:29,031 - mmdet - INFO - Saving checkpoint at 12 epochs\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 25/25, 10.9 task/s, elapsed: 2s, ETA: 0s" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "2020-07-13 14:51:32,234 - mmdet - INFO - \n", + "+------------+-----+------+--------+-------+\n", + "| class      | gts | dets | recall | ap    |\n", + "+------------+-----+------+--------+-------+\n", + "| Car        | 62  | 142  | 0.968  | 0.871 |\n", + "| Pedestrian | 13  | 56   | 0.846  | 0.761 |\n", + "| Cyclist    | 7   | 63   | 0.429  | 0.035 |\n", + "+------------+-----+------+--------+-------+\n", + "| mAP        |     |      |        | 0.555 |\n", + "+------------+-----+------+--------+-------+\n", + "2020-07-13 14:51:32,237 - mmdet - INFO - Epoch [12][25/25]\tlr: 2.500e-05, mAP: 0.5553\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_vYQF5K2NqqI", + "colab_type": "text" + }, + "source": [ + "### Understand the log\n", + "From the log, we can get a basic understanding of the training process and see how well the detector is trained.\n", + "\n", + "First, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is common practice, since training from scratch is far more costly. The log shows that all the weights of the ResNet-50 backbone are loaded except `conv1.bias`, which the detection model does not use (its first conv layer has no bias parameter), so the key is reported as unexpected and ignored.\n", + "\n", + "Second, since the dataset we are using is small, we load a Mask R-CNN model and finetune it for detection. Because the detector we are actually using is Faster R-CNN, the weights of the mask branch, e.g. `roi_head.mask_head`, are reported as `unexpected key in source state_dict` and are not loaded.\n", + "The original Mask R-CNN was trained on the COCO dataset, which contains 80 classes, while the KITTI Tiny dataset only has 3 classes. Therefore, the final FC layers of the pre-trained Mask R-CNN for classification and regression have different weight shapes and are not used.\n", + "\n", + "Third, after training, the detector is evaluated with the default VOC-style evaluation. The results show that the detector achieves 55.5 mAP on the val dataset, not bad!" + ] + },
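+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a side note, the size-mismatch warnings above can be inspected by hand. The cell below is a minimal sketch (not part of the original tutorial run, hence no recorded output): it opens the downloaded COCO checkpoint set in `cfg.load_from` and prints the shape of the box-classification head that triggered the warning.\n" + ] + }, + { + "cell_type": "code", + "metadata": {}, + "source": [ + "import torch\n", + "\n", + "# Load the COCO-pretrained checkpoint on the CPU; MMDetection checkpoints\n", + "# store the model weights under the 'state_dict' key.\n", + "ckpt = torch.load(cfg.load_from, map_location='cpu')\n", + "# 81 = 80 COCO classes + 1 background; our 3-class head expects 3 + 1 = 4,\n", + "# which is why fc_cls/fc_reg are reinitialized instead of loaded.\n", + "print(ckpt['state_dict']['roi_head.bbox_head.fc_cls.weight'].shape)" + ], + "execution_count": null, + "outputs": [] + },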
+ { + "cell_type": "markdown", + "metadata": { + "id": "MfQ-yspZLuuI", + "colab_type": "text" + }, + "source": [ + "## Test the trained detector\n", + "\n", + "After finetuning the detector, let's visualize the prediction results!" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "_MuZurfGLq0p", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 304 + }, + "outputId": "8a1724b3-1374-4d06-a3ea-26855ed44b2a" + }, + "source": [ + "img = mmcv.imread('kitti_tiny/training/image_2/000068.jpeg')\n", + "\n", + "# Attach the config so the inference helpers can build the test pipeline\n", + "model.cfg = cfg\n", + "result = inference_detector(model, img)\n", + "show_result_pyplot(model, img, result)\n" + ], + "execution_count": 16, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "[... base64-encoded PNG of the detection visualization omitted ...]
TXjc9j5m2o6iNS92zyOVxNEXFUiN8lfe9mxvjhnMp1SDAcMhiOG4xFKSHSm0CrD+5T8mCzQtJYANG3HYr3G+oDOc0ZFSVaWGB+oY0slJIl/6hOPSgiMNrg2UPmWurF4B9EG6vUqdXSlosxy9re3GI/HrFYL1ktHcJbzkxPmZ3MmkwlbBwWj0Qg9GRLaJBoSQsS6wOVizqeHK6peSEUiwQWidRTGUMmGx4+fYDLJal0hpWC+XicIgwYRA/O6RtYRJ0SyTmhaiIJhWSCE5/DslONvfpODh8+Y7O4xnYwYD/O0Zfc4ch/TJp7SgUgXA230SGeRhaFEUq07GldhbYeMA4JNcCSdZ3ghobOU4zH2dM2H773Pl27fZusLtxB5RtAqefeRTNgjI0SukOQsfMus6ZDFgqwbIEVvyi4lUWl8TPzDIi9xtkOJpHppckNWFnTWcTmf8/jTJ0h/m1vbU/YO7lCsR1yenNOs5uxulZgsQ0SL1orRcEDbNehB4oZJ5xiXitOq5XjVMNk1xOhRpILA7bt30ZlCZRFvFH5UcryeQb1mfrSgKwboScau2afzW6AirU/fqffJa08JneAkCEaDIYOdfex8gZ/NsJsASkhsiLgYEdrQWsfj06fc2dtmpA1SBIwWfaDmyXLDrdu3aNoa8/hTsjy/MnEVIWH0g1KIosBrTT4cYEqPa8EGRyTgu4ZqPacshgwnksleyR1usTx/jieLc6zWzLvA7//xn/M3Dz4E1+K9oLMVf/C7/wcvvnqf519+uZc1Tl3yTXL1owEcP2C9/wl/99MZSTiqrluaqubifM63/+pbfOO3foN1VXN8dMLB/i28C9ROJSEl0SUOspMIIZlOJwxHjszoHvkn8AuLyjToEr3zBW79ukTaC/ZGmvW9jGdPjnj40cf84j//F/zSV36TxdEF93/x18gnOe3qbT559y5/9sd/zcROeH66y3sPOkbjEWV7yOjePVS9xF44LIbzs4rSVVi7QzUfUkTB3s7zjEd7XJ6tidk5+y/f4Qv7L4MBH8Y4ZdHMyCcZRtzH7E+5Ndzh7HLF/nSbVGaSgEYmuASEDTS+N50Vm0p9gjnJBM5GKsdwNESIKYUe8cWdbZrVI0y2jzIlQkgG2QFCNAhf9slF2ndCv0fJDX5ICKTpVURluArCflbuo79vsrYZaa1UPyCI+/7jRxHwwYGSRHwqBoeQZM/7NnZkA8nqxw+8YJsQNdz4eTPfU3gaIXXbfACVBChEVMyOznjn2+8Q44x7r2/eV//K/t4IIqLDGlXNibMZ4BGTkmw8xIscyJKYRX9GEQ1SKJ5dHvHo4hmz1SXHHx7inj1FckFuAmo8woWAFyk59dLjcKlrpEAayXR3CiiqqoEYsMGxblZY4bASbBQMi5y1rQlS4UNED3LyQcE0Btom0DZLpAxE4YgkXmfEEkJLjC0D4RDzmvrwnGAt0eZcOsMn+ZCPppEWw62D2zz3X/wLohjzx//Xn/Ctv33Kw+MZZ41nLUoWsSVKhZnu8eovf4PXv/Yr5FnZdyclm7SmqxoePn7Ms5MTWu/xKhVuY8+7SvFdEqaTXCsg0nepE2wvpHjXJ3sjpZJSuXPuylYj+IiQGiVk8sgNASUFrbU429I2FYv5nMVinughrsN1Fm8dIqaYexMjbrza0nwOn4kfryXzr+/PTefuh8GdN8+/2TyQvVJ37CH3Uoq+03gjLv0Bt39aVnoERd/puuqYf860cPO+rubFjU7jZ4+c3oMgCe2JGK9mU+SadiB7S4bE/5Pfb1nyufH3Tdr+YYzxqRDiAPh9IcR7n/twsU/ovm/0Cd6/AZhsjWMMKXtHSHwUOC/wUeFFL8YRE88rVGuKokDrBF0xWmOkwpgMFQMmyymKkiwqYuewdUOUCplpvPAcrWZEreiCJ8OzNZ4wGEhGbYn3NeBwzrGuOlZVR2fB5DlBCJRTeKlZLFa07Zy2a5OXHBIhckDiQoaSRTKUVjDcMShT4DqXoIhIfL+sbi6+c562rtExSVwrpZBaUg5ytJW9OlISWXbeo3wk05ICzUBrOuexeU7VdlRNUjC0MlKHgBOpkykCuJiUBm1ne5imSH6FV61c8GLTGIawmVSb/wmhtwP4bHftB02s5OnBlTljIrCKfi8QGCX7Knt6vhSK2arhfNEglCTLcybTKVuTKUVpyE3yKdMyw+hA9KmjmascLXSSrncOV9cYBNtZxlgbukIRQqRpUqdKSSh11is0Ssqs7JPtjMII1uuKcVlQlAO2t7cpy4L96QTb7bJaLFgvl3gbcW3DyeElQXgGxS7jYpcXn3+e23fvcnL5lCgEy7ri7HKF9xEtDZlU5EqjhaSuGpp2jdICenjsomkwZc68qinzjK4XTSmGE8q8pNQDXGcJUrG1M0Kcznjvo0/49vsP0XnJrYN9Xnv5RUT0dDYioqGzHlf8v9y9V5Nk6Xnn93vdMWmqsnz76R5vMUMMiIUhsVyBQcYud1e7Qa24sQpRoZDu9C10pa+hCykUUqy40sWKWgXN0oAghoPBDIDB+Pbd5avSHvM6Xbwnq2uaDYAG3CB5IjoyqzrNqczznvP8n+dvYprYxkhlLbO6otQCLcCGQBscPlhwLThH8CJNdoNLuWmxYd7URKHQgBEi0TGNxsZIaFusXTCxC9YyhdhcJy/6tH5B3Syo9+8l6q1IjYGoDEGYdKypNDnKlCQ3iogHJSjKkqLXS9EOJufoZMLNTz5j58IWzzz7NBcuPsXRw3vM5gtsv0/bNmS5YbTSx1oQeUaWF0xv3sS2mgtXLvP+7fu89f33uXxhh5eeucFO3ifvDcBX9ERqbFwcjsiMp23n3LS7KAKTxvFpPabf11y4vkWD5HA84XB3jyKHZ3d2yHSiSwsghmU30lPN5wghqKyldQ4vcpwQlMMB7e4pUcJw2CfLNaFqQQR8itgmEljbWCMrSwbDAdNFje9can2MzJ3jYF5x2gZUCMQsw3fAjpBC3/uFQdJQ5IHhqsYz5PLT19g6PeX2g4doVfDgaMw0OoRrmB0ec2f3kMvXLnG6e5tLFzZR5ernzt2PjPf/tpTJP8stXWS9CxzsHfLg3gGXL17mjddfJ/jIB+9/xIvPP8/GKJLlit27J5R9yenxj1gfXmZ6Gtm8sMNwI0egibVF5ykvqI416AJpFH1f0B/t0Nw75v67bzOp5+hW8eKNL/ClX/tVBgPN+E7FxlOSjz97l7ff+nccfHaPycEhz15/huubO3xr+hb/7q2P+NrXnuIfyJf50+/8GbPxd7lYrrKpFZtXn+P9j35IFY+59pUXWN0ecfXpG6yyysZXfpHj2pNHB7LCy4AQLc7OmU5bhltXiEWfyVhgywwzHCV9LpDUwhLiOS3zGZWiK4CiTo/rWtJCgMm740gpNAXF6jAZKwRNKrByiPOk3wSWceuxuyZFkQAxAkyWIbOse8ufPj/6T7Eti8dl2HDW7d9fddM6xYucnJySZRnD4cqjadv5902lMCFaYoi0bc3ceeyiZZgP6Q9XzkwQ5JJ7/FO3H4OERcQ3Fbt3bhJj5ML1p9F5Bh5UMFza
uU7ec/R6oy6YMQE1iUi3sUFPPiW++8c0D+4ThCd//gbq1S+gzRotKyAGBF1AiBRti6/m3H3/+xzfv8/xvTtMHu6jplMQC7LWEqPAhkgQDi8cQfjUbEQQg2N9ZQAi5aAOBisI62lmkdYFrEw6tt5wRFVFJuMTmumcQW4Q0WHvHmB9JAZBvbA0tjOxiJ37oJSJseRBO4vWkSBzFkqzWyneurXP944WfOJzRNFnXUaaH/6I3Xv7fPftD9k7aFk0CicLhA5It8Bknjdeu8h//5vfZGekgIhaZnp1jfb9vT3u7e8ybWo80HTO6lrI5GHQRTq1Hf0xdvmnQGL6qLSuluAhhsSAabuQ7OVXH0PAxZbKe+pqgW0bJqenTMYn2CaFXFvb4r1NmjcpEY+GV2kt+M7MTquk74WzZv0SrC3/LeME/twx/mN0Xj8O5J1fJ08Cg2eH87k69vHXXf4uEJIW7tzvzm7PTf8fvebZvaQn9AGUQISQvkUhz8xdxLmmzHL//0ZBW4zxfne7L4T4LeDLwJ4Q4mKM8aEQ4iKw/9NeR0B3ok66ltY6FrWlZzRZJtDaIIyhyCPK12RZjjFZmiT4QGstInii88zmFcSICRHnWiJJC2cyg20bXNsiQxL2z4/HuEVDXxoKIlJFsjLlt2VoerrEhGSdPmtqZidTplWLj8nFzxiDLnoIElXmLFzPJw7wPNpOCNnZhgaPEuC8xXnTdb+6jLq8RAYILj2nbppHAdUikpk0Ko5B4bWmaVrqBiBlbEijEGgyJbClwbrAwgeamCY/i6bBN20KyhWPRs6y64Toc93BR9qA1KF5tKh4xOFddk+6xSE785LlIkxdnS5g8Hw7gpQxpdKVHaVTaGbwCq8zpA+4EFk0nsmDAx7sHdErczKjybQkN4bV/pBeUZBlhizLiaTPe9BLweLOWRrbJM1hlhORlEom85LOIaosc7a3t2iaFiUlmxsjbDVnbE7JsoI8L8mzDGMMwVu0MQw212n6JfNZTV7kyMKxf3LI/t4eh2HK6soqK6sjjheHFP0+eVkiJzXOeZz1aJ0uXM4F8ixD6ZzWNfhgsdaztalB6mQ8EwAUPgikzPBe4BpPCJKjkxMm9ZTjyQyZFQgnOD6dMh5POT0+ITe6C76WNHWLLUwqeLQmyjR9jUBrHY1z2NYmfY73+NYTpAGlaJzF+YgQLRjd2XhHctHl8WkDQpDJZBxUN4G+ycA7Fm2FWslRmaI6PaWaDhDd6D8qjRUKjKH1jvmiJvquM5dplEmTNm002mR4H8l6JfPG873vf8jByYxXnnmaUdlHykCWFzibHBYjPoV2KkFuBMPtDXSZsbo54uj4hMNZhTs8ZeE+4+nLF3ju2gUyEYjBUmiJcp71QhO317i3PWLeBE6Ox4zdgvff/4A5C0JxgSgVTd3QLBq8TZoBJSQBgVaKPDMMRiPcIlKFZG7kg6fxjiakz92T1r+PPpn1RIeQKdDdujYx6wVkRYaLjrquqJua1kdc8BycHHNYN0zqiAkHbGxtYUPsLpweb1uMSREbs9Oj5Bha9OmvjLh641nGbWC6e8ggSCazOeul4R//6q+ieiUXL6yzkksO7t9h5+lXiTIFaouuCE+0659Ojfy7uMkoyHXG5voGmoLZZIazjkGvz8pwlcuXrmAyw2yyYGUlQyCpxhknizGTE4utFMf7ElnMWF8bUK7tIAIMVqDWoSuSdoF7VEXAX3+DzeGA7GiXNUr0yhreHBCLlrjYZ//Bp7z3J3/KwfuHfOPVn+eFZ57hk8/u8dnpHm1f8KP7u1y+17I1eJbe4piL/QE9XxMbx+qqwStPIxUtmme/+iYroZcKrf05/UsXqD/9mHY0RWyW1KLProuUcp3pyQHffe8d3nzzmyBWiGJJBFpqopJxQHLO7NgYHU0jIokuEDxInXWPVzg8CIuWAsgJsUVK1/H/amw4RscVhM7PSEdSqgQQOwe7SCqk4nIf/paANkiF13JS8dcDbZEQHMfHh+zv77G5uU2/P3zC1G0JllOlfHS0z97RHjFKLu88hS5KOseXs2nZOaT9hE08dvtYTRoj9XTKR++/jzKG9QuX0KYPUdIbrbNx8QZ1c4QPGtPVGYGUjUUMCN8S93epP/oIcf8+GRY73UdmAfHMS2RFksgsjlsO7u1TPTxkNp1w+sGH6AeHiINDmtNj9g8PWDDhuXadTaPwpAZCDA6W+VvdZMMIzfR0mkBL0MQW5tOKxlV46bHBUZ5qZtOaGCQ4z0Q0+HaRTDqkwrmk7VVKJy2SJE2dRUBRoiiJaFoj+HBWcfRgl4f3PuKjO/d5WAse+JJaz1AHnuH1G/zaL/9rKN7h7Xd+wNHxmOligo8LeloxGPb4+dff5NVnX0OL/KxJEnz6e6bjKR988ikPDg6onKMKHqu6on/5z6bretumPGNxzkgjEmnbButdMkKzNk1Nu+a9JIG1tmmom4ZFU3F8esJskmIPvLVd5ptPNFkSK0t1tUWICShq0bljn2VIi0R/XYKocwOA84DrSQwu+PMDgsf/D+hyTSNPAmOPg64fK+s59/ilZjdN1s5TrzsKceRskJHwy+eIlGePOV8jR/H5RNPlc6SUn5vi/bjtrwzahBB9QMYYp939XwH+R+D/Bv4b4H/qbv+vn/ZaCegkMaCQGh8c1pPARu0SDzQLZFpRdDbpRVmShYi1LgU0d24z0VpmixplIGbJRS+XEqUyjJas5MmQoyx6tHWDcw6lJVGD9S11tSBKhfUK6yW1szTWsmgX1NWCEAJF0cMYTWay1EGt6wScpEKEpIewQCsjrW2JjUe55PwmlUrj1hiwLuJ8i7cNuUgTGB8l2hhEjDSum4gZ2Y2rLUJIlFZkwiCNSvzrpqWyFmVAWYd0jigjmU/dUNVleBFByaVTJ4mqqZaUzc4k5MccvGegTCX7AaUedQiWt0nPt+ycQIzJNjddcBMgF0ISXCqwtZb0egXEyLxumFUt1XSB85EoVKKWeQiVhUVN8A4loFec0i9Kil7BymBIlpluHwX9YbLrX4pD53XLYlERaMlURIhA2U/2vZlJ43ulNb3SEHSfPE9xBL51yXnKOfLcEEhZQWWWka/mifaRSzZGa7jGsJgGpFCUvTLp5pqmy0RLoDS5Q6YF3rQtkCx5l7SzECO2DUzHM4INVGJBtZhD8JwcnqCFRkvTgT/DtKppA6mjqBS94Wqa/h0eEWyLMTn9/gq+8VRG0fZbhJBolSNlhrM1udas9EuiLJjOLZmsiDEyrR3zuqZuK4qswNQ1UUpU3qMXNFqAJukEM2no5yVlXmKNZKAUtC0Cz2LaEFwDrqVtFsS8IIgU/2AD2Chpum5grjTK6OQk2VFpkJK8LFEmQ+iMojfkeDzj/t4RGsmLV3fQK5rWOpCCxrY0bQ0oilzTLwzFoGS01sP0JYtZWku1i+xPa04+vMlkseDGzogr64M0qVcKHyqGBp7aWkVmAz4RltPJKfcfPGAua770jef45i//Mnd/+Kdc3FnBmJxmVhND7ATbqUGTF4aMAqZjlE45V9P5HB9GuBjJihQy3tga8InKET1
CamKX62edJcsLlAAhIs62tNYzm5zig6duWlpyqtmEul0FIVHGoAhUiwqhFhgZCU3ELQLFsCA3Pa5cvs7CwfcPjnFtjW8srWhYWSmpAjy49Ql7tz9i9cJ1fvXis5hhjxhBCd8ZzHRNnr8t1fLPcAsdrWc0HOAqzx/+/n9kY32Nyxe2ePWVV1DKEDz0BgV6BWIUXA4vMswlJoucHs/54KNbDNYjo62SGGsiBqkX5DScPHzAZP97bF1SrGzfQG7u4CrJ8cGYqzeuI3JJvfC0viUYyZfe/BLD+V1+9/A/0pw0LFrBVJU8/cZXuXL9Eovdj1m7eJWr11+g+vRjfv8//J8c3/mE53a2uH20z8Nc8apcpTyUVIsjvvrmDW6+8312rvw8Mldk/YBQcwQOnCU3Aiky6mnNqy+9wubGJSAZTaSCvwBER32L+BRJjCIQouv0KYHFfMbp6Zi1zW3KwQohGpJFUqCNoGWGwBKpqas50/lNothjtRhRqFXOBI/xkfGMP2PtLQX88KgE+tlvZ932M+vzn3zAp9Dhv96EbblJKdjYXGN9fS1dR85RuLq9O7u11vPeu+/QhjmvvP4qSufk2QiJwsVkgCOV7Gitj/ttcq4Y/XF/2KO3U0p1soE51XhCb7CWpBXDHv2dKxi7QtZfI7qI0Jx9RyoGwrxi/tEuD/7sFur2LbZ7gmJe4VSGqQILcZvdu2N+8PaHvPvd95nOHf08Y3uwQlw44v4h1HNqEZgawSQ09LwjIFFRIFxAh0TidiGNfSdHY+rJlGAjUhiIOc6C9RWoQJSC+fEhkpwQBLnW1LbtqHUCQcpSNUqjlEZIyDJNEAaVCQb9IbnpMQ0Gn/X4Ue2Z3XzAg7sPqOctjc8Yx4DNJRee2uaNb/wjvv4rv86NZ34Bk/0Wf/ytbxE4gOaU3PR5+bmX+ZVv/gYrgxsIZKdDS9ZNtrHcv3efDz/+mNP5PDVibcqpk6JzJq6brvjvnAsDIMLZFxljoGmSJtQ7h3c+1dDOYZuGxXzOYjZlMZ9Ttw3zpsI5i1IqxRd5z5nOuXtV7wOIgFRZFzsFS4mYOAdGYjctfJwSed68Bx6FWXvvn0idPD8weBL4+ous0/Ov+fh07vGDP3Y6vrO1sKT9fo6KKXkcLC5dONG6q/fSQkrZby4REegmjlJ0IPBvzohkB/itbic18L/GGH9bCPEW8L8LIf474DbwX/7UVxLpBJVOG5IY0ujQmJRBJrrA5Rgszntmi4pFY5OFvUidba0MRWbA22QYYjwqF7jWUVUVbVOjkQgHiJY8AtaifCAblOiVPlFGFs2CyloIhnpmOZlXTBZTPI6yMIyKEtu0tE2Fb1uIpLBma7Ft6lx4H/BSMlOK1lpyNCt5QZkZMilw3nUXsEjrLM76pO9BkBsNUhK8pzfI6fdK8twwm01Z1BXO+7SIvO+6GuBEROUamSt8IxBtRESHjBHpPUYqXJejk2UpDBEBRinyLHWOpEiHwvnxMnz+4E85bIBIAadL3vP5Kdvy4qZ1orws8zIEKWxZS43IDcSANgKtBCF4giiZNR5Uast4HxHKJAOPEJEiiVNDjMyallnbouZT9PExWgmKImdl0KcsCooioyyT4QoKjIkIUeKcO5vE1dWCvYdjiqKgLAtOT1qUUMkCWMSUc+e6bpBL4FMJ0v4KgVQamQmkVkzGjugEWZ5jbTppTmdTvHfpZG8U6DTfijEilCTEiJaSzGiwgqauIUgKU9Iv+zjb0oSK4DwyCDY21slNzsP7D7lw8SLHbsr+pKaUAiMzJpMFbVMjhCLPS7TOuow4x2wyR3GC1pphMSAzvS5cs5ustS4ZYhDI8pwmiBTaHQMEi3QVTeOIUiMF2LZJjw8SHSTSefr9gliUmBAZKo1UGttOqccznO2Eza0DofAx5cWN6wUiE5hihBSWqDRCCfr9PhcvXeDTOzdRohMjC0lZDsiLPkJrsqxEqTSJCzGmjDwRmVcLYsiQVmEs9AWsDzKKgSbYAafjAcdzS11BYTI+vrvPbDpDPHeNjV6Jcg1KWwoduLazTj5YZ3004L0f/YiD2ZjVlSF3bn3G1npyflLSsHSTEiJ1G1tru7IoIhVkuaYoDMEanKuo6pr+cEA+rgkkynFq/GTphK00jbU0rsV5i7Cp4TEoC4JtCK1jPj5BBovRSQvi25a6qlnJh6gsJzhLlmVUdo6wjrXhNsOyRsgem6OMijFXLlzkcGeL8Z2beCxV1XL37m1qIfnRW38E7YLrr3yJZ7/8K1x5cZVeplCdXTtkqZ7m7x9uU8tOqIC1UZ+f+8IXeOqpS6wMe8nBuHHcvfOQldUV9t0YLx2He/tsD1folZaDkz3uHO3y8bcf8uyzm/zjX/kym+trRBzV0Qm33/5j6uNd4u6Q7MIM1X/ID966z9MvvoFrax5+9B5H9/fZ2M6xWzPyouTGs5s899w6n92x/D/feZty2OMLb/wTaCV7xx9y5977rG2Cyg8prxhO70/59L7l04/3OBCSfPwd1reep6l2+Xj8XSZXv8gLL1yB6pB2ZUo22EEKixjvseYMJh5SOMXo0suoOCNGTww5UhYgGqLQEJOLnseQUp8aYmgQVMTQ4MMJk/ldhhs1Pq4Ca0ixk4QBUuKRqFgiguXTj37Ed976bZd+MUcAACAASURBVH7uzRtsvPomybIoTXKXgCKxUlKTV3Y8zEg8c7tbskKAzxWAf5Vtef3z3jOfzwEYDAZPpC49qXn5s9iEAK1ksnZG8cjNAc66+N1UyVsPPnL96lVW+z1CNEkOIQTOJXZHr0yNnyf9rX9hl0shKFdXeeULX+DB/fvoLLl8Rpm0uuXOJXpsAck7IDEQRDfgU4hphb19gjx0MI/sHh1ycXVA9f1bLG5NePeHt/ngs112Jw0zJFZktH1NOD1FURBsjSFQKE2lMkw37Q9BoiMoF9GBLoM2fU6xbnBtxEiNCJHG1gQpMDriY3KX7mU5RtNdOxVGl2gDpsi6JphgOBiQGUVR5gglyEtDlhvKfol0NbO8ZBEU44nn4KjmYOrJTYETkkYZ9CDnl371l/nqP/w6UgkuXb/AV7/0NY4eLvjo409w1QOuXNnhN379v+KFZ19LhiMieQ90+gaOdw/49ONPOBmf4mIyp5MiInyiOhKXlDx/BpSWYMKfBZD7xEZqGhazObNpAmjNItEf0zCjTetKCCIeSSQ4mwC4FCzNPaCr3UVyHafzPRBxmQX3iEK9jHc5A1tLF0jvz/bz8WPxPGUSOLu/XNvn69Tl88//fnn/SfXscqL1uB7u/PPSAx57jbPMtQRItTgP2BINNUZStrFIoEx19XAISYO7dIoM4py0qGMN/LQL6l8ZtMUYPwNef8Lvj4Bv/iVfDEhuMVKqZLCCwGQZvbJE5Aon6BamoG1tyoYKMRk8CDAyWbkblRZxgaZUGoqcZplXpA251tjWMqkWBJ+mFtY2yDmoXOGlpPEwns44OJ7R2pDMErISoyMheIxKmoe6qjv6pSEvMrLMEEKeKJtRsKhavE2W6FFnKXMiJJvUGCNt21I1NdYGdOhs0pVCKEUk0P
iItBbrU5xAWZbUzhG0QYeI8566tYle5z0+eFqXMuNMbvCxJQQIEoo8IwSb3qszfdEqmY4sqYxL0fP5Dsb5g/9sfN2JVZedkOUBveTynz1OerSKQApiVCoZwgjAaEVmJFKmz8GH5PQUYrJ7TVlvgJLduD1dnKQUBBRRprAB5wPReqZ1zelslrRvRjMarbGxtpbCXkWkV5aUZcHKsIezlmqxSIHFBKrFmOnkGCVzBr0hWii0UJRZ0r4JBSJ4nG+RKJTKkVogjCIXmrLXxzYOJTVZVtDrlWilqOumW8DJoVMriZAQgiMIhfMCo3IQGh/g+HhMpjSrqyMyI7q8lWQtnmmFCIHFbEpTj8h6Bikl3juiiJRlSXRtIikFjw8RoSK5MZTlgIDm8HhK8A+ZHJ1w/dImg9Kkk3droaNeqCyjQZHlBRe3dvDzMY2KIBQ2SlR0eN8mgxhSsLoSicARiRgBPa2J3tGPkrWiROUjrl5/jntHR8TDE6SAxXyOFIGBLmgiOCRepUZGlmXcuH6De/cfsP9wF90FgwopyIsCYTKk7v7+mEzGjDHkvR6r63B8OGMyWxCCZS1TlFrRk5GNQcm1i1vok4p7h2OOJzWxlwDsh7f2ePH6NdZ6BTp3GAmXL63igyHPDdWN65RHh+ih4s79u3zyg7uMSo1Wjqd2ttAmT4ZJMaBjOpZToRmIwaOJKBkJrevo2gWRY1wAa/3ZBF1IRes987pGG4NqNd5Z0IaN0QpXL2xx7/4D6vExs6M9iIpQ5KwNVun3egTXEBDJFVkbsmyIyjRS9TF6SGMDtm4ZrQxofcVrLz3Ph9WYO3s3ITr+7Hvvsnn5Kv/6v/h16pMDtp75Ag93D2n7u7z0zCV8SLpQRNL5/mwsF/72bIJz9G8ERaZ54/UXuq57xDvHdDzh5PiE48MxNpuwu/eA9/7kAya7x4hQ09pjXn7tWa71euy/e5c/XSh+/stPka/C6STj4+M+F669TDM85eHpA/zRAW/97v+LHX/E3uk+D27dQ80lg/U+X/21jHw45P73P2LjqS/w9De+iNkc8e63fp+jT+9w++49ZmLCRzcP2V4/5fqzl/HBcf3qRW4MN3jh+ae4uXuMaAr6Pc07+3dpb1f8szd/k2wsiHZMe/AjGF4iW13n4M59zOqIO+//EX6xxWjzBs7vIzKPLDch6q7LLPEiFWEejYqJ9i5FmfQBUjAwfZ698TIqLwBN6yS5SWDtzBWRIumFpgvqacOlnRfROplGPDLD6GZDHU6Rib+F6OjESWN0DoxEOP/jX2cLIVDXNb1e72dmMPIX39LE6M9TFpeALanjQaCU5MqFi2xtryB8g1ACSYYXkdPpnLapGfZGCVTLguWnH4LnJ/m2LpUN4exRAlCUa1tc760iTbp+IZKZgiVpOQ066YBI9EiJBtfgqgpGhotvPgd3c+r9gpN5zWfHJ9yc3+bjBxPmLmd3GpjaFqFrgjKMfYNQA4g1GtgQBSu9ATumR7ARozNyJShUpDSCaAw6KzBZQMuWXmHIlCaXPVRPgfFkOWSZosgz8rzEmJI8KzGZBhHQWcJKiZ4bKPIMokdKkjW9SIQDT8XidIKPAWcNheqzIlapizVavWDqW6pCcvXaDl/5xi+wvbYFNiAzyctvXGa49kvsHbxAUI5L1y5x4+WXMUUyMBJnPLqIrWtufvwptz77jNY11LbFhpAM4oLHuq5ZGAO+bc8a6irL8F2juqoWzOcV89mM+WzOfD6lqRui98lqPgQ67zhCF0h93uhHihSkHWMCHrJzbwwxmZkQA1GIR0Z3MXbW+gmkidjVhx2yOV9vAp8DaI+DrSfVpOcnZE+axC0f8yQwdn49Pz6xW+7LkmIrO2ZUjPFsMrbMLqZr8i07fUrJBNhiGo5oJT9/floSu6U8i485/35/00YkP7MtodzEiZdSortxtNHJddFoQRTJMB/VEFqbLJaFwrYttfe08wVapOnNzEZoUxZCtB6jJEOdY1SGLgzaBaQWKKVofKCaV1BHok7Ok/v7pxwcTlgdbbDSG4BMuVJN0yJC2pe8308GKFqnyYRL7nihtUTrUUJhpEZ3IbyhdQitzpwyq7pi7/CQ+axhtb/B5sY606rhZFIzKHLWVge0PtA0LZmWOGeZLJK7kclylFFkKFAGEyPzqk55bR0NT0tBVAIXQGpFLCSQdEpKSbIs2eDHkAJhEgh4ZDZyRqMQJF7vGUf389zixxda+j67RaoitrV4G9FKo6QG/NnZyGQKbXLq0AARH5LjoeimqERBjOmqIRBJRpPaNokrHwGpETGFhTdtjUAwryxHx2MMkUG/h97OMSbHaEmeFanADS5NtJqaum5oGs9sNsW1lp7OMWvrFCYjdNkhSqWuUxT+jMYbzwlJl3z3TKewTikFzrZImXV2953INEp88OA7G17AhUjVuhTcbj1aRoQxrPR7bG1t4aKnrRdsXdjC5AqhJZsbG4wXBzgXUCIBxtZZfCdmldoQCJwuKoRwRBROaO4fnFDXc9ZXSsoyRxgDJgOtsDLSkrqWOgRKrdGDkt4UpBZE6fEiBXTGmCwpPMnkBAIutDT1AtqWMldsDdf4k29/wg9uHbK2s0nbtGeB2oRAkCnwNIaIFQqpIjEKekXJyqDPiVEpQ7CjYgTv0VkSNT9qECTg5iMcTSZUjScve9iQCpa6GlCWGhkjhVFsra2g8pKjozGT41N6aoVFY6l9JBQFPnc0zZxSeNq6hagY9ksuqk2m/oRSRubeMZksuHWnZqglF7e2iCQDFx0jPsYu7cUnGoRryJRgNFrB24ZF1RBCoG0tQQmMyZAhNaxOJlNu3brJ5tqIzdUVqvkMGT2r/ZKXnrlBJiIPdw+oDvfoD9Yh92yu9Bn2SibjU1oXU6Fh8qQRlJ5Z2xC0om0bJrMTis0RRkWuXr3M4uiAvfkprpozd5IVL7h29Rrf/ewz/uB/+z/YH77NP/pX/zWXr2yynQHCn7nC/f3b4hlAk6KjRwnRTSg8SkvKXsHVK5e5fWeff/+7b0Em2Zsu+OTj+1ze2ObqtRfYvPQir7z8cxQq48HdQz74+Bb7t97iK7/0b/ja1/4lZXbE4Qf/lt/7nd/m6LTikujxzh/+e9aeucKLb97gStbj7W+9x//3b/8XxLCHmy34V//mf+Dyl74KtOjqPre+8wETNWb3wREHo8s80Hc4+M4f4cSMnc0X2RmusvnlAf35be68t8eVp9Y5La6wsTlktPkcfiqpDk9QylEd3WO2t8+f/t4fceGll5metLz2Up/J7reResTg8ghfPySIY6S6nnRqMiKEI4sRKSKxXmAXp8yPH9JfW8HoDC0zsBmLRY3OJeiUd7eEHcl9UtLLJKNBj5X+DiKaVPBIB6KzqY+iO+csTRLadBwSgBwpzbnvsDO5+hlsSinW19fPCqm/1FTqZ7J1yOAMvNJpZEL3t6cMQK0NIrQsTvcYbg3Ae4TKiRisb1hUp3ivEdGBzM6O6+X25/6u+OjOUskYxaPvQRDRvQERTRTq3JQgUWBjXM5GBQ6B7rL6opL0Xnya/LWriOMjevdu8
eCtb/PJez9kXAmKrGDmA+M458A19ERBGUWXR1uwogW5loxUnybXFFHgpCZSI5Xl0pVNiv46RblJfzCgKEHKhjJTyAAZGWaYUccFMTZoBYJAyhxMzB6Ta1xscaFJRhHR09ZVJy2IhGBpmgo6d0alDYXxSDwGgXKRPCiGWUGdBWoEw51t/vm/+Be89sLrCKlxWYM0GYMb67z89AYvi0gULlGNpcbh8STWj/EgXOD04JC7t24yn01xrsXahtZ7ZCSZ7nlHG32XzZYMMKxrmE1nTGeTzuFxQXAeZ8Oj/DJBJ+FIHgopRjf9fkn7Wx4SS5lLAm5L6/tIjApEqh3lckK3LB6XlMAQHh1W52iR54/Bzx3550DakzRu5593Hrw9DuDgkV7sSc97fA2cn7zFmKipumOPhRDwISD1Ul97nmq83IflhDD9TnROrnRgNYiYzAVlIMYO+HbPiSHVwD9p+1sB2qSUGKMhVF1orEAiE31PS4QWiEyB0jhvEytequTqEwVlniNiwFuLEhGjFBaoY0hZUD4ibcCGltqBigHXekpjKE2G9JFSCIphSd4rybMZ86ljLOZkQqZcvNalojsrEMKgTTJhWDQ1i+mCummom5QNZq2jtZ62AeEjWmuiDmAC9WLB+BRs7CfNQoTaeqqDI6bzmo31VfplwaxtaQ+PWV8dMih7SAIaQa9UtD7gbMA6l3qRsvMWiomCaIxBSUkt5qnQbSPWJwecROGSnb1rKoijFJ35RWBpDgKPxtYxnYE7TZo4y6GBzy+Y5UG+fG2hBJE2cemFxIVAFJ4iU12HNCTXxrJP0DWz1jNfLGjrBtm59UjROZCJdBGIIYV+h5g425HuBCN1twASj9j7yGSyQEtJVXnaFgb9HivDkn6ZMeiX9Ioe4AmdbrFqW+bTBS2CPMs7moRKk1iZ3jdGn6armUbqgO32J7lIBZq6xbUOfJpoIiDPMqSIBJuAKXThilojVQIvUUq8lFghmbcNWoLQCt0rmNQzom0xytAf9BBGMDk9xVYtT127yv7JlL2DY7RITYqwZNFIkS4ARhOCxLaOMJ0hbUtvkHH/6ATbVgxGI8rhCHKNjQGLwLctzXjCdqFZXRlQHjuicgQCjW3OTsoBsESsCEgNUSVdloweFQS2DURVostVRuvbVG1DdHW6ELY180VkYR3KKLw2WFfjraWp60SxCAGco8gMT127QgDGiwVCgPMehOloUmB94Pa9B5weV1y5cpWLOxu4puV4PGe0vpo6Zb7GSMXaICMTklJCqTUboxEmy6hFih0QRiGETN1YMlZjxJQ5PRfASsJ8yv64ZT5f8Mmnn1IvFtR5BiKjsx4iCNmdwAP4Fhk8/bIgeI9tPUppppMpI1UgpSZ1JCQ+CO7cecCDO3d58ZlnuHxhO+X0OMf2aJWN119n9+IeW5d3WH3qKlO1xvZoSKENE5GaHEoZospRuUBSEzJBLAVPXb2OCA1WR0ZrfXQQXH7qaW6PJ9z8+Cbew+17u3z04UdkEmzT8nBxyKe37/LOD3/IL758mUGRnX338u8bN5J0npEido2ilAUUgk9/q4gURWr+XfSKN772Czyc7vLg5B5rT4/Y3N5m1rTcOphz8NZ7/PyXXubCi5t863e+x61P7vL+zf+Zre2X+cKVksPb32Xx8JR+MWLvsGanv85/9vVfon9jDbzj9GDCH/zet2BYovI1DsbHFPu7rA5Kxvc+5tJmwXb5PLI9Ze/4kLn2zI8/ZeWl65Bf487DUwYvrKCvjnhlfYt+2GRRP83rX/kH5Bcu4aeGOF7F6W20cMzu3OTi6pDf+a3f44tf/zpr10boMkeqKxzufUaxWtNf20J6ncBWx5YPfo4QHr/Yozm6z93332ZrY5Wd558lxoIQDLGao/sKQQHkKBGIMRXK0HDxYg8pniHTGoRLoe5dwO/SXEGikCIkl1sViL4mtA0iW0OY1Z/4nf5VtuV1TWv9UzQvf1PbchJwnhZ5tnekSVvKTJMhMp8c8sH777Gxs8LqhcvsXHsNJyST+SnT04dMxg1tVbGx/TzGJI1qapY/9rKf+2H5iyUEW07/gA6w2Qi6A+5SqDNg2fFYCUgskSyCyDRqLYPVgrghEduaFy6tsvPaG3zvd7/Lu299SO5JzcFSULcBWzeorEh1h/PoomDmBPdnY+KsZbAFXkUwlosXLrBzbZUoykRd0y41e32TarFoqWzD8XyCUpG1lR5aKUKA09NjnIfhaIDHgnQI5znYPeToYI8LO5uURcZiPks6+QjWBoregGFeIILF+gVbly5SXN0kHB0jqgU3VtZ5/Su/yH/7n/8GlzZ2iDisVMnYRKpUl/llfaUIRGxM7AtiqiEWkyl3b95k/+FDFrMpta07N07AemLb4mzDeD5lOp3Qti11U7FYzKnrqmNGJTaWRKDQxEBihIglQCJNykjXdqk6cCbkI3B2dmw8mow9AjsyrdnlsRo7vVYHYELsQquX9MYngKbz4OrHTb+W/3e2J49N654ExB6/D5zRRYFHJiFd/fo5kNcBtzMgxzlDk+6j+DwtM57dio7JFtXy83tUL6e6L9HAk1dQArje/6cP1/5Lb0IIMm26CZToBP22u1CmQjx2SRXaJL2MyVKR5kMS7CtpMFqlU0uEzIEJ6QOWhaD1Fo/npLL4aHF4Yt1iXMUgKAZCI40my3tkMqOX98iVQbhA3oURuggNitZHFvMFVd0wWcyZzRfM5gsWdVrIdAd5FnNypfEinRhNltO2NaeTKQvXovsZZa8PsqCa2iTsreZsrI9YWx0SZeT4dEzwA/q9gojEaE2eaVwIVE0akfsQqTtNS1s1ZCamhPsIZdlDZ9A6gfUCpKNtu9Dlsy8gBZi2tgsF7eiP1ruzaUbSqSXAx7ljSsiUIZZoFglcSZU0b1IHqrbFuqQHa9qaMi/wYZlHFZAyCbfzLGBMytDyzhFVmrxGmSYsIqb3iCES3LLbIx8JV104o8T40JnaCI0PgoXz1Idjjo4nlLmmX2asj4ZsrK2yPhpiVInzUPY0WhnMuiYXhmhTjsmSx+27/CsRHIGIMhmhdfgQsC5lpASXMrQSgUgkIxkp0d10SRBTlw6Q+hGnOxKpnaMVkRaw3pErwdy2LI7HBNdSZDlrcYSuNcenJxxOKvTAM5nWyZk0y9D9PtooqqZFCJDGYGMALRAioxVQFBkz51A+ULWWULeoQaKCeJGoij2tyXslo1xSlhlFViOlw0ZPE3zio4vkBhWUwCkwQhJkxEdPpjW6yIm2IQrD9vYlXnjxZebNnPH+PUJw1PMJsdHMN9bIBiXOp8bLZDxlYRta2yJI+sIs01y5dgWvJDfv3E0T+M4+WEiB0vrM9XNej7lz/yFSwvr6iKgKhOmhA0hRYbRBKU2wlmJ9xPZohc3RgExFmuCYTma42TED4VgpVun3DBu9AWXbUDSOetqwudKnCZHjySFS96mtZVw3SDME73De4T0Ibwm2RQuQeZbopMvjQVjmTUtRbiJkBlV7dhEMCA6Oj/FNQz8zbK6tJntlo7iwfZGL21t4qVlZW0H7nNJEdDcBDl1w
uWsdoleipCbmiqCh9lUyuqgXNM4DGeVwxLUbz3N00jA/PsL6CX/4R3/ML7z+Gv/kn/1LPvntt/nuO9/j/u0fsvab/5wvv/4qqK7HGB8rJv8egLhHF+QA5y7ey6lutaiZT2s219cYHY55++0/QM13GWYHjPfuUo8DWbtLY/f50ds122sX2d+b0mjLdHyXL3796/ziN9/gvfcecK/dZah3uNuMWVkd8OEff5/Ngwu8f1Lx2lNv8KVXDYP1Da6++jKnEqaHFUN9gWde+zrTTw/58NNv8/oLa+x7xWT/FLX2NC9+/Z/y9IuvM715B1XW2OOaT77zJzx/ucTVhmyjhCwSVwyD4XNIfwVcS98oeqrlw08WvPDKdczwIkpvE9UhXh0i1AWUvADCEqNJvHuZjK4IM0Q8YdBbsD1qGW3M8c17LB4eMPcwunAV6Stw1xB6PVHqMAh6IOesb2tG65dpmjGy7BNiappI0gRNSpEAW7AgLOCop0fMpzOGW30y82O+zL/mtpyw/WSwFs+BHfG5m889hsce8xM38eT7cfljVxwCdG59VT3j7v3b3L03Y+PKNU6rlgtPvY61NXfvfkrm9hifHPHSq30uXr5OIjer7hoan/C+T9rVVMSndSHOivBEV00xOh0R5mwPferQJlr4oE99c4/mk4/xe/vEaUXUPWSVsd5bZ3W4w97RBHwfQpvofa1jpAyNjRgfqWTFx+NTdpWjr1JDO6qIjXNO5/ssFqdYn1EUBatrJUY7XDuh0BopCw72xty8v09uFIvNEVol74P9vSMQinJQMq0midHhJW3VcnpywsHDY4KzeO/o94dIpalqy/q6ZOXKGkoKgqr5h//0F1n/pa9y+OAjBspyeesSl597ne21y8goaUlxGBKBCnRB4gE6Rg9ak6GIMZm+Bddw89NPePfddzg42OV0PqWRkaAldd0yO51QnUyZVjNOqxmLqsLaNnkFhM6tNiZ9dQgehKRTniVzvK7ulh2tOMqOZSUSYyR515w7EFRiIkCijsYOpcfY6efgLOs+cg5oSZnquCVd8RxNcQmSlnKbdFj9eZ3a45EAjwO25eN+ksHI+Unak9b1ErQta99HxnrdftFFb6lHRfSZe2T3eYRzchgfE3tDSkmUXYC8lClSi6XJUvd8If5u0CMFJNMMQAuBA6xtiRLqYNFCd1QvwMdU2PvkDJms49MJQ0iBjN0ZQyUnqxhJphYBQFLmfXwMtMHig0NKQeM9jW04PjhEnpwmy/mqJpoAJtJGj5IK5xRVO6NuWha1pa4tVe1xLoI3iJD2ycdk0OFFi1WeRVMzbWoO5tPOtVAzzErUIoGtXCm211Zw/YLT8SlH+7v4tmZtbY08zzmuHLPQQozotqWvJd1cH6mTaYQKgvW1FWyZ4+oG27a0QTCtLM4HdJYn18lMpvGuVEmXJcQZv1iKSPDJpdK6NKLNMpO46jZ9XkoatE6vIZaTb7kcoQeKLKcsc6QMid8bM6wPoDVFL1nGuxgINlnoLuaWGBoaB60NOCc685Lk4BZsOCNoSNL7SZliGEI6I6SD6MeM0KOIBBGTu2CM2MZT2YrJvGF374SVfs7a6pDBap+836Pf7xOcgwi9skSRjDcW1RwfA6rTV3kPvhVktiXXkUalk6+ULVYGJk1A2KRlQkSUMhgtu8mmxDZ1N8mMOFujRMCGyKKuqOo+IjimTYVWyawkCkNtI8EqfG2ZLFoW3rEz6HF1tM7tO/dSN42IyHKEEDTeIUVARZEWukqAMzMZ83qBsw1KGbARuWhQWidgLGQyYcgVW6WiLaE0M7SY0yiHFxqlQMsIMaCkIXiR9H4ktyShNFPvOG1qMpMMkdfWVjFrA8RBcuha3d6kcJaRCkQ3h8UEPHglqF1L7SwalXiPUiKLjGxQJg2dCugsgq8xhUbIREUu8h6qKAkqdYFlf0Ct04GaeU89CzxojlndWaHIDMZL1vuKzZVAPR8zbhfQy/jos1Pu33yfS9sXeeWFl7i4s00/L8D1GalTqqwiro3IRGBjfRVTltz78CNEWbOdpXw5rKMXBcam80KWF3gkIisxtJh4CrHlaNEgs5zIDO3nZDpiCoNTGceTiu+98wO+8NqL9EZDIMOKSE8XxFDDZB+pVtDrmkia/E58gVEK0zq0FziRoU2fTE7Ze3DCdO65cPlp7ty5SxtAFiXPPHWd070DblaHaODjw0NGp5avf/F5jHgPN50z2rzK/dsTmhc0RRnQwhODOJt0xtjFiXQi67+7AK7rpqrE40uXlGTkJIVkPj3lz779Nlor/uDb/4HxuKavrzK3Cw4ffIKZRcLxlIXdQ8fAQ3mLBZbKFOhii3fe/UOeu3BKf6XHxYvXuPXOB7z5/HW++I2v8vCD96hnDc888wrPvfYaV69dQ/P/c/dmP5Yk6ZXfzzZf7hpL7nvW2lVdzW6yyWY3lxlyQI4ECBgC86IHAfO3CRD0JgHCvEgcdnNENrdW711bVtaSe2ZEZCx3d3dzW/Rgfm9EJrMoUkMSTToQqKyIu/i9bm5m5zvnO6fH+L13mcxqyrIkzxpibbC5Q5uAX8E3b77FXz+bcfGrv8WNb/wajZ5SvHcN5HnaO0Oe2b/g9TEMynOghlhZ0OQFGkEZCmgq2lZQrxr+6D/9J/qv3URm50ENCOyRlSWGEdGCXdzB2wyZ75JvbYMogZYQNYsTz/b1b9DMPyOPDtPrE/Zh8aCgOZqQX2vJM4mrM7Lxuc7UyELw3PvgDkHc4Pav3cArgxEa0KklgmRugJBEpYlEslEP3Qdl+rzQXSnOApA1oDozGP+e4/Lvx6qlOTDJNnWKTlGKmEhCNuZdAF3/2Yt9an/nGbzEfHWv4mPaAEvTSdYkMTYsfU629Rq5P4Am8vFP/oys7FHbS3z4k/v89eFdhKq4ePEqV67uQswROHhrUwAAIABJREFUPgOpUx80MbVRIBBp5Ur7KVKH4dotNJ162vCqCCrpNemCHtJeXICXXdbr5swNotwh9L9C+9jhH8+w959wMv+Cz45OeO4y9oRlrjxlLtn2Ihm0hT7SZsi4REbLqoHGN1gV8e0Q6Us0S0Rw7D/Z5/CowmQFZa/P0wdgDDTVcy5dGHP5/C3aRc3q2THZ9ogv9j5l7dzb7/WZzWf4VZ+6rhACsiJnPBrBaCuFR3tJlAaTD5FKIVTLcDSgyHIiGVt6xtvXDe/c+BbC/xYmemQUCFmC8ARcd+09kuS4mCh8RRTZqeVicIhQIaJkvn/EnV/8nI8++YDj2YRZveJ4PqOqGxbzimbZ4OoWFxq8THvesAYYMqk30lASSGXS+tkpmMKaPUvKV2JXUH+xJyycuRdS9l2MHVaPnVs46flhbYjC6bpwhnhj/Ve6YOn0+q+28t+AOE6B1tq8ZN3Cc9ZPYd1/tn4unAK/s8Dw5eNlF8s1WNucB2kcb8ApSdkFIDq8Ibr1LoY03+jOhCTEZDIoSLnF6yDttaIQkZjW6OkMXtQLgstXHb8UoC2GgLMNrst/ECIitUQqhY+B4BxSJ0mfVioBtuC7XiPRmVt0wZtdBUFoBTomp8QQUVG
iO3ChCegQ8Q4inqgVjjUiljRtw8rWCKPwOCpbIZC0tsFHSwwpZ6bIMpQU5B7yPJLXNdYlMxDfpZtLIfAuaY2rlUVUaQz3llWnkfUM8ww9dgz7Ay5eOM90NmM6nbJYLrl89Sp5UbBarBgOBgyHQ4yzBCKOgLWWVVOnARkEkkCZGQpjCK2klQHRfU/WWpqm6W7CVPFfa4tb6/Dh1MXHaL2plMUOoNH9v9YG3VX7pBSp2ioVMfouB80mkxipiUFQ5L3kuCQFbdvQVFWS1YRkrz5f1Mxty8l0jvfpeSmYlTS5dKBcCgEyEugkgOvBHXklYAMwWqYIkrPVPm1QgA+O+XyJrRuOp1NUbtgajejlOZlWLBcLFMkx0uQZw3KYsrYqi2sdREVfkECs7Pono8eGJLV0TYMIkbquKMsSnWmqqkpSygjW2g4AprGsTJ6u53JJaQyZTu6hWZ7jQuBkNuNosgQZWVrLyXRK5e6R5z0a6zoZQyTLDYUpCVVF2zqMydBSIYRHAKvlHKkkOiuxbct8MmOyqNnp99gqezjraJwlxAaV9ehdGNJ7siTTgSW+A9YRKXwCy13VUAqV5MiAi4GlbbExoiRkQmC0ImZJKmuUZmvYZ+gdW7mhDhUmOjJdMNwasQg2TY5CEFrHbDbneDYliBQ6G4MjBoXw3fiUGiM0l3Yv8HzSYJ2jMDllWdAbZBBBtp5BMaRfGA5nJ+zKklKUtE2Flprt8YBVC09PZtiQUaGYtPAXP/05N65c4t3XbzPMNVnZw5g5vQBXt7fY3hqxaBuEbfByxdNHD9kajRnujGgjEDWuDRAEPkBjI771FCJy+dx5srzk6cEhzWpJqXbTgq0EvcEQVadxgVQ0RHAti6ZmO+/j25ZMBKaHz1j5Gdd7t5Gij8MQnUe4lugFQetkZS1zFBUuKM5fuAbkPD84oMhb6uWC27euE+0Jvj7ks2XFJ8+OOP94n1/71W/w3q/8Kr/x619nNZ9iG0dZuq4XNk+LfwjJIavrc/hnVZH9ox4vgs20Z+2k2koSWhgNx0gp+JP/8j2eT5f4uM21a9eYfv5DXA0qZGSNolADVlXDymt2b99k2S4ptm/zo59+Rq/6nKIIfPj+fc7lOdrPCfNjTLbFyd4B1Z2f8uPDD/nmt/6QJmREjgiLimr+nPzCina2wPoTvv+z7/Nr3/gmdT3njZsFmBOkt+hMEXQfH7e4ufse88tfJdu6TKsMQUkkbXIfJUJbEWdLDu7N+PzRgn/7O1eY2YZMNRRmQHBjnLcQtom2z/FjR9M2XHn9KsScEBRSZEQxZHjhqwhdYkPJh+//OUop3nr331P0brE8ekp0S1wradtINvDoLLE87qDC1AXFzjVUXMuFE2hbx0usHVkRHbBQKlmwQyfrWl/B//aCwT9YBhlScU4KyeR4StkvyfOz1v9/90bslccrnpKKlmmzlz6pROJwoWHVSILcImOFXTXsPb3Pra/sY7nG4f6SJw8OKIctQrRAixBJ2eLqNN+vqiX9YZ+iHCDEelOdonMiHRYWgnRN0maeTokSSOcUfSpw09Veg0iAT4q0HUXnZJffo1e+hqsuc/eT/4P7z6Y8nTlmOGzjyPCMZOp1XHiHEhnORxwVUkMhNT3n6KtIKftkMktn6AWrxYrFfIIuBZUzTI5qMqPZHuds7ezS642Rccb1S2POnd/hzif7SA29fsl4nBQYJosMegUmy4lCY/KcrMi51h8msykfUuSUUQgRKfOc0LTY2KOUqQgRdESqEnApIsVFooQoNCEmYBQ3LEsqKieliwBalJojxYrjpyf84Psf8H/98Z9w/8lDJqsZ82rFqqlTu0MLkrTxD1gCLYkNTcVt2ckWEetCmkp7PsGG6VoPNiFEp3w9BUEhRJRKMOFU8tcxcZEk/e8A2povWoP2dFt073G2/pCI2RfA4Ms9aa+8HV5i0NL9kB67VlydBW0vM3VnH//yc9culWdf++zjYjxl09byx3W70Ob9OHV/FB3duHmp7ruju4VkhBAFxIRXiHLDXv5/LZ6/HKCtGyTpgvs0bjqXxdxkRAmt9TjhsCG5FgbvO012Z6UZwYdT7WmI0IZA8N0XTkB6UNF37INDyeQ+FWJybaPrZwmAbltCTHp65/3G9SbPc4zO8EHgg8C51JOmW4fREucdLpgEfkSqzDrvaFvHqqqw1tI6l/LhlE6DyrXEtsG2geFoxNb2LlIbptMpTx4/5tz58+zu7hKjp2lDYuu0QoZEJSsEIQYam8CEJwUnu5ghpUKTFrm1XndD926mDYGSqQK4qTiEkEBcTHRxWRSpshQTGwoC7x2udamyrmIHWiJN0xCcTyHoqsucq1e0bduFhrdIIamlRHTy1qW1LGxL8KlaD3SNsl0j+tmNFKErf5/ayZ7+cVPn2SzmoqvqCCEojKbIM2QM9IseVy+cZ3d7TN4rULlBRBAxIGPEWct8ckLT1CzrFcs6ORaKkBqro/fUwtImu1OqqmJVNzgfkEpSlCXWW6rWJeY1JNmp6qh37xxKpQbXFJwOrvUEH8h6mn6WrIYXJ8e4EPG2QRpDMejhswKC4PjoGKUWlOUwMTmC7pqAFrJzdUoTWG4KesMhs+kEYzT9/oDZbE7bLljMFwyR6G2VQnGlwOUK11NkI4PuJdMagYagEHTBmevx0k00LnjyPE/9fK5N8ha1tsTvCivdfWWkoWcyCp0hQ2BQlsxbzaDXR0STcmFiJDrPbDbFPH+O1CZNjkGCT05NoQ1IIpnUfOWNN2nJePrsGaVSKbPQBlyjMcBsPkWMCsZb2zC31HWDMn0q67ChQpmC3fGIo4NDjNHMVzOKrOTu/QdMplPeuH2DQgmsVDi7Yms84Nq5EdPlgts7Q55OK+aNY+4ce9Ex6uUsgsC0LvXD6QzfRoSXyJiy7gqpyYIgixHlU0U01zn9UgKW8XhEluf4EHAx0AZHG1IgcdHvUy4FB/vPyXoZjG91EpSU1RhVYppNZjoGDHplwcnxMVVVUdcVTVUx7PfoDYaMR9uUw4JR7xwfffaIv/rL7/Pmu19HyBapA/fuf8z9zxb8wX/3e/R6Wx1IP40ASdf5Xyxi2xzizH/XopgYk6Q77+e89c5bPHj0jHfKcxw1BaKAr4ev02SR+3f3uWi2uP3VETNb86P3H1AXIzT7/OY3f4c//96f8sH776Nlw2IWOH/9Ar3BFR49mPOzH93BTqZEu8f49T6u7nN0GPmD33+XRx8esLWzotk9INSXGV18DTMc8PnihK1Rxu1bW3y2/5c8/aHg8rtfQW1dQqLZ7u9we+er/Oc//itGb1/k0rZne+sc/fImNJbJo4es5g4zvkpx6YS6EMynJ2xlBsQApS8xHI0wuo8QGRdv/AbkIHQKYxLdJKzKPlJtQZNjhorebsPl2zfIRzeIbU7/2jZRJ0bKiAqyBZE5Iq5QZUZPF1DX4CwiW7NRovvu6fD0GqzQaS/SEUnFXwCkTBXwf7Yj6UCWsznWOn74wx/x1lfe4fZrN05PTqwZtv9fL896FK4LIiFGgu/km1FA1Ng6sPfsmIfPP2Pv6WMOpp
9x8+u/R7EDro3EoHDOsX90wts+Aa6DvSOODo85mU4IBN599x16veGpCvMl9LvpX1/3QHWffn2sN8/eeaRa716TOUW6iBLZy4Ccauc2d+0Wd481ts1p2ynCWXpBYKOmipGe8ugQqVxNLRsGJieTGTtCEZSnMBLR7eNiUAx6Q/w2qCJgckkmMwaDEdtbgwTYVJ/zFy+yu11Q9gteC9cZjvrJ1C3PuOQukGU5Qki0zvFonE/tLr3+MBlH0Y3AmKzwtZLMmxkVCpOV1MLQREkmwEWRaAS1dhpMe64QwQUw0AG4VLid1RUhOp7tfc6zR5/xg+//hO9/72d8/sU9rG1Y2VWKEJJ0fg0paJ4YiV3IfRQimb4p3cXliE6rJDbY7Swrtb6up4BJnLmebADNy66LLwOoV42TU9D2UiGlY6deNhv5sjVkHRXwMrB7lXGIeOn3Z2WOm7c/+/iXpJVftn69CmCe/fzrHLqzwHHNNIYQUn9+Z/nPmrmLXdRAXL/uGbOWLzl+OUBbTJ41ykiE7PSfApq6ZrUQqEITlEKZlF8Wgsc5n1LqhUOpLDkmhuRlJ6UCpYg62b8LISB4hE9mIjE4QmiRtCihkVoTo8IHsN7hYkQaQ7A2Vao7ulMLgwSkSL1YKc8ngNBkRuG8x/oUzN1x1N0Az/AByjxjtlhSW4sLPskGfTJLaWtL4wKrxnHhwi7jrR200ZwcnzCfTonBMxwOQSuiV/SKDHxARhhkBUJJKlWzEjVt8FjbsqpWtEESRWIt08bddPEAgeBDVw3o9MhdFS90mlwhk2tOyuGIuLbB2naT0+jdmhlNN0eep9gD69pkNwtI5XE+YH1L6xJVbH1qDlYIgg+0tqUl4ONaynIaNriu33TjHASornK0qZScnQ386U0vpUCpNFEppbtQ0AH9skDEyGjQ49rVS2yNhgTRyQqB0LYoCQrB7s42bWuZTE6YzqdUVUXwMMpKyjJDukC7qvA+gdfQUfiStYRFonVit2IoKfOsi5o43eQaYzBS0DpP9IG2aaAsGIxGaClYTI5ZLuc4H1ImjM8osozcGEJIYCiEgHOOrMi7a+PQQBCyY8ACNlgKo5NZjcyRQlEUJRFFU1maukUJQ3DJtdvisaqlyAVGS7SQGJkhZJ5iLjLTNRl3dr9aIERq6nYhWdsXhUII1/U8ru/3sFHZqAgqRDIhKUxGJ6cnyzKMSb1ZWiQ2fblckOVlVxkD5wTRaKz1CBGQCMb9AZfOnWM5nVJoifAOIw25EWRCs7f3hE8+n3L+5mW+evU1DDWV9digySOMywLtA9cvncc2loPjKftHh+R5jpvMWH76BV9/912uvPYGRw8f0dOCoZEU/YyvXb/ErSuKn3z6gFmz5GQaOZlJjDL0rKduHEWW49o0N7RNchzLhKZQhlIZhA+YzFAWPaxdEWWgLHsIpZIBTBsRWuAJm0rl9atXmX9ywpPPvmD7jR2UT66VykiijgR8ks6GVIzp93u0tmFycsL16zd4+uQJRb9HVvQZj3a4vdvj6GTKyazmi6ePOX/lEs/2R3x8R3Hz1hUef3GH6fN9elf73VzXVSu/RH7yL+1YV4tfYGy6jbI2aS567a3b/E8XL9AzPe7vOz48OmK2o5j1cna/odlpDcVbEtkec/vaZ+wOz/On/8v/zF/91//Mcv8J5WDAcWO5emGL3//tb3P0/Ihf3P85070Jo0bT4zLicMijv37I5HDB4fgymS2wUfLxh3fYe3Sf2+cPkE2Ph48OubF9icn9AybVlId/9cd8rTnkzfcuo3tbyHGLuHqd+Q8jzed3UbfHUNb45RNmDw5oxBZycJHD2SE3fv1t6Pe51B+hyPCuTkW7Xr9j1Vf4zLJa1HhdM9wukPQJ/hBZtuAMZCV5cYk3t34fMkFrBWagiTiizBAuyco8S3yYkHlLPXmKFBO8vUeod1HZZaLpQrxh85MKel2xsVNPrIVXZ43pT6/kP/1oaVvHvc8/56MPP+Zb3/pNbty4xXAwPKUWxMvn8g89r9NvIK6Dd6NM66AD7yLRRSbTBT/9+Uesnr6Pby3BOKomkJNUFSYrCTjuP9njneMJ89lz7n5yn6OTKTF6dne2UvF6s/EUXXF0fcoOET3rrDcRFYTkhixVdxVCoKlqsiytt2v2QW5kqkklhVLIUlG7yOHCEpRnWS/IVUAVA5QsMbZFWU8gMPM1FZZSKAZKMCr7eJmAZxvaLhJOcv3SNa5fUCiZTM7kjQFSa0QeUEYQY8lwbKjnybDs8qXbZIVB6lTgzBAoY1jVNdZDXhRkhcKFiBMRR9ys3ZJkYS+lxOQ5jYo0RrHyUIfUtxxRBKFQCGJMPfFSSAI1QTa0bYNrWqql5d6jI/7sBz/nw0/v88HHn1DbBldb2srSFmNsmHftP5HQtsl9ea256tRQaxd6KXWXDfbymOsY1HVxe110ZV30iKdg/BXg5e9iwc7++2Ug9Ion/C0meS1NPPs+a0nkOj5g3fP1ZVLHzXR9BrSd7V87y9Cd7ZGD0967Vx2v6on7MmZuXeBbOyyL9RwWOgZThES1dQXsZDbTERFIXpWlePb4pQBtIUSqusa1Kbg1Lwx5pqlWS+rVhKgFQSnyMqNXrKv7AiGThXwMXX6EkoSYbPdjcIkNkcmFZxO0SMqdiAhcTI5/UqYKqgtxjbVwLqT9f+xkDyrd5KZrtExWxBIXPNZ5goj4oDCtoHXpppAd3ZlAk6bIEnuwbBqWVUVV25Qz5jt9bu0IrAgRRuMBW6MhcSsynUyYnkyI3sN4QIwK51oykYBPDOlzeb/OOJPIPKevBN4L2i6KoHUeF8IZvXNqKHVdRsfZG0bKTj5JYk+qNUto26TB7ZjRtS2sgKSL951daVddCMInNgnVuc1J7MpjfSB00tUYIcpkOLOmiFPFRG7OZ7NurNHb+mbkRYehdWUpMTypeV1JidYJfGZKkiuJUQIjBVW1RMmI0JrQNGilkaR8MymTFXtRFpT9HuNqh8l0ymKxpFf2GPQzZKvRdUQImz5/lF1cQbopRccmurZFlgWDfp8syxBCMJ1Omc1m2G7cK2UgeNq6wVY1EsGw18NujVEiYNuW0XhEMRpwXFUd45SMPyItMabsvdhNvEoK2pg+PxHq2uKdB+9QGKRsUVJT5pKyV1IvVrgI0UuMMOQyxwhFqGtkY5HW4TwsQ9oeabluXu7qSyLlC7bOg28xZY7yvgPOsQP/3cRE14faNWILkjNYDKIzktGdgYtCxkDrPdWq2mjJbetZrAIjLamtQ5pUwWqDQ+UppFtI0bkyOXTnM172Mo4fHfHo+BB/UvPOlWsUrUKoHAmY4DDWMjKaQZ6RXbqC0hnP9p/jPBS9Ae/f/YJL21vs5BqhZQJZGm5dPIcq+rS24bO9Iybe0QTJqvXU1rKoGgaDfgoRF4DSaFMglca7JC0WUSBj6g00yqBKRZ4VyWlyuWDyfEq/KBldLMCmqAozUlzc2SVbSITzKDzBN2mTnAIZUIrO0MXgfFIMZJlhZ2eHqm6YTCY0LpCXJZfO7RJcw+u3rjCtF3xy92c8e/4YQc23v/k/s
tPP8a1nNqv48OMP2N7e4c23305FrH8FLBtrwAZpnel+Lbq5J4W5C0a7QwiR8Urwttzl7uI1FrlntNXn8S/uc++7nzNup3zn3Td459e+Qvb4Lh//9Edceu86B49+QTs7hJ0eeydT7nzwESfTCdG1PJ9ZyHZ4sx5zbaQ5f2uLP/3pzxhf2GabPsOLX+Hh/o/56ZPP6V28xPTOxzx8fswb747JXMZnd/cZ3n/KO79xAuaYIBpW+ik3b11gsVejokYNIu3ygHpwxLlrr4Ha4tM/eUg7d+yOvo0anAPZsFo9wOQOEy6jREmIe/j4CNFq5s9r+uYianCJ+clDclWRm3NpAS22u/YCAUbSCpscIwlI6RHkKK4i1Qh4jDSR0eUL2HorFSbX/YSwqVYnvcymhLdh09JvY7cYvdjY/89xNHXNnY8/4eOPPuE73/4t3n7nrW6JekEeAmeLAP/gIwHTNaspOnfle58+ZO/RY772xmVCcFSVxTmJFBkmL5M7sUwSfiUlMi/ZP5zwsw8+ZjptOD6c40NI2WRSdiBgbfLvzyy6EqKGrm0hNe0ForAE37BaeZ7tH3N8dMSzxw959913eePtNxKw7jjRGDyhniPVc8TzYwpXcbM/4zM9ZapqTvySTGcMM41WBT0UdaiogmDiPbWAzAe8SEW/4CPWJZdRLxJ4GW2fY9C/jMkGSF3iXYGQgcE4EFly+HzGcvmcyeII21T40GIyw2A0IAiYLZYEKVjVDYPxNudyTaZ0Z5ARQMuUjhcCRp6CAR/AMcR6TVN7gvcIFQlYfGiR5MQYsbVnMlvx7OSYZ4/v8eDuBzz+4i6PHz/k6cEhj05OmNqWC7duc/u9t9FiwPPHU54+eEiYZMS6oq3mAKw5GSljujwx7Vl152YNbNpdiGcASZRdUYrT8RjjZt+wedhGjngKwl4FfF4YpWckjC88hxeZuZdxyVkW7yzYWv9ucwhxalh45vVi9xkQL4K1l2WSL/+8fHyZPPPl370M4k6BqjgFo2t1QAc0E9sqXti/sn6uXDtNn4K9Lzt+OUBbjFRVjbWJ5i3zjCsXz3Fh3EPQUnubQqUJRFdjncO5mNzZpCEvegiVKguJnenkj9YhhEoNu0JuMszWTpQRhYwCE9KEIiIIoREydDbDySBFwCZEWIpOetF94SHCmvlGpNBopWUyVWHdSCmR2mBMOsesbpBKgahobEt0Ae8jwQZCbPF+Sesc3nm2RgPOnzcc7O8xmUyIBGyZ0zcZea9k3B+ghaJuapR32KZiXlUsG4uMa1vZVDGLQhI9tNaitOnOIcnP1sAoVQLWgof0PXiX3PDo5JXBJ+2+EHTfSdd7RHqskInZk0pSZBk6y7Cto10uqWyDCyE5BXYxBJA2R5vJYX1DiVPmDTqqPYZNH3eSTYrNuW/Aw1pL04EYrSWZMSk4Er8x0JjPTjg5PqBXlvRHI6TJyUwyihmUPYo8J2hNY0HoJEU7X5aU8xVtUxN8+ixC5yiVJhIbYscYJjDvQwInkHre8jyn1+ttqnV109A0DcaU3TgDvKO1NjFuvZxBWWJkkvsWvRJdltQBBkWf5bLG2hbtU1/hYrGAGCiLDG0M0XsWiwVaSnzr8DZlIK78Kk1mWhFiQGvN1LXszyYslw1DoTFe0SMnLCwDJyhCWgzavMCJiIhJjkEnvfAhEruJSRuDMBq3qOhiSDDGdJPnehnvHKzWjqQhJNkjXcPuuohAYt7zLEtjNoJ1jqWQ1CGwsBZRZIjCoHsZusqJmcKJSBvSvVVkBuEVo9GQ3nDAfL5itqiYrxZsb23jg0y5jdYyFALrAxeGIw5mFcPeALsT8B4yU3Bw8Jzne/ucGw64dmGHmzeuk2vDtkz3/rkC8psXebpo+OTRM6TucTKd0LYtg0FJv99Dakk0BicFXgq8BDrXqOBD6sNVitLkaKOJAmprefj4CTIIhuScHw+IQtI2lnOjLXa2hzwXOTE4vG+oV1OkzZF5Mr6xtsHoNL/lRlGWJUdHxylcXSqWtiVTmitXzyFlw/Fqwo1r5zi+8yn7z5Y09ddTYUGUfO+736U/HHH7zTe5fuMGSuvTwNSzi+y/yEP8rX9FEugNsespIvnjoCKDbUPlWnSvz/VBINqWN772Gt/U14n7+4TD58w/3+ed669x7/2fMF8+Jepjbt0a8+jePb47F/hlxXZhuP32NZ5ODjleKWZhxYePniO1p24H7DQPuFUNuHXhKtcujuCNa8yaLcLHB3xx/zlXbmV8cvdDqsU2jz4XfPTD97lwac7oyoDqzvtUP/+EgQgc38sYXXsLfe4aly9cBgGro31ErDh4csCbviJUE+rFktlqSd6bMS40IsCDT/6c+dFDLm99i1E5xu09pSkV89kUMbbkwz6EBQSDEAUEULLoGLIZMU6RogAxwAdLEMe4eg+hK2Q5oLw2AqPw+E4ImX4CdIAlnrkusWOCOkBzlhZ96XLGF//3H/XoDwZ85zu/xeuvvcHuud2uV/wf/402e8EQEBFW0xU/+cEP+OLuHd669h9wIbVeSJ2lyBhTEWTA0yB1YDjqIcuCyXTFJ58+wHsFopOhuxZP5HTXLlh3ISBOIy98cCghiTGwWi453N/n00/u8PnDZ9x//JTCGG5du0pZFmf6jBLIwjtWR3uE2Y8ZLo6QXxzyzg1PXV/hex99ROs8QQgGTiK8RTmL0DmL1jMRiiYqQhuwEfrK4JBUSBAKpELIgCpzhteu0ZoBx4uWydQzLvsMtjWlyNCTFVWz4mRyzHwxxdmG2tYILRiORxwcH9PGyMpaLl+5RtHv05dprTbapDw8BFp1bo9rAJCafBBE7KpGOY8xIAP4uuHJk4fcv3eHOx9/ws8//IIP7z3m5PiQ1eQA7Cr1JRJpjcZqQzEaIvojZgtYomhNgegPU7+/kIR6iWsFzjcEfJqLIp25xYvA46x1fbog3b7qLBDp9nDpCGd+/WJO2stg6exxln16Ydx230848/i0Z/vbDo4vyx7Xz38ZNL2qvw3O9La9AlC+8N5/x+d4+bHr48V4A/GCk+WpZLR7bOxY0DUhsX5flVwju3d+6b34e80ZvxSgTQqJ0jlStvjoU8ZEvaLtCUb9gtLkSJMhjITQsFpVrFYNi0WdXPyaFuuWuOBQWmGMotQBIwNKZxgpECrHa8WianCBlIsFECPeWxQCF5O5B4hNg3NyU/RtE5sfAAAgAElEQVQE59L/C7NewTvNNmnDTiAg0DKlh4oYUb5FSUWI6SKZLGmMkSKlxivNqqqpK0tbp36uaJPckKrpsEpkPOyztb3NarWibixl2SPPS2KAuq6TXlwk6/xxWRCyAlk3xDaFA3sfCMHTNpY2poXu7KCXMsk5lVQbu9HEWPqNSQYhbMaY0jq5SXXBjpnSCARBBHwH9IJ3NHVEh4B0HhcDjW2oq4Y2xNSEKc64aMUAosuN8eubo7tpBKeDXorNlLIuWsj1Qi7oZKsvbhqVkMkEIyTTzSI3KCVpFKxWnsVqxaKucTGSZYbcGAZlj0GvT38wwBiNyTNkm9wZPena9XOBF4ZzPU3lNYjIyjrq
tiU4RxtSlp4QyWrX2mTp7n2Sq0ml0MawrGrKLGNrOKIwiqP9PZxraZqGxWKZzGV6fUzwrOoKu6qofCTPcnKd4VzEOUfT1IQQKDKD9IHGLlMshjIku5DT7gMlFVIoXCfV1VqTDUseHz/n47uf0r+gsDJSnTQURjMWOSWaGGEVk2mPEqm4HURnAawFSEXrfAIaYQ2k04SnVOqtrK2lMAohdRfQu2bEugpvSM6hxNQbkUnNeGeH8zeusVhVtLMqjTEVqb1nWi3xOuKyHKGgFREnwQlBzDR5XyXTF5uhjUIajSl6RKlThZJkX00MKB/ITcmkbaFpGPf7TBYrCpOxfX6Xfr/PycEBTePYD0vaCIPPH/KVG5fZKXOcq7h6YYdW5Yzqlvlixv2DKStfMZ+eEEMKBe6XJTZ4jBSQGbyW1JVLZkhnXErXVUTvU39niPD5F/fJavjm19/htpIIpYg+gVojFAhPjC0P7t1l+eQ+73zjq3jXUlc1Smtsk0yf8swQiGRFwXjnHM8OjsiMpxyUbO0OuTjfZtU2XLt0joePDzg+OOToaMq4v8u5S6/x8NF9fuP8Bfq9HnQV3X8N8shXHWtOpyOtWbtghyhZucDeZEGMM673Bgy3+sTQwxeaaltx54cHPLj7FDubMt1/zmT5KW++e53/+D/8B/73//X/5GgfULtEGVlWDYWB1y9v8ezghOG5HRarOQePT9hVkrfeusrV8ZCl6nMctni6b+n3RyyW9/mL7/2I7WHOb3/7t3m6f5G/+b/v8If/7hw7tFxtal7/o/+eeu8BeuQgGxDULkFYpJvyyUc/YWvLcPnKDfKxYzF5hDJ9Ll95jeO9T3m6d4/tLLCVF5y/9R0Kd53J00/p9TVqMMQvQOcwnT5juTph5+p1zGiA7o0gevCeEGc4+5BoI1EYrFrSqmOy2DIcZLjFArd8BKMCrba7zX7cKPSS051DibSGiRi72IHOhS/xOV2vSLe8cFrD+6cbG4Lz589z/vx5uqbol97vv+3Nz2x1u5dK8qoYGoyy5LqlaqboTCK1QihDVhiykSMvM3y0ZLli99wWK7/E2shq1SKlJuAZDHIgIpQky5J5SnAeKQ3ept40oVKotGtrDifHPN9/zs9+8gu++PQ+wQlG53f51m98i6999R0uX7pEUeR471FGdfKvmJi8xhOeLqBadHl7NcvmhOViAbVC+wyVCbxcEnVg0QpOXGSJwUZNbV3a9GpD4yNN66FNLRiY1OfWsuT59IhHB0csFoJ9wLsRty+NuwJugXOGk+Oa4FtCiGSl4fnBnCAMg9GILHiUKpGiK0ZJ0bFtyRxNpYUvfVcx9bAjLG09pZpOcLMVd+484qc//mvu37/DL372Ux7cv0P0NSvrWZBaFoRPPXuKDvS0AqP6NFXO8VFkvmxZNA0Ygyr6XQ6ewHWKMd+GMyHObGSPL1vjQwcKYjd+Xuqv3MC1sJYZnwU3aR2Xf0cxbv3YsyH0L/xs9mjd40RSrL3MrL2cAbx+zobR9KdmeWff++y/XyWzPHsuXybdXP/tZabw7PNfPtZAbuNgudkmBwIpGknopPhLhohJ7+eDTypAlULnz7YpvQCmX3H8UoC2RNVqlIy4WON9i7MNq7lHtCtMmaOLAtFKMgOFMei+ocx6SGXolX3qpqGqF/jYIiXIYAl1RWstddNig8QFgcjyJINUydY86a47tkwqIG06nev6dEIktJ7AOrhYdVUJNlRtkhYmHfGGpYoe1eXGtT7R6VpqdJ4sdqXSaKXIjWapLQtqbFXjncVaIAaMlswWK2zbcuH8Dv3+iOViTpGVGGMQMdAGn1w3vScqhVca5yMahdASoQNO+k2zo7dtiv4Ngeh8MrjwobPtPb3RY0iV5RjW0sVOloogREmw7WZDrkTnkrkekJ1pS+M80TqirPEx0jjXLbwd2trcAwmMJaMTsWHyT5n7cCrH60w8TsfOKSUuZXIlPNsoq5TqKv9pQslMltg3o8jzEYPhkMZaqqahai1tY6lXFfPJFIGg3+8zHA7pDweYPEMZg/MC6Vr6SkBeoLLUYyajwEWRar6dPFJpjbMNAnDOsVwuKcuSEAKruoEub85ay2q1hCI5SGoU09mM4Cyj0RDXOubLJVWTwGU5GEOo0+fJC04mM5aLZepzEwKPQMSAay1OOoJURKlTUVp3rKCtOzmjRRoJGRwcHvPnf/k3lO9cY+vWNlvLQO+kxlYe6VJRInQsqpLpmjiZJKGBQOtbbOvIVDLp8bZNURKuJcaAkCqxrgJcjAQhQCXn07VRRvBhYzcgOqlakecUec5iuepMXALNakXmIuNCkJWaRdtyuLhPLTJUkSNiTEHhpGiNvh4SREjseFYitQGZQrql0EjZIGNy8+z3eiwf7zELiwSwXGA4KNndHlNdvcxkMuXp0YTD+ZIff/IZbYx84+3r9E3GcHtEVg4QkyUXR0NWlWdWB06mUw73noJvuHL1Slqgmppea/Ek9zCHR5AkryGGzuwoFU9Wtkn9cC0c7B8zm69QWYYXyeDGCEkUkdZbvLf42nL3ow9Shltvm6PjQ8qyhzEZITja1pKbnBAdq6Yl7w3RjWewfY7WWW7fFEymC25dusjxwTF3P/6IP/3uf+Xd936Dizff4P7TZzTWbhjCf51w7aVjLXnh9PP2ishbN/tcOLCIUUaVFSAMPWvpX5D8+//4He7+cI+f/0XFH/7u73L/oUfvXCWq83gf8HVDEOfYmxwjiobLF8a889bXEf2nxLGlZ8ec3Nvj/NY206aHtmP+7P/5Ey5c/BW++d7XOdd7ysPJNh9+cMLuxUu89au3+e1rv8fPfvBnVMun3H3/AdOjOV+58Q5Hec5rr7+JL3Lm1TH9zBGWh7R797l+bpAyhFrJ1rnLRLckimNOJofYWaQ/GlMoTZzvowZLzr0ZcJnCyMD1mzfAP+Xk8AlZz1D0KlAzon+GEH0QitXeIb1yhKvvEWTF8NwYKywytLDsoeJldHmFIEf44IC2+4Z1V7hLoE3E0JlUmcSy0GWEnh2B4vRy/ZMf4swPsGn63vy96135Bx9nz369pgWECETX0B9qvv2dr7HdbzEmkBc5edFHtUO0kejcYnIDGnQm6ZcjLg4vYsoBy8olKV1mEDKpS+qmoWmbxFiKCFhU7pMU+nDBg4dP+cX7H/LgwQNsU3P50i6//we/y2uv3Wa8c5GyN+5UE51cdSPVX6/1kUwOaB6UNHuSydTyk8/v8YtHhzybOFqbMdA9Vq1jwpQKz+FScGQLqgAIRRM8R62jahcE37JDZFumgmwrJEYFssU+F5YTLmaOOCrJyj7IBWFhqWzAy4LB6AoXfYHRAiWSM+xgPCAoUr6o1mRFQZFJiEktJehIqhjS+rruAYuBEATClghd8aM//xv+7H/7Lvc/+4wnTz5haWcEJ5ExILzGOknQ60yIgqgixNiZaSlaEWkWMyaHB8wqT2xdIgK0SgZdso/UApTArQKhjeDXDFAAkXwMRLc3PR1OHSiI6+txKhlEnGWqXzzOsmEvm4GcZatezDU7/ZvWesN2b0AaQHiRwVu
/1wu3jkh7gCSOCpt2DGI4wwyeHmf7zL5M0rh+zJeB0Jflk69i22JMhfJ1Ef70fUEKhVRpajK66y3svCFeZDdJ14u1aoxUbPqXAdqS/T5RYUxGL9fs7m5zfqDRwoMSSC1w+NQA7cE2SSIphUZ2AYK5AmMyjFEISmzRx/uI0gbbBhaLFdP5LAUj69R/Q8cUeJOBNim4W2q01Ai6ancI2MriuyBR0VneS8ImZBgh8CFAlEShu54wgfctQiSdcYxpwygFGCUpc5NupAB4yTJELJ0Ri1K4AD4EopDU1tMrMgb9EVknUQwSghTILIFNHzozlq7nzNY1wTmc97QhorWkkAWVTYHckByI0jms+wLY3FSpL0x1n6+rBMVIU7V4a1GQBuOaHu5AW6pEryc5ifeJtTtl107jSCLxVGLQbdBlly0ipUygOaaJcV3xWTdqbtg3kdystExuglqp7saOZHmRQllZ3y+R1rcEUpi3KQp0UWC8Z9D1kzV1Tb2qaOqaw+Nj5ssV+XSKyZIkVWc9cjx9GTGyxOmM1lmsh9q2CKnIsnTjSnk6iXnvmE6nqbk5L1gsFgkwSJ2MY4isFjN8a5FBcti2ODeiJdI0LZP5DNuZyFwIktVyiYhQFj2WWU3TNKlvAQjOYaTAKIl1LQSfKtKd7Mi1EoSmKA0IT4tHaYHJFXvHx3xwL3BpK6O3M6bXOqbW4RyENuBtm/or1xNYt2GKIW70/iF0Yeh0eXtdv6XWGlMUCJWqhS5Ggow4H1hHknvvMbBxKDNad3mMpzbGUQoKY+gZUkSD0tjlkrqVxL4heKgai80zpMrxIdLGyGh7i8tXrzJ7fIwyKYPQe59cV2VLb7iNtYFVVVNuDanqlr2TE1yI9Ho5g37OzvaA8bBEZYq9wyOqoPjo0SOeT/d5/dIur1+/1OWVZWwNdyiezyn6OT0i88WC2cEBmYA87xGHEe/SYmddS+sdUkra0OIJZDJLTBoS7yLOBlSQlEWP/nCMi0kDnzZnyZ0zyogLjpvXr/H8+QF3fvRjbn7t1xMTACyXFc4laY8xmnZeYZsVWdFnPjvmyf6cUW+HgdPcvHqbto7cuHiZ9z99wH/5kz/moy8e85u/8/scTI/59NNPefOtN7ui0Fnp2r/cI55BZK/6LKL7zgVgYkQrhx4JxvkF2kywAlQUGFeQmZowOyHeu8Ps8Ucs9CH/5t/9EX/545/wV9/9PnVtWaolUu6zIwUiaJ4cHPF73+lz5cbXmPpHnN/d4r38bbQ/5O69z4mPppzMTvjVd0t6bkKfhm//5r+hjgN8LZgcFly6Ibj95ohq8pTh5V/H3ejxODiyQYkeXyaQo5qAEDma67z3K79HffxjBjdKdK8iAaVk9nH9jdtoMURVjuMvPmR2/AynFcY1PD9qufzOLVQJdRU4f/sdTH+Hpt2jWT6gN4hokQOSohRIJcnGGfgl2D0yPSNWknASkYMLCFkRa9/15xuE7tYeRNpYRk/EsWbU2sWSxq0oB2le7haJtHE9cz3XZNs/2dhc96GIjql44Y1iBy75B5zBms+FJAdYo1BBDIJmMcXkkQvXx/TMW2jTw/sZZW9MT8VkqtEHpKE/GPG1X/kGb988x5VrV/ni3hN+8KOf0gYo8j7eOxocRblLliUjqxAC9XLJw3ufc/eTOzzZe0brAq+/+QY3b/0m165e59y585R5gdYmFW42Hy3th5KoJ60OMkaQhnx8gcpf4MEXj/n82YxfHDjurzIOvMIryExAKTiwluN5RWMly5D66TKhCFLjtE7F0mgQuUZHUFHSyowYBNmqIT+ZIOYzetkWy8ND2lFOeesGeZ6j8opL13a4dn0bEdsU4aRS5m2QUAdLGy3SRGQwKEzat6i0JwSSqQRsWjZC8Hhv0ULw9P4T3v/rj5geT9FlSIX0ABIPoUXJjFxogjBEmXVX2yFkRBHw3uKWU8gy8AoRur2QIrGWMiMGBy5HtA0yhE49lQrpa4XLOsA6geZufwdpL7UeV6zZps3d8gpG6UUp4pe5SMIpK7Z2KUecGp0IeQrOus3e6Tu8JFV8QRqJeOV7vtK18qXXe7Hf7NXyyZfPf8OanXmPdej3GpjGeBoEflYiKbsojkSAnDKLsjNPSYRPUvZEkfZjaco62+P2tz7WC8cvBWiTUpHlPYJY2/inbLYYBMqkKoguDKVWtLkhepH62dBEH2iaJhlO6IgMDrtaYIPGiRIpFf1swNYgYzRqudTuYjJJrgXVasFsNsF6wcrByWzOtDqmdoHKthChn2XILPm/yCi6TbaC6JEhMW5CaZRIpiQhStYBeRG3GZuRiHMpRiCKBOiMFngt8MYQi0TDayU7Awyd8sEyQ5aX1NYRsWwVeXKcFOB1pHWWtq2RWqN1jlIZpVbI6JDGESS4yuFdS0Dg4joAMTGH64EUre0WtRc1ukKIBIpJMoDoA22bNu1ZlqXXECLl58U1LS+T5XsE36bJLcoEkkWMCbB15ilrSp8OtHF2oHd28ZFUvUtjOnbSvtNzpTtvtXG6FBsTFdH1ULXeEUKkrmucswAoY+gNhhT9PnleYIwilI66rqnzAt+2rFY1rXOs6pp2uUDONVqX5HjscY0aW+L4ItZaXJTMVxU+RsosRxtLvawJoTPDEYKmrtnf36c/HJFlGa6qu+82UtUVBIeREus9xIANgWVlk421yWnaFa71HB+dIKRi9/x5isEAZGL1bGdQkmmNFsk1sybFCATXUruG6H2KPRARYxQ6K1jZigLNeDRCCcPj5ZIPjo9xY8fQ1jxbKEyvR9kGVJvMTJJrpDud/DpRtu60/14G8rwghCpVyYTA5Dllr0cmEsvW4mmDpwktPgbost5S7lt6vRACPgSUVBR5nv6GZGs44uLQcHF76/+l7j2fJTvvO7/Pk07ofPPkAMwgkTATBJAUJYorWdKuvJbXpfX6xVZt+Q/yC/8HW/baWqtU9iqswiruSpRAigQDCIAcYHK6+d6+HU56gl88p/veASBqlcrkqZq6Mz09Pd19nnOe3+/3TeS9lEnjqE8mSG3BQXCBYl5RVwrZ74JTKK25cOky+6VB+WiND4GmqfHGUeOptaASgb2TY1ya0R31GJ9MsKEkzSR5qlCZxOg1RqMuhVAcjMd8+/Ydtve32T8+4vkr1xkOz5Hlc3KZsdrPkaurHIzHzKuaprbsHW3TnQ9ZXV0n0QlGxoBsBwQRKTdKLATKkizrkJgM4R1GZTgfmBUFHkOqLASBU6AyAyIwHPb57Asvs3O4w/68YE7J+vo6BwcHCCHI8h5VVaONAQyzvTHTwlK6DtfPXcPsPeG5KylHu8c8f/kKhydz7u884v7xMbaTIaqKk2kUxRM+upn/OB7L8mXBRPvIBhqf0ZYUeDzWx3Ubp8IW7SsSn3Mcavbv38bvPMGsay5cS3hwd5s//IPbFHSZ7T7ipVcuUd5+Cw+8fuOzPLi3w/F0xvqFlMFGguw9T/fCNXYe/TkPPzimMSVf+tKnqPwlTDrgze99h4d33+fV4WVG6QrleMLDv3oHij12j97nzgfv8voXfpHVVz/NzuFTCn
eMkxoZNHlI0apPMJqgSsYzz1A6/PgOIruIMJfwISHvnyCForGBzU+8Tr7T4WDnIVdfeoV6bQKjdWQGadZD+B7C9OIQwR0TzA6eaRzhhYZq+zHJIIMmDnXUKOXpe3fxu5r11YrsygifrOJShU/iEHXZQweHCyV4F7WVZcH0+JDK1igFunWbXJ6mD6Ft/3gsyZYS2TZagTgIZbk/SSIauLgu/jaN24f+GwSz6ZTHD+6ytZaSJCVal5h0BSlSBClCdlgb9bny8nXOn7/CuetXef35K3QVNFXF4f6cPOlA42iqhqAcnX7MjLXOUtYl3/nmd/jeN7+J8o5ur8MXXv8JLl2/Qn84Iss7bRNq8D4sI3qA6N7c3rdjsS6X+5uWAtEfMPjyT3JcOh7s7vLI9tihZM8orJihE81Q5yRVB105ZqHGiQYtJDkCjMZnGt3NMDhMKkmFJ5eSgMKKBN1fJzMppdxlWil0t8OFmxcpMkO5uxdlLT5weLTH0f4urp4hccyKCZWrmZQz0m7Oi698kpXuFgLdxqi0Ni3tQor5tZG2HrNtEyQlWhgy06XUntpNqEVkQQUhkSJGLyjZojWuzZ31ksjWD6gQCEWByEpS01muJy/Ba4F34LQmmASV5sgQWU7OL8zo4vA+SM40PCEafy2bmWfNPBYD94+swBCiM/ZHqJbiI499nPbtFMk7bcIW8V4iPNs8/XWHbM3/Ptw4fjwi9WyDeVaH9tdFFZyNM2ia5qPh2h9CFxeff0ElfoZi6UMLzjiCiO4ZRgiMTpa2/6GVhES96CJuPAIvhAjU/LDjR6JpAwjeRZaN0eCrNstMUgdLNa8Isym6paGlOqGTd8jSDrayTAiYJLpLBizzYkY9b6grh2tK5uMpeZrQyRIEFukV5IZOokhGA0TWoxKG4azkYDzleFpweBjDrbWUaDTeWWxwiDRBtDqS6E4Xwz/9MuA7inaDtzTKR8pka8funW/pgbLNkom6LSkCWsUgaIFBShV1VFpFLRYLx5yojfJ46mBRgMdig8eWBd6XBCdJkjy+pxA1ZlIrcmPizaGsCU21RAd9iFaAi6mMgiV1MRCL5cXit85SN47gQpwMtCGa1vvoqhlCW9Ms4GuFMgoZJMI7go3PDc4v9ePL5b6AzFlcJHF6JORietMqskSIqXNh0bSdTmVa8mb7eIx6cI0FPME1eBkoCIg6tPTXgDw8iVEFWULeyxn2ByQmodOLAaK9AZR1RVHMqW0dtVQeRG05OSk5OXqKGJYoPInIUdvbOF3jmtYxC9dexCE2ua0NrzGGJEkIQJ7DdDoBB91OH1sW1MUcaRKSTka31yPtdMnLmmQ2i+8vMUwnMz796c/y4ssv83T3Kffu3eG7336L6XgcLf6dh0RHa2AfIEhciLldRVVQW0kQjm4vJ1GKorHkeYdhb5Wn997hncePsPka2WFBNe2jsi3yqkG5ElFVCBtpSUJEB1ajBKk0sWAjEJoa6aKhTpASaSIlGBXdNQXEIYCPjpM2+JY20f6UUbIS2ToebNRXCi1QQZMqTWY0uVH0k4Rh3mFrLeXEJ2ih0SrBWsvRyYy+0IzSATZENzVpFDQW4UN8T8GjpaSsa7xOUFnCcTnn8dNddN6ll+e4pibYmqoq6HW69HJDkILzaxukuwmHe08YT0u+d+se27tTXnxRMm/ABcGwk9HNMvqJRGQ520fHzIqSk+MT9rf3sULQeEFt43cJUfdaO0tZlfSalBBcdJeUBq1TJAajNNsHexTTY7aubuKGKZk2aGVAaVZWVxhujsjLwHv3H7K3t8f29jabm1tkWQcpQoumG7JOl5nJGa5ucefeDhc21qirhqvXn2c8m3Lj2gXGxYTduuLu7VuMshzX3Ix6X2Pa65jTYcxHjh8TDC58qMA/U1MIaO9drdZQgJcp80PLwf4ecvWYud2lmhneOiz4g9/8v2h2D1gdXuWVkeDSlS7/6Y+/wcRt8vKNTb7y859l8z1Hp3uNV2/c5Ld/+w+YPDjk13/r3zItG1778k8zOHfC7foBVVfgeoFC7dLoTfo3n+NybiiKKXs7B0hfUkzvM5/XfP2rf8LhiaWfr/Dtr/4p2e09uldf4v0Pvs9mz3DlxkVkklPPx9x7/zY91YMm5+DtB4zOORh59LAHZpVa1xgjqXWCVAP6l1+jt/U6QmcMN2tEJ8G6CUl3jVB322BfgXdPUJTYcoytahJSjsfblA9P6KaXWL36Cs38iI4+YU5JNXVkIWd2MOaoqrh4LSdJs6gHCQ6Ex9kSbx0q64EUDNbXCUpEI5ywMAARy3N2tmFbjrDFR8/r6Rk/+5y/GZt79iXi/sOiaZMLK5UPPfG/8jJo8Yn4ttu9VQjY2TvkW9/+Htcu9JH+GKHguRtrJJ0ON168wSeunef5566yfjGQjqIrYqJbpEHHAGnbOLx1BBlQBoKNOmwlJXVZEkLgxgs3+MQrLzBaW0PpBKF0rF1E1DaLlgmxDEsNRIe8xf4uWgTh7GfWAnl9hQv/489wXVR89zfHnDw+wHdSbD3HmrhX9l0OQTEXR4BHhCYylKRGZ2nU4/r4HScS0hCpcxUwSVOaTspUGo72Z2ydO8dDG2iOJyghSdDs7E1486tf5/6d93HNHGvn+FDjhMUJz8b5La5cucYwXcUJjVIGiSSIGOsjZYvkekdwsbarmwAYkCnWS7x0CAM60TRB4pxBKtPWRvUpahw0iKjlFyJKDbCijXeJU38fXESPVSQJYzTKpQjr8NZDYyPSJuUSXVvEYywokuHMkJsPNzwfd7Nr/0K2shSpFmHdp9qz0wFdNO5bDNKXjdvyZeIULITIivrIBGVZw/FMLSiEwLVgAURjuQXt8+PdH2nRqlNm1uKImrPQulOfomchxPpo2VAuUTmxbMjPHrGOf7axWiJwrb4w4JfI4vKniINoIQVBqFYTHYGISHuOMVfqx4EeGWkAJY0NuKpGioBMUkynSyeVaFsxKyua2qLdnCwLWFszn59EuNg6vO7gSOLPNKfDlNW0JhM53jpmkyn1pMAlmr1JSWUtvW6XTp7hQ0VFhROSXj8nzTtIBCoEunmH3KTUwsT3mUWxr1IGITTeSaSQ0f5VWUKw0eYVj8XQCr2IqV+tk2CIzZD10WAAEZDGIhqLDLGI1NJgpEJL3SJTHny07DeJRCUqMg199KlMkrQ18FAE76nKkiZYKmuxTcCj8EFRW0/j4rIKwkU3G+exAUQQJCqJYc8efLA4W1FXDXVT0bgG58DbFv5WBpMlgKMSDm0kaUhQtsI2dRTxKhPz5IRDUJMqhZeCuo5ZLSGIaL5AiCuYEJs8oVBCoWVsTQMLHUO02tVCtJoqiUkUgagZC1JE8bOSBBfQdZzoikSDjghWWVuEE2yMVlnp5cjQcDA9Yr+ZcXAyjv9OJfQ6fYaDIVm3T6fXo6mK+LlCjW801XyIqwVVPSOECYfugPG3JiT5gKL2CF9iEqRS0FAAACAASURBVE9jBU3jKJsaFQLGpEgpSZIY3Dmfz8gSQT0HGk+/18VmmqYpOZmPabCkTY0SBiMMznn6G0MK73j8d
IfLV5/n6sVLGCqe3P8ew04PW3rGR3O8F+AVrnE4Ecvp0jqkc6SynUQ5j6ss1hpm1mPZRSuwdYP3XWZFxqPtQ6wvSLo9inHFV3/vtylnJ9gm2iYrJcmVJGFBX3WMMkMQCiVqpnhKAjSOoAWuajA2rnFbg1AZ3jURRSIgfIMXnkY66mCR1pE7z5yATyTBWhKlaXyBrSRe1uRB0u+NqCqFDkcEO6fQc46rnJEwNK6hKASNEFjZYH1N5nr4psFkGlE7bLD01gYclRXXXniFYnvC17/xbV68cA498pjKUU1qGmvIO5pO17DaUeQbfar1FR4/2WNqBbNqxqH7gN5ohNeCJDMoN2ezr3j5c6/w8GTG5L9Yjo5qth8fUmuFWR/gVBYda2USKcrtNSl8IFExY6+2Hi8TfGUQVYMUcx5sP2biZlx5aUS/Cbi5IiR9mgEkoqGZl0hpSNKEGy+8yMqwx3w6pZ6XJOmARnewIdKO0p7mwUHBKD+Plqt0aDg/20eGMXsP4PhoTvnwKfLceWSI12bMbgjLujgsNt7TXfuZHz/Kx7JUP1MThA/9XkgQWHQokb7H9tOC//Q7f8i8/B4Pnr5NI/o8KSXT46d0JorzmwrvJ4ir5/j0f/vPWO0ablxa4dylKxztBIrtBI/iF/71z3JYw4Ptff7s97/Gr//Ot7i4dodz51OEyPnO3Qc8qabk6fMMb+/zqY7g569dJv/cl3CXpvzmnz7hONR8ZvPT6HCO27fv8corW/zgVsn//W//T5pqwovGou8nDIZDDmfb/GD7kE52hc2VGvwJeuzQlz3NwT1WbrxKlrxKICHrCZTQBKuipqalQQXh8PUMjG2HLTkeA3YNKWpCmWOParKNi4xuvkw9q7EzDb0hYbrD4OpVulvr6PQyXjYM1hQDnUEdcOURdlIwm50wPLdFkhgwCSAhyZCJIQZunx2cggsesVDBNTEAXrYRIJKzEbiL89wiZc/YbZ8xyvohRxDRVVSE6E7sbUOoalRvQCyx/q7Ic9wbawHFSYmqBZ1RyvZxwb1HM/buHSKpWL1+gf6LK9x87Sovvv4ptkZ9Ei3akB0LMSIacBRVxTu3v8+kmpEmKbapSINGOYcrpjTljLWNi7zxhc9Ho2a1SEEVbZ29QBrab0ieRTbbplef/ht1+lHioSLKk55b4fJnX6Xz5l+S7T8inyWkoUfwCp8kkHgkkOt1ClvgXU0tIckzBmkXTUKV5AQ1QAaFrGpwDa454smTGcEJpErwuebxdBsx9VHnbwPCBpg5pjtjprvHSFHiQ4GXFVZWWGEhdBBMkLJEm4wgHK7NorStVj8zKcF6VJBoL0iMA9VgMQQ9wOlDHA5vo7xD+STGcxGwzuBFQMlAzL+LiFNDNMTTwaOpccHg1YIir+LA20eHXi/iYA+pItuLhBBi3FWE21zcQ4SMRNvQhmOESFcNYhHP9OxEQSn9DKK0+NuFKVwQHqFaFC6conBKRg7Mh6mLkZ4Z5ROL+6oPEYWSywavXT8LN/EW0Vw4Ei+YD/bU6QNEaz3kTxkewVsIiygqsNbRNLaNkhJnkDdwLIbOp0YnMZ7otLldZEIHf2rIcqqJO4P2CYFAIUQcOAcZG7AF8yuEQHAWoWxkE4WGoHXU9LdrIgQXh9k/Dk1bXFgxUcgHT20dZdUAkqzTJRNdZFZiraWXmqjTKeYx6DkEirqkmheEkzEkKY33CFswNIIk7yLbk2GUxmoFVrVOh5IgDK6uKYop1oNSKdpkGNkGdSuBMBIZophSm9bBMIRWqxW1YqnSGClxLuBstF9vWlc6uYQMAt7bFmZnOUELOHwbvChENEfROrr7xUUeF3diDEoHUKCTBLA0dcSWCCGGhHuBF4Gs04GmwgYRc+tcNFyoa0tV1bFRsjEfzOMRLi7+uHglWWLQOlqsO++omkXgdaRBIgRVXeOCQ2pBogW2jvqlpJ1KORfiRirjhCyKN9toheVEUywDIRe8AylVzEuTIk6lpIjxADg6iWGUp5xbX2M06Ed0TXist+zu7XM0HhNNXHIQgY6M9NL++hCRavYOj9jePUQrzajfYWPUJ9eB3iBht5izfzSmKmpCKDjeH7OX7NPvd+l1u2SJxhhFJ83QuaLq9Ol6Te3G2EbTlHOqmWdyNKH0Nbor6Pd6CFHBrACi45m3DeOjQ+ZThQ8xLN41DTIoqrIihDqeW9dQ24pZMUeKhJggqggiMCmPsFbw+NEuTx4/5dzWCoQ5dVWQGEWedMmSPuNZTVMU1M08omwh4F3UmgkRiy4pJLZqqEqo64CtTlhNPcOki248jRdkJqV2gfVhjyxNePjBB5T1nFlVIrWgLA2p1mg8J7OCrpEIqWPMBqeZMlqqOM0KUfcTvANUdK9MU0JoaOqGDjGvzROpBqe6yRipIFpN3SL4XQZPqkwbRSHR2uCl4mQ6ZZAGmqaBesZ8XmPTNgw2eERQKG2iXhFBjaeylrKxJFmH/tBgkm58HhKFYDBcQQ1GTCYHCOujxlRLRr0e8kLCk90J0zowns6ZOUvqa+YIVlY26BjBhcvXseNjfumXNnjyaJ8/+aO/oD/sMwlNRNPaSbZvKSs+xHiMIFtai4omLrbxeOsJIbqnFbOCajpD+wCupchoBcJTViUmScg7XcbjQw6Paqr5jBAkymlCnsVCIFiUgavPX+fBkydc3lrBmpytK9e59+5b5ImBZkp9coId9KMVeOsaKaXEBvePSEP7xz9ES3Frd9Llo4vpbdxjXVu8higVlZKtCwM+9/nX+as/P+bB2+8RVIJe87ixxnR73PzsBT59fZ2jkzmf/vzzXLuZUt1+j++/9X3efzAnC+f5zPoV1p/zPB0f8613HnN+4znyk9usSs3ed484fymh5wLnsgE3rr7MJz/5Kt//499gJ3G8du451MqUS/3zvPODW6y++AVGg+u42tNNBK///Gu8+b2nPLzTsH3s+cG773LjhcuEgeHTr7zIW+/dZ+/+Hr/wC2/QS3LMpYvUHEOege/BvESqE4Leg+AJ2TqWlICnQJF0NqMBcNu4z+f7pHKGdAnSNxRHOzC4TLb6IlnHETYSxk8eMDvSbN28gRldgDAk+BKZ5NFFOfUIrZicPGFy4umtaHQyIJAASSxoQ6TKq3Yfcc61e1jUa09nc2obGQR53joj0iqTxNnT3NKVFs1b+9jfZh0vDDeCjwZQatnI/B2uhmUPGajrkjt37jLorrDR2WTmLSc+sLF1gevXLnDx5jVWz11EaUNRFpzMJyQKcmNIlKAux5wcH7K3u8M7793iL7/5DlvnL2FEQCd6+b5P0ZkFQhDvpWeugh/y4T/6LPFxTxDgGocUkuHaCt1+jxACWZJR2qitm+DRmcSaFOUdBotTsc5KspzMpCQ6QyYJKkmRQRDcQn8Ws0O9i6hnlkhciEiYEj4Wzt6jRI0IFUo0EJoYGu59G0IDwoJ00dgjUBFEpK5HOYrHqAQhHMiAkjIGd9PgvcVF14QYpUBrphUCwouWDQS+RcGWJ7u9bnxY3H1CixY5nA8srBOkjJplpIxW2FrhlFo2cALJwpU7DrlZ
xk4FEaU5EBlWgvCM/kzKU1RpYbARwmmEy7PNxFm6YFysPvhnnLs/VnO2pGeK5XdNu89FhDB6P8Ts4XicNRwJi/exoDj6U0OQyHKzKKUwC+YHZ+mP8hQRI0ZHnXVpBpa/X5iLxAZNIDhtZBff10eMSkQc4Cw/t1gCjPG122s6+A8Ph579bv+Gnu1Ho2kLLbS6+FU1DSeTCbsmMJscI41EGI0whlppBkmK7Bi8nCO8ZdDNaKylsg02WDQCjKG2DfuTE4QN1HXMIWmkpHAOIRVCa9IspRcUXTxBarxKqDwc4RDCg/KEyDBrT0BA+pYCJAJSC4J0eCKsKYJH+oBUAktrQS81AoVSEqVdNAZxjmA9MgQkEuEVUjh8e83Ftd/OtsTihhp1WNq0zZOLxUOMFQjxRtVSCUFiSCIyFhbTR4dSCqUVTdXQ1PUyNFIhCC5qCW1V0ySaPMvQRrMyGtLv92isbcEwzWw+52Q2YT4r0EqijUJLMCpyrbXUCA20ry9EvJBCINrznjniJtverRCRkqriL9lSG7uZYnVlhbWVEeu9jFQJet0Og343FuDec3FjyNOn2xR1EzV01rOa9VhZGaC6CZOqwDcJ1dTgm4D2NdJXDLo9ev01xJGmmcyZKUfwEZVzRcFxWXK0u49UAmMUw25Cv9tDJRJhOpgkpdtdRwZLOQuMxyWUEhJHVdul2Uss8jzeeQrvqVS84Wkt2+yd+LMs41RUyLiOgo9ToXjzivbJs8kMbTKUUoyPDxkf7SBEhXOxadMyJTEdhBIoLdBGIhE4FzeDum5iiHdZUPW6EcnxEiXANQ3n1we8fOUi3U7GztExs4NdjiYzjvZ26Pf7XL96hbW1FUpnOTg64Hg8oQwQ+l3mXqJkQlVZJqWjspZ6NmMyPsFVFXiHlJFC4ImieZzFWkvjHEIKLl68xPnHD3h85340NhFR+xraibryluCb2GRIgVEKGUBrEHUsmhKlWFvbYNiX5BJ8WdGUBc3igkZQOU9tA0XjmSIQWYLzgEq4+/4DHo0d/ayHkRrXWIq6Ym4t1cmE1eGQcj5mPJmRJ4Ykz+mSsu4zOjZwOC0o5wU+gdsHRwjZYZjlqM4WF4YbPL/a5+TgiNnRDk8OTrDTGcI2UYdqG2rnkeiYX0R7eeiYvbjgyzvraBqHFIokSUgSg1YC7xtYGJQ4z/r6BknPsbe/T6fTpZpPo/OoSWmEogngbKRcd3IDISNJMj64+4ity5fxwVHrLo93j7hw4QKzccHe00d86623+PJXvsLla1cIISytnH+sDxGn388UVUG06Z4qDgWForHQzAw+zHDBMegP6CTP0TXPcfnCOrcfv8tafo7rn7zBo6zitVeu8sb6DYw27By9zb03b6O6K7z0mZfZf+8eZpozuV8xL+Hn3vgFZtcKfvUH/ysvPPcCk47mS1/+CTq9wGff+DR3frBHt9dn9dJNHj+6xUsP3mNwmPHG2huMTc1f/t7X+amf6rM+3ODe3Tu447eZzo9oSPnm3SPuVrdY+8RFXn3jDYqiYmO6zZ2dhHDlZcgUTSNBjqgmEmPHPHjvTcYHd9GhYnMzY3j5RWZihaw3BB2D2qk8ZQ3jeeDOvfcQ5SNeutSll3pCd0QQhiAyKnuAmx5ydHTA2uoF5gdzdD4mG2VI7UBqRNpbNhOjzR7D9edj8ehMLOzagm/JQlHRNU+paGRUFGWcsltLt9MlzUw0j5ACwSKku+3bzixYL2BB7lX87Y5l0xY8dV1H1sHfZxn6iLQXhyd8cPc2V26+iPZrDM+d45/+z/+CC+sr9DINPqJ7RweHHB7uYeuGPE3AOZ4+fMB7777DB+/f4umTxxxPZlx+4WUumqs0waGVwAaHs00rhVj+739z9fh3PKSR4ATD0ZDzF86TJAmi0TghqaXAG0jyFOEEsrYk0iKkJk80aadLlmakSY7OU+hkJKmOdHBvECFFBY1sc3fdrEQniuAbRNMg6gbfWBp7gA9jEHO8rxHBtuZoCqkMvlQ8vP2UyUl0lZ7Na1SSsbZ5gc3N86RpdGj0whLHyTbS2oWH4M5kjJ4esQI4Y9zVPhqgdaWMdNJT+/fI0pJw6tIfFvS62FRLbcA4QpLg6wbvzqBVIkpkFgYXQrSyHGSr+T+lAn5Y//VxmrHTJuXDYdv+TGPknqVHho9vTkT7etH07rSzEZw2rgufgkXG27KRbJspa+M5O+tamSRmYWbM6Ss+e8TnR4NDrdWySZNSnqFMnjZxogVfzn6u2CA+m2e3MOrjzHcjaCmRZz77Yg2cdT7/8Hf8w44fiaYt3iBi6KvSSdSaCN1ujA7vGwwCFyTWFsyqBu0dvi5QwdNNTWvcEQOwkyxFKMVsPoOyIck1qm6YlSXFfE4DKC2Yzqa4qqInQTmLSlNEItFKk+okol1aI5I4McF7ZJDoEKHPRtDykn3U47jYtGkW1vhRZBjaYjgoEQXUTuJrsJEEHBs7pSO90p+90beNoRTthDdgGxsvOq0wKoUQCzzVolGyjRNoGre8eGU7wdAqOjRBu3ABRGvjbm1sSAUQHN4KymJO4hKyTkaepuSdLFq7B0HaSUnyhMlsSlmW1I0FHfPayuBJTUSlpIgcYmtdiyAaAhYhFmhPe/oD8SYkFEYqjJLR0SkEjJKsrQy4fOkca6sjZDOPwZa+oJiWCAJGa4Z5QvfaRbyHqrbYxjJMMpQWlKEmU45MeQxNLLx8gww2NpuJYXPQxxVzxlLS1B6bELVWPqK/1ntcU3OwP+Vg95CgjhFJh8FKl9WVDKMCWdoh60JJg8gMQjmKwhJCCcQQa6U0Wmu0UThnYzMvJUYYjFZYW+G9xSSx0YfY+EuZIESb6SFdi+JGXZMxkvlszHRmQUgmkxnORV1c7V1rbmPam48kabP2RPA0taOaz8ErEqm4vLnGK9fOcf38JlVVoNZG1FXNbD7FVjN2J2NODvbIOh1Wz59j49wWK6NVqskJ4/mM6ckjNlZHDAZdGiReSLTRGKVI4jg8ootBIo1AGkUQUctQWcvJdIZWhtFg1FIeYmypB5RW0XBGiXjd1S0dQkqCczjRYHQaBy5CMOgPWF8xbPR7WB2YVh4Zovtp0zjmTU1lox6hVAJvLbnMmJYVShkG3R6Tw4JUGbRJQCqmdcntB49YG3a5cmETtG7jLGjzHRv63T5eJ+wc7HE8LZgWMx7f3+P61lU+uz3l+qtXSXKLDBM21nOe7m4jbQPOITDLjUrIUx1F4z2OmKcUWl2tR+C9IMs6rI7WWF9ZI00UTVVQFlOkVGRZilQN89mEEAInJydsra9ydLCPMYJiPqNoN9/ZbEI1n5IOe2ydP0eSpvikhxlu0pgu5689z6zOMC6wuzvhu9/+Nn/8B3/Af/8v/geGa6vLAuGvu83/WBxBApEOf4oTRz1o4wJSpsxmjnffvcs3vvoNyvkOW+f7pPIyT7b3+NRPXOOf/fwrvPudi3zv3lO655/jcbnH+5NDZtX7rPS22Du2vPQzv8xwNCJbzZj0/ozJvaecf/3zHJcdOvmQkMy5fu1VOqMVbCMx6yPmQjO89BzJScFX3/o
VLKDb80qDElYhc2uX3yEn2jLieUxF6RkW61QUh7jXQGMyXPOFM5/i46fv/46Wja+ol7UsTg0a0HqzCjksFIkC6MxgyHnIeOrmlRZcWgHFBgRK8gqRr46JldLFkrhVMyLa1VA7YmmSJPhxVq3ZJUokngVKQsVNbTFJSjEQy2OVusmDc5G4NAF8TgQ2sFUSytDRrd21oTMQRcinmT84LgIDdXHTpEHN+iVMTquDHsWFsFFmKT8ytSoiNJ5ohKDKymcwkdNYWr0MbiY2DRNCybFYvVjFCvKVSiGgzYrUoqpXCupFOJVJQYV0kxGhJow6rtuFyvaULi04cntK1nf++AyXRMOSiwpZUctbZBWSVUqySTTYVQHMTcRG5RnUwWuMpkIhBJIaCMwgImCVJidUY+jKAw0rFHJFwSZDtH3HWURWVERCM0xiZCdBY3rCg0xKTpWhiUA87rOalZMcIRVQJnsyFKlEld0hnFkU0yBIhiNSSLMnrhiBtLGzq66EhdJ8YtAUauZDIcUg2GeFVQ+pZA4GLdEmuFy86hOE1MBoVFKYNWEug53dpiNB2xWC2xPpDoiJkuYAtHCgFSQCtxjxwUgrgVhSH5QAiNbOJuirIl6IakFMEpuij5NyYI0mi1wimNVZbSaFzhGBRQOcPl2QlyMXMRbBztoiE0NU1MtCGgUsQRKAZWHE87TWUNJnhCENpsiAkfEl3yBB9QSZw1K+coC5ez8II4hPmOkBKVLVD0BkGJqihwA0fhCkauwrYN050t6mZN3a5JJIrSys9FJVl7bQduQNLiGDaoSrxPVFVFGzSl0hgdaLQHLCYpXLYglkLAocsSjyBFQ2fZ293Bd3NCFE3b1tY2e3t7jCZTVPJUylDTYKuK2nu0tWxvb1G4grKshJ4xLIjJoq0cBOPJkOvXrzHcPqDcGrN3Y5+Lyzkf3b9HjI47d15FlRU1jjc/+zOcPNtmfvqctllxMl/RBdGzTgalmBAhhUyMgagCmoiNkVJ7klEkY4jGZhl4waCaoAaJ1TqiqoJP7n/Kk2dHfOGdLxFC4mI2ZzKZMhiWLBZzVAo8+PADTh58wutvvsL1Oze5c/0aj4+PePL8hP2poVstOD9r2d0bkYLi+HTF//q//G88fnaCCQ0cPebb18b82u/+NsXeAcpGXHLEmFiu5gwHE4wpIDZADcyhO0KZQ5I6Q6Wa1EVCMyfFIW4wJbUt2BIVM+tHFyQs2luOj45YrdbcvHWLUjspjBYL1vMZfj1neTFnpCPDwZDB0OLHFd1aMT97ivMtSZWwOoPCkogU5TUZ+qhIRyPN8PqCMjXQLGEwQZVTSmtxKrLyS2bnj/juX/wnjo+XvPa1LxLCpQQDI2fa6dETpvqYxcc/YndrBz3ZoXBbDCZj3nj7OlO34md+7lVu336N887zp9/8Y84PCyr1NpXZo15+SHNxwtf/n3/L0wdH3KsLvvAPv8SnTx9wq6j4ILS89NZt3J0hb7/5yxx+41PeeecrrHaPOGnWfOvrf8TP3rrFq3ducPrRETNf8/Jrr3Hj1tt0Fyv+i3/5Od7/4OsM3S7rtuMPv/0Dtmxia514fvgelIZkDMeXUBeWx3/6kJ1il5vj10k7N3l0tOb2wRmvvH2Te8cPqBdw9/4TvvS13+STH36H8wcPOdCK9759j/HLU7beeotH779HigeUL024/vIbpOUl19KSyY3Po6IhZkMbNLSAReNSIlpxtlvMO9pKrPCvHhmhxcjgL9cXsuwTyShGzqBTAEqadg+UppwM2L/+Ks5uo2KVn6OvqpACTifuf/SAP/p3f8rSKP7Rv/4nXM4SFx99SjG+ic+WVUb1joLuCp3NdVhQMZM+E8HAWsEKWKTE8fklJ2cLjC4pDGjlIHWgpSAMXUKrEm0n9HQqosEGwyA4fGikWfCBKslwOOpAchZTDCQ4Wxl8SmKDn4r8PJLPuLs7ZH9vkm1YLOSsOVJPYc6yChIpKrwfEroB0WfGSu5PlYFkZFgpRkmaonRMJiNGgyHdYiYIls7mCzEKI8TqHIkgQ8BIzOhYj4tJg5KiuECqXNxLU64Jnei16RuqTIvri2GdETiV4saWwxormjcreZb00QIoTOjNiRRGKfECUBqn9CY7jKwhiwGMs7ly6Ym9CpH1ClKUlCJ0kY4OrRIhSaB7yu/HoDc0TukUPEoHrEa+T54mD6kt/Y2ptMBoQSVSTytXGqVsHlZIjSm6cWmwQgiU6QrpUkGQvxCDNNTaoI0TVM4Il0sim+wGGVJKk15AqUyK6Nx0xdzc9wwRoYqGHBfk/5pTOhtaonVm08QJsJry0D83Of1CyjFDKVNgsQqXzJWbewgCnGgF2m6e/0pPDyF6khIghtQPEAKkRAgyoPC+y34b/c+wMSVR2lwZlKT8OWnxgjAKlBUtZe/jIEYlQvmMqOwWfkWN3KThqr+Jar74+Olo2ug5n8JvlXtWQhdNpjRYoxkUCuM9TWwojWFcVeiqInSWrmvRSgq4rqmJPqDKIcYWtK2nbmpCu5bi2yhoEjHlbVYpVp1Q0UII1MmCN/gg4lhizLaxilY7knJ0WqZCFo/VEaMkGNIkMVyIGtaxJmoRxBpbEKKCKM2NSRFSK0VvkgmF1RqP3IgpxOy4KBQ2VxQklxAoUBxpZPrjxaXMaMZbU7TviF6Cl6Up7WhiIHZrtC1omw6DoShKrFGMBoYCw2g0ANUynoy5dnBAUTnJrNHirNRbAsgNepWbIjkdWfj64mLrF75O2RJWJl/KSgCljypvWOIsZfMUItHzrK+mGippfJc5y7bnDCNNidH4pDE+ZFtZOaOrQUWKkbprZQKWonxmSufrejXl2XDN84HkQ6J0mrrpcGVH4RTVoGQ8GDDUCpsaVusVayPBnuumZVU3xCRB48oYimpAHAwx2hKVJmR+fEzSMAyGQzwJ17bQSDSEQkKdC+ckagDJI/M+kKwsemscKINyDucdtg8IRZw8HYoUDK512R5YYY1mOCgZlZaBS4yGQworIZCtD6yblsViwXy+FjQ2igGO2AlbCi1Ih0RAqA0FBaUxSaG8GJz0U22hQLxg9a812gvtoLdbNn2eYOEYb41ww1KmdUbC3JvgWbctSiuuHxxgraNwcsAmNIvzJZ0KRBPorJGMG4fQ4FqF8lGoEUnnzMDsZ5WnjElLWKpxYrWsEWTQanElrQYinD4+O6FrZoxHJdqVqNIymI45Ornkxz9+H6UMwQeKoiLGmO2TNWR76dn8gmYx4+jJAz7+6GM88hmFztO1DYv5Ja5rmV67xnxxyXq1IISAK0pQYsTS+igRBnnarbRo1lLmjqV83yR7RdcQOk/EOIMpNHHVYIsBnRfd6tnpGYvlmps3b0mMR+gYVAWFtczmC85PVzTdmtp33H79NW5dv8X773/C+ek5n3n5JS4On3L45AhVJ4qxIbYNi4sLlF8yvPUS9fKEZv6cYrtEdS2oCcaMGVUDmTrGmtAtMVWivXhCvbzLZLpEmzlpHQlxyPnRJfXqjNuvvI52GtqWLjXUvsEWJa4YEVYJ5yuGWqNSgeoCi4tzbIh0zZyL
82d0qyXb9ZA21szXlyyXisJM6OoFy9k5g9pTXu5jJ1O6iyXVaAyMUIMWozwGzfnJOZenj7BqTjkuGN5+A2v3CcuWiR+zeP4UThrGrcGfnGHurFBpjtYFsZ7x8MPvUz69z/Lx+/zCP/0HbL30KmFlUXFF0SoOastb18bcemvKXz46xlc3QL3M8anj1qXmP/3Jn8HyLqE9o6kbDh8+5Nz/X/z7o0smbcvKBn71l36Nr/76b7Bzc4/JvT/kuT7i+fw9lqennD/z/MH/+5+4/uo1uvuOVHoulguqyvHLv/GP2DvY5ZU3D/iPf/5t2h/+kDdf/iK/9eVf4Ol73+b9j77D4eKY6dZ1Rmqb00bx9us/z47bwq5WPD39iL2bBav5A370+DlHFzWzywnf+auWdz+Ys2+XtEfPmezu0aTE88fH7G1PcWrF/pt3aMtTEmPc6A47bx+Q0hSPFGAutQTtaJFi1qpApzXzpuSTj44Y7ZZsv7F9NcTOZlI6c9fErABZ/bkB2ZpO0V2C6NjefQMs6IHlc28cMBwVEK+KJqXz5qEUIXjqumZ//xq39raZTqfUdc3FxTldavGIRtq+MCnfMIDzn7p/Zw5myNdlSpysVjz89FOePXnMwXiLoBMvkAMz/U5BzFP6GNCphbQi+iWxrTEhUmiNtQqjHWUpmbLFYEjrKuxoSp0d7ugzzaIiGQVe2Axdsyal7LqqS3kHf1sNmaQw76ntMeQawUjRHWOkyxS21stgUiC7/PunuNEBvUgHU0kKa2ONOBPTM/TSZlisECOZiCK1ni52+HWNCq2gZ0pJPJAWBIYQN4yPEELOJc30NTK97UXNWUbHet2Wydooncia+Wy20l/XbNIUQ8xOhQYb5ee1j9ncKhcb5Oxac6X96ofI0qiJo7Wif32ukDulX9AD5vefG6aohA6YtCH2NNLcqIWYa94eLc1W+3/d2r6nJv5EGLgSxtRVnNPVvR1Twsf0E3m9NuUGPolDowYxVzOWqtLoLlzVBfR1osYY0YyF6OlBwE2jJR/C5jr1DSMhbbwflJKhdEhiMiMOoirXHMIK6u+zmFlxCbl3kzjhSDTUC/uGvGbI165/f9mYLzdtm5mMMnkwIIMeOYM9pIBKNmvOc+C3ZmMqxqZeznTavsb+ex4/FU2bQmG1EZccLRMJrSAGcWIjaXxoWdczNE2++SFai7YVIUDjATSFrYhVRdt0YCu2t7YoSQzaBlJiOHCAp13P6boaV1i8LpmvG+rVkhACi/mC5dmKi1mNKcbZBCRijSEYQzBa6IEZXSqiptARmzyOjoIo7jADx6r1mUKo6UJEJ3BIxlKprSBBybPqOkrjiMZhgspufGJf30+StEFQsp6qGMlTKaFYDtyIohJ3xdIYTKFoQostSjyJpq5RSsIW63VN1IYueBatBHvHFAgp4JOnUA5nLdpoEnbjdhPyphpkaENCDDSM6nV+vRU5aNk1pBHoWnRVoqoiT8sibdsK/K01Tmd7faUky0XAN1ncQYpc7wPBRkxVkLLWJ6hEwJCiYVgWVGUpbpDG0oYGVxSyceeD2HsvJiR5E+i6sNF6hU5c8TobqJH3VrdrBoMClcaosiAqGJqMpOXTuKddomXTDGS3oMKxrDu6CE1SKG3wSgvSmzR6kykjG10/cTJKdHUq6wNF/CzUMqPkXuinmr1wNYaQdXo6w/Jy8FmjMUS0ShTOUFioCodVkvElLkqRtu2EptIfAnnyZI0I5nWMXHGvRaZMPtz6n0lahitFYbNAt5+wWZyLmNaQOglVT0pTKE3p5HpFH1i1DbWLrNdLlutIoeH2jQN2d3foupbOiEvSqQAAIABJREFUe7Z3t5ls75ACPHv2lMfH9wl1ZLcYoMxQ9L/9IYjGuYpkSlQy6ORluGIMWluils00hcBiOWdxfoLFo7SlchXWRJp2xdH6nJOziC0rvCpofGTd1lw+eiCIckiUZbXJVLRGg4oYqzk5PuTJ/bvoGLiYX4BzaG0ZDUas5+ecx8hoOiGEmrPjY5xOeKMJXUvwAZsiCXEas9qAlvWidH8gJ3GfdTJd7rzfaCZi8jhrcbYg6UAbEq4osWXFummZzeeMRpcE34m9f2kx1vCZtz8HvuHevQ/56MO7HJ6e8+Znv8j+zjV+dPFDzopTtgZThmXB45NPaPWMxaJlvZgxHij2d7eYjArWF4cUQwimoBjuYKpp1hIYYkjMTo8YjAqsBn+x5vLsmNEIupWiq5ecPDsjBsXUnVFOCuwQurgUN14zQncSDtzNn7K9vY3zS5rLOd1siTMlAzOiq3a5mMMnHz5j3QVOL4+JacQrL32Wna0d2rhmWF3n6ONDttaXLLxmNbLsvLqF9lYcMNdH+LOPOLn/HsvLE954+y2qwqAGp5iwoL08pKwv+crPfRHtRphym1DPsVu7gCB8v/Tlr3D0LfjhB9+iOT9leL6ibQ12skW0FXbvDrc/+yrDaxUnf/YpzbrjonnA//lvf5//+39f0p1d8OXPDnj19h1efvsd7v3oY7p6zLBM3BydcXd9jpvWTCcJ061o9wacnT/g9MlfMoiBX/mVX+cvP/oh3z+5z+lFTVk5LpZrljpy7eaU1bMjPnj3GU8fJX7rX/xzXrmxz+dfeZlf/pW3ePP9l/n6d77D2++8QxcS73/6nNFBTT1/RntxCn7Bjn6F+rzg/Y8DlzPLul5T8ohnzz4g7BXoouM/fvc9rr38DsNXfgaztcPt3YKt1yxBTyHVrE8fYIsd3Hgr67cDmhbwDFSFDoKg6XXim3/+ER8oxa+8/Bq3FUwQdGOjb+t5cQqpkJLfFLLF0IhGPnnKXSUMC+UZJqGtpSSFcbZTkB/S0C4bPvnkHofPn/Pq7jbWGkpnGY0qEl3eDhVXLqn8ZMOTpNYJJLzWHHaK05S4rOd88PFdfvzD73P2/BlTY/GZ7nlVISmMLjHaSsSP7+iWZ0xHCXt9yFZzQJsS2lZi2KBETxaCB2tZRE3A0PgkMQW6p8+lzUfkm5bYNOjU5QmXBoq/9kvkhxbaYt/g9L+bzs1t0za0XYd1lkCkS9Io6cJeAZi9FqgvxhHDLxIbdEaMHMW8TaI6ZGgXkLiasG7EpMuLhtjmGsKSs8oi2eVYainfdRkl06IrSlGMRWKfeyvDuxADddOQUpNjjATwDD6QdBKJQJDrI7WqDPtUBKsUOjMCZICQb4SeWZqboL6RS0nK9ZhUX9pK8xHT3/LJq825L/3XleGJ0pqoQRmFLgoKm7DKYuwwQ39SdxilSXicyw6pWmV6Z24UtRJX8Nzc5LZFmvTkr/RXL2ixJFAaMagJadMEWmsFycpNpcqDdXnP5P++apg32WiqP8fFTC6F7EzOVeMWQh5O5s4tpd7FMVEUBcZIPI7EF3Wb509Rah9tLQSPz01bn6GmcyMpLs1xExPRB3lfSV375joP/oNQewV0CaicFWisw+kRvbmRNLUqI8aiJUSJVp38Gn/f46eiaQOgF89nn06dDRTKqqAqLZ0PzFcNIAX5et5yMjvBmIqkDK1PdCkS1QplNKUzTIcaZZdoErFtUTGQvKN
00tVWRUE5rKh1JZxhHzDGSvHuO8kWygYUfSCfI2RESIkIUixCIDsPGvIBkSIm1pQmNzlo0E50YCGhAqSoUF7ho8p0BSVfmBwMmacdMWaOsCJawBhZ+DFA8OL2k2C9WpKs/GxS0KZEg6Iw4lJVoCiVwURIIUjwpO+IqsUaccPyvhNImJLOSyihzmgnaNnQgjRiPrsDBS+5K1VVbihxJBGEqpjQSRZOHT1LA3YyJsTE5eWatm1QCgZFxaCocNZhtBJ00mQ6ZnYMclascMWBJ2YoO09WYhKNW85QC0kyWBrv6bc+me4kQdrydGij9QtR+NJWUbiSGIMUx1Ze28fEuu0wRlM62eDamCBIpgtGy9QpJbRzRC/IR7SJrosErQnK5O+BHo60haUsC5mIOofv+piH7CiVqXelNcTYEkIn0H+MdCE3oJlT7UMQigV9lpA0hiBUR6OU6AhSksM0eJr1Sl5LJ4xWspnp3ilJMm40GmPYZOjY3qXzheWrtSaoXPBokzfimGMh0ob3rZBNWiZN4owoG1/IFJyAs5qhMVQ6MTGJ9uKEyWRMHBgK7ZmdH3Jw7SavvHTA3nXH1CSGUfHs+SV6vZIDJDlMNHjtZWsJAR/lnlUqkLT8vfNzYrsmNkvwNcNCo7WnSC3K1/iocFYomfW6po4NUReUzqJczC6W4miWUhLbaKulYNGa6DvavEuU5YDGe7RKjKqKYVmwvDxjMTunnIw5OT8laoPvEqWzFGVJ6mC5WjMZj0Sgn8XwKI2PkdZ7lPcis1KKsDnckDXYC6yTEmv/ogTl0LZgOBoxX8zp6hWTQcV6taCqSkbbO0yHFVvbU3784Y+5+9E9lqvIZLrD1nDKvY/vc31nytsv32Qw2WblA+t6jTZQ12u++/3v49tzVs2MajpiFQOvv/YG1/b32bl2QJ0UJycXPHrwlP3pFiMbCPNL9qclxXSbSODu3XvcvfuAmzdvMp8sCV1Nulijy47xzhinIM7PoYnsjTVnT+9RFkOm0x0WsxmrYGnnLfPTJQO7RTRrjs8PuXawx+l5x72H97h9c5+t8ZSyKpmdPmM0W5K6wPHJgnK0xejgFkq30B4z4Igb05ZYThjFwPn9D2lVx/bemNTM2N6rMFtTmnlHOa7QI43kwkleZudr9u/c5rM/+0UWy4bxqkJPd/HVFqFuOVdDLoPi4tk5R88vsaGmW59w9733KZPl2tbnSUFBNeRSjbiIE67vfY7P3yh4W93l6Ps/5i+/d5dXb3yTia34y09PGA4GdIeJnXKH//Kf/wveOXqH//F//h8YF4qmnlEkw/l738G/tcMP//x7fPT9J/zuP/tXfObnD/g3/+7/YHva8Ytf+RpvjTV3/uFXmOztMRpOeOP9v6ILiotPn3PIfe59+IDzM83R6T2UMbzshmy/tI8dR27tjzhbrZlWB5yerNDjMRNa3v3zb3Djd75GWM5oVqfUKYI6YGhukTzMT57SHj1hf3CN5qVr+HGCZokNCRMcTXfMTFna9lUgZa3y1X0vU5t+h4qk2OXiVCiwmDVJNcAagoTOaxxQAgWJgqgcCTlrVUos5mvOzs8Zj6ecn57zox/9iOOTY+p6gdLdpoRJyNT9xdKrp9kJg0QRlOKkidxbzHj67B4/+M67PPzwA5YnS9L+Na6QQTkrJKxYy5ePhKZmOTvhM2/eYn/vFnfv3eNivqTuIp1PNOsarQw+ahqfESClcVnjTRK9DlGo64TE+ckZf/bNbzD7zDPeevOzjPZfAlX8zVot7z+bYV0ulns92WaQmEQX3nQdi/UKYs45lXewoc5BNmTThi5GQnYWTqQc6ZiLZXrqoDRFOojBSmkM1g5Iocuab2n6fG6YFEIHDN7j2y6jexGCSDJ6EEnns0jnBlIrcZO0/V4qVBAKJywW3w+dc19vdPYdULl57ZGdzXGv2NBLSUSlsgviC3lueQhMTGJ20TdG6kpLFrNZBrp3r+yfOw+PVRK/A1VilRdsT6Ws7wuQoCwki7Rnh/QB18ban7CcN1ma1P9bTGxQshBj9hjIzWtG6KKS2qlvugVElKFvWRQ5EqBHw9KGShlTlj3kFbO5j2L3AhqruVpYMnD2vs9QE8DAWmn8zCbQmo3BiLUW4wyD4QBlNN53NA2Ertsgq0pfZYJuAtP7bvqFRnWz3nPDFzJtNxFQ2b3baJWH8uI4K6hgjxzGKwQxU5BFBvT3u+b+dDRtGT0QM5Wch5GLPe0Mxjmi1ihfEXWJsgWxaek6jzYFg8GQoXW0IbCqG1lAJtG2S05PFqTgSZ1HpSA0y6qQrBCVSLMVnatYdQFfr0lilSQNW069V4hDnVEJR0OhAkE7gnJ4lY0ulKjqEh4fPSZ1GF8zMAajFOsYSQG8F0jXJ1mQ2lrZA4PQDlOQO9JoJy5GOZtEGYNxBh9kMavsGFdUoquojNA7+wyr1jesu0iwjvWyIUSPRVF3kVIZCmMkOT5FsUY3XT5QokzQcuOjZAcEhD7orCVkQ5a29TIxSjKl8l2Xb04p+AtrMQrccECcjLFaMxqWOKsZjQbSXMTAaDigdE6osDGwXi7xXZOnEUKf7Lxscp33kDO6SpMojCYqg8cRfEcM4uy4WjcURcGGA83V4uonQyEXwJtFF+ImNLLrPLaQrA9tLa6qKEy+Zg6CRigk1uIoqAaJrjZENF2KXCzmLI5P0crR+Mhs1VCrguPTc4rSUhaSYybgWN6tkzROReFwzqGizw2maKWUsgTfESL4Fw48ow1WG0ptKJBJZ///Ym5qtQKbuekxROr1GtkwRTPVZ+31e+PGMlv17l16M+GCK/es9MLkS2W+tlAdYl7DXvRyncQ3hBCxxmVkVkxNNKLdrJTBN2vu7G3zhddf5WBrzKRynJ8eM9ka06aEGQ5I1vHZz3yB/b09UtES5+ecPXrKH/2HvyB2DW1raDtP16yZpzUxapRP6OglekGJ+YrPk+ZSJ7YGjsmoYH9sGQyHuOGUoB3L9YqmEdqoqyp8EqdVHTwu0zo2+hIMKQacUahkBUGKEautTNzyejdaE7uW4VTQnMVqSWw1yjdiw+8TqZTG0CpN03lCTFjnaNYK37aoZPDZSKfn4/f5MyGGPB2UqV+IPtMrNW3rMdahlKZpGramU5zRnF+cUS8XKKAajnj1zdcpq4KyqrIO7pjZ5YrRcMKTVcNRfURqVlRDSznZxtQ1o+mEZg3RVRyd1/zx179NILC9t8vF4YKDa3vsHOzx8PCQk/NLLs4XVNry2o09JtbT7ToGtuDR0zk//uQ5R8/nYCZ08RkH14bAHGNrKn0bF6A5OWN5uSYFKLTm7Mkj1M6a7Wok69eC2Zsy3DugW9WogaIaG2zZ8uDhCUfnz2m6mpPjc0YDw3BQsr29RUiGw/vv8sZ2R7s4An9Oqo/wyzNm53O2b1xj0CiOHx+yOgGtOiY7E+zlUlD1WjPEUu4nQuNZLBbc//hj9jGs/YKX3/hZ2HqNdYRxMYVywZtf/FmOLh7z8NMHfPDxGSpGdkYlW0PHK9fvsHvtC7z2esudz7xG+t4Zc56wXRU8On/Cq9MFL+9dR2
/t8M1vfIfVyQn6tS9y/uAhzHbR0wE//uA/cDR7hkqCClSFJTUtJ0+e8O6ff5MnD074nd/8PQ6PLqmbEf/1v/5XNLOWpCfsv/0Vku3o/JrTpw95eu8hR88vePePv8ue3eXm67/A4cPvszPRfOmd14jPG770m7+F3dtjOBxzslYM/B5P7t/j0+Pvce+H32G0H7mcPefuD57y4OGHnPpzfLHF2H4WO77GfPkYPXvOP/2Z3+ZgZ4IeabRpMKdrWmsp3QUHi8jkfE55exeMZ+N2F6F3OAaRGizXC5IGN2yxRtH6Jav5KbFbE9oVMUHderQp2d27wWC8R2Kc2yw5A1fzFWeXlzw5fMaXf/UfMNmecnJ2Qllprl2biqGi7JqbQWFP7+vRFpXARGi14sHjI7597yPuffg9ju99SvvsOd1Fh8umZy+2fTKnFKofKtKcn3H87BHVtubG7QOenR6y6Fo0HmsAVRK6FhMNTmucVngtSLzoxLItehKGRoqJe3c/5ff/p9/nC2++wn//3/53fH73Opjh31229SBbtjrvWRiCiMo5UJUl2hjaPBh90Z2vb456VDJmhoBOKaMd0BE2hh8p/6l7FC0gkU7aUGhD1FeRPz2VUtg03QaRSSllGmvExOwqWBpphKQCB6UoyoLpdIrVJUZ76mad0UTEIl6Lnoskf08G0aZHya61KRJVwqtE1H2NK4Nag0IlcR1HIxFMMWU0SeKVUkqbhieEIIZ4WdsPYqwm8UV/7YKo/D4y9S/kv0nPrzbNR0hBBurGUBXFpjmx/fnUn/9aTPf6kGxy8/UTLplaslV72/2YetfKHheUmqRHvXwI+K4TY5iMqqpci26y0l6g3oKwkHrJRd9RZVB20xCnXKdcfRxX9NE+s9F7L/l9iU3emgTAy33Zn5nGmCzZeoG+Ci+c9WzeRI8YqnwNFGKYQxJgRSeTm7V8vTNCp5Hrp1TvMtm/xv8PkLaem5sQaDdpRQyJ+XJJaROr9YqYFMtWxPZlqYQ+QaJQkaFN2CJrkVJeNFa+p1vXpBDFejxbOEcMUZegFW0MkmlTOpyC0MoKU1azblf42ItwE1YD3QKXHKkY0Kqs0dGOgGWdIERByopoGKVOil4tdEbtJDOr8eIc1EaZ9GiVxGjFOtAa7yM+eEKA1LZYpQWhayOd7+iysYKJHmdBWeg6OYyCMaRSEbU4KpaDAau6hralsg6jZbOyxtBFyVarO6FUScGtGAwqykFF3daZ9qfQWEGzfBC4t6hIWswouhBArVksl2hjiCqSfGKQaXhFUYgGLgX6UEiRGUgD0a7X4FucVRgFhYPKCTqoEASr6TymKGm6wLrx4gbVtfgOorbUvqVeLrHGUg1HotPZNBVZq+alYNda55y2uJnmhEx16OmTG1TJaqE+KLF/LwFtooSSKiVi1RAJIeF9QClN23l8QnjmWhyllDHMFws+/uQuisCd2zeA3rod0BpjLb6V0PKYkbGYBBn0vT2wFnFx1wnFU5ArQTY360jJc2lrrigY9JuDEfoMmR5pZBpm2lYOON9tNuU+SNSYfvYj/74R0EaZQG/CMVPCOCfc+BgoCpl01d1VYaCzCY2mj2AQzWfvIlYQ2R84RibCeoZzQ27tjanbBkg45aiqIcuj50zRFFuasdGslWJcFBiEBlwvA03bULsg3gS1ZzosqMqB3A85kL10jt3JgFdv7jNxiTs7A8qioNMlF8uG83lJ0w6JydN2Hc16QbtY49sApiB2Pa3D5mIiEVqhaLS1BHfXQdB57cTgRChBgdViTmkd1igW8wsWswvqkFDK4duaWkFZFrS15mJQMqwKOZRTQCye5SD3Ma8l70EVQm0lH0DI5FoGFCrHooj7ojWFBKP7lqoQauR8tuDTR485ONjn4Pp1tre32Nq9xta9B7z/449ZzGeMxxO69ZzDsxPc2rCtNKuuYe07fFTcf3zKYtEwqoRqslrPmR9/wmj4hGoywNvE9v4e62XLk6OH1OfHXBsbTh41xK7lo+OG+0cXTEY7tJSczxqsTTi1wrAiXYduMUfVGpYDFrMFipbL0zlmpWDHYxTU65rnz4/YWp/QxAGXiw5VZidUH7GFY2vnGqExxHbB5cWCvf1rmLUHM6M5f48nd3/A2CiKaBkWA9Z2zuXTexSjfUq9xcXhMfPLU/YPIrqaMdmeYrtzonGwXRGW56T5OVv6iGk5YaVmwiKpJriyogsrkp9x6/qEO+Ut3v3WhwzHL3FwfUqdIiPbsVjOuaw/pl494ai7YL5eUpSBlJ7w6dN32T+b84u/8I9ZxoI/+sZfUbeBUXcXux6CqbhYnnL/yTd49+5T6mqP1DjqsMSXkX///iPMtT1+96u/y/WdL7GsZpzElpPjC/zRip1bQwwDVNymMB0Hu5qvvvOPefL4Pdq77+PayO/9N7/DD/4M5ic/4J1f+jyTnbcY3PgsenoH2oKX3YDVReSLr7/B/uGEuP4KD59+i+OjY44+rbh/75xH5x+hdMve6H1qXmb81pvEqmD8+VuM1Ip6fsmjx09Y/8UTup0p166PuTMpuKlmDPyYZDwkTbNcc/joCaTIzev7uNKiBiVlWZKUDF8TCm2mTCYTkhfExaiUg3kDurSbYYjOLnoxiYTDOEdUcOfOHdxwQN3VuFIzHqicKMiL1ddPFtWZLqdiIvrEx+/d44MPP+ToyUPC+TlqscS2CtoWfBQdZ/98PWSEuOg+/uhDVosZDCua0Il2vixxykoObB4gRy/74KoJMkSzw411uUTh9fmVlhiERRO8F9dKFf6Ogu2FChNBZ5x2mEaQvJgyepcSzrr8trPtPHl4GhOBsMl1M7nhs1qyNX2I+ORJRpBqa6zsm4gmLyopqbu2pY6KOjQbBoq2lqIskABku2kEnHPorLFGZRqjiujBMDtW5wFuHmSWZUkMXaYg5iY8iRO31I2CFulMTQxWETIQFG0ethfyvhO5aerdGyKgcu5XkoZFG5GdKKQekPrlCrlU2bM/xLBpRHojFNGVddntUmOcozSGIl+PrKvI4u4k9/UGCUygFda6K4OQXlel1abR7hti+d+Rznt831z1OniuBr4piTGYzVmhIQTW63VG9X6SBtgPfk02phO9ZB4Ia4MxVwjfBpJ84ed6LVlMLw6RX/zsriz1lVY/8XovIotamwwmJKGo9nTdnoaa2HwGVxPuq99B9ookIhV1pcPb3FshEpXoE3tEWNy4r9hRfwsn9icePxVNWyLRhQ4fBHJFW7QpqYYTyuGI4FsAqkEpIsBWinwVOnybWDVnWCsZEIWSYqWtFUFZyY/yMsEw2uKT6EMcFjcYgKlIRLZGA6qtMc1qKcWNKejSOReXywx7SndcKgmMjQQ0AYymToGQVDYosdlMpKVMARPl1rQarNM4B8ZD3UWaoIjJEKNYnbZRcrDQZhNunNkAAvHGKBqs1BGxkvtmJWcrJUXhHBE4vTwXfaAydJfLHHCoQHuqbHOqtGRhoQzWGazqMCmRgqeu1yQrUwDjbM5kuYLppZG14hJF5nNbJwJ+Y2Xj0eKGo42IobVS+J66pYSyWQ0y4
gIkFTAWunZN6FZi5hBko2g7sWqWZkZy7YLvsCQGgwpblJgOzpcNKVqqagBaZdqj5HMkZAIklsLxBS40WYyrUVqsz2MKjIejrN2SLL7YtdTRY4zw45WVnB0yT5ykMkwvTVbbebmOWmOdwyXNelnTrGvqdSv3ZRTny4ghKSO0CKOISqIG0JpBWdKEwGy1xGiF0eLY2ETouo6E/Bl8QFkrB8kLVAfrHCpqEdBGcZRs28BkVIkgVgn1t0dmdBbc1nVN13UvHNDqJzbIlCCqlBG4fnO9EtWq3Dgareliu9nI+0leYSyVMVSlw4mnNKmLmOipNMzPTpj7Net5ibGasqrQZcnl0YrF6jGOkos7r/DSWze5tT1iUBSMhwO0VvgQWLeN8Oc1mBCZDgbcvnENZwyzxZIuQERcJ8ejihsHuwxomVayI7nCsqg7ClcwHI5I0dOFThC6EGnb5UajHxEToX5CaY3GFQatS6GlhMD52SX1vGU0GUNKBFtQJo0dZAFyjNTrFUFpqsrmwUJHs/YEo5nPZzyLHqMSw9KhFUJHHgwwRcU6RkLbsV7XG1oqWWSdGTP5kNIbrYHKmVKikZNGFiNmPc+eH/KkXbO3s8PBwQHlaEIbFd/51rfY2tuhXVnms8BstaA9PqbuopglRbC2IJiC2kcsSYyijJg0+BgpywGr5Zrz8wVaj2j9kPNLz1ndEtQR68GUVdQMiyGzNrJqGppuyc29IXuTfQ7vnZLqDoVlsep4+PgxhkjlNFUxJvg5AHXbMVsmJjsTzmeR0wvDol7x7OlzZrM1F7MHPH4043Nv/wI7k31G08TRkxmH8yXTl25wcThHd2uG4wmj0TbhsmUSbvL4+QVRn6PMiGt7tyk8XN86oBo42m5NOpmjoqZZrYjrFR+9+z2W6wWHlUL5htuvfokU1/I7+Auaw4/45Dt/SNouqZTh9kuvcfD2qzw6POTGtQFP773P+z/8gPF4mrV8z/CzpyweGFbPjnk4tLjiASGBby3a7OMXO5SUaFtjrOfRk0fc2X+Fs6NzLkiksqKtl/zsnbf5pV/8Nd769d/AVbfY7hTz2Uf84P43eOn1A8zWGQlLNBrTFMTFdba3K3buvMLLd27y/OPv0x3+BV/9Z1/FD34bZ3dwgzuQKpIzYD1JJ9y2QbkhO5NfxPlL9GTNzvZt5q8dcP37f8pffvOcahc++8s/z/Ix3H14wcwZHjx4jz/5g/ewr9xmURa8Phzw9mde46LrOHvymDo8o9laUd25QVcHFhcLTo6fMxwWuPFtlJPJtStLyHENiqwjEY6dHK5th3bi/CtuwgmlTLZ3V2DE7Ge5XtP6wHK5pDs+5PHhM159+xVu7E5x+SyTrJsrVOOqzslTdKVoFy3Hd59wdv8ZqmlwXczWdDLI7DVckuck6EtUHtAQOgieGOH0cs4H92ruffqQi8sldRtAGXxTU6/mLJdzYSYVA/Qw4coRfa6ryhtDUpqYLKJf6w0ccof5d039lWSAmozC9HlfvUwhaST6JyVS57OaIQm6g1AH+892MyxsGoIz6J6mFhPZ3z2jYHLexnweoRVd68XnMtMSTW7MrLXZFMpuhlv9IBalRf6QTUWSMRsauaAxYt7RF+AxinlI0qKjIjMZepRFZR1Z0OCToDKeKA2AtYKkkZEdY3FKU2FkoJb9D2RuL1T7TitSNURXlVwnrXO+rLoq9LMm7OrypNw45eG77puItLnv+nvvKkD6Bfofgui1eXjbs5A2z56kiUqbesnQdR2rupHInp7ZE8hadjFLsXkATv4cr5qTjGJlxDfGq7pBcfW++hitTTZeVJvfJ+UG9D8DTL1Qk+RGLia6tiVK5Zkprv1a/ZtPpvozsofU8qOnBoee5rhZGn1zHje/k1YSJr9pwlPKMVNXYdo9Avef6dl+Opo2gEgksz6zeK+kHIwZjCZ438oEvyjpuobl7JJu7dFoYuhYrWqqgXy/K0vWTUO9XNPGSGlF+Np2HcSWtg0o46BuUPMSW5YYBwUN0Wqib6UwUzkTychN0oWA1ZpKW9lMI70cFqslxT1ERUgWrS3eVCxJOBXQyqMJqNBgFVQklJXLs+4klyMoWfRBIyZiJW1wAAAgAElEQVQjPYc4RDxCEQwhOxRlBEprlXVKClsUDEcDlLM0M+GvD8sBvvNYJ5e5Wa7F6S8FmpTwVkSmddvimxaDxMn6tsF4RzJaJHNJkDmVqaC9m51PEbHINbS+IadpiCFHTJJpR8yNuCwYQa4CbVNjk4QvC/yfRG/gk4iH0TkkURMDFLaUT1tb6i6KkYZSqCjBnEaVorfz/YaqN42LoA6igXtx4/gbX5tNRKZaXSu0UoODUEozHxRdk6i9ovUOE0SjoJWRYZbSWFtQlRUrUwv/PSl8asVpMcnQNHSZYx5FGxmTycYmgsq2vsNmyu+6bUFLGKVY+EJUPVUCFvM5djqmGk8k4DoEfBbYyrVTObw+h8MnhQ9AiGI9m41blDabgl9rLfdNT4PghU017ywv1iS5fWUT8KnlQDfGorUgdlpriqJgYC2FUhRaKLRWGxmCKIUxlnIw5HPvfI7RyJFSw3A8wLoCWwxo2sR8sWZUTHjnnS+yc2PMycNPuDg9QznDYDzAzjyJBpIY95RKcWN/l5dv3OTi8oLz5gwfwbiC6FtCZ1Cxw3draqCqBnJokNel92gdcaVjMhmzqGsaH8XgRBliRKaUOchWa4VzCucc4sysSF3ibHaJNYambqh9zUnbsShkqCL+aorSGUoLzlqiE25+VTqqosB3LZ1v0bHAIprM5WoFacLCt2jXEpMD1KaR977D5iiVlKNJevqyzpTrqADTG0EpykFFUZUsmyUff/wRr7z+BnsHN7l95xbvfs8y2prgHNhCEc6SxKI1HbYylEUpTlokjFVsDUtuHewx2S5ZhyWnl0cM1ZgQNZezBdujPS7nMOugtNvEE8PwhsUnw+HJBXjPZGAxrAUF7yZ0qxUuatqmIzrFMoKvG166dYsmlaRQcHh4QhcUTVdx+cmcTx4d02C4fWvIbH5G13p294aUI8vh+SHnM0U12sKGFq8VH398j73pgGvTMg9nAperJT/48SE//OCQNjkwkdevb2O7NY1K7O3sMCwdbV0T16c8evAuoQs8+OSBFDJlzdnigieLP6R6FCmqHXYrw8UnP2I4P+dyEXh2suB4YDh7DLPZBUenM86fP+LscsWye5027DKpBsR1TXu8Yqj2mLVz6k7zuS++xq/+2q9SlDeJ+iV2VMtH3/03+Aq+e1czGBxQFTWqW6FdRRMjX/zKV/nS134T60qSu0Rpjx6P+NKXf0/YEWZEi2ONZ2gKZk3NaDhkMNxi++0vs/3SW1w8fEy9HDHd/Ry4CckaGhWJWmIpqhRxArpTamGG3LzzNtruYN2Q3cmv89LLO5ytH3H7577AoD5A/cEf8+joAfuDbd4fvMTrb/06k90p7dGH/NX9d5nNGnQoUZMRO21D5RtcUbJ3/RrawOX8nECHVVb29MazmtegBgQiQdUo21KYiE0BExLdWrPqFJd1x97BddHERwmEj0kR
W6Ejv/bGm0ynU+6dPGX/5gG/8U++xqs3r+NSQNObgCAGJ3l/7PV2faBuagP+fI1dBJzShCj0xaiTZJGpflorm6tQ3LwU3YOC66+8wnA05dPDuzTGsVqsefTpI9aN2KkbrRhVllsH13jjrbdodcGzizmPLltB2PLmLW2ZRpmKmArqZsVy1dBmlsXf91BadGjwIroS6Zo2Kwc0KkQxOFFC849NJ6ZZUWhvSkl523QdgYTCYnrTq01j8iIqI2dMVBplLIPRiPHAUKiEzeyN0DchWedEpjGGFIk+M0NiyvRIObM3TVA+p/qA7vT/Ufdmsbbl+X3X5z+tYe99xjtV3Vu35q7qbpfbQ+zYaTp4wsbCkCAUCRASQgkJLwgeeAIhhJRHArwYgYKEEFIEPBApJDhKHM8j3Xan7W5Xd1ffmu69VXc494x7WMN/4uH3X/ucKndsCDyEJZ2qc8+wz95rr/X//36/71QQzqwVWC3Zq6ropbIgS6qwkazV5Fw0cqVBEInG9DQku8wpQ60UqTRsFOdwlTUpiSN2alqCc/ip4dCaqaS/1BHKu7dFjKImqyQNTRS2VmIkT4TJDCnmYqCSqeqaqnKEEOi6TuRGW4MQafCMvtSQqdJ0TuiUvtLoynt/eT1dXl+fGlzkK3fE5DKpLt2mJ9NHqSGk8dPqss7dZgUzEZen5q886pXmbPp720axfC1GQZ9NCkLLTPHK9XvZ7CqmmfXlY8UrTW4uX5uauumlxiiD7Ol6JQubSsxppMZTxRl1OrnT350azD/p+GemactaFqmsFD5n+jGw3HTUlUOrQs/KkZA162SJqqZyNbpKhHGEusU0e9i2oTMen5eoGKiaikppCbP0nhyFwhUKa6puG7TJqDASvC8p54beD6QC3cec8AmqrBmjwWDQWhbGxmisyow5MIREyILKRWNZ6ganIxUdLg240KOJ8nyUAyw+SdOWFASEUhdBMpe0IqUgzVpxyNFl6qIBYiT6SEChs6YfenK0RKXwMcD6AqfAVTO0mgSimco6sgI7k6wjv1bM28jYL6mrimvXDql25pRUKBEFJ3ltYiBREUNk8IGENGcxs6UFoo24USIXeZwyM5QsXM7KBDP7IA3VMDDmJG5qClQyjN0oEQgxSMpBVRXEz9BUNc5kGpPJccDHhDJWQh3HuIXEJ8oeUxOhhMCScsamiT8d8TFsp47GKJwzsvjGiB97RgK+dVRKM+RETobOGgYabIyMYWTsPSRNThmrLZWrGLTQwwiBXAwoKuuwWhDgSCYGyYoTraI8h23zpETHJxECjdBFdS4ibUOoHJvzNUdHR8S64s5iUezbJ72b2PVGwIfIuuvRSZrbi7BGGeTaT5oxhq2TZAgBZx1N08gCir5CKaAs5jIpNOU5JzLRxMuJmFQaW03oROfQSjR72sdt1o0xlJwdw5g8qm5Z3LpFu1uVHOXMatMDmmZnh+eee5lrh9dZvPAcahYZn1iqgx1mh/tk84CqbWjqzOpiRa4ztXPUZQ2oUDitObs4xzUNOWhiq4ljL/dq3aKdZTOObEJgNQyMwVM5w3xWScFHlND4sgnnXDY3hF4xhYnlJBt4irIQz9sZdd2gskLnjMkaHyJjDEQD2jnquqY2hvmswVaVmMtoxY3r14i+x3cbVI74oePtb77Nwyfn3H3zh7j18uvURigr0/TbFr0sCdF7eo+ykCeL5iQDB1UyebSrUKZHG03d1ui8gBx5dvyMs9WGo9NzIgk3axjHNe1sTnsuzmQuS/Hm2hZtLH7sWbQVh3sN+7sVs8MdhlUibSybYcCZGq003eCJPjB2A/v7u1ycrjhw4ogZfaLrezZdZmdmef/JKU+Pz1nULevzNUSNaiuWy56dpiY+3rAzB6s6Hj06ZdV56vkBysL9oyVDDFysO9548Q4v3r7N/Ucf8u6DR4xPl1TOMdu7SX9xxqOjnosuM7OR11/c54e//xU+fP8Rm7Hhj95/ynuPV6yGwLI752h5g0bDH350xsHOgu9541V2G83MQsia1WpFVbW8+ebnORkHvvIP/yEfvvOrpK9/zMuvfIF2iJy883U++8IByVX0OvPw2QfMuiV3btzh2dkZAcfNGzf4ra++z3fuL8lhg8dxctzx+TdfR42POH92jnaaP/czX2Rvfhszv8nZ13+Li0dLPvcv/HO89K7ll/7u29RhzfXKc7xes3uw4Hh9xEW/Zt868voeSY00s9cx7R1UcuAz2SWJZDGZa7cblHUEq0j5BrY5YPfNl8jBkGMlxlrGk00sU2WhDaM9OQ6YnNGqJbs5Wde4XU1UFc9d+xKLzRnNtRqzucabP/IlPmvf4vbdH+Bl9SLN7ossT1fcP4t8/OScP/vWF3jrM99HbStctUGlNRnH8uyY9z54n/ce3OPxyXM4Y9hc9Dx9eMKj+8ds+kgXB/q0JNuB1mVaJXv7pouc9oHQzPh3/sq/y1ufeQOLJaOICeY7LT/xkz9FyPDKnTs8/OqpoM5Dz2Z5wkHToLHg2VZWhbRFWUjlZtQKFTN1UMy8YcxiTpWMwhvFqJUMQK9wK8W4IlJsC7FKYVyDz44/88N/nsMvOf73v/13+NoffgOUYtHUNE4RNj3Lk2MuhsCYNZVupbFRSGFZEMGQICdNUJaYpTH5NFL4qYpNitqrDcRVYK6gNevVmrHr0VkGqn4M+BCISBOUomjoZ7MZpqqIrhGK5bTXoLY0syIA2robxiwDbO2MLLKJrSt0yplxGnaWwf2koTJKC6qZZP/POqB9EORNJ0yW/U6kIaJdy1lj6xqK0ZtRk15KagtVRHih6NRtYSyQ82VWWcnpUgh6FCekjcuBccxSC0Q/CsXSTKjY1ATJCZaYHxniT69XgwRGZ2F3+Dii0wg6bSMOJnrkpH/OsEUV9RXDkakLysg+p5TantsQpLn7NBoHbHPcpt/NpcmeLoxtYyKcwCsN1dScqrKPFt2ikrJPl+Yt5ytNlBKX5ktzl0vroavHVWqkLmyYlLLshxN6eXnZMrHBmAYnXEEJU/5jj10+k+fH5BwrwIq8bBnsXqLFRRaTyx8s1/lkhPL/C/fI7ZMsF4b3gTBuODoRulfb1NhQ0SUY0QSfINliLw8+OFiB8j26yihrcbmhTh2xC7TGUAEkivuhLKJW660VOjiMMsznC2zdoofIuVecDecQA1o7ojYk06CMLRlq8uE0uAyugjFJEkdQmtEIVVLkUorKuGJFLCHcNmnaLIPcoVjAZsqkDTFCCblciEWIqpUUuyGIWNe5itmswuhEF0Z88NimIeTEuFmjNWyyR4Oct5hIqZIgyGTBiCXuJPA8X16QPvqIxeE+9WImRShyg2mtJYzSWmpj0FYiAIbgGcaRs/Ml681mqw8zxsqkymiUMSW4OhVNzaU7Yg4Rg+SX7cxqbI703ZLKwFDchGIs5id9Jw6AeWRWW5wWnV20WR5HTc5NZYK2nbjIFHC6AYFiSCI/p4qphM6ZtqmorWNWVVTVLlon9uZzyWiLkYQYWpACYRzYbFaslgOafRQwDiOb1YZx6DHR0Y8jYx+2lrDRx0IL0WQ0sUx
iyGWtNFrmSEphnRUePkgmmxUUBQyNcThX0a/F4GNqirQRbYHRutxTCh8iq/WGFDxKZfZ3ZjIJNWKgI5RkC5OeoNgia222COF2E53oNVuiTWYrRDclg6VQKWJpno0Wd8XgPamucEZthx8aXZp8Ba7lpBv5xnsf0u7VuNoSiIxjxKgKrc6YN2ccXF/yzY/vM99RxNMjDm3F3dde44XHF7z/6I9kkmYMOgXwgcbKoEMr2NmZsx5HxomCgTwv2wjNpQ+ebFtGEscXZwzeM6sr0AuZXqaMc5aYFdrqQvPhcriiitY1emLn8YMneKjrmilcU67HQgPSk2WzuG4O/QY/DrSzGVVVYZuGtqlRlUbNW6If2azOiX7k6OgJbvGAPmV29mv26jtsNr1MrnMidCNdHsVolUSZ716hBluhg2lLLIiC6GkDyhq0dewtWoaUOV9dEHKibSvGoaJKikqLW2yjDSfnZwzjiHaW5Adcthz5yLB6ij25zmgq6nqPYey2oaPnKwmhDtqzDIkQOvTZSGUs8509QHOxOuPazWsYAw8ePyKN52yWPX2XCLmFnFm0Hh1PONyb0TaWnCNDCBiz5GK9JrgWH2o2XaYbaha7t3l1tsfZ5h7fuveU2czw8FHH8njF/Y83aLfD2D9isdA8Pg18551jcLvodofdm5pwvKYPmvceeYboqYxmt+lYJctuHahVx4/96A9y+NwBv/9/fpmv3vs6H6c9vnmUMaZhvkmc3j/i3skFOcHjD+6jVh1usY9Ds3q0IXDIXB9ClTh+8g7nXSSdzRg3S5bBgAoMTjOzh9jlkj/8/W9xNA78xI/9GK+/8jzRP+DaCzdJ9T6Hi5rd4YKfeut5fuPtrzPokcov+dE3bnJtDuH0Hml4m2RHajvDpWuQDslRUyVwlUKnEW08WScUFSpptK6IOhKtJXmFQwyPbEwoLMFYVMxk4xgsWGpU58k202uLI/Po0Vdo57c56Wpe3muxWPZu3aW98Rq+2qXaP2Z0HfG5OdH8AMfvJU7W16jqBacfPiDpU85PHhBty6/95lf4za98mftP7pPwZO9JQyasEnnQJAI+B0YiWWUx6SioS1KZ3Fp+9i/9BW7c3EElL7brVopz33V851vf5MMnR8x++ie4dfMWF+cn5ORpayeATihFZGlgBF1T21IuqaLnVmLWYLJGZ6G8RzKeTMiiHSr1ImIHLjmwGQNWo5uGg5s3+czM8vwLL/ODn3+T527c4Vd++Vf51je/zeOPHjB0Fwzjhq/94z/kxgsv8aM//pN8+Z2POF1t60SmhT1nME1DM6+odxZkq68UlNMx/SNBnkKQJzRDtFFKKSpXgS7ulMWVubIWbStUo+mTNG0ZVZgwjtrV2KZmUBblLGFaE8veTZ56WKFHSlOVGYJn9BJVE5MMCSeUa5spa02hGAp6Y7VFp4xKgZwU2U5D+Cs6eK1xzjJ4CoVR4eqKWjdlQFeyzCbWSaknojGytieKmUYuzpIlsywK82hMEoGQdC567yjUWBRDFvqe0wptZ9LUcIkaqUnaVZBYVXR+usQhSA56iVMoToXiTlgGqjpvDTCufkyAgDSQf7w5McUUD3VJ8UuFFqu1eDfEsq/lUosYo9FGF3fuq0ZmU72Tt0N2pSY08bIzmJg9YlrzSZ3YJbMnb+tmFa9SHS+vzatoWyaV++qyrtVbBG+iB8O20ZxuQ6W2LJXp70/6tlwaMGmdDZMedHpNYj8j55acSVGRdS7XKuU1TA30d2s9L49/Jpo2hUIbgUCT0YwoMhJibVxNtWgwlTju1NmQoy6heHLCQkz4mOTmUIkUBwiBkMQWfYOXZiUEbCmAMwX1GZK4ASIGDG6TUK7HR7joBlYJ9BgZsMTiqjh3GnLERGHFqpxISmPRGJUJSlwGUxKzCJTFmz06o1BpRIURnQKVSWg9oq2HDGMqVqFAzgHlBfKPOtERqbUhqUjQAW8sKWU2EumO1VmmQc6glWHmHNQ1lVL0w0gEqqom6oStW7DSEOakSD7REznPngcP3uf022/jmobDnX1aV0v+UxoZsxiA7LlizV/V6KpCacv5csXR0TMuLpYyOWsaiJPtvIh/VRadkwSHa4YUWK97+nEgR1g0M7wfaIxiVgsVVmxlE8pa2qamymIbPPYJ62qcdSTtt3S/HDMGiFoVdM/ITZPFot77S6reZDurlNA00jQJSTKNm88a6lkFOqErx8VmQ9hs2G8dt3bnQjEaWo6XFT5fEJIm6AGNZle3NE1HP47EBOPgaao5L7/0Coe3bpAquy3w9WbEYnDKiYlLVowxCCKZMi5BFYFhJCcJ2JRAckNUmkErktIQMtkWF8gxkZImqQqVDbaylzSOytDM51R1xRgDOYJNDl1ltFszJhizQbkabR2p8xKJkbPQCo0TJ9ZCLRDb/hKaPiGb1hTBtOgCU/KItjCQUpDBB9PvKXyGMSpSsrz34VMePjkl5kjXr6DEbYgIWNwyXTNjsbvDbgXP7864ff0aX/iBH+L6/g1i8HgV0JVBj5nWauaNoWk0AwZLzYx9utMLQkqkpOgu1qTTgXh2wUCGxR5Pjs54+PFjjKkZ2xm+T1RWEbqRECKpbRnGkbjZkMcRlRVV3TL4QDNvyXiMSrIBV3Nx71RstXem0AhDCMwXc7pxwDnN6uKUi/NzbLJcX+yyM19QIQ5izWyGdvusx5FbB/slE2hJOHnIclPxlUcPWJ4vsdrx4ePHzHdnVGPLqst0g6cxknlpjKKqWkI0xKSo2pbIWhDrJGHtAQfVDnXlUCZTLWaAokmRlbZkk2l2DClk+iFzsl7TjZ7ZfMa8dXgyPYr9xQKjNa2GRVNxkXpWqyXrTcfp2YqcNId7B6zPOnIYOV2fsrOYs5jvs7u3Sz92dF1gHRLPTiPrzUDMhtPznikXT6/XzGvLWVxRO0PTVvR+xIwKH+HZ8WPms32WMXF672O+9fiM27dvsRkEdfAx8MH9U7rVhp6GPiiSO2A5u8Nv3Tuh7xxVVHz08RNu7Oywv1fTxYF3TnqCqvC+Z/f6LTbrDa2yrFdrHjx4xqJ2+OA4Wy5xhwdcv3WN4BUuVvQnFyyfHeNjj608lYbndhzh8RO605773Ui72KcPA8EHGp1YtDC4OebMoXTNs48e8fyO4jj0hEfnfOvxQz7++Ih/8Wd/nO9941Xq1HJ2Yvmd3/htVHjGzVt36L+zSxx6XF5zc7flyUdHLB9/FbV8h3Z/zmd+6oeLaYJBQng9UKF0IpuenHdIwWNk1AZZitfsZP1U5WsKhS1WcokyUEsRajGnqDMw9lS0PO5rfvfbJ9y6cYu6tsyev8Z5WqONwt68xZmd8ehccXQc+M63j3EXI/fee5evfP03OH70Pn61BG05Pt8QcqaZ3ZR7rIbsRkwdSYPHL49ZXyy3KJJxjqg0OMMQB5yxvPrq6+w1NZzdh8UeQ6owqmJcPmZ9/BGmXXD7c6/T/f47PL3/AbduvUYzqwUJM7LfU8JXZO8R1CaSUbnoe7RnnT0bHfBJAu9VUtgILsl+Jet1mu5EbGhQuia7lvaF6/zcv/kXMTsL2l
1LZRVvXX+Ol9/6IT7+8CN+99d/i1/5lV/ig/vvo1o4ePEL/Ev/+r/Hx//z/8aDr98TuYXK+OyJ9Fg7oA8tL//wG/zI669yePfFLYLBloY2FWyC+MUUJWYnZkIWRodtamZWMmW1lsalrmucEdp2yqD8hj5EYMo6FUdqlTJJJVSMot/T5VpSk4s2aKtAJ6zTqMpQoXFWymGjNSmqoheXyKFMFg0aQYyZrEVbYTHpyTBSif4+hIipa0wWZ20xpin1RBbGgylxRhqJgUlXmo7J9t3HiKsbslJ04yiUuIyYdpUBqNHymLnoyUXPKIZlQSnQTixLktDmg0f2VSMIVEpCK1Wq7OulkdbKkI0hGmF/1MpKc1oohShFLIYmk3MzTE2HyCQkl0200MZoaiOMjZhSGZizReqstdtMxMl9+5JaqCQXVeWtLwP5kvaY1ERZvGLWQSzNUCoskVzOhxHHTiDryRxIJt15QivFtICEDO1Tke/IAEE0+8rIOZwuZ2ut5PRd6UW2R5b/pHTZ8OkJ1SvIryo/l6eTmCQmJOUziKBMRQqa1i5QJjAyoKLBmopkIerApDpESb7vp/rlP3b8qU2bUup/AP5l4GnO+a3ytf8c+KvAUfmx/yTn/Avle/8x8FeQe/g/yDn/gz/tbwDEEIjYcmFa+fUs1tzWWlyjsDmTgjQxMekCvSqqzGUOlMplup9kM3f1Nu8rxIgfvUDLRqZhSSlCjgyhdLljR6Rn9JF+DBglDnwbP5DHjJkl4miorUY3YlMv6IhcHw4xvlDZC29ZGbJyxKxYeTBK6FEmQaMjrqkhWrrcUStFlxGDA61lYSjW6ZtxkCDp4HHGoJPQ+swwMHox8HAarCuhhiWXLRZr/gmiVtaStMFWdTlPYF0NpgNnGCMsx56w6UkjvHDjFtcODvEEPnj0gMcfPyStOtHLRIiqoGhoUhS3oGuHBxAFcVROLFUneD0FcZSqK4exGqMSdtHilKXVTrL0xp4YE1VdYZ0mdx1uVrMZB9ZdxzBIk+u9p3I1GYMvC6PkrwWwTiZCQPBerI61cMwn56OJ3nDJGddMAPu06FTO4UvGz3Kzobu4YK89YOacUBUNVMbg0PjsGeNAygmXJbjZOIttFVbVNLM97ty5zXxvh2W/ImzdiQo3voi0yZnoPZuhpwOCdUQtTZlOGuVE/+YHoVLinKCaWeyNfQwyYENvnQUnF1ApHRJVU7FYLIhZYiCwPYsh4uoLYhZ6TuaSr65zlMfRmawEyVCF1mCUEUveYJBQZ2mGUwyoOOW4yEdKwilPSu5BrY1oIENg9KJsJYEPifV6Td93TC6+W8ZOBl33bFY9GyJ5p+Xo/Y/4+ONjjvtA6Hu0knPaWsvevKGuDDkHUo50Y8/FZkUk47QqG7zhYGeP3XnD0o8w32XRZ+azDe18T3SjxtK0FcMY6GLHph8Y+x7VbVg4y2w2o6rnBKXYOzwg5xHywMXFCnRDUzdbExylxFY6p4wJMi2c1RVVbansIZXW7C4WXN/fI3qPSoLw+RCwxqJdzXy+Q1tC3rOO+L6j91IMjTHw6Mlj9DPNYnePZGc8OT7i+o1Dqqom9GtMdiWEPqNSwmrYjCOBBCkRKAYFGYxVNG2LsxZb7LGV05hK0/ue8/WSzgeUrRh9QBEx2rJY7JC0ZRhHrM08e7bhol+y7gc2XSIah1GO84sOTYIwcPPOc1TOiC6zG+nHEbXaiLaUin4c5Hmq8rhZhmUERbaaaA3eQz9AGke0sWRbs4kjOmT6ZDl9esrDZ+fszedoo5nv7lA7x8VqYH79OuvlhvOjDV//w3s0M81qs+Ta/gGmG7l954BhPCYqj1UDXbdiMWvwYcMLL77JuF7iMnzj3Sf0qzXONKx84lq94MWXXuH9Dz7myZMT+qBYdx1V4wijgkoXvWmm84oxrdg1jtdee4Pl+oLj0yN2Fi1xtcFEKTBczCif2SQIQ2KIge98+z10VfH8i6/xxls/gh8Gnn3uCf/gK7/GqDJDdLicqf0F4/E79Mry0uEhuXoDd+s60Siy2mC1aIO3U3HliMxAVZycfMDh3hytF1LUpojRtQyP0qUWRpHISlAtUTxLwZjR6JiIqw36YsPzL8y52Z7RuEhyAW8NihnLNdy7v+FUa24815K6c5q8Yb8+xI+Bjl0ePYPazzBa8+Jzd7l7+zafef1Fku9ZHj/Gd0vGzQXr9ZJ3Pkj03ZoYA87IcGIIHmMgKblXPv/Zt1hfXPD+219GtTOWtsKaipN7DzhbPaS9/TmSyhgTcWZkd6eSpbUYGsTsxVwJKSkLdiGDYpJkjSnINpMqICpUUjjjqK3dshJy2YgUCZ1kWJ2TwStNtb/g2mJPhq9K8ie11excv8abB9d48dpfxykAACAASURBVNXPcP3OXf7uL/w9Pnr2hGpxiHYLnJ2VvaEgB0ZjrMYaQ6gyL771eX72p3+O565dJymH2VI0r6IPhf5dnAsF3DLYqqJJLSHK/gBpiybkYl41hoCPkZgCSjtiFuMVQaQUyRhBFLO4eiclmb3iwlwMQlQp1PNlfq5Y7cveqZQwCMSYMQk90kDKBuWkm5ycjEHhycSQpPmIiZBzcdwN+ODRRhqV0Y9SY6RI9EHWRS3MkqlxCzEyRE+DQllL5z0xZZx2xV5elz1+MjopFUdB04zS5JQkJzhELJLlFpXaMoKmgOoMJUx+cl9W5KRKvIvEGlAMWmAC5pSwtlQx0dwimoKiaW0EmSzfN1rAiJQSY/CkqISSWtwtVSjGZ1N8kkLen4w0l1nea5jol2aLnGqly89OWW1xi2JtJS66oFJKhr+XKBfb1zQhZhMSu/0ZLcwpoTqyfdyQAlPs09bUpTSOTI3gdIK37RzbezInabR90X1OfUhpVcvfTxgK0lh8MLTOaB2LkU0u71OhgnKZ+6a2Lpnf/fi/g7T9j8DPA//Tp77+X+ec/8bVLyilPg/8G8D3ALeBf6SUeiPLHfxPPDLgYybmSAknx2hxqKlMRe1qKifW6V5ltE6okmkhBiAJM4VhIA57GkVVqI+6LIIxgZmJJXZIkkBunVDCfJCciVToYb0ZcYxSkKIJY0D5keWFZ3QV+wc7RNfQl4DCrbuPAp0CJoPVjoiYQOSiW4pKgrSVrQkqSX5VzEQlmV9YQ4wQUxZYuThGpnWHHz05eHRI6KahcaJ1aq3BuZJIT5Ab1VgS0PUbQpAiekg9rqlZr5b48wuGJO6XMSG0Rh8xWWGzou87jrsjxuWak51n1LOGYRxQyuHTSEhSXKAtWtliqhFJQRODJoUCHccSiqmUmG6EiE+ZSilShD56RhVobUVCUxuDQRDSum6ZNTOxl7eWRojN7CwMVdUUhzxFRtOFjK0GVKA0HbJJRD+iEVOIXKgcTDdKvsxs04VKeBlXKRwHM6FTWrEza2iRBXy1XotFfxaHT601OuvtLS40QS3XYtJ4F6msTIuc0ZKNEoWqY0q0gFFCz3OVRTmDTYFZU9NUDmcUPnmGLuL7ROcjmzFxsQkcXr/FYmdHwiJzwMfAGAP9KMYVrWvRU5ioovD/AzkFCpdPkOMSN
BSvUB6k2Y/TPK7MhDJZyTUlVBKYKA/ioljOY0EFnbHMmlYMgsbAKq5RVcS0MjCprcFqhx56Ugq07QxrLd73hGALjz6W7ERbhhKWurLc2j/g9eevk/qO9TiwPl9SKV1MAaShruzkSCYavDAObJZLTNWURRuaumJvf4+M0HmoZ7zwyg2a63fYP7iJQTOs12zW56z6Hu0TfrkijwMHizkvP/88TdNwserxKPZ2F4Q4kJK4bHkvE1+VM9aKcNwaTSTR1LUUkZUTO+kxbEPVq6oCawnDiE9CvVIoZk7ub2UqyUJyDTErTO95+uyYo2fPcM7ijObZ0VN81hhrGBvQbctm6LBuQdPu4HJGjZGZzXQ6EoeiQ0QiQOraMepEVYsl9OSclpFhjatqEispsKZrRBlIMohYXazoh0jWls5v8Nmz6ga0mxFDxnc9javYnbf4JAV+Pau4WJ2Tnd3GZcQhcnqypB+8NDclzF4lQZ7HcWAcLTs7C0yQHMQQEomhFBADyYsjqCJKgTZGamdJ+Qyt4PhiyU67w/Vr+2yO7pPX51hd8dm7t8kh8+Znv4dH730Ioye3C6o9RRqOaHcaospsfOD9Dz5ic76kNhVpiEV2YYizNY+OT3h0dIpPmqQs871dMSNQAW0y7777MT4b1N5LZFVzkTRPLiIpWZrFNYbR0627os8GYkXT7pHGgTDCvJmRfOLet97lv/v5/4a/+pf/LV56/gbP3drhZ//Sv8LHjzfYe79H0tBUmu7sMc+/8gUO9m6Qrr+MfeUlfKMIZDQjKtcoZRExvQZjSTmzs9tidFcKpqJby6CQDJqU1bYohithtRlKICvKKIw1zMeeuT/lJ+9YmuFDUnuTgQXdYNj0mXENv/Pl3+UHf+hNvviFO/yrn/05btQOXWm+9w+e4+ff/TZ0I3Vd8dbLd6mt4eLdezz++D7PHt0n9isuTo9Yd2uO0ay9xxrD2Pe0bYOrHMbJkO+lF19GZcMv/+pv8ge//fc5X25Ye2k6ru3vY5pdwtjx0b2HPHjykLuvv8znPvM5ZmZXxtTaYg2EkEpROG0nksClKAUsAaUCKgdpcFRxKlTSuMesSpN3pU5Kl7TmqcjPheallejaZeanaPdn/PRf+Ble+dyr3Hv/PZKyXN+Z0xiFdUJfzbr8hWQgWVS0tO6Q6wcvopRBl4bgux5X6HUoGcSFMGlly96wzf7MJQvWYnNGBSnedRmm53DJfrmMIJBC3BiDNrbUcBKpo5UuhhAZ44wYPiWLyaYYLkmjEhRbel3KJUfXBzG9iwG8J/sErqGpI421KGvRMeNzou8H+n5N3YgLZt/3oufa0nIgFw3XlpKXBXUSlMpiTXneZTgsDVpCTSYcWmiHysptYa2VdStefn9LJ9y+FaUpmmCeT3ywRX+27XYJ8BZa4+Xj6SvND4Xu55zDNXXRRZdIoSxRSD5LpMJlhuvUrFyiUClf0addvXbLf4SmKO9t4vKx5Clc/sanv/7pj6vHVl5VmtUrfdb256caLwSJv7naaE/RQxI9UZ7HBH6VVyGgQ9G1Tee56OTKT2wHNjkHuW9N2iKSSYvXhWgkIykP5OwgX5rbyDn6/wBpyzn/ulLq5T/t58rxF4H/Jec8AO8rpe4Bfxb4nT/xtwq6FhMSLl3eVI1QJg0alVShWBf+LQW5KZMapTVWT6JHsBEqFamcuN1su39FgS8vxZcksaSf+MxjDmQlC0cYRymgnBF0KctkbjMkXJQFI+mEyhGXM0ZFTPaQMy5lVBY422QlCAXSkMWUhQYaNWOAISSikoytmBNjzGgreiRQJJ/xKaKSNF85BIbCs6+tIfmSc2bFtalylYTxZoGW5YIxGG0ZgwRQ27aWyIJ1Jw54WjOraw4WO1TaicnJ6Hn69ClYQ7bSqGXdyiKjip43yXPMOQqsXww/JEOEYkOrt9u3M4a6qmjahoVBKJsoQt9hlaLShqELXCwvcM5inGWzXBKVmJ3EmKhrX246yd0Lym43LJnalDweJdMtVSZ1oUDhn+Bxb7ncatLpFuv8SIoeYyGHgcPdGXbREodOAieNxWhLraBuPZvVIG6faZpAJoIX6mYePKaKVFrTVo5sEsFDn1OZeAllQiVBf9q6pdGZuTa0lWXWVMSsCSoyKqiVQW8863HJCy/e5eZztySnpQRxymIhSCIplrDygK00ObaMvRhaYKyI0GOSgFFEWxViou9H/OjL9PLqxpSwFincE2hXFholKFJOSahJWkvUhXbM25Z11+NDB0oLik2Hs5rZzpx23jLmwKbrMDYzm9VofcB+yFhX4WxFXbfUdSM6zt2GG7d2eeXGAbfmNWdPn/Dg48ecdH3R6geslcBxq8BVEuyOM8yWNc7KIm6VWOBrK+hlMjVnqwuaHcP5auCs87T7mr3ZLtrW9DGimhlqHDEZTE7szVquHexyfnbO+dmJoEe1IaQBZ6ayNZHjKBSTkgWIkmt11kqA7cX5OVlBVTdc27vGrKnYbDpqIyihyQqn5W/O6poQMj4pjBJtozMVynjc6SkqReZ1RV07uq7DxMRi0aJ8ByYTVWBYbmDHY50jJoW2kVrJOpRSRJkiNo+apC99wERjIRSdMSaSEjqvFFIR44Qq220GNrXlxrU9QoocX6zAKjyaIWack6lw1BpT1yz7DUTP46ePmM3vsnttn6Dh5OEZcQxCW8rCNkjlPk9oxpCKuYoEt0Ymja4MZEJIYtJUCt6u96iccFbjY6KpHJtxxA8DzjkuTk8Z+w1Jj2hneOP2DVaPnzCf70EM+LYC43BJYQbF4XVHVdecnJzwe1/+ujA/ViPawF67YKedsbO/x/GYeHZywRii5GpVmtmiZYyBcUhsNj3joNi9cYf6xme5WA7cunYDwkjsTrl563lOn30M+QSjRLNSVY7dvQMaW3Fxdk7dVAxp4Nn5xyzPVvyvf+tv8cUffosf/9IXefX1t/ijd44wv/g1LtaKlFr+0e8fYRfH/NDOixy8+BrYQ1KeoVUlqJnxWMT6MapMZkBhaapdtNpwcfQRD779Lso2vPH9P4htd0iFtjXRpGAqfgpUriJBRcgaExNP33+PV2LH0+98A/UDL9J+35/jrFui8j6zZpfv/ewhF91tnttNvLRvePb2d5i3ilu3r3No3ufGzhPWqePlV17j5s3It995h9VmpPMjYbHDYCtOVyNRz2lsxOXAbDZDpcxsMWe+WNDO5hydHHNxvuRv/Bf/FU+PHnPy9BGxy+jgMLOKn/jXfoYvfvHH6J9ldswOHx1d8Gd/5Af5/Ot/BhWqrQZtYqJNJbRoXoUyivZAQOHRaSSlUbSvKRJ8YAymGJp9ovbc3nfbc6kvC0uD3hagRmuo5FtuVvHZ732T1z/3Gn6IqGwxwZfmUX5ZK8loUzisbZg1+9RmJi9CfZfqUSFFlGKrX57MNbYIBdJ8aH1p3R5CwPvAMHrRHhUHPT5RnEuTPyE/qgxSxdH4am5XQZoyxJAYhhFLRkdN9IIPGK0ZhoFhGKTzNWLKlcjkYuTmFFskMWWpDYwVhoBWU8RQLq/VbPc/WyQelPpxoublGGX/
jYI+qamGLXovtNQYgqQJ/ZBp4Ftep+Ly39N7PqFRiktmzmU7MTVCwpqa9n2lVdGxCYp0eZovzT+mr11tsKb3KkaYPAB0kjo7hnhJEywDXRMSEb9t1tjWVZNWqzR25TrY0jT5ZKzAVIdtr/Erjdn0+Set8S+zz7bNIOU8ZH3lNeaSYRe3zdv0/22guHPlHE2D6rLH5TJ8hO31ppK4Q+csethUzuOlAY5HqVD2nyTvO4ZohL6asyKbDDGilCtgx2QMU1Dg/7dN259w/PtKqX8b+D3gP8o5nwJ3gN+98jMPy9f+9EObSx43kiyfY0KjsRihdqfAZC6RlNoWmsM4bnM4tp7oSksxXFxaJvFqYhL+6y2kqmwRclJ4sFGg/ZQCOYkroLWWqnb4rBmzJtuKAUNUtbjw5YBWHoO44ZmUMDngtPCufYYxZUJWjCkREsSCPoWs8YniJAkog3EV1tUo46hsBTkWem4QcwUkH8qnhEUq/pCz5FiETAgD3fmKMUQRU5ebzVgregWjxT1PK7p+YCyJ9c4YdpqW2tZ0m4FgAzFExhAYvTg5xmxISpGKs4PKQk/V08YMRRCcBe6PElkgDYXG+8hqucL7EVVbos5UWmGKLW1IgCoIZYg0rqKqa0IJTpcpSRIKhJZGMik5fykjDkpapnhipCJc80k0WvbW7aFktSOljE7pcoFJEY1C5xIxYBS1EZ0YZIZxYEyePgpSm0tRYrQYOERjcUYx+kSv/BbhglRoHdPEZpp6CS1N5YQ1YAtq7JylcmYbvCnolqVtDFqvCSGwGXpUThhzOeHWRuO0QSehCjtlaKxQezfLMzYrhTYVIcPFuuPpecdq3RX0WvLmxJpWEDm9XUgjIpsXyqkxloQWO/ks95xotXVx2jLUrsEaB7mfjL7wIbLpena8pzENTVujtGJnZ4fXXnuNvb19nJP4BAnpdPIeGU2zO+P6rUNe2F/gT44I45r5eo6ymlQmmxOfHcUW9Qx+xI8jOmesNmUSKpEg4uJY4Rb7JNfw7gcfcN4HNt5w+2bGqoydLbj5wkv0HwQsx1grTXVbGVaU91crvB/pxo66kgYop0zO4qY2DD3aGG7cvIVSmv29fZqm5sMP72OrlsPr19mdzfDDhu7inJUfuHa4j7WGxtkSdqoExVearESXJpNLKSoqZ6mspa0qWucYk2z+zoj5EiSstQz9hrOzHm0y7dwxDh7fK7p+Q7t3jZxHmS5XdpvhJ/dLme2qS6qOcY5inivDNTSri5Vcf00rhiXzliF6Frv7rDYjy6FnMZ+zM29ZXnRYJxTvg+t7mHnLs/MLBu/JCcZhoKlrKltRz2Ysu56PT8/xxeU35ynIfSjupmLUZLRF2NFyb3o/ElMgZccYvKgoxoAiM6rAoml4cvSIL33/57jZwIvX9ugWcxb719DNnBO35t5H91nsXWO1Ah1npGwZekMwicMXbnHRnvD4+DEXcc2N9pDQGh5/dEEcPE3lWPU91AZjE/gRraCyFXFIhD6xV8+5ffszXN/f44/+4B+jtGMcpBjY2dmlbWd4L1k/Y+zZmc842D9gd2+fQGDvYJfdPQvpjA/ufcC9a9e5e/MtnnznEf2zZ1h26OIh7z6GX/z13+Puq3epNh+R8wludotRXcfV+1glcTzKyBWXkeBynQLLZxf88t/5FX7lF/42bt7yH/6n/xl333yLQHGcQ2/3BNnW9XbF1VnocHk+Y+/uXZ4+/CbD5jEP7q25+/pbzOMuh01EV2uebxPu+zLHZw/49V/9bb76679DrQ2urvnaN9/m0dOOxXyf/mzg3W98m9XqnPX6nKay7BzUVCmyXxuca5m5Pa4d3KCqat579z2eHR9zcnZW3HU72X9iopnXBJ2ZNS2tr9G14fZnX+SLP/mjjB9tuDG/ya99w1LNxcxquFixXq3Zu3kNjAFbUBWKdmjacFKC7FEEjBa9j9J5u14pYyU37VN71HRsaZNTA1y+tm1mlOwPMSOPbcRkxTUV/XKktRK+vXWxK052xhh6PzL6UdBrJUX7NAS/fAKf2jfLfjQ1H9YKyqrKGjMd05plrSFrMarSugzTjcEqkXWkK8V4LoW9BBtfRgvkNKFFmnHsWYdINbmPh4jVBuWcsJRiKCCTRZf4ppxEO1ZVFU4ZsqnQpYjPPmCsDD2ds8Qkeq4pAiZv60dByLZZXSFsB8QyrDSCoKnp9RRmmNYoZTDZMjFYtkYcZCjnQVcOW0l5frX5mRqmq/S9raYuXTYqEwV2igWYPpfmIxFj0XiVNR0lSO44jiQ/kvPUoMkWOtUU8nha6KFIE7O9Bsr3J+TpE8+NKw1cnuiQn6zEtk3Yd/l86wZenssUdn319UH+RNzCp+8bpQRJvNoE5iymYpTmbtLYfeJQl03Z9tBFsVqC0SlcwskfQeNIOgNOzpcGkiYlW0xgxHwuTbrNUqiobK7UWd/9+Kdt2v5b4K+XV/fXgf8S+Mv/Tx5AKfXXgL8G4Cq3DdLbamGSNE62dOAqJ6FSq7R9AyYzCblZTOmiZZGptBXhv9bEHIWbjMY4Q4hRJt0arLbEmAhlipByxocRH0tykpFiyEePNZmMpaob6naGdg1jseZ10gcRckBlMSIxWazks0GQsxQYMwUVNMSc6YKn9wM+JaFPFhMMVbLOZEEtG6CS79msS5GfSyOqqKoaoxWVM2LokiOjMhiTRGhZbpQQE0YJxcUPA0lLYOQYonwPcYKr6opKO7peNCUYWywcM6OBHGSqkKKESBpDCfINKJWoK01lC2QcE2jKFi7aOtkcI7WqMU6TwkAu0/ouBHKU1L4UIqu+F561lRLAB6FFRhQ+JsZYtCxwaVWcBT3K5RzJvGmCt7fXIDBtBLk0G0LF0FoMTMQ2fcSWIthmULomkiRkPAjap4zeonSUSU7WQnm0kYJKyZDBjyNJFxMSPTlEyvsr2Tcll0RBip4cA4oKrfIk/acbRnKSpiPEWDR5iDNUFI1JDBGQjL3WOOa1ZlYpFvM5zmjhmiuNzlKQhTCWxlZQjGEYCLEszmSMFnQlxUhWAQikpLBU5ZRPi3QQBLYgutl3+Cwh6ZJRYkoDIAjdMPQEX5ELFTgEaRhTDozeY1xExZHj8yXBe+qmwvod1Cyz4yLD6oyT5TnLfo3PSaSwld0i69qKE5gPom3su832fZ9EyT4mcXo1jmu3nmOtNPXOLpUOnC431PWS52/e4M7d5zlfnnD/wQdUzlFlmNWOpjI4p6gqwwBy/40DIWRqIzoVCWD3tG3LwcEBbTtHW8PhtUOsdbTtM6H9+SAT4pSp24Y+Bbz3GKPZbDay4CP5iDGWiIhtAZa3BQ45SU6gEt2hMppmVrPuNigEtfYhyvWTEmE9cnF6wbCJLPYqbs92aOoK56Av00pr7JZWk1Jm9LEglY4YhcZaubowGWS4sJgviEkGM5bEfGdOPZsTx2MGDXnckCvFvMronLlx8wBXGT548CEPj54RooYIJikclrqqqCqHHYXm5oNQsmISob9odFzJqctop9DKAZG+L4YECE3VWUtSlhjl/Hb9SDRrMpEKeG5vnzu3bvB
Ew+7+HienHU8ePOSjBx+xevCQHOc4PUfvzqiVwzkYVivWp8e8/NxNDnZbNJGKkdXxEaEP2MrR2EzlFJWFancOSXP05Jjnr99kPcCz+++y6xx2XlGrSPQD58dnWBeFwuRquqHHx5GkBjbDObWZYYzC6pZNt6HvEk5XjL3ja1/9Fv2rFU5ndhvL8dMBO99jHRIPjh7xi7/0u2z+j7/P9dsHfM8Xvsj8zvdy9zPfx7ypII+gPcvNCqUci7ZGkfjON+/xO7/5+8yrhnW34cGHH/D8q5/BVCJB0OqySfvEkaVESQpiVXH4o1+C5V1upjNoFsT2Oeq4YHO25N0H3+DR8SN++bd+k2/f+4AP331EGCKb9QptM0PsSK5l0z+UYZcGUkBnj86RxaxGl4gMrTTkiujlHhm9F/MEI/R+pYSK56yjH0bMrqNfR8JySfaJ49Uz1uGU08cPsIee2Y5hvlMz+jPefvtrPHj4kB/55/88N56/w5izNApKo/JlAzd2SyonTY3sP1LkKZ1xzqFHYfhIkX311F2hZV05j5SCnlI/pYlNpCCphEYGeWQtGpus0DjIZZCUMikGQuipqpr5zKDVKANEpZDA7U8dWzSEgqLIv421VErqp0nvpo3ZNkpSYykixbBKwlPIMTGOA5vNmmgqGt1AKzFFkSt0uzLQn8KQVRkYxTK0NcqgJkc+rbf7eCpW/xMiMjVGqjyvbOTxrTXScJdhr9Ki9dPmEoXR+kpwd9Hqe++LLi9u95VtE1XQoalZ0kpoobPaygCxoKw5FWmK1VR1g2pbclMzGrW1/afUfDC5J05oWdq+HylfaeIz2+cj1436VM2TxOhwQq6Y6t/iHlyGnQUynbYUQdykBRWXzHzZyBKnxlBO+IR0SRZsiQ4oVtlqghqvnLNt5ABsewJtLt0/L10uVanZ5B5KWz2j2jZtU4M4IWvThwwQxL1czltxoCyDgul5Sw+9hcy3WXnbyIEyttBZk8qW6zQ466iUo1OBRrdYapYmQrTY3ODJaIzIi5Qu2n5BYblSn/6Tjn+qpi3n/GT6XCn13wN/r/zzI+DulR99oXztuz3G3wT+JsBs3martEx4SsS2UiIGHYaOvjcom8X6O4rRgTjQyBvsrHD1ROMkN1dUlj4mdJKTHdGEpGCQTZvSAIUoFL6UReQaUsRHTyAJJUhNEL0MCoVGlwjBo1IsHPSEskLjRJZGmS9Gj8lZqDhlmqHIco9lXZCkIDq1MsHWShYKVaY7kxV/+WXR6rmaphHxs/cjMXhCFg61NlaK/2yo24rsPSbIBEGXC1Bbg3aarBUhRbp+JHjhGsukyJKVQelIHwLRA8ahsyHnQVAWlUhZ+OGSTyFFPMqQCeQciFHRpUDIEesMlRF9T86JmbN4DzoEoS5GQRAn3rcvBWJVOzbdhnGM/F/MvXmsZml+3/V5tnPOu9y19uqq3qZ7Nk/bs3hsy5uGeEnsgAwxUWQpQAIERgLJkhUkBFb4AwWB5ABCyT9BBImADUiETYnsBGObeInt9nTP1jM9vS9Vdavq7u9ylmfjj99z3nt7PGOH5Q+/UqvqVt9733POe87z/H6/7+aQz3KI4hQ1eEEAExbrNMZZMp7oPUY7oSWkvEECxkVAIfEB42KWCvdeb+iSBmsdSmv84FFGitFgZHqWs2hpstJEIoP39N6XAUEWSmHfE/qB2gm6m8tmMSK/UjQKxapy4pgkw4kLOgFGlyajFfqHQya4KmOVZUh5g9vrQrUbNQWb+6gg0mhNXVU4K8tOXTmysnLcIdHUlWRzdUnoWzkV+1kpcIKPpSkXoTJZuPxZ5UsL2ajVsuQYGIZBnEKzoh1Eq5XG81NGaEFREXzP0LZ03uOD8O7bdmA666kbR4gdoV3harh2YxetFafdwPHpCVM8kxjoy+AjKzB1Re57EZRTrLw3m8UYuqrK+WlMVaOcI6AYYiJ0PWo659nnPkIXZJm0WfL7QoYhRkxVS1A4hrqucNYWuhA4bTZNjQJsJYJ+Zy1VyWLzIdCenoKSWYjSmm7wQv8Nkeb6NebThqaak3e26NZLumGga1uaZoIxjqAi9eDJWVxRjRmngWXYUIqGpBXLdYvPkdrLcVd1I7WAFSS2H1bkJJlxWmsOHt7nrO+ZT/epr+7i9sQBMqXI4aPH+KpGNRNxjiPJhBMkg09ryRTMAVdVbG3t0lSWo5NHxBToF2ccPnrIfOcqaj7h9OiE1emana2aj37kGZ575i6rrqOqK5yrOT07R2HZbuZYIxEaq67F54yzFT7K9Fdhsc5hrSMjejzRWUSUyoQgzy3KEHygY0AbjSVjnZM9AcdqtWJ/2lBvzVn4gdPliqaa8uu/9uu8c/+Q5Bq2d6e0R8dYY6hry3pxzu7uFneeuMXe1ox7JG7t7bM3nzBzjvOzBXeuXGEdMufdmv68ZVivmd68RcqGo8MTQsmtvHltly4Z2sO3edid8pEnr7FYOV59/T4hDSzblvXQkyhlrwugxaF0vRYH0MViiV5ntmaWlTGYDF9+4w0WQ895GjDNDkonjk8eUu/NeOkrD1m1C26eW1K94la4z9VbN7Fpzpe+9BWuXNvn62+8RjPb4k/94PcDlulsG1drhhZ29q+xtbOHcXXB5RUbSqTs+Ju9X2XD2Kz0WjHU8lfGGgAAIABJREFUMxp3g2qYcHYSuff6AY8PDvmNX/s/ePGVV3h8tuTho2OsMlitGHxHZRWTWrOXG1aDp1bCpjBJk2JEYyEZhhOhS5Hn+ByJDITg0cZQN7UcbyyMCiWOvt3aE3Wm8z1VVzGPFdoaDt65zxdffJF4b0F3Fjg7PmW9HDg9X9LnFjVJvP/wbbwNTOe7zCc7MlzMCt953n37Hd5+7WU+9R3PM926W2hoxaI8j/p8YZN88HVROI6oyNiwSQ3GptlQqMJeLKjQeO1TLgNTB9mV8O9UCvmMcxrXGKomo+iBgFLyvX/oVd5/3CtHlI+s6IeB8/MzaZ5KMzGibMPg8T4W98bSKBuLQZwHyYmQAzmrMtSTk0sCdUk4dxobFb35b2zcUJIju1mMlBxTTFkK3SLRUFpcrMmSc5ZCQJd9c3NpVUHXLjUfKIVxVqz/nTSzKSVy8HK1FWhrUUGeY6nDKHWd2tjgW2eZNRXDoMl9JvuM6K2LPq9yJGuISskxplRcNkMZSAdhGV16rsYGK2u1aVpH9CfGguLpb9ZLFUxs0+Rcuscu/fuI0o1Nny2NVc4l+64gUarcE4zAir78uy+ZvF0arI9fX0bWxp8JGwbFxfUf/74xb7mESl0YhVw8P2OTZ63FObe5H0c6plzBC7fLy1RS+f/yhbX2IoppbEovob+pXBttHToralWRraZWiIHidIt6cIRUsewGKiWAEJeOX4/X+o9p2/5fNW1KqVs55wfly38B+Er5+/8G/KJS6j9FjEieB37vj/19yA2TY4QcMRZ0UiiVihtdYORMayXcz3HKYrTGGjGAGC3HLz5UWQQNBpQW3UNB8MaiKue4eTg3nbMWl6uUIlYVLnOBNI2RSZFKHp0DWYkg3geKPakmKEeNxqqSA5Uush
eqIkjVZRJTqUhQCZ+FmmdUgZw3U4My0dpMnAryOE4UtAEjC8ZI7cil+DfWYrIiJY9WCmeNoHb6Uj6XUlibwGp8oQPWlSNpwzp4Wj+wHHpSFEONnKQtTWSMHkv1SCrGD+PDn7NMP3LK+C7ikiVXjqwUDeCNxlnRmUERYZeGNmUIygjNEU3SjqyiOFUi2VhDGp2kbAkkz5JjpyGngIoelYVGmYoz0gYWv7QQKFXojEZ0f7I/5NIAiWanqRxZZda9J2lDVRVTnBTkGEuQpipcZ+8DsRuIvS+mMtCHgA6RISZm2lBVrky0ZPd1VVUmLUVXxzjQVPgQ6P1AbZzw7XMm9gM5KPIQICRyTKWoloXOFN5+DgkodA5rRUcUE8MwYJwpRjcSMmmNI9PJZ+sczaQRzWYS2F/rMlkLiZSTLDpoub7yyWze3xrLdNJAyjIUIMuzqTRo4XKPpv/zZkrjLOu2I0VNXdfM53Om0znzrYbJxBLTwLVr+2xtzRhCx1Y09EGVEYlM//q+R2lNM5lwvpLNPyRBJdd9x950n+FsiXUNkTUkMa0Rr0MtqKA1m2e6amqSz/RdpJlMUCnQ92u876mbiqFpaHTGWGkktNZY56iaOVFbctYYYxkHLklJZlBVC9UzJs+qbVmuHzCbz8tkOOGsNLTHJysqp2kqh3UWPwzU0xmgWfcDg/e4ekrOgtCOOXExRilwyuo6hEA9mXLy+CEn5yfs7O5iraeut0TnaiyTyQxlAutqRVU7tnYqVOU4Oz3lSw/exe1MULM9Uoh84+1vkGdbzK9eI/jExDVUrmFre5vBe4IfNmt6iJl11/PkndsYlbj/6D5HR49R1pEnPXnwEDwhD9S7FcRevvaevu3Z2tph2WbWi57VekDVmuAD01lDbQ3deYfuZX3TVm2GMzEK6hJTkGZIG0KZ/Estl7DWkMj03gsN2TrIUCuHyoaH50sO2hMODw/59Ec+zs7ONe5OdzjtVnznx7+DxcMDjs/POGtXnK8S9TTy+Owe908VvQ8c3LvP0PZU1qFCYNdV7O3sMN/ZZsiKx4sVb7x1j8l0Tt8NTOZzzlZLstHUtaO2E9ZnaxbOc3x2hO8W1LOGbTflvG3xyZMVdF1LVoE4ZIZ1ZjLdLkZbiqwbMBOSrjkNM1586xucugluXuOGjE0GHxWPFy2rfkVcdCy+tmT527/I0L3Bj/7YT/LKS1/j6ec/xtmJsBxiduikefbDH+Evf/5f4sFb77B74xbPf+LToBw6iznVZUnUaEI40o9E7yN5k4uoef/Y88Rsl4dd4PjolOH0iJd//9c5bSNaNzz3xA1U6lgOhwzG46a7mDBl8f6SdL6gygPT6Vzookma8ZATq9jRhyA0JAU6K5QyxJTp+gAqlpwseVoMom/WKCa6wtgpdTNje2/CLFjWj064vnuVdx88wC8jwxKy2uITn/xhur5luVyzN79OU8/IvuhjEmSfULnhiTvPsbV7DT9orK5RWGFqKHNRUGdhSFzUc5eK2YK1fJtCavPHBcY5fiWGXdpUGF1JNBKqBBTrQo3raNslPnY4lXCXitZv90YjVU0lQQm6ruP8/Jy6cczn0w2ykTPFEE0sbuTHNVpZnAFiJIa80b5L4zRq3i5jjIXvXlgao+5IaKZ6g1SMjcY4N5BGrURRlHswFZv9RMTmS+c1Xrni1qj1H6b4hSRuypu8S2dL0wwuVxfaqcIGGyulnCUTdBEGcgqS+4kYVGWV8H6gi5HQd6jphFgrko2lFoLxoUoploJ/ZJ2J/4EaC8YR7dO5OJt+8KPc6N4+gEoVnV7RdY3B4RsdoFLFKbo4XI4D8TJklkrj8hupDco1nvv4+1Fqg0xujkkpvtXrAz9Xvs4Cm8rXgrpcNG18sAG01l46x/yH3kdt7rgCymh5j9HeHwXK2nK9CkOI/IGzNUWjqWwFKWOxYBqauue7Pv40s6deYLqGt9465stvHpCHBYGu1HljLVqGLPnbPuHAP53l/y8BnwOuKqXeB/4D4HNKqU+W430b+DfLxfyqUup/BF4BAvBv5T/GOZLxIIPELWqVSSqidRa7TwIxBwxC73EjxJrFzEPg13TRURckhRSL3aZ0+FohuU0YQhikQRypCc5umh6fEn2KJCRyJYVItlKs9kNC0VNNJhgihA5txEQjRsWQQRtFkFQlTNaYHDA5olXC6OImE4s7X86jPhXlIzkMgt7lTPYyDdQUoVBpsMYQwGEISJuTCnKUJeejXE6N5JUolOh2YhG/piTFOoIwKSXXHGNI5V62RmgClXPo2olTWsqkSxP0FMX9bAw2NKo0SKlYtGZdxLuCdihj0NYWk4hIRuHqmtn2DtW8xqgEyROGiC+25QoIPlBNZ2UdEl1c5yMqRAyZrDUpRHrvWbcDw9CTkyEGoWP4ODr46DJpkmZvbHy99wwhlCaZzfUVe/UoTnWIaYEDgkrUSaN0FMt+JToCnxLDUJyzLk1MfExENMpV6KpGO4txFUFB8L04giaKBkeVoaraaBqU0lTNhGZSgc7F5ARiN5BFzYgzQhlTKuNDwkQJUx/zU5Qx0pAlBFmyjpgT0XtBGXOm6yV7TI/TyZgYBs9qtSoCcAkfVUUDZrSTCZVSGFNLcDSW9XLB0A8oJzQjlTNDzHIPUbSoZXOs6ymVzUzqhvlE3A/DumcInrPFgt53NKcVfmugbh0PD05BJUG0XI1CcUJgzxnwEeUjOilM0hIllIX66UOi91HiLpSimc1wXaIP0IXEuve0IeITxBjYmk/Zun6L5Ca8/d4Dzk9PeOrWbfZ3t4ipx6c12ggdyMcebQ3L9Yp+GDDW4eqGgMG5mqaucUaJnhGFNo7ZbE5VT5jkjDo753y5lKmdMsxnFbtbMyCy7FasWtnQd3d3SBmaShq+esqGJpdjIIQoAxxKyHeWhimkjB8iKXYsz055+PiA+dac7e19rly9jatmVHWNtoXWGjw4w87ONtlNqExmslVzFlqOT0/ww8Du9jbnWXP46BCGAYUgV94H1uu16Poqh7WGbhh48Ogxq/Mzru/vSD5NghtXboCpSNHjh4hRsRQKmrOzMx4dnbBcLFGTHba29jg/e0QYPDqJwdFOVWOrivWDI4bWyzqQhXIteqqMqR1Oj4iqFFoheConSLrW0uyH0rRVc3E57YeeRdfxxmvvsttohtnA6//4H+P7gZt7+/wzn/0eru3Oeduf8exH7/K1N9/CHZ1xdLqkarYgGjCWdtmSVhlTGSrjuHnrKvOdbe4dnbK9ewO9pXj9vXuc9x37+/vUsykqR0IaODk6wHeB6WTOweMHxBwIyTPbbpjOZjQnZyzO1+RY0fcIXTn0BJ+ZGri2fxWfRBaQbU3MFe1pzdl9Qx5q1GrNPCpuXN9l584On/nuz1BPD/n4pz9Jqn+Md1/b48NPbRPOVyxPDmmqQO1qVN5DKYe2CdNUfOgTn+HJ519AuxpchR9GPS3F+Y5i0S5/6kzRdESgo8maxannpRe/zvDMs/zyr73E+y/+Bv/iT7zAz/yrP0lvdviVX/5dupMOOiAaDocTVmnBNE+pVWbpFMtku
H73Ds5ozo4Pqa1mdX5GpcX1TUwUSiNQTLJCoc8DGzt2lWT1rcjUFXzo+Sd59tmPcPPGNh/76FWeff469dYebX7A1trzxI1t9vb3USlTV3N252X/DHD04BDXOHb2dlG64iMffpocb6HwLE4Sxk0JUdZnVQpBoxBt/7comy4jHx8on7hoNxR8oFkeh2lZaTBgqkZMIvSIg2qUsqhs6LrA0IHRE4nQGBukb/PSJbdsREVCivLZGgmibqYSfK+TkVrEjCYL43mUIWq60Bdl1EUTvbmHxqZEhvSSZVXYTxRMd2NsIqiS3G9lsK3EnU/Mi+S6hqBwCGMnwqZJubhqXKBMXDQMPohZlkLQF/OBAbr8vHa2nGthT22K++JoHQJtGKR2UqUp0HLOMUWRqiSDa+oNbU5668L6Kj1GLueq5GA3VvSU2mOsiY1iEzoOusRUhQ1CqhiDoOU89IgkUZoXM7q/SjOoojiDjk2qHGO5J4Rveanpu9xoFRBDGfl/SqRL3+o+59I118YIS2WMa/Iy1BvpkSoLzTekNPbzm/tpRNRG05OLJvISCljkMwA5fbM5Chu68QViKE/cxWC9IHpG7jNjdYkSUkxn8LnPfYJP/fhfZrbw/Oo/epll9/scHrzGomvp8hjJcEGZ/qNbtn8698if+Rb//F/9Ed//14G//sf93m9+mVL8JxIxR6xRQOR8dca6PysaIDHu0MpI07ZxZpALPzqxiHsdNK4gSzlTOY0rQv4UpUF0xf0oKZk4KaVLA5YwSQS8WllsFkRCo0oDmTDZk31LjoGsLD4KyoYVUavKGhMtFkNFoNIRhzQbIUZCSsLnRrp7XR6Gkk9aHGrGgEHJJiGPDpeSbxGzWNemLAYio2PaaFufUy6BkEYoYUmucY5jYZdIpYmKZVIiEyqJQ2imDdtaMSjDejXAAFolUpCpEEpsdWMUZJAcyNGQkiFmV2yfBYrORoEWfnJd1dKoGlOQtsL5V8INVtlgXYVWmhpVXEFF3xVjxIVUNgc555gzIWaq4zPabgAiTlfEnDk+OiJRtEujaDjGDfImWpgkBjGDp0KLA10SpNWvAtZbcTOsa6zR+FjQgSh0stU60nYtMdtNwza6CvXe0yfofcKGgE+ZiBhfoA3WVmgviFseL2OJMchlOoqSz937SPRxgzCrIKhX8ANDL+ct+YbyWNiy2CkleYe5iM61NZAkU8a4iqoxuASojnFGu8ln05qco1BwY5BNR1x9CGVo4tpI04iOIURxxUoqlgaUgn4U564sGspI0aTGga3plBv7e8y3B5b3Dzg5PabtWtq2ZbVac+9vnvHUz17De0HcjTUoPPMa9mvLczevcePqFaaqYoIltR4TFRpDpQ3OgqkqAgq0QdsK6yr6nIhZnFs7H+lDEoOEJDk5R6cL3n7rLY4Pj3n2zh3RJ6kMKVJVNcpaYuiIlGGPlzw/pTVOV9TVRExaCtrTNA3T6YxmOiWjqbRF2xZXTdC2out6rDZlspuxzqK0JQRPPZ2yXrfce3ggCORsCz94rl+fcfvWDWLSnK/WrFarDUUyhCQGORmi77h5bZ9po1kslxw/OuDxwWOqes7+lSts786ZTC0Tp3n67l0+/sKHOe8CD++fUIcWguOkO8UYw63bt6n7SDN4ju4/YHG24PzsjFW3Jivh9Ksyjcpa41Pm9HzFernG1pannv4w2lUcPDrGRzCuwRrLsu2ZzuYMbcfhw0cMQdH25/TRkiJoZRmCNJcPD4/JJIZ1LxlLxmwKuJxkLYo+00wk+64PYmggO8yorYhCjZI7HZUzrhg12emMwbc8XnYsc2TVrdiaTHjn4QPuvLuNbXd45sY+D9aezjvWvWbdKwYS67ZDZ8t0NuX6tWtMXYVTipA8Q8isusjOldtc3b7GvbPE/ceHtEcrZu3A8889hSKw6Fdsb0+Zz7aYNhO0SnzjtVdYdyuxvUsDBtHSdG0xgVEG5wCVqKcOIiyOzlj2NTkluuUaE89lrdWa4COrkLly5SbbN57mna99iY9/5GmmjeLxO/d4YubJux1+fYbvj4XVYhMhR9FWmynEBjMR2tbZsuXhw0Ou7l3l6v7OhQ+FKdPvMmIMKAwDNp6jYiIdeg5eeZcbZp8Hj854fPgue9OP8cM/+iMcryJff/EPeOPglLhOVGwzn0zpqkzsPcmsiGoADbOtCVZlfFdRacXQWolSMZoUUtk7R0pSGc6N5XRBdOracePaNe7cvMaHnr5Bs3eDYBsmWzW3Jg3T7Gl2G9yOY3l8wv133qMdBh68/z47k4bPfuq7oF2jo+e9V7/O/rWr7Ox/DDs10iimjFXzYhpkJLTbRynKEV08jFT0D742iMo311yXv+eb/pTTK2u61ui6FuOTcayYgWwxekJlwJkdLHOJSNno2r5NzWZGF768cVkc3z3GKAyOkdJGafKKa6/WVgplrcWlOV1gaeQPgg1qUzCrDdqx0WYltUFGYowluFqOyzlH27WknDGZsi4oNpNtddEgjM6VQDGOivKnOFuUfiWXaIMgjTnjfl1a/7Fps2bjcFmAvs0gPWfI8QJRER3ZaEC2aR02KOEGASvFgTCHEka7i+8tlVsec19hY0oiUosLNCerSw3JBr0br0H5jWVovcmwVRf6OHJpCBGapDay3/nSLKoshmTSb11o1Epbcul6lyY3XTSIl2mSY7M1opVKXzhCxlKvai70bSlEYceNKFw5x8sN2vh1KJnOm9eI0JXru2G5jddk0/yXe2dsXjcN3CUk10iYeUwtxk1BrdDmlLrKXLnSsDevmbopjRZnbaVH1tc4pL9s+PatX/9f3CP/f3tppYqhQiZnaW2t0ezMpty8tsdsVuHx9EPA93Hj+BYzaKOZTqdkFN0wMPSliSFhUiAVI4W6suToGdZLEZGUhqjLiWgMWVvICh8jy8Gz6AZiyOzMtkFXkBXNfEY1mV6YRqhEQizknbmwK0UVuqYS58IxiM94QdxScTkUC1qpbHO6WKlE0xdRuqTdFzQnKyk6Q86EwQs3X1OmUFJIo0yZ3CFum6VZ0zmJrbwxGzQp5NKo5UjwAykmos5SvKrEkCk3+MXiJcuNTCsFpRN3Mcn20mjtUEqoYUOMuGL9DuIGidYkrRhyQoeAHjzKi2mMtQ5bAUZhyuehUIK4JqEIaKtpiulIGYdJTACKZd2WxaEgtsFLYTY+yDptHCS994y5X6Ouz/uIshVx3OBzZlj3NFQSBA4S/G4EpfVRiuKUxZ2RLFqmEAK9F/t9ZSt0TKQ0MAxC+RIhrxju5CjZJxfOm4WimZHJoFK0vphFaFBWk3MS0Xp5flJIhEEcn7RB+P+jPa/WZaEVapixlWioCq0xx9JoqbxZJFNMDL2HBNZYuX+sIWlF1/UslmtCFnQ3pYxmiTESuTFpKrZ2d8jXIt/4q4/JNjOYyKAD1V+s0K1BZ5loj/ertZa2XXO8WGCVhhhAO6KBNq4IOdGtM1kZTv7cOe2PdOgEjVX80N+4zc5khssSZD+xEtDe/2nP+i/2GK85msEzf/sGvu3ps+fdv3TI4TMtOUHayzT/k4I/gBg8
frXk6NED2hDosuHa3g7bkwnBtzy49y5aJx7ee4/YreU+c5akYGt7hxATYdWJOY8RjYFsLrJOGS3rmtWGME6Ks9A5J5OGGCSwNabR+VYKkKTkM6zrCeglylkW7Yph1bJ39So7u/tSFBjD1mzGdFJjdAYix8eHtG3L/tWr7MzmbE8t6905CcvZ+Zr12nN2dsSjw/dp6kylNTvTHVbLO9y4/RR3b38IuiXvPrzHyWpAxYDWDc4qrs636M/OiSFSG0erDKp2TJqKWilqBSRP262plMPVU5LRrPrMyeND+j6IiVFV065amqbmvXuPmegKa7c4OzqgM54hONp2YDKZ0vc9moRXEaMydVWBElQ8RgnStYUZEYInJ4urHX0QVEX0brk8n0FcRI0m+8jifMnQDSilca4i68R0OiN0Ayo51m3Pzas7LHLgnYMDiNd5+dV3edQlqonjw889z3v37rO3M8EYzZW9K9y4dpsYI+enx3SnkXfffp+BipOHh6hFYDLd5so1w+Ojh8S153S5QhFZDZGz5QlPNjN+7Id+iN2dKYf/7UNWqwXLs3Oi92gFVhvqeiYDmqpia2vO1s4WKQVSCJATi8U5serRqcO4I1b+HFdfYRE9J11gWt/ifX+LL7w54UPfucPd/ZqHJ47D48TH5ltsbU3Y2Z4x2IGdKw1OSyZYCmWooxTKGKazOXfvVDjjCqo1FvIF/ZDKtozoOohn0Hdw3qEWCx6/+4CwWGC15+tf+jJXduZ0RrHjerabxGz3NvcfnbJ/9y72yg4njw949/wNbHrENLSsDh4Q4kDlHKausc6SjSEqTTDSjOcwIrqFXofsKa5y3Lh+je/42Me4srtHd37G4njF8fk9Dn1mZ3vKXmr5nquf4MbOLbw/ZKYrTh4c8Y23X2e9esT3vfCdqHhKM0/kPPDk3RpbrXjw3m+JNCwpbj/5WUgWqz1Ga7re4+qKkD0okaYpIjFLzzvG7Oay9377KXza7B+MWNMFJxVBP3PJKFQkFUXmUZqBIWS0mVK7CpMHyBa02SCRZRPZ1Dfizp2xekQwCiSBlqwzWyEZrmz0atKIJHTSKAfJFLfFIFTNKFSX0pgVd+VRkrhpWkZkKW/GvVJwX+ipkpJmPDJa5I+HrRGjbY1RGoIvhkkKpTPGarIZKZkjtlTUgYUKqJTo4cbmISmRheiCgI2onBmbNoWgf0qj0ZSRptRypRm0aEGmM4xZv1kVtFG6vUsNvAw+AQEaCjqZy+c8OqPnPMo8pJccj208p6AborXUzmCCEYlBLn4RPohW3ShUyW6VQx11b2osvaQOTnLs4+c73h85CbBizQXyJoOzsXG+3JwIfJjiRTOVy3U1RtwiYozSnCmFM1Z8CEaEPIu+btRoKn2x34LIh6ICmyCHiE9x03hTmBmpUMrHi50Ybf/lc7/IzFObofzm6RgH1EkYdAOe6AdC6Ono6VSPSh4zLMh+TUihHOMozUrF60WQ0D/q9SeiaVOqmGMg4lSdArXWzJVmVyUmjAHMiRwV1kohH2KgqiumM9k0ur6icxmjG2ZVhc2JrutAK2bzGUorlosFXduKQQLQDwMrlVFVw8Q4lm1HqANLu4Yu0FQz5nYi1vrNBDexaJMLiiCFg9HiHtlkRcwDyUcpuk3YTDqCUkRVlRwroQ1CYog9ffAkhTjeJcm8iCqg0Awhsm4HJAfblaJECuRKV+A9yXdYnYg+0OuWSdOInXn2QjscM7jUqNmRhjGXKYlKCp0CWmX6lOnXA26iqGcT6iph84JKQ08gKkW0JYg8UcTBiaSK26O2KJfBSn6Tz0DJ2LNWOLMD0uw5bWRjCPIgJ4pmMWVylAn4mKUhTWkWtE6Lm1FMopvT1klZEBI5SONilGSJWS1CZK1F36gBCt88hEjb9fJZRQNB6GtKVSRlyBqUsVTTGucMtdFsTxvqyoDNdENPHwNN1oR8xslpCyGQMqxjpo9Qk3DaUlea9WrNa69+nX5ouXn3Fo3TBKMZdETniKscOSDT83VPtpZJ5VglhQMqZamwxOxJzoh+0mhCRgxTGJs/WcD6GIjZC6KswA+Bzpgys4jSiBpDRqaifT+URV7CnAkJFWV65kMg5EwXI8thIClDSgqSxCGQxcHSqIbtT+/w0s+9yvP/xjXoFY+fX/Def3yM0pL5opRsxEHJ372GbCzONegVbLkpzK7i9iagPJgFxljy3Z6jv3HGnX9un9krFe/9vWNe/bfP+MT/8KQMfYrOVO0Ezv7awPV/Z4v917bo/uUVL//sfX7yP/s4/Q8k3vn8Cc985hrmpOLRf3ECNz01AR06Kl2xevyAxckJy6jpMZycLXjjG1+hAu7eusqsMkxjT2c8/dCLgZJ1pCgbQghrrBInWV05UkxMjKayWaJAUo9SlTjPKiWfhYrUOuAHw3JtmExEPG1Uwscoz0jWaF2LRVWVSbGmy1buMysaWrLn+u6cfrmF9wMn56c8eHjM7rVn2N7aAn9EVWvMfBc3OcMax8m653y1pD95jzpFXvniS7z2+ltcu/s0t+48ySdf+A6euvshtM+8/YUvcHD4CFtP0CQm8wmsArnJLNY9XVTYqLgym7JnNcvVMV0NrY907YDqDOH0SJCeELA6EoY1k6omZcvpuWftrvDg1pLlmUe1FXEVaZopQUeqnKltxe4EKp14q12jTS2OukXXF4csdOOc6doARTNkVKaLHgBtNVlZQk7i2IuSZy95KudIORAChC4xnTSoHFGVZz0MvH98xuvnLf/k1Uf0OdDME9/9wgvsNdt89zNXqEwm07FaW9pec+XuhziYH3D1u5/j5S99nVXbc962vPfggElVUVc1YW2EAtl33L17B6cUx4ePIHmG9pzTYUGjLXa6w/liTY4GlYUe65xiMm1oJhNMXREz5CHhsmbbzaT485mcHfPZVbZFoJT/AAAgAElEQVTOepzWhLphfvtpztQ27dWbXP/+P8/q1kfpbu6x9Ynvo7m5i508zXMf/hjTrR3mrmM2q+RaZ0guEVRE0kcTtbXU9nLGl0SDlHJPzEFKfiqqg3xKDI/Z3bc0Oy1td8qtauA7PvspTl3iC0eKL33tS1x74mluhj2efuqTHP7Pf5fTtx9xddjnCR143B9w5ldgLMt2xRA9NROq7S10aMhtK7T/sQC2JUswK2xWMrhMiqu3nuDKrductC0vf/Wr0PXQrjCVYaUnPPddn+H6934v+8/dYKu5wZNXI09/NvG9n/scKyyDTzRt5vG9d2C6JjNQ946QNY+6R3izoo4V82s9zQS0XmN1FMfCjCDqSkP2qBzospDWRruJcco/aqsuv2RE7Cn8CsaI+yzcCsgJnQ3Kg4sKkqUzkZQGpjqT6VmR8b6nXb1Pe/b7aHcT09wBUxEVI+cD+ZugPKaKTNwKZytcbIg5ok2FT5paVwxRY1KkMoYQPYQBlTw2ZfpK008tUxL1MmHJrFEMSgp/xUCKGW1qZK5Zivks9juSgSVdg9AgNTlrlM1EIj09yiS0tjCIf8AQg2TqYsixIF5aWF4pBUIc0K7COUu/GTCIbjqkKI2qcWRMOUY
/YSJ0cPCD4Kjz5nolaMxhNGkzGmrnHaoYeMrSsUNappUJWToWc/YEOiX7XM1uf0xrBQUsNcvXSpNGbqgmK+yRJOZSkBWRpypR42IeXvhCCwOGV0yb8TqnkIA9Y+EkX0SE2vlTRIqVh+VBYfeyARtVh0cpTcw4HMgDReISdIkZCzbMW0KmTRhxFKKSdizAxJNJXJaJzWaCu+V2NKTpyuqAuVNWoJ685lIDBuLOPRiKqqRGaIbF6NtcRoUEq8tHIeq4tmTX6fL66NpORzPlRHqEJ/letX3koltitkhggoh/jEBb+Sc6LrE9qL7973Ae/BByXe0VwkuoiNQmf9EKiSMl5p5lbTW01wljYpjoLiJop3MJwaw0IHTkPkNCuC0ZLnVqiqtTZC/MyZIQbxHWdRRYlEeNMnPyL5+mMe/788bUqpp4HvAb4KXH2kEbuPyCdBGrpbj/xvt8vHvqNpU0r9deCvA1TOUdsKtDzhKUYmWnNpOmXqNOBpGkvVVHTR44dSjDvDdDIiA/PoUUpT1zVr03Py+MDVl8ZUlS1aYXnTjJymsoohJsnvSAPZe7RxKB3xXYRYkbQm5SBZH1YzcTWVsQyItMzVjrqqcVVFDHJY9HGQLRsQkmZctoN+CEJPLIbxUJo3lYUe6UYOpeHk+SUv/ZuHjN4qIX9aXUxT1KfVRdB0a23JpJPvzVmD1ZsMjEx6rxwOKSsOr61wa7jz4VWRDonhPqZETJE3PygQlpVP+OcTzq4umko5eEF9t6IouGQr6fTFdO3sex9mOWn9UAYQUxQ9dnkjhiCQDecsRiuc0VgjII4NJABkOhSKr80aS1M3+MHTD/3muiGnxDpD8In5zppVlTj81AI/yGtckUlaM/eR3CNmTzNwrzri0JyUryNSkZiSTAFDYvDS4IQqYCXABUXm1HTU1QKNHFTziea2eVWKq+hJOdP2gRDgpFlxf3yEJaHzIOt7bemDZ0gZrQ2pFPZy2MhqL2VhFV0g4zPlYDyjqSqzL7FyAAAgAElEQVSsNWVbq4rUUp7bPgR8ipLpUqR4Ci4muZvnTF67jNE9Wi+B+2jNBV44JYGLJAxvqzslwy5eSMnIiig/DAEYClQjBV/yeEoswgc87d+YU5g3KK3wQ0+nZIujtWWZliVeZbPx7DhnRVaGajzluOo5VXfIKbBeLglRNN5KayFloTG1AxItMFQVSzsHDfHZyP958Pe5093mrcnrdD+6EmJn2aKxkacoja0Stj6lrztCFq+qRpXMFx4p8NNF+Hr3/nOe+8m/iCr6e2sdQRuigaBroo7UozHvefZ9XH3sGrdu3eKt117FlOZZaUXQinkbUJ0nJ5muppRoQ0tlO6ZNhbKJruTyGF1yY0pkSe8HkQU1FtNlEp7jB2es20g/dOikGPo12SVGo4ZKeQ5Y8exBxbWDEashMNnaZWt7l9UQeLBo6boV44mUj4ZAnM8YVoGoOm6v1jy533DJGY6XM4gt/vQe1eWnWdUOMxnR+TVJa9R0iskJZR31/h52XNGFgWQdzc5j6CFzNV+BWeT+y+/yzO6YLkeOdxte+t0XODmd4UNkMV9hz1cMg+fu3UPevnmHxXpFXdfUdYMyjq7r8ENHZStmp+cMQ89iNmOURkViIhIgZy3GaIZBpqPG2qJ+kCBv771Q12IUCX6USbtxIkLL2jBEj0mRwXse3LrJ17/6Ve7dO+Sm2mHniWf44Gf/IjOrWZ0v6ZZL3nzlVX73i5/nd3/zN1mfnnH4t49483/7PVb/wcCT70bOdj/Krbducu/uEe+8e493/t17XH3zGfb7x1GxIx3f5+qVq3z/e9/H4Zuv4r9/m36/oU+GS+//BNvvviubom5JjO3F4ZwKFCtEQ8qWmFyRgksYsLM1PiqBaWXZ0JMzVlUCYHIKZ8Eay0iBQA0CKQVMJZKn+dmCfmmonKFbDvRD5Px8IGdLVg0+JO4dzjg9k5zHdtXjB0/brfCxI3iPcxV1VZGix/cd56cDdVMBMJ/N+NLnPsdv/covs66m+Ooyzh9x780/4nD3No996LMMccQrr32Lf/pbn6fe3uN00XN+es7B7g4Ww16zTzBjjo7vsJo/YLU4I6SenC3LlWfo16Lo0Z5mUjMaT0g4zs7XSPaUpa6nKCzWae7fO2Q+n5dtgCZGT9e1hBBQSdN2nsPjB3z+C7/O22/cxA4DPjmGagfjKi6PFYtuoK5qxs2IwQvoym5ufVmGC7mc1z5E1kOHIbPVjGisULN15SB7+tmSb7/6Or/2hc+zWnmWiyU5SH1gsmD1TVVTuxqbFTqmCxAVUYAaypXtnpbBXUa2sUo5ula2UX/8Y5NHJlh5acQ0OTt8UKRkAc0wxOI1yiKxR2TeMXliKOaqEpq9WdtpY9H5YfTCpjE0uoQ1J/GlDv1GJhg5Pz9H5cx03NCXBkjCqjPaOlZdz9u373Dtqmdrfx+05ezkAXHdS1bpIIAbNwxoV2Eqy9CKzztrRTOZMtka087OOT0+Zn80ZTweEzAE79HkCwmgtbbYAGBjWZDaSMtgScs2y0fxv28yx6QZL3LJEv5MaUq0M2Ql3seUMrqQNGOUUHZlnFiIdMZWhhB6otUMBrxVBeijCMrgc8ZqSyYQyYScMc7JdWcMFJ6eKSummIS4mTck9KJsC8mjdSw/Y1EMFYWcxAdIa+GcbF6dc1gUzslgxBgt5NPwUB55UavAxdYxpQxO48bNxULGKnMRm5NzvvDKpaSotSMpGI0dUGHMCEuN1muC70jRoFRNimKLCAO0bSBEzZAjxjnxypV6iZQhKFj02CHjs+bMJ1Yxsw5w2AZuRThRhtMeFh5WzrJ2mj4I0MzGiInFM20sxjpSGBDYTvEgFs+1Kpv+P+3xZ27alFJT4BeA/yTnPP8OYEPOWW3a5T/jI+f808BPA0zHo2yAbAVlq1IxyY4CLlZQZGs5BKJvSSFSGU3lNKRADEEgGUpBCOSDxNGPtjzx+jbkSBpnzFgmFUPuCMsOYzWVzkwqCV62ZOI4Emwm6ojPlroStG1diZyEMQx48iqRdSIbQfmLmdg/okWVgyMnTYqRFGVLs5HlSvZEmfyyCSKWg+vqr4547P8YSbNmSviosxgr4aKT0Yid6YTKKHQWKcnWuKKpZDpgyyE3DIGkLH/4mWNG9xTPfGsHa6xsvZR8zj54mU7Ziptna84Xa0ZNzaipUGT6dYcq+FYB7RWp2NgSo8gAtTLEAmeQTBGL1pl+6OmGgVy00m27YjSu2ZqOmDQ1hEBlDeO6onES5KnKpLHvOsj54gbXrVv6rr2YXtnKcZgVD47n/Piz38XJTuLFk1MOD5f0/Yp9lVlrzburQDVoGldjnLrw24mOOTIMnm4YaNuedtWxnK9xzrK/v8OkseQwYEhsjRp2p1NsVuxu73L9iSfJVnH7ztu03YJF1/PgvGXVZi7tXeaJqwdsu8RId4Lbrsecrlactx0YS7/qwUeUL1MmLSRDXZqJGGW1b6uaZtSwNZ7gnEaTsAq8DzIdMoY2eoYQMcbJzTkXj5xW+DI1o5h5jRH99EMpguTgCBI4kZQDU4l3
ruvxPmKt/D4GadJTFGlFlxPOWVK7YmogdSumTcU3rr3OR/7uE/TtwHrwDDGw7Ne07YDWFTEpfAgiPSTK9Zc01k2pJntML19jOhlh0sDy/ITje3cIfpBDzTjawYN2TA92SAzsNSMev3wJQ2KyPeHf+mv/NldWV/jpv/s/M3z1Ad1ZRbKOZKxsTrNAABKaerLL7tUn2LtygzZZ+iBB6eSHkRMpRWIKF57Em//l79EuF2ilcVVF2/VkpVHa4qoGW2uuPHadT//QD3Nw6QrrL36R+3cPie0CkwGVL+QytXMCoCnDkBADofeEvseRsEmIuf0gMrWQAaMJObEePNqIzHaxmDF4j9IGiQ4xKC2Dj75bsco949E+/eIBmUizNSU5y3Rrl2QbVqETn0joRd6aAiPvUV1LriNdF6iXLU9Odrl5fJtKK7aMZuvgaZqDZzm59iIHly1Hl1/k/c//yyIrvHyHZ577EUzflImpZ9KMYNnynrxk3z+gX8549oNX+NbrL3O3vcsL3/gGfddL7tLQo3zi5BNLfuvxb5GANR3rZz3pfzhEaRkcxRiIbuAb4zntjTUpyjCqKGLEmxFSue6EfJpyLJIZyWqKZQqekT/bgEsuX7mCMYYHJ0f4PpGUYt21rFcrHhwf4ldLwklHPu34/c/9Ci82DdZBt37A7Zvf5ku/8Tli16ILeS+cdKRl5Ld/8Te5mV4l+kjMitmiJfQDfddT1xWTrYrT0zf57hvXoe148e23iD80IlZ7kDTOjrh2NeKvDpwNS1LfSUxOVqgCAzh+zNHun/LYk0+T4oCxAs9KZRjkQyDnAiVJErmSQ08KA1plamNwxgjNLHuCH8g50lSW8bhhazqmspZFWnKzOePSpcfJOTP4wNAP9P1A23qRr+oKbRTaelLsMVWDqysq54SASKaqHKNxjXVSxJ6teva3t9E+sDx/k7XXhKho3JijW2/wTLfN4Tu3WcUWaoXNiVoFXvujb9AuPL7LeN8xxFm5DhK2kvvLupONH1rx5z/153nyqSfo+oGv/v4LND6wWg+Mxg1aW5pmgjUGrWQjOx6P0VpT1yMe5jcaLj92jfNVTzgf8EmxmC1ph8xx+4B+6Lm+W1FPt7lx7QZaiz+GHFBZYkRiCuLblxBXQvK0Q4s1ge2s0FHys3yWLMs//K3f4ad+5mfoK4uPllu3HzCbLRhPHNZlPvLch3j++Y+xPd3hhW++yIt/+ArDEFkrS1CGrDxYUzJQIypJVpqPAkoiJ4h/jEzrQi+WL2oZyUqFMERsbbl8+TFc07BYrzibLURyGRIksYXYpsLWhqQCo1GNrRy2kJfFMlCkuimhjIUk1y5KKIkaaXaSkszQ4AO1gp2tLYaqxg8DQ9fT+wC5R5EJoSdrxTOTMQePXaGqao6PT1isO4ZhwMSaHItUOj+UfrZtx8nxCafHiWG9JLUrXFYikdUiezdJpI4bCaRSBmOtWBXK4O07GxNpPBW5+NySDP5LI3zhVdPiuUs5y3uxnHkyQC+wK1Xo5Knc68u6RllDMoaA0LDr2knTGyXnWEoNef0ftWrIxseQvCh/tBXyulIWYxR13TBuxIfoh0GUCUqTsiispOe4EDQSQlkQRIEKyiBXKIkGK/aRi3gAyjDNoIyQG3MOKGPQdYV1lTRBylx8HYVA16yxIkX1mqyhbiyunoByKAxGi8UJLDmHCzl52/XMZguW3YqdJqCxRAJaRcQQrFFRk9c9/brltO+4HwYWKFpleIDlJMMyGboEA4lBK6jk+d4EzFdWlhMPN8rFS1jq3uJreZgT96c8/kxNm1LKIQ3b/55z/sflw4cb2aNS6hpwVD5+B3jikf/98fKxP+ULADZz+JcfsHzPiuQ96+cH7p+3/It/832Sm+YHkve42uCaSnSvOZPCgFWGejwun0rT7E7Z2fE89eST3L10yuf+41d4+vgqVHCnOuZH/8f3cqXfEZlX1xLaluV+z6/+jXfYPhlBVNx7Yslz/9U1qrcqtNa02wN/+JO3We0Gvvuv3sBqTe2sEPZSJBb5TS7QmY25VBX/WkTWxVYbMXOqEhyeEe1tViStUNbi6hEXkh0UWgn1ShUD55AKzCIL1rX3sYRDRmwzoqosIaxJSvP8V5/ADx4zEmLfMIiMFCPbOZIlGYurAq4KZSsp2wZdtPsq6otiU2RDUtikEFGFcLjRyG/W1IJOLVSgtDm4TJHsGNqug9qgSXRtK342XUJf88abCG3XkUkYZy+2G9WopopygGcNl8dbPPvMDvPhHYY7C7SV77+uLGNXszWZItuSgsNNxTupjWCXsy4Tv9J0Fz23NrrERhTiUYwMXc/ghSqojfi//HJFiIK67b2n7weils+RcqbvetZtL4HaOtEPPSYWWE45iEOMJDRV05A0MrlThhQV3bIlRymcNOXQVo4AJKuLD2DAJKi0NPhJKfoUSUnIhpssQ1SSQxpQyogpu1CrrBF4RfCJlBTONTjXEKNsBHXxwbhmxGQ6ZffSPle3xtShJa/n/OhnfoD/7rn/hf/6v/mb1MZhRg1n8xlf+M3f4De++CVu3Tqi9+L7iqngk7MiZcu4nrC1vYeyTpDBwbOcz9EldsEUibHIDB0uyWRxNK7JlWa2XBI7eOVbL/PGS69x85W3sNFRVRMGDbZ2FxEbZOh9oO066vWacbsG2whkxmmJ2uDhDTGnSGUUI1djjWZciwy677tS8GtcY3jsymWefu+z7B9cJcXM0dERzpabTa8LiVShlKytQ5CGWsOFQT1nRecFImNSxvcB4+oLzkQ9GZGM4nwxZ4gZZ0e07UrIn0WKUcJGIAvJa+UH7p84ztYJt3uJYBtUioyn2xwdnzJb9yhrC40BIfeaiCIzGjU0MXH/5Jwr29vks4pmS/Oe5z5Ku/0Y9tJTLMd3UMFgzIh6fLXIayyrzqE7Q86R2hiqFMjdkpZDdg4qvvC7v4feuc63vv0ar3/0deyhpXLbDERBo78LT/3YVS5dvkwz3WZ1KfHOT7zL+3/qIzirLjbTdV1jtIS7V7oiUZQMRWaSchLzvVJyDsSHt86NNEmpXGJGLMla8jRz/q/MiEPELCqqoGiHjnkzp73S0v3AgNGWeqoYtnu+evkfUo8naKNZrGecVScst2YYI/Il/4GM/Vcb+LBnvXTMu4QjknLEL5f0yyX9/Jh1t8VZO8fFJXZY8Y9++Zd48MSIMERM3mZiDD72mEahbaJaRZKypAAk8QblmGlGDblpmEzH+EEBASz0Xc98vqRtW0ajMc32tryvs8i0VbluusHjlWwGcorE4AlhYBGFBGitIUXPuZ/R/eWe5UqC69d9oO16jNFs7+8SvCcMnnYZsNaw1ezJlj0mUXKU7cS67TlbzMUXbS3jesTJrGU2n7G9t0VSFSo5ju6eksOKDw7P4gPMli2rdsHh7fusl4Hjo3fJ0TKZXMFamewPQXH52jO4JnF+OhM/UPY4m5nu7vLMs8+SsuJr33hRNqqVo/cBV2msGbG7s8Xe3hkAly4dsFy2zEcS+A6KyfYen/yBv0A9zLlz54g7d19jcTbHVGO2dvZZ3L/
L8ckxO8B8MWZYL1iGTAoKheRcWVUCoylD4GHAxwAahkGkfMlHzhYLxkRGqiJ2AT2uuX7tCWy9x9HJMZqA0YH5+Tm/++UvMxlvcXRyxuliQdt6VhkW2qLNhLpsRXLOWI1khAZpIpJ0Sn9iuXYhMS7D8pQDWhnG4xG2qchase47bPmcWoN1VmRyJuMaK6ANZzC6wKCKx2pTLEvjUnypSRQh5Q0tcKEoWV2TpmFaOXIOdETWUdHnLFveNIi8Hc2D5Qp19x6XdraplKEajxgbQ6wd1c4O1XgskkzvySlLExnFy4lOUsBbx2roSbO5AOkyDEoRC+Aqa4PRCKAkRnJ6KPejfE8JuXdp8sUmLgybRk2aNO89KsXS4NjSHPEwu6zUWXLOizKAlIhBfN8oRdM0jLe2WK9m8ho3NSMq1KIlpHjxb8qUaIB88XrmosDR2kgTlwXj3/dDARgZQoj4EArQQwn10krtdwEF1PqhckUpacaMvsglu1DqXtwDdWkS1UXDCnImNOORQDuykC1zLpswpSXUXhlpmLXC1VYojUkUVH7wRCXDVwHVaHQeGPqBxWrG+fKMfdNh6wpNB0SJ6MFCtqT2lJPVMfdjyy0dOQqeLidmGU5T4jRGWp+Egm9EpmmcocoGWzkqY+Q9vmlQVanv5ccrT0/J4rvQSP3xjz8LPVIBfw94Jef8dx75o18G/h3gvy3//aVHPv4fKaV+FgGQzP5UP5t8Ee791Qcs37vmyZ+8Rlguyc9F5v9pjx2NAMfAkoFANTaMY0P2iSF4QpMwxXgabKLKFWZSy+rZWe58bEG7G/jX/qfvJW3Bf/Gf/wInT6+48tIUG6NMdFPk6OklV94Y8Zl/+DQ+aH7vx+9y5+NnPPnWARl4+8cfwKDIJmFzolKKWhtqY9Bl5Zs3U91iZBawfC4zByFcCnBFQgHRZc1rJLQRbWXiYYxogsuKPStddLbSUA0poZKSzQqKIWXqDBpLSlpyKTD4GPBDInqhRyUF2gnGuu07fIoEEkOExarDh4hzEsZsXYFlRERjHbNM35SsvEOI+CEWaqJMMJFzo5Cb1COHey55PALADknhqhFV1YicIASMc2idCdELGCNGKa4yKBLRD7JhbWpCB0ufLmRl+82E+chBZSVQs2imrZI1v1GSvxWClzdIGWWkJMMh0qaYVkWfH0nJCvUwFfhFTjRVxaiuscbSp8hqvaL3a/wm7wdNVShzOfcslytC1qx8Yr5aEazBVPIaZsA5g7MWbRQ+WvqkWPUD3eAZYiauOjFqg9y0o3hrRC4nTZtypmj7hc5XaYOxjqBKQCZCa7PGEqOgflzdyHZHO4yxVPUIAKMitZOBQN1M0NoSA9R1zWhnTM6ws7PHRz/+cXbe9wHM1phnLk+Z3b7Nvddf5qN/7hNs7/4sTz37NIvTc4ac2Nmb8vFPfA9ozW//9ld5/Y2b+JDwoUA7lMa5GudqlHayIQ4e1bf0bQcx4hCKU84wqipcPcKERF0ZxqOGnkBLZHZ8yM///C9QK0sOmT4pUj0SpH7ZRJOladcZyCJDMfphzk1KUQqWQhRLIZCLkV9b2QjrJNvzWksYuU8ZnT1jZ9idjJk0Dp08i3XHajEjDh1GScYMWbaVOUeMluFCzBHvB5mwKY3SlbxmKQMBbWVIFVKSGziJdd/TdgkVHSlZmSbHEtqacxl+RPyQ6FNmuHqFp57/C5zEmq995fd54cvf4Jv/0tcxN2+QeoFGBHm74VVmrvuyOclE77npB9zeLs3OFud6ibt+Ha9qdprM8sE9Ygfr3TPufPtbxBRZzU45fOclbF+hlWK7rlj0c65eaqj3O/Qocri6xXwxwxtPJGKzJw0JUmDoVhhniAGODs/Zjg3bT1yhdkfsbV3CZi8Ap+JJAy5u0ko9pLJdYKN5OMEUmqwt4CSZuBarHAL/qfjY5z/F2q0wleLygbxe3nvWZsWt6h2uT25ILEcz8ObqHR68+i7W1RKYqjJnh0c02jEeb/OB93+Y18avsf/YU/S/1qMbTzOu6c5PWC/n5NShVSQNC4bFEeeLc6z3fOmrX+P2vKUaPYk/z9x97SVGe/uMpw3D2KNDSx6W2GpKUJnoAzErQgz40DNfnMOdd+i6JUpFYtfiu06KGC/erkMnZn6iwEiMfliUX5BPM6i8yRJ7OGDQWtHZnnXbcevOXVDS2FLATuOmoXKOoe1IZUiCMyKjTxmiwMqN8cWcr6nqCXXTUNUj+qyYo4mDYXl6zMRk1os1577hyy+/xr0Hdxj8ivnpMY9fvcGonqJs5un3PI5SE87nc/YuXWG5Mjw47ZmOt7gxvsb52QmL+Qk+9nzlK1/n9p37fPrTn+YDH3oOz7cZTXZ5/MYz9GvJp3zw4IzlYi1DrqS4f/+Io/v3mW7JoNiHzAt/9G0+87EPkfWM+yfn1MiWZHd7hz3fo9vEg/NTTo7vUylNNdnFbl0Sz6HN8rxniYdIZIL39N7jY2S+yBy9cxNtZ9x6cMiVj7yPftVz6eCA8yrT+0AznXDFGh6c3KPtljTRspytOFsscfWYK09e5/TBAoci+wiMxaelxe+UFFKrKGj7jtVqRYzhTywQS90pVoqNb0cnfGg5fnBC6z2d9zJQtg5jDSl52nZgCANuJLmudeMYdraJVS4ZdWWYsrkfF3mSkLalhtJKlfM7Y7OiMo5J0xBii+820LcS32Lk3Mza0MXEvdMzTk5OqAGjLBhDlxJWa6Z1zdhVEsVirUAzUEzqEftX9nGV5ujebfrlEt+tCT7hUURX8R06s1JH5CyDo03MSCYXOdwG3MUjEnwuJNrKyobbGoc2RiAySYZxQuqMArDQpWne0A5zxGaJXaqNLeq0iM6K0Ach2OZMaHuUFXlpRhcPXZGx5nzRPMnQSyTXWgkYJ4RA6PuHcSghkEsIe0wR+oRSVjIJjUapWGq5EvUA+CQazBDCQ/hKeeiLgfKGICrDdKsNtbVUrhHvWhQvqVhppPZC+jeySbKtVZFMKD9nIKSBfmhZdWIHaFVPDFO6bsFqfUo/WTLGASsygc6vGbkx2UcW3RH3VoccxzmnLjF3jkUcWFXiaVyFQI8QuoOP9ASM0tTWSfB3TIXmqS+a7ZQ39PiNRlqy/EL655dH/gDwV4AXlVIvlI/9LaRZ+zml1L8HvAv8G+XPPofg/t9AkP9/7c/wNWif7nn8v38crzxBwda3K/TXNF0I+APPS3/phPnBmvNnO57/pcs8++vbrA8CX/jbt+l3E9/1Ty7zrb90xHO/cZWP/fp1ur5ntpgzhMBnf+oDnD04x8w0P/qPPsjRh2a895s9XYjUVcWoqmmajhu3LbWqyFbx2Lu7/MHfusm139xjvNC87x9c4dV//z7LxzoIEV0KP0MWFDhcEPooND2l7cMTpLwRLuSqaoNYhaw0cUPzQZHKgZ8uiDaFjIRsVUJOqJjFH+YsSUHvBaCw7Aa0UjTjEeMKOt+RVfGJJdieTLHOodZr8ANEj/dC+vNJ8iI2uuMYcyESyQgwUSZcueRt6Hghj8g5kbNIJE0xHCulRTYgP3BplhRi8Das1h3eOi
ajBh8jOfYolTDOFFlpoKpkslI1tchoXE3rRRZV1xVtGGiXKxaDJuYoB1UhDsVhIFlDyg83GinLmyWEiPeBYZAQ8VAmN1qri3W1jwHLw62cKajsEDxeRXyMGOuYTrdo+xX9IM1bCIINNrmYe8uGcPAeQsL3AtCIxhCioAE6H9DNiGgMy2HN2fmMkDLWVlxkmSTZfhilIQ4EAFtwvRniEDCZi8Y+lYByadqEEJWUAdUSgsgcrauZTrepqhqjApWTC1lpw3g0xdoKnzzGZS4dXOGTn/wUP/zZz9CO4K07dxhWpxzffpW3X/l9fuXkFd74kZf42V/5Ozz52A129ne5/tSTxH5O7TSf+uSfw/vM62++BQoBChnHaDKhbsYXJCmjNVmL37EvW9EcxW+hCyafIaBIzM/OWOlEyBHfdoyioU+atvVQVaRKk72nShKVoHOGrBiC+AGGEBi8J2lFRF/Qu75jE6a1bP7ajhA8i/mc7D0hdDI4UIasNO1yxXIxZ7K9x9bOLotVy5279+lXc1zs0aWY3Wzvoiqwi+jZjNySssSk6aLHZjljYh/pvSckJX4LP3B4fMzZ6YLcwapriwR5TegGchtJZ6mcGgCOB3mHn/vi13nt1ivMH9xj6jP9MLBjgMriY8Jm2W4bSR4nJYefJ8Zrj68rQq7Zn+5y1C2Yzz3blyz7FYx8y+x8xTA74/S1F3FWE5dz1u+8jlrCzvYWzd4un/q+j/ODP/gJ9hvPmy9+GTPaww+RGCLGafAVld+lzR2qQAxQmtVyTaxXTJCtcFU36JCprEYbh3UO59zFObTZoOlHJsabImkwQiQVqpnBGkvlbAk4pby/FY+/8xTi3REwhfcDVmuGnY6TrUMuv+eyZAXWnuxu0Y3FN5dyIMSBYRQYjRzUiXk+AxdJ04GZPmXvJ8aYHcvy1sByFrC2gecVnTEczlpmc0/oPG8Pp6hxJhy8jDls0L//cxwc3EBv3yBdN7x39xpTMyVWGm0hGF0iayLKZFL2xNTiw5p2vaTOCR08DD0qJlEBtLJhVCmIL4Yi+dNO/EhyI5KBRn44Ad5ISNNYpNWL9UKe6xKSnckcXN7luz/yHF/5nS+zWs+wrqLz/uIs3myXQhBf8eUrV9m7dEDXDxzPO5a9F4DMekFWidH2ZZpLHnMiNMfZoufu8TG74xHGTPnwdz2HG684PT2hD+ccnp5iFzWXL3+QS9eeJcWWg70tuvWA27dok7h6dZennn6CxbJne3uP977n/fRePFrb0ylvH71NiAPtek2MkQbsPOwAACAASURBVMnjE9737Pt46aUXqRrHeDxmsVgT7tyn+tQnWK4HXD0heE+K0A4Do/GYutllWxsmdcXs+JjD0wXdosObEZP9y9hmUmSJcsanHMVzFBM5Kb75B9+A04HxY/vcuHWbb37zBbqup8/w4Ow+h6dz2nZN3Wi2Jg6fgnjinJADTVVjJkEGCUMARnij6HPHxo8k2bOZ9WrN+WwmIdDljv1QtpUv/kEhypUQiSGIvAtP169Ydz0BkfKnHC686jEkQsy44IhEYmyIPrKh9UnfVsAdSc5HRQnSBpFKhlgIhwrKYC3FgFXgjGY6HoltI2X6IQgcyqgiBS0+KDJD7NHG4rMiDgOjKB5uhcgfK2sZ1w0GReMqqpEtg3g5J2JRRuTyfng0ZyzEQM5CCy+OFKBkCFMGSWXToh7ZRm2alZxl45pRQnUu3rcNS0AhAC61qWlSxKColMFnIRuH3hPaHu97dIzELuJTwJTnVhRX0qBqpeX+k3ORY1qUEeebNVZwRUaLDFulhyRMVWTnxfMtP2wEHUWmqRy9HxhiLETb8nwVz7z45tNFjfP/xv9vfj+0HbMhAKIYUwjUbNO0aSXb2WwUmIQfV4ynjoqxSDxtRvnI4NfQrem6BYOLEDtWyxnHp0dcHZ/QTBWOBdGvWa+WJFPx7gtv8PZXX+H45msczU846VfMTaQdAm2IDDkRdAKHyE83tQqZpAzRD0RZYV9sHFFaZL5luxazxIdJ7MU/J/I/5/w7bEaZ/9/HD/8xfz8D/+E/6/M++tj4pvIgmUghRUIXeeYf7JInPWcHc8wSPvHzN5g/3fGtv3LE45/fYXq2xTNfP+Clzx5x47e32Tocc/50TxvkojRYKlsxsiNS1vTdQIqaP/jsXZ7/xadRXULXNY0z2GrENz/7Jk++fI3YGkKjyJYSJhjJYZORUZC8aHICnRWGDbRC2rVUPAwamZyx6VVylCYITY5BVKHGYRUQEzmGAqcQwmTIFPTtI3CKLPEEKmaUhaYeUVuNpuDky5ZptDVFVZq4LkD2gjaNWeGKFCGESD94UjaS+xFlgwNGDsMoBXPSMuFKSg4ZHzY0pI3+OpY3cRBFhZUmgoiYZrUiEkQznhIp9mxNJ9ixxQDTUUVKAyFKhodGKE0+RDSmNLgCk1kHyc4Zq0xmoG9gpSG3Hc26ZxEDQ92QlMXkjkqBRdFF2Qx23hNipA+Brhvouh4/DKQgjaBRckCprNFYVJasuZChT5FuMWPpW5KFvh/QztL1sFr3dF1gtVpwohO704rt8UjoSirIRitljKvQUbMcWoKCNmRWbUfnA1e2dphOt+mVZtX36JTFSG4tMUSGYZDtT0qMGsdW3aCMYJqV1gztQNe2csNIWbTbRmGqgt6lPI+y5iOgqadjprtT6tGYHDOhG1iuFpIX1kCtHYvZivPlwO7BFQ4u75N8y9nb32LkZ2RrWN57nTA/Y9ht6KPnhZde4f03nuHq3i5ORd5zeZvjdzR+3vLxj36EB6dn3D6+T7SIz8QpvE5UCnZHY0ZNjd3ZprKGob+E0dAPPUPwLFdLqqlmNR9YrQOp7fBKQCE6Q58NTsu22A8tXrUolUushsiMc9aEbEE52vOBRZqjbIXSlhhFBnaRtYhCkMy+5NpF7s1acghovGw4jUwXV/6co/niQuoTYyJnJHUp9mXQ8ZByKNKzdNFsKGWE/JVks0EeigTQ0cdEM9qhyRPm5wvOz05o21NWZysZG2VDjoY4wLiacuP6k9jKcnJyH1uNOW1XfO0b3+R8dcS0Npx3A0NM2GqENTU2b6DHYJKjjgJmgMAKQ+dbWp1J9RgfEt9+9TZ7B5FL33ednAaGboYfVszP3kWnwNDNCWe32AojfuQzH+d7vucjfPLTz7O3NQIyw/HjfOx9H+bbN2/y7mKJ9oG9rQPGVw5oq2Mmkwmr9UoM766mcgqdxW/V2Ix1knOnnKOuRzR1zSZyQmsp3hTyuhmlqFxN8J6z83PaNuBMZjythS5sxSyfskiP+xjxEXJKDF0v0kNVk1JmEnb55AufpndeyJLW09Rj9rYPODt9QPA9IXhq1WBTzXMfeh5XjbgV7sGq40ozZn2+5OV3buOHHg04a2hXPSM/YUzNbH1OGgrqe52oBkP1Bw1tN+PO/QF9fo46r9h6rOX65WvUozFVMyHUTjbjlciXbM5sOUs9npCHQJ2DSB9J+DQI1CBDHErelxZKW8qZFPpyQy/ldDFe6ot1ZSqDNfHtqKxwMRZVQ8LbzHh3h49+8pO88OKr9
CuZhis1oLRslqzTBN8LPn9rwv7+PrVrWM46+s7jmhqVO4ZhycHuNvuXtrnr7zIs1wz3IefI9vYWTW348EffS2U7HhzOODtdEK3l0vWnMXbCYr0ipnsYn7j+nivMDzPnvWywb968z8svv4ohs729w+OPP8lksst6fs7p+S2Ob98CnenbNV3fs5jt8X3f/0nunxxxPjsj6p68WjHKPWd3btJoub91ObM1GTOaSKgzQ+LKwR5PXH+Ck+09nLvNncMj+vmSZBXtegmuliFoEFlanyWupV12vPzKGXtVQxV6vvQbv82r995GNY4RmmZkSdWKpZeEtzprRrYiVY4uBIlN0QGUeDa9H8jaIamzIinzOUCKjBJMvMG3iZi0nJVE0BmTJYoElfFKEZ1GjTqqrDGpYvCZ4BUxamIEH4ZCNs2E4CVWyQh8wWqNI2OCp1bgSrRTzDIoNgnspu4RoxOyTVFIjp7co4ecOV0u6PslUyfnqVaK2mnGlWHo1kWtI4HpIM1WX5pOlRPJSN6aqUdoW4kfLXQoHVA6lI5LmqWcDMbIWYOWIWKyluCECkiIGCPD9/JGIYaAKU3jJlYixk0GWDnrk8RaaetQuiLhiF7qSKOS1FBeBo9oUUFlBb33pCzZncpa+tIM9UkTtUMpK/aIxpB1FPBHYy4UPFFB/n+Ye8+YW9f0vut3t+d51lrvetvu7bSZ0+bM8ZzxzDi243EcO2AcQIgygFAiEApCSFEikEBKkOAL+YZCAMlSEMRKULBTSBRsFOO44D721DPnzGn71N3fXlZ5yt34cN1r7T0WScQHpFnSkY722591l+u6/s1Is2RCQodEVCUQPUaskRw4oXiWYXheIa1FP6cCCslFVli8EymGzkJlCgBVhcZis5FhbIyQpBnPqxo5p6L1lPdanE2ljhw6z5CFtrpiW67yT1Mu6Ks2+JVDpx+z3NlmlBO+LJ1qCKghoqoKNd5A5Q7VBbqzyP7pwNHFQ+xJyyh22NAzn59xZgyPFiccK8XZpR0OFufsHQ4sQqZLmkFbepUIWeNLrmAsgW+ezIKMQ7LtGq0INpMs2C5Jrl3REMr9I2vln/f6/+Qe+f/bK2dUWjnZAQj1cKOqmToH+yMebs/59n96IPSiRjFqJozrhsqd4FrNzscN1w5GVN8Zczg6L+iIXMYxQT94cfDzkUu3N+i80Aa7POC7xO7vOW6Op/zSv/EdUlYsdwMb71bYXmG1JhvKJFIQsJRXE8JEXo1eCl91neieotAalVD8VlMXSg6FfJI0VM44nJZQaKsUKccntGCAovybkoZCKaJPtMsOr1WhfQVc7XDOshi6tfFJCgGNonGOvh3olgOoTBoSsRNbd4UusH4Ww5EsDav8WYWyoArfUAt/vASoiDVscQ+MuRjbKs3K4l2hIYl9qgycAm03p9JgySxSKxkexohIO0QsGlePhXNNJmdN1jIld3XDdq3JQ6C3mha5k6ZZcVo5onGkrBi7ip3NKZONTe7cf8jdew/oh0g2ipBkwi9GHVLwqzKByzGhEjjtygGiCCnjKkvVGIzJkD1VVRGxZBXRpiJlj6sdk61NUk4cnnf42OGTp21bjJZIibq2aNeQtGW2XLLw0IXEnQd7jMYzUoxUTcPIWPldlDTeMsGRCVc1qqlrJ4eDl4LK1RU+BVJQaCsnW1UZmsaKq5gcv8KXN5qNnR3q6ZQL165RjUacnCxZnHcMOdH7njYG+uWSbhiwo4p+GOj7BUf79zh6/5tkM+fEJ9558xvEZealSz9AszHlNCTu7O3zqZefZ3r5AtvXDdWFy/zCL/wqtqn54g/9NLdOjxlvTdi+sM14MmUy3WFzuiv6JCMItUwsfRlwRPb3HvD2W2/y7jvfZQgSRm+VaLFUzsWZjHWwcCqUYFiZAGlQFrQVcwTbYFxDM9oQVFzpoilIJSBVpp0UbVQm48d32H3qOWnWkuSnrZHQJ9Ca1Rmg1xd+0cnBWm8lIuzEKtI5hUSIK0Tdk5U0KSY78taCC1eu8ZR9mm75kH4xxxIZOaHiXLt2g2vXn8L/8cS1i09xefcm777/PmY8YfCRW59+itHGiPv3I8vFGXZjymldMwRNf+YZQkZZR8pCq0WPiAGGnKm2L9HoJfvnZxzrYzrfce/uhzy6f48P3v0WBz/zCaMfrTnf3Kf7oW9iyAyXT9n/Y++we/15Lv97jr36Nr8yvYd1lmE5583F7/HxK++zd/mA4+Nj+mc9h186xB7NGC71nP30jK7vQWumWw1ps+fgyl3IgRw7shGGQVPV1I3EupAVzmoqo3FWNIQ5VlgtrIezs4AzBuoaVzmmkwlVIzbqukyxxUNPKEhhiHQLzbAYyEEKrThoXnrzFXRd897t2xx0e8RJxv7Dih2/TQwdkMQhWFvCB5E2zgl/LqJ/KWH2wbUad2pIbWFS6AjPZNyvK8aHFdtHY87m82Ia5aSQrBWhMuxsTxmNGk72j3jvwQFn165x6dZNbj7/A2xt73Lv3iMefPwBe/kTzqr7DLfn1KMxi67nfDHDJI/XWZDlLEMFI6IWUhJDrFTO8hW5ffVaay4KVVkQkYI6eEUVFMn36EbySe8+vMvDw32u3LjJ0b1DNscjnG6oajEFOjsXYxylDVllHjy4TwqaEOT+CHEgxZZRoxm6jnt37zCfL7GpxjjN2JpCRzd8582vc/TgHv3ZwO7VK7zw2mu88OoX+MVf+U26g2OGo312RhM+fLfn9Gif40WP29hAW0M3JPr5Ocv5gjBEXD2mbwcIPf3ijKw1TVMRAnz44W0uXrnIjZs3eXS4z/liwaWNMd3BI+ZHR2xPNzBas3vlKnXd4Iyg5kpbgo989PE9zk7nuKpmdzpluVjSnZ1gNy9S1RMxGhoGfISQFCNlGGYdD5cLPvcTX2Bzd8rf+T/+EbO259LFG8xOW3a3LpCSYtlFplsbZOuYe0UXDUEZesTuP2ZFH0PJhHOoWMt9B1gjuWkbtmazsihr8MphMQwoTGEoKCGrS8PAwHx4yBAWcg8NYorlowx0N7cvcPXqVdp2yf7ePrPzc0aTMT52aCVGN84g9DHAWCNfW8hJBkUskR0oGegLmmWxpgYiA5nZ0NEuPefRU2lT7hBFjoMMXVOh/OnCalKQdJZjXSWyyhhg8IkqJGoDIXqCb+mHVkzejNQzkqtlMZUmq8iFm9d46qXnaTY32Gim+M5LQLh2RB8lC8xqKPWc0oIwy12Q1gYmMaeiOxeTLJNNMQoRpF+ej2Qveh9KHZqJarUvM33yEMChUa7CuAarHbqqyFpQPl1MypyypKpCOZHjWGOptaWxTmb2RFRImCy/p9xvwp1WpcFYhUIorbDKYhHkMVZyC0q/m6W+tBUmGmzWBZSw2EYTgyWlQEYMcFYslIyg7ykW2/9EcWteacMzpCyMlRTl+WhQxpY8W/FqQFtyFlab9aILza4mV2P524KmP48cHPc8OtmjP4NJP1AFz+n5MWrkmHmor16FAN1izvliwelZx5AhFJ+JqBRBaZLwdgmx2PoTcA7qlPEqMWj5HJMTPkdhQymKZwUyNfunYmTy
+v5o2hTkKnP0lSN2/8Z2oRTC7X/niC/94jV+9y/c5/K7G/zQz19Chciv/KWP0NGTBoiFbhGGloFEjpGgW/qh5+TkhOX5knd//AE//hvPoLViuVzw0v9+kdnBmVCbTGLIiuUromv7ib/+afxc8dHnTtjfOqGaW8zEgmItkEQbsUfPQitciSWBYnErTdRK8EhMhYKQSw6EFJliAhTQUVMhkK80NalQJUoTm035fqK3iklcfTyK1ouT5riuyaGYR1RWJlvOMh5P5BmlxKhqCH4Qq2Fri3FGJGRBE1CJGD3JUJx2VtoQLb+PKjbZpvDLs8JHcWIEmbxoITdLgaM0qkxdMopIFhqWFSTE1DWjuiIOIlwfNTUqyefkgkSkFbqZk0x1i/GDWOdq9o4OOT5fMO88J2dnxa5Wcnj6oefw5JjT8zlHR0e0XccQUsn8yI8nNEqLgUN5xiEEBj+gnMaU92wlDvahh+xJOTAkRciGLqwOLwlJrEdjfLdk3vYifs2RIYLJYHwoQduGrh+wxnLhwkVOz0+JKUjul3U4Kw6dPvk1XaOu5fmgZJ35QvWQJ2Qka6UaSRFYnpExQuiIoRzYJFJINJVlOp5gnMMpRQ6eNPSYHNkYOZparH1zSJimYlRZfCeuebuTTHaObEYcHx/wyaNDdsY7JFOjjWO0dYk33rvLaPtdbvrnOGhn7B0sMZduMR6mPH99kxeNQZmMNuKUdnp6xvt7HzC9tM0wDDx6tEf0kX7ZkrzHkhk7y9OXbrF4dMzDdMBgpNGxWtNUFSnKexVTEhtjIwixSmHNgc8YlLFoU5WQakNVKayT8PAeTVXykJRSQvtYhd7mjKkqJhcuiClKQS5WtN9c9vzjY02KDQWs8mQy3/vxmAr3DCUuaEku9Jh7lB7JQR4Vxknsw9bmmFqP+fStm5zcO8ZtOHYubPNT/+KfZLK9wa9s/ybvf/wmb3/1bXpvME6cInd2NpmMLf18zHnqcPWEB8Dm1i6XN19ivHUZqhEPR7/GxZs3uPnZV+hbmE62uHxhg9oN7N17h4fv3qFxhktbNVolrPV83v0oT71yi//u6K9iJh6tLMEpdp+6zCz1/MIv/yPuP7yH9y07W1NS13F2fIg1xel0ZDGjmmpnA5NrTLVk5+pFlNGFDlnj6gqnHdd+55acOyFBFwhhztAN6CztVmVt0WWlJ5BNEdF3Q6YfSrCq97RpTtMEXFWVomwVxir0oxziE1mOQTTHRnH33h20q9BaUVcNZIVRFVVjSMmS8yC6vr7j/r0PGGKi6+ecnRrsiSlaOlvuEyTTTos2dSjZcBKuKDSpnORei12iXyyhj7gsGhGbPKd791ksO4Kqufdgj6NHD+nsA+z1lnjUkrVCuZpnrl6jNpZ7+w/pUij0LLXuzaQRk/W+Sg578rV6Pqv/X32NQgsdUD2+L0iBs9NTPvzwA555+ik+fuM9cow4rdjZ2MA6xeL8hHnXY42jT5q+W7C1eYntnSkhJU7Pjhm6ji5o+vkS0IRFoG4z7uwMa6BuNKkdaAfDdOMSdtgjm8jNp5+mMpquPWSzqjDLgRubW7imZhESZjTiuZde5ge/8IN88N5bfPV3fovZySn39/bJ2lG7htTOICd8DFRxRN2M8DHxxutv8PKrn2V7Y4vz83OuXLnBtZ0d5m1ic9Twoz/yY1hXc3J6IvvXe+bnJzx4eMrJ6Zz5vKV2mjD0zPuIqjMXxhts7l4ALXSzYfAszs6l0QiBYd7x5u2PcZOGd+4+ohlv0B+f0C0H9HjKGQ3nZoKnwgyK0bgi1Y4QAl0Ut8ohUYZTEZWLVCOVeiMmmmaEMTXH8z3uHva89eFdthZXsLbCFnOg8bhiZ3cTZRR1VJgUmbUzlsOcNveYhWaxPGfZ9Yw2Nvj08y/yEz/xk/zt//XneeM7b5Cx1JMGW1usTjirwDqiEqqe9IZy/6cYhUEgt5YgbIWCJ7IgQVds3WCdg77nfDbDdAPjcYOPIklBpcdn85rmKbqiXAytQDSxFOmEcxXXr98EbURqUJoUCv1RobBWkYJnVDsu7O7AkIkmrimeq88TSrw8bFV0bgqFVkaGskXStEK01y6UhVZplNDodGnQwmo4mGS4IrpciUjQWmMUT9SSpX5Bnq81FlsbiJHBOHIBIARRF9RV64wpsoyk17kL5dJ6rNFb+RaszgRTmtpgRR8nelWL0SKb0ei1A7jWxcmaVBx9y/co7ta5MCYyBfkvMVmC7JW7VZdYiPxEAPe6El/9uqufqcp7G1AZfOhFz+k97WzBycEx++MxvYKm7bAhcHp+ghnXtK1ifqw4eHTA0dkpS98TyPSruJ41AlmIw0pq3ZRTqaEyPkZCWUcCagjjjnK+ryiTqLz2h/invb4vmjalFBsfTXj05SOq9y1pIa5sTWNZ9C0PPjOD88S7rwQqrVjueD760hGX3m44vDLDjyLnT/ds3s20s5awJa5RVdPwwq9c5qMvHfHoywv01HLw6Tlf/B+uU1nH0Rc63vmT+/zUf3+T82stH/7JM179IBIWmTf/zft8+q9dQCMTlv0fXDK/EQgbieOfaKl+Q5N9ce5bgWYIBWSlJJHQQl2cecq0QssGDl5g4KrSVDyGejPSSEgmXCD7SApeBLgalDGSIVc2TELCJI0VYX1dj7BWCtNBZZZKHCZj8HQocrEv990SHwMdmbZvSSqhinBT7F1LI4NsmBACISSMEaF7yhLgmDTkrKDYtCbr8MYKlXJNy0xkUc2SEFRJkzAmonUiR0UKEt2glcasDEG8Xwtih8HThyBap6pCG0c9nrLsI34xY4gZCr0AZcgx0nU9R7NDyIaEYjqdFv3R44MgxUiOUtT7kgUHqwZZKKf9IDrBVFlyjmgVsVYKQqM0jXVEDIuu5/j0VIpMrdFVjbUVMfSYLE6VCRGzD/1A2/coK+u0diP6QQJzfY6E3gPikkQqGp0kG11pyWcbUhROfWYtGkZZTFVRVUL7MirhTKRxFY2raVyFs46NzU3q8QisAasZYuTCdIM0UVTVRWxlaNsWP2TSIFbiOgW++ru/xdd+z+PykoXvSdWIavQcbXT8X7/6XfZfmvPscJmlbnjjdsft/Ttrc4c+Kk5mj1j2HzAMC6JfENsO7TPzoxn39va4+umnuXT1Cv3RCV3n8T4Q+4HQ9+y1Lec722yNRszGE85zRlWCDHglnPus5CLNTq0PfGs11hT9QVblkB2kQfeRGDq0FW2Dr7fYnE6pxiMpqnNeOzOC7D+akbiKFjRu9coprvqvcqE9psgIWJ3Wn7+66HRKDH4g+ICpKkGCYiRnu9JWo2JEWcfm5pTt6YTTReRLn3uFN5Zv0s0GvvC5Vzk/O+Yb3/kaX3vmawTv2B6uY+spQ/I889ynuHn9Cvc+eo8Hn3xA6Dummxfp2xalLFeuPcf2tefpVI2xFXq0zeb1l5m4babVFB16wvIQNTxis7mMaWbcuHSZbnZGZTU796dUf69m97WLvHj7SyQsb518lek/eZZ8Cvt9j867xOU+Z6rFhIFRnkCKbO/uMN7aIrx2l5sfP8/44ZRvn/8+L7/
+qjRtzq7pM6K1zQQjZYSKotHqlx1aKazVDFqy5laaGyEhRELSJC3xMTkrBj/AEKhbLyYJsLbd1kRBUVOm+MtCzHSDOAvGGBniAFqzOZkyGW9wcfcyy8UxXdsxX8zFT0ontBpwOmO0WD+LvFbYEkqZkhtXYW2iGW/gmjFV49GdONYlZPBnFFgFi4MjFmQaW7O1MQXfMvgZD+/f43je0w2BSV1ha02XeoxStF3HxG3z2mc/Q20qHv3aI2I/SBFnLSqUwcGq4lFq3ZT9vzVu33N3E9HGY6ozUsi42oGFna1dpleu843f/y5/5iv/Ps8+9RrvvfFtjJ3zaHFOIhLiQG2lcPQh4uqGy9eu8szTz+G7jnt3P+LgSHIJQ8poU7PwJ8S2I59lxjXoaHBmRFhmrLPYsQyX3vrO65znzDi3tIuOMRUvXb3Oh90px4slQTveuf0BJ+czVBwYjadChVUWW43YmG7RnyqMkhwr7wP9IGfy0PecHh6zu7lFv1gyn7f025fpk2X/8JSLFy/y2R94lTt37nB0dMT+w32Wi0zXQxsM2Y05axcEH1BuhK7HRKXBGqy1VK6iX3akxZJoFMEYdrau8MnxjEfvf4SuNrn2wovsTLc4evCIhe8JGOpGo5WncRa/PCGngDWGRkdxFfULBi0IdTKxxC0AyZB8zXzhOe9O0LHjcHbAX/lvf5at3WeZGIXNHT7MmeyM+Y/+3H/MS5/6LNYrJr7B6W10NQPT49PA1s6UkZ8wmWzw8cd3mc96vvSlH8XaMUfHZ7x//y7G1FTjilGVybZi2cv9rrSTzDiVSCpIOLdSWM2a1SDMm1SQI8XGxga1tXTnC9o+iHusrdBJkU1EZcnIXOnaKENbciblQPLynvqhp2kmoumqG27euEpUmr3DI2ICS7G/16qYr8HZyTHf+MOvcu2pWzxz69M4KxTXUGqZlZHZquGSj0VyMUJKWTTqK8265JxpdLbrBtVq0T+FwQNBJDlaE2JGcmBWzreULD5Bq0xxjfZFUpGVGFVlpcQcxrnSVDmMroT5EcW4g+KvQBYDkrWLU7m7HjPFVsOb9cUnjZIWaZBzbv35euXjoFRJJsio4qJJYh3jshpsCt6R0UaL7b9eGabInQrl/ZQpfHlPozSgKa/lCcJsk58bQ5RhcNsx5BajGrrzBad7xzzUhlnMMJthfOTk7ARVWbpBE4eK85Nz5u2SCGAs2EKdzav7RtZkzpC1JmkBapJSUisXoKB4O0m9IrYW4giq8+NByj/j9X3RtGUFV//eJeJ+xG93pHGgVpYX//5V3OaYH/u5p+mqQLgkgb6v/sZVwlVFfuC4+u0Nrrw+Je460r4Vm9zGka0mWBi6yKd/8SKLyx5jEy/+/jWxddbQa88zv7FNMrDzUc2P/+xTHHJCdobX/sZTbHzTEV0mWuh2Mpf/cJsLf7BDd8ljaiXp606RzLprIxlFDqWRqw2p0gyDXL5G65I/kvEZVEpgEk4nnBG7UEnNleaGUApPncmuLNNaQUWRnSVxDdOJxbDEakXuMwxSwATAB08OEZXFxUerOqwBBQAAIABJREFU0rj1PVVlseMKpTOnMcoGqDJUIiiPVsSUlN8bpdBOk6uVkFahswWsaLWMWWdHqSyTaoLo3JLSRKsYVMKqjLOGZQr0XaCylkpnIopGW7R2oDPKSgMbUyqmLJIz14XMkDwpa9GoJXHQ6nygGwI6DzhXsbExZdQ0WFORlMYnyEpMB2IUNzg/DPihJw4DMQyQxQilahyrVI0hiHFBMx5R1xVkD0oQyqQcQ9L0qcdax3yx5NHBIaNGMuZMbeiHiO8DKXpILTnB4MW0xBekVoTG+bHQuHC0FeVgNxbnHMYYqqai3pSwSWutcOFNhbOVZIU5R12PxTEy9IShhSQohNPiWFnXtRRnRok4WUuuyzAENkYNrrbEviNnyZJKMYkoPkYqm/EKBizBZzKWUT3mbN7ifcA1Uza3LkFleXh0ytAvWM5a/KDoe6HPYMCNLaqaYDYaJjef4uX8Ga7tXuCZZz9F1YzJzoGxhJRZdi2npyf0XSv0ifduo+/c5fTklBATKQdGzagMBQ2qWC6L2YRaN1gpCa0ipUTKXhwdi01wxtCRyVqa8BhlzSdMEQerghoL/VJ6sGK0kyGWS0SrFbpcUPi8mgM+7vEUgloHsqwjbSQawCgRh2dDTIJ8rHSLV69eYtuMmT2M7EzGvPDiU3z39ff4w298nbPFkuwq+muGnC05Wy5dvkQzHhPjgrfffYfj/YcMgxeUv2i9hq5nMe8ZDs8IdkJMmcF7umGJbwdOu/vkds5ItczPH7G96TjxC7r+ABhoF569h4q41bG7s8XnP/cSyz7xyfjbHB0/JBxnuq5H28AwLDE5YHPCkAVlN44hy/BLG4c24pTpqlrMQXSxbs6JFSHHaFtcVMVG2cSS32itXOory3IkpDfmYvKUS7FiDNqWvMJSDJCTUFvySptRkPckhkTBB9GwxgFPZhgGklJkL5N8ZSNDbulyS9SRkCXoOOpYYi4ypjLgJO4i6AiVFspyocxce+YGaZ44bc+wIycxICnKxW4STimevn6Vizs7vP/e+8wWR5y3BySTOV/2RByXLl7g+tWr7F+8z5GZ4SqLcWOc0/i+Y9RUIopPgr7ErCQ/cDXtXa3PQr74570knjWgrdxrUSWmG5v861/5t7nx3Gf42z//D7l64Rb/wZ/9PH/rf/lZ9h6+gQ8LqmZEzGKwk7TmwsUrjKc7HByc8PHdO9gcUNkzamqwBo0lZks9CWzqCTs3p9RmYFQlkp0yLGvUbMZ2qqgzbB0tqKzmzTv3SIyZD0veOnnAvZNTlKmp6xGz2ZLzs/fZbBwml1ymmOk7T98fY/FMJxMuX75MXTecn8/Y39sj+MD9T+5QVRUjW9G1A3cf7bN3dMKornnzzTf4+OMPuHzpEh9/coflYsEzn3qehOLN777F6dmZ0LUqySiLStF2S+L+PkrB0M4xYWDIHXHo6DrP3niKdTXBKKrNHVRQxCFQ1Y7aKLarEfsHc5LvGY8mZJMY4kDqE7OzU+pmzKQaARaVGnl/VZKCN1TkkFA6YkaB4DuW3Yy3PnkX9+iAfDrDpYinh8bw6g9+kWduPI1adGBP0Fsz+n6Gq2vGkw2mrmY2bzk7n3Hv3h4P7u9z6dIVnn76Wf7ET/0pvv3ObQ4O97n7yft8+OGH7D18wNbWmJ2dHcYbtcwNiML2Kc2OdJdqTWdURkEQxokudu+dT2RTYTRkYwn4Ej9SLN4LSrPSZGkKYygJs6lyrqBcopkdTzdBCeMkpUzMkazEiTln8SBo6oYYPcvZuYTGm1pKwdJExGLPb6wpTVpaG/vItkvSXJYsX7kXyjVWDOxyLKYeufQIIO7hWs5NlQRRlDzgLDWMUlSukriDINl8sdD2Ug5kNMGZUo9S0l40RlvIolUlZrJR6BXSlgtqKPx/Vrb80txRGjb1RwY9jx0pV/eeooRnJ08MnpW1SyZhJGhYGt4MSQWUdWjrBLHTq99GoqS00cTino2SWlxnVXLwVBn6r874kn+rNc
Zq1ABOaVRI9GcLjlAsQiKczrA5MzufkbRm1g7EYETT7j1RK6JWmNoJi67IViCTi9YwKxBTjFAQNLX+97xeH2r9HIFihqXQ9p/dln1fNG3LZ1ve+8sfiZNeiuSYGPB89ct3qZxMJDIry2GZYGcy77wojQhK8eGr5/CvyPdLY8XiVs/xn5+zmhWu7iP1soKfeuIMAD74mZOCWskCzKVQ9j8WeDw9kC9IyAY+/sGMYvY9dJHvmU4q0KWZW23c1SJf79jy2VornNZ0Fzy+ihx+oZXOfT28kGJFHLtWVqjyc6w22LU16uMNI128FlQsSSBlTpJVtnpJcSOdv4+yuawxMvkhf28eyGrcreS9SDkXB6JUNvxqgsU6e0Q98R9IVICEUj4xGWEVqp0Im5EXf/8Kz/6fF8k5oa1MotteELBMpu8ld4UkB/YQZFMIpbI0uzGSbcY5x2QyIkZIyqBTybwpP9sYQyzTqKhYH96okhOXxdpdaY2pKurRWAxivDyXlMVx0MckAdeuwlZJ7Po7z6ILxaFwIMckDlilQAo+klE0oxFN01DXNZNRIweuMThbyYMr+XF1XTNqRlR1zWhcodxjWp5E1ihp2uqmUBwMOUHXU+iefYlBkMgF5cshF9JaSxp8IHgP4xqroHEVKirJDrS1NBA6kGJP1GJZH6Nw2L33zBansqaS597+A7z3nC3P6QwoXbE7vcy1p29w9dJNrly5zObuJtOLE0ZbjmazwsRA3WUWi47j+ZxOKebDQNd3MBlxaXeL3d0dnDX8wGuv8cG33uC3f/O3ODg8IGUrwd+xuHPFDDGhMCXmIhaaIyWnbkWfK1NOhOqQdZZJXckkt0YOe5XK3s1y60vRY8opXGgZZbCRVwcLq/2epLgtMQLlBCBmMdXJOeGcK3a/sVzMspZTkgvYGMP165cZn1mC7+iWcPXWVd758GPuPzymD5Zh5kFNmI63ePrG8ygSn9x+j27oUXqbpqloJtt0s2P6fpC9FyPt7JiTZcDrCTF4licL9j+8y8hmlF9Cf06wnsYsmU4sKUkjEoclF3a3UHXi+GyP+fyMO3fexkeYzY7g7JxwlqirmnbWs7m5icoV3WIu02rXEKOiXQ5EH0k+Q5A1LboZoSqpvHLeFVpQKBelTwPOGHHgAga/ChyHlcOd6G2LC+RKL2HFDEEmr0N59qLWMc5SGYPONcZonJb8y2EY6PuetjixNaOK3nuyCYRRy5t/+WvrfK0QA2vHSqWIMdG/4jm4egwdrFzmVohWJuNf8Lxx9gbqHLquJwy+NKpyb2gQW/LrY+y24fT9c2aLObZyghLGRIgLvF1wZO7jJo5rv/Q002cmpBTQGT756GN0UHTLJbqSwlMh9Bxx3E3klec/j3/PP4qurW8uVVyNkybHBm0MzWjC1Wu3ePmll/nat79Dvzjlt37rH/Mf/pl/l3/pT3+ZT25vsrHRsLWzxXfffotvvP46UWle/YEf4IWXPsvv/O5XuXf/IX3ouLC1wWhrg72TM9pFL/oRhB62GFoGltSjMePpNk/duMklq3jaKRoLfRZ79/2zY+6fnlJvjri3PGcxeDan27hmTIyRs5MjYtvjDNRZk5TBjjcIKaNKFuej/QNqV2GMZntrm/lMdMc5RFztWPY9Uc9wVnN63HNhd4s7H9/mvbffYHv3IpVr2DvYY7HsOT07JQ49OXjqyrC5uYEdjRkw9GEghAHlO2qT2RrV2GzohshZHzg6nrGxucmly5eZ7R+xP/uA7Y2Gyho6tSB7iRZpZ3OUgouXrnDp0lXRoCtHN0Tev7/HZrVBPak5Pz3B9+Kc7Ixl2Yub9vbOZZad5cbzn+HaM6/yB//4VwnDkp7EletXeeqlF3j9rdf5lV/4B3z08cc8GI7wzmBixcHBOcY6qqpmVDcsTcve/n32Dx7x9ntv8s03vsHmzk363jO0nqHt8YsefE+lDaN6ggNczvQojLKkEvycs9AkUwqCSq2RNyW1c1b0IdJGL3m0QWjKOpcmKK+cZOVstlZM11RxQs0x46zFVRXzRcsf/MHXOT4+JuXMaNygggKy6CnrEW0LKQVUMmIEp1bUxgwYGZgnafKerPVWurUyBhQmR1TEGAjBk9SAUxL7pIu2S1mw1qGVDFZX9Usq0pVcWEOmDCDFH6AUZGUAnJXUeiQZwq9cyZXR66FkTEVuoRTalrM0y+fqlbZ77fa40mNpKFo90aPlQvvLaw3+ujZe3berCBGN0ARJpXZ8TIOUYi4/prWWWs2s6kdVNIa6RAAgWcKrOBKhn2tp4FT5m3Nhpo0atIPReMzIVKQusExzFt1AmC1ptCUEQ1ZKTNazmNj5GMFoTF2J/EyLW6aKkLPGWCVU9yhxX7kgvBJebta0VUFWZbS4qvHFQVStz99/2uv7omnTveal//pZlsuWMAyCAmXY3ZiwPR1TOUMikGIq8jChHA4+SOi2glzEndoYhuuJ+//WIS//j1fXF16ZB2CtoZD3y7MRk31FBmWkCM8QYsbHx0WYQLO6CLjloktP8Hol06lMSkpTpYtJQAixNES21OwFmk8Ch1qlqI3m8IfPaS8NXP0HWyuCT5kEyyKPMVJVmtqJmNMpzcg5Rq6STSq/iQhDq4pYWZa+J/qAVYroB/q+xViNMYqubxm8J2Q47zNDHxg3NbUxOKXp2yV+CKQo3aNQy5ACtzR7UvBrgYxVoaeV56JROPWY170xHrMxnoBKhDB8Tz5HHCJHX1xw8Nyc5qSSza0fW9CumtWsHGGQLCZjLXUzkemXUihjWS6XVFXN5uYm2mjaoaPtBrxP+PDY8fJJy9rVIa61WR+6IaYyVZJ1EjP0IWET5XtF2t7T+cCiD7R9YPBiUy4WronxZIMLly/SdR3L2ULE/cXGeLpVMR5PGDUjNjY32dycMh6NWGXF+cHT9Z00zmp1vEPIimXnyV0r2SlaTGQyii4u0cataQ0ZacZDWWepD3Q5iLOe8uKWaa0MJArSpE2NtjUozfbOLr6PLOc9zjVYawihI6uaZC3OVsRBpuV7ewd0y54YIu28xQ+J4D03bz3L7ksv8cyLL7C7s0ttHNlnuvkZs8NDZh/coT3bJ3RnED05iYsX1qGahi5GDk9PcK5iVFfMkEaqnbdsbezwY3/sS/zmb/8O5/M5kcTgE9YaDJpkXHmWQQxtUsJoAwpC0XLUjcFaR4wBHzO2KpO8FIXORrnwE8XNSkKv1WqItB7CFBxIxpCPS4OcxWVMR0L06/M4xcfItzEaRYQU5KDPmZRLNqMIsLAGLuxucvzxAffu3ef80HG26HlwPCe5EcZVjM0IteUwyTI7mTGfneGwWF0zNJtsX9xle8Owfx90adYrmzl69BHeTFl4iGEgzgPVInHhokPrRFIDk1rRLSOTZsT8tQXv/MWPsVbxid4HMl3uObp1zMG/fCBW8i8OuP/KkvtMW/bXciOglWK5XKLILPScA3eI1pr+8z3hqTdxi4puuyXEJ7QY0uKSUhIRv8k4bSSvsEBCq32byKhcTDbKlD4lCWs3StCJ5FcDLhn+mdIoJx+ISuNViaNQisrI7
eG9Rxd2g1KiLYaA85mf/Gs/IlrjnOiGgcViybLt6Nu+UHI19/7SPa7/3FX0niLGKIMnJdS/mCJHf/6Iq797Fd5SnJ6c4ucdfYpkA3VlGWnDpq35yr/6p/nCa5/nf/q5/5nX3/4uzfYWfVYslgtQiouXLrPoeq7euMnp2TkPz8+4fHGXyaghnC1phxasImRPPR5hjRHaVShatjKHUIVK9KScxZTzcdXI5ZxRxmLNBJsvknPgi1/6MpONEX/rb/5NHh485OT0hI8/+CYf3P49tidThrMlMXiqpqLtexbLJUkZXn/9ddrO89JLL7B7YZfDvQecHu9x79Ee2dXsXL6CqzdYXKm4mHe59aXr/PAXXuanfuyHUIsJD28/5M1v/DZvfesNujDnaDijNYbTZKi2pjROojPaWct4UqFdolssUX0PcSCqzLiqsM2YZrrFfOhph8h8vkAVRDr4gUkzYlTXDH3HeDSWM9gqooMUO3IemM9PxABrUrO1PWFz+wJJWT786A6WKEiKVZA93fwMmyLUDSrCyAjqOq0NF7bGaAcezXYPw1OOp288Td1n3v/oNtNmDIslXiX8aAOja/p+CVXFfLGkTzNOW8fzL7/KtZvP0nmorx2yTCfs7F7jO99+l/39fZJbynrHEdqarYs73Lp6nfPBkMKEf+0rf5ZbV6f87td/lc/90Ct89pWXefur3+Bb33mPd967y1xbcvA4zjHWUFWOqhrQRhGHjhQ9xknRenr6iP39c5wdceXSNvryVR7d/4jZySmh60gextMtVJJLN9lMzCI6keGNDKRXLoGs9ndxLV2hOv3gMcgwTOVye2YZTKD0OkfNaJFmZB/Kv0EMMvQ7Pj5leT4DlXCNYcyYTKZtW8kMqyqyyngtA8AYAsmWqKRVg2Esq2ZNUOknNFk54awVnVWp8aSgh5yL9IHHTU9OSdiQT7xSyk/kmanHpUKJl1mhX6JnL46MTwxhcml8KQiR0lK3KaPRUZNIa/OhVX29GiTlJ7RXKSepAUutsRqIyt/7+OvdiuGg5X6Tj4s2LadUhqHyNSvAIauELjWbzxmUXjc+oSCUrLwlnFoPOUFJxERO66gHuU7SurnMKRH7QJ+X4CyxG8i9p0sBo2UQPAziNj6EgtymTHxi2K2sGLmgjRinKEvKQp00pX7X2oj0R1uMSRiT8SXqZwVerNC4lP/Im/xHXt8XTVsG/BBQSWGwKCNFzfngGWYLnBExOUoL1aUgUGgL1WSdZ7FWkTzMXP/rl2kHgS0VZTFqUGVK8Th1XK2pIUrJ9/GxfLRcUrJIxFadopsBKc5VEX8qY4Sjy+MmbvBiEZEwJDI+KVJGTDrKAaFQQrfxiTaAz5oBQVmUkY2RQir0IWk4k89YrYhZspbO8Oi8KihUQfhWWVcWI3g0ukysdV3R2Eou3OyJWnO+nKPQWNPgrDhYRhMYkicEsazXSjLPZCEWYxZT7LLJJZdOf890RQu5jBSFHjj0LdZayEJryBTd0aimGkFoPBs7u+RSOOVywK342QrAi2GHNgbjLChFSGmNUpATzmhcVRFyZNFJYZQRk4oQxXEorSgJ5VBf8bJXmXgrOkLwXuyMk2LwnvmyY7FoOZvNGEImoYVmZCzKWJpmTCKzs3uRW7eeZbaYszifUbuKpqoxWlNVFqWNNKUqk4yhD6KJiikRfCIkoXSCKuhQLnsENFYu/2yEl24M2pTpoVKsHEp9hpjN+tDPKWPrWqZiQOWccMaVFrMUYxlPx+TUyyViHc1Ggw8QtCbXBmUViYZuAIaB0/MTDo5afKcwGBoz5tnnn+K11z7Ps0/fItWRB48+4tE3v87Ro0cc7h8Ro4Vqg1xPGF28QL19mSvXrrK7ucnm5hYXLl7kw4/vcPLRJ/zkj/wkn7p1jQ0DEwU2CUr13Xff496DB4Sh5/e/9jUOjo8wGjSa5BOmICpDikLAKMMY6yxVPS5uVOIciMlYk3GjEU6VmI8MyXtSiGVwImvLxIBSZu2+t74EV4OgcvGs0fTkUVky/zJiaDMMHmstzlhQEpy+QtQzpdkLARMGxo2CxtItTvnknU9Y9pG+SzzaX9CnCfWoIsQe4wLnyyOMt+RDcKphunGRiKYPiXo0QlsZRjmlMM7i/ZyaDU6P99m5dBVn4JUXb/Kf/8WvcOt6Q+gf8eu/9o8Y2iVdq3nn/RM+/5dfI2GwdiRmUCnyaPGA0/9kQf3fbGEU5L9yyvSvTmCW1w51VVOGMUspXEejMSFKmLv7S47nvvYMo0cbvPEXvi0TbFPGUCmglMIqVag7eU15ErpTKWoAg6GqKjmHyhuwWvs6BfKqOZFdJeh15XBVhaHk9BWjEKVEG22sIebIcrmkbVuWXSsGPVpDGphMR6A13ke2q4qM4fRsxsnxOb6PaGU535tz57+4u54+S59fAoUB/0Jg+eoCziEVHWsCksosgTPgCM3Pjv4Wo+bvMv8vlyzaFmwP2kjMgHPs1Q/oh8BiPJfsRWP48PQ20Xsu7OygUZzvzdHWYMYG5RztfEnfdeVOKwuY9JjpsR5vrtBDHpNFFOh3KkGIVOYPvvY1XnnlJR49fMSjvfs0dYXT8ODBXQ6NwyUxWEIJM08b0ULv7e1xPpvx7de/Rd97Ll3c4eLFHQ7PT7l49Qaf/fwX2Zhe5L0Xv0OzGFG/W3Hn/kMOZ3PC8YJf/7VfZv/euxyddRwc3KerPMfWktnE2x6bAswp2VADfQtp6Nlwjo1JxcQpatcwoInFdEOrzGg8QgPL5Rzve5YlgBwV0Sqxs73NZDrB4/HB07Yd8/mch3t7uKoGY1HGsbW7y83rl/DLOaf7h6WIFApjOzshLGTdOK1oNHhlaDuF9Rk3nrBz6xoj3aBCZDY749UbT3F6csSR7zHFWCopGJlaGpvGkcLA/v17HB2eoupvcfnGU7zy2me4srXN7dtLbH4aUo9yxxiWqLlis7nCy888zR//8gVeeu1Frtz4PBenFwh9z1d++kfY3d5hnEa09XU+d+sl+sOBj48OOOt6CRdWnqFtyyBQCu5mUmGdoZk4jJXmRifP3v07+P6cUVPR1GNQ8OjRHunRIVFbqukW1HI3rSiSsjzzunRb7SMKOFM5Rz1usCqhcsD3HakU2/I1YgCkjcE5g9VyfmBFx2+sDD1XGZHBe0Jo0SfQDx3eDxgtzYLRrIOlc1qdQQVRUkookTw27WD1u69qtTLsy2T0ejAtZ3+KHj9kjMpUlSKlQuYsOV553RyVukiJOZ1QAGOpiVe8MflbVBbZisriTl5YezxmkqV1s6cLLVIVJEwZQ11VIrmIUWKSStSAnLESMyTAiBOErh/WZ+/6/YLS0BpiCtKwqox1mso169oxZdYu5KWTK66i5SwqMpLVxxL5ifXB+jnpQo1cmX6kJ/LkBu/RvcF1DoIhm0joBxliZYXTYlrV9z297/GFrVMwl3UTqAozTStVDEZYo5tyvq2MZ1TxoihU17xqssvfl3Ixj/xnQ23fH01bzrSLVv6wrFDKErWiS9Ih10qKdpULbQZBPiQnRiy5TfmcUpnDYPHiE7qqyNeGDpn8
eNigFFnJRbLiP2sluixKw0ZB2rTSMn1cnRDl2aZYQniz8KSJWaDudfbSEzCxEi1MQqhbSiuMEpRsSIYhBua+FBuUaUDOspm0FgSgUKZUufwVShCclAS6tYaYhUYki0UmVaLVSGi9YDSuwawavETbZ4w2aFUhVuvC8R+NxjRNLgenNKzGymRGDgtBxGISWp+xpvCcV04+IrT1g2foeqrKMRqN5MEVt0WZ8kE/CuSRYrJREwqHfEV3k50SiqOSFNApSbBmLki6GIt4OTZjgFz0aykWZzMtKPUTTaU0ZqtpUAnGDJGuF/MR+WMUwxA5OZ3R9z0n5zOW7UBGMRqPmYwnKG0w1jEaT6jqGm0MdTOi6wJa1WxMHZW1hX4qmzXmuEYEAIZegkSV1mjrcJWiKq5yqiCYuawxQ0Ir0cJZK9o20eqIHa+xBuccIUHbe3mOCoJP5VCRCZByFcY6OWyjwliHrmv8oEhRct5cLdQQCXKPhBxIWKyVvbE4WnLSnaJzi1KJG7cu8MN/4ktopfj6H/4W7777FqenR7TtEq0dpt5k+/I1Xv7iH2d6/RrV5U3stmO8PUIbRVVXRKW4sTvlrIFv3n2HC7d22J5u02gwEQyJz7/2MqbSfHT3Qz79/LO0b/Us+44YEoMP1GXSl6LYHEsemqJqRoynW1R1I5SFgqZXGUERkyeEtEZj1hx/OazI0ZNVoWOsLt6y5imNv0xWH19oJNHj+sGXi99QV269jnPOpBjIRoJZHY7sA3louX7tMqfbU44fPuTg8BGHp3MImuUyFRQ1kYxHuUhSgyDptSYMkdPZDKUcy7jkw7ePMXpg6BZsTacFWUnMFoe8/MKn+LEf/zJvjf8Oi+URb7/3VWYnDT/8xRdxRHyIXL14g8ODjoPbLfOuRdcZghejpPMlNii2+oowDCx8xp5G0mkszazC56U86xjJ2sAEsk9iCb0AfaywBwoiEhxbHB1XoetWa+pKBg4pRGIMJK3RRq3pk6hMP/Trc7OsdjkD02N6qlkh+WmVpSeRI1XlpLGOQWjjIROS3CHaaIy1OFc0LRmMrVi2A1VdE9F0i57ReIPt7QukZDg5PqVdtmz93DaTVBGTX+ctxrTK6Muc/2cLNn55gnlbEbqBNCS8zgwayJE6wiQqntrZ5cLuLg/Pjrh3cojd2ibpiqGd86lnbuEqx50H+zz30ue4cesZRlXDV3/z/8bPTviZP/UvEHLif/v7f5dqOuHCpYtMJxMe3b3PyeER1lr64MVtkoRZUVJLERFjXE/1V/e23FOJrGcMIbKhDbPlGUdHRzR2gkoKlSy1czRNRY6e5D1aa5zRhCjunBqonOH07IgUE1c/8wI3b93grbffZv/BXX776IS+y5wujon7GfdPapoRPLh3h+d2n+LDO7d5+PAubQvnBvp6xMaV64w3L9PPD3j20hYn9w748N5dOi/nVOU013Yv8pnnbvHMjatopXnr/Y/4ZP+EMYnYDhIoXDLpco5kNK7S9F3i5OQQY2BjoyYu5xwdHXN8ekZSBuVqXFNzPu84OnsPozWT8Zhu0dJ2PQpPM66pmopkKoKu6ENiWEow9bBc0p+fYHVi89LF/4e693q2LLvv+z4r7XDCjX07zExP4gAYDAgQiSDGLNEiraJsuSzJdqnKpSe/+z/xu8qPenAki6JZMukSTUoUCYGkCRIYEjODOJjY8XbfdMIOK/nht/a5jbKpKr9Bd6qrp9M95+y911q/3/f3DdTzfcbYc2O+z9e/9GXSZuBvrtbM945Z92uMM/TbFct5y6xtGfqO1WZLpTVUiWQGuvOP+ZM/fA/dVMxmn0HbG4RXj6J2AAAgAElEQVQw0sUOnUasbqiVYqkN24ePeWn+de7sn4BWBODwZMn63oe88zff549+/1/zrT/+Q/qQWaLIFXjjSnUmRXHKBXiOkc1Vjw+DxCn0jmQCTmeWezNuHO9zcnJMTJmnF2vOVx3nV1vJSpxIhOWZU6WWm2IpcilyNWBUZjGrOFguqJ3BqMz66pJh6GXNobCuxlWtWNxXGmsEJE22xrUNpujEdFWhtJyLhszYdWz6DWjF4XKvmGZEfIpi5e6jnKlKk5A1HWMU4OmZInwChiXmQBoozTR5UoU9oyBpMUQq2J+PvmjXFTGINkuV5iYX2cw0pdFasnqV1gVYmbhEpRFDkXMopW0GFVEmlwZUDDF0LqRFJQ1GVVXMFwusMfRdDxmC8tKDlum70QqVInHS7jEZpEjzslOjlfMyRYhRIUVdwhh2U0FxqNTSDBaKoylsssmMRBhyugB4ZaLoNE5brDW7aSf5ml0VYiCEkpFnJIA9jB6LAN3SWxTWRo4QJSB+HCWXMxYa524WltndxxykvkCLB0AMMigQM6xCm41ZWA0hIgIf6SNE61Zq5n9/z/az0bQBjGPAKo0PMg5PCpJOWCBqjctAFgoRWrDbUC5uLpMYq82uacrTgi72qjGLqFsrXUqtXBaELjS/0pRQrLmD3zVl1843qmzemTw1ber6gdCphGkX04bEFApdJmtotM5lulSKOQViE2fphkj0idUgGpidBmfXfWdp9CiuJCmXkat089lA0uWhUeC1CGG1gkpP+RYyNbjyiRwyxmpSCDSqIg6eVe6wKtI4K5ogJZRSWTTF+MLZ0igI11x4yGCNwZZNSmsl0QhAjoluu2WdI01V0TbisCSZQEhBlGQTNyqh0ljoadKUKVUuWMqCmCQrCz5HUhAKVSgc9zCOuBqil0lRPw6ywU00inhty/7TWo0CCCSFHyPkYXfdRbu1ZbXeSJGRLdX8gPl8wWK5oG1a2SAKZSKlTNO2oAxgqdykZZPcPmMtkSQW9VoQMqF6Nmhld5QGio2/Kha7Yo0rz0AOBYnSMhkMqfC4kxSiVlsSlRi6GHFpstaBFYvdrI1MOo1FOclGS0GorkFbso1om4t2sMI4K5RbVcTY2jHTFaxWtN05h/4E223Yzh7wqddf5u0f/TV//dZbpPMNBFhv1pxdXrHuI7df/CzmsCVWJxw+9xp5aYj1yJgjrXNiGU+kWTR84Quf5QdvB775F99g8bU3aQ72mBsBPmxteP2zP8dfvf1tNj9cc/eVF3jvg48YNh2qsiQtIEtM0pCnVMyDssVHg8kOlC2Tc6GYJp1JSai7iknQnEkpFoQy4UNPAoKaitZr6sVEjQ6JneZzCnzVSouBRc4SoRDFlTWVQ48sIbT9sEWbhrkxLOaOuzf2CLXh4kwyzfoQMbqlqUdmdSWmPE4aXWtrhk1kG0bqqiHmAd9fYb1n2G6oZpaD/WWZeimSyXz+i2/w8597nZguJVw8r0luBZXm4uqCRw/POXu0YtbC3Tuv89fffZ/atoRhhckDaehI44roBzarc6HKxEAXhsIm0FTW4Mr0PKVATJHOG+qmYdY2XOoVIQ6k3JQmOJapWL42d8mJOI5M/H9rNB6ZZCldHMPKVG6KZ7kegiqhxO5E9RIc3I2BMWbWm55cgCeZfI6IgkbvGpXwDKUaQCtDGATk22wSrm2wteZq29M0Ne28wYeWy6un9H5NwpNSZAyjON8
hiLeY4sjZprORabGFrDWeUNY95KT4whe+wBuffYP/6bd/k86PzAqMaVXG4OlXa3QSx0qVwXc9J/v7XHRrPvzgfVbjQCCRw8gn9+9jlSKPfof2Kq1ZLBbcvnmCtYbT01MuLi6eAbby7udc6GrGiPFL1Vi245btdsOtk1vsz/fZbjuuVhtsXXNwfIC20A8dwzAQQmDYbgrTZsBouHVyzMnJCS++8BxGG24eHvHgwX3WwznGzDB+xKSK48Uh9Uzz199+h++bd2hdzVplsoscPvcinWm5cfsFjm6fcO8HkY9++DH96gycom5rVFaQE49O7/Hww++xqAyuarjsR1S7B66m77cMg7yv4AeZBlQGbWC+aKn2lxijOX/yGDX0NGgOFgdE2zJmxXobWHdbFIHaKmnEukTXj2Q82Wqsq3Btg6ulwbWuZVyvqHPk5ZMjUug4uH2C7yOf3PsYdesFPoofYFzFNmTef3yGWbT80i98lZdffpFbxwck3zNs13zzm9/g3v0HrLdrxrzm6MZNxi4SBzi4bTm9esDV5lRif7qRWQKlzgmrRzz4/hWn758z39ugm0wyI/e+/13C41PWTx6z2LN89iuf4+HZmo8+ecSB0eRKzBr8OBJ8kv02Z8bes+06xjGyfzRjuT9nvV5RVY6D40P2DpfU84aQ4Xi2oNqMxMfnbMbEkCdQ7BnAXWZbZdpR3JOVBHRrpSB5VpeXkCJD34u+LWeqZsZ8PqdpFxhtsSahiSLpsHXRhwnQ74zDWCe+cM5SzWf0KbDpt2y6rYA+QDKFcaXMM5KLiRU0TYUyWZeTIk8+AMXGHzFUMtbuZDx6AqxMRoxXiqxDTTOJYiUvG5s0xxkiEZ3jju64Y+dwPS0XEJLCjMokIikHlJZfEyOquDpOdNKMYRzHIj2pAAo4L83Hs/FJ5hkNrNFmRw80WqPzNddaaY0xruj/ZFI17Sdy/SS6JaqMcmLCJlEieTpxC2NL6nphmGmpVSdA+porWia1Ur+LD5oMe1IB5Y2S0O9+HPFZYrWMAlWeYVXeZUxCBZWaQe7fVIOD3sUXiIRaaiVtNEaJPjoYi9ESNL97PgooOUVB/G0a4unrZ6NpK7QlpSDF0rRp8DEyxkBMicoIcmJV5hkDHigPYU5RCso8Ia5KjCnCdT5HLhOyKWMtl+yMmORApgg6xT09C+8155+6iJpi21l6fSXwgCDsMe+cuGLMgrjp6xGuMUaytVJp+pBNR+iXRS9WqG3ScMrCyEVcKS5yBpKTgktgDmlKEygjTkchyENljcVZI2HdMTCOg7xHJ4JTX7jmzkhmjIlRpm3WYE1F0zRoLQ1Z04pZBlp0VHVd4ZwDkEw4LTavu4w5BKXJMcm8MCUJPTRG+N9ZCuGQZKpXkSAEcXEcB2IK+FBMVJQu1L4kY3UfCZHdlCqrvFu406Rss94ScmIVIxFxpnuWyrbTtRWHtomuWqj06HQ9MUkxkxw07YyD2Rxbz1FuRl3VYm1rZNplyijeVZZcoiKEBiBZK65qcK7EKWRB4ZJKZVKoiBhy4Xn/1Pg9i2mJQhpDYxqo6vJcZXnecsTnWPjoCgmVNJCDOEsWpMpWYG210/elmHbOenFC1FGYqqF2tjQ7hoxFuYq60uwtFujZIarX9DzhM2/c5Etf+zvs68D/eOu/58e/94C/fPdtgh/JVxsef/I+q6szKqewtmI8rejbmsffu8ELN5fQz9gwUrnM8eGcWVtTz2qyUdhqxvzTX+CvHv873vr2d7n59V+iaSqc1cSciAayVXzy4AF3X3iZO7fu8MG9T+i6Hj96yAZXzdCmluuNIivDOGayKuF5BV2VrdOjdLHnDbJn6LLOtBIXxzEOUmgW1A3KmoZdDk0uv5fKZEcrTRZcAa0N4yD2zpkk1OHy/EYVUIWCeXww56Wbe5zsN9y3ivXFhQj3x8j+geNTr93m7u0T3nrnBzw43+CDIkdHSoGQQKWenEZSGqgiHM1rZntzdFPJfofsGfO9BWcXF9y+fQNXOT77hU/x2uJFWpV4ennOxeWGx6cXLGaaDz9+wIN7H3LjziG1zuQxY6OjtXNSuGDcCKVWXndGCVzExwFtZS+IhQERUuBgPuPg8IC13aKNpqpdoZYKW0FAE1sMYSDnJP8eoc0kHRhjgnB9rXOi5KBN4vMkXVA5aMklND2KnfYYSoOuNT4FNAGtBdiLWQxqUErcU7Mc0toUzaFSpCST8xBkLRpjWW82NLWlaixZeRIeVzlCVKS+k4B7ZHplShMaQkCFhFUGjCJphWWKgNEs5zOef/El3vjC57H/57+kblpiCDhnMCoRhl7mEjmxWa24fXKDD370E8auI3Y9777zNr1Gol62gcViIcVe0ZL4MALgnKGqLLO2ZXVVQZEfTMi1aNpkfxKWR6KqK4YYcHbGxcUKnaGtapxTLPYb6tmMVz7zc9SLfZbzBaePHvHej37A424gxi2LeUvlDMvlktu3bnJ2dkaOma9+6cv80cWFTIMQhkpVW9LQc7kdWA0rFrcXvPQLn6Z+eMVLJ8d88ee/zv/ym/+Kxx884sknH3P25IyZM2zx3H7xOb781a/y0Xsf8PCjj2lmFavYsRk7Ut8zYDi+Mafd26fetpw+fkjKGVfVtLVj1rY45xi6rezhypUGENZXa7pkmR8dUNuK5cEMYxX99grfrVDRU1cO34APisEnNldrXNK0ew60UOZ9yrTWkbGsLgeuto+o9pekVPH4fMXQC+MkGI072qfZP2CrZ7z78ROe9omrp49Zzhvs4XOo9Ug3PiXkzDor+lih0h5Pn5yzuLHg+bvPc3q6YGCD3p7Rjad8/0fnHDrLP//n/yvPf/Ftmrs3GeLI6XsfcdsumOWa9y4cl7O7JBuhW5D7U3K+Er1ObdCVRSnDMAaGlFG6JgCVm5NzpJ3VVI3FExiiZzuOhAzKNCgrrsFJ+Z37rsq7aq8wZjQSkynT6pACOXqsE2fZNIHVVUWIgW7bM4QtKWncVsAkYaskdGVQdUu1yDhVifSjANC1M+ATTWWY1S1KZbq+px8GqrrCGFcmXMWrYHp/utQspN15DmUyViz7pVwSgCt7AZ1ULrBSyc1NWVhIlRMGlA9+d+7nVGrmsuYzIv3AC1tjAsymwY2IfPKO2aOUrN0d1dwIy+HatGWq5WQUlAutEAW6mJ1MZlGx1FFJyd+V5kntmsxcRki5AGo2K4K+NuWS6doEqGUBW6M0T8SIjgGnXGFVKFQsztpJTJhyErlMSpmkUqFLljmtvnYJVnHXxjGOsh51YWtMDDMfhBapTd7VdyCGTzkAqXyOUrdrhCGYC7Cli8umOIKrEgiurwcsu9y4MnVMClWmjEbrwu77279+Npo2pFAM4Zmw2mJFnIMc3t5kKNOkcpuloUGVBwwpeK7HY6gouoZcBOoJQUVzabwmSmRWFpl2yYOqyvcUvu009ZDXjdnLVAJFiKCCQhu3WxzyAEmjlSvRgWktQ2mUaOCymri3QrtJRJSJwvtSCaUkD0bonIK8JMTdLwVXFrUiKT1FVBBRJB+Le5ojhsDYrXBkFu0MkxNpu5
HCJjjQGqMcKRm80nirqFxmS4YA9ZBptKa2Tgp9p9EmoqwpBIiiDdGT2FIXcxaEcoas7+QTYUgkL6Pw0SeUTYSciUoRgb6IO4eIHGQbL0WvMqAtPiRCKJbzSXKtctl5RAwqdIFU/rxXmdXmSho+I+6H2SWhLyR24tWUAyEJ+i3UtlS47xrtanKGdr5ksdzDVjXGSaMalUXbhspWGK2Ljb4ThMhaaeS1BCKTPTFKALyEb8v0MSF0Slua9wRE5cqmN+GJEzlEobGgNKbYuXfKFdOFRFPVKJUEJbNWEKzitKZzj80dOoNKGacVJkeqaV8wTrR1CZIF21jatsY6R9aKkBQJK5o2pcnOQoJm2LC5iAzecOP4DkdHNS+caJRTvP3u27DtefzgE84ePcZvLyQgVlvaWrM154yzj3n6w3/He+o+t168wUhPSltWtaN9/ue48bkvow6P6bHomeELX/wS3/ijP+M7P7jHl994ldoUR0Znmd26w2qTefy9R7z22qv4m563vv8WrTLoakbTtmgneq6MIiVLxhUHQXEmDVFhlCPHAeNEn6LUPjEpkpEA1Bhkkx1DDznjI2JJX1DVGCRIXWjqxSUqA4zkPKKDw6QGATEjpk5EOozxJC+Uk2Qsy2bJi82S5w9m2HDG6ukpw+qc3Cla7Xjjs3f5hdee5/U3XsO0c7733g8ZxwGfanwUkbTvB6zVmJRw2bJcLqibipwh9Ek0oKPn6vlzPjz4kJu3nuPjcMY2Drz7+Af8xtu/i+82KD9wsf+Ey3hOt3mPq+0lw6cH3HHCashR4iIiiXwD+EUIeSAdRfIXRnQnFPCYE94JhdelGmMNbTvDLWp8nUg3BQk3TvS8UqyoQtlW4gZadn7ZN4UVYZkQzQKQZMg64wvzYXLDjcWOfoe6ljU2RjGUUUphCo2GnK732FTgEqPJRRsayTuqrdWKpMRqWwWZBCitcBg2lxsATg7vUJlLtn3P4f6c/eVNri4v6Pt+R60fdMIZCewOMWNcQ2WtUDpzZlY5tHH85PwJ/u236JTQnvdnC0zOrPuBrY8s9g8Y1yPHz9/hhVdf4g/+8A9YPX2EyYPQ4TUsW9H8He3vUbuK8zOhJEpmnCGOHY8ffCRAm9bcONqTc1ZZrKvRRizWna0ELB1XAnwpJ9EvWnN8dMR//p/9Ax6envLWuz+kS4q1Peb+lUWdjTRpj1n7AieHML99wmym8dFz49bz3LzzKr/zW/+Cyir+i3/8j1me3ObJ+n00A33c0q82NKsZWlsq5ajHFr9WfOXrv8Lmowf89m//Lr0fWewvOTs9w8UB7SyLvX1+4UtfZLFYEpPFx4bR9wx6gZ034jAbNBfrRBdHAYWyAxJViVvRumJvvmSbwQ8dKY9ssma19XRJ9nzbiPV9CCOri3PG4QpCz/HhPrfuvMxPPnjA07NeWBAp0sSeeVjTtDNiSsznFmLko/sfkrPF6j2O6pd4/fWXcbXHqBXbi3v88pu/yO3bd1i2+2wenPLO2+8QHqyxY+Bb3/4OVDUBy5gsMUcurtZYt0fO8OSqo9WOw5NXqBrP4wen+OoGKQfWdYWuG56ewsXHLelRx3azwYR93tv0NJWmH/fpQiACzY0Z4cJy/nAFPlErIAaWsxlzV7FwDW2bCNrQzhaEsSPmjIpi6qVNzbxZSjOaLTF51DigQ8LWNVlZmUsUPVjBwAnJo1Rk7LfiKDmOmKqS6ZvWnNy8RdO0fPjBxwx+LVrPbUR1a7TOkIxQHFWHXVQc1pZ2bGjrWrC82DFzxTRte4nVe9Ra0WcYY4BkqJUr57RC4cglfkQbYYvl0vSkbFFJQTZoEjZPEy5R8JBkKh63AzY5oWwVzZbGyP6XPWQp+J1S+OxFP2Y0wxBIZGrTCO1TyeupSUyRhd0g0xwZDhigieIj0VtL1BUuB4z3JETek1TGpIDTCmdS0XpnnLHYqiHGyGa7FZ0nYoVvFVBq4IlemWMi1ZpoNU5ZxihsFWWl+bq2+Cz0dSMsLBWEImpUxuhrgztTmtiQELOsHElJms5oFFpnfB5w2WPdjNEVl9uQiVYaT5NLXa7FVyAXFhPZitxICbCbtCcV8E5pcGaSAKlrPwSVS0QQWCNu8IESC5TKgGYCczWF0aPK91DiTk25DOY/gKYtU5qdLM42pvyuTEKKhbcXruzknrpz1JnoKqp8p2koNq3s0oBNskylhI4Si5BTKU3KficunLpdpaUZ273H6bW0IyHmIij5OSdBHkkJq0UPF0rwsUJcQTUUZKV8r/JfzAURyBCSWI2PXvjEmaKlyKZQ6OSBsaZCO4tyruRCyPRLl4cpBhF4jp3Bb9f0Y8e8qqjbhug9rm6wVYOuKpKxghRpWZA6eLL3jDmy9VIYN1qDhaQixEDWQrELoYziUy5FzfVUUvRlxWlx9MVdMaFMxNQy2RxzxEexUh3HyKb3ZWOd0CRpyibjkInaKPfueho18cRF16JlWlXup4zkRrwXpzNlbUE3ri255cnJJaDXoq1mPp+zt39QGrZGmkJVjCucw9gaU6IQTL4WQSdVNuucCX4AAjnJxDCU52viX8c4afaEOjvEki6VQpksillKDKlY24rlb8wSIhxigByprUMh4mChZIoLqpiRiDi8Mrr8mPj0okPUxmFdRUYTiJJVhUyjoyqOkq7BT3RfI3kptYLQNxwcvcjRrT0qW3N28YD1/opZGLl88pSLsydC2XCWfhjQqqLberI/p7KGWZM4m4985uUvcuPmkp+8+x5PLtaozQUnx5aTxevUbp+Zbpkdt7z5i6/z3W+/y958xmdefR5rAs44Zvs3CA4eP/iA+v01+8/f4u7J85w9eEo0cLl+ilI1lWuZzRdYl0m5R0K/xSlTxxGtB+H254bWLglZqHh1vZSIBtOhQCIxcsldzFnCr1XJzZkctVIWzW2KREa0HjHKo8XIGmsswUsm4NBF9hb75Bg43mtptaIdNlzef4hrtpiDmpw8Rs85PLzBS6+8xI3DhsF39F5MVryXbDA/jKicaZylrSpcKvotYwjl8MxKaMaLbxxw+blH/N/5lP6RPIOrcMUP++/zmAuSG9k7nOH3Oi7OnxLDKJ8pJ1IbcFYMiYw2mLmheqVi+U+WaGt58upTjv/JIdqb3X482T1PdtFKgc+eoAIHjw9p17MdQDZRHHXRKU9W0ClJITEJ9nWhk+TCABAxOAXAodDniwulLjyiPOmar51td3SanElB9mVjZP2klMniTMB101cOawowmhHwJ4tZUNu25JhYr9ccHh4zX+zx5OyMbSdZhrPZktlsKbq8GOmrgaadUy0tOZcIlgS1mab9gT4l/s2ffZO9txcM48Dh0VF51uRcuf/wlHbbQzXj5p3n6YaRk9s3sclz/qDD+4Fq1rJcLMr7TAQ/Cs3IUiaTiWHo2a5XQnmvakCRlSbEkTEkXFVz8+ZtXnn1NciRd9/6c548PWdMmtlyn7//67/Or/7dv8vbb7/Nj997n/c/+gQ720ctOi66GhUGXn/xhIM7GT/X3DxeMJ/V3HvwgPnskL6Pcp1U5DtvvYWpGqI2Ml1Qm
ZQ8KQ0Mw8De4S329g752htf5o2f+wz/7N/+KefbtcTghA5LglqzHdc4BX/yb/9Y9rZeMW4DYeyxNjKbGWbLOa2aMw6GrguE1DP6SF1Z6rqhrizjMHB5uUKlKJS4mMBZTOuoqoSzNUorQhionOHWySGGBZqRnALddoV1mr29JZvtCmM0R8s5BwdL2nbG0A8Mg+H84oLZYcuv/trf47lbr1DbPVzdcnb1hEdP73H+VPOtt97lYO8eL956nnh+wb2P77ENib0bNwkh8/DBJywP9jhZLDg8mHPj+JBmucc6aDY+MgRYHGmU2ePVV/aodc2iali6mgqNTwY3PyRLpgRDP+xijMaxp/M9wziASlw8nTEOGy4ePWDsNugwSAYWgPO4pkWhqDS4piHkSDLg6pqqnWPrFj9uSREMAhb3wRdGQBLAXmy7dxN1M7lVhyDurjkyDD0+SpxJ3VYCzmdhNTRtzXy2xOiM9wPdtgMVyXkEo6hqQ9PMMVhS7Dg6tLjmgPVpoo+RShv6DBZF9J5oDDSitXWT/rbsJ0kJ9RANWknUTs7SyeUUEJh9YthI9E6MovOSCZkFXdgY2aCURF+pNA0cYtFdZYiTx67Ei0g+dmEmJDljtDakqFA5F4dr2TpMKsZd2pCURiM0Rs8EkBXDNzNFS0lhlVIklLp9kvHksvcapm22sMaSsOOyUqWRFbOlOPHVtLCDjClatQzGaFISl3OhV6qij9NCf1Qa7/1uT85lmqm1lWpOAUYMxVIungZoDJbJkdMUrwCJSwlM9MachN6orFChM6WoK2dWqeRLLU9x/pRmTCU5l5iYXIX95WMghFRkWuISXQ5GrrXzFGbV9Gf/318/E00bZEIYdw2XKo40qdiTTnRElVKhsrArAHbZBuUD5913ZFecy8FdHCFLNtlU5Ouy8KX4F/qLKg/vFNg3fd+cZQHG8uCjNKlw4/Xk7lh4qcZA3BkRFB+f8jZVMY1Aq9LkiaORaixqFjg4visNQYFhlJGmLSkwKlJZcSbCmEI3ysVZR96zH0eGHoydkVVic3UJERbNjKCU0BWMxjYtrmrkeTTIpGYcyLpjO24J44hPmYV2LKzBFh+4pDNBxUIhkgvjrDS4KYsA048jQ4j4IJbr4zBIkHVKbAdBMUOKhCyoPyGy6Xp8iPSDL4HaCWtd2Xifac60oBMTHeG6gZfFKHkiE8UqFu0X0vgo8wzd9RneeWmo5vMZt+/c4ejoGGPFMl7niDFFC5lHQrdlG4OgR1GABZWv7WWzkk3Eh7DTQKZnTBAmkfJP5Zeo6yY0pVCmyBGV5O9qNM410rQFT2IgxyBNhFbkFNHaEGJmGAMRTVW36LolGqGX2cmwZ6LqhoC2jrppUdrsqKo5ZQntLIj6GGKxYFdQgARiQnPIz3/hgEXbMps1PH54zjAOxJRwdcXrr7+OqxxPHt/no5+8TxikQa2amvVmy9Vqw9Vqy/e/92OWDxqePDhFDwG3f8HqyQc8f2ePvWUipo7RGo5fucHVw32++aff4mR+wIs3Z0QDs2VLe2fG6dkVn5xfcadJfObWXT7YVry/uSTHARW3BDUSuhHjJEi2aRbU9RHOzaidwjWJoBU575OCwlUbaYKSQruaxMCpVsxtLS5cCI0jFwdARRFzZ2kQUhLhcgyanByV8VjbowhiWEsNvqGKmiZZDo8OSOqSG3PHUXQc7N3hxU/f5PB4zu+/8Oc8rLb85MEjtnnk/QVsuw3rPvH4yZYQlUzvSEJtayo0QvXdGUdEoWJKH5Sov1Hjf1/TVAtcsvio2fyjT1B/VqP/hz10HIgGdHDMtgtUyTlDwfHREXVlIAqlbv7cgv7WyOd/57P4lPGveD71L1/DbSsB5coaA3bi9KmB+ylzCzVdS7VbK+KKq5/5u5NeLZN2IeTSk2FK4xyFNySIshQbIfnCPMllAnpNuRI78UKlVOJSNmlnprNI9hW92yumQ+faQe0aRPTes1gs0FqzWq24cXLCwdENzi7OWa9XUrDFWFx1ExfNBcu9fZrDtuwPEuA9fV8/9hhj6HPCe1/ejy6TBy3rM3pUN2BVxbvvvEsM8PJLL9OdnYn1uFZ0w8joPSSofcQ5y2PQKK0AACAASURBVGq1YRzH3b6plWLeLNAKtv3AarMSgDKLI68LkdV2w+X6irHbMvrI4CP1fM6v/tp/wptvvsnv/d7/wYcffMjp03PGzjPbO+SFk2MWK8XYDVie0vvHjHHFqod2tuTO86/iGseTs4fcOJ7TrVc8eviAF17+FCc3brO6vCDonmrWsFguiElxfOsGzaLm8vQh/+Ibf8rj88ecPX3CQT1ju8okDSOe4Ht8DNSmoZ0vydqQQ4AkWX1+1DRHRxwd3ubyoufBw1PGscMZ0U7GEOiiZ9Y0aKNpZi3L+Zy+2zCERN+NEBNd33F1tSbGwHIx48bxAWO/IvotOYyEXLPthd4m8inFdtNhtGWz7jk/vyhUe8XdG8eMpx/zvQ9/yNPTM9abLbPlIUFblJtx/8nIDzanvHd0xpP7P6DfrjF1w12gG0eU1jTWsb1asX7ymNWjx+jawLyii5FV15OUJWYjhXJWNMpy2M7RKbPpNE8vR3LWEtezWNAPPU/OnuJTQDmNcZaTWyfcvnWTvZM79KMnbRy525DIjP2GuF1TjVts3ZArzaANua7oo0f1mYOYGFOZekeNTyU/liiOryrLAT7RDHOW4hgN2bAL1SQz+B4VMrNZA0S871E6oE1gPm85OdknjImzJ09ReSSmK+ZLw5u/8lXq+V0+/rDH95HD4xn/1T/8j/nyq5/lW3/0F/zW//47XGw2VDgcQUzgQkSFhLKQQ5CJHB5NkamkUOrLiFJCg06IMZpSQtNPZU9JGZK2ZGMIiOHcWMzDjAIVKdlrcvYm2YSISeGTyCd08VyIQWoga4teO5faV03AeqEehlg0fGpH61fP7IrSTOjrbLVSKAloHsg5/L/t7yfjv1LTyGuCyIOcuPKWvzPx1wQ7V9c1ENfmXBkB7lO+pr+X0u2ZOo7dPjn9rJTIg7S+Dtk2xmBSqdFzMXlJJeg7KsBI3RlLBEGeIiRkQvpT4WG5IGVcg4zl4wpdGjE0maaak+GIRhXqrcEXo5n/v18/I02bWPFOdp6+fNjplFd6+mR6N6Ui513Ddn3zp/9Tu4YqFlFhKuNMQFx4nhGqiwMiu+IroeXAz+WhLTdE/kztAvOm7l4cLMXUw4cAKcnhZicdlWwwWmuaxYymnWGcFROJqSlDs9pPxMXA8ugQpQyTBg9jSEimT8KTjSJbLQ+uEL6EXxwT2mqsqWS6oxJV29APHd0YsDHgmhpKiGiQgDsZ7mcR0+K00BaTuOzEFEh9IuNxSpqyoOVaPkNY+unMjiSFxRiFBpljInppYMbJ0dIUTYqS6y+c5oB1Glc3ZO+JwQvdKcu9kM5SkKrJgWjXEAksVBZuQUSmiVoR5WZVFmihQcYyNQCw1tIezFgu96groQx5PxZNWtpx51OWkfkYvbheCowvr5uFM64K4iSh6lOO2vX1mcCE62DnDCnjJpQoy4TTaNEAZgM6
Kyxln0gjlnFnE5+jhHUHFMY5lm2DrWe4qkHXc7ITLeK05VBG8DbGYq07uaNpTC6NeUrk5MlZnJV0+XzT/VUI8newt+To4IC60jx8+ID+5Y7l3oLD+QJXiW7JVc+z7bacPjhFBYVrKhSKJ2cXLPZmaAt7q5a7d+9gGXjj8z+P32x4+pP3uPPClrOzS8ziFotbr/G1z7/BJx//a77zl9/k5ptvUh82vHb3hDd/5U1+670PuLzcUD18yKFt+dTdW9z7kWe12lLREVjjtwqta7Su2LDGuXMxA3A1i71jzMExQ9zgSNhkofNYNSAOkFmcD7u1XJ8wQnERzFmecabncnpeUsL6GXVYYNxANRuoZx5lobKObrvl1q3n+OxnP82v/tp/xOX6E7rzx/iHK9q9lhdff47F0YLfWv0BSVX0WfPew6c8yBuCD9w4eZ5swPsNbWPoraZCYYiMPgCWrDROFfZJYV6k4OnXKy7OrmjbgZAU/ZiI0bNdX6GePKQyii4FamfKP5N9VTIrhQKt1bQHyT7tKqHlyHqu0K4I18v+ukM4C/HXObc7oCcgAyYdgfopRFSQVL3TqsWUMLaWD5UTEYUq+4uZDutUjlUlwvKpQJj2D2Mmh8QdXos1k7lUhKx3zcwkGC/Q2249TwWHLVP8aY3knFkul4zjyGq1YnHgWO7toZQ0cspoKlcRg4SCaysT72EsAJf39H0vNH3EtKapK5rKsV6vGYaBum6oXC0U1SwFlEmZd9/+Hh99eB+8Z7y6ZK8ySOS3Ecq90yhtycrSzhbUrZyn2hhB55UUie3yUO51AaK0tYzjyOXlJd/7/vdIwTNzhk+//lna5QHOOX7jf/sN1qsLPnz/J/iYqOYHNFZx//23OT8/p9s84lE9UlWSH/rJfUVlZygqFos5o+/wfis6PT9ycX7BvFkybjzR9cxaR1VLzEI/DOznJZ989AEfPfmE7vyMmVZcXZxSzVsGB2G74ebhEak1HB0dYZXj3icPqRvDcrbg4YMLgpdwZa0k5Hgct1idxWwrTU7KLXVVY7Ritlyyt7/Ppu95en7OuuuFSYFEPyil2GzWBN/TdyuslsgdY6EbKWdJwNpigJYVdV0zDCMxZm4dHXEcHWdv/4gHq6cMLtNFT8+WvcUR46bj6ipwuck0WJa1aCXnsxk3DucMw4C2DtssuHx6xXbtOd9sGfqNGKq0Lc1sTtaKYegkU3Dw1NpQH50wbrd0PrDZ9gQfCIsZQ9uw7jYkpTC1o7INqnIMwzkf3+votyOHJzdZ3L5Fd3mKX11QNYYwDmLANA5cnj0htAtuntxk2TgGL9TVDBjr6Lqei8vLkuUoNcGEmCf0rqGYNOOg0PlaCw6iEUs5cLW6pO87lI7M5o7FokLrwHazYeg6RBQXuXP7Bv/lP/pPef+jkdP73wcv+urXXnuBL37pK7htw5/9+Vtc/fjHqOSxWKmLfNrVXKQISkw8UFInKJXKvRXWjPDJ2dEGJyDfWJmuhxwFULF+suYARLowxkBOwrRSWSGGdGJmllIWuYu2KIQG6aOXaWbZV1HiVqtSqXmLs7JSYoJiKytsqWmPRIv8RlAhYQ6FcH2ATNVEls9otEh10lTpPNOUxZyvf3/3ucqErezPcjYU18j4jA5w6gVK3SROs5qsr8HuiSEAEs+kVMZMjJAkk0DZz0vtkqbGMJe6PpdhTRnQFGZHjMU0JiZyMOU9T3dG7fZ96YvTdBgUfaDZ9SPidlmYXVrv+pxde1zmTnI2lYLy3/P1M9O05RxkUabp19c3RSZfYtWhlCAVZCnG9dTePnMRdZlmxoxoUnLp/gudLWbKhEwQ3SnDAT3Zy06j3dLoqek9QUZsn60RCqTKSdwUjZbxsc7kWB7IMtWZigJBQwymctiqJon4Be0UxmRMlQkmEk1PzkLPzEqTdCDmTNABlTI6iVWpVpLFRpnyqIIgK6CpnLy2iyzmC7bpin4YMMqA0vh+IGGwrqGqKkFrNGgnTUJKI9mA8pC9J4dEhUyBuiSB5uWqAEJb4hnUXOiAmVyMR7Kx0rxk0RLayY1zcgNSEFJm9JGuZOklNONYCuHyatKMFf/1SYPChHxPyP1ElWS3IjIUbWMqOkdKQWhxtRQ9Qx948uQJrrhjksWYJQR/nfCgFFlnUhjEIlA9o5FJBUiQfQ5tdEGw8u75EaQqEXPRXxZr3UnAShEUayWB4ZpYprgKrYqlrLMEZRnyACWKQTUWox11O6eZLTBOaH8xX29OSSkmYxutjTyDUCZpERXV9Ws9M172fhTL9LLZxBSFGmsDs6piOWtpG0XXbQgh4MNAZWtQibquaGeHeP8im9WGcTOw2q443j+gHwaePjnn8HAfo1tSTrz86Vt88OFP+JtvfYdf+dpXiJs1m2FEtT3N7BazZs6vfO0V/upP/5zv/vXAl77+d3hh/4hff/Pv8eN3fsL3/+wv2F71PPzwIXdfmfPKnTnvnH3IdnVOzGtpOtQCxQyAlMUlsG0OyTcMn3rlc+R6yzvf+TPOPrrEhB7LFQpPjC3ri3O+/9a35JkLo2wrZaKe4qRn01ijqaqaqqqwdstyL3Lz+T0Ob54w2xeq8mJ2hNNzXnrh5/jyl79CXVvmsxf4yz/5v7jUG+7fe8RbP/4uX/naF5l/bo9VHhjHxHrsGPJAVc3IZkFIV1hbUxlDYy3KSw6SIheNpSUzgQQWcmLwI77vSGNHtg6VDa2x9Eg2UK2FWiIi6jLFJ2OUpa5rnKsgl6Y1XdMcYowYU8meUJBKQTtzOZTK3l4KmhDHgqKza8527ozPHMZTUOsE0lAQS/S078s+oxBaqkFjnYWANNsasNNavJ6u653tv4Atxsj6FTe0a9qkFADPwKJTc1aQa9l72IFBACGMNE3DfD7j8uqKi4sLFst9mnbO6D390OOjrKWYM9vbHab3+GHYZUgSQEd5D7WqxZXTdbgXJEQ9hEg0iYXeI5PoR0+uIrNmxnZYMasrXO8Yo7ii2bZhb35AypmuE7Omtp3R1A0TdXu72dCttwSV2d+fUzc13keJkrAWPWrWTzes+xVt09DOGjb1lvb2Pn/66E+pjwzse3zuqJqG49tLnn/pmKurK/TDU2YZXDVj8D2jCmw2K5pq5OjgBmfDms1qZDQJrTzKdVyE96n2Fvg9TzgODE+gSq3o72JivLximz3V0ZyTruPR01PsvEa5TPKDaIhqi2pqfIgM3tM0jlld4xScn1lpClLi/OKMi4vVTkOTwyhugq6VczIGQoSzi0tW6y2nT065uJRQ6f39I9rZjIuLS9abLQnYdF3xilK08zk3b97l8dMV4zhS1Zau25TMxgQqgNKMPtCNmfO4z2yx4MXlAY3bEEk8vvAMK8N6hIvtiqjWXG3PuHnyAv1qAcGh6yWuGWA7MnQbfL+mNpnWJfCJy8sr9GbDYn+UWsQalE6YWuFywqSO7uoJOSteuXmHYRx4dPqIdQfz/SWzvSVBZbyS6ZWq4OzsKU5VnNy+w17rOM0dF/2KplrS3jgmZ3h69pSn55fs7d/g9q077B0fMEaPUWAC2Aqi9jg0OkI
HOO4libpW/vEMqmtB4kzucoIKe4SwvQud1uiPPM4XDg/g52uyu8L7Rtr9Qk7QBbS8JQbKPrBbId2R8PrDcbTvMAGPYve54eHykpLS7WxjpMKYzDQDGGpuvo12sZek1k23S0bgWpsB89pgnQNUzJ4TrRA+aYubq+YhwG7nmQglEBvvtxxJZE1/U83N8zzlr8tIGbm2u8N8zzRB8CubUc58RkHM52DClSkqcLDWPKPH/+xP39PTFFNuuNOnuKa6Tr1yRlyuwPJ0oWl9JhGGAaZRSRd7RtT87wp59+5DidyDEylxNX19es+rV2MjOETNM3CFMFVqs1JSNa4FgYhhHrHI0LhNYTjAEzMAwDqURSnolphARPj3fM00mLQMtq3eIceAttpYnawpQj3gBlxmZY9Vvef/OvIXym2X/GmxOH/S1PhyesD2zf3bDevsP4lr6ztEHieNV+k+FUZEZa0b3orYWmYRhOnA4HzEFykM280hgjPgnGOlwbwEXWk7i6CpDkGecohV2ShL0JK5HsgFCRcxR6o5F5iI13eGewtkARV1WTz/mizNkVJ0xxnyykKSGKdId3lq7r6fqWtu/ATSQ8RJFhFCNnihR+8dwVr0GrCMUTd/nEmWkBVkdweMoS+mrhBkkbKKYyA4whlkzTd3TrFe2qo+k6yS9TBtuCDQxq5mGUKg2QKqXQFLAqXUG1+EYLRiuFr1HjmFJZaNr5q+Donzl4Z+m+TelyTuf4SoNbzyNrAZeUWeV1dFWgbTukDyvMKWe95qIyF844Lx36Ip1Tbx05Fp0hPOs1rVRhgXON4VUH9EuPv4mizSAbxDsxb+j7jlZnowzDwP5wZI5RaGipEKdIjhG6oHVtJhu5GPePe+5u7zk8H3QQaB3Eqk412j1ZigxqsgAhBPrVWhKWAlOU4ZTTPFNll2mW4dfoJHcpF41aqRvVRClKDkgVLjORgtpxo7NhjKkW23LTswPnitoc60FGwdoiSbl1iF2xY7OWEQSrvqMNjl7dveI08fz8xNPzM3dPjxyGQWyjiyTzMRfmmIha0GZQnYRsNmu1lVt1MZqg1GKlFF7NQoJzsvNnDy3yxDkpLy1wcOQSpd3tHL1bMZSRcbqTAjsnHh6fuH94YNKCUd5H70Gx5CTJrbVqmkHGGojZUHkC1mZBVJUaVl0tMUYQKs4IkgxXnBd0aemg6OYtSTRe0FCQok8cFkUzaawGiZw1MBp8CyUFsP6CwqqdVdCEWEY0SFu+IWcDxslsnSLIjanaRq9Oktbjmq10Rb2nCZ7gndIOvM4AbAQxcoXWF1rvxYIboR97L5ovFwKuCbimwVmHN46mDXRdg/eW4J0cwNazf9yT5szf/zf/itX1Ox73PfM84cPX/Nv//n/km69/Zr+74xcfbzDxxOnlJ/JaOPfT4cCzaeluVqzWPaaM6sA0Mqeomr+Jpu/ZtNc0Tcvd/SP/+P/8Rz7d3tP3a7p+yzRH9i8HqlOdtWrqoAegHI6RGKvrIOQ044NZOk+//e3v+PnzZ9E0qTnM/nTi7uGBhFidX873qoloXRu1aHtLb6z//1an9VYjUJ97SwE8P3/WkC39KX3f+rxYs0fpGlc9ilr3Y3WWmrlYv6UIjRLdO2ocJPEHqsBKClEtTusvR0xz6gws54KASAjxZKEJ1rfRmFo7c1XTZdRZUMBQQWrV8QJnNBZdxJOqC6vfuXa4LodplyLxDGcW44NS0WIQHUtRDYrVTuLFPan6s0qdvDzcv0QJretimUdXihYl53t8Oauu/q5KB3y7Li5F/5cav9daEZaO3+XDapdvnEalJzaE4LHW413g4UE0jylJIeBsYJpl32Fkfp/3nnmaSSbS9S0maRLkLRkBoqo+xtZZgnPW2W/pYu9JsmKU+lWy6DPrCs6lYHJ5853ke8n9FIA2F137URNfK2erLLYsRlNKwZSEVoCMtun58CEAtzw+PjNNie32CmGta+y1DhOMrpnqYimslru7O9abDc5Y7m5vmceR3XrD0R6kmy8BGwMCAJdM13VCXWsCue9IoaUphsY3NLkwpEg0BZMSbZGiPaw9Nzfv+PnHn2h8IDnVVnpNenMkZ5k7OifDMI7s9888Pt4TgiMEz2azAaoMQa6n0MMTjBOHH39czHLaphWd9ygJ6Wq9JlrLPI86g7VhUvCz7ToBIEm0TUNOE9M4EqeJl5cX1TrKup6mmaiayLrXoibMwbfLPjfGcjxK/ma9gEFt2y2xwChlsNK4h2EAYLVaSYFWCnGawRms8aw3aw6HPafTLCCJs2Ayj0+3uNvfsd2sCa5ju17zeB85Hh/IJtN42GwarJdZbKVEpfaiOZuhZHEOrJ0k5zwpW+Z55jQMtErrFYquoxSh7cWcxT01pQWEqkCMVaZE3b8ylsQpaFs0LmYFz850a2tlRIVGVcxFMK5dLYnriZKETl8ZXUbj9Wq1omlhzgMhyviAksToI+V5AYAv6eqXzIIzNby+RvZzjQd1/9bvWyjKcslnlkCRvDp4T0mZ4+EIxrC9eSdn75xJocUWK/OT51nZcHIOOmcgXfgwG6VeIm6b5+BApSXIelzijORa6eKMXeL0RSyqdNPLs6aaSRUy2cwac2QdWutomk7yZCtNFe8CTt1rrfciQTDiZ5AKorkrhmkaiUkbIehHvojzX8ylLx5/E0UbgCmZvm1Yr1pWqx7vA1Oc2Z9ODONIKmKRPc9JZ2OJIyIOnDWcDjMPjwc+/fTA48OLcmDNsnmMUUcfkMP7omjzSGCdciIXcauao1qSFhF+5ywdFdJEzgkbtNVfpOiyKJ1D28lOizrRHJyTgZySiBt9wPqgnQz5Z3IDuIxb2r5F5qXpou/7ntXK07Vi4+ytV3TxyO1hYBwmjvsjw2lgHEeimSlWNkBBOL+zzkKRgKUHbBFaofPCU7/Uj8zz/GccZ+veoBZf2BBv/74alpRSFuOD+h1f9i88PTwv5gUppSV4AwtFrQYtOcw1IdDOFaqtU8KXFnhCgSlLMBQRq7T03aL9qTbnAbN89xpwLzV6NWEMViathCDUU+estPI1wXG+/qwD11FsuxSHekJpMi3FWtO0ojUJPcY6mqbF+0a6/Fbs9Z2XYO+9pThPsZ2s73JO9I31GB8wLpBdg/OB1htar3TdLPTcqikpxQh322pBYxzONdIJMUBJkIXym2NiKs/cXF/z7ttfkcyK5Dzebbh+P+Ntz271B/7D6n/jV7/4CqZIE2Dj96ycxSVLYzZ03mJIlJLo2gbfWMpQaPuG64/v2V1/RSqGX//61/zTf/o1f/zTnzgeR56eDvz7//Xfk1Nh2J/k/jmzOFOJCY4MW5YOSCZTFvpsSJb9y57v//QDv/ntb4Xm0LWCUqoi/DSNSrs5G2pcBvJzsBJcoM7oW57+QlL+VqP1l1637C0tEmsS/9Zx8dLsYp4jTdNgnFohV5RQB5kCS5FSP7ip3TzkoBeQ4OwWWBMCSR7jkrSXUpZC2DlNJaxQ0OvvuixmsymqLdbPjYi0JZFTOhxSzMQoAE7dF8YYQgiklBbHSK+215cHWqkhwBk1BzJLt6sU
AVnEBVdoLdjX9+Ry1lopZYl19f6/pUZezoyrD3tx3V9/ttcFfX1NTYwui7DLNVILx7dr7m3RLndR7qO1klju93u6rsNa6FcdKSUeH594fHzk+hq2m41c1xzFiAMrOsjghP6DI+ZZjKPszJwnve/Sna+IdU2+5fOcAb0YI1m1z1XLc3ZAEITcGnBOrPOdC9SYLueRaMu8VyqvTYvbaVGxzkXqWr+9xEGNp+/ff8T7jtvbW8bxlq7tWG82y9w1q7QqHxzGSgJ0ba/4/HDH8/OjrLlpZrNe44pZ5nY1TVjuQdM2iw4sRykio3WUtsMYR0zIYOemIwBdK1RgUqZtW3LMDMOo1vBB113COcPpdGAYDjKAV4f6ypk4IzNWPbWzEWeJdzKGpXA6nRi1OOv7fjm36lmaUmKKEdf1NI3HGSvrZBaa+TxNpFKTVSnCckqSSJdWugyrFa1zNNZji4UshmOpGIahjg+Q7mpWCpj1Dt94jWuR4/Eoia534noLtG3LbrdjtZJCuGkapmkU+YrX74xht90Q54mHexmUXQoY73A+8+Fjy9dfvedPf3jhcHwixonQBMZpZH94wjSWtlsRQiPUEqWsCeAkXbNcrDChrBcAxLWkuMYamem73W7o2lb3q6M1hoKVYc7MS5ySfX7Wz9ZcQhL0DMja9tZhnNCHs7JDYhRTFGMcORsq22KJeUgMnWeZneuDU1Bc2GNVU4apZ4uYwXnvpds7JTBBCqAidPWoQOUCwNRzTwwOBIwpFWjRaF8qLi8AtjEQSZRcD53K3DDsn1+IWTrdcZxYh47dbkewjpIi5ETftozKShBQXkAjrOrLigKzWrQ55y5AOAX7ihZCX4jDlyCpQXRpb8HXLxdNtSiUnD+WGUiLo2Q1LDPGYIvEMYzDBo9xwsgrRcAM76R4nWMUkNI4SQlrNC8FzP9PNG2tt2zXG3a7NU3nySUzHE+8HI4qBLakbPGhIfiG65sb3n2zYyoDz6cjP376zB+//4mXp4mSA8Z5RRMA5ZdWepKmLMvNgyw0GgplEiFhVBQmpiqolOKslLPRSbpAaJ22uIM1OiPJ4Xy3aIVyESMDkAG3wZ6d16w9J9/ey2yQrmtZr3rWmxXr9VrQXAqmRKbxwPPDLcfjwPEow62P+4EYi3J+pdtQXKSYuCxCq4urZCgpS/IknLtFd1WTkoq8XKIvi4j/IhH8YlKrj1ocFYwWbVHpBIiGjMLheOTu/p7hNCqYmkm50PW9XO/MMj/GB7/wjjFC4RBdjaDC566XFmbOg4rbpUvplAftF95y07YigMfgrdjk1kDknBNbWX2/qlcT9nIV86pBiEXoitqpMKYIUldaoScYNXGkClwNxgbRmFhHaAK5mEW3k9UqF12fUNvqCcyMZVAxq1WnREcxjmIb1T16ofmuV4TdGh8ackzEXEjJKuVC7avEvkda+jLFE1eMiLazZxpGTmOEsMPuvuIxigC4MYY4FcbkCe2Grz58zarr+PtffUNIEfKJtin0PnO96fj2Fzf0rceZxHa3xVrD88szbdtw/e4dYX3N7f2R//if/pl/+Id/4PPPn8kFxjES48TD3TPeC8I1jULzvezMVDTwrFcSd9acIrkEppT4w/d/5Ieff8I4KY5P8yB7MyWGaaIzzXIwlcKCLl4+ykWhvKz1L3TNLp9/m3C/7aK8/fPbLs/b9885M8aZ0InbmQzQDvJ7dE0tP2eNUvHkAHdeYuFyQJmKoF5+xnTxec66T4NSayyYonbJVRRv6nxAvR/6nrL2MzlldWhFu05G6NdxBmPwIZwv8MXjrabsjADr36smLNVuVS0izVnvVkX8mHPh9/q9ziDNZaF12X17e7jX16PIvEE0bDmdi4vFmMSchfwVmHqbIFx20lKKS0Es8Sgrlf782aq2ptJXT0cxz5E4n+m7DnNteHh85Pn5iZIS19c3tE2rSV5mnmXobHBGXI5t1qHXdRqlFqXlvDa8d4RGgCRJDDnP07NGdXxFwQ3V+Bgr9vVR6PaXRd0rumnJCy39cv1q03gBySSZLTJ7bM7a3ZwxBm5u3rFardm/vPDw8Mjh8Ek7PC1tE3BB9sY4jhxOR/nv8UjbNHz86qMM/o6RULtBxuC8X+bLdW1HJhPnmbZtWNtCQszSUjEEE3BeQAe5igEyBMRp+PbTHWmW7qcxhmEYiHFS6hc69iNjLIsbIEUoYN471uue9XrL48Mzz897QgjEKFQ6GX0E4zQQQmC9WjPNMlfsdDoxDCdMSXTdDSlGpmGAUsTYZRho23YBJ4w5O3xKYSNMFe8DReeuRtRFu3hmI6N/UipYVzvsiXXfcH29k0IuZwyioRxUdmKtZbfb8e7dO9o2LPc9eC+z50KjBWSBYPjqq4+s1z37/TMxDjhv8G1LyXtgS9dZ5lOha1es+i3jEDkdj4Q2EIKluEKOYsbGZQfLOvJcKKYsLIEQAu/ev8Oadyoh0Xth6nW5AL6d5+w4XHXIRV0faxNIiyCblhhoinbMEJqcdK2hzsyrOuHLTpvsGSmunWsEONauoWCQ2h03opdPRjRhRn7gPF6gZIwvOAVnpmnC6NikYkQPLkyCcnEmVnrlOUYb48hWPAxyNuJGq7nvNE0yJssI8LWfH/lhmNlfX9E0LXmeGI7VxdQvwF0ukGPCLBTxer/qlTi7U6KxtjZpLh+XZ/BlHL/8+7cOya9BN8mP6qiWSmusBXE1g5N9GhFXToOZreRliMzO2aOwnYyVWX0mUKrWW6n0cpFff/63j7+Jos0YI9b+Gx2QacR85HA4cjqNpFyTEKPVqeHd+3f86r/6JS/DC4c//J794cTxNIBpycVTYhHqTclyh/UwWC764ranlupJXKyKigljymq1LotFiqGEt2ZBHJ1tcE1LCC3VedBUUWnMTNMgugNFF02VmBhBfQWlFbQlOI9fGfyV5V/83besVh1t2zCps+JhP/Hy8szpcGA6nRiGUSgeSfQ21noJbLoek6i4Qf+dUwJ156wuTiA6KDBLgVbRGzi3ai81HaXIdX27Ab7USZCFrh0wnVWWixgDgOjlMIZvvvkGZz0vv3zh9O7Et//t39G2HfMUxdnHB3IpBB8WJAsjRZlo/WTktNFiWS6y03l2ipybWszZRZdhrcUFL+YwuRCMdDyTDoGttt+irxGhKkW0bkVpFCTFk4wY6MQ4cxqP5JykC0IHVty+sqIyxTgJSLV1rsls07aqJRTNZRP8YuZQlsNCNHytiRIYXcCGFlxgSmBciw8dfb9h06/p1y3OWeZhYL9/IUY9jI0Ury6EhbJpQ4N1jmkQ2kHXrXA2MEZLMi0fv/ma/uaKghSwPohtdAiWxnd0/XvatuEXH6/x80iJhq4tvNut2K5XvLvaMU0neV/veX565jRE+vWGw2nmp+//C//H//Wf+e53f+D5eS+JbpHgnVKhaWSfTZPMB6yootF1WOffUbtUpdJcZE2knPj8+ZaX/X4ZUxHahpxkL80x4qxRqgQXgfmi6CosqNrbGHa5b/7S3/1/PRYd2UXhUN/znCgYqq7Sh0ZogdYuTpM12b/sGBsX9Pv
YhV6on5baTUPpLdK9qU6LZkngnHNQjHbcykVioIm6fu3FPbbSPkylJet3VPqMmKpYbDkP/445EZxfioDlee24XR6ulaJDEWOYlDMu+EUz66y4EKMa5ksTklp01fetz18WEPW619hXXStrUmutJcVILGfKpLevC/mS6xpVnYfJr75DveeX1EtJitLitFvvkyRvdX0VNLOjmrVYaxmOR9q2F41jKfR9Syk77u8feHl5xlvHdrejaRoZDjvMJF9odLZU27YcDwd5z+W7mKUwlPgvNNnaHZtnw36/Bwpd3xFzXVOFmAyz6r7dgoEL8pyL6OlqnLXWqVmJzGs6J2jy/4sWbvkHNTgQ2YRofGamccI7z3a7o2laSRqPR1KKHE4T5ZiXgjsEmS26u9ry8rLn5Snw8euv2T+/MJ4mAQLKG0dUTWa7rpUiKrekErEpQxQDI0qk1PtsEjYENquN9o+BbQAAIABJREFUFGm3J7YbMQkrQNs2jKOMITDaaT8NgxQRKuXISoee54lhONG2HT5I3B6GE9ahWidJGJOOT3J+y2rV07YtmIKLkWgMp+ORxnvmaWQcB6RzN9P3PRTDnGaJI9pRSTGRDfS+0Vpb1ukwDWRbsOWsmbWVpm0QuhxacAaLGFoJ+OOcg8wXack5Z+luxqgjfBA6XZZC6vr6mu1uQykRY3X/x4HH+zsssF5t8NbTdWtOp4n98Zm4ijhjWHc9tg+L6RgZSi6MqvGv/t8xJRoPbdNiLeR5FjnvEo/tEttcMaRSiyhHTnKOWDmwSUlpc7KIATF5K2raU1CfhVxBcnUeNE7ZF/XMqeDaGfxJaZa9I3WFvl6AjlqXpii0WzEBkmJCArQYUEk+5WQUzEVcyjlBmpFh6mcWgsiOzrmfsU6zc82ZjDYiSqFMswBbWWbdWWs4YbRgNxwPR1JKylwKi5GTUUQoF+m01/3u9IulHOU1F+vGVjadPPHn5/LFUfyKGSFfhJzSOUrVYk6qcg1HlwBeBTWq/rkAcuYYJNnPRbTHFjG5mbXbaKyneAfFLWDeOTr+9cffSNEG23WPdbLgx3nm6emZ0zBSirpMWYsthlhE8zRME3/8/gf++Xf/zHe//z0//XSngxDRpLIIsmArwpEXe+FapaMbJGUDRYqZak8KEpQygsA45zDB0/ms+qFAwVGonRHppFVHSlnPni54TR4T3ghKaJ3BOzGD6Fc961VP3/c8fnzg0B7w3rHf7/n0ec/xeGC/f2EchXpQ5oLJsuHKGZsjlllQ7+XfFbVDs08tlYz8nFXTDH3Bua1sXiMVr1CMikZr0nL53GURtySa2iavLnOpnBHiQiY0DevNRosbi9l5mquOm3cfloRQmk12SaAXHr8Ra9xplpksWEuJRcxEAGwRwWwaRJeFFInTNOO8p+v7ZWN5H6TVbx3ROAkwFOk8YRQIOc/+yVhiFm2DMWCtXHVrE8fjgU+ff6KUzId371n1YhhjisMWyEQyhilp4ei8OKkZg00jWYeiB2cxZRa72MXWX9alaByl01iTbDC03tGvVlzfvGO93oG1TPORw9PPnIYRvSxM84wLgWwtQ6yDgp0ga77BmIBvVsw5M9uWfnfNLz68xzbyM6GBYCKrtrDrAm25oi0d+fBEExzeZm6u1tzsrtmsAyvvWPViB92vNzhreHx6YkoO3IrffPcT//zb7/jx8z2PLwODDuo1SLJQEHrbnCXwxTkqlUUssKvLYc5CpYk5nilsmjC3oaNpOv7w/Y8c9iemcVaHOXFitEaSS77Q+Xi1J4xaa+fXXZLLvfKXirRzsnneKwuKffH8pabt8medjopwztH3Pf16Q2jb5aCVOuh8qJy1CWA4U38l0U/L4V8LWyvZw/Lzxlx+d+ksS5fj3AV05txNqr/zrZ5LRnvYJfGwOgh8oZs1jcSlomQ3c+7q1BhzNkYpr+jKtcsmDqrn61+du6yi3UY714u7K2dtbn3Uz3NJcboErZqmWX6mXsusRcdZr3KezVZfu1Aic3p1vy9plpfaunPBfL6Gl9q+SxANspp+aLGHdJCMMXRdB0AIluvrLaf9ibu7W+Y4s7m6kgHIzsvssZjE6t452r5lihMxzwuCbHSB5SRDk51zbLdbSsmsVh03Nzccj0fuH37gSjtd1loaKxb9JWdxODWSjNdEvWpN6nK3xhJCg/FCvQOYxoGmCWI05pzOp1LwxMpzxeo4l87hg1BufZD/X5WOzXZNShHvlQ2h66cWF6UUnDUcXp7ZrFfsNhte8lFAwaVQhq5rOHc7oZREQ8HnSOs8MSdiySQLo1U9fUmsVx39tuPu0y3eaxe8CKbpvcW5FSvbY62sC994KJ44JVKOjOOJ0+kghdIwMAw/kzUJrqBGTdaXrmXOfPr0aaEahxAwzlHmkTiPdI3HB0eI6qSckjhNGmG3WERPJtQ4GVVjnRO785Tp2pb98cCYZppGOsIxZzxi8GW9p5QkNMjgFYD15GiZiAxD3T+WaZqIMdI0/qyZR+bSGuyiP6r0ce8dqZrWmCJjiWJiGma8bbTbHGiC5frqA3NMHA8jx+eBbX+FDfJ+3gqNc1ZmlXNi9Z9ylFwwV8BC6GtSaMqeCMGrE2Dt1NTYYpb4iYK+AtrLKIVF2WGMXqPzWlzAYaRLbb1dQBmjGn7RcldwrVK8IyL00dmuSWfpTjOpjCTUdMQUsbDPZTEsqxvQGKGlv2Z4QCHh2k4+R6keBzK79Rzzy9Kdq86TxhpImaZtSDFy2h9kPWEYTieRNmmsLKUQk6fruiVuGXUksWqmJUWXni1GiniHozoAezV50r9e/l1j+1v2CuZ8vi/PVeZE3eVLoXcupyQe1LNQBTlOncpVG355Jjq9Z6Wo1s4aHU0h8ZCL+yBnLn/18TdRtGEMvhEkdo4z++OB4yCzl0rR5mSpiLHl9vGB//0//J8M6cinh8/s90disTjbqqtgBBI5D3KplV4hPWqQyq6ohb0MjUw4YpYCrZpVhEZoZz54sfh0Doe4FKYkATNpB6nkJFq2ighSyCYTPDoToqPrxF5WApijbTq6riH4wDRPjOPA0+MD8+9OHI9HhnEgVfFvRV2T08NMkxrpA+pf5oVaIV9XqIOm1O5gLenLguZXK1Ql+S2Pv0R5rBTKSyS6ar3kVtbgKrbjac70/Yr3H97TtA3r9VoLadXGVQ1PEp1ft7JsdleC5JTC6TSQFpTdMSftdigCErMI44tJYOzizGm9OE3ZOBNPR4xzPL/sGYaB9XZDcI4myAwXdBBsnCec9UvAMdXKVVut1dEOPAaPdVZMK1LBWKFFWefZbK5xztCt1jTOYysn3RhxVgIadfh0QYK8dQbyrImfwRodnpml+xGkOpSbYAOzbwStMRbfdKw3W3ZXV6zXG7y6bg77F+JwIA17HXUgTkYuZXI8Lp0j1zQE60kI/Rbf0fo1m02P77esrq5p+oAxYpTTOOjNwE1v2HaelduQjoX9LPrSjx9u+NXXH7m66XHMHO/ucb7FGcM0zdzfP/HwtOf+8YUfP93y+z98z+39A7kYsrWUbLUTDqXYpTiLMYk1eTqbCl
0Gx1JkMGndGzU5bpXimmLk+emZ/csLMuw0UpKRmWvTLI6Zc9L8X9HfovFCeuiKrBntuJlXv6cmTOew9hr8ePvPpV7tXGCd11lNbOvfVb2p93K4FSy2dp8VjKroscwLquNEWLoZtQCryY8cMDXRu0ie7VlDtiSuAmGdiznEXEFc+cwyaqHubemmnAulQj08Obu2FaSAM2WZe1QLpcvr95bSWK9bzmolzrkLoyFO4pSR2T/ee1I568ne3p+/xBio96a+5lX30ghSTRFtTBsaNtsNu+2Ovu8lCT4ceHp64jSOMrczn79fCOdBtG9/h7OCjsc0452eP0FRaJS5ECtduup9syaREJ1hoiil3EBw0DekOLN/ljlim+1OOlhWDKycEYtx54IYkQxF0HnrMEX2nbxezrRvv/0lHz++p+9bNpuN2OJ76bxK4i06w3MnJV9c40zJlXJf76nsM2s0aZUFrWdfJqeINTKDqijqHbUrmav2MXiMFZBU6K3nc29hLHBhNCC/CWMsH9695w7D/e0t7ivPer3m4fGJWV9ntTiS86hgkPENxAk7jXQrJ/Nf55GMo2lWzKnQr1dcv9sxnQZe9s/i86cFH9ngvFvyS+ssPhRcCMxjwppEKZIvNE0gJaGwTVPU6+qZp4gwSs5d2BACbatxL4n+Sw5GIT5bIyNM1que1gdyTByOJ1JKNG3LPEeG/Qky2CAgbdM1jPNEYwxljszTJJ2JnEhKz4QshXgWXTGm4H2zsCRiLqz7nv3LHSlnQghnQLoUmQ94ARSI2+3FPtU9UjuxVrWsvjjGcWCSIZo4G/S+Gq52V4zTyMPjHU8PL7RNx+5qS9c2CiSVZU3mi26R81blD+eYVwvjGi9CaJiU4rnpt2y3u+XvpQC/APYuigaJHTXWOrwVlkEu4p2QcsSmiHFn8zeZ+WfOlEDtPgnN0pKixAzp4FVbJ8hJTJhq7itmMwJwv36cv+dFoxvnhQot+YJ9fSZcgC4xRtFCm7h8z6x5T9M05FZGNKU5Siy+0IU7L9IV3yiwoM0XTVG0E1aBGqP5e5X9aDdXY1MF+C9j+SVAtsTwpQg757JLd+3iOYwUaPW61PX55zlyWYAzg5w7ld2RaiG4HHn5gn1h1LyOi6bSX378bRRtQDFZnHjmSQaEHo7Mc6IkQ+V3RyKPpwPPJ0u5LUQSwgQWnmhMgsQbLKVESg22VWe0JATIAihCc8g6jwojmiIf1ImrbRZ3oJQkMBnhu5Ar7UXnfTjvxGqegjeCBrddR/CB0ASatqHrOtoavNLM6bTn6f4kowVORx53T0xfjRxvRZx8XhOybEtmOQjPFZbYL5cUqZ7yBqFwyHVQiaMuGlkXcj2KgVKn0BuW1vKXHksyRA1if764X3cLJLl49/49/92/+TdsNhtxoCpJhdwVoRBjDO86ysZxao70qx3jMDAMIzlDTLUbhNAJrKdQtROqLVKUilKWQ4OccNYRi4hGgwt0V710CmNmVhdPjKA03lookpiYDNlaUhLqiHVWaKzq0GaKJU3VUahARotxsYIVxN2S40SOicZb2qYTJKogSJUm5NVJNGt32BahEHgHxjt8EJeinKRgyLbhRMC3PdvdFVfX1+y2OwyZ4bTn+HjHeDqQ54mQJ1rVJ+WkBUGBRqmeznkaL91Fkw3GBXy/or/asv3wjnazA2dxZqbzhm3n2LSWm1XHqploXGYdLIQVG67ou4a//9f/il3fUcqRNA64tiMWGI4jD49P/PFPP/HDT5/4048/c/f4zDxnprk6oEkcqDqpnFnE8RXEyfWwyKjl/3mdVpMiowYRXdvSN562aTm+HHh+fKJrGnzTcTjIbELvHMlE7WReisbPdERAHK6MdP7r2n0Vw0pZujRfKuj+LOHXx1nbajnrEczSuanaqksakbGWbFQ7W+mORbth1mpSVk0txDCpZDCuBo7qDOg402mEvkstTJF7MM9Rk2u51nVGkjeiXcighkv+osOkXB09K62pXb2i4zbkyMw5KWCkDIFSiBfJ/WWn//KeLEBRTVQVxRRarKwfi1FkU9YR9lz81ev5pQP4EpS6vFfVTexM5SpLJzfnxDCcSClyPBzwaupUk+iubcEkBfvi8t1ev19NmBJJx4WUUkhoMzEnoWB6T9s0hLUkpjHOy2eeZ5mZlVNiZsIYHUwePI0Tu/Wnw4HhdKQAm+1WGB9dyzSeeHy6J3hPt9rQdT0pqTYrRV1nLAZVztllzMDLywslF3bbLamIu+A8z0tH0mpszSXjg9ihCxYpWsm6buo+0owd68RiXI5sMR/zNlCMEb15yhcJqnQcjCk0Te0+VwAAcrIC9sASt5cus7GE0PH11x/5/PmWH3/4gdX2hmmOSzd3kQtYI0BDTpAlZ8mjzhNrG2wRXWUxhq7r+eabX3J43vPph59FhmEL03RChtxbnPFK6UcBQJmbttn0WuQLBW6eR2KKxHnmdJqYJmGOGMtCy5VraAjBs9V7ezweeX5+lm6cdyQESEkp0riA7xzX2yt+/vSZ4zDIGAbdSw4n7onHE23fMQ4n9vf3NNbRhpa2aShzUaBUaGwpzxfFVnUXFcpl1/ccjkceHh4kPvet0lRlHdU145zMoRTNfd1tmsMYRP5iRMNXonTE0pQ5nQa6LrHZbKTIMYXNpmeatwzDgWE4cffplhQnbm6u6fte4pLRTpcCddZZKBZj0xJjShYXQMsZMMol0XcrXNMwp4mX5xe8a6l6fIG2qj5U4q7sc6uAJMICoHZZNNcriaplr0yuuo+Emn3uEFcgDI0duerjtPuVktA9jU1KTS8U3OtOG/W9su60snxn56EkvwCVuYi+vn6P6hchujoPSlVfJAopQdvRhgZnLMf9gTxrnCbjvKPpAl3fSodZ74GxTu4/6j1RigKHAkAH38jvSOL4XIBlNNYbN8i6f0s9Vy5y6KWALq8Lvfp60SS+zgsu89/aVS1FcoRcNBXXXBkMzuj5ZFh0u3JuiVFgXQBFQci/9vgbKdoKqUTGmBmmyDBO2r0yy+FewcTiYSqJki0YseZPuR77MrCZUoct14q8JnYXw07zuduk41flAhvpmhSTmeK0HLIxnWeLyeu0q6Z268FbvAVnCl3jadtA0wgi3rYtpchw5afHFw6HI8fjkdPppLSARCyF4ThhkmGeq6iyutJU/nB1zKojCN7AEDhdLDVhtIpgpwWhkufVRhkjMzHyueJ/dVe+gD6f0aGzaL8mVPVnmqZhu92yXu/YrHfEGLm/v5eF7q24Yulm9d6zWq0JYc2T3xPjCykWMYNJeXFQlHkZmoRhybFIK1z53kuia8DoIV5iYUiJ4Ftczvgg7l5yyQwpC8cegw4shlRRtCI0V4Nov7CGiPC0SYkpCZ3DqztoTRSxVocXC//e+oZiE6Hr6VYrEflHcUAyCLWSItz3OUmAmJnweIxUbWRjVBuSMXhcG9jdfMX2+obd1RXOWsbxxHB45rR/JE1HGiND40uamXMhCUxFVgvtbHTURGgQxTvY0NLtPrL58JHm+ga73RCDkZESNvGuNdy0cNXBOhiCMTTO0AeP8
Z5tCPSup2dNjAOFGd93+NDx8MMnfveHP/LTz3d894fvuX945jTNTHNmGCT5sMZgo5h/xIugH7PMmKouoRk1CInVJp0laBo9eJ3Sj9ebNduuw1vL50+3Ikh3gRyz2FrPQjsNzovWUguxS9v2ywBtjCQYJRecPyOul/TAv1SoXQb9t+YaC6XP6eHH607Pax2aAEZStEgHy9XEX0PCkugUEbcv+jMLkC9QRY0xaq/jrBcjozmr6YOjlHjxuytFRxMF/c7OyFye5XrUQ18/SlEqjVH6SkaKKqeGKRKXsybjyvu/uE5/0alTb7x6RqpzmsSMOgusfoZKsTpTC19f48vnL/9cixTz5udLyeQ449VNEVDq2vAqHkpnwOBDt1Aug5rG/LlT2Tm+GvvaybZeh8tCsxZF1Yp8vV5/0fHXe5kxBi14y+PTCy/PT1hr2V1fYZ2l7RuKmRlPA8Z6ep39No4TySB7owiwJDFfDG6WdW6VMoTBuYmYynIWSbKYl25D1W7JfMv86juJYYxdEqwKKughJsO1fRADiqaFLOL/OM+M00DOSZ12FbEvUqQZmxdNuWiAzp1PjBR8zlm+/vojoXni589PpJhoGzX4Ufp13/fkGCkkSnZk74nB8zxPGBy2X9E1LX69plmteH468OPvf6Czgc1mR5wHhnmSgi9atcN3CwpfEIpccVXrKJb8PjgKCaOW4yEEmibpujsu9z1nYRzc39/TNM0CUCzGYjpSaBxOJBcJRn8HkObIsZxkLfUrcsyMJUsXaxqJMTKMI2PMzGHCBxk6bp0F7/GjW4ruorlInAU0/cW3v+R0OvHb737D4+Mj6/VadXkaU4tQP1erlcZ1u4A+KlgQo5+SKEWo0aVoSZQsx/3A49MjobGM04G29QuV/Op6ReEDt5/v2B9eiGkkpchutxNDGAW/nDpIpiJgiLWF7KRLnBUQXEAqJ7PGdrsdxnt+891/oWDY7a5xTnT4ssclpzzHUKPFqJiACf6YsL6eF0JxFLmQ5DuXIA+Inuv8uDSKqXiHZsSq5V0KAcMCgFC+HHvQvyuCjqp53Hlodr2/1Xyvdj4dqpfTeLXE4iKxPocGb6zEoUEJY0oP7voO3wZsUNAwo7NFHfW8qoY8tZuxNAqswRulq2usR4syw5k2Xufl1p+tjyV+XcTry3aX1bOEL/zsudDV97IOh1VkOb8qDOu5XOpN0J81ReoJuR+q5/orj7+Rog2GaeI0TuwPA8fTpIWYaIpqh6yYTEQMCIzx4pa3OENCjCOJdB4yjPKBFwTjjAw4ZyX4q2hwOYjROU/KMS66ABsf5IYkowtTkzDvpDhrA33jabylazxdFzBWBnHe331ivz+w3x/FWneexQkJWX9zTsrtlwQrpaxDVe2CsixtdpsoJcpNV70XRah8ulrPiYqV7y3T5KGGvnPLvq6oauZwfi7nivLIFq+bs6Iv1RgGEKFogfVqzWazYb1esVlvWK3WpFgYx5Gu68iU8/yYvqVfrZQiJLQ5Gfo98/JyJGWZ3XXpkJZywlovhbQiMU5b6bVwFPrzpa23zMoIXSudmlRwjQRU6xtCE1QXVUgmKdJWagVA8EHmnhlBMWMpzJM4bq26jtA0GMS5bp4mgvNKuRW9WrEwYzC+wYWOOZ0E6fRCdUk56/cC34hZRLEG4x3GOorxFNuADYTgWa93bK9u2F5d0/Ud4zjweHfHOLyQpxPEgZZIYzMlzcRsmGkoxon+yVnR9uWiw7kdGUu/WrG5/pbVx28J2y25DZTWg880reG683zdG65s5rqB1jqs6bFeDVIKYOVejtOAMxHfOKY4cvj8xD//5rf803/+NXf3TzztTwzjzDTLvMBpElAh+EAelWajdMicz/tDerPC1SfLuhYoQ6l+mpC1CpS0bcN6tcKkxN3tLQ/3D9gi+sFxkkGefdMynCYtICyha0hFCpP63ourYoG9P7Ber+RAM1/So/21jtp5FMSXaHjymrPmpv7dkrQu72Nl4K8Vm+pSCtlWo5AiWoN6wOvhrcwL3cd6bCtC6L2MMZbkQoreEBqth+ocrUKdy1W3SM7glnlR2jUr0sFPCxKrv6dkiUz62eX+STEniVjV47J83lc62osiqj4nXQ+72PwvMxjVjVcOeekqOGsuioLXNKW3BdrSGbq4N/Uz1Osvydj5d9bQaZSqM00TBWiU1hijOCemdEZ/axL9lvJpDGRzTkws57WIkQJ3mmdiUrOpkvE+qN2701ltkuxU+/a2bfFWjDfCnPChodkfeDk8U0zi5uaapvGqLxbHN3NydH1P2zYUb8/uckYo3NM8AcJSmKZZKbGOKUb9nkIpLkUpTJpRTtNEshlrAjnX9cgrgDLnouYJes9MIV+AB5Vl4X2QPKFEgo5EmeNEnbW4gAzGYJK7WD9O95lZXpdmoaM3bUv4+JGUPbd3d8wp0vlGmRHiKIz3Qhl1ntgFToNlKIau6ehWa1JxTAl++uNP7J/3hGxoupZ5HDG+0PcSZ1IuAtgOg9LdVe+CxDivBYO12uFXbaNV2mFMEiebNjAOgxjrGBnEPgwDx+Nx6bbXtVsBldPxSCkyg3MYJkIQ2/2Xw57j4UDXdAQX8AoETdO0AIziKzASKExpJpFp+x7pXArzqJ7LmcJ2tyOnwu9/9wc+ffpE1vEHdQ8I5XOiaRpiFFrxdrXRZPacK9XOdd2/qeYUWTvA08A4zhxPD2Ai19fXGPuevuvZbXos73He8HJ45vPnz5wORxnE3nUYF0jFgGvB1T0e1dQlaww1WGMWsDnlzMPDA7f3d9w/P/L+w/vzWUDt2gv4K/kU6rBazxbN88rr2CQoak32JablfHZWdc6eo2WBOnMN7QQ71R9SR+CUBESskbEbS3fuzWNxtNQ9k8lEBUsqEyWnJIB6PuuLrepLre6vWqxhhL1WciE0LWxkNMSgzDDrLW0XpGhrvPolFErMmsfVnoTBUo2n8pLPGo0bMpe3duQvii991HOs/n/JOleyvtPF6y/PcjS3EDaFFlkKktW1KJWG5fJPYo33WjNXssyTznWUgZp4UQvy+lnyF27MxeNvomgrBY7zyMvLgdNpJCeh+MlXFnpiLX6NVfcYBC2iFEqSqevCq854f6bRCC9ZL4qxtE0jtIQLjUWcRaRpXC3iJHH2zr9CfK21NEHa4wuFspFkWpz+DCnN7I9HHp9nDocDp5MEzur2ZpzHtII4ZMTMokyj6Ko0wQKj5iWRXIx2gip3F3IWtEL3kl5DpX2iwn8tPEqpi+/stHZuz2VqC1MwUTkgCgmLdjAWZF4scksBomym5Az4Ft97rm5ueP/+PevVipKSFo6WrnW0XUvOmXGccKERl8Tg8dZDhnGeiTpDZo6R4zic0VjrKNksnSyTxd1zIRyUc2Jr9b6bUmRkQ5bBt9bJbKFGO4shhMVls2giapwFa8lJi1VdZ1VjJGyEhC0WG2TGXghVM6AccQtzjov1utxPS/AdTdhQkqFkiylWxfQXnS8KxkTIhqZZYUJHRAo2o26Q680V67Vw8W2aeXn8zGH/yPH4RE4nPcgtuVheIpTU6pBH
T7BOaX065JGMMQ2Ynma1ZXvzgf7mG/z6SuiQRDqb6EPhw7rlq7XjXQsb62gNSKs2Y9ScpRgvCYgr/E/+fxaDrFyEBtwcePqXzxw+niQZU2dWcdXTw0F5S7U0OZtdoLQ5qH2b+vyrYcXmXLxdUgmNgTTPMrtwmiDJzJlhmuSwN060clkGbTs1rakfpFKBawtr/nczd//LvfxRg1JNput/689donWU1wAI+paXn//8c+b1029eXp/DnJOxmkwAbw4f8+p95SWVTqNFi7HL6+Z+5vqfbsTp1lYOPzUQaKzQpLloYVaLU71/rnZFDEvHKyWoYwgoItyO0/zqC0kSw6JVqDFcqOKvC+T6HV91qooArLbU23U+EHMWNkelwqHrSg541epZMfcBFiaC0cTeOp1tqE6UzkqX3npDLpLsO1cNHWayGj9NqeCNzj8qZ4bC22Kx0j1Lkf2R7XmO3JwTHkkgLymSMWZxjqMwxdq5N8u8TXH1rGySma7rCaGhD0XnVXkKkcNxT2gsa7PFuAYTLCadOJ6eiWlkt7sSs5uU8U3DPI0M44h1RgqkLPE0TzNM08UCdVT6T85KHb4o1GXAMUREtycDpgXVd8YSNWE3Rp3yisRYGyRWp2T05CpCx/MO7wMhNUzjyDzPDOMgVFUfxJysLkHE9GbphpSM9Q2UwjBH5imy6RuevZHRIkr5irkwTongHa0a6FgaXOhBgZ/TNPH8/Mzzyx6METofIh48AAAgAElEQVS2ccwmUZ2XrbFgUfDYYoN0o1KWGYkksKGRsRnY5Rp41AymUcC6GBrdK10vhU/dE6t5ZhzHhdEjA4AdKPMmG6XOecf1h49cX98wTxPH3w2M4wTGMJcERizhzTwTvIfgsaqPPmWdb5sz6ThAgeA9FidjK4yhc4Hj4xPf/+Y7Pt/dkmYxo0jzTFH63svLCxRxagyh5fl5zzxFrjZrmceLOhIa1dsmAfQAdRhObHc9xl7z+PzA6bTHOYt3LSnBOEVMkY7OB/eBpm14fn7mcDgyzZHQdfTrLTa0OBMkiJgas8pyngQrus8UE7eff+ZwPPF8OBBjpN+t2ew2+Ea1cNYiJp4X3X4vs7vEYbWhFIkp3nqyFTfVVwZFRTR21lTn2Ix14hcgjYq4zFGUYKa+DRUss0bH+2gBlKXQxTpq6VO7TKZ+yPOJgStO8uxsKElij1EjqWqglpVxgEG0bxrjC0CWGGgU3MvOQrfFRnBBnF+tg7Ba4Z3E/1C7V0VZN9GSsnxAYTQIgFjBYufkvMlF9nQtSsW8TXKMYop0tPScMIgesiTxpaBwptNzvgYlVyqo5idGtdN6rVwtE0tl9qHrxpBTPSe1S7xk9w6D5GlCjRQtuxRxLDq4v/T42yjaKDztD5xOJ/KccTRapYteImmabgC7iOalPXxZTXtn8b5Z9BUyo8epuYgk0MM4UpI4D2Y5uZEhzOdhy85WsSNK4QviXuUc3szCq68Hoybtp9Oe0+nEfi8o1ThFTJYet9OZYC54jHVK+RJ72XlOqlWT9mjO2i7ljLTWwiSVqPWWFlnmjAoUqjZCqaCKXktyqcYl5TJpfLVDa5ZC7brJeZmhVFqVqxRrvPUU6/HdmquvPnLz8SNFLcETBYwlWEtoPK230jFRNNhodzNFOKWZXArjNJGKZZxGEeFWZKpoomi8FAHaOg7eiGkGZhn0XZOhmgAFFRMbhxrI2MWQxqi6PQs5HTEPtboGsqIwipBZ5H47S7BB72MLToZy51LpT3nZbNZKAJVCROZn2WJIUyJPKpC3QRzaDOCdBkAZQmqbjpmA9T3/L3Pv8mvbtp13/Vp/jDHmXM/9OOee62srGDsRNRIlAqUaiTIVUCIkRAEpfwD/AEhQgQoVUFBECoGKhSIhEEoiIeMIBRwT7IBxbPyI45vrc897P9Za8zHG6A8KrfUxxlzn3HuuYyJ5Xl3ts9eeaz7G6L311r7va1/bX9+z298wXF0z9DtFSE8nnt58xvHwDu8SwSUSiTRNlNDjQkdyPdV5ohecK+QyU1JCCFpIdlf0+zuGq5fsru/Y39xDN4AX+gi7wXF/47kdKh/eeW4jdEBsEzUNTMDrKAmK7tX/uvw1xvIl4+GRX/vlX+cPfucHPLx/q7OI5sLxpI6wKTe3v8qcZ1vbFbLOtJmTHla5FQwiTPNo2nW9P50ZczQr9uvra2KI9EOPE8fheOTjP/xDpvOJ6ipv3nzJv/5zf4Z/6ed/lr/3a/+Af/r9H+CL4/04c54Kr3d3+OveGOqNbK4YuyzCD/6zj/lT//HP4MUr0yKrZK31YmwZtlpVTtYAgCZTLLWhqDrntWnag+8Q8ahErO3xVlho8BcbXI2zmS8WA71bR1o4e+1W7GifhcYBPcxVllJyZk6zGTZoYuBKRN3ITKohQusX1e8ky5+5Zkir0yZgfadttEhj57FEpRVRaqy9SCvR/aKsszrUbl0it3K/CwTV/myJh18sn1c5UpODezY9iBY788bR8eI72Od2tYF72LgMA/act3htltRFj2WV63iNka0gq2KgzKXMqb3v8vk318YmDmmSV9cektoYK2PSajbmxK5tyZr0p1SWtajS/DPHU2I3DOz6aAmQ49WLO5yHt1+94fF44sXL14j3yuJTOB6fGMeJq6vrRTIcYiSXzDieEAdzruaKHBampRQdzlxLM8jA+lsq8zTTRZupKcpGKuul8jPnIfhIzRq756KAZq5qMe+CJ7qAiNmmZ5NWL5JUsTPPE3wPFSaLu7HTGbBtkHErcBaZJNpuUalM5yfKPOp5XFVR8uWbdzwejkTvuBp6nBR8iKRcmGeV0c7zTEoJHwJdjBrbo9f/9oJIsb41de0VW+/zrH27c9JzMYbeFqO2fQSv8tOcM7lAiANFhNN5VEZut9NC1Ri20PUM+yuGvRZu8zxT5kQQZUnbsONur714/X7H9fU1x6cjn3/2GXOa9UzELP9nHcZ++8FLTuNIf3WF+MjT4cDpcGI8nhlPynTtdzo+p+TCw9t3vPn8Cy0unGM/qCQ/xrAwIw3w+OqrNzwejqo28p4pnXRtWcxUnKyBVbaXzNAiROHuxQ2x94zjDTFGhmFA8JTMwr6GEHhx/4I+dMoqHo+cHh6Y5sLV3QuGzsxtcIzjyJwnondQEmk8U0rSYvis/e7dbseLl/f4XUffhwUsrKyAQ5MQisWJaqyqEqpFgaatbN0bQE81t9EGjBk7Y06VwTt16a5q/NIGa+daTPlQWrijtQDplCe/sD8iTTouSxzGVGdVVifb2n7X2FbXisTSZMd16cNVExGB6s37AaiOjCCxY3//gpu7G3zwTOMZ76yfXxpAXqEmaq50oSMXdRyHYuxnO9t0D5VSqXOi1my/i8VPv8Th50xakEr1HlfV0CinrG0yDUGtRp6gpl4ISpa0e4k5lCM0V81cKs7p9SmCfmZBi2xMQyJQsdzRetj0/CosqNKPefyJKNo0qE7qTlWAqkyNE79oU0EP/ryhkbe9Hk0XvWiNsYMjq1a99XUFF9vLEZ0ipVCoZhHfxWY2sTq1dTFqEShQ0khOimg
9PDxwOp0W96NmW9sWUy2eGDcW0ilRig7RLOaGRU62QeQiGVkuzbNEZd1a7W/rImz/3Z7rLHAsNO6SAa6N28vvWVFcqaatV/q2yYgb3e6ccHV1w9XdHVd398T9XhFK57TIqVVRyBiBzOF0wvugA7JxyqidpiWBqYbCF76Jri5Lz0abLYYVnM7QjO33bd9/a66woJlogmBllW5oL+pU5BwZtQ0ONqtEZ+0lcw3Sjdk+R21oMWsg8M5Ta+urs6bwqnOMRBK5KMujgxjVTWiaC9WLMnbe4+KAc4EqgRA6ru9fcXP/guAV0U3jgaenJ06HJ+bDAzmdCYMm7r7qzDEfgg3S1GsYpdCRmXNmzgIh4Lod/fUrdncvub79gDhc4bqO4KHrC7vOcbMPfHAfebGHfVD0qw3L1Atdbd3OCEfAZs3lzBcfP/Bb//dv8E9+/bcZDyNzOpGmmTllOgmkpKYnxYKpZLHkuOh8m2yMalGZRBWTB+UKWQ8lJ5HoIh7Pvtuz3++JIaot+5x58+YNn3zyic6RM+fA+Snzw+9/zkcf/jQfvfgen338hulpQqZMLJGdv8ITmbPKsEIISDapXztMq6OvPcEF5iYBzHYIlIAXb8i4/kzRvrT0R4YQzLADWyMglnBXwBW3rDFoksom4So0SUxDCptjoz7X1h9rY/9i4FHWOT92ouFrQIqCCarWULa9MYvFCmvX+tVov2p/acXcZu+177ztGdM/nVnM+wXoWvtQ12KFapKizWt+k1NXA2m2MsytxHTpJWsFH9on8vxzPTcBed7bti2sFgm2azIbvSCXDekGSm3merXPNo3p4mftPVsx1J7nvV+UBNv334IBW+OO7XiA9jlSWhNUEbFzSW3hYxDr+Qr44LkTYZoz754OlPqGVx98gPPOZnZ5Tqczp9OJq6ur5bv2/aBrxIC91ueYczVjJf2sKedl8LP2LI0cDkdyX4lRZ6uta0YRbR88+G3ftMmtxJDvlEAmO5uckiIbSfw2P2huhG0MQs4Z590CuLYCayuNBXDeE2NvJikacxDouo6+ixRzcbSZxjjvubreAXucE8sdIqXkJSfw3uu4IAOFsP1YMdMPUWOS4JXtd07HBVEczswlUjIwUTw5zaQqi8Sq2vrsum5ZS81pdrfbMU0Tp6cD8+mID0HNO4oOuf700095eP/A9e6KaZoW46Cm6KEWStK+QXHC+XRmf3PLzd0td/f31FR5fP/Al59/wfH4pOuflm5UzuNIsL7L/fUO12bQlYI3Y52UdE7s4XDAOTVAyVkL+tpksxkFtFhjjzJIhblqH27XdQuA1vZa2y/b+9zvBnzU8T+nceRsjpw+BHLV5pynw4Hjw1vICU/VWG5SPB8j/bBj2O0JXWesUDPpAZFmwrOa2GiiroZaFZCq7tStX2+Jc5ucLpeMNFUD2JQm7X9qUkp1mbbYZ4DZkmOZO+9StDV8HhZgr6leXIvDxVQIYkYqFoe2cXgbIxsYWRf9grKUQhsjo2xUyQmKSpl3Q0fXR06UpeWkAeuqprM+TwnWv6i9p7V6GvioymabiYfDoTlXu9fbuLk9D5SvETPsQhlwpch0T9YKVc2EqGKga6X1+rWrYFYbyz7GwEOM9auuxXm9DKsJSaHgqGIja0xeCe3e/ujHn4yiDdN7GsuUc9Yb0OaZOFE0QbShW5xKClphpZKXBZJWrbj1wzR2yfvY8n1FFuz3uy6Cr7gAXdez2+kgyq7TBLC5N53PZ46HA+/fvWEcR0VgDFXbSnTWBSKI2Wa354loBRSiWrx6ClkKsxSmpAthscfeJA7bh1bka2EDazKx1eUqmq5spV1iY9da5WKFm7FyiAVC+x+i9rw+RJyPQGC/v+bF/Qte3t/hY0f1nuK08Mk5mxlLwJcCJSNe6HZ7xAVyLnY46js7Xad6b0LUjyJt4Kn2dlG3Sc9qIX45aHI7cNZdFHK1quQthI7ovKJc2RJpdG8sB6x3i5OliJBTYnZivRhaxLahiI0pyXaAOmNT7SLTXIiECk7nt6knXgGvyGCpHucHnA8E36shSOyRrmN/dc3NzR3762tqhfF05nQ4MJ6OzNOZMs+4Ottw10BOEx617deAlNVWIjg6IJSKi8q8Mdwy3H/A/vY1cXeDDFcQAxIdfUy82DvubiM3157bPfROW4FrqeqE6cRYcIdzRdeNjFDOHB4n/vFv/AH/z//1uzx99SXz0+ecj28YZUeqGpzmOZFKheotwcukuuldwXp40Xuda0KKMCediRfCumR3ux0vXrxY1kyMkbdv3/Lxxx8vkmRvzm/jnJkzPD4d+OGnn9Hf7Njtrzk/vtE17jotnoMOiW17GWNbwJisBdFfn/NN5iLb/SnWU4Wx0GXpT3YL+6bsrFsSkLZpv56we7YH+7ZY2bKDWyOMi89j51JL7FuSl6wPKXhFUx0qe1Hmw+ExFzjT9ItFkO37ts+xdWVcgDUXtU+5FS32u9sCD8z6udZFcrmNac+LsfZeF8XZNvG5iJl6PbcOnC2Zbs95Dv5cfDe39tHVWpbetIakP/8cS7HckqDNPXzOtj1/f7FzbC226sWcuu3vtj7J9rNWyG3nLbXkXZNKlaH3vSpSUprpu46XL1+ScRxPI2/fvOHV3Q37vufmpieEjvN54unxyG6/s4HdGed2BlbOjGNaCsEm2YqxI4aO/X7HsOsQybx6dcd3P/opYux4ejpwOBztucFMePwyRHlu50WtFlM9u91e7908gc3xCjEYaFY312UFMzob+ZFSYkrTMuJlC/bWWpdip61LXwXv3zDlye6n5/b2mlcvX+IEhqjnho5SyRugJNt50OZl6Wgh7EyS0oTettbsHCt2Dzvf4byevwqjVErWHKJSF7dNRBhihw+XpjZrv43u8+YomlJi6DpOT+psOKWZOSVzxs48vHvH8fERMSWEyNoXGr0jTTNv37xRaTsqG/388y+JXYd3gTROyr4aE2Mhys5VldkdT0+M85kYO7xXh8uXr14SfOBwOPL09KSA5jQzDIMCvr7NlywNKbHrrOtj3X9cXIMtOLLdB+33257s+x4fI0OBfhjAYiN4dsOOUGZqzkQvajrkNc9wMSjj7RyliBnbYEWhx7nWN7aJyaJlzTjr2IggECxxbz4GxdzJV8v4TJPpt35nNX2y4fRUJAhzzqSc1VY/r4XLAnovr4P1AdvnYp1JucgD3QqUZJO/tvjXrufzc6bldrr2RPvnvBb+1KxjJI4nTscDiQp15ur6inmayWkiOKdqtFbti362Oakqw7ICvFcZpveBaVTWzTtP6DxOFNxs36d9vm3BDnrGFpM8LwBHF6lV1tYNsZy0rKRHpanwtJ+6ZG2tkGpz4ww8adLUIGZEg5nqGbHRNr+K6py9l5islh/7+BNStJl0omVr1TSgxRr2rKo2jF3nDzVtrTjmtFgMWSWrRh7e2fwigejFmLSAM3ljHzti9EgU4hDZ7fbshkGr+lI4j2eeHg8cDk+8f3jP48Mj07Q6NDWs4rm8pjVSS7Nwd8rqRNMhR+9AbMyygHhF1BLZqPKvOzmuf7/8+eW/rY
9KtQCnC52GFLBOYDeedvkNezF01WigzVXo+ys++uhnuL2710VKVmbIElwtNoVGH3gHrhYgkKsjzdnkOw4J3lAUY1oqqAyyLJ9hSWxY5z7Bigh13qtZh2wHBl9eB01w9PuVnNWqt2qx771S9tr0bQm3Vxkj6IauToNCseK/OGVKpIhJA0w3iaEA7WesM1XUqCYTe0OGClRx5uSo8pAqDuc6um7P/vaeYb/n6uqKYeiZxzPHxyem84k0nqh5JtZK8DCDNt67QGuSTmMhkcEVQueIUQfSl9kjcc9wfU13+yH97Uvi7poQO2J0DNFxvQ/cDcIHd56720DfqRMqWbXrUhMlzUw5MU1nxM1c3wyKbOWZ0/HAH/zeD/nHv/Y7vP/iRDo9Mh6/z8tXwlenjuNjwknkcDqh1bonJ+tZEzOBKOYaZ/c8F7UvVvmcNuUHL8QQ6HdX9LubC2e0L774QpvcLVgvTntVlNUU4Wd/7k+z21/z5nwwQEIBlNnqhi0y29yq2t5uRZsYWtrWXCsEtsXHNgF3zuM3jJk6opoMoyqzLdZvuMyecQ0pXV9vTfhhcb79BgS0Je+X8ruGpmLXf/2MzZhkYb0MzJC67m3XPsNi7KSfPZVCCAtmS606+FaWeNMAl3pxXb1XJH37mdv+xhLu57N12p/b4qjvOkNAW1K89ik11LMBPNnAgW2B2f7+NcBr+7NNIdA+QwPz9L5XRMrFZ9s+1kKqo8Xv7fO2bOLzAmxbiLZ7emmS4i7+7Xnh2d6nSWNTTpzPlRgcu6Ejdh01CV0uvLi/J8QT796+4+AdUgrDsNOZgFU4Ho4cD0cbBq3gg+4X7flKaU3kQhADRQc++ug79H1HyhP3d3u6YaejLM5nUkmkcyJmBVBD9QQEkay97DQWSY8XsZgdxPqqja1xcrn/tntyq8AJnV9A1MZ+Pb/+yzWsajbTTN2cVyv9fuhxtZo8rNrroBJ0Y1hd650uawFVcqGmTEmWVFofi20owK1sX1WwrI08qEHHe+SsBfE4NWv8wjyel1gRQtQex3laisV5ngDR3Ge/o4+OeZo5nc/snapODocD4zhRzC271LKYiTT79Jy1hzJXBXPH8xmXMk+PT9qXJU7lzaLxoF3/XDKxi6QsYEmzmtooy3kcz3Sxo+8H9vs9u/2VMY3gymx7RtUj3q/rurHHmt9oP3ouZhjiVs8CzSfRM9qrbLPt/bZbqoiakISOItqvWlFmNO46YnDWA5nxDsTrOkm1IjFSqyNVG3FhsduJV6B2ux8365OWSzhHtSagJZnXyAm1Usq8FKelGKMqCvhR175wsHPDwOcYAzlH3Uei7SZS2vw7t1wb7AwrUha2bRuTtzN4G2O5JRaWs8m5xc291oKXSqhLxKOWmXk6MZ2OnNPIOB44PO1M96T75frqmuYWrmeILLL/apJPLH6rIqMVV9Dy2eemUS0GP4+pUhXESgbitNmiK6AiluG75bzfngleuVc9i0HzQ5Net6KttStIS69Rl80qBsZIBWfnh1N3Z3E/vmr71qJNRAbgfwV6e/7fqrX+hyLys8AvAK+AXwX+3VrrJCI98N8Afx74CvjLtdY/+JZ3uXB2EdphqvMo2vyeBXleDtzK4glrCIJ3XhFzUc1tiIEuqLtjFyNd8NqfFHTziggERwam8cy7N1/x+PTE6XhkmibO57MeusuCtdvj3GWAZw0m+n8tnPwGpdXPVLQBtySiF77zvZ/iOM589vaRJ45aZGwQgva6y5+1oTeXRco2EalV/Wsas7b+HDuB5Nn/rfHTHG0qjhh7rm7uuHvxmuubFyCeOeuh4ENn7JOY26ImLGVO1FDo+x0xejLCVByS1M62Vp3f463YSxW7p8piNNmRJstr4G+sWa26jRqi1ppznTFibYPU2np+HGIuoQ5HEK8uoJbEVXNIylTrK1JNsva0aZEZOr8EUuwgmFNGpF4gvMV6rxpLidgaG9RUsWStTQsOkUCw3jPnIvvra27v7gi3t+qeNM+8f//I6fDA+ekJKbN2okmFooMz+65bZGXBR0quhgIWncXltfdrroHsdvjuhqsXH3J1/wESO3yM7HrPTQ83feX1neP1beBmb3IBdIS4ImUZV/XwnaaJ6XSkuhNST5RpZj4/kfPE4f2ZMjnGp5k6H/kLf+Fn+fN/8Wf5e79y4pf/j98lzbNpdtVBapme2/ZWKRTRNarGA2arrkICgvcMnQ5oH65u8HHgfB756quveP/+PY+Pj6t5gyVlerjPzKmQcuXm9o6u73h6+zmlFmLoSbUynqcV2BA76MvGvdCKcitvLpDGFaz5+r5thZBKenQgfLWDVpweosoqLunDBUq8Te51Vswq082l6CH8LA4scXQp8qyoymtf1TZBbbOdVlbbDlqLNzwrMKod9O1/7exv76vyONm8fjHWbmXW2r9ti6ucM6lkLSQ236clf40Z3F7j5/1uuk0vWbrGMCqztErVWj/k9nfao133dj2o69iBtXC275sSKV2yflsgb9ubB2uBvS2otoXXllVra6EhxNt10V6zFchboCCbaUBLbtt4iBYjDweVNN/e3tD3Pc4H+r4SQk9OmaenR/Ks5hXX17cEr6zZ4+MDX335hrv7G4ZB+y9jDMxzz/l8ZhxHywMdp9NIrQ/EGLi+3pNyJKfMD/7wY7xJFLu+U4liTtSpUlGjMO9NURM8MlvBOSfSnBl2wa5RZj6PSIVk5+KWzWzXQ2V2lkh5WeRzjRFs93+73ryPjDKhUnu/yC9DDAxDrwE9q6JEB8iLqTZagtuAlbKAiwSQCDWpJf84T7Y39IjxXiWbYoCmwOKcDWpioceLI8R+ZRNETIKq+9a7jhgC0Zwmg1+VADkEqD0pJfbz3tZKYbfbMc+J0eSrba+l1A4uVZXEroOUqFSGfuC73/tpfIw8vnvPdB45no7LetMkP9N1kbu7O5wT0jzbEO+yFG0P79+Ti74ewLDb8+r1a7wXghmlqfW97rsWA7ZSa8SuT87L/V/21rN43DIgt9lLmQoZHU3S2Gs8rlRlUJyDqgZE3umIBJoSB9HeSmm29FY8S0Cleg140fgO2kJCQT0XTN6Xyzq71nlPqlVBWuesFaNYDrf6FQRTmeWcFw+Sulyj1iqkoyJU/SU2J9PyKHtNH1YFgndukSFr8XGpJHmea24VLXqP1uKT2ooqHeYeHdoHKhUpSXtGveaDJanZSTWVRzs7mwOnFkc6CqxYT5szIgBMBvpMnNY+W9d1y+dta8E7T05JnXhTWhzcg13favckp6zvV/S7lJrVeMliSjHFTM5ZXUBbO0V1uOU8WEkJ5zQ3XfNzrEZo0tU/vjxyBP5SrfVJRCLw90Xk7wD/AfCf11p/QUT+K+DfB/6a/fm21vrzIvJXgP8U+Ms/9h1kPYwWFFfMQS6vDfmgi84v/fEa2FbzELfQ3X0fiKFtPghB6EJk6Dq8a/1lI+N55PE0cjprgTZOE8lu4sWhjyLOtTmgbRZA++/t/00cpwrfkkm2o2pK5DwzxMCLVx/w5/7VP8vv/+BjPn/3Oxr8F6vZS0R4e7HaJnj+v
tvnK6ojC2u5YDem3dX8Y9WFi9dG+xg7bq5vefXBR9zcvNA5Y1Wpd0S/j8MkCbZZpilBLXQxMnTR3BNVipizWaUaairi7ZCS5X45JwQ78FoyVbIGubWYb+8ZNHAtaD/6XGnNvIoitX+PXSSGQHBOGQSauQkqa9BJjEqXo4OdqSw24dvEW5tOHc7ws1TLch1rVb/Tvut1skgpxL4n9JU5j5owxYBklUV63zHsrri6uuL69pa+H8hd5HQ+8fTwwPnwSJ5OSJ7NTESRvlrUBbWgQ7ld1kGm1ZJxb8zbPCeqE7LvYbjn+uV36G9ukRgJ0bPvHLcDfOe+48ObwP3esesE7w0cWVab7THnEB9IjICDEnh6f6ZOI2/ffMp+H9nvI8PgmOYnOp85nye++uodD08z52kmzZlxznjX6Yy9hmNZ1uJkncGW0mT3Uovy/W7H1W6g7wZ2w4447Hg8nPjhDz/h4eGBWisx6iDg1qOi+8ARQuT8+MR5Gvmnf/AH3L644f379zZPSwjOEWZHcDqkvjmLxhj1ALPEfAuWhKh9D9s4sO0tas9r/770CuBw3mKDGQHUogm7D4EQ/IK+bqWD24KjsXAelgP1+UG6BcHQpY7Of9PnbSVU3pqvxYon793q4Giv3xKxJjOTqs6sFGPj7Do9/3MpCqUxUnodks09Wxi2b2Calji2icHtO22Zsud9arXBpWxj4hojW7/ZlmVb+9Xc1/7NotXy/pdFLtRyWSQ05vd57N4Wa88Ly4vPz1qQtc+0LU7b/dP4Vy7e63nB0n7mnFukSxJ0Ntjj4yNCpd/vaOYyQ99zd3dHTRPH4xHw9N2VyRL1dQ9H4fB0QAT2+0ETeN9vCky9djG2wlXIuRLxBNcKUHUQbfu2FVY5VZIooj6lpmqptl61/zf4YLLDZOy7V0Ovup6H7Tu367isA1mL4la0j+N4MfA8hIB32nfqJADjEptUniU4H3EBaimkPFFqouT1fq1giD7anD9vRhIxRh10nQtzUmSj+jgAACAASURBVFnbUjg6obcxM865hU3UdbNK/hp7WnIi5bq0arSCqz2v9e+1ODCeJ0SE3W637O32vfu+Y7fbM88z4zxzPB6Zp1HPkjlTszKXVcxIxQnXV1fs+4HT4cDnXxQOh8OyT3RMSGC3G7i5vdb7mSvTlDidzhwOB56OR87mGF1K4XA8sjse2e8H7YHqVuZ0C2K1XK+ZXzjxS3G+3W/rRjDArRmS1TWWeASJnilpL6/iieY9IKoocFLNLVBfN6eE+IBrZ1dplvwGquWMtny0AlI/RqlZXauLKq5yyhSn8v9qeUkt2KzhBF7n79ZNjFTtiQFLFh/mOeuIIu8JcWOEVPQ76PK3GFPUw2Et+MpSxDrvybRrsF1rXwe3WlHUzg4MWBfXhhwUY6vVsEwaQVDrkne1fZRzoqSMsz5XfTkFMMjF9oNm1nW5z9bnXe1ss2u8/ZzP8+jl/CuCDx2DN4WV2wKeauZyPp+VwBkVeCzo3m8zWmut6mTa4k6t4NXpF9TwECvYV/LBHC5pbGkrfjW+lOfr9tnjW4u2qlfmyf4a7f8V+EvAv2M//5vAf4QWbf+m/TfA3wL+CxGR+rUdtD60qFotlVsSAKq9rbI25EvNRK86dpUsaZIforo5drHZ8DtUVl5VV2E34uHpHefziafHJ6ZpYp5m5qwOWNsg+02FUGt4VH19O1xXFqsaqtCS+FL1MG0ISCt4aqnMqfDp51/yt//u/6wWw4aw1VoXJ7CvF2x/lIewuC9WaPxsMTSi1ibnaxbnjpu7F7x49ZrXH3yHYdgzz5lkB0ahYB3fqtW1YZhpSjgcXRyMzQxUsrpgmRGMiFt06aWCbz1rxlj64HG1LAdSYzZaIqnyFu0l0wBVDV1hYVSeb9LFotesrxtCVY290xulYUUsmGuPxIrCOEs07UXBZLkaLJVWLznT5sXF0IF9Tlcr1QV1fXMDSIAadeTBcEU/XHFze8t+vyd4R84zp8MD79695enhHVIyrlHpFaY52eY2k+s5IxUtdr3qoOeS8CWAg7kGgo/0N68Z7r7Hzf0LYvR00XE9RF7uPa+uPT/1suflDjpzb9LCfJWuCubIZ+hh3w+UlDk/FTofmZgZz4/MOXEcM2N5g/RHTueJX/+NT/jt3/+ET56ueXqaKEWL9jwq66R7qawBrTEaSa3NY9TG9bvbG7oY2e337IY9JWW+/PItn33xBQ8PD8u9V5S/LhIOXdZV5/+NoxZodq1zmqilEANUcZwFLcaNTbtg0Uo1OaM+fOs7LbYu2o77Mfu1OnReorReAQXrfQiIq8yzynCdb7GnmdaoRLwlni226DlWCbZet0VPK8hCCJsEMi3JQ86ZGPvlQNceM69rupTVwMpQZeccXdCEKNkMuEXOLt7gKbNrtyRl6b1o8zGfFaHteomo3FSqLHv/eeL9vEDbAinUugzsbmBWkJbc1cVIKNOGQbtnRf3ltWvvc1F4FnN4c6vxQUvcQggglzOWnifu7TOnVHie9DR5dysivgmwe34ttr/fTtXt77SibQs8juNIEVF3tKQM0TxP5DSznya6YSBlVQL0XcfrV6958+YNx+ORTz/9hPv7l9br3VHKnvNZZ9GVmrm+3kFV+WeMvVrX50zOmojmPONcsTWnTnW1toTeeopjNBOHzHma8d7ugffKZBQ9L1rDvqoMHFU0EWxnx2oE4S+uR7tmqays25ZNbaDIynBWti6F1QCArovaUgH4KjgiVTpSUXt9TaK97b21FWE51wCa3LAo+NdVNWKbk/XcVPT8yCtQIsGbS6kVLBYjhNanCUIDEbUHTuwaqRJlQTTZX++XpD1Y8qo9rTpeqckG51x5fHrifDqR08R4suKqaPvJ6XTi8y++wMeO2/0et9+z63sOj486fqmtZVdVUNEYjwJ9D7udGkgNxwOPj08kc9/MxvJM04xfen7VFGzdlxZLpO17Zy58fpH3NXl7xQqeNUgscTuI6QVEdKZp0nEEPii4jEAR6wPG2niMORPE3I6TgrxezWeacVkDfDSUanysDXFGpcrO1pcCHnq/SjbZpakYcsprW0cu6gBq/Ze1lIuRKO3VL9h7A151z6yF2zYGVxuJ0oytisX/liVtc+MWh7Zy7fZ+3m5IXfxvzaFXKzezvbe12P5YgKvKlCZCDQQftBwrs4H96r3Y8oU1BFpRhKqz2v7YAnvtM2+/s76d2+wRbd5vDH6VSufWHts8lEVJsOzxNtoJa4nKCjaleSLPE62XrdbVYZuq9UzWMqLVrvZ5G0H14x0kf6KeNtFX+VXg54H/EvgnwLvafPfhD4Hv2X9/D/iBXbgkIu9RCeWXP+49lppO1r+3zdQOY0rFe+Fq16tbXBeJIS6a9a10JpdEmrRxN00zp9OZ8zRyOh6YJm3AVUS2UmqzkzUJlGiJ1Wb0aBBUfXjrW9KFc8l2tZ/bN2DduHp3MuoaGH3HnHVA9pxGCIF5Tu2t2jX/SW7NtzyENlF+sfSvGuAUSRQET9fvePnRh7x6/R1i7Kg4prkyJ90MzgtDiJaAVbzTRvCcC14C
+2FHsGbjSrWhyJqBdl1cGLIm6VLnKDMVcWuQaY3ofd+rbMDuhSKFpq8HsAANahWs75cXlEvty3XmVjGGz4szKYEBA3VbdNvsPwegCIkYqyau3V9NaJwxJT44yB5nyYQ4D77NXwHnA+I7ihRzYHJI2LG7vuP27iVdv0OcDikd5xM5T7x/+yXTeMLVhHJpILUyz4U5F2pVyYPzqt93YsypoPc4CDV4xAU6H9nf3HF1/yH+6o6hC1zvOq4Hx85n/tQHAx/ceG46CGScAQw6qJ1lZkmthpiJGgM5S4JddtTpgfH0jlIm3n75ls/ePPDu6R3nemScZ2S+45MvnzjKRBUtYL1zpKQJQ2mH3yId0B4ShyaAu/2O/b5nt9/RRZ3fczrNfP75F3z51ZcL+9PYtXaQXLAvhoKmlIje8+rlC6rL8Law6zvCbHMa0wQlIbVb5MwNPRSRxVF02d0mn22o2LawaH9f/8+ydtvrIQ7vW9zQ/sNiswUlrIGgsWqNbWmMiHdeE9ZnBUAr2J6zU2IwaAOVtlI/HSFi8QqdeSnIYluMYEldQsQRu6ggVG1g1TbeXB6QzTltmzg3UGZhhOqa1IpzF86JW8ZrK0cXS7w8QpZLlcNzfFC3hyYLcvHZ1j6ordTqa/fQtVlZssSClmRo/09+dq1XR8jte4XwjH3crJstm9bUENvzbAtILWuwIbffwBJuC98mBcw56znmIHhlmEspPD0+coUQ+wFvplEudtzc3JBS5unpQK3C3d299uFWNe1AKuP5hHOVLvb0/aBJswtLv2JKk87kDB3znHFuZXVLEWMf1FxD/y1pr6uxCHW55pqslZIpSc9nPde0F7ZWf5EDiAjzPOss1bg6VEYXL3MMO19gY8ZQqwGe6lwpJnsXB33f0Q89XgRXhZp0TUU6QuhZBszXSow9Kc0X71FKMqllWQpC7z1RQLyyHcWyUk3Yk9qSB3U19CIW+6FmHXYdgyflgvfDsl4mm0O5XQPRkkztXYLoVaGU57IUmQVhSjPjNHEYR2r0XN/eIFKZz2cOh0fevX1Lyqo+OE8jh+MBKQVnxWhzolXJXFmKN90rlZpaPiJ61gc1mGmKg2lOi9lc591icCPSABW7R9IM5QKItzExrHGFS6n0hTmH05ygbmJJGwVRxWsftXgFqknMNeNLJlqhawtoVRI5FpBYQTBd2ym1+9xihp11TtkWlV1a77J9zmItLNrPGKBYL26LyQg5FdpweBFViYS4noVt7EqMkXGabc1vzjPr999er5xN2vcMHGqKqBaLt8CaxjWNCfM8E5yad7XMWYuQuoDeCvp7cHlR1bSZrCKVTCYXVXWttaWzQrMV6nX5HrW2WKC/UKopwjaxt62DFgNs1xFjj2H5miiI9ZCqKcOyXsQ5+l7HRxQDW5qsWeuHuqhzMJVQmmYrVvX/DSTWIefVzIuwvvFVAdOGbv+4x09UtFVtyPizInIP/PfAv/KT/N6Pe4jIXwX+KgAfQo7t4Fv1yEqxWZVd1YDh7uaKl/e3XF1dqZzIbwaSFpUJpFmLsmmaOZ5PjOeRcZqYs/bllFS1o7U2CUpb/GhBgNKydZFw6aYWp4dHk8psq/ltoNh8R3tmQ5Ttd3yEKkwqfVUqtVGz1E3h+Md9tJvfksC1384Hz35/xcsXr7h/+Zr+7h5MXtCSN2czZWLnCFHR0ZwnctJrMHS9ukBhQx6z2uG276Caaks6LMnRGRyaNIiLtEHnwVmvQYz0fW/a5BV9b8mrNhjrXA3A0EzHPLM0yLafNYlqrS0JUOTG24w1tZNtM/wquWa8U4MbbyxFTnWZM1erFi94bfaNsVV6esA6c8hzzusYBK9GOCF2ONfT72+4uX/FsLuiANM0cj6fOR8fyfMJmc/EmpCqfWt6kAbmDE6UuXNxoOs6dn0keHVTrGVinM4KKrgeF3bs9vfc3r1muL5BOs/1LnLdC9+57/nOfeDVtaCeoHVhNRuCZWmkXr8NUqx/Bvr9QN8ljm9OzF+8wTs4PBW++irx8FQ4pULxA6eHSEpXjOUdxVVqcVCam1IL2Co7aShgEEfsO4b9wNV+Tz90hK4j58oXX77h88/ecDyeFqOXaRqXNdIkVm1v6p+FcZ7VUjgITw8P5DqpqYvX/laXFBELzpHc2vcFULMmHa71K9qjlIKPYZURymWSvZXotCS/FUxb9L1kBS5CCMxJx5PU5UbIso90rYfl9RBB6mr4sT1AW5/O2uvV3nuNTzmnxZmt9QG0YgOT0iCyDH6vgsp07Ge5GOJZL3vHGpO1jY2XPQ/roxVllLoYOGgItl6/TXHzHDldGBK5vObbRvkLtsuAt/a6W7nPNqFrEr9tn9y2MV/vsyxrJJmE77mNf/tzy5Cpecc3f69tP5zfKA7WNbTeu4u/Vy7eu7FF7TxqMtkQAgWbhFbVKdRHk4AjHI9H/JzZX93gnCeXrJb/CO/ePnI6jUzTl9zc3HB9vTfxSma/33Eej5qoiScEBdL6Xp1tz+dCRVmSruuZR+E8nqxI0zN1Rew9fT9QI6ScOJ1P1g+0Fv85V7LLBG/yXMBlTypuNRsxh8DWs5ZSWljCXJtl/goEtGK5FVbKrjpETkth1di9UpWhxHtKVsmgAnSOGDqTOxfmNFOr9t4g2p6hAInT2aGihZiggGbKNoAdzQFySpRJ3T0RPfu0wAsEDyIVKR4hW+++W+SRygh2SwK//VN7G1hY6iKOPOWlfSFbElqpdH2H2DD1nCauumuurvfcvbjneDpzGidtL3BumTVWragqS0zRqONDUJaWipfAatamLox9z7qncnPvE1zJC0O+KhAslzEWo33nJnttj+cgT9sLXrBe/Gp9S5kmyGqbSnMknennQiC4SKhV+7stZtVaCZZrFBHOVliVXJY+NmiW/7pfFICrpDwhNOnkaopSUmKabQ+brDGIN3BmLcDFaBqtXdXxcE56n32F2EWTPiq3payu9W/WgrCCe+LWOIVoCbSNny2OPXfj3KoGarXSw9wRF1azGkBrwKX4oLl3tjmXVYkMZem0cGq2+Aqe6gDsauYmWG8fjciprcVH98g6cGA9g7cy8+1jmmbND6UdtwI50fUdDmEySXMfw9JW452jc96KUSUhUimr6qZoD32aZ5Vwz7P9PZDnScFhVNIaUPo2W4wR2rny/0PRtizlWt+JyC8BfxG4F5FgbNtPAx/b0z4Gfgb4QxEJwB1qSPL8tf468NcB3J9xVQ5W+Nhm9I0xM+QmhMB+6Hhxf8/t7S0xBpNCTAg6LFKt+Ftv2kxKxRg13RjarhQoNeMItrDW5G75bGWlsVvCrwmFs+B2iehsUQn7bpv0iDXVc9oXNY6TOonVqomXgyI6XwI2lr2XNWB79eVz/UQPS/DawSNOiD5yc/uC168+5P7+Fc5HxlxJZSL4sDxv3++AhHKEhVoUqaXq8NPodS6VsDZg4xSZXvXd+iFUM61Xw1mvofM6u62WsmwCZwhddSttrAlVwfugzps0W/GsjdKwSMEWJAhNLFvR2ArhTIGcVe7g1t4379WJsfXfOEPLJFgBn7Kivq6ZtYj
9bhsS6/C+w1XwThmPnCq4SBzu2F/dcH17Txx2zKUwTSOPT+84PL0nzUdczfRlhKSDYmMXKQipVB1cHnt8rwYcwzDQB6HvHLEXSh45Ho9MU6K6yNDfcnP7Abv9LX3vGfrK3VXgo5c7vnvv2AWhQ4WxsgAXxsjaitUy/3KNNbpfXAF3oronjuN73r0f+eLLmXdvhDnvQEamfKb4nqlUcg1M44ku9uSiAEsr3jB5Q0Nm+2Hg6mrH7mpP7BXNf//+gS+/esu790/krIE8kxbrbmBJur++L/RgVgTVcZ4n5jwaUunx0RNzMHfEAlwaPbQ+gm2/2tJnsilWWgx4Lv+DZ0n2BvHUoaOXJial6kHrVLmrqLXAPCdC6BY2KJkZQOuBrXU1nGjIZ0vgW4Oz/m6TR+q4FJs6Td0UX8tntu/fvkuT37T+GyeK+LK5Du1aLMlSk0k6RY5daTQHdl1Nomwx1gVvDeFcvu8m+VoZOo0NTtySwC+sq43wSGleEhOqzjxq8aIVUNtZZu0+XBRSuZLzVnLnWN0iV/T5Ocu1vRZaCLRer7hYsG/XzMIKlLq4zm3PledAXq26f6tD9xQrur79HO19lutdvSLAVWz8imOeE9N8QkT3oBM9GnfDgHsRePf+PQ8Pj/p9XdWRAUGNFrp+YE4JxhHnwsKe1Vq4utpzPh3oQuRf/rk/zfe//wm5JDsvtIDqTKoreJxTQFMT6UqwGZ/KELU4L/a72LngzMzhcvRNY+HVJCCRsyOVtBT1rUh+3vumyWtjU2zTiu7aLkSC054i51VCHyyuKQChZ0IMNr4m6DiNNKujcHOIc8FrL4y1g+h4kxX8wQe6nack7R+bs7ZUSDG2sraeJDZxT2gy0dZD1fdr3Mo5M+e0FFVZKj40KaDFMzG7fOd0KLgIMXjGE3iTl/e7HbE/c5U1RY6xY+cj8+nM+/fvDcxZWeB2jWMXSWkm+qhGF1bYFLPnbP3DVuEYY7VKg9cYoLdEGTe/fPeyyZnUPMQA/ZIXVYez9g5DhxBnqhKgTpkYZJWrOdHZeWLQrFSiC4SAMSllSfbVwMQY0vWkWOalqXHIOrsRKUsvrBNREEBUyNjaZEopZBLB6UzSbQwsG9m1dzr/NhW1+/fVWNSgPdm4QE1J23QEWz+OXJT5dBhDDeBE+/e2Bdkm7j7PeVt+VquCKK6df1pNGtnRCIt23xoPp+SAtrRrsZVrpbjmEooBiOYmWVkMrZb4t3y2S7fldp22xaXVkDS3x2Jmd8tHVVzE+q01L93K+ptDejX2uIt6HntR9jpna+3ykT5YF1lRhUpOifF84nw+UYrOigzOM7XRJrks1+vbUvufxD3yA2C2gm0H/BuoucgvAf8W6iD57wH/g/3K/2h//2X79/+lfkuFIQj7bs94GMk1r4yWCJ337IY9+6sdV/s9Vzd7qoMxzYvG9HQ6M47ni0bcnJqry5o01apojm9zFkA9FbJpTmm2C3VZdNL4Gvt7Y+W+rWgSsGZDwOh8pdEdEtR+up0HLUi1hV1zWV9kg+6DfbRNWPg60qGfTwODmosoLa0MUr+74uXrD3j1+kOqCJP3iGhSG4MjxIp3go8gokVxLgWHB4nL9dOAqEiiBJMDeB3226hjAWqu4Nyi9S65KOMifglaVGevVXVDV7P3De5i05WifRTi/dpU35gGFDlubASiB0x0skSLNhMD50kGOQYfVhZAKjEGKybVfCVTCP2A71QyqT0WmrQ7r4PYU9XsuoSom897igj9zZ79zWv21y/o+0gMnlImxuN7Hh/f8vTwhpInvMtgOmmJ9t72+aoPhG6n9vZRHSd15mDl6XCknjOxC9TuBvGe3XDN7fU9XewYhsirK+H1PvHdD3bcXzt6axB26wJDsZW2ltZCmwW1svWJ6unnkvj93/s+v/LL/zu//du/xft3B1LyxG6PeD2g6iTM84lsCYKjYx4z7UjLVe9A8MrG9l3k+uaaYac25Gpckfnssy/49LPPTR6qcgjt+8jUsrIKuaiJzGoNXOj7gTTPjOOZMmdyqXz+9MT9y3vS6cjQX+F8h8sHwDM5XYNOyiZJ0HVtLdQaVF2AWtV2OwbEKRKaWhN6u3qGjHrXnLm20jrb95aUFAMmlGkxqZgCd1AEccHWtxY+tTZQw1BJNEkBjG2aN+hoezuNDYuEPK/GJjRW2zs8wYofnY7jvSd4lbdWzGkNoQ8doUkTWYfHBi+WMGuMEy/UmiCbLBLRhCFnHW5vxamatSQ1snD+8h48Q0nbWg1Oi4uay4Kauhi0704q0oVF4eCLMnillEUSlkumZnDe00WTTpuUstS6Jnv2/qlkgjd3s7om89tibwsgrAxatTlMWwZ0le20JG1h52orsvN6DlhitBTXtfWaeJps1uQDiHhCdKp+WK4Z1JzsenaUoglarRBCzzTqEO2uM4v8oH2bYRdBbqmo6+RXb9/x8tVLBr9DMS6PuMw4JUo9sxsae18oCaLzRB94fPeONE/aYlDBu0guCSkOj4MqllxWXIaY9TpkirG8mVMa2UkkeMc8JZwLJmVr16INUde9pZJUYZxOjOPZZrYFaq5mD9/kj03C2uarKaCSqcYkGRNchToby+4cBBtmzFqot/u69Pk0NVApGkMRtfO3USC1wK7f6+ccR3LWtSXOkar16IXAMERArLdUZZOdWcw78TjXTAwa8F11Ppol0v3glj66BhrUuRBDXPKQaOx7KYWaE6EWvDji7krXoCiIUaLQBwNNNO1H+oE4DIgLiBezUDcJt6gBR4yRznVI1Zmd7bHMzwLzK9D4VyXokm4MuXhTvdSl6AoGuLZRCksRWzfF2wLOa+9Wqers3c5BJx4fgxlgFVKBPCeiZJyzPdhkfg1YpsFNIKiUUypLvMF5RPyqZKhQk5rSFe/WM6KCJG1HkOAJBHrvEJdxJB0BheYzWmysc9Zaf3PBUch6v6fM+TRDFXVIN6VFzaJjjyQuPb7tw20LsSabbjJdHOpeWdY4iGAES90IugQJUQGXkq2gBWVH9bs6ES1WLC9ELEcXp/OVjYF1KEOpclBdz+KE4LolVupntoJM52/g8Eu/ta4R8yyQVlPoOZzSRKoJrGh2mNw5K8Aj6JrLQHWzMsTWmqKAvZDGM86ku945JSWqQyTifSu8HbVqTBl2O/azFmiVyiknXE6EXBZmrtSVFPpRj5+Eafsu8DdFbPIf/He11v9JRH4T+AUR+U+AfwT8DXv+3wD+WxH5PeAN8Fe+7Q1EhBd393hxS9Bqw3JjVFlDG5B5npRROx5PnE4n5nlaqP92E9dV9SxJYkuv2082iOi6yJqcCJYBC9/wmbeP7Wte/NsmkK/Pe85hbJHs9fl6oFw+7/nvtc+uMqe1WNIBgVYIOIe4wN39Kz746Lvsr28pAnOacTo0jV1UV029fMWQ2LrMp9BjxmQ7Ao2SrtIkTYa6pLUnxHnXrD400Fe9T50liR6VirR7lhtSZlCaWxK6lpCs1Lg3yWwy/Umbj0I7hO3CJY0IiwtecegU+obwFC06dbiroVBm/FDFI14TulwqVXUlhkpZH5vT65
6rg+JwviP2Vwz9wPXNLcPVHbHfgxTSfOZ8PPD0+MD5+AR5Jjiz/60OJ4GUlIlyVqwN3Z7Y7/C+p4jOzas5UxKEbkcRnZ3mvaff7dnvrhj6jj467m46fvqDyEfXcD1oAWt3bV1NDbZcV6L9vC5ghXEuUFWC/Gu/8g/4xV/8Jf7f3/1d3r17tyQmu91e92ocbD9mMqMNoNS950VZ2eYiFYLn5uaa3TDQDXFh2968ecsPf/gpj09PzK0HsVojeNV9VTa2xMveqJWu6xdXuMfHR06HM2SIQ+TqxQu6/Z5bY35zAR/UZryYZPZiH180lbt13cDiZFpKY8gvpXrbz+YXpPLr+35N2DFpXCur10RjK+PS91JGSiU2hkb7Ff3c9va050/TtEj5WkHwHEHVStB6eGxvL8UDlyYAKSe1yTYpb/CBOc+a7BZ1H4tRZ16WpDLYBny075dzWuWF3thcuSxktqzI1kVOkVZZkkIx4MVWqxaZhl76DXin/bOb+VlLArAyZst9M4a1YsO4geIs0aGaw+z6+bZmKW1oc0qJEFZGrQGL23Nhy9Rt5Zbb9fTcQW+9t9tZlet141l/ngKOGlOdtN4yR87JmFxlAJ8eH7nZ75BgahSEoYvc391RK7x9eOTN23fc3FSurq5xHjWs0u3H+XwmhkAX1S1SnDCeznxy/JTTnBUYcQ1QcFasqwsizWE0F/KcdJ9XTdqiU8lSTYld1y0mJc3xV2fFrX2ny1iHknFJ150ORW9S3K+bu7Q9UouQTKZGMMDOecqcGI9nSsgQAn4YlJTbJJLtfm0LuL7v9Uq6qv321krgnLKEu90O5/Q+jqNKvqtUQtfhrFe1rR3ttbO+PStSopcFJGqtBO0ztD6unLUHV3zQZB9nplOWR4Ax8dPyu41xEtd6nNF5eiHo6KLcZlBZ/x/O9rKjsWENdPHexhAkPZOdJVwCyEaaumVFkkkKxXuzg1/X+7rf1CwkeG/tMhqTm6qglhUcq2C+BAZEiprZVQNeqhlKVGc4vXME0XE7bgMSN4MPZ+0UDqHkZEl/xuXciBM9r9pg8Fo1xHqH2Lgbva9V1QE6F0hBXDJOgoEc1aS+ZhYlJh1Ei9cqa6EYQmcjB1pPv0lMJUAWBSyoVKlsz4clbtijqQO2wPny3+7rxEWtlTkro+yjyR9LxuMNSIHgszp10ggRWwMGfrk239Z5WmbiRMkUuchVNkyf3nAaa7kWdU1svDKi64iwiua5LLNBwSGlru9ja7Hdt1wSuWzOS8AVrwVqm5NEwHsB53XNwT354AAAIABJREFUqBaXkisxdOyGHcWMCF1J9FXZ9uYo2uqDT/nRj5/EPfLXgT/3DT//feBf+4afn4F/+9te9/nj5uaGq6srC5rV5FqVZDNNHh8fSUn15OM4Mp7PphvfFkkb4w/9MO0zXbzXhWRl85z14PsjyA+fvf62YHv++t/0nB/1s+3n+VGP7edVe6b1cNbmTp03c3Nzy6vX3+H2/iVdP6g7U54RIHpNFDvX0EZFTguZOSni4MWavMWBc3RBe7V0tojJICro9ddAXUHngfiw9Ct5mvujFpeFiqvOLPq9oeNN41uXCrVZ+RexQgkxNkzRI3VvjDrXq9Tl8Mulmn3sJhiY/EyxMV0iuXp8dcZ22aZHC7hadX6OeFm0/uIEF9z6HXzEuZ7Q7em6PVe7G/bDNbv9FS56RUvHifPxyPl0Yp4KMeyILuCdJrM6L8QaWr0n9gPD/hofe930eBv6qYG+1MppPlOYGAbP7fWem6tbet+x64SXd3u++2HPd+6EvehGv+QqGiTIhvFdu7as60wRUkvuz+czv/K//X1+8Rd/kV/91X/EDz/5hMPhYLK8wO3tLR988AGvXr7WBKcU5mle5ADeBXMx06Ttan/D7dU1Xd/pTLsu8vj0xPd/8DHv37/neDzpfRazxbd1A4a8LsONVzv0vtcZRE9PT4yj9gzOozonNlBhGkd2u4Hj6aA9J3WVsZV8ue9kuRbrAVyMnWiI7xalfG6/vk3Gv0lXvzW/aD2MF5XjJh6UrIjM+p2ruZatYMr6vushvGWBtofvtlhbChfUqW9rxrL0eFFNpiLkRabS3Fj1eGxyHc0A12K6MQ3beLgtctYCs9oe3DaTW6HOZWIsdmC7FhcsS1KZadXxDZYMKaK7Sl2bdG67frYJ/DID69n9a0lW+wxaJKzJebvGy+iS7efdnDvb99oWZ9uB48972rbGEuvvb51F27VqRf5zsxNNRlqMbu5pysZkZS1YzQycrOoJJ0I/9Ny/uGecZ96/f7B+S23Sd+IWmSlUzuNILQGpcUHztR85U0QlSC5n+r5nnGeTcSltqnORtA8xZZ2f5O18EgFKYZ4mBQpCYE4rEPvcLEFl6pr46zper0crqGEtzleJbNAi0pgiZfAhdv1qKpIzeVI3uSDu4n3ba7efLfJ9D3lei2ktVqvNmIxL/HBOB8tswZdtAb6d1+ecDstewZofnYMIa4x63qO0LYS2n1n7WlVa3BQuMUZVSvQq1cwZZE40eaawxsc25FsBL5W8OuvpqxbQXXt+S5qN+VuK+M13b59r68yqDseNOVUwUpwjRm89UsUSbz1jc1KmP0TrYa2wNbgQW49iqgvvBFe19y+XZrS2AfrB2iRaO02Le9mccQHvFs8MQczgSJbCiKr+BnOamKaJ0Fo48CtLX1eASZZ4r3tLvNcCv1Z8DPjoccGRZjvNneZkhfVsatf0eWxqEnvtB1YJcm6AkilLlpy2Fd4iYMoZ75pyStdNmxkXY7QRXV4ZtGzsl18l/sVUR5pz6nm0jaFbkGVrHKUxBrBr2xKb1Z2x5X7WPxtURl6yrhsvjiKtnxLzmRAr3K2o38TTbGxkqRXJ9lzRopziFUx1GnfFmDodpyN4KdSMKQxQdq1qu8PSavQjHn+knrZ/cQ+VvQUbulxrs9eclsQrJUXotPG7BSIN5CWXi+DTDtG2qL8JEXheTH1bgfTP+9gmSD/qff553vuiULUb39Jy7at17PY7Xr7+kBcvXtIPe0Q80zQj3nE19JrgBDF5gbeAqQtNB01qkPJecJiJiLfGWnEIRZt6xQZWGgPm7OBvbjkigixzNARvhVSbA1IRir1GqTA3dAcbum06dLFNPM9JBwU7oYpKKZyPFucb+iVIgeriYh6COLUXFzMLcbI4SWoDNgv6hvOID0QJ+C5SC9Z0au5GCOIjPgwMV7f4bo+THbvdLbv+mi7s8D6Q6pl5OjKNo1nMd/Q3cRkuWataxI51Uot/5wmxw8ceCZHigl2Xtt5VLhC8ME2Z/T7y8vUdd1d7roeeu92eF9c9H76M3OyETqzx2lYKDfUzPqJKkwS3gk3MmVNNe0ElvfM88Zu/9Zv87b/7d/g//+E/5J/9s495fHxc+g2dc5yOJ86nCSeB+/t7k/+odr7a8NHQdQzDwH63Yzcoix6NBfrss8/54suvePfuPXqymeW2oZNNPqt71mRuG9twUKT8cDjw9PS0MBrKXARSmnl49w7/8h4vHdPxzG6/ZzxqolxKQTaJDDSQbWW3WuGgAGdLh
i/7mbaHynavbgu8Viy0R0vOdHDr10GbWrVg81smyH62vD4sCYBsFALNHKQlptuB0u17r4lcWAq29miJUZu32JI6L87igcrwcsn2Ho5gphs5abKmgMkls/VN7FZDMLeDiRUI1eSlJYCrFLFAe21ZLsmC0ALGBFgi9+zebAuj54XU9rnL5/RfN1rZ3uP2WqUo2Nh+L6W0sILb92q/u0W5t59p+2hJ1NaZslYuGNN29i3Jt7Gveo9XKayi6OvswVor59NIP6iV+PF4ZApB2fPeL/093nnu7+5wzvPu/XtKKdzdKeDqXVS2AS3OTucz0zSyHwZE4DyeIURKSVr0mwIiWjK37J2KSrzOIxKjDtc2JuHm+obrrqemxHg+czgetXB/dj3bftoWIcCF2QhwMfuw9QPq2g5sUf1ai8maNC4JQi2ZeRyZxpE+9Itz4PMCe8u+ilPDq+Ue5ZURyvm0OMCq0qOuBcLmsZXRtj09l3VcQTN4Wte4LAW1c460WVaCrGCeFTOtkG3XsknPcAG3uVa1FahZz9P2/YvZyoq5ODcjkcYA+hgJToHfZP084gTaddI3RmqllyZXzRf7Zvvf2oMHZVZmp1rS3VQBYq7jzjlChVycAcSyeW3LNbyyZtVZ4VbbTEaHKyyvv+5JjbmpaG5UcqGkRA76miJ+mYemMbwBXG5z4gritAex6kC2JR44UaaoGkCjs4I3IKOBVg1AqVTmnJhzWnqmfRcpTPpcp26bmarO1hffZSUytiMz1Pyj9aIXcNbHvOTdphxb1sp6fmlBnFdWrf2/6m9VM8fRIeVrG1MD+lrovliP33Cetn9bc/utEmE17GrnSctVYe0rF5aPpiyquMUYKDw7N7bXrOS87CIv+v1K1WJXGhaFMKdEEnVR11yu0Mozwa3sRPrjyyP/hT9qhXFUdOF81h61FsimUe1nS27DM9eDaUUNVzQq2xA+fd2vf/nnAfB5MfVNzNu3PX7U876pUNsWjN90Hdrzv+019dGQXytIxOyAfeDm+pqXH3zI/YtXeOu1cs7RRZUC/n/UvduTJUly3veLS2aeU1Vd1XOfwSzWaFgJvAgwSLyIZjST9A/pRW/6uyTTox4kkgCFJUEQXAK7BHZnsTu3nemu7rqdS2Zc+ODukXFO9wwpPa1ypqyqq845mRkZ4eH++eefD1FU4goiL1+zSp8ron5cZmotDONIDIOgY7YxqnMZqvB7rTVCUUfWO2vsKMpW1ltNjJ8gt1YA770IklRndEMPYRDUIYZGeZRqYC02LULrMOWgiidX6SNmC7A6RxxE/r5gVC/ZGKK30XOEcaUWiXGUQlIXYxvfjNR+4DxxGvHVE8eJaXOBHybG7SXOjwzDBVeXN8SwwRWn6qU79ocdUBRd9AweqAUfRnmmfqb6RRC5KEgUKr9sDhZOslMe3Ti84933n/P+O1tunm24HAPvXV3x8Ttbbi4c2yA0SA+ENt0qzhVFvrs11IypUBKokuzReIlSK3/1H3/G//Z//O/8+N/+GT//xS/YPe5Jac0qpVTY5T0pFS4vr9huLxkGLWpXp32zmaSZ+OUVo27wy7Lw+vUtd3d33L6643hIxGEEJDgvJeGcZJ9cFaRWNtlELtKk15Tqjscjt7e3DX0zNcmKx9WCx5FTZjNO7J+eqKVwfXXN61f3MicdDGF13GWtas1Yt+5kM12dwrc5Eudr/bsyOv3mdm4nTJjH1n2pa7Ag7zGlVHvNqphWOsEFQU3XAmfvY3d9iCPVaCmimMrJGOg5dT64Bm6ILRbHcqWlWSZyDbbE9ylGSdI5ZyRDH7SperZ2IG8GML2j2n8Z8OONvmViSawOqTk3wXmpcGcNdM7P0/fwMuS3qvMjwi1rBkUc4dBARDmPBEdB0dxSqjSAjiPLvLzhYPRNnc/tvjn/50ePMp8GjmDtIXIWxdvohy4r0dPBSivqt35izk14J8IUNaE1m5nt5aA2HOoIwygNmEspHOcjDw9GARzFTmfJGhfnOB4PwJGrqyu248SswEgZRAEvU/nwgw+5vr5uz2M+HpkPMyll3v32K44vM8M08u7zGz7+8CMm51j2B47DSAV2856Ul8aysDVnAZRzno3SkkpZM1L9GBsltcmXp4VWy16V+umlhiiXSnD6+UhW14L0/jAgqac1C91+zbBXaldHl06CkyhNGFXUY81ASd2UlXpIsGFZXaOFnQITMq9SSpqRWd2+2rLYK3Cx2Ww6+6D9p9ByBwtKDQQyMSnWzKAo/0H0kZQXiiK6vZAPun7svnrxJAtYSq2tP5mpYfYU455hUT0EV/G+c/xrOrEl5jtWtE1AEH68zBcVANJAujgPNUm2ywIWDeDMxqLjamAvoKCWlAYUE/5wfmWz2HenoU41EFkzZ07pqA5Qm90UnEFckuJa8JhSFnVLVbqkCjBd6KTovcNqXbGgQgFPe869Xer3KZkT8rceWLH10uaRBlhtr3QOaZuhJTpaWgArs6DmQlYfpQGRnIIu6LxrdYKs38/3BJCedsb4kLkvgZmNV9t7K9Ssiqd0a8WJgmpFmF2ViisqEiQX94b/Ls2/s753oVZtlE5uz1b2wtDVq4vuAZqQEOq7jud3lGTZ8VsRtAFKf0yq/ngUQ1FpC2kNaOzh27/B1JCcMxt7mmnrj95ZgtXJsb99X+btPLDrf/6u178tQDt/TzMqFuN3jtl5WtgMzOrMGDIgoxKHiavraz748COurm8wzv9m3DBNEyF4Sk7aP6ZQyiKTpzhqFYObcyanQoiBGEbGYWr1FD5IFgqd7LZAnRYIlyo9d1rNSMli6NVYhxBZ0oLx3Lzy3yuaSfOROE7t+TlNT4MhJp7Bj4BvzxuUvlHB41saOzoVI3EiHEIpuODb7AkdpSdXx1Ilq+RMCl0zgM4HwiQI4WazFecmbPExQvAqEDJw9eyGOIy4Ass8s5sfOS4H7Vyh8tQ4KezVxrMALgSiG8lI0GbjhW5ejoKriWnwlLTgWHh+fcP7711xczVwvY28f73lg+sNNxvPAPhaCC6r8VeCfk3KY9cZU8Fqtpwhf1WpbVXFGKh8e/uCP/5Xf8K//OM/4ec//zmPj4/4E5VF15rd5ly4u7vn+vpOHTGYxoFhjNxcX3N5eSXZQnVyXr1+xW+++Q2H/Z6KJwxC3y2GVCNzLauAQrUmlayoYSmFly9f8vj4eOJsG9XM6ZwozjGNEwHYPz5ydX3F9dWVZpPDyTqz7BP1TXpSX1d0bhtsMzuXgLfP62lNfaBndkE2nFXtqg8IrY/RuhbWc0idwVpXtyynDlAIwwnFyBB5ea847XZt6Sw47BH9WmsLfIoq/BmyHbRVhtyrUUlWMK3PpLUxc0o9KxYcy3w634x7R3y1l2uGUormT4OvtsDq+sz6WjbLgK2AXz657/aca1X5bGl4b89Rrqt79nW9dgE01iL/Xt22nwt2rea4n4N6PfLd//s86NcRbfdhdFuzbzZ/ZC2sz0wAN6EDyrxZNKiIQOY4LxyOR8ZxoiJ9tez6fueTj/nm2295fHqilMLz58/ZXmxEuKcUYhwIIXI8HLh/eGB7
eYkfBnACrOaUxQa8esXl5SU3NzcA7IKs+WkT+MP/9o/40fFI9ZWrzYari0vqPJOc1Ay+++67+Mc77u5fk4rWP7l+vYqjLrZAnF17tv2atrXbskxKm/ZOAbsg6LgPQcDBWqRFTPCioFz9SXb1vGci2Fh3Ab75NU7mkKl+2jX0oEs/b/oWFMUYLcVs8VqzaTawlJX+2erImg2QdTcMQwM77LobvbOzY21daKa91KT0Q1HpFmXCdV8QEEECl2naEIKHEhqoZvav/+p7bfZz3cakn/tWgxiDI7qyUjpr3/pjtSUpKdjnRDHYVAnBk2vRtg5mZ50I4KgatquFqAGQ7Mtm8zPWdy54AdOccyvls4qQjrQVVGq9k3UXnLYQwUkNbkEk4tMi91JFuZagvokCbCnNbe7UWkk5gwpAhWGt8XfeKa1VwFrxp8VntuCm94GtZUzPhshpXTO49d57gLKUonL78rzFzidVF7Y57tt8lkBW5xUCQjgLnLt5GPyqqNzPh3O/us0T38XGbm230AeMzhxH9RNB9w0vc8ACM/M/9X8707p+O9sruT0gW4YVcllwjtbbsq2hagFmbdRvQbh9u+bvO34rgrZaK69fv9YmyToo5TyrBrCixf2xFq6vP68Fh6cB1duCqFO08vsH7Pz9569/2/u/K3t37pAYkm2/O69psPc0hMV7vB/U8YmM44b3PviA99//gM32ggok3TwHlbOvRZTlUl4IrlBq0gyO0EaWJTMvC3EYGIeJGCetK9I6kwooTU1EwIT6WB04H8WoadDhvdSeWWbNiRfKMG7w2ZBHDdiqw2nquEdLLSB0jqbq6Gxz0PurFjSEqFm7rK+JSh3Q9EDw0sUecIQ2vqBZviq8cEJsGS/puTZIbV6IxDgQwxXOyfj6wTNdbNhcTIQYmOcDy/HIMh85LnuKq/ghQhED7pzXtgGaaaiW4Qs4gW/JTowXTmrnPIUBR6gzy/LEx++9w0cfX/Phe9e8+3zDs43n+cazcY6ISvm7DCximIrelytg9I6cJKvpNVhzES2thSpqUTjH0+GJP/03P+aP//iP+eyzz6Sfk+uazjqaOpYYVnh6euLu7o5nz54xDCObyXPz/IrLi0tKEQN/f3/Py5cvuXt40F5EUTcucSJqQyNWuk8IQRpxVwEUXPCNDrnf7/UacnN8VudoYTtt8a6S5pnbl7ccD0c++eRjhhCFutqhd6tDVemXs6GJYvTfdCC+b/3bV+8sniOVNt+pp7bBfg5aeL6eoyqzwLVrWpalOT+GjMp19uda0UznxGEQR0VardgaERUuebjmSLp2jVUpJM2aGUlGMwjyYqPO0jZzVRdVlNQH652k9VPQpKlrXamcsGYevbdasYJdgMhvr8FXKUVqJ4xqVtYAyhz18+xmH4zXWpsYVp3nzllYn6dlSoLV0+n8EZWz2JxG+8z+mfbBtznZfTBp9zCO40n/LRtfm0Myp04BAgtEewqucwKKGUUTrCGsUcms3kPGv1BxPpBy5uHxkc2UGYaR7LNerwci77/3HqXWtuZzSVxfPWO73TbHO45ZArRFGtzHIYLWY8YYlRr9G25vb9scK7mwLJnFVYqT+rFDPfByNxNLJaeFJWeKl8yECJXJGMYQmaZNm/eWeTlta8Iba3INYgquSNbWY8FtwIWgPcRojAeTCzShIQuUz5ur2331c8F7ryUfurbc2mdOruuUutzPl/Y5iCDSPBuotfoOxkDIeVnZBykxTF29anuWrPOkuxeZN0InNLELC+ZA1oiLA6U4aUmglDMzZ0VbvByPxwYkOKNO5zXTY3arraGzwHIFn1bwYr1G6fvqTSApCVgpf+vXgNqpIj3LSkVrESXIM0VIA6mql3rI7LQRtuPEBjjduyUrFSkp4wal2tUq7RMUTGuAfDM3a2LBeWRfk3wmpYgwUBgrwp+TTJGcJxCczHfbj4wpU7wGhWW1lSEEpjCK4Izu/cX4sQpy9SIkpbOTNh9EmbQTtfKhs8vo73wTDDFiFOqv5ZTwTgH3GBjiiG8idk7KWKy5tQVMJ5/+9qRKb0/f9MvReVhaXHASDDlHwURqZK9bdzALrmSMrKZXfLW17rHf/4uC4A5VYPYOXwB36seXYhnbNSgu1VO8s0RuU4H+ruO3Jmg7HOYueLENrLL2dLDA5zSl2z+802N9wG8L1N52DW98wllQ1f/cb5Lnf/++ydS/146G9LrcULfz87Z77f4dggQWFc+0ueCDDz/m/fc/YJgmMZpVJLGjX1EpmXDKz/cmIiDs2mVeOM4LPkQ222fC79egSuhPSjHSQErUopxSCSTLJY6BCEekKuo5vuhycFp156Vg1jjupYrTNgwiZx/jeDKW61irwfae6iQYDLWtLKlxU0qjC15aEehGIzVxHjTINfqh+CmOGgb8sMGHyDCOjJstwzjJOfU6hV0Qce6CGLbEcSCMgThFwuCY5wOHZc+87EVaO2ZckeeDNpcu2iPGudAULakVlwshSrFwreCCY/QeTyZWTz0+4ZYdH99c8od/94fcXI/cPBu5vghMzhEdhGJ4EkhIuEquU2bSfOD49MDxcCB4zzCMokw5jPhYJYNaKsQAJVFd4Fe//ox/8c//L/7yL/+DKEWmSjkmcWJc6NaYjmOFw2Hm8VH6xm23W64uLhiDyDwfDnu+/fZbXt6+4nA8AquRyoUmJtMcUkAUAXOrCwreUWtmv5f+dKI4m9sc7x1zmUPCl0/LwsP9PXmZGGLg+fVNe7/VZ5wUt6uTK8ZZjb6+Jnj523kQcG4HTja7LsvW2wb7buczNTdxDuS+YozNNjSHptjm1E56cj29cxh8bFn0nj7lRQpWxrxKM1cpN5F5Wp34pa23mzqj5jBaQXopBa+ofVtvmsW13/mmxGnrruLrGlTYWISoM7isfZ7s/kqVjIQrjmUuoqaqTpHX67D6vaIKbiGE1nuoD5jOUWYLcvrgCGTTbs/WrY6zgVCi4umavRHnVH9ue1plUHEeczb7OdD3GLTPNyfVZMztWk8zNUbDtOzzqUiFBWLem2ExoRRPSULjkvUhUtNr1hCKZkzSUtgXKVloKojOUXNhM468/957gIA1yyL97HwUJT9XKzGOhFCZlxmnoiOTOvsgzvVhv+fx4YFllr6rltkrDpI6tj5naqoMaEY5eDKSsXz27EqzN5L1sIAbbfuzjkM/NvlN51PHfjks0gy3FryTzzGxh2EYiN4xlKC9uQLOxRP1RgtuYBU3GceBYRqprJmkOISWuZH6ry577mR9WcbMQA4TiOnnj+8UBG19W/ZuGNasW9ZgyQI7YwXYmjFbXGttgWdKSYBHXZPjOErQtQj4S4UhbgjBAtYoPcNU4j1XAYMFaNb2NvrcqRL8LksScTRn2bxAjKH1hOztmQETIJQ9sYUZcsJ1dVotMNZaNKd+i/NIPZa8qD37ogI87d6dp+ZlZRQpSFatdlfpsnZ9Dhp11UKpoCwACXTBOZW09xGTdpdnlqGrVbOm4aUUrLGzvd/2JqE4KjtDRU3Sksnzgqvqq7W/I7oFoVIjVO9IShXu+zjaGjjxq8v
Rhbv4eDpjDu+UZZ2/+9987RCq0OXr3TN8b6MzBjWxfmsBmBWkOtYJroCFygQrGfY1JZ6bVeqrEf+KFPjgtjPW9r0BNIUoWf9iKEuFGYvHogslXlfb27bHqMEcWpCF0RLeDvFjg8UY/1pE68l8Gq7WGTX95faz+1EAPQiFqnPBmCWUci2ltHyEaXU1axiKbFDZga19rul/3OakYCYFWPMHHN2nscIY0potWOda3IaQYE6KXBhCpRa0POrALUUixZZJCnrQLSIZZYNLAPrYPUSlIAwzjXYD2x3iMGMOBVQ021ct1v/46BUBJQMqqP3SdPlt0e5ZAhY+0zuKqtocs2a0y5+Mc9JrXZEjMxqkqHVRPobMWqfqTJZbv0G33Yfcd+HZKNmSDaR/jgEGhMvA6tUAU2Bl6rXreeS20bIht2Ahu9cxyI2He4PW+FgUhAhFdZiQ5HSFREhAGgaFckk7Zfa2FSI6xoO/jB/cqeJ+9tRLH1JoDWZuiuVwkCppQw1FX9Pbr5Ct+q+x4wf/TWMc8H7kUB+6MFG7U3BFPYJHWv1BURRg91KnRIrFpqG+wRf6+4QhuZyLAwaGzGKAFIYjTUTeXRf9/3uO77Nkl7JSOHwhabHUdkYH48vKDoibAK7BROYANeuTbJfPBYYgT1gUqhbF8gEFwb+45yvlGAaz6RDrwy+BRj28QYIXnCyxcvcDoeAQTU2nA+n3E+n1HKiuW2knqbE8rjBZdSMc0ZKYqxCQQ5VaSYORsxJeTIlofRn7RWrLFAbMwGk96AeT5gPmREDahLQ5CIpB1RMkqr0F5xOhzw+Wefo1RBDQKtN3z71R/wbQOi0r9WAxC6VjSt6OiYImnwVMlMNo4kIZaIZVk4AiEeMcXMAloUBCQgOLivBGADRxPBWBu9uxCEx4Wbvelm20XoHwBFDMmWEUXXnPYu6KZ2a9U7E/0i1buhta03SYT9pNUAArdr3cAXF9La04B7U1NAN7jMfDR7T93nJ7a5KFA7UMqKaYoIgQqFXTvE+shFaOOrxQ4uyAGPc0QAuxdUXWwjxni+L/ZxhoggSUYHmTJJfPYoKwA1MiknEAOoNlQDRntlX2oSJmZRIgHYwXYhuFPzhGkMJ9cxR1jNx3svtu/lPq69gYF1tZiLVoNjsCJ6iyNRIxPCYjAuDDt/73cm4KeNgjTSeW/IDHAW04611LdRDrU2xAQKuxi4HIQKqdXAAl5ahSAMXybC+Ys8N/NfqhCr3v65x19M0vaEf8/sC4I9JckD1j5ukCqezJmSsE9u9pL0W2A8EirdvndU2QyNH4njk9frcJ4fS5ee9w88T4z8OJ6LntBpWTUlcEbFNqndYycFVEb5dMj0qNch6SwsXBqo/6aa9/1E8knQDBfb6FC4lDOTtnlOSCEiJ+DueMBnn/4Ay7Lim2+/wpu3P8FyveK7b77DrZ5Ryx0OCHj4JOHh8RH3D++RIOhHQY2CuxcT3k6v8ebzzzDPBzwu9zgcT5hfvsKXX/4I589WXMI30FShYcXaCx5u7/Du8R7ffTgjvbggffEGn7x9g1/+8mus4YB/9u//U7z69C3++v/6Nb765itc1t/g+h742Zc/wcuXBwi+QasVy82poQzm2MCOYbShFYJilKhus0A2CWHt7GPQTgpe1W6z2bbZWgwYqynWqVF1wsAgRDeqEXb3ww2HGnrNfiYAQXCYZxzmw1j/AJNHVv2erVF7eEWNVTXOpNmLeewBg/371frh/MF+OR+4G2w0QWEyFLeBzN3Q6P06e56UjbVuC/vJc7sEco8CQoySZ0Y2YmswH8evW3IbwpaMbdcYFoBuzsgNMIccPwNrAu/XSBzD1qMlOxSEPVrFknBDa+2YfP8yQXa63JZ4+rGrbuc/wBbQ+YlExBQ2fv+wTvyfI6Oqbtu2e8trJIAyMG61oZlCmqhi1YYpRTqpaOMbSoMGp4ISmVBsgXpKmRQ2VbTGSlFIRKSrrQmoK9r5OA3apFIowjHPGV6lIU1dbZSGV78C56KlhNuyzQzaqC1bwhBNsdP7zVgNCAhGrxIDCDyp81k+XoVulv168MoBqm4vWXWtjX1Z3o/l6w1iwY0AaBu6Pta/HZ83uIdgvVm98Q3wHkomSb3T5lAQaULvjf3KQtQXoHVPphJcu6IU2qcYgY42mvm59jJElL0tcVMMTRIQk8tUk8LklCFfkynF0dPlPW2bnejjPEePZW9WSYp27anKxwQ4GDxqc5MqFeKIfkfM0wEpRlwuNxDUDlYdsuq88H1roSppWzsBCgctu4z94mtirzbr98DPjUF9hFflu279mh2KpbDa0QPFhnTCmMHn503b6sI91hPUYdXc9mx9dvTuti5sAaMQ/LpdVzzI2SomFLqKKeJwmHkeISIf756osL5589p6im+4XhdcL1cspeL9/9Pe2YZadlYH+Fnv3vucc+9MZsZxNJiZtEYalBhsFdFIi4iKxhqMP0IbURq/EEGoSksx+kP6wx/Som2pCuJ3EW2JX0HwI9hA+yfxK2DVmBgSNYkxM2YymTtz7z3n7L1Xf6z17rPvzRxhwJxznLseGObsj0nes8/a633XetfH9pgzG5sUZ6AshKqAQVlSFQXDamCl+cUqrcqgstyeZK0GulxVoevR1zY1k7q0nlONMKyGrA0HFKOKcrDOeLxl+duSbAd+vE2zXVNQIC1MPeQ+R1y0eJPwdsrGY2fYv77OcLjOaGT5tRdVBzl46DDbky22tjZnhgmmgysvY1+3jZVRF0hNA+WQ5L1fU7Jc/sZ1WGqBbKSDR230whDrxhf2XsCmtdDfZjqxXPNqYBWvPT1mp/6GlOqZQ6atKZqBt7/IIYfZ0JvNiyYv2oVxp2QbE1nH5d5wrdg93Q6+6zra3B7DHW2NyXSeA1AhabLdHtc9omrFRDCXh2bnoh/lnLQdzlV3kKItpbZUkihVPZLL59W2dQdNF41ruqEqoDCHFI2FbSOYLIlFsSQ1x2g96fema5CSzrEJHuXQNkiqfNfYIuySFF71ufBIAVu/5LyztsnRL3S/yWzdbg70adtgvoC83re1dNt7hwuPaEhmaXVrKlCKHMqb52P1SBj7GSwCUBWw3VRSQZlyXqo9q6YXNVUVllOadf48VsZo65OFx7zn7oHOOwz5wSs7FiqWt2IelDyJ5tjZvuFl/7GdRtcsVKq/OOsJcPf/zQ90p/HTF/b+bkCA8FxMAAAK+UlEQVR/4ZrPnavnVM65adOsBOzOhXR+3XCDNe8A5PHvNErz88qGXqa/YNwxbp9am7btDN3kwp49r7nR7NFLjjJtax559LdMjydOnTlFUzfsG1U0RUk93ubMxlnGkzF1M0GkQrWmkIrJeMyvHznOibHFardpwmS6zZHBiIsOHOTIxU/jkc37KEfCYC2x0W7xi/vvZvPEababitObNcPTU8a6wemtMYefegkHjzyFVuDQoYvY2jrJvv0XcejSSzh28VFOnXqYlGoL8agL2lTa81NBcrNqcU+KTilkTJNAm5baywHXU9tFavzFbFvfEXP5ErTz4FSlGxzuzafFqy5lGXNl3zMY
TD6tRHDO21SZ7RrtX19nNFqzKn/udcq5dLvlN4c+ZqMtOzm6kh+7jPa8w5UXWPiuW984UZ05Gsbjcff8crnj8XhsY/XQjrzAyJM1zBo/53eg6Rl2/QlDRBCdxWkWqbSdt5wbkGPykVmysyu/vDiduqFgOzYFyfu+5O/fNI05Ibqmw73iC/4O5WIP/gSYVeKzcdV1TVVUjAZDz4P3okD+XPOEMp1MESlppO3yFnL4V8LyIssSL5KhPglhE2U1oBqMrLkoHmaJv5fukEmpYDiyhr2o5/Zq47+DFZKpyiFNAWe3trtw67axUuGTyaSb5JBc/MRDTRAPobPWBlVZ2eSnynRiu8eDqqJIiWme5NzDa7s/MydH27aUVerygdpWPLTN3ilLIsd7KQmb423Lb/Cdh7IyL+u0rmnV5LssPCxTcgErOv2dfAFsSVCAJMtP1uyEUFRrCzX0Igdo67kxsyb0WW+7t870cm1h0FVpzoTku+qd3u41Vm18kWPvQstoOKRuJr4oy4Vxsnc7dQ3sW835rBYTQjbsgcm09rnPCvpYEQ0L2c1Va6vSKguOx2PKZM89R48USainXhLbnjBFNega3ZZuHAiN53pZmJy7ocyRZa8FRZE8bFQh2b9NZdH1Wuw8+KqW35N1CRaul1KJksvlFzMHZc8DbjuRbffeCZbgb/uxPnu58+PxBcVmxlrWN3lXrSvII9443ndWLbTUdmU2t7Y8LNCwaqimnQpJ1hBerHhNnQ1dZoUZitIX43mHsjVjqHC5M0PAHCDT3PtLLV82jbdZH44YDgeuz3PYlH3PAwcOcPCALXTHdcOBBk5vnGH77AbjzQ3OnN2k8vyiQWk5pdoqUpSktQHr+/extjairAam4+raZCNZpdqmrpm0E9KgJNWJ7ekmzXiLQSns3z9gWBbUSWiagqlAoQ2iU6rS9EDju89lUVm4IjDwOWff2oiEsr09pW6UtfV99i4ka7EzGq2xtb1J3UxJLs+5L2BTN2YolIX3GlNIFnqmbdM9P3rGhTnACtdFdDsbbW5z0bTUdUtZFui07tpGaGN5rWVVdIU2LIdstpOW/5BG1A29aqSzKpN9mZylCCTw9ArLPff3qVWmdWN5rpIjzZTRcISUwmS6RYt2vT9zHtqgLGkRpnXrDhoryqO+e2qGqslmKhJ1i4cgm8Oj/77uWB96P1qr1poQL7dfJivX35BQySHcDXUzBjUnX5Us37WuzZhqtKHF8nFTmr17ZWnGTF0k6+UHnaPbnGf2TqZkOs/0R+nOzVkkGtIyHm/b79OkzmGaEE8xgdxPzd5im0drz4n25EfTvaqUnqc+W7vN1vnmgLb1UlesLlnVUPHInqLom1f+DBAKsRBUKcSMWjXj1spWtJ3Tdx4yL4xvkYjICeAs8NtljyVYOY4QchGcm5CNYB4hG8E8QjaCcxFyEcxj0bLxx6r6lHNdWAmjDUBEvq+qz1/2OILVIuQimEfIRjCPkI1gHiEbwbkIuQjmsUqy8bv34YIgCIIgCIIgCIKlEkZbEARBEARBEATBCrNKRtvHlz2AYCUJuQjmEbIRzCNkI5hHyEZwLkIugnmsjGysTE5bEARBEARBEARB8HhWaactCIIgCIIgCIIg2EUYbUEQBEEQBEEQBCvM0o02EblaRO4SkXtE5D3LHk+wWETkUhG5VUR+KiI/EZF3+vnDInKLiPzc/36SnxcR+TeXlx+JyPOW+w2CJxIRKUTkDhH5uh9fJiK3++//nyIy8PNDP77Hrz99meMOnlhE5JCI3CQiPxORO0XkRaEzAgARebfPJT8WkS+IyCj0xt5ERD4lIsdF5Me9c+etJ0TkBr//5yJywzK+S/D7ZY5s/JPPKT8Ska+IyKHetRtdNu4SkVf2zi/Uhlmq0SYiBfAR4FXAFcDrROSKZY4pWDg18HeqegVwFfAOl4H3AN9R1cuB7/gxmKxc7n/eBnxs8UMOFsg7gTt7xx8EPqyqfwI8CrzFz78FeNTPf9jvCy5c/hX4pqo+C/hTTEZCZ+xxROQo8LfA81X1SqAArif0xl7lM8DVu86dl54QkcPA+4EXAi8A3p8NveAPms/weNm4BbhSVZ8D3A3cCOBr0uuBZ/u/+ag7lBduwyx7p+0FwD2qeq+qToAvAtcueUzBAlHVh1T1h/55A1t8HcXk4LN+22eB1/rna4HPqXEbcEhEnrbgYQcLQESOAa8GPuHHArwUuMlv2S0XWV5uAl7m9wcXGCJyEHgx8EkAVZ2o6ilCZwRGCayJSAmsAw8RemNPoqr/A5zcdfp89cQrgVtU9aSqPoot7Hcv9oM/MM4lG6r6bVWt/fA24Jh/vhb4oqqOVfU+4B7Mflm4DbNso+0ocH/v+AE/F+xBPDTlucDtwMWq+pBf+g1wsX8Omdk7/AvwD0Drx08GTvWUav+37+TCrz/m9wcXHpcBJ4BPe+jsJ0RkH6Ez9jyq+iDwz8CvMGPtMeAHhN4IZpyvngj9sTd5M/AN/7wysrFsoy0IABCR/cCXgHep6un+NbW+FNGbYg8hItcAx1X1B8seS7BylMDzgI+p6nOBs8xCnIDQGXsVD1u7FjPsLwH2EbsiwRxCTwTnQkTeh6XufH7ZY9nNso22B4FLe8fH/FywhxCRCjPYPq+qX/bTD+cQJv/7uJ8Pmdkb/DnwGhH5BRZy8FIsj+mQhz3Bzt++kwu/fhB4ZJEDDhbGA8ADqnq7H9+EGXGhM4KXA/ep6glVnQJfxnRJ6I0gc756IvTHHkJE3ghcA7xeZ42sV0Y2lm20fQ+43Cs7DbBEv5uXPKZggXj+wCeBO1X1Q71LNwO5StMNwNd65//GKz1dBTzWC3UILhBU9UZVPaaqT8f0wn+r6uuBW4Hr/LbdcpHl5Tq/PzyoFyCq+hvgfhF5pp96GfBTQmcEFhZ5lYis+9ySZSP0RpA5Xz3xLeAVIvIk38l9hZ8LLjBE5GosJeM1qrrZu3QzcL1Xm70MK1bzXZZgw8iy9ZOI/CWWu1IAn1LVDyx1QMFCEZG/AP4X+D9muUvvxfLa/gv4I+CXwF+p6kmfiP8dC3nZBN6kqt9f+MCDhSEiLwH+XlWvEZFnYDtvh4E7gDeo6lhERsB/YDmRJ4HrVfXeZY05eGIRkT/DCtQMgHuBN2FOyNAZexwR+Ufgr7HwpjuAt2J5JqE39hgi8gXgJcAR4GGsCuRXOU89ISJvxtYlAB9Q1U8v8nsEv3/myMaNwJDZbvttqvp2v/99WJ5bjaXxfMPPL9SGWbrRFgRBEARBEARBEMxn2eGRQRAEQRAEQRAEwe8gjLYgCIIgCIIgCIIVJoy2IAiCIAiCIAiCFSaMtiAIgiAIgiAIghUmjLYgCIIgCIIgCIIVJoy2IAiCIAiCIAiCFSaMtiAIgiAIgiAIghXm/wEHyByi/B2XVAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "cgSKHJobQpt6", + "colab_type": "code", + "colab": {} + }, + "source": [ + "" + ], + "execution_count": 16, + "outputs": [] + } + ] +} diff --git a/thirdparty/mmdetection/demo/demo.jpg b/thirdparty/mmdetection/demo/demo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f390fc9013dc8fa76a305b8e56d2bec76ff758e9 --- /dev/null +++ b/thirdparty/mmdetection/demo/demo.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e9ab135da7eacabdeeeee11ba4b7bcdd1bfac128cf92a9de9c79f984060ae1e +size 259865 diff --git a/thirdparty/mmdetection/demo/image_demo.py b/thirdparty/mmdetection/demo/image_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..5fbf93d3b6f20149b9c4ab924890be202ab34946 --- /dev/null +++ b/thirdparty/mmdetection/demo/image_demo.py @@ -0,0 +1,26 @@ +from argparse import ArgumentParser + +from mmdet.apis import inference_detector, init_detector, show_result_pyplot + + +def main(): + parser = ArgumentParser() + parser.add_argument('img', help='Image file') + parser.add_argument('config', help='Config file') + parser.add_argument('checkpoint', help='Checkpoint file') + parser.add_argument( + '--device', default='cuda:0', help='Device used for inference') + parser.add_argument( + '--score-thr', type=float, default=0.3, help='bbox score threshold') + args = parser.parse_args() + + # build the model from a config file and a checkpoint file + model = init_detector(args.config, args.checkpoint, device=args.device) + # test a single image + result = inference_detector(model, args.img) + # show the results + show_result_pyplot(model, args.img, result, score_thr=args.score_thr) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/demo/inference_demo.ipynb b/thirdparty/mmdetection/demo/inference_demo.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..565efd7243d56098bc3d6fa974b3ddcef050ba69 --- /dev/null +++ b/thirdparty/mmdetection/demo/inference_demo.ipynb @@ -0,0 +1,99 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from mmdet.apis import init_detector, inference_detector, show_result_pyplot\n", + "import mmcv" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "config_file = '../configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'\n", + "# download the checkpoint from model zoo and put it in `checkpoints/`\n", + "checkpoint_file = '../checkpoints/faster_rcnn_r50_fpn_1x_20181010-3d1b3351.pth'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# build the model from a config file and a checkpoint file\n", + "model = init_detector(config_file, checkpoint_file, device='cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# test a single image\n", + "img = 'demo.jpg'\n", + "result = inference_detector(model, img)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA1cAAAJCCAYAAAAsrj1sAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOy9ebDm2Vnf9znLb3v3u9/el9lbM5JmJCQ0Am2IEMQiIZAlsIKBgAOxcVwJJadSMSR2AYVZiqQwwQJkjA2xkI0AKQqgXRqNds1IGs3SPb33vd13v+/628/JH+e8b48pK0tFU5DU++2/3tu/5ZznPOc5z/4T1lrmmGOOOeaYY4455phjjjnm+H8H+dc9gDnmmGOOOeaYY4455phjjv8/YG5czTHHHHPMMcccc8wxxxxzfAMwN67mmGOOOeaYY4455phjjjm+AZgbV3PMMcccc8wxxxxzzDHHHN8AzI2rOeaYY4455phjjjnmmGOObwDmxtUcc8wxxxxzzDHHHHPMMcc3AM+bcSWE+E+FEM8IIZ4VQvy3z9d75phjjjnmmGOOOeaYY445/iZAPB/fuRJCKOA88O3ADeDzwA9aa5/8hr9sjjnmmGOOOeaYY4455pjjbwCer8jVy4BnrbWXrLUF8G+BNz5P75pjjjnmmGOOOeaYY4455vhrh36ennsMuP6c3zeAl3+9i6UWVsfK/RACrAUkUvqomhVYW2G5HWWrjbtMCOEuMRYpBNZaEP5RCLTWVHWNmP4RMHD7l7UYDHEcYfLSjyfASkttamwl/N8sUoL076tLixAGg0Sa6UQEKrAEOsbaGoAsKzHGovXtcVor0VGAqSo3ntpgqUmakuHAzVFIi1ZqNmZrDFKF6FBRF7Wfn6KiwAKmqqdXIoXCWuN/gVLWzddMySmQMqAoKk/y29dOKSMAiyVQAaZw/6NVRFEVKD29D4ywYEGpqZ3u7qwKZmOQGqwBqfyzhV9JIRDCTpcBEFSFna2VkBZjQfpHK+WulzKc0beuPQ8IifHzc0tkn7PIAoHFIsHT3Eo3bonA+HEKI2h2uhRl7telpKorPx5/DRpsRV0JhFV+7AYpoaqsH6d2c7SGqnL36UATSEtlFXXp+VhUji6ep4wAkAhhkHo6ace71ogZIUwJmAq0e78KBVJKbAVVUTi6GDub/vT5SOtoY8WM5lLYGe3xay6koK7tc3YMz9mXbn5BaKiNBePHKdxv+Zw1NRakkpTjKe0kCAUYpkPSQUgQSBBuPdNx6V4jZkwxW0MpJVU15T0xe8ZsMgKkFdRTTlaghEAKSTXdHwIQFuvHLYRASjDGzPgujhsYYxhPJnQ6bQCyLMVaN3eAqqowxiKEmPG+wNGgrmra3QUAimqCEiWFYymqyq2BxBIF7lk6DMjynFa7R+35ZTAauv1Vm+eMU2KmTO7Xysm823sNIfgPyWKRSoJ9LjktFoFSgqp0dFlcXCKKI25ubgIQhSFSQlEajN9rWscEEYhaUJZOVna6S6RpH2Ny/3qNkBopxYzPRqMRSkqMlw0AUgqsNY6vASHdDnWsY6dTwVgnz4LQ8XpdFUCA8rKxqgzW1n5Gz5m5sJjaEkYhAKayuH+Of4IIZOC21OjAvS+OYuJE0T8c+/dLLAZrb/OatU6WxXFEEAYAjMcjoiig9vu/ri1gsAh3sWMONzoh3cZwREBgkdN9bS1KuTU2/hrjBJx/hN/vCIy0GE/fZqNBWWZUZcnC8iIA+zt7nl8VdV3P6CmEnMk7wK2L5+PbnCGwgDXT+4Qfw+39KABjLEHgaFCWFVprrDW33yclUkpqv2en6yylmPHxlLbGWHqL7dmzqsrO5P70rbY2lOXtMYVhgA4l4GXHpELoAC1vj7OuDdSzow8pIZCSLM+5DYlbqtmbiKKEPMtma2OtxZja/RZ2dp/St4+ZsjBOBklxe8zWIKwgSsRsHYusBuRsb7tnundMbxRSuLWq/flI7UWiQPgxKamoTeWOuil/MpXnU7nsnmutQevpeWWx1q/5VA5rCViq6dnkhkQQCoqpvmECpDKOz/2+na3lVOcSCoTw8uj22S6wGANB5PlMQJk53WR6jTHGybMp7Yz1e9DOzpQ4icizyu0L/LyFkzWW23ztzlFmQk8IL19m45zKUvtctvZjf+7Z5/h+SnOB1zGfA2stUgrW1tbY3tkGoK6Me+dsr7l101rP9poxNVVZIxXowO+HWlHVEiFv32eM+avbz/GqELOzfTYm/3sqR6Z7Gdxer6oKKeVs32ZZipTK7ROg0Wigg4DRcIhSUyoIJ4dne8EANXEcz/hzMsmQSro1k1NZ4nhfitvyzViLVAolprLSOH3U3qZxVRmaPTUTnWUKUU84HddOzwGDMTU6groOPU0USrq1BaitpcwztITaP0yHAcJairSe2RdSun1Vm6nurfyeun2m1KZGKgXclnlSCCaDbNdau8J/BM+XcfV/CSHE3wX+LoCKFMsvXgIgbiRYISkygTUDAKQJWV5u0B9vkKZOKJaZocodAcEfesIiA4GsPXGLGh0G9OIFTOkEvAwUtSqwtVNC8zpFyBZVnvHN978QgMFTm1yr9wkWIt78n7wZgNU72hzuPc4Xv/g1ALafbKKbEjEZkd90h3G73aA8CVcup7zwxXcB0IwqPv5n17nvoY4bZ6BIzS4qSqnTBgBh1OTWRsbCmQx71QuNwyZBb4SoY/+cmqyEuJOz2Hb3Xbk8QsQJ9939IEtBC4Cd3WcxVrOwGgHw+FeuE7dTpM3QnjHztGZcFRTVbQGsI0leeAUY0MKQm5C6b2HbjanIx6hOxJHT7jlxUpFXDawsUXUCwP7OgGPHFgiaQzauOKbPUkGjUWEaTiFrdmJCk1EoSaAdXYrxIXkI1AG1P4vzrCRJ4J67He/u3hpx8vhRDg4zBgN30cSCJKGRtBlPRm7sQUAQhKRpCoCSFkRFUIfcunAFgChJKLZzJKBCT4dOG7XeRU684tgLOb26RF7V7OxuOZ463MMETZqZJB27+zY3xmgEC0ccb6quwY4C0j0o/GFiZM79Z89ykO4z2Bu6OcsOjQ7YwP0OlCXfCxgNILeOP+NGRG9lAdkuEKrh6bKHSmNEs+merTWdVsKtnU2GO44uihqBQsgagVuHRssSJgVF6nijHGka7RFFCUnurpFBziSw6KhJuuf4WsuQMiigdPdldc6xlZCd/YTx9QkAp18UEK0UTAaGuOnESpZXLB7tcu1z3mkxtsSJhVoy9IejLQBrCWPhl+AYo9EuQpZo5Xi/rjQW94y6ds9WSrlDza0ctSnRQqKNwYipAW2IE0WYdNjdc7IEabnj9EmuXnFGRKPRIs9TQi3ZuuX450fe/iOsrKzxy7/2S3Rajj/jSBEEIZNJBkCvt0qWlwwGAxaX3DoUQ0sUjKhEQthad+Nsb5OnY/o33f7I6RPUPXqdPivdrluHvMs4z7jz3EkeeuilAPzev/pD8qyeKaFJkqCUYjQazQ5SQ01a5EgMjdg9P01zlBI0EycPiqqkMhPuecECV84fuHVJWzS6MV
VV0Gs6ubu7e50Tp+7lllcO3vbGt/KJRz7N5vgQ4RWpPB/SSiIefOBuLl67BsDa4p3kY0FROXpOsjE72wWLa8t0245fkibEzYhssMGVS45+aVYTyYTCut81KS3VQTZqJhPHU0ncpqz3wQacPnUPAEKNaMXrXL55HoD77n0hZSH5zKc/TK/X9TxSUWQVUpWYyh2ErVaLnf4tjp91PKUjRWOxyyg75MZj/lCvQx56+D4uPXsRgM2rhzQbCms0eeHGFGhDXgk6nXh2yEaNgrUjPUYDx6MH+xPCSDhlXzhZGYYhkYqojSD180Majqwv0x85OSWEQCCprJk5EfJJSlEUtJpNZNPR88SRdZ782nVs5p6z2FtmlNZk5S1yzxsLa+vk+RiEIUudLLGmItQGHfj55pZOr8fhQZ8plAqJGgmj0QAh3Py0dAqlkm7v1aaimcRYe9vZEVsNVlGWJSq4rZomSULhHT5aa7I8JQgU0qseQmmQispUnL73tFurToNLVy4TxW7vDYYT7jh7huWFmCe++mVP4yFBKDh5+jg6cHTf2R0gdIt20/H++nHFzrWcnY09Su8AacYLHD9qeeyL51HCGXOmKpFCeaMYwjBmealDf7TFqbvcOCcDw/XLGVprsG4d2r2QO+9LKDju+OX8JuMqQxYaYXY8AToMxkPqskR7pXNxuUGeGfKJ+x0EiqqqCKMGp06fBGBj8zJ1pWaOzHTijNfxZEDg5WKcRExGNUJlVJU/L+IWQlhq4/aVQYFpUnFIq+XkVKvVYuPmDmVpaLabnocFUSzYvOp44eTxJrf6gC5Y8E6EwX7NWqfJeDAmr9x9ZTWmGXVnBrUOS+raYuqAJHH35bUh1AnGFkRNN85BOmbt7iW0cOO8+WyBLROkqpDe4IrjmEF/TGXtjC79wR5KhZSV431TW5QKsNJQ5NLTMyEILdTMxhXHoXOIedaM45Asy6iqijIr/bqHs+sbDXfWGmMoy5I49nJDhwgLURTN+NramigOyIuMVsfJYVsJ8iJFaXeulmVJkjRJi5yV1ZXZfel4RLer6C66gW1er0C0KZWTw3lZIghpN5aIA8d3O1vbFGnhHEzeIFCBpqoqlF8rKSWhdwROxzmZTGg0I4QQ5N650GonzjHrT9I0TWm0Qk4uLTOZDP2zJb1ej80Nx9PZpKLTOkpZlkQtJ7t0EJGXFUhNrbzcMAGBUGS1l3eioInE1IpIOvqWMqWQCbXt02m4/Zjujnn1d72U/i23LltPXubs9/R4aqMgzo4CcOI+xTObn0beOjpzGrabR6l0SZm79y0f6XJw6VmuXRmyftydDf3xiGasGe0GtNrecalyhgOLX3riKKCwNY04np217V6DsmqRlbssdHzQIxBc/HB2la+D5ystcAM48Zzfx/3fZrDWvtNa+1Jr7UtVMG9aOMccc8wxxxxzzDHHHHP8fxvPV+Tq88BdQogzOKPqbcAPfb2LbV3T8hZ+XlZkVYWoJFo6UzIJDbvbNzCypC59rFAKZCgoMvc7jkPqusTFTp21GSTOW5FlKUHkrP4oisAIJpWzihdFl7wxpibk/FPPAHDfq4+itiwXH9nhqU8+AcDWlZdw4XHYnziLW1Axyfb54b9zB+9+p7MbT72ky1PPXidIFqhqZ9Hfupghcuj0nPege1TT7y9Sxdtc/ILzViysFCwsR+xfG7K26Dwf25MRtYkopfPsKNFGhoZmYtndcR6FdqfFeDzi0Q98iv/uv/7HAHzPO36C97//Q2xu7wHwbHIVU2kXehaOxpHOwRrC2C3/YFJQldJlTE3TdqxBUmEaNUunnddGZDULqx3q0Hkwqgq6vZIsK9E+JSasAnY2hhy5I+TsvW7Ohxsttq5UNE/5kG6dUWaKcEEwrJ03XagQoSrksCT3HoRWQ9NsaDauufedOnGO4bhg6yAlSJwnopX0oLSIytBuOG9abgryMkd7D6o0FUrGlNQsn3MesFazx60vXUIMxngyMLAD1HZJy9NpODzkC1s7nDzR4OiR0wD0W8uMd/YYxgWhcV6+E6dyIiVYPrkKwMbBPqU0FBQ0dlx62N4o5cbumOGw4MQdznMl1yvG4xQrfVhbSLotQbljOL5wDIDdpzbJbw6QRtFedd6mvWyCLtp0F70nOUvZvblFcykiHfl0yTQiCQVJWzMaOzpMRpAkgsWO4/0xBika2CSnGHrvcpGgl0pqm8+8fGVZUNkIq9z7tYg5HOSoQKFwvCHLmCDcQ8cxmU+rVFJg0oJOpwfAwf4IW5SAQcfOu1XZEinimdcqzyxlmRNGepbmaYyhqkvCMPwrqS2GyqesJXFMmqYIaen5ZwfSUkjF3kGfzKcFRlpz9crmLD3MmIKqzmk0Oxw55tbvo5/4JEpqrIAgdDwlpGU8HlPkPqKQ5xw5coxms83mzctuvknMa1/73fz5R/4SU7pIzt6FPktLC5w65tbq0jWJtSmliNmfuL0+GWbcc99JXv/67+WZpy/7tRqjZDRLfyvriuFwiJaKyod2szzjO7/vjWxc3ODJp5xHP0kiqsqQFT5aKA3tdsLm9QF4z7lUJVUmqE3GRDsv39/6wb/Dxz72pwTKPfstP/4jvOBlL+Yd/+hneOVLXwnAT//DH+ZtP/T36CYnOXXMLc4XvvIIL7zvIdIdN869/i16R2JW13qMRi5aaDNBu30nw8Y2cduNK0pC9ic5a8suytBrLnH5ygaD/Zx2wz1LCosgIjc15y9fAODUiVXS4T6ray5a8MqHv5N3/vZvsbDSosgdD5vKgFBEUYvUOsfdweiQKIkY7Tt5WvQVIgwZVRM67mhglGsuX7rBsePLAGxe20HqkKrKsdJdlJsaUxeUZT1LLS1qwdVr2zTCth93QF0ZrJEI74UPA8E469NtdglCd4bsbO0yHhWzNKE8z1k7cpQrV67N/qakJIkjpBRM9h09zx+MOba4xMh76rf2rtCNGiw3Okx8BCMb9jG2pCjKWYrx6TOnUcrOIjSXLl6lqCqCUD83042qygnj6Hb0rChpxs1ZNEQRsL83JgiZpT23Wz3C2HnEk6aP0hYZaTYh8Ge7UgqtApIkoS6mqcMltjZopfjyF54G4Fte8wqkKMknbi7ttubCs09xM2ojcOsQa5fqWeYVUjqeWjjbQfYDbt645WgwarC/e8Dy+irLi24fX/vyNofRMXTQYnDg6BkoQTNZReB+HznSIopTBrklSdxc+gd94jhGyALl91FZDnn8c33ysY+0rhYUZUnTKkzs5ldVkraShI0l8sxngKQZZal5wf33u/W88FWSVkiW1+zsHTpahUNGQ4WW/pxrN+h2YoaDFgf77txZ7q2wL24xnuQ0fbZAXRsmY0MceblMRhCOCE2PYuTouT08RNaSVhKjp9HXSNHtNAkfcGmlbF7nZNjm7F0P8alPfBaAZqNJL+5xeukeFk+vAfD4419hc+P6TC6PhxWdTpta3E6Jp7bktsCSw8jtkThsoZWgyvw46wwRZ0gbotU0lRdarQ5VlREF/vmDMYEKCX0Ux+iSoigIRAvPdkhREwc9Ujua8bAxLppjfXiiL
GsfVTYuGgnkWYEOFI1G6zn3GZKkOUuJGwwGtBpNJpPJ7JokDkknObUpZxGvST3xz3V8sLy0SJpm2KrmcG8XcBnC1kjG/RbDQ7c2RVlRiwNaHac3dBebDAZDxoMREzFNV3apbFIyk+nT6JTx6dqrq6vkeU4+Sen1XARYKUFtKpRWRD6bRWsXNa1qd18ch1gMN29u0e26uWSTnGc2bxCIFT8mS15uk6aawPOZQKAKiwktwnraVYZKC+Q0pVM1qVJLHqZor9dmQBPQZpncB9CTtuDepXP88ec/CsCr//4pdrYLfu4n/3sef/SrALzvU/+Og1IS1BU6cvpFs9oh60cUfZdlVDRyel3B+KQh8/Rd6TQ4OOyjdYCZpZ9rsqIi8nIyTyvChQShDLX1WTiThHi55vgRxeSGe9Z+6fbm18Pz0i3QDVi8Afh1QAHvstb+/Ne7NkiEXTnnFilotckKQ6hiAukWqaYCWWOLbJZ7mxUlRWWpa5/qllcksaY21bR8AxtppIVQaIKmW4A6q9m9voMIbuf+tpWguR5xuOMMovRQ0mkFsFghfI73A2dDyCMCeQSA1vEuhwfPEEaCaxfdOEdhTJmPyCvLXScc4auNmK2d63SOuZ2fNCT9zYJ+VCJqrwQGNUePLzA8MGxvOA7TLUFV1Agf1QvDhFCFxDpjYcGl8ly4uEmjmdARS7ziW14DwI/+6Jv4Z7/4Th772scBWD0WMZmMCaykzhxTNENJHgTsTdzcBkNX56JDjfC5qcq6PNSJCAh96k44julvl5zwqRJWV3SPRqSlpfbKnNYV2zehMg1E5A6dSICoQO64Db10r8LGJWUmqPx9FVCVIApIEncQoiqyrGB93YWCX/Gy7+DPP/QBCiFodZ3AKEuLqSdEoUZYx0OTLCduBEzLwOpCYkRGTUHsFYu+FDRExHhzGzF2xsBC2GZQlciJu3HtaMSNDA4GfRaabuynXvwgVhgO+rt0/cEUmhH7o236A0ffwHRIGpooHiAyx59pVnM4qmmohPaaO8BGiWGytUNz6lhQkiRMaIoFMm+cT4ICO56QXhNUOOGqVhLKUUXgaVBlOUW2i4wVderT7SYR0hq6a5pJ5lOxJpZWAt2We99kXIINsLpGeSfF8FCxeDrhYDKk9Jl0QkItmgSBG1NRG3QFYauN3XFjOn5XTBpPqMtFtHT0lLagoiYK3TiXmksUw5j+zT67l919dVQTRRHNplu7qqrp9/uEQWNasoOxBXlR0G61WFtzh/rly5cIgmCWDmOMIYoixpMhJ3veIKprChkxBsZjTzsp6XYW6HS055+a/f19zp07R+kexc2NQ5Jmi/5wl4WOS+8LI4mpJdeu+iwAkRHHDVqtJfYP3GE5TgccWVrj1t5NYm/gSdXAqgIt3F442As5ubZGEdQ0O+6w2tvYpdGQjMZjdrxTpNfpEEaQZtP0HotEII1lbc3JoIvXr9JeXkaVgtHYOSmUsmh9O40tSSJMHdBprZGXTnHLy0OoA3QoEMorq9ECdTWgLJ38WVn/Jn7799/NYZGzcd01eS32M/7BT/wQb/uBN/LRTzj5omOBqQUL3vCfTFLSTGNsztG1uwE4snY3GVucf/aL3HHHi9yctyy7N66wvOwM2tLUFGWf17/+9Tzy0U8DsLVxgzDREAkK71TLBhVJpAm88rHSvYOb288QhBOMP9TrTBJGmvEQhD8MdBRj6hZB6FI/GzqhkDWmUZNETgapKmdrM+clL3Vj/MrjFzB5gNUjjHH8mVUZQa0JggAdxp73KtbWl7h1yyn1ioA8TxGyIvdOvHN33Mt3/8Cb+Wf/5Od5+DWvAuCrjz1GbSvC2CkxRVHSbLbZ3dkniaa1TCWBFkwmE77lW18HwBt+8E38zi/+OksnnIE5YkKxs8+zX7vE2p2nAPjbb34r7373e3jNa17FV776ecd7/UOkCFhacnx36dKzqFBTZBmpNxCCICCvK6wQM6VTq5A4kKSj0l+jeOvb3sKfvPcDHBw4x5eQFVpHCBTS1xcb62qU4sidfePxGCEUKytLHHgjIq9Sms0mRVVx7PgZN59sQG8p4saGo+eJ02c4ODjgcCclTvwaFyCqNlGrprfgnq97AZIa7XX6pe69FNUuK6sJG9fd+648vUdWDtCBmdHh+tUbNMIuw5Hbx6997bfx6COPEsaaSer21eryOoiag/4OAU4G3XPvOZ65+Cn89Cjyis6yJB41iRO3jyaB4vqzEUdWI7b2/B6VXYQMsN6RMZlMZsrytDxNiQqBJo4W/BpUBKFAi2RW73j9+h6dZsTREw0Oh+7Z/UPQukUU1X4NKqRwtdJTQ3E0GqFVQI24bVw1I4oiJUz9NYe7PPDil/PjP/2T/MFv/lM3v8ObiO4x4vAIewc3ALh58ybdhXgmXwcDQ4WitgUq8E5DW9JbbqOjjN1bU6OlCYGr4wKoswMqm9Ns9AhVMuN9ISvSbEDp93+32yCgZjD0aeIG4qamKApCvTjjV8uAsrpd2zetdZrqu1pr6romy7JZbZ8Q4jmpod4hIAO01s4pDygtSNOUKAhnzy6ynKLMUEr5uhznKIniAOPrFqVUZGnha3TVbG0qk2NrQTtxcjBqBNzavknSnqZ1D0miDloJxuPRbJwYgzFmVpekVIC1lrjR8nQpfV1iSFH41OuiBOHq17ScOikFZVUTeSbWKiaOIu6//xyf+MTHAHjwoft58CV38/4/fQSA173u9ewd3ODSpUtsbrrWCqOBgbpExgrhz9EwbjE2EyJ/kJdpQbga0YgEB1c9fY8YYiOIAoHF6Rf/9Jd+i1u3LvNrv/HPAfjP/4s38ZlHn+F7fvRv8dgjfwLAxx99hCIPWO3dz9KdLknuSOtJtjavsvmMWysbpeSThHFZYXadPh5E+yStiMpYKulkXlqWmCrGWHeGBVKhWi1sntFadM9+3Stfy/s/+YccX9WU130/heVTXP3IE1+01rpc/r+C563mylr7AeAD/7culoAXUFZL6txQYzF+48lAQaBItKX2leEr3TbDfkqj5YTPwsoq569eQIoavEI7yitOHzvK+soal67eBJzyFijhqpkBLQr6haXagNorRK3VEiNzGnVEETvufdnD5+isWi5fdYfJ+/5yk9XY0FER7cQt5qRwRcRVBZORU/BOn+myuX195uU4ehyOd+CZS7Ad+/qYLGA4GBH3lln2xf951ifQEuvzV9PKIJRCyzYPvsh5kvPqS+wcbHLyheusnnEM/Nv/8s945tIF4rZXqPMUrEQHimriiw9tzTizHAy8IYUikM4JUvtM0cpIkqBiUmaznFYlBY2mJox8zqm2NBqwdz5zufOAbEuEMZCn6Gqao18TxRKx6Hbd5kVYXmijkwnCewuKzLii8NqwsObmUtWa6jDn5FmnRKRZRShDVJljDnxd1JJmmA45HI1pN9x1jbiBlpLSF2kaFUFtqQPtty9EgSYSFdV6TGjbng5tFvb3OfCu7IMAFgwU7RZF6Mb+9Mee5AWveDmnlle5tOtqM4KJpa4MoVfu9i4NGBZt7lhfwnaG/oWwcKZgcm2AzE47
PosDwiXP+LhmDEHYpK4tjYZT3MbjCUIL0nJA7GusbNCku1CRecHd6ayjOopb2ztI43O+tcVKSVpmxG3P172AMp9Q+MLfWgmUsoR5gmy5+bWaBWmeITI9KzYuqhoRpFhvgDVaDYzKyO2Iri/ltKHzA4XJAOkPdi00ogDjI3wZJdVCwNIJxdE7Hc2/+ugITMrujlNstNYsdtfZ2RsSTqNbdUUUJYyGE/b3nwKg2+6QZdPVBB2G5HlJGLbY9x7vXqvBcFySlhVJPKWzYTRJsUZ52nW4/74HSCcTvvS4MyKSqMtkcwMVWirvYb/zzjspcjFTUMp6TF4eUPcLul3nuGk1muwPd9BWUE3z/+OCPM0pfH3H0fUmkSg4fuIsjz3t5jIejBiPIoLYcvyUc5zUmcTYjJVV93trext8Q4OpIiMR5KMROsxnTTWk1FR1Thj6xhulQAjBYKT+gzYAACAASURBVLQD1tcWxC1qmaNkTJo7OTHq38CagEbPPefa5qP80A/+CA+86AeImy4K9+iH/xypK1700hfTWXV0+MN//R46nRb73psexzFVGpDlmtwbsNeuXSKJDevte/jUB92eWV5eoSosz553Ean7X/wSkmab9ZXjKB/JneQFOoyps3wWAep2u0hrGXhFOB8PaTRjhGlQZPtuhWtFVYGlQPua1TLrYwJDmXvvb2YIooTJXjarayk7LUZpxpVr7tndJc3mpTGdlqTytGtIS1EX5HlB4JswWVOxubmJF4GYOgUMSdQgiR09s6zk0U99DhUmaM+L3/Fd38O7//iPWPVRByEqjDEEQTBr3BDHMWWZkyQJ5y84Bbrz4StcvLjL+etX3H2F4IWv+Cbe8Qs/xu/+z/8CgNe++e3sVxGhMuTCRTWvbmyirWQyU9JKjFEuSvtcJ2ttqUyN9mck0hLIiFP3uaj/hfNXePb8DYpyRNPLpMkkw9V7S+pp0axwjROmdSxSapRSTCYZLe8VDwvNOBsS6IRJ6nhoPBnT6jRYO+L2zJUrV+i0F1FJTWvBG7nDkr2tQ1A9+t4J264yFtZjWv1pAf0eo0QTVAWlcDK9tdRBD53CnE5u14vced9ZXvHNPwbApz/zMYblkG6QoIUbZ6AT8iJlsXOKonI6yI2tC3RWAl70IjfOz358wNpRS3mzYM07DTYnE5ZWU/a2odNzcqKqNLWtGPqaFiFjJpOcRkMSeKevKWJ0EFHWzigsco3KQno9Q6Pl+O5bX30U6hGTScDejlurJJGU1QjjMwoQCq0FlYHh0NG3rmvCMERaQeX3/zhLqeuayjoe664fY2c05Md/8qc403W0O72+yNMbmxw93eOV3/xaAN7/gXfTbS2Qed0iaRQYq8kqPaujtXLMeK8gigNCHyXO84xEWfLCjSmI2lQ2xJTGOdMBbMGrX/VKnnjiK6ytOQfr1SsbtCJNq+nWIEqabN7cJgpDrN+jxlqE1CRJOIsAF0VBlmUzwyYIAqLI1R+VPtOiqlytYxjEtJrOIWiMYTgc3o5SNSKMMYxGo5kBJpTEZFCWGa2WM260DhHIWQSxqiqSJMAYFx0CaDabZOWILB0SeY9Ao5mytApp5sa0trKItYpBfzwzqgOlfLMJiZ2FnA1VVaPltO6sQV3XvmGQPxsCjTAWqQSJd+gMh0OEUMT+9+LCKlEU8czTFzE+Ml+lq2SjBZZXnIHy0Y+/n+2tQ+JEM+1k0mi06JdDAmFBuf0+LgZIHTDK3HMaS21OPSi58Ik+ouf2zMkHuix0BWv357zu3NvdfcN9skXNn7z39937PvMZys5HeOyJT/DBjzjHW9Jd4E3f9noOb1nWj7oz8oMfeYTBYczaWfe+K5+XINo8eP+9PNN/3FFJtChGJWVZoBpeTmRQmhLtMyaStqtbzGpBpJ2s/uoTj9KOQhqyzcu+3cnBd37gEf7P8NfW0OK5sBaqwnsBJiMQAa2OZOKVpHzSp9lso2qofPSlshWYmsobW1EQsNBsMpxkGF/UF5eW3Z0DxqOK0ne8CUJNa7ExE2J5IAjSmvFujk2nlXFQV5BbjXfC8dv/8gk6HbjrZY4Jz92fsH89oxgWxMe9wnU1QJsSaQJ2B86iv+/sffzET30LI/MFAL70xc9y15mI/f4CA1/I3GgYrInIR33OHnUpKZeeMdx5T4OvbvkQpwmJEsmkqvj3/7uzWVdWlkjagmeufpnHP/M5R5dJk26vnBUzRgFUVlNWKdPmg6UUDMZmFhkQwrXAsjVYzxI1grJIaAdjRtecREzrjKWTgtJ3BuvELURukTKg8i6Ug32D1BCbGOXTyLJckGY1bR/KV1KwcXXIsQcDlN/A6aWcat1gWzWHW96D0FAoAjZuOBqYlSZKpQwO9ykmzshttM6w2F1jsrNJmjnFqRmFCJP4DkgQhhItQqq6IM3dGkcjSSgEdbMDu05o7W7tEYs+xbrjn/5hxMn2MqIq0L5zjrIp5x/5JA9978O0G+66Ub9kPKgIS0e75UaT4WhEliZMdtyzM9Fg9YVgdcm1bZf6tdZoYVSPdOJo12p2iLoVygomu97Y2M659OVbLKwsER93Al/oDvVuQbPllMl6nHNwMKHdbTIaOW+MDRWmLsgnElFNjakxQSRnHv44UdgqQtURk8o3A2lp0p0SW2ikdusldYkUEuu9evtbE9ZX2qTlCDwNrDbUNSRScXjJRx4Tw/LJCYHfRKLISRJDLgJ2tsZ+bTRpPpwdZoiassw5dnyFwdBds9I9zf7eDkFgCPyaGmOI45jMR3aKovSpG5JB6g/nwCCCkFgF2Gmah7CUpmToO2Vub15l8eWrDAcZ3aZvMFErFhaWGI73GY+dAnTt2jX6hxOi0PNBVBIFkq1b+xhflJ3EHRrdHqO9XappgXcNiY5JS3fNQntAdzHiC194hLT0zWpail67SZqWlH5cvd4Kt7YOWWk4Ja3d7LCzs4cQiknhFCCtNaI21JWeHfTWWmwVUhbTzo4KKDG2Ipt2zsoFi50O1ze2eNmrXw/A9/3A27hx6QKXzz8GwOe/dIGtGx+mXd3i7/3UfwPAF/68T7fVorN4gquf+BIA42FNFJUsNV8MQD+9xOk7emxcLdg9uATAq177MnY3JZeuf5G3v/X7Adg9fIqwey+Hhy48et+pc/Qnh/z8P/llfu3XfxmAz33ms/zRH/0RC4sdtPfyp+khg31o+ewla1NG/QwlIQqco60kJUszGo0Wae3WL9EhWils7puyjA6QjQrb0eTWGxEDQ0zE3i23h87e02XQrrFUMw+0JUDKAisEppp2SFSuwUrl1lgq1/2ukbRJfaricDTh5s0NSlPyyCPuUH7Ri15CEHRmxnIYxs6jbspZ18877jhDu93mqae+xn2+sc9yZ8Q9952ke9z9bmG558FXM9oW7N1y587bvv8tHD2ywKWL58n7jgbdbg9TZ4xGbq8HOsZWhVc0ffMYYygrw/rRtVk2QiNJAMuli+7ZdV3y8U/+BWHQQng3dZwIqhrqsiBKfHMc5SIHg8FgRqckSTCmQoXeU05INaoINNzactGQqJFw5fIG3/rac56HN9n
eTjl9do3r11yh/2J7icnwFmme8uDLXXrd3oXHQCZcuuLG2VwYEB8/xdYTE/qpM1I2bu6w3DrC7u4t7jnnvNLppGLr5oA/fe8HAXjpy+9gc+sZbH3ItWcda2g14a571tnYOGCU+45wmWVRt/nUX7r5VbLBxuVFEnvIYtvN7/Bgl+V1QVkolHTGVWp22T8c0es6pVCFgsWFDuNRMVubJC6oakM87XYpSpI44PBgwPpRp5iKMKOetAgCNUvZTJodWmFAlLizaGd7wCR1HUXDWaTVkKcT4jjGem+9UorFhQVGXr86unaS8eQ6y3pAs+EiiuvnXsYTH/pLGkmbr15yKZy7wxKinMJ3aJsUOe2OpppUVLl33gYdqAW2VMTeaZhNJtSZnXV2tACmIowS2t5gPzzMeeSRTxFFIYeHbv22tm6SNmNOn3S80VlMqCnYuL6L8ftYhTXW1BSTyX/Q7VFrPftdliVpmromELOueLC+vk4UJvT7zuibNsUSfpz9ft81d1EK/B51c9TEQTLrYgzTrqFe7wxCAp0Qx/HMwGu32wyGXbLRBpF3sMhgmyipUdKXOIwNxpZILB1vuFlryfPcdzW8fR5KCRPf1GtpdYVBf8RoPOChhx4C3Bl2sLuD1ppwmrllFSpU7PedLpFlGZPJCGtrjqy7CO2t/iN89l0foO0bonS6iWswZZNZt9eqHhPrBtoOqbTTYZXeww5q7njAZX+87M0h7/uDZ1k5tcCRM+6s3SvavOCbQh75+JM8uOIaim/2/5TxeI3f3XBpgZ/58O+wdGKNj3/yfbztTW8C4MUv+Fbe875P8Jaf/AHe97/8awAuPLnPyprg5lW3r3ptTR7s8uUvfo7ER1FrUdI/6HN0fYVh6rJErJFYIVFe3zl+6k4uPfFVpAyZFNc9v0haLeh1NO1p1kQvZIPndh39DzHvJDHHHHPMMcccc8wxxxxzzPENwPNWc/X/BFFL2c7ZqSUtafYWqMUYYZy1eeZoj/H+hIOtMX2f0rC4mpDmBmudh8hUFiVyrM1IvZVqygojJUoFtHzBPEBR5Uy7xeqGJU8zcmtQPrRd5ZYIhWmGKO+RycwQm8FCx3leTq6f5aEH7uHxTz1F2nEejP7uPgfFIWXlipoBeslJfv1X/jl/+RfOCv/QJ/8FSVKjLDz5Rffs1lpO0g1oqYAHjjhvwdUn98kTw6jpvBxZOSAb1gilke3pR3NCRBlCIyWaOFeuNduYytJuu/nWtnCRiqpG+MhDiWI/NVRimndbIo37ZtS0iYFQAi0aqGpM/4Kj5933LiEXDqm9ta4DCTqgFCWjgU9ZGuUEKnAFksZHzxoVgRKMtt3DdWzQOYTLS5S1j5ikOamAxcUGOvJ1NMKyv1/zwnMv9fQtMYy5+Ox1Ml8XJaOM1WNtGr0GxrfIVapNqBVp5jy2R9fXSMcwKvZ52ns1F0QHPdile7JHesN5Hg+zmrUjCf0DX0tVGMJlRZYl7F103spT93dYuU+xswuNwHmORRxw8dpFrI+OLugakeYIGUFwO4e7Vim2NhSBr78jR5Ztth937+uuajhbQFZT+3bNRbcmvRSihiHLLz8LwGBjj4MbKafPuftuPDnG2pzOGc0wdfSUuO89jG4K3v7Wtzh2MXv8bx/6IEtrPp+7HEEJET1s09Eqt4I8lYwGBdLI2djztCD2vJgONFG5QJbsc3zdebeTXoO8tug64tJH3RjqQPHA6zsUbRdpSZREyAJjE559n+MzFcUU5R6xbnueqikLSXexy/pR51mOkxUCWfKVLz9Gmk7bwwtf0Ot47KB/iNYBtq4RvlYEJWn4iE6aOs9uu91GK6jK6XdFakKt0IEk9S34y1JR2cp9YsCnqBV5ytEjJ2dtrJUuePiVL+eRT36arV2XchybgEklKct61kZehhnjrJp5TF/1qi6HgzHZYIUL5x1PiVAijAVbI4Ube5IsMU63OXPKrXn/YESYNHjJw6+YFY8fWVnml3/xF4iby0T+OylCWurqOd9iEQVFUbCyfIStvouGhlRoG1HUKTZy3sjF3hn+7R+8i3vvOA3AXWfWka2EULYY5T7KP57QbpQcDodE0Yq/TzMcZfz+v3kvAO9+z+/xR+/5PU6dWp+lfu0dDNjZmxCLgl/6BedlPMwv8a7ffM8sFePosVPIMmM82uZvv/0fuLVaSPiRt7+FlaUWuzuOp97+n/0QYaPNu//wXwEQ6AY6qMnLPs3IpQWfOLPOxStfIYwDsr5vy7u0zrjISH1TgaXWAoeHh8iqz+me2w9lCHsj2JumS4cSIS2ili4ECUzSEm3dGjdbbo3DICLPa8LIy9cqpSoiej3N/p7fj0EDaknUikn9GaZDRRKHs2jz0tLSrPX6tEA9yzJ+9mf/B971rnfx8MPf7Oby4hdw+Ow+577JfTrkynbF937X9/Jj33o3K/e59auGBUU+5mBvD+F5ajweE8aaspqm6YWEoXQpYrNvzFSMs5SVpUUGB86b3W21ORikGJ92FeiYqqqoze0W1RZJUaaEOiT3Yy/LklarhbW3G3YYUxHF4e327CqgrmvXpt2nZy2tLrJ25AgNX+fa7bX40Ec+yYmzx9B+P+7d2sXUNZNRzYMvc3QQIoW4weEzLtwUhore2RUuPzFiuO8yHawRBN2YfJLx3d/lPrPywb/4BIeHm6hg2s48Rqsm3/mGN/D00y5C+7WvPEUctUmzAaEvFWi2e4yGKQWOz9/w6m/j1vAmm9eeoPbt78uoRMoEYywT5yinKMc0Gz2M/2QMMqeqMhrJAvgCelul5OUhoT8rdGAJgojRMJ9loEzGNZImrXaDYd83j7E1K6sLxL4N+uWrV0ka/htA0+Y4ZYm1lnazM/tWWl1Ds9Gl8pHkw3pIEESsxjFJ18mIUSEJyoB+1meU+UZUNkYoZryhZAiiRlDRbPT8+wrKssKi0Xoqj4fUVUHo5Y8QitKM3Xc162kqbeE+YRAFrmgbuPvuu/hHP/PzfOozfw7Ar/zKr7O2vM4oPZilACICoiQhe07TCaXc/Kd87tIBS+q6nt0XBAGNpEVZlrPPQTjay9k1ri7Ope52/ac0Dvb2Pb+Fs8jjtL5rel9tXC350tLyrLlaURQUWcbu7jWOHXFpj4YJR08sc2vT0ffWzT2EtEgVzXS6NE2xxqV3Tuu+XG1VMcvkOHbyBIPBgOHodvQuDALKMqeVNBiOb+tw7jtP008aWRQCpeUsClcWEssE4fk8iZssLrX4vje+md/4jd9w85UGZSuUhoFPiddaY8qKltdp1+9u0W5lvOLVmvf8qvuUxwtf+d384E/fy2/9j/+ebMl/QqV7CbVzDyfWfFv8U7vceLxm+8aQsy/8PgCWuh0+9NFHef13vpG/+HduDI1eh06Ys3XdRRsj00S0axo6Y9D3OvpBRBx1maQHyMjzRhTRTzOOHHfjvP/+B/jYn32IU/fcw+u++0EAvvyFrzA62Ob6+X2SwEXmwtWC6589/Lo1V38jjKugIWzLRZ6JGy1M2EDGBl379KLhHtVOybCERnP6TZIUITQSX4ciXBcsnZSMlC9irG
tEKKmwBMYxdLPVpRQK7TewKIdsbmWcOrGEjp3iVmaCncMRqqrxn9ehtbiIUinrPnVo5cwibXsvzeVDzt9wHUw2vtpnT0tUmaEqb+BZxQtfcYRG0xUa5+mEa9cf53Q75PJTjnkOxBgi6C4rjoy9EXjhkMEpyfqiUxgoLb32ES5tbzJqeoVhdY0bl7dJxYDxvmOUbjsklGqWxrK8mqDKJQ4PLhL4GrbBoGSiNanPbY4bEKuQycjM6pSC0FCagCQo0QcurLu63iYNLxBG7gAYDgRiocLminzovyNWxlRVhgmYfTeg1RS0IkhT33lFlIQiQY8lBz49THUVa3cJlDZkXhhs3ahYaHVpBS5l7IlPb/Kil6yzu79BlbvQrOqNaXUrDIbB2G1+qRZYWV2g4cP9i41VtO5ya+8p9vz3qiwBi+2AK9s3aEY+RH04piEDmtPDZH8XddglWogplU/r6h7FLHWRk4vs3nIHdrjSoWkTdnZdKkGhalaFayZQ1tOuOBoRZYQKKHx63y3BqIbxwBeT1zGt4xHqaMbY51f3AsXwsmR0GFB6BTq9OWD9eIfuccdjT39+h5N3Bgy0RUlfWzSuyCJBcTDhR7//7wNwz93n+Nlf/S85epcP5ecCWTbZuzmk2faG/pJi91DQHxYsNh2N00NLnIxIfW1DYBXRsEEVyVnx+OLpBN3qs3dZM9z3Q9ga88BLl2jeNRXkIVZkLJ42pE84Z8CH/nif1SOKYd8JRElFI16gIqeyU+NulVPHF9i4cQ2m6Vm2ZjQazDoR1lZwOOiz3GuTm+nhnIEpUTpE+pSYbquNLDOMnn5HKCOJIv/dj+mHaSVh4gtcvcG+u3eTE8dP0fBdKl3+fsDDDz/Mv/lf3+We3W0jRZO9dI+GH7sKJIOiJvDpaFpkNFaXWet1eOa8q/EKGi1MHUJZE4Y+bbWpGQ1zzpxwgvH8M5dZXF8nk4KfeYdL03vyy4/xB7/zu7zkm+/n2ad9ow2rCUJBljtjuSxL6gpOHD/D7jV/TbfFAy9/Bd/+6tfw3j/4LXfdWGGDU/zqO/8nAP7hT34v16/v0ZZN9sau5uqOe86xu3kDHQbkB+7wN1EAOqDXc9+hsmJCt5fTTNZ56sJXADh7TlEOUi5dtjx4/xvcEDqGXq9Ft+Vk0nv/+AP83C/8HOvLPf7kz1x+/Ft/+Dv4uX/8Dp5+/GmqwvHHg9/0Kj78yY/zjv/qJwD4zd/4HRaXWxRFcbtOMa9YPy3o/h/svXm0ZVd93/k583Dnd99cVa9GleaS0IgkBNiAIQaMIbHdNm2MnRXHsRO3s5KF0467nXbsdBK3223a8zxjaAO2mCWBDAghUKkkVamkkmquevN7d75nHnb/sfe9JdZKZ61eqzuLP+r8o1VPdzh37332/g3foZ1y4VUlmDP0ufPYbTx3XPoUVhyTpMx58PU3ce7ZZwE4eMshDt90J05T7lOPP/F1+p01htvO1HtHCJkMoGvTgNJxHEzDI1cQxCSNsLQGmhVRUV5NO4MheZiyfGAvKF+dTBOUiZgaXE6CmYnvEUC3O+DAocO0Wm0WDt0EwFvf/zAnTzxJ05WCIS88/yqto7O88qkvTyGqFVdjd6tHZ2cHXZPfNzvT5tLlHRYX5bOXi4ThIKZSqUzhRUEcyUAtDmgofqFj6HSjfGpqn6YpjmOTptk1488io+JXybOMbKIymCS4rj/lc4ThmDxPsR0LFISrLBI0LAxTkBdyX1ret8jhG/fy9SfluWpoOrV5G8NqMBp31T2ZxMOcMOtRV6IzTs0jFzk1pcbomTBKOlw4v8Fi/YC8d7NLEuaYepODeyW07Nz5Z1hZabFvnxyX86/u0NsNqMzv5cGHJOTwpRdf4Mr5y9iOQOQqSREjbLNBpaVEEvIlUquD62QkQp5rUViQ922oFIw7ch40HDzXJknUnqdJuFiaDWi05L0HvRDLrDEcyjOl4tepVptsbmzTUmbLujWk08s4dGSJeCQ/q7M9knC/Ugb5huUyGpQYVj71NvJ9F9d1CYIRtuL/BaHczw7sOSzXnT4gudJlf3OOWHGEdKfKj/zwD3D86Sf52hOyWGx7OlFWMFZCSnkhk3bHsimF8pnUBIOxgV0p8BRdoSwsLO/aOqfIGY4ibNskVx6kvu+jaRrzc4skiRy7o0ePsrETEqh1sHr1LO3WDP3uAE2pS7u+h657xEk45SmZpklZltMEs1qtUhQFQRBM/6ZpGromxWpeq+CpG0wFLZIkUuOofwvksCgKTN2YFhuyLCNN46koTK3RJAoz8lJcE8LIM2ZmXFzDQ21vlCIiSrqkqfxsz25QEqCb15Q5dUN6Wr2Wm51lGXmeTxWSDUXBiNN0mji5tuSLVVyP/kgm0Z7nous6seJ4xXGKY3tYhjaFRJZagW4UOIYShbDBNDziIEO3FOQXl3Q8xmhYU2Exu5HTPGxiavK+g1GANTfDe971o7zroMxHPvvEcWpv2GXzwlfpKzXLhTmXfbOvo3tFwoT/6uMvs2dpljQdkgSyEFb2YoJGxLzlc+6kFL7RFk2E0HAzOZ+9qwmmbtGa1bBa8rdsr0KrsYc0D+kncg3lQicrChYXpVBUFBbE3R6tpXm5iQCtukXcX2O0E9NcVGqoSYXL39j59k6uKjVdtG+Xi2Gk+TSrFnaREyr+TxgNMPoJuiuwlRRrMtIwiKnNyIEMRwZ5WZBnAqVPgKiWOFqDbDQgVRVur+LjiCqW6o7srAYEQUS97lMgD6ajN9/E1StrxOGIRk2RFscujUWDjlLz8lyT9c2Shb1NVpbkIXfl6il2BxFOQ0NXHDJLd7jn7mMMlZJcPxwTjS6wtHeO9S25sVw638F2DJqLBVV1nxUBWVYSqUN9b3SAva0mAwqe2b4EgDvv4JYlhZuzsyYfDs/IqJvGtHKWliaGVyHejTF78vvyRkno6ui+3Iwqlo4mBMPYZjSWn1OxNTQbdMNAKJ6SX5a0mib+ggo07ILx0CMlJB4rXs3YIsxSbEMjU1KXRgG24yFKRSC2LeYWllm9ugOlUhS0aswshWS5SWdVdevKEL3WoKI6ZaM1jdatdVgLsScGhW5OmhsIUZKorkaW2+iOy113ynlpOC6jjsaePS1Kpbj31PHjVNtLWI7NUFU19Rz0MGY7UwdjrtF2qxCPSVUnslpZwLZMuuUunjpAyzJHN2yCkeI7YSGKkhlLw1Ck9yCMwdIwbA1DBfHZSGe0LQj78gCoWnViL6Dl2XiqajOeiUiuuCSnQXPkxjK3p4Z9NGC0Iedz91zE3E0F4zjDUW7loZbhaGB5HlpHCTy09pDVL2NNnqskRaQWD93xEKkalxMvPYNmVkiKgIo6VNPBAdJwRE/IDk3ds/Bim6vrBaZaG7c9UCda2GHr7+dwlHz52tYl5hfnuf+75EHxpUc73HjHIjfcbnH2tLz3E5+6QrvdmB5wwTgCIZQq6LWuX16E0npAdSKaTRvb9vGU9K3u5pw/18Wx/alrfVFIPH0cx2hKxEPTDNm1Ut/nmeAZDp5nsau4IbbXpN6o0N0ZkuYTFT5HBg7aRKQloyhyk
uSaCpdp2PhuRcrrq6pMSY4QxVTwpSwMZmfaZFlGr7+p3lcjyzJsL6emyESj8RBDN6cHcZ4YoOWUIsKxZTCZxDmFts3+/XewuS4PItPUCKMxnhLZiZIMITQ0zUBkcuxc1ydE5y1v/e/4pZ//KQC6O+vMtBf4s9//IgBXNi7zyKc/zC23H8NRGPPtrXPcfvAOLl89Q3c8SU4zkjSk1pTrdWXPnYyCVaIkp9+XnbJKw8Wdz/mx9/48e/ZIc/WP/dHfUJ2v8colKerxoX/5s9x4481sayO+/OcfAeDQbQ/glAk/9oH3s7xXjsva+jY/9+E/hQ2ZJH3644/w8sZFmq5Brn5fqs1hlznWTEx/R+7pD953L37bYr51QL7vI58gDTWO3XcbL70o7Tbmlg7zw//0J9jsyHX+yMf/EEcP6e3oROk1JakkydA0MbUKcD2LildhoPbXogwxLUEUCWbbEv8/jLoYeoxfWZwGoqYYYphNPKU9MOgPsR0LQ7cRupr3XBBFY0zTwq7JAt3e/TeRlcF039LiIWm4Q5oIBkOZ4N1+260sLNV4/NHn2H9QVmT/0Q8+yB//wSdptmSV/OgtLZ7++1fQXtNRiJOxFAPKMpotmRhatizITQLOIAhAqQlOSfaWhWEYJEkyDZgdx0HTmf5blPJvuq4TKQUzU7PQBKRFjO1e4wRZtjFVeowCm3rdp9QyxmqPbc82cawma2vnpvvNTXccXyubKgAAIABJREFUYm1jFdeWQeBse4Znnv4Gi+1FZpXk/9rqBgKLKE0oJhMo74Q775S8wQceuJ0TJ45z/uwZBqEcl7m5Nvffcyuf+8wjKAAIUaDTajRoLcnnY/XiZebnNHKRUSh1SaGVoCWIQieNJl2bCNPwpiI0Ii+Zn19meU+bXcW13VjfRtNlog0yoa3VaiwuLtLryRgkSSOqtoPtNNnYlO+r1goqlRELbZXUb2asXkmIyvya+qNtUZY5SZJdQ7gUBUcOHSHKFYIoCFnvd6gDtRm5Xhptn7e9+V2UluD3/refl/NQr9MNYaDOzH3Le1jfWaXUPFAxULXWwjAMdne3p9+XK67fROzEMAwW55pc3eySKBVjo5phCQ87CanMyU75lY1dNGIqKnkwTZOkKNizbw9xoKTmt/oURoqhlegKilNikxQlFPI5dnUTbJMYaCj+n2VZNJtNsiybdoAGwx5CFFPDcFkwcCnLcvoa1/WJ4xjXdphT95nlJd3e7pQLOzc3h66ZdAf9Kc/U8zzyJGV+rj0trAThgDRNlK2QlI+XAiTeNNlxHZ80TRHaNVXDopDdyNd25ooypxCCalWOuRCC8TiUz2k5sVUxZFNQmREXZUpZChzXn+5vugWYFkIJsPiVOmUZMOxkqDoDtqkxtzzP7M2zWKqjf2VznWq7T65iShODy5c1jrTv5m3fL7lTB4769K7+NqfPBtx8kywe9TcL3v4PPsgn/uLDAPz57z/L4t1Njt14Hx94h0Th1OY8/vNv/AI721d55ZTqVM0YGFpBU5M8ZVNk1OYjklKTysiAVmr4rkealQxjZdlke4z7A4Ti1eUUWJ5Ns97CdOW5MzMfcPEbVzCqDQql5FyWJaOz2X97tcD/N1eBhqXkt+cMj2SUM4xdZtQG4ZopgRVjmtIzBUAjIU9N+h2105k5hlcSRTZaTy64VqWgt5Fy8OgSV3fkhhSPh9iuTqaqeY22Rlm6jIMQtbfz8guvYjsSqjPuKyWpwYBR4mI3FVE1iGnXdXLR5cxVedDva1v4dUEqDDpb8sFLDJ2XL1/AR27ucbGL63ukSUAayPu0LZPWbEGR1YhzOXGXz+jM7bOozcmFuT28yOb6Ama1ZK4tV/SoHBNaNfQ8oaLgNXZhoosYS/2WZm2Osxf7eHZCZqhA1M+IgpK6J9/jOx62ZzPa6lKXt0ld1CithEzPEUrip+LqRNGY5arcQGKjQxCEZIFGnioREVtDKyWZUyhIpWUaCC3CUypHYVzQ7XfJi5CKSpJcPyDJSkbDgsG2UnKbt8n1IaWCF9b9jJbfZFXEuLbcjKp2Hb2ik8Yj3JoMIoYBWJ7B2gXZet57+wP4CxYPPPwwz5+UwdyBFdjZusx4J6LuywCoH6/TDQJW9ssKRp6WxGOdstpgqNoxej5GCINsVOCpxHvl4D5sy+PsafnZgzDBrVQwPYcimjih+/RGI+ICbFt1DFsmCzM+AwUX6a7GeFQZ9GCoquAztQpmM6F1h8vlK3JTbrUzrMKhr1r7WRIhUptKraRQ6j3lsCBKDGbcGfAUidfaoVpzSFVCZGQ6w7WCE1GH5YOuWgs+l8463HZflayUN6a722ycn8FTErnjMMKIGnh6TKxPxCpqiC3B7s6AubY85GYaMN5NOPGofLazjssLj/e4+JzJ3IJ8tms1D9u20VTCaZrSz8p1rGuqeLqGZzUIR2KqWDQaxuSZQacvA8wkzXF9hyxNQUwqihqFroGwyFQ1tNVqcPONN/HlJ78KgE2NuEhw3MpUlhw9YXc3wLIFdWWeEgYZvl+dKmfmmUEqUnzPxVCKnqWICNMuGCZxOOnWCSzLpciU5HAak+VjDMPFUgFCxXeJIkGS5lP4AppOSUmiglAEaMKiUq2TKW+4ghxDa3D54rkpKbsoBUIU0yq1oWmkRYFtm2Tq9+UIGPbRw03qB2XHSdt/EE/Y/NyvSxnyc898hVMvfowzL5xn76IM6nd7mzy9O6TXzVhYkvPn2CFJnHFw3+3yc4TOwf13EicJpyIp4nPrrTfx+KNfZeVf3Y+pSP1v/95/SHOlzf4vfQmALZHyyG/8Oa970xt574/9MwAe++TjvOkfvJn3ff+b+OxnZNJXdeHVr32J//13fhuA73jH6/nv3/MThKKHZcjNq1oWDEQffVSl2lJB9eY6N7T38+CbpNLq+tolLp2/xDDc5j3vltLoZbNJe7bKo1+Q0uWd1QHHjh1hFJ3EHFXU/EVYto4QBpqyATEM6e9Ua0vSfZ4DeQvb7XPDMRnY+Czy9IkT5CJkoa26r7ZDESTs7sqEwbYcDEOjLPKpsqNjeti1CsE4hpHcz0597SLzK/M88LDsqpx7ZQdXm+PKpc6UhL6xfRWh7efGW2bY3Zadx1dObvLP/9kPcuH8JQCee+YCRVFiGClqq6ZRbzEej4nTQBaEgHlvmbxYn1bAC6VoKL3m1N+KAiFkVX4SxBdFgYFxzZOoLPF9nyAI0JUSYZ4nFFmO69UJA/l9llNS96rs7gzUGJTEyRjdTqZiGf1+n4NHDCojh2gkx3Nru8v87AKoqv9LJ1/AcS1qc03Wd+QYpHmBroGuC0wF2Y7inAOHZhmF0q/yNz/8PI1mFbdiMbcg1+t9d7+Nj3/srxCUzMzI8+Kuuw4y6F2BXO4RzZZPmNsIYtJAnuOzMw1GUcGor5Gqs92xHMJxxuy83Mt0R8L4t7cG9HpyDQVBgOPaU9VPqcqos7W1MxUtGI1G7GIQxxvMtGWCN+zGiEgj78vzanlhnspBOHlZTD2RyrIkLwWGZV7rTpSC8xcv0FGqnw3HY2AkaHYVSyX666++THcr
QrcFodpLRJqyPYyxPLkfXL06wDIrCN3Gq0x8LTOEKKhWq9Rq8jna2QkJw3CqNhkEASPXIEj6oDwzm9oMG90u/9PPvI9nvvQUABcuZlTrLpkqloXjEt0QbK3vkGYTcYwM1/AwDcE4VAXlYkQhQDMm/oopepRgGTop1zpXo9EIwzAYB8PpGtZ1pkmgacqu0WvX+UThM80z1tflOmu1ZyiKYgov7PV6JEmCYRj4ylgzy9Op8uBEQTBJEizL/JaO26QgOPH/gomKaIalinaT5Gyy71erVSzDYTAYvEb0KQUkxNFUTQqEDjrThEEzwDR18iKZJhuUFmQRuZqXKBfsuWGW/bfPsP+IVM4bDS+yEesIzyccyUJfpRqQjSOidbk2j95+A3fvr2EvBGSjTwNghW8kGi+yz+zz9AmpHOv2DjPzA0fJIllE1Gowv7DE/Q++hUOvkxBgMYootwziIkHV/jA9S3b3NKXQnAvGAw2vAXWlmByHgjgZI4RDWxUNSi2jzPukqjOoUaXdamDb+6nNybELh+vEoeDwLbNcOKV+X81kCm37L1zXBS2uX9ev69f16/p1/bp+Xb+uX9ev69f16/+D69uic5XnJYnyxnAyGGz3aS3VMT2ZNWaFICpstL7AV8m768BgJKhVFNZX6Oi5hUbK/JyqAvR0rDQnHcNMTXYjymKHKOhjubJi4vgGjqeh6frUSVrTQdNSKhVz2rkymh66liLGChdfgcQtYFjnD379DwF4/Buf4PzFF/E8j2e60lskKHNqtTFlINv2umEQFjWcTDBUuGHLsMnznCwPMRVfzK7n5MIl3pH/fvM/dnnpRMyZlwboynzYSGyizgA7ypkUImbmfbqXSkSuukZRn7yMiFKT2+9V3hu9MXaeTSuv/U4KZoBre9MW7mirxLJtjGaKPSPHpaSg4lZ55vPKm8Kdpb7Sw7QzcpXBJ5HAdqHuacSKdzbYSphdBF2JAWhpxqA/xrRA2bngWSW9js6ob6EUR0mjBMurYioeyny7SdyPMSsauzsKMiI20Qxoz1SnxrDDrS2soo6mIAGa5RDFI7745ZOsd2R1stNZY+/cUQ7t3cfp87Iqtr66TcU1sUxZhTM1mzQZYuoWy+09AOwONymrBntqFQrlMr7b7xP111hSlUg3DhiONbpDcNSC1UoD03dxLUFZyI5MGEWU+YBaQ1Y+9UpE/0WHZq1Od6hMdjc0nNmMqJqx/04l2JFr2KJHMpTvm2nUoCjonzfIlLdIe7bFbjGiDMIpXl0rdKIdwfytygw4sSBp0O9vcOWrsmJabVQZb/e4dLrB8oqsurtujYX5Kp2xkmYvEqKNGMtJCHK5Fi+f3yRJS2xHw3Xk+8ZjDd0Y0uvK77N0DRubtBdwbl1+n+kYEATkSgykUqnQbrdxPXvqwSSKgizLKAUo3ja12izj8ZhMVZsqlQpZqlOU/SnHA2FOifcTCE63t4VuHeHYMQkZDYYpFc/huedeotqQ9z2KRliGQx5niMrks2yEkJAMgGHaR9MFeZ6gq2fW0Bqk+Y6CAcq3maZJHGRU1OdU/ZyDh/bxwvNnpveU5iNpfg7TjkVZxtIWQRHONaOkVpWQP6E6ubJjlkgj8bpcs71eZwpbk59TUqIRBiMMU7WzPQ3HrnHl4hqFEhHoJTmRlxOHcgwOPvgAH/zH/5oHbjtK6cq5+dDP/yIvPPci9XqFwVDuXb7VgCLBteTve/a551lc3svqxkVqij8yHpcc3HMjv/LL/5EPfOgXADj16qukTxnc/z2y8pmmIwr7LL0rFh/+e9lV1Eiwjs/y7u//WT7zMdm5cpZdPvt3H+emh94EwIHb5vnBH/8+/o//+NvM7JHfl8cDbNNDLw085S92/swq99//bkLZ9OP4157httsf5kf+hx/lq8flZ6eDnCurGxw5KCuaTz0WEY0Fvlcn6E268DolGWlsYzty3rNYoyxz7nuj5Mc+9ffrNGoeQitIlbmz5ddot5oEYUpjXvH4tnq4bhNXkfrTNMUyNdIkpVqTrxmPxqRpjOP4fMe7pADD2UvfJEo1vvzVV+X3ZxpzbYOoTKi5ynNxlLG+vYNlQmtOjnFrdoHjJ65w9ZLsSL9yfhXD9PF9l8FQdkN+8id/hC88+nlGw+pUjjouAoTQpqIwlmWTJKmCR03WuUGW5VK45zVVfrgGC5xU6SddLgBTN3F8hzSLlW8OrKwcIAoTXGfCUYopS4syrOPV5PMwHIYcPHQrpnWVk8/LivdoHHHr0Zt4/puSQxeNI26561aipKCnOCZVp04ehxiGRaa4GRUfZlvz9Hry9zZnBUWcksTmlPh//Lknqc3FZJEgyWVl3HR0FuZv4YnHpaF2a26eWITS+sOqqHkvMWONuZk2i4ow/9JLJ9EMCcEC0HWLC+cvkRXjawIWauxSZc0wHErjX13XcVSXwzAMZmf2o5mC9Q3Jp/Rdm1j4lAqvdbWjMRplFKJEV8axSZZiGIYUZVCc1TiKGI1GLC4oGW1d4KY1bjyynxMvKP8/XefVzima9TqzSoAh6Ee889138bkvSHuYMnNxfJ80jclCFZRYUrAkCEYMBvJ58H0fy7IIw2tCQ0GUUySChq/QQTsj9h8+iutZBN1t9boFqnaM8uZmfk/BvqbLxmpArLpSwnLZ3RmRuDPoyHWNsDHJMUu1pqwquSnQswLBNcPl3c5gyvWSc4P67zXBFyEEWZZNoeygT3lXE/hgv98lz7MpbG806FOtVsnLayIUhga6EieadJek0FLOxD+qVBwt1/GnnCvbtqfiHJP36bpOkkb4qhOY5znBOJKm4KqbZZqSk1SWJb7CIotSwzAFo5Fci7ZVQzNi6a+Zq46znmBpNl5T/raj97oMUoOhXeLMSthcfWmW888dZ84LWGnJtRsVPs8fd2gpBNpdd303b324xf/5Z7/KW94mPa3CsYmxcICzL75AbEs49up6zsXRRWarch3snbdpVDTOnH2aTk+OwerqiwSNV9B2S2oNxY+LW9QqCe2GnPNs7LCz7mFaAZ5CSBnWGEN3yXN72p0UpUGztp/dvlxjS3vamEWLsBCUQhkw52Pe+t3fSb8YUmYyhiz/n5tWcrz/6//7v81lGTYV1dYsopgblvYSmxGRavManotrjxBOMfVFqJga3bWCrvJgasxl1Nsmlldn87LiTlgFghJEimsrkQvtILZ7jiSZgEUTDKtESw1MS7Vi7ZIk1smFTm12koDEFOhYKkgqixRzoJGMQv5QkcKb7b1ku7O8/XvexOCS2hStsxx7qMk3H5MT3moN2TqbcO6sjTMrH7zxKCULwapqNGbkot93MOLSGR2jobDw1gLjwZjF6jyDsSKTpzGWMCg0A0uROYNxjG74DFVwLpKMpIRRknN+TS6Kfk9jYc8KhikX3O7mLvF4TL3hkgXKdC/OyIZV5vwWRV8JLrQTwqGJmcuAJdjuMhjoLB/22TonN5/mbEZ7vmC0lWOrRb/3qEE8LgjH8jWupxMOLbAyXF8dvEJj0C+xDG3q2eE3PLIopVReKpubVSqzDpo7wFab2DDSEbnG9vYYWwV
4WVFSJDm28i3IypSXL12l7naYa8nDZHXQ4/EXP8fr7n0r3/WdH5B/Ww1Z2z7LSBnVkiekieDu++8inSSPnQg3qiFmBGPlp6TrNnuXD0CpDBhdl1xklHpJrhIpx66SZymiLMiFOlRtE92GUFkbO7aD4fQIxmMUGoThesJcrYKwQrJU/nFsjtBEA7+mDBETgzhJOOAErG7LjdRu6XjVnGBtSKqI4kmnJBvBnpskrKW72icJU4q4pNFS3imlQaNeYbRhcLkv116tLuj1rtCYk8+Q6ziEoiDNMzR1wNiGT5LmeLWCWG1IcQm6DY4u50HTCpIopRQZCwvyHsZxiCgFXmVielkShCOuXO1gWYpM2mqx01lDFw3CSAkUlB0838Ka8NdSnSwPqVRa0wOu0+miayaO400PuWZrDxfOX+GNb5QmmAtzc/S6Y2qtNi++JLk3Ng00UlyjiaZU/8Kgh6AgUtF5VkjOgG5eM3KMwgjdMiV5WsFWdFOj0fbY2pDFlfe//728853v5Yd+6AO02zIZH4+GWJaJZRlTb7YkFWiYaLr8va7rUpRSpc5Q41kWAsPKGQx65LlcU7VaBdPUp7h+0zRxLYvaTIPutryHMtfQqz6nzz7DR35T7l0/9e9+mk8/dpZIqdvd94b7+Bc/99MYpcUrJ2Wh6PzZK1QaGmWZYangOE1j4ijnxHPPqLVisL15hf37DhDEMog3RRvL7nDPER+xIUUKzpx/mfNPfoXHPi/X/s/94i9z276jDII+8VgGy0dXjrBcc/jqk89z6EapmnipvwppyhcekSIiN2/czF23HeW22w5wefOS/D6rjluW3HDD7bzw7NMAHL55P9V5nxMnL6j7tsnTAedePMv2qgqqmwaeb3N1TR6ywnS4tHqWas0D5Gt0vYKuVcitHpom95KSiAfevMy+fbJ4NezukIQdDCcjUXvE6+7ey/JyTKOxxOmz0ueusxZhuxqeL9f5ocP7iZMBm+vbJKmcP8d1mGm36XYHfOJvJJSmXhXYmCwqyOOR227mscefIMkz8kCJwLgFUbxJVLqs7JciBS+8fJ6Tzx7HrSu10pk2ZTwkTsopl3BxcZkHH3iYq2s7vPCCnPcrq1ep+d63QLjq9TpRFE2J/kIIWq0WWZZNhRomimwTZUDXdaeJlq5grDomvlfBciIM5U+5tr5OkmR47sSrscJoGKIZ1lQBtlr3ePTRxzl4aC+WrXyDvAanTr3I5q6ECh+9+SCjYMjO9gBX8WrytEDXpRLaxCNzeeko/V6EqcZgtrnA5vYF8tzl0I1y7CqNhEEU4TsaopDjfvlywrB3kdkD8t/zrXmGg11EvkDpS3hYp9/HtapUPY0dxdfWNI3DN7mMepPAOwd9hKX5CMX7Fjos7dlDGsuzN0kSBCWGYdFqKaGhKCKIdvG8a3OTZjFxmRMrsapgu4umaTiOd02h0bLI8wJLaFP/qLKUZsrdnhzfhdk2uii5cvEyuvJzu/nOO7nl9pv5m098kmRNPg+ZqHLPPW/DUgWmT33i8xSlDkY53buyIqcUuRTCUuIGkwR7ksRImJ1NxawTqjnWPKi5Ph/+8GemPOHmjIU7DpnbL6HK99z7MP1uj05wnIfvkZDmLz/2FT704z/Os6e+zie/Kp9l0zWwjDqhiilLEVHXHUJSMgXdL0vJj82yZHpeTEQwXiteYRgGQohp4msYFmVZXlMrVCu70WjgK8GQTqdDHMcURTEthI3HY7IipVL1psVFXdcJg2tCGKWmUxQCs2LjONf4d51Oh0qlMk3UXNfFsixKlShOYIWe502TqzRN1bhrZIniunsecVLChKcsUkSmgTCn9+B7Fr3emAffK9V7R1mHeLuJVdF57jkZK9m2S91xaTgh88r39bFHrlJvtTl6tyw6XbryNb783B4++H0/xTdOyURqOLrM0vIKoppiKVXaPQduYtmpU1c+fr1BifbyNl/721c4cKMsKFX2h2gVi/m2xtkvp2puAgw9Q1NFvTiNyXIHhEWhYiBTr6CbOZqeoyhkhOMcs3RBid7dcsuDzLXbfPmpp4kjeWZa1gzzB1Zw07UpErAwI/5r17dFcrV3717uu+NWAD7/xc8RFgmVRg1NRZi60wRtSBqBUIplR4657N2v8+XPKfxxoOHVDCp1A2ssF6FbLTFNi+21MUfvlsFjWhRo4R6SoSSTj0canmMSDBPqSjp4ac8yaVISxOsYClvs1Ax2LvURSq7ZqZukZYxVhxdOykrZHcfWGYY5wghxbRk8jrLzrK1q2PNygR+42WK2Al//YkZpTMxOodHQqM3YBIVc9FrssOegxiWldvXZ39rCtBPae+qY2TXjuIicimNTKOLrOI4Agam6P0WRo4+qmHrI9qbqzBUFg16f1pwck/ZChSjokWUxrXkVrFY0hr2ENK9QNRQJNbBYu5pgKilfYQoodTZeBk2JVVimSRLLB36kFIzqbRfdzqkonlSRlczM6JQ6WEq+eDyStRqNHFN1oLrDiH2z14zqzne7LNg6sws1gkRu7l7h4VUNfNdDZHJDXGzvpRv0qarDeXdzg4MzDZYPtXnxFRnYzMzOYdspl15+nD8+LwO+jEwKhKgk23JN9Dxj89wmB249IseOsyTJFtvbDsKSiVO7bpGmEdW6nHNNzzk0YxKPB1y+rA65aEgpCjTNnXYVtYmqpTIMxEhYvMEhH/nsbMggvtLwSXMd0xF4E6nndIZO0UFTHSK7aKLVTPZYfWaRY3ByR9CYtwjiDJQaouvnlHnCuZOKj5NXsWyNpT1VglB2It5w/3tY37jE009/fcrDKPoW42GJacj7rNXroMcUWjaVUC81h+X9Dda2VwmU1HQpCvTSJVOdFk0vMV2PNM4JM9WCQscwTdCucTf6/R71em3afUnTGE04GFYwDYDGowRNq+H78p7CMMT3amiYU8WmpaV5GYAEEZ4i7G9ubpKlMZ965DNyrSzNce7sZW697aiU/EVKTQfjEUEIpiOfR92UleaJspumGZSljmna08PLchKyvCRNSxIVkMzMzMuAUgUVr57d5Opv/pFMzDR5n8uLs3S6Oxg6U5VPy6iS5SFCKR/OtQ+wtbWFaRRTKWbDMAnDWCqNqcBpezviyJEj1Gpyzre2trBtl9tvv50nvvi4vPeyROQCx67x27/+JwBcuOJy47EVPvzL/xaAG1/3Pdz/xjchhKCzJg80XYeKPUcYjxglcjzf+tbXMRrkBEqw57777+TFV57kldNdMOXfTr/4ON3dkINHbmH4TWmg66RbPH/qm3zPO94MQPPgPhadB/jkx3+Nri5PvQMPfjfNmxdIv/lp3vF9PwjAr//nX6bddHn5a/JQ/65b38zxpy5y+I7buXxWFo/stiAvTAZhl5lluV6qjTF/86d/ya4KcG234PSZc9z1wBZvf1jew7lzqzz2+S9w5pxMsh/4zvu5545j/MFv/h6NllxnZVEjTgbkqUOhyXW+Z+88b374B6Zy0XfeM+A73/Jmfu93/pREHdhff+qz7Dt4AK9qs35R7kFuy8UqDQr1fDz40HcwGO7y+BceI4wmim
IaIkgpS5OFqvxbKSxuuPVmVruXAOgKjTvf+GbarYLBphy7cy9fpenldHYzjj8l5cR1J6PZaoFK/KOwjy40HFefdkOPf/M0nX6PE899g5Eq4pkKWXH33TJ4ffbZZ69x+tRinCRalmVNq+lZlmGa5rdwsHRdV/9WXJEyIUnH5AWEqnt2x1234bou33hKnqvC9dA0ge0PGQ7knuBV5bjruk6zKfcgIytZ39rkge+QHLo4H9G9eAk9K7En92DoZHmKYVxLaoPxLktLe9id8LLjHNds0x92KBIZE5x5cRXPW8D19Kn9SxLFWBWDpRWZgGXjkmE3odU+T3NWJkD7Du/npRcvI4wuW6oQnBeSExlFqntneuRCkJflVMRDNwws22A4Uh2NPMNxXKI45PJl2aVaWJgDLaPX7bI4L5EV3W6XPIunHBzPtClLKdYw2SPKUiY1tm1PO0dCCGzb5t577wVgc/0iV9c7HD6yzN5ZGVRnjs/ZM6eYm1nCimX8FBi7/OJ/+BCGJvcDv15Fzwsszadw5fc5pUMYhmiaRrMpCxJ5VuJ53lTttdvt4rsVsqxHqoQGZmb2sbNxEbuSMFSJjNgc8MAPvI+f/qeyIBoPdnjko4/iGg5hLAPhfbcs0bE8dq5ssKzioI3UYlyGVCwVg5Qao3CEr9kU9jW+k+/7OI4z7b5OpNon613TDCKVjE2KeEII8jzHcRyqCnZTliVhGFIqnpLv+9i2jW3bbKsiV1GU5IXkb00SrqIoyLJiyu2VSZtBGIbTe9J1k0qlgmVd4yXnuRQsGfRlXGTbNqamkyfpNDGU969h2w4GYnoPllVnOJQFifseOMawX3Dl0hq6NeGQeriVBv6sXPdp/ybqnk4cjlmelzHP7qCDay0Qj1/mrEKl3PqGFe481mRLca70usZK+xiZ2eTuWXmudhuvYz16maH2ErWGfN3hg2CLKp2XZIwwupJQjsYYcYrIzst5v1ihOmdx9LtmuPD3W+q3xYT9lM1cp/GcAAAgAElEQVSJUvZcg6uX+vihh6msZvLMokhS0Gw0NQYagtGgg+7J79/azNl3xMc2Bmyvqj2w2mK7mzA3O0MxEWbzLa5zrq5f16/r1/Xr+nX9un5dv65f16/r1/Xr/+fr20KK3XJ18dCbpVlXUb/C5Z0hDf8AWSozYNuokgy6bI+GJKGsujWrLosrOeORwhEnNv1uxsxijK2qvZ0tnaVDOf0OFEJWHg/evkKZ+xQK+3vqzAUcZRLXnpNVh9nZFc6+ep77Xn+Yi2dllaFRbdAbdlm9IqtbjTkdyy9JNwW+KTPet7//TZw4/hxkizx8j5Qc3k0/SyeNqO9VFdRqlfVnA04+3Z+ggdFLF1uLmdljoJo97Fw2cewE1RjALHUczaC0coRQ7eGopNRLwjFoCk5U8UrSwKJU8AvLEgQ7oOkWNSWhXgQloxgqSnVQ1zPqFRPfM6W3DxI6WKQVyu4BmrNSpcqydS6fFQgh58UybLIsxRQ2tlJx2n+HzcDqkncthDJbdBoFRlJFUx4sg65JxZcVIWHIKsNgUJCG4Fcsgljeu6YJ9jUbRGP5vs1uhm+67L+1ztUzstLiLDj4NRfHrNDbkdWQUrcohYGpcMuH9izgzs5w5eWIumoSrdzksLrb4fSpK5iqElG1BWYJm0oKNil0nHFJ2suYVZCfvBKQlAE1z0czFE4ZHc2w0BVWvOLX8V0P3U4Zj5Tp5c4mZZFRKmggQNV1iIIYeyLTXY2xTZ0i0ibFZeKioNAcDMPGLCe8pJwYg5Ymf0znbMDFzYSHV6CmeCCPXXFZnHMZ7AyvmUfGJo5jIVRNpSgzKn4N09JZUJXP++95iC984TFKhmxvKunuakZRZtQr8jUVx+b8pfM0FmbRFa9O03MKI6W7M0Ivr8nrlgjyYiIh62E7FkKMMHRZETZ0izhOp5CK15o+TuRoG40GaBlhkJFG8hm9+bYWa+sXCYbyAanWLJb3LLC2ukmaqe/zdNI0ATR0tRYs2ySJRpTGBOJYoRQJOzs5hw9LvkEYhgRDDSH0qYphe26Gq6tXsMwJ30FW6GdmZqaVwa2NTRqtJnEcTjtxlmnj+z6Rgvf0ukOqnsvS8gz9gVzXS4v72NnpMBx28SuyGhqMcwQxhuoap2nKwYMH+dEP/gT//pekDHJejigLSyoBKhhIURTYnnsNZ2/YEsISx/gKVpIYDroAz84o1OcHwZCas5eKK+97OBgQRim6GXBw/8MAHDpi8fxzT5JGGXEg7/POex/CMEtOv/QcAHPzTZzqOq3aMU6fkhC8vEgoLROGLmUi98+5Iyt88F9+hLSUSqtbJ4+zct8+vvbRJ+huyb35ardk380HWd9+lXe++x8C8JmP/QbhTo+GL2FBf/TRv+Sjj32O5dkqv/W//EcAhFaQGzl5CKgK/tIhk6W5Fbauysru7OI8r7vrNj76scepNCX/7kd/6L08+czjnDoj4XAzM/u475a38PG//hUqiuuD5VAUA0zDo1BV6f1776JRbyEUFOvUqRf5xX//b/nm84/xqUceBeDAQZdTL/TZv3KMmXk5VydOvEi7Ok+OHMulhUXOvHIS36leg0Z7DUbhQMqNq85YkWZomgFK8VbXHAZDk/d9/3s4f0UqNB5a2c+P/5Of5EP/5qdYvSrHPByCZaYk0USGuU2hDaRMf6nUOg2H8XiAW7V53V1SjfDlM6cJg2QKASwzyTsxzWv8qiiKqFQqitchx6Ver9PrdV5TcZfeQLKjpfYNTVCKAtt1ac8r2eO5GU49/9w1Ppcw0C3Z+c5VJynJAypVj8M3rkw//9KLmwQi5M7775PrZ/UCyXBMPs7QmfiUSRXOPCvwfbmX7F1pI0qDy5elwbyh5/jOEnEWcODYhDtl0NkoGHRsfAV3x+ihpT6ZUgLt7G4zP9fk5ptszp1WcDRjntaRy1y8krP7qnyf4xpQejie6k6mOZQOWTHGU75oWR5iWcZ0X/R9n0F3hKZp0z1SWk3orOzdN+1AFUXBMAywHGXJEYbS02o0nnYQNU3DMWUXJY7lGWYYBo7jsH9FctM3e7usvbrG2975Vo7sl/yq3/+tP6PR8Jhr7OWWex8AoFlP+fhff5qR8i3zm02MdEAyihCKq9msN0mSRHpGqb2yyIWELCt+bJ7npNGQCI2mK+MprRS4dociL6i3JbTs4NxhXv+27+en/4WU8n70c1/hvtffyq/8h3/H3/2F9N5qzteY3TdLOR6xncj7urp9GZGWVFVH37IKgnFMmJcYYtLRK7FtmyzLyDI5xrYtrQMm8DvTtKcWBJN1rus61Wp1ahwMUsmxyHJ8ZRXi+z5RFNFutxkqyw/X9+h0VDxZk/M+GAym0EOQXCxd17FdZzp/nucRh9G3dNR836UoCpIsVetA+l5pmvYtkEYMGetqE289Bd2dmBJ4bgNdL2k0PEaKWtLfiXj9uxtECgm0sudBfAf2LNzATTfKmOArX/8aZZmztvp1qsuy0yncgIeO3kw+lpzPm+++k3LQ4a8+9Sy/8OP/IwCff/RX+cKFP+bQTXPoStE3yStYLYszn
5Id/qef0Jk/rBPt1phTqpimu4FNQaC55DtyX1q/1Kcx67J0RI7d9vaYYNfHrxjMrci9UhQheQKa7rGxKX+fZ7ao+rOYqusoqg5O0yBbD9jalJ6LWmuW19/zLuZnB3zkNyQ8e/aWFhtf2/72lmI3LMHZHQldmKs08RsLxEkOQi7C7Z1tPN+jVhN4ngrCIsHGZj6FLqHHuFVBlklPA4DbbttLb9DHdMfYthzIrc7LGGKBYFse4IvzMOgITKvEUDCPtUtnyEaCE09cIFcQh15jh1arga2+PwsyrBysGYOFfXIyH/n9J5jdnzEMOtx3z08D8JXj83SzP0MbyaG+cG6b4ZpHrW2SdCYbZ4IobcIww1E/aGV/wua2Rj5Uhmz1iFLoWKUgUuIfIrEwCx3bNrBM5QlkOoyLGNOWnzMeFdRqJkWuMTOjeEpWwnCzoOrLRVmr+0TxkDAVCHUI1Vot5maWSMMKnU25uW5c7VL1xVRUICsN6pW9pOmIVHHY/BmHoDSJdjNm51W7vSzIw4zWvPy9/W5KqQsMzWc4UIseHcMyyVKNREnpzjU0uuMxhgqQZmZrBDsaQZKiKdllv2axtbNLq20ySuX85QLKwsMsJ/DJnFdPnqfeqHDXQxLaorsmwm0SJyWF4ghsXt3Gchzayv+rjAviMsMwS7qZPCydxMa3m2RZgKupBME2wSwoFaSTscVgJ8CZ9wh2FMSwqFGUJbavkQs5f3EmEI5PplrLIvAR7hDwSFPluVAmZEaCrSd4ijdQUKAVJWdekRNxy4FjjLIdXt3ps2dFwSWtgiyLqdXquGpOx0IQpgl+RUIxDNMhSgZ4Yo61dQk1+a3ffZYbb1lg0NUQhvz8JHbIUpM4lDwC33NoL89RZuWUAxkkQwbjIZ5ZoVC4b9OS5oK6SlBEUZBGYBo+sSLnur704ZgcSpZl0e/3KcuSYmIog45fcckzkzSSc5yMm1TcylQcRBQzbG50CMN4Ah9ne2uAZVnUajUy5dJY9Q00RydKZVLR3UnxfJ/Zls2G4hEsLy/Q63QwDA1beRo0m7OcP39harYqD+GENzz0emZm5Hj+zu/8MWWZU6tWpxK5m+sbvPVNDzEYyv3m0uWziNJE06+R1q9cuYRhmezdu8JgJOE29YZHOHZJczkHuiVlfn/hf/4ldGsi1y65Mq896EtNBrqTAzXNYqrVKqXIKQpv+lm6llAWDkLteY1qHdfrECh/vOqihyeqBKOI5b1yPu994J/gHz7IYGeHM09JEZjnTj2BY9Q4epM8ZC2zyup6ymxTx/SVJ1HlBihyPvCffoq/+92/kOOyvcEv/vOHOHTf6wF479veySOf/QbRaJerl87JOTZixHMXcNw6f/dbfw7A7IE9FOmIvjIx/tPPfpVGy+LCyU2GyoSyNqOjCwetDNi7X4oI3P3wLcy2buNvVv8agLtuuJ3TJ6/QW98k6MsxeM/7/oiTJ79JpHyEOqngT4//Gvfd0yZVnj3nLvZxnApxmpIrQYRxepXB1pUpv1JzQ/7X//RraNaIkawBcZEMx7RwbI2XX7kEQD6CY2+8ma98RUIlL8ZdKr5OGPRZWJRwmyiMMMyccdjB9xQE1whJUxPfkK/53d/9Vf7Vv/4ZHvnYx9h7QCad3tF9/OgH/w1FqdEbKPji0jyd7QxdV9AlfYciF5SFjq1Mtg0rpd50scwKVy5IuA25wcrKPq5elclHluX4jkuep2RKkaherxMEgZR5Nq+FFYZhTBP/iXS7YRhoCt6vaw5JlLHvwBy+Mm8//szzVKsVTFV4Gw9iLArIbZb2KsuWhQXOvLyGicVIwQkHowF7Di9z+ayEDgly0ijF0i3U40iU5LiWg0bB3Jwcv4q7yMbWOVDc1zxzmdtvsd03sJVnVrcTomlQaxZkiYI0ah4b61cwlUWFrlksr9g8d2IdT3nt7Aw3iC2b1z+8xJM7St47MylLnSKXsYXjSF6Iq80g1Nlw6MgtnHn59GvgYgKhafiuT66KThIKJkUNtrdVMmfZCMekNS/3JDPKKPIRc/YMQzVOhmFM5cPH43A6V0EQ8NJpCZEfJy5vv/8mTh0/ySvf+DoAdx2dY88ND/DkFz6DVn2LHJd+yL233ciOErg5cX6desPGtAWFEt4a9EfkuRI7mRjhqoRlInBRqzXQtBquVaCZMmEIBzFBVFBxG1OT+dOjs3yHGfInfyKLFj/wI+/gL//ykzzy2NPo83L/HiY2P/ODP8lH//ZP4KKENLf9BmtRD2tCadAcSiPBI6cU14RXhBDSFHwqxlIihMbCgkw6w3FAXkp464SXNVnXURRNIbGOZTO/tDBNnoRwEKLg3IXzU1/EvCywLIPhcEiaTsSbJMdrwr0tyxzDsnEcZ1rcME2TWI+pNxoEIwXBL0tmZmbo9uV49nohvqfgja/htfm2Q5Qm18y/swxBga3EjqJxgeXE7Oz2iVWc94b3zvPw+xw+8idyjl/c3eZ7vvdt3HXXIZ4/+ykANoZP0NZXuOWGB2jvk3vQiRMvceOBd+Er8bi5uX189tlPMSd0Hnvl8wBc7P8t+w85XDrVoame/8a+FFHZZW6f/L12DG1ajIqc3jnJ1bIWPGYXdAanRwyzSfFGQvRbs/KZWVuVNi7LS0sMlaiW7fcwrBAQFJkcl9EoJQl3MBVVoVFbIhjbJDvbRD35mpl5DZH1+dIXn6KqdBCqXhWQz91/6fq26Fy5viFuf7ccyME4Ix7W8St1Bh15Mpl6RpSn6IWOpzokWZxjusWUqGZgYJk5RuEyVopb9x65le9+5w/zF3/7CFe35QGWIdjdFTSVekjdsaVbuBkSKBO6ZrOCY+V0VwtErCqft1S5sjUiitRCjRzm9uZ0tkvqShGu2ahyYPEurErEXXdIQ7S//r+e4vSpc/zkT/wjAHaS07x8/hT1ZgUxkgGJa1h85RsvsnJji7wrN7vb79F5y/sP8LPvldyCxT0eST1BkJMNZFBf2CEi00mFQNGU0HWdcQiZCnBdNPJOFUMPMWrKRLCeEw8KXEc+YLOLPnolJAxK1NvIM41G4xZmlw9PfZleOfUYDcsmGMqx21oLiIYCTYDjyTcee4dBZDs4YUliyAU9GvhU7JCm4nhdOjvCEoZMbCbVH9MgjQrKdGpzwXK1xeZ4SKOmFCHHBhVXw1h02Dw/eaDArpbMzy1xSQUDaZFimi62Ukz6oe/9Xu65cx9r3QEnX5Gb3SOf/QrLSy2qNYO5ZUmcLK2YzbVtdJXsXL2wSp7omLqFmPgwOdCY04hCm+1tuT5rVRu3CjVbBi2jVxOiQcHiHXsYKhNaTRdouomGM+UEWSbkRYzBJGGPKEVGaQeMBmo+C4v/m703i7YtK+s8f2utufrdnn3a27cRcW/0DW3QKxAhKIIimE0laKZWppWpmUqlDkmrrExGgVQpjaKomZoKiIhaoKCCBEFAQLREd+PG7Ztz7unP7pvVr1UPc+51wpHDt3zg4a4XRlzO3nuu2X7z+/5N4WTkmmB/Q35ukAY47CrZhDuziMkCFzeuUVfKjm6U4tdNJoM+VVNmhHt5nzAVRGO1
SXsptDPMukBjajCZYfspRTJLXxlw5UWGlpsYyu+kMduk1pzHweDyBZldChmTRDpkObqtKle6Q8EEW6mhGUZBFMiKJergzYr4H/Ay6vUmvV6HOJZEX4DBYIDnV3HcAC1Tbu89gbASfBn7kCQuOzsxrqeXnk8gnec9zwN18V1aqjCatJmE8oOeW6G9M0EjLblMluPQmmsSRHlJ+O73O5iWUXqNZLG8/GnFrqLU4RPzXL24QtCzOLRPZlqX9mpcungB25JjIITF5k4fr1LjVa98MwDffeZRRpMRtWqLTkdWibM8xrZtcrW/BVFIGE4whF5i/YUQ5IkU1yizqKVHyjTrlJek7MxU2djIkUGucDAVvzDXBUG4a85p2oKkiNEQBIpPlSQRttdkdmEfhiEvEsP2KrazezluNfci7Iyt3kXyQgUfucAudF71xh/jzz4jhSgO7N3H/T/0BpY35IX9rz7zN3iAXjehkGN18uQdjPOM8U7MOJcLIg1CrNilF8pD9s57X8ett9zJmacf48wpeSkLkwG6ZRIFAXe89FXy947MMNrIuenEYTmeucW3v/N1xu3VkshsurP0VreIE5loMMwKQggWFiz6I4lgmJs/yerqKknWozUn9+FOe4iR78dyZFD4oz/y43z+Lz7FtYtdvvi3nwPgtz/6B3zz0a/SbMyjKcPudifENeoYan8djwNqVZ9XvPy1/PWX/kL+3uwSi3vmOHP+Apoi+mfUqMzKgBzALKrstFexPFduKoARDjCExzgYoCwkaTYPYJsGaSGDpEsXtjC0Cq4nGI12hQ1arSbjyYhE8Vxq1Vluu+sEDz/8sJw/lkWeZKRpulsVTzIsy0LXRcmVzJS/2pTrI4UVZMCXKAEWOT/BrzXJ1f4yHPfZs7RAoLzietsDjhybZ/lKh3As23T0hn3UmgbdbYMrV2UA/eo33MuVSxe4dlUmKJr1muS+hBEVxac2hc1o2Md3bF7zGsnNWl9f5/Tz5zEUnzPJRsw0ZxkXEW5dXs6DZMBM3cGzdLZVwDzuauShRqzL8Zyb8RkOOmSpQaT4v/e+6nWcfv55hsMVciVIhJbj2FWybLdiousGhZ6UlY652SUuXbpEszENFDdwHAe92K3uC9MgznOErmOqpGyWJTiuxWg8VWh18W0P4Wt0OnLchW4Sx4ncR5Ss6XT9RspstdaoYhQpO4MB3/9KWdmNEp1wewvXneWR5+UF1q41ecmNN+Mq7721do+r3R5xrhGkMvB3DJdcmRhPK0CWZRHHu4iFRqNBWoTokaAbKknPwkA3MjTdI1FKlb/4K78G8TU+8MH/DsDP//v/k0//8X8mGq1RbSh/SjRMo0kUtWmpC7pIOrjNBmOlHnpts0+3N8L0LIp011PKsqzSx0r2i0aa7opVeI7LaDSiWq+VY9Xtdonj+B+qDFLgOA7d7q46IkgD56m/YbffxxI6QRDsVsZ0gyzLdoVfdB3Hc8HQScJdcSPbtktfOYAkipmZmZGoCaRYRl6kUiBDeW3ZKu5I8qxUs6RIKHKDfFq9KwIs02c0HHP3y+Ul6Wc/fJxPfuIiF07JOdXYv8TRG26n0ezy2HNSJfL+17yTlx+/i1iv88SzUkTIaDu88yd+gjkl3PSpD3+CSbXOy964wFe/+lEANpZH+HeMKUYxq48p/ljS5J73+Jz5G7nOus+d5Mbb5/nspx+kNT/1w2wR9npEgU6qKvieHRJONF77Q3LNrq5E6MEchmbx7OlnAYlwmWlVsWwwhYwv1le36WwHUjYUOH73S7C9iI0nn6a7I3+vftTCEw69yYCq2mMP3tTikc9d+t6uXOUUGCp7oBU5mqMRMSBUGa+onzPTMDFEQp4oV2zPIIw9NHXIo1uME5mR1YSccE+dOsflM79OGLmknpxYoyQmG+ZoaiPfGkZUKxFZbKDrchIkRY4hIg7e5NO+Jjf4cDBksQaxmpOba+DqDgf2hBDILNXVUxvcuM/nn7/3X5BUZFbM+PI5Du5fZRQqxZ2jP4pj+1zeeIpTT8jJ88Fffz8z+/+GB7/xDUwhf2Cg5XjWPl7zLjmhH//KNaoGZDbkFWX8NwGBjaGlxKqMLYQMkMX0zpzriNYYW7cosmlWxSRIM4QSSLj0TMy+g7PM7u3Q7imooEjZWDvD5ukdjIrcuCdWgraUMKcMRBs1hysXQtrLCfsOy+Bx1IErZ4bYsceJe1Vp20pJCoNAKeIYJqRhgaHpJFOXbCPDNX3iyMTzlemdITOWhSrtZxpMCoNGWqM+K/tgMM6pNqp0Rz0CdWCbho5rgqsqNCvtNq9YuI+vfenvedkd8kK7x4Yr584h6innzqty98EFck2jUZHwsPmZhI2NDXRhEausdJ7nZMksWhCy2JB/hxYgcgfbVcHr4YTlsxrjqIetNrE4G0gipJYRqE0y1XKKPEJTRflcG0NqU9EEwpXzZTROyRMNU2gEyl1+oVYl1VOMXM7Xbm0L0RuyEGZsbirVr4UKvmOysdZnJJSEsvComAUzTTmew5HGyNQp4hihJkwRWQRjKIp2GcTnuUFGhtIVoUgcRp0O+0/cTK4uhhcvvYBfdxkOx6WlQZ5FOI6PoWBPQhNExVgqVZnynWtOnSzL6JcZuD6e5zMzM8tQ3TAXFxdJJil5bBMkKvvqFhjUyePpvA9xbA1LWGTIwyRLbIQpScbt7amRoiBPXfbskdXYfjeltafL9pqOphQ3250eM60F2puXUXxhfN9jNBohVKTqOAVhFFCtzTLTlJu5NxNzV/0ubjt5O5/9rKyQVMeHSUOP/YfkF509n7Dv4F6Wl9s89G2Zfc3SAvSC5dVTLC7I+TmZhAxHw1JkI8kTbrn9VtbW1uirA9vQBZpe4Fh2eRgHUawuU3KMdV1gGAVZnIAinaeFwLJiYMhYZe8sq2BxYZZUZVCDvkacBwijil2xVB8UGHGP3kYbzZIHkWtVmIST8oLZDa7g6DGWrZMpY/FqrYVedXnnD97Hd/9WZixvufP1aNY8X/qMtLG4+87jpEJw5sIy+1Xm87Vvej3XVjb4zH//U2aX5P6ii4R+OMZSBugXT59n34GTvP0n3sv5ZyU08b/+7m9Rc6Vs9hSCs71pMbd0kGdPXQHgwW89QG4WuCZ4Qq7HSb9Dc9ahOadU//pjRj2TdjsHdaneXF8mG4UUuklnS87hAycqrCy32RnI+fNXX/kyzcU6leoMv/g+CVXstFfJ05j2Tr+sdJhmztx8jbk52U/nr6ywszXmZ37pFzl+QLbhQx/5BGPdoVWvsqlEUrJsQhxqJAr+GgTrCCEYDUZUq3JcxmnAQnOeVAsJI7kvbu6scuONN7J8eVpJhvp8QZEZZCoRVW/46EZBliW7YjHBgO8+/gxCk+0k09B1gWmapUql41hoOmRqHsL0MuVi24oIH8eEkxDdAF2bGsyC5/oMuz1MJSNfMask44y6ypyMzITLl9aoVhrMzk5Noa/h1m4gYMCb3/EWAHphm7e+8238wW/KKmdRZBREmKZDoCxGEj2kVmvieR47HXlZ7XaHFKREyuTX96tstzvMH1igXpH7aRg
IqNusHzhIXdtfui17PG7VHczXFZ1IRh2HULiqKgqEqyLO/gn60cdxBH3bwLYfepfn/YzV1ZloRhSOitFP6MUU8q5XmBg3zZuR0M+8znc4q87LpLaS/CUBIEPrmzcRmPNvB8yfHR1M25pCEn8MPO56rKa3wHLW1cB1hK273ueG9Na+C96tDYDo98Yu23HctWSbLdi6zBs+PHBpL5fE4vSbvvY4xhPB53sZpSCr9DpjjfwI0NsmxJlmUr6Xdl97vH15QUnlM1XHWu0zS1Z4T7vOl0ijGmU+psjO6MmjvLhsaq+fX6adc5wghnM+B8GOuVRPiK56pQpnFGz6tu1uP+V+3rW7hvOwatSmT7761RelU13Xcxbo9vr6qyvNr2d60/WJ5nT9yTlPaZbNWXvSDs/LcAwlTQaEWj6cyHPc9CNhtV0bSWF8Ipsgbt3KlOhbD9vKhnzdyXixxU24mz6q4ufMPzQFQRQapoPHfOHWvG50bUhaJuFcrzgvqI728T4XQgzYe+6OS3vZo0BDUf88437YLWi5jNU5J+LyUrbauwaQx1tjICKxy51pcKVbbu05Km0VYW0wkLlJVGBJILVyycMBitU1RHzOdzHFKJXt/jr/31v8A/+l9+CyXsgTZKU7L6FrWy2U6jI2QSMhAGWdtD4GRqCOOAstBEzhF+0A85eNAjGdgEIelZ/CckVM6byvg5fqKpq5DYwZ6iXkM0rGmbi9OHGl1GsFFy2nnTzKslftCj0Hp1OOYRVVZwaseOZzZvCGMQyqN93rRQeEicKip5I1GxZBQP+LCTsX7n3YaHByVxMOHUGQu32d2b8OlP/TibjW2Jf/Of/j63Sh8tGsSxa0d74A8HVAtN4nygCGZMJ5qf+7f+PADfeetNJo/ukKaSC5csuXN9tMXXv/Z1eiO4d9eOVezHBInqiMZBkFNkIcHGjNE5e8hmdYM88ZncnXWHpdKSOKnY3HSHwiQhvDhEpjGJ2bN/5y8J3eY/dc7cfhiiq5K584oSvkGakL/6H/47/PN/aaFYe8cHhFHNyYMQ4zwOlILAD/ADF3j7ikEvRZiUj79gYVbT6+/z9sEBIjJItzaK/QB/MaARduHJWBMuJHUYMHQeYdm04LmPnEPQ51FsP299Y5vjwxN8d3AEfs31mws2himf/bSVcP5n/+Jd0l7IC+uXOXPKBp1yKPn9f/UbbDq5b29tnTu33sYzPsuJHavxesj5SyHZPCRfOl+0xQzpacLAHtjDaESeL9g8u8Hdh1YG+fLFFzjcf8jN2zeI3LxL6RP4ofW+wfGqUE9IyErpUVUrw0NjIPB7LBbLxyA9krX1HlUpmJ7Ytffiiy8ShGFn5FqUS3wvYr6cdRC8g4Nj/tLP/xyvvPxx/t7f+ScAvPvOO1x+5govf8ZWc+oq4w9/75tMDuYMxnadCa+gzJb0R4PuHsqyfIKIG4ZhJ6v9R6Vu67rpNvNWyrgj5wcBwmgkAiFk97uysn8TusMjiiImkwnSKdU8++yHOJnMePDwDmMnqayqmrPntpnMSo5PrHFimoaEXkiW2bWxmOckSUTdlHzsh79gv8sy4/a175J6MdqZ6p45d5aN7Yv89u98BYDTp8+wvr7O/uHNlWeX8anKBjzVibfUdY1Wq8Cml9hgoaqLx7xwfFRj7Gs7kYsWirKCmp05s8MXv/Aj/O7v/JYdgxiWy5Iy9yjrVlJZgEmQ0hk3x+scT4/xQkPuiimvvPoJPvnJT3Kwd0Q8snvztDhinJxnZ2wFez7xI/8mv/3lv8+D9/8xX3vDPo9f/PSf5Rf/6/+ev/E3/2MAbt5+h4P9Y/YPjzl9xj4zZ85us5gp3nv/bSIHf6mais989uPcu2cTuePjQ07tbHL35qMuoPWD1vtnBVGra3tmtbAkbRoQJWEkSXy7vwkdUZQLdNOQDtqkrGG+XFBXrVlqQhhYHh3Gfl6SRARBQFGsfKaqpiQvlmxvWR+xoqjIi7kNppzPjudZCJZAdtLSeZ6TDvodrGkxndnAsJeQ53Z8R+OehR3VdOIDbWDbrv3HYUUOlURZ5uhacWpng1M7NrEvqoq7dx+Sufkc9cfMFxMunL8I0t5TknrcvzMjSRIyZ4iZpikYaQMlbODfNK389MqguPUEevyZbGFS9u+8zuwYV+CNkgGDMcT9OQ/u23uYHXqM1gVl5uSooyGLRcFoNOj2oP29OX/yJz7H/bt7vP7GWwDsXBzw3PMf6wLTd773Fr62HEPp9o0LV/rsPpgRx4lbKxVVYfmfYyeJX9c1cRI+8f2apmGxWBC7jL2qKrsPB16357bz2kJt2zmyAgWroDwMYtJe3O3N0+OTLolpE9TRaESe591ct3L7UkpSdw9/4gdfwR++wfe+03D9vRYSF/DhVwas79j9/A//j5owDfC9iNnMzp9WgtALrZCR85Crqspa4jxmhmvnS3XzKYTXwVg7C5Ug6Oa+vXzfFkhV44RbfJvEqLpBaTvvcZQ+YXot0OR5jh8GbG3ZuOjgYL870zpBifHoCdhca8VgBVVW8FPp+4S+z3Jh5yRNErI8x3cwS+n7GKlBreDnSmmEXkms2/G0HEDhTG/zPOsk49u5UcYmOovFovOLtO+3WvutCXBd192+8UdNmrW2MvBRlDzxHrpRzk9sZebsB7J7Fuq6cly8upurdg6iKKZ2/p9RYo2G17ft+5+/tMYHt+4yOVnFwyDRjfWfMqItXNl7axNVrRvG4yGLbI5pTZ79CKUqzl84zcy5jxfLguWywXf7VhQlVLnkmeeGGHdGX9m5wG/8y9c4/azP8dzuec9deoG3v3b1+9tEGGkwrsLnS4+8VvTP5Fz+iPWmuvH1Qzw9oqwUgVMe8YWi8Qq0Mwccpyl5WaEag+8mVwqD1iV4DcpNZhh4NFjfBQDTtypBm5sjstB5aGT3+V5x2QsAACAASURBVPVf/wq9YcKpizaQ2T+4y/K4QRfOK0IapsczZOqzldrulhHHVEvD7l3Ncy/Y91ob+fQiRVP13XsviL2ISkSIyE5uU/oUU22J9TPXnUh9TgeS8SV7oK2PZlBp9Cjg4Jar/kYx81nB4YGiqe3rtrYkvi8ocrephAlgvbjaA365jKioCNrF5CtU4/PO7y147ddsEPPjf/FzDNcPeXj3HktXGbg0ushXf+krbF62B79Z38bcfchofQvtgq1FLqnLOVHgr9Rl1IiLZ4eOnAuH966zc27I7RsnZDPL30qGD1nkFf3BGga7AZ/aWeNk9gjhAimlAwwFa8M+1dK+d2/QcLKY00sleeFUjYZ9FpOCY1wwIhZETZ/saEixaz/PjD3mM8mZyxG91H5eokJ047M1st9l6mnysOAPv3YV0Tg1O/GQfgIz2dC4hD2JY5AGIdo1FlLkDdIs2dmylTKz+zLp/jWa+QGzfft5o7UeVb1EZHbzEXnN5z+eMNMpJ8fuoEhyspNdtL/D++/YRObsGcGVj25z/wPbNbr3YJ9Ka7ztMd997yYAg3GKj+atN79B9dKnAPhzP/NFfuV//xecTm2y9fD2FFknyECAsIfA0VFBEPQZjkSH2T5z
ts9sXmKUPSyV9PBin8ODA7Qr97x+7S20KTl9YYPJkT0cy8ySSFvzQSEFRhlX+Ww7HzVxtPLGAEmRz4mjsDtMynLJwwdHhKEHgZ3jG3euMznKusNsa2uDZT1FyqDjj+zsbLH38ISdzZLdPdt9+bf/8hd48/VrfPN3v+LW6z5SSja2E/DtvFRNiZf4lMqw7gKZIAgoy7I7FFo/Ha01+/t2XlbVb9Mpx7XcjRVnwCVoniRwJi/WtNWjKCq2tu2z5Ychi0WG5wy1b9y4ZgM+HxYLe59pNODWzYdICQPXgVK6otJFVy08f+4Ct2/dZDQYcP11ywMpioZ+P+ZgOuu6GEE/YVmXPPfSJQAO9w+YLmsGwx0ePLRryg9AK0kkU4R2hz8+QdyQxM74U9dkyxLfjxDu+SiWBcLzEEISBKvD0XJbWkU6wf7+Pv/gl/9x18E43evj+6B8j8Cp4C2WJ9TVglM7Ntmp6gWR7+GbHgR2X7zx7jUOd08Io4hXPm2Ndj0mPPfxMxzdtYH47/z2/0wlDwn7pzg9tM/fxsYmL3/04/ih427GSzyZMuz3u85nGAzYeQbOlOvsPbSBYRQHjIfnuVPan+tyj9nJzFWuV5wdWxH2uqDI8n58checB0HExtaA/UcHiMSpefUU1BnTRUkQt2OnEIKOB9I0DUXRuAq3K9yUGcPhDnmeM52ddJ8XBAG7uzYJ7PeHxHFMkVdIlwQqpWhqWwDRrQJt4Dshjqr7WVfW5y3ttQFXDaIhDHudyW4b9DeP8bOCwAa90nlFNbVi7dQ6C1UQOk5ZFAQYWbPp9s4qr3jmyinKoqBye/xymaGUYrlcIlygWBTFE91lretOqazr5Lpn8fGCh9YaIS2XBVYBn9ERxrNzun+w5OhwSH9ckzjUi+p7FEuNdGu4LhRCGGuyPLHvHccV9+9f5/1rV4lTO6ChhK/99tfoJXYNX37mNFev3qdWms0dG0s8eqAx2mc5dwRrk9IfxCi1KkR5viAvqq5LCeAHAaPxeJWgaN0lu4/zpJqmIniiA2U9i0KnaNgqW2oFeeO4mk4Rrg3Q7TrLQRjWN+x9TyYTosh2MRdL25V653u36I1C3nvziP6wDcglD+5OuXvHFWA962e1XObUuR3P06dPEQQRh4f7+L6bm9TvOED2vgOHGFglNnWt/rVuS+u/9EcThDRNu73y0aNHnDp1iqPDfU6ftmqds9mU6cmEjQ2b+Fv1wDXyPGd3156/SZJYfpFaFQ3nc2tS3O77RVFYo/NgxV1sv0eRZV2y06oTZqUznddWuMzzBbpTRbTiRHlWrBKuukF4sluLYWjVHuu67ooiWZGTLXOMhrJu95ygS4bb9/Z9n0bV1jQXbMJvDPVjXUBPSoos7/6uRW08nszVdY3vr7p37ecJ12Gya8rrVClb5VGjNFEUsLlhC8VVVdBUHmmSEjluWlVVFHlpuWmPeUpKDdrtP0YaqrIkjqKuk9fvR9S15P6dh8QuOQyDAaJXUznUz2gcs1Q19x7eJ/YtwuWv/rWf4O0P7rA33SN18QzNY158f8z1fZFceWFCVbVysTP6Y4+b34Dy0FZjB6OQ+WTJ6BS2NQI0lUTKpstaERphJIEnO8lopRvCMKBuDMbpVmqh8KUiX1iieLj9AvPlhIEfEjhoVCJj7t59iB+WvHfVbq7jccxwMybXrj2chyTxBnDMg72pe++Eqqg4+6xhtGO/z3ShCOMMsEFaHBpUI1hOJwxHzrS0DDCUnJxosqUjQCvN9e9qXnCZ+kc+sUHlHWNSD+3G4Gi3ZOdMRBQo7t52UpqB5OWXL/LWG7cB6PUaNLVdYK5KnUaaIBE0DoK0KH36I3j+UxVv/qYdzwdvXeejP/IxDvcOOHTEwpkoGDyXEQzsPb744VPs/6OI2XFBz8EZfBmRyIgiaxBuDZbLGiFrvv3aa3YMAiirAiGTDs50MPMYj30CP2ZjwwZSjapRlYef2vuuCp+kJ6AWPLjtOiYfGhD5IxpZUFWuYlnM8SLNycSZZyYh2c095vUhI9eSF2XKfFkRxgWjizZpWB7M6YuUkwNntnoxYLSe8e533+C5Z2yHTW6u27a+zHH5JIWqiHor6GBdawIvpqpyHp44o9hTZ9l4+AbHCjbaqu1RyXgMuUsYPnxaMJ8qDsIhu9i1/+Ez6xR7NdIzjHtO1vXqTcJgbqXIgWKp6a/FeKGCwMm1b+U891xA/dEBX/tn1kj19371A37gIwOmmV0boTdhfT1hXiSUsQ0ot7fOcOpsxP7eCS2n9ks/9SJf/8oeylWW4oGPaDY4v7XD4YlNVg8WU+aFplyUaOUqR0ITepLQEWbtAWIoygVtDz5JJJ4U5LldP0kYEKch82xOzyXjQkZE4QA/bshd0WAxzxmMA5rKPS9lhjaKJI4Qzgzw4sXL/MHXfp9vfv3tLgAzKuH+7YIf+AELL7x28012dx8RxQHp0FXcy4TlzMMXAcfHTnDFwSTaw6w9vH0/oGphRoFECq+DoICFE/m+j+8OIaUUCo0QhiRdQX6UUqS9mGMHPwkjn/5Qcva83dw/uHaXelFZlUl3wDS6JEkMhlUVXjX2/xMXiNf1kiDwbNDinvc0jairipdfepYit+vztT98h3//P/hLLHMLZ3rv6i0uPL/kta/tdkbmQirqwlA3GapxwbFS1Kpi4OBERQH9/pjFdNGJObz6iZd5/9pdmqZZVbgDD6Xs4QoQhoGtYKYpde3gkllJmsTUVU3uiNpBkABlFzwu8wI/9Cn1MVs7du0f7R9jZI4XCIQzivWN4a23rlMd2XE6fS7h1Jkt3vhqxuzEBkm/9msP2Nj2rCE4gEmpa8H6ZkDftGdKzsbaFd5964jQdWjDCL7y1d/kpRet4Evc32b37gleGKyquNiqruCPmKuWOYGzxKirAq0jXn755W5fHK759OZbhEGD5zl0AJJEig5iBRLP952Es73PKA6pm/IJE1HPs6I7rcBM29Xp94fdvEwmE9Ik7RIr+7qGPC+Zz+0510tiwlDi+YIkdLDSuqCpNFo33bxrTScH335fY8wTktG9fspw3GOwvtMF7LPFlFdefoHd+zYJzBcKQchyedIZdpeVQXqCui5Johae2waWq58f7xjAk2bO7Ty00Kg2mMwy+5zNi/0u+U/jlCiJyZceixM7xoNhjPIOyebu50GMbHyr8urgRIOx5tbta6Q9gXRiNVWlGPSG7GyfdQOskTJj81TCpz9jrRGuvvk6hwcKz18lh2W5tIUq181uChvEW2EEJ40eBlRVRfiYbLbtnqsn4Jm9Xso8Wz4W+Frl2zbAbZwceFUXKLfHWoizg1e5sVtmiyeUBB830G5FvPb395lfL4l78jHjXcVsatXbAISJmU3neEjW1sbuPm2xSkr5WKfKdAbtwGMIiMdhp4Ig8FHqye5Omqbd/p3neWem3CWdQvJod4/ReNBZaRT5kjCSGCekoLSmzHKXtK6gdUEQYMSqKxjHcSeMAjYBawUdHu+m9Xo9hBCdQEZZlyT+qtujlKLRCqVqhFkZYS+zqVVGdM0FK87h05T2NXlWEEZ2LVZ
u39fKODjlqsjV3vvjYidaa4SxhdB2vVjoYNPdk+8HeL4gchYOj3dP2/GM4xghzWNjEnXGxo93t9rCYmsZEccxZVnST21Ce+3G2wjTQ6m6gyYLKdBG4YWStpsVSI9a1SuJ/CCgWGZsbG90tiDLmS2AeEJ23dBuL3CvmU1zNrdPcZw1JJENgv7mf/aLJF5B0tf4xsby1crz/I+9ngpaPL2eXk+vp9fT6+n19Hp6Pb2eXk+vp9f/B9f3RecqDPpsn7adgePsmyQYwlzhunSEQ8GyMpR5Rdiyf2WIJ2qUajtES+IwQuiaVtNReAJda4zyoDXeDRp6vQjd2Iz06N59+oMBvpFo7O+ioE+xkAx6a6yt24psnWWU2Qmndmy16WB/l0bOODo2xL7N2E/2S86cDhjEm1z9jq3yXbwU4msfEVsoTyDB92A8khTOKVY1Jb1Bj6YucD6KFDPJbKYo3rWVlsOdBaMXI9YHfeqhw8Lfr8jmBf2tlH4riX1YcOfW7U68QumKIASlSxxCBRXVqBwCz+bW/Z5PXQh6o4SNS65zdeMWb79+m5d+6GM8d8FCle7dvwVRzb25ve/lzUPOfHzAnT88IS8cObjWNE1F0utRm1aoYUFR9ti/acUHRkmMqgcovcfFFyz3rbc+5P1vX+NhtdeRTh/tHdHvDRG0eO4G3Wj2d+f0I/sab94jm83YOb3RzXsQBFRm2Ukl95KYyPMJs6oj8PeimsDXVNOQ4oEztDzxWaYN3sh2Cy5fPs/dw2+xti15/11bRU0GA3qnKrY3BhweudY9vpW2Dd3S9EB6Gt+XbG9bCN7+/ZLycMEyDnCFOZI8IDOSviO4f+ce7GwGXLv3AYEz/n33OOD4pGBtTXZEyqQvmD6acO6yrdR7MTSlYnE0Rbk13O/F3L69RA8betsODhqs04gDmpbAm2yy86xg8e4ET9oK1HRxyPJ6jO+H1Maqsnzr6/fRRhC6xSkaj43BgKbK8F0leTQMCQYNN24uMawqpFrQ4fiFqJFYT6PESah7XsTJybR7n9kiJ+6lhGFM7sRA6kox7GnKykNI1yH1BiRJxs4zdj+4cVXSH8TM50dddfL27duoRtMfNV1V+7e//FX8SHP1xrsAFGXFcHia2WxG6CS4dakIRUSaeBwUq++ygi5aztXh4RFREjMeuwk1jcWUVys4ClpTlyX6MXiKQlIrxWTyeFfMdnMaV52cz+f0ej3uP3zkfs4YxSmBr8md/K4RAVXVYFhxmAaDEVVRcnRgO2AH+pjBYMCHPvQsj3Yt3/Av/qW/yGhjg0V22IlOvPfu3+bVlz/BP/2V/82uu3e/TTo6S5k3HYE/CAKq3Ep1txXspB9jVMjBvq28ejIkkBVVWbPuOtA726fY3ZvR6yVcv37dfWdBVdUdcdxzVX0hFK5AyvSkIU19grhEadf5aySeJ6gXFkI26CWE8Ygsm/GRF54B4Hfu7ZEGMS+9vMmBg8Cl45CTe/e4cNpCTa5df0T2+j2auSZMXcW1VBgjO/5R3VQ0SpNnIR979TOANWB941vvMD05YrTWCjA09HoeFbabjrdgtJHy6OGUrVMW+tU0NXlWui6i3b9tVyfAFcWRnuHk5IQvfulHeff9bwDwyisf4cY7BbduvM7mlnt7IVBKdIIBUgjL7dCGCxcuARbCeXBwQJYvuypx0zQkvZQ8azsRmqaxXk6t/HUSJ7bqrlUnae55ElWVpE58JM9zoliwWDS0Heh+f0BT12hVPfZ5+gmY0OOyyMp9XjYvWE5y1scbNE4fpF5omsJajYDtTkxOrChL5sSNjLHwvjgOu7W4vb3NbLoA0xrcruCY7WWMIY2tqbd2EEprtBx3r5nNZuRFxgvPP8fcGWjvPXpAnAji2LSNIxazDM9LiGMH/VQeUjghjdyO3drmGnFvThhIlkv73A76Gs9POcns2iyPj4ijPhcvB9y7aUWK7t65y/b2GZaFMz8PBNlSoVVIXbXzIhgMBpaH5DoPxgi8cCXaYZT9/r4freCEnvX6iaKIxaKVrbZw7XaPK4qCyA8wjXjCUFdryzlq114YRCjdcHxkO611XRPHMWnaX3V2dMPaxpDFYtbtw54XkkQxlYOnCaNIQmsNsZKHt15J/X6/m/cwCKiaqrunVlxBKd114YxRGOM/YQugdWvqvhL1mEwmFhab5d06GI0GLBYLjpx9wGBovQFb+X3PC22HBAjcPmHQNNoiGUZDexZUdenEGlb9C8stirrOilKKfJkTJRHjsY1n5tmSSlVPvEbgUVWaJLWft7W1wXI2xRjZ8RIfF1Bq57i9Hofl2WdmxVkry5ow9Lr91/c1VeE63u17CIXnSYRoBWAahLA2Cx31o1LMF9MnnqW6rvGF7ODLFj2m0dpQutg3jhMnZR91a280GjGdTllm9gwbDteZLSdoUSC8FvGSUBQLGrPqWFZNgx96HXyyrmsabaXvW6Zd4Nt7bRrBeM3GtUVRUOiCJG0FOzTKV8zmS0axjd++8MNn8GTFb/7u2/TPOyPt/we9iu+L5KosSrY2LfZf3cqYXrvKcC1lvrTBXVmW+DKhmIR4qYMc+TmybqhrB9MJoFAlsTMxBQjDPst6RugZmqoluUmaxlA5EYPy+IgiWRIchexcshyvrFjihYp8OSIO7eaWjCE7iblx38KgYteeH28Myab2wdveMjRFzKMTwSixGOSj3Ypzzy4YuGAyTD0mxx6eKDG1U9PrG04Oc+oi4cx55xchStYGKfMjO5HTPcPhQ8PWSCMuOMhPYg1TQw1b2/YBOn7k1F7ccvIjMMJHCENd2UUfJktMArVrXAoKS+ZTMNi2f3e4HzAMG+58931eeuknAMjPxeSLu4zXWkfzIfffnJCYCKNa2FOFNh4KTUjrDVGTVwt2ztmH5/ZVzdnz54mkInEBSjyU1FXJxrmUQNpTNkkBb4Lvko+tMw1HDzSXn9vg4NCOy+Rgl6Q34MbNR4Qd/GVO2t/oIA5BWKGkjxdnnLgEzMtjdFNRNA0/8KLlJN24focme8TZz9igMFcjZouEcLhOHdpDaHF/CWaHhl0Cd6gXTYUgpnQqVYOBBK3Jl5IHtx0/TqW8fbJkuLHJpLHQq9oYhIaJO5jEIMEkPV554UXe/cCus+PjAp1pjvN7eOHQrf0UT0r2Hs3d2A2Jdch074BG2bWoBgPieo2bt+/x7Dn7fV771m1+6IuvYObWf+TNG4dUeUK28AkctMbIjKJRqEyw5jafh3dLxqNB56VyZVuQNvtM5pKsset1UmtOsilKZx3MqaklUqza7YYa5RR8HKScPK9QSnYQ0vHaNicnS0BQ1i15PWQ5zZChT5vfFJlhMCoRrV9OXlMrTeCnjNfti/rDhigZcv/uIX/mT/8MAM9/6BL/zS/+d0S+gyX1UuoqwvMC5ksnOhMPyfOc2WROHLTkcRvAtIeScibe/X6fzAVNdVOsAhTHKbH+JCs+gFENBuN8slyUpqyHSK1VF9hjJGUO0sFmkjgh9EPKfIZwBxpCYhQkvSHGEZmbxpKGBTZZ1Uqxvr7Bzs4Wn/q89XM6mCw4f+Ei/9
V/8bcoHPf07LmL3Lp3SOFgdEJprr21oKkbGmMPwmIhMV5JHPS7BBrjkS1zUgfhBE1TlYzXBqSxLRD881/7MkHkvq9aCYSkvdUYVKUd30ZVHWytqhoW84z+SHZJoNFWsGOpWnU0TVgfIZuaN75u1dg+/6M/ws2DqxThmG2nAHt0+JDqxHCnstw7E8VkJwW6gl5q1/msahwJ2gZIh4fHhL5VKnznHesp99GPvMrJ4ZzxOKIqHXw49Fkf73Djqk1ef/jzr/K9t24i/UUX5LTGqk2jnvCG0lp3XB8ZCGYnNe9+d58XPmYFV/b3Ztx/eMCzL/Whsn+Xpil3795/wn9IyNBxr+xaPL2+QdM0LJdLjIO79XrWY6sVd/L9ECkD8rxkbcMmgdPplHm2RCnVQemrUuPHXsedHI/t/BtjOoWvxbxAiAApTaeG5vsrTo9dm0237v3YPg+j0YjZ5ITD++A5OO8g7nO0f0CL+DdkKBXSVEuCyK3zKsD3UvI8Z9C382XVAbOu0OA5P6DHA01jjPXyMgYpW4GNXgddghUMajo7onQFHt/3mU9rBsMUXAFLCsBUHYQMGSGksbBIbb/LB+8d8+kf3mI+n7N40CbjYz76iWepsHv8V3+jYGu8Tjn3eHTPngX9ZJsiyzHa+SQuSqI0oNf3SSPH/5U+u7sP2N7eZuIg8HVdYdRKoS1oY6EkZeCUeSeTifWVC/wu8K1r68tk3YrAi5MumWqTHc/zmE0n1kxatl5GrVDISgygaRrKokJ6re+jR62mFsrlxkppjad7RLF7/guB1oIoSla+lmtrFgYdeJTubJ/PZ4TxKkFpBYU8T/5ranaj0aBbC5PJhMlk0s3zeDxmZ2eHft8au7drY3190xYF3f4i0dRN2Z31CGG9paTs3tsgCX2v85UDkJ7oYLBg97umaej1ek9yrhqNF3g0riAwHPaZzWZdDOvhU5Y1UTjo7qnIa6QMWMyLrhClGk0Y+OD25SS1BQSlmu4e2rmMoqjjRYZhjO/FGLcnRGEPVbfCIHbMozhwCdXjiZs1bW6hvPNZzs/87J/htde+zcOHjosW96jqovu+o9GQPG9hgSulzlatsFW4XCwWCOFx5ODaUgxBaDTLTtHPCM2Zc2OqSrG/b++hN+qBNh1dYjKZgIC6WRVYillGEPr4YcDEQa+DMER6sms+eL7H4jhjYzCics/6Zz/7WW7v3kCr18gWDrJpXDHt/+L6f6UWKIS4DcyxM9oYYz4phFgH/glwCbgN/DljzMn/3fvI2DfP/4BNrrJ5QFzfYj6fk1R2M6h1Ta0ailp21UJtCgQeZdlWjYTjQEg84TDenmaRVSihyQ/tBvEzf2HEX/mPXmLiVI6CRjJc3+Jv/PX3+M537WCdfm4TPzJIBnihrWDUZoKUmspVsgWaqgIvkORz+3lrm4LpboRfliTGblrpek5ydoGX29dsPBvQ6y+5e1uwOHEBl4CwF9KUA770Y1Yu+df/xdcYnT9E1Q7TqkriQOFteTSOd+KJhAfvhnz+T8fcuWE3pO98S3HxfEzo5GnrYEkcQzmHOzfsw3LuvKusN64TIStEaGhkxdARqW+8FjC7rxBG87EfsUHZzkfOc3ww5dEDKwXdTw35BLL9jJM9R+6WPiaEyqiO9B5EPrUwBG6DzOY5pk5sFcuzCUmLUfa8il7kkj6vwmONqml5KFb95tLzKY8e2DG4f7MgiDRVIUhim3xECdR1ydbmOTtOvs/D3Q94/qNX+MAZG0+nc849s43SHsZJ6UZNwdF8zprrTh4dHBOkkqPDBZE7KEo/Zn3Lp9w9Iu7ZTaPpGfLCw3fV9bRXI0KgCRhFNmg5vf1xvvHGVzh/6flOVfDh7gOyecXpbVuSNqOGcn/KYHSO/Zm9z+WjOevDU6yfGvPgjh13Lwo5d+osjcPjT08eoksNZcLCKWcRbCLyY3Qa8gmX7Kx9+DN8+nOv8D/+3f8WgIMHms21AcZfkGf2+1189hSHd2eYZgCJq7SOA6r+EOnZZ2Zcb5IX0Du/Se0qqw+v30LonGwZk/quClcs0B40rkpFo/CR4MtWBRVfNGAkxnXlTB3h+5rtcconNu1B+K07KcteSYDPYmp/t7HVp6oTjOtcB8mcRk2YH0cMx/ZZP9grqaqKc+d3uHL5JQDuPbjL3Xu3GazZe+r1tphPJVl53PEGjFlitI9uyk4h0eLzZ48FyxojBefPn+fQGZtny5I4tgFWmbWE5AaB1wl/aK2RgS12tMabOHJ1EASdcpUMQppaMBzaPbColwgCoiDpVKpuXn8PIUp60XZnMNvohigKcE1pVNFw5cIznBwesTuxybg2Po0p6I1CamfmOBoNAJ9dZ0sg05JRP6VqMsZrdp9YTKGujE2etSOme4r6Mel5mzAoLl+82CllPXr0iKq0SUVRF914BoHfJWlKOdVFE2HagC1QGBRnTp+jaux9ZfmcugwYDmxAPZ3O/0/23ivYs+S+7/v0yeEfb74TdnZ2ZxbAYrEBWC5ILkFQBIOkkkmJFmmbEosuWRRt07TLssuhVI4PLpdl602ssiyrig+iaPBBJESKASIIgFgARNq82DT53pkb//nkc7r90H3OnZFouiySLrk8/bTz3/s//xP6dP/CN5BmAizFD/zQdwEwOT3h5W++xebaBv662UOyAktZHN410s+uYDiK8ByLuKfn3nRyQraMuHhJk9m3trf5/Bd+l8G6S5aZ516FBH2B41idPLNUFh/6wNNaSRHIkiV5tsSyK5ZGWcqxfZpG843aIEWqCiHAMvuV3/PJ50suXHia7/rzTwBwsnwPu/SJ++eouj1ryJe/+NvUjSnciBFZXiGoEKbY4QcRlpAISlJjqun7AVV9FiwnqxW2E5DnZ1LFtm0zHA5pmoaDgxPzPdvwINqkycIzJsdtUFbXmID6jHuhE6kz4Yj7OYuO4dppKW8jf2w6pEVV4DoBnquf3SpZdkqdbaezaZqOF9aR+mv9W+o+TousmwdkuvO8wPEswwdpK/qSrKhRUj8Hz9VJ26OXdpkt9TM+PVnQ7/epyzMSv5QSWfv4/lniv1rNePLDV5nPDIJg0Gcyvcd0espHP6Hn52gU8Na3X2V+rNfvKrWo6imeG3U8pTJrtDBVpeOP0Nsl7mneZ2mKXHf2bmBbNp/4xCe5dk0XzGazGYGnBSUAo6asWCarrqOgJJRNHuqG1AAAIABJREFU/YAxrFbYs/GdlheWdwWkTm2yLLuOWNu58n0Xqc7U3/r9PkWh192NjTUzzxYUpaSpJcIkYZZl5kJnVKvfKT0PDaKm1yNNF8im7BKZqmiM2qQprpjkIfL8LqguyxIsaOozIZNVmhh577b7s0kcx9iWy3xpFISX2sTdD0pqk2CUWYHlnCUIjuOzTDLD0TNFCs8lCLSdQbvmtR2d9l1reYaBfyYM08q8H58cdX/fGg+3z8WyICsLY+VxJjphWRZ1XbO5sdV9bzKZPNDxahUg24TScXQXWaMb9LtcFEXXadTDIi8KELIzKI76Ea4SZxLynoW7zAi2xswW+rO/9P0/Sryl+Ee/9BmUE
RKzXIVt5dhGbwBRmN8LUUb9ta4ljS20cmWlry8ORghXEYb6ONPlinAEVW2DKfRFcc100tAbxOSGCz4ejJlPFyzMPieE0B3RMOyUCJXVitnUnWmx77tYzv1qjA2VKonCIaXhsOV5Stx3cFzVvVtSSpY35Z+OFLtJrp5XyuCH9Gf/EzBRSv2PQoj/Ahgrpf7zP+o4vY1I/dm/ql3jb79bk+Y1yd4eeaoD9lxC4IfkC0nPyNF6fkNRZlTlmQO2SbS7BbjKHGoahFcxvas/+1/+9w1+5Kci9u+aRV4lnL/wFP/Jz9zjV39VL6RblyyiqEdTeTTCGE1xjGtph2uAurHxnJz5FIQhB65tuGSJQ3Lk4puAsn8uJ97y8MyilNqSjeGYUu7wkQ/rTfxzn/s8btzgOg7VQlelJkcR4dYxuxcNdNCSVIlEuDWjoV7sJong1d8uePQJh8GGhsQcL/bJ5wm75wzZeiDw7BoPi9e/oT87f6FhOHaYG3iKOwCrCpCp6CTkU+Xy/u8peoWH1deb+HN/4ROkVUPPJG6vffsl1i8pTl8NKabGAyUpkI5DGHtY6N8rqhQ7cBmM9Eu2mC2pshTV0Pl/rZYZkamQrK3pTUCKFbYt6EWta3zAcjmnqAqk8abxXR/Hq8mzitIkCJtbI3Z3d/naH2jp2+3tTUprxTx1cY3+fBRBrWyUEPR9A20LLNK84sYtnaBcvXiV2pW8/c57nL+iu00iiFjObuOUFpmBR4YjB8IUE9/iBoLhGLKDkBdf/AQAt26seO/GWyihCwMAdSXxPadTc/KDADuokFZNletrtpqcQWgzWhuTmLl4fLDAloJyYspWWzHkBbZ0wZA0sWOSdMJA9egb0umP/bWf5dbLX+H3v/WSnq/rA4QrEcLHaowcvDrH/vKARmZgFCjdYU3WVPRiTTAtlzmlbeH2AwIjbFAcHxA7ayyWU1ZG7Sl0XYqm7jZkz9GV+rKuiYz6h+2NKas5tmOCbhkja0XUE/yZD+l78Oo7SxYOpKscYQKg/kixXGVsGnUtxILlqcNy4eD5piIdemxtbVGVisVSH39v7w6PX7nEx1+8DMD/8cufZ2Mrwo8aDvf0eQaBh0WFLfqdjHxZlqyvjzvoQgvFKepKe2agoVGOYzOfzzFLELbQZOx2Ia9kg+cGWDbYJuGqmxIhBHHU74jpZVnSqDNy9+l0gu8H/Mxf/1l+5dOfBmB6eoDrCZrCpTawF9dzdAJjOkRrwxGe7TGK+8yMQsn1m7dY2xrh+Q6N2eSKXFGVNbYRV7n4yFW8oOHw8LjzRClKLWTRi9aZLTRc0RLuA54vLRHeRnSdDtu2sYRLkiddRbb1fWrFTkDfJ9nYIHWAYjsK2/KJooi4b+ZZkZGngjayTtICsCibkief1HtIli+5ffs2w96Yixd1oWSZLlmlS2YLfeydnW1UU3Fx6wJH967pe97ULJdn0vq2B5UsqWqBLYwIBDVCSISluqqtH/RoyjNJ7jD0CXybPKsfqLDHvfDMl8Z81uv1KHP9XivbJXRdprOEP/9XfxiA8497XH/7gOPJlLHxEqsWLu+89g6Dof73dLbAcjwWsxlPfuApfQ+KlMnpEUJUmOUFJQXNfUoVsmlAuFiW1QVORVEzHg+I47iDrerERnTwuzTNcRxYW1vrlDmrqjlTE7wPCqUlzU0X1Uhje57XqZo1tWI4HBP3fN6/rruKzzzzQZaLlONjUyhSgkbWJqjUH7WQq1YBUc897Wt0P/Srqc4SgvYz1w9AlJ3HWt1oEYjA7A1JskQpQeD0O2hkTYlju/TjkMyoQsraRuB087zIS+I45sVPPM/Xv6b3nlWaEUdDprMjLKftoAnqKsVz9F7vuj6WrWFt4zVT8CTn+CBjY1ffp9mJQ5aWeL5gtWhFIQRh6LJcJmdqgY5nRH70+z8Y9jQEUqkHuohZWTwQxLuuq5Oo5uw+VVVligHmne3+XnYqjFVVEMVhNzfaRL3X63UCKLZlURY1cTzQgTsQxSFKKVam+CCEjefbWJYkWRkJ/sE2VVWgVHLmKSlC/MDuFKFnS/0sQidgYDof0/mEqq5xrDPxj/tVIgHW1jY6IYfSwORbb6+8mDOIdWK4XC6p1aS7byh9n7K87O5n3B/oLl+juv3Bso3vVKtS7Xmd2myrDCil7KTJ28DeskXXeWvPCc5U/PQ9b2Xiwwdk3bU4hr4vWZZoyXjnzI9TwyTrB3zKhAVFVnXrcFXrjqFlWcznJiZAYHuq65xVDgx7fepE8uILLwKwv5hx7bW3wUkRprEQ+DZVXlBLU9wJe1pm3g6xTAwEFnmj949WHAdlEcQOM5PcBa7Ecwq2Ho25fajnVCUtfHySA1DGrsBzGqII5gv9N00JrqM94FrLJtkY2oZFV/AMQ+0HWJvWVRRFLOYJjmN14imO41A1RmreFKuDwGN6Lfu/TK7+NAQtfhT4RfPfvwj8xT+F33g4Ho6H4+F4OB6Oh+PheDgejofj4fhXavxxOVcK+B2hcQ7/q1Lq7wHbSql75v8fANt/2BeFEH8D+BsAXmwRxDrPOz28ydalq3iyz/EdXZVYK0KWtcawLua6wjYcG9yngVRUTYnn6CpQ3bRl4wZb1XhWjCd0hWTD+RTWPKHnvgdAtBZiOSlHs7v4xu8k8NbxrAIVlJjuPoFbo5YVrunauLGFKyEpXBpp3LQzC0/1YVyQ3TKtyUCQlzmF0cSvg4i941N2Hg9Z5MZtOnSRqoAGLNMEFI3HWjzAdgzBlBI3FFi5z/yaPtbaVcVf/smneeWfHTKpNaRxtNlQGEEFgLhvU1WCMlkjHOqMfrIPWxckroGQ2LWN7VhUUUpiKldbWz1uMuHi1Uv8/N/8eQD+h7/z33Dz+jHf/ckfBOCDT36cu4tvYHk582Xb0g2BijwtaM26+yPFbK64c1dDyEQNlx89R+A63Lqup8rmRkyaLHAs0RmEulafoqiIhrort7e3x9pWQChdFjMDZ7AkQlXYVkm/r5/DyfERtpKMB4bkWyfYAx+Vwsamrm6l6QrfHRD3A+7cuKm/V9usbY3a4h2rJmVnewc/8Jnu6fvbOD5rG30cz+H0QF9PNOzh2Vso11Q0OWV65JAdwTPPaCjIYvYtyjcbyirtJPgF0ItcsrQl3aYs9hukkFRMzPMLeezcNm+/e4OLV/S5R1FEOilxYgPlSV1yJdg+F5HM9HyZThIiNyRLSxLTRf30L/19LpR7fNxUt5twm4P3vsDdYszSdCeq4gbrj+9iWUOGtBA1h9NZQnlXH+fKBy4xawpmywWh4QjVhGSJRaACrHVT1ZytiP2A1Hho2ZaDchSe62C3xWR1jMBBGUhA0JfIUnA6S/i1r+vOQH+th6gExm5Dn3sN29t9JidGhMYLiMKYyfSI//Jv/vcAvPzyy3zlK1/h/IUdTqZ6nm3v9jmZHvHr/0TDbcbjAVIlIH1cX59nltb0oog0S7qKnlIN0+m0w4V3pp1FycgIsIRBxNHpsZbgNbCpMPApiqL7e9lAIbUpo+e0HiE2AmEc7fX1+X5ImuccHOgOURzHpEnOL/zCL4A6
807KsgKJ20HNGiSbm9usjEHiyemUzc1N3r5xjR/6oR8C4IkPPcLXvvUqaVK0FiEMhzFHR8d4BlqzWq1YHswpyxzPdD49p0/RlKTZguFAdwyn0xlhGJIkxqPI93Xnojmr0DZ1Q1JmCOtMft5xLM37MNyN0AtZLhMNG/Fbj6CQurSZzWZ4fusvGGLbVQfJCf0IZVWIAlLTOVosEuI4pKoKhFGZcRwHVZTEptIr0xV5UVGOFbbhhi0O7hrel6lM9hzcRrGYW0gDi+j3A8N5utkhJFzbYvv8Jjffv6mfJ1q6XNF0sMcgckEUOK7qeBJRFFFX0BvojnCaLlHkWI7kxtt6H5gc29y9c8TmbkS/p9ez/eND/Eh1JP0gFORlSRj0mM/1Gu/4FmWVE4ZexwPBsqgrydqahiqnq4Tc2HP0jFjNYGAxn8/J8xQ3aEUSFEKq+6BRFlUlWSxW3Xy1bZs41jC+tjvXdgpaEQPtc+aglOw8F5umYrVacDrJ+NSnvsecw4Df+9wXO5l+lKCqpfHNOfPV0XC/hqpqPY/8zosJdNe4NJ2ytrOTpimIUnfrzLWX1RzHdphO9HMv8oooDlmVOc8bs/FXXnmFra2QslC4lu6GHh3vE8UC29Lr+fbWLqv0mHevv8mRgQpHcYASKefOnePk8I6+HhkTh+td9xahqEooakWS6PjG92rqpqIN0bIswbYDyipB0XLKo85vr4XurVYpQRB0nKssSyjKAsd1u8/a9ajthIDhXCG6TuRwOHyAM6Tv75mPWgsRFRbG49JA4iMto53lSwIjEGQL7X/ke363fh4dH+O4dudbVOQlVeFSVinf/wMvmmuZ88rLb2KLAYFZgxANXuR00uVC2LiWhjSmpvvadtIlDUK1XYZWCl2vU8fHh0RRj/X1dVbLaXfuWT5DqQbH1e+D4zioRlI3Z8duDd9twxtaLBZ63TPQStD2Gq7rdt3c+XxOGOpuXSsBblkWa2trzGaz7h11hUsj7+MJKtnJ0bdG142B9mo+ob7H6+ubJElClurr293dNdDeA7a29Pt+eHhPP8Omoqz0fRkMeyiZdAiNKA7Icu3R1wpoiEpRC5CeuSdFzWomCV2PP3hVd2gbShy7oG7sjj8t8bB8Sb3Sz1ilgqYWBIFFYxloclljOT5OWWCbDlu8tsZJMSXs63vQH2ihkdFYsH9bP4d0EeCtV1x+7lHSyap7pnku2Nw88yQ7OZ7TNCCs2MzXFFAIYeG1Rs225mS5nTVDie85D3A1ARxLEg+irjuY5hV/1PjjJlffo5TaF0JsAZ8VQrx9//9USinR7vr/3DCJ2N8DGO+uqcLTC9Zz37/JwWFNU4RUQgcItsxB2Ni2Q1Xeh9WWeXc827ZASKqqoTA8At/1wHYoatVygfn0r70MFyysnvGmeg+ufLAhSwZ4ob5Zy9WEStkot8QZ6yAsX8WMCZidGKjCMCK2+ww2cvIWKliE1M0KPI9oaBIuN0dVdO3bRiaEwsZb9Xjr7VvmZkjiDYmsLYRJMi98uKaw5ngGHlb5Na7tUVYV0pCIb92pOXBusf6UxdQEN8L6MJcfV9y4oSd9mPV5/5UVi70pO7v6e8nS5Wi/YusRHbDkxYSjOw7D9Q0GY93CLesJux/Z4GMf/gC/+ulfB2BxInEFfO13vwjA8596jqYYMYx97hT6WkrhapibU6AMqR7hE0XQM7CWxTTh4O6c8WDEaGjMQIs5YRhhNy6nh6a1bikGY4t7B7f195ZzpJDYVoQ0cLRKVUi3xrLoSJJBEJKWFUHP8C3qAmHbRD3JzZv6WFJGxL2c0+kEpQxEza3Ii4LaCDAcLWdESR8ha2xjqFulFatTl6rM8IMzPHeTLwkMiXm8scGFKxsMn7nMYm44F45DU+esjcYd0bfIU6rG6V7qctWw4wc8/fHvJh/o5/flz97h9g2Hi4+eIzJt61mWc+Gyz9vX9PuxHfgkOdjVGD/Wn0VyimNBGkf4htd25+1D/srP/Dg//GM/AMDP/dR/xkc/VLO2OOablZ4Lu7u77L+2T2+8jtjW53V6NyEIFdNYn/ebd69z4dJjLA5zatfwMEbrpCdTNv0x0nCzaqQOcNuAtqqxXJuirvHbpUcKHMAxcyVLJZGyWQ8U/mhHz89VCraGwbTO9bYVUOR5pyiarSQf/sgavaHHb//27wJw7do1Dg9PdVAWGk+3ZQ6WS1mY97FWeKEgqaEX63etFhGOC55jU9atv5I2ET5TULPNBm91Xli9XkngeSag1N9LkoQwjMnLs3mgaolQredIq2gGWklJX09dF9j3mQ8LYdPv9WhUjYXx+qtryrIiCM428fXxmDLPaYMdx3M5OjmmKBqmc8MfmU6MCIeNMte3uTXi0qPnOT3V/z46OiKMXPq9MTODYbedkqaGQX+Nc+c072s6XZDnOdvb2931rq2t6Y3t8Mjclx5KaEGTNgl77PGLLJZLVgZSKX1JkZd4novjtGIZFgqJZcsuKBoMhgir7iDgtmPjBQENDdeMEuEn/8yLKNXwxc9/lTu3dEBrC8nG1iZ39vU5TRZL6qLklW+9zOVH9LU4rk7gWw+vdFHgxw7j4aATO2kKQUrKYDDq5kKWZcxOJ/T6BrZXlJqkXbtdNaCpFVWp4T8tH7ZpSiQNhQma4l5IvkyIPJt3vvUGAH4guHh+g3Q/Z4GGJruWJIpKWj2UqilxhKIRDSenuoigBEShjxA2cayTR6kE02xOZoR3pNTzuIV/gQ6q4zjuCPjt/HQch9Sofrq2jevYoGzsVjoP1QXt7Zzt9/uURUWanSXerutquJJZA0ejEdPplKtPPMGGkUP8zd/8TVBOJxTRKFBomFXLU2z5V7YRjAFYLhL8+6BYaZp2fnRt8JokOb50CQK7g6iNxxvMZgvW1vX7v7E55r333idfObz+Le0RaNFw+9qU/nDEpcd08JbUCzzfYXGy6M6pETV7t0/pG+i+Y0Wslhm9nsRvvecKiWU5XcBu24o0LfT89/U5KBmgrBV3bpv1x+5RVRkIu0swLZNkKXUGURXC1gG8gY61xY6qKbtg0bZt+lFMWuS0+Z1lC4oy72CXRZGZZ6k6I+yqEh0HS6rCPHWJ650Foe37HffCThClqirqRiEsp1OAfeGFF3jjzde78w5Cj0uP9Xj9tRVNrQs3TWMRRGOqrMEycGWpMuo6PIPRKS1goKRkOtMx3XDch1rDSAPD26nqkjQ9g4dqbmjNnTu3sM3+6Po7yMbFsiWWYzYWe4XrVFSpKWgpiVKa79QK37huRVnWNKrhfgjldDrtBEP6/T6O43TX2w4hxAMKglK2yWH7rAR1XWBZZzwuzY9zzTM+gw3med7Be6fTKYPBiNFoDc9A8EejNdJ0xWg0QJg5tFjOkFbFxpqe09PJAiFc8lR1fqpVmSIsj57h3lJP6G/5LGZTFuY2+QQIp6bMbKLA7H1ZhqBmbUevW0l9xKVHBxxdF0gD019zAuqmwQ4EraJEOp0SWlCv9HNJCSn8hr3bS6ShmjgypVrC9dX7nNvWxU3Lbigri/nUUAwcQa8XkzsFheGd2rYAy5g
nt3zfWs/xMm3zCYGeXh6FSaAsF+LYJ69ylNKfRVGfhAef5wPP9o/DuXrgQEL8t8AK+Bng+5RS94QQu8DnlVIf+KO+29tYV1c/+VFAb1THt5bk2SkjzyxaxRK3Ccgq1amMrW/5OF5BlpgFq5QoSlB2y+LEUhJLBNRqycywwv7iT2zwn/7PFlmhk59VJhmtR/y7f3nOyUIfazh02Xt/zvqORJrFvFAJ48UGzz33EQC+fXCLNJ3iuDNagkWz7OH2N3jrvUMujfSL7u+4rBYFTovTjGxCCdVsi2yl7/08nbDzWIwTJjR5K1niI0KJbYj+IrbwbQg8j733TBIoJEEAs9MSIwREo0Y0RdIptDkuJEc19YkN9X1ynZ7DY8/qSfnY05JvvwTXXp/y7Iv6RRheXRH0Nnmi/h5+4x/+KgCXnrrC6zf2Oyx16ISQ2ljDeac+c2F3m+UyZZEsUUIfv5EZrkdHGK6LkkG8wWw26zDmiAarAUvogBwgzwSuK8jz1qXexvUsmqa6T1bWBZERRl6nKDZfFPj3E5Ytm0Z4SMWZ6bQryIs5NAGi1hW9nUs+tqi5fd1gjYVLL14nqabUyrycTUhT23hejmUSAttzCQeKnsHsnxwt+eT3/gibgyeQnu483Ll5zEtf/XXCYNRt6mHfJy8qgljfg3Sy5NzGFiuv4Onv0wv3+18/ZjjqcbRo2FjXycb07tv0RiGT2/r3z21usnd8CxoHYZQMPvAdFkd7FpMbc3KzYfpuydXLT3HrLb0BPhPu8eRzBYvE45980/DchoLNccX43Ee4fV0Tfbd7NnvJKZkhxlaFxm37IiAzSkvC6yHlisdHl9k/MkWDWlJJRdPt2JI0z3F9DxNbIYWHVWWI1tzRtXHqhhc/epHXr+mkJZ2uEE5NVSrWNvRE7wXbzJf7BKFRC8wLHCvmeLLAM4IaH33+Q8i64RvffIXYVMGSVcFwOKK/pv99cHDKcDDEImY21dcbBzvkxQolC4JIz43FYmGq30ZByfdZLBadvC7AsD9glSZU93EbLHSluE2gi6oi9nR113bOjoX5u3Y91pVo2c1p4bh4nkPTnBnxBkFIkmQM+1EX1Oxu7zCbzTtxjtl8jue7pHneGRlHYZ+0SLFF3W3YYeDy2ONP8LWva8U9x8toGkGWll3RQghdJS/Lsqtwaxl59cA1tHyNtpquifMCaIhigzSoc10ZN8W/LKt4/PLj7N/bx7Zbc06J7SgcV7Fa6jVgd2cHNyg6XkZd11S1Q1lquef2WVki0IFLaBKLutAdP1MAaWRBVeQ0heTjL2ixk/2DO5yczHBsXWgYDEOE7dDvDTg+1UnaZJpTyULzqoyqYZ7noKxOXlwHoC6rZNYl0GEQU9eNSYhb3omH7/tkZs13REngDyiytOt0Oo7Hh5/d4Nuv3mOeGqEmIYlChbBahbEhdV3rrow643OMRiPSVUbTWlIM+symC2wzh7MsY3NrjTRNHyDil6UOxNvOVdNoPpXnthLLNU1ZEYYxtanC+4HXcVba5MZ1vM58u50bCEUQBIyGxrg9zRkOh6YbqAtfmtRvI03Hq1YSpSQWZxYASjXY93G79O+FmtNpuBpN03QGwWf30wHLpt+PGQ71cz4+nXJ6MuX7f0B3zrZ3NvjsZz/Lv/nTP86v/KPfAuDwzjGjwYDVatZxylw7oBd7lMZNfjW32L0UM51Ou/sZ+EMcZ06a0BH0PTeiyOuug+FY2p7i3PnznXXHH3z9ZYJIdPMVHDzPoqk0x1B/z6VROU2j7jOKPuNCgV5HVqsFURx2nauyLKmNRH1zH4dUc+H0utgmplEUdetNVdVYlk4GRoYX3YpXeEbRt660SIbr2V1Cu765yXQ6xbIcXdxC8/WuXLnCtZu6IHJ6ekoUuFT3cZmxtehMUWa4RmjDUjarbMnAFMKEspBNpbuh3pnlQFXVKKG6a26VD+83k1bG5Lrf18lcWSXYlkdZ1kSGx5NkJ2xux8yO9fxJiiWe5zEajbpEqWkaqkqrZ7ZrnhB2x6kCjTzI89zI4OtnpdcN0Z2bfl4Brut2IhS6OBLjOFb3WXsd93Mby7I2aoSh+RtAWYRh3CWiSbLEdgRNU/GBD2v0ShT6vPHGq908qEq9VrfJGsBg0yZZ1Qxj/cw3d3xOvJL6nkROTBLoRKzqOb0wwLH18yvzhLoaEpm9tn/eorAX7L8iCC39N+N+TFFXVFWBa4QwXBHihDYYRd/SscjtKWubF5jc04F8UyY0hd5HR+uGD1tBllcdms1xHPxAEMaCutH37mi/MkUD6PfCbi4kad49u6Ko8AIby1bdvatriTTFjF7f2DpZNafvln/ynCshRCyE6Lf/DfwQ8AbwGeCnzZ/9NPBr/7K/8XA8HA/Hw/FwPBwPx8PxcDwcD8fD8f+V8ceBBW4D/9hUpRzgl5RSvyWE+DrwaSHEvwPcAn7i/+5AVZaytWbU9OYNSb5gtB0QmcrV8b0lRVNQ1yClwSCnOaPQwbLO+AfCYHtbyAiVUZPy/c4f63NfOOHfnj9OluvOQOzdRVVrpMkJtvFX2rujeOr5jzJdvMny1MjIhhLlVjz/g7oi9PYvVZzOCvprJVWiM+Cnrz7Kwe2GgYwQQnc/mrzA921WpqppNTWeiNk6P2Tvpu5qVLOIO9csLl52CY1Md7508e2CwvBqlicNF3d75EXD4lBn6kd7cz7ywhab50tWBmu7qmY00kdhWt2BTX/N4eTAJTAeIR6Q5RbvfVV3Bvx6nXKSMvAjXvuCPqedPZeN4YKJ/WVmtq4yXnDX+NR3rvHF39MGl0fTDK9fs26HlIa/tVrAcp5RFi7K1tWQMOgxcvuUrV9OvcCxKsKgoa50F6ztakVRg2cq11UJVS1Qlj6ObUmi3oCqsjg9Np5EUUB/ELNcpigDlxoMtiibAmWqI8L2qfMFrh+gDKQya2o2Ni+RnCa45h7fu3uI43n0xvp6++shy5OcAI/USHK6wibouVSNgzLqa8lyQZ44PPqdj+pjy4zf+dxvsTP4Mj/zH/xHAFy/dhc3UKR5wvaW7g6WZFSKTqVm3Hc4uDelsAu+8I/1ffnYC49jqQmLNybM9jSsa7S7wcGNjF5fc75+/NmEX/6NhIPwCYJeC6FcZ7I4oXEaht7YPJslNw+uE182nd1eyEtvKt68mTIyHaDZYc1BFbBq7jBa15WqZLEAGTM2qliTLAEjqzyKtQn0vDjl6ec/wTuvv4JrOseVrfA9p6vilpT0Yu3H44cGQiUKsBS1qSyvuS4btuLicsVM6M8moctcWkj7jK9ydHzK+mbQ+as0UmHZLps7Gzjo9/H4aMJoNNJVQ2Pq6bqeNjVemkq27WHLmJ2dC8yMv8pyNWVtbUSS5CwWep6NRmusVisiwxlqJYeVUt315a6WL87zsyqY43kMvIBVZsw60ZK2wjnrsOdl69VyVtXUKlLFfTLTGTUOwnbvq1gqzRPIkk6G+M448unyAAAgAElEQVSdPeI45uMf16a3X3rppa6CH7XdrOUU29HKoJ7xtTo9ybhz+w/YvaTXRcuKubu34PErj9Lv6/v52i
s38LwAqQqGhie0WJ7gul4HR2k7Ba7rPuAJpqWDK1r6TxhqSWLb3IMgsNnc2mIym5AbCEe/H2vp9bIhMhXok9MjhqOg66aVZU5ZSgb9NdJk0d27um746Mee7eDSb73+GpfPnefgzqmZLwWubyNswf7RXndOrpNimuIcHR0RR2vMJjMU+vr6cURWCpqqIZf6M21KWXcdmktGhj4apLiuPu+De1NUbaEage/rOZRnJb14RCX0+pZlqfYDwu2gWHE05Pv+7J/j7sFnqE/02pylDZYV0u8brlayIu5pA9YyN7AkV0ta9/oDTk/1Na+WukN1vwFrmqa602ZsHdouRV3XSNPyDuOIIq+6yrll2cSDIRvjNe7d05YRi/kSz3cfkO6WjX432u6B53mUle50ZLn+vV6vh5Rw584+jmO4GRLK8gyd0Bh/LOcBSfUcZeZZez1N3RiolD7v8XjE+fO7vPPOOw9wh4RtkSRZB+uKQos0FnzxC18BNG+7KFPu3bM4d0Gv1Ye3D5gvE5559iNgL818ybh765iTe/pZjTbXwK65cPkSi7mGZy4nkrKwCDwL2ZrqioRa1p0pK5aD4wiS1ZJX9vUa5No2gTemMs7KSkrKQiCcrLM4aaiJov4DHRloPZX0by2XunOt+Ub6vP0wBMPjUeZ7roFPtsiOKNZdQMsW9I08fJGX3ZrXQk3zvMC2nc5CQimBF4ZIWeKZTl2aF6R5SRha9EzHcraY8/71a/T7utuclyV1auE4NRcu689WaUKa5Iz6m6SZ8aKy67M1HEDpLgR2hdVC95uKKOpTNfV9xu+68+lYLRRbUdQFnhdQN/q+lIUAVVE1c0LjvyVrl+W8JOyZrrHbo9fvM51Ou86O5vR59Pt9jo6OzPGtbt6DhkuWRfXAGu+67pkKonUmsz4YDFgbjc1xNCTa9/3u91rjb8dxundNqawzeAbd/dU2Bw0rY7jeviuz6YLXvqU7hqPxgEfOXWX/rkab2JTUTetBqN8/f2jzoadtMmOjkdMjW83ZuOJjHel7Pr9WMnIc6ryhaP1F+x75aoUXa8RNtlhyclKysfUIl5/UEOCj2R7NskCcCgoDSfe3fZxtl9mpQQupimbqsL9/0iluCi/Qculj/z74soWsFdLEfY4rcdyAqqrJjH1R1AtBah5ont1nLC0FeWrsWfoxUpTYtnNmQo1gOByzWs1JzV7un/mO/6HjXzq5UkpdB575Qz4/BT71/+RYcc/jkScfBeD4xhz/e3wGj8Xc/rrmOzU3DxF9myYvO3yvbv02SBOwC6GhBGVVgFm0bGVjuzl1bWEZed+hE3GuJ3h37yYAOy/A5LRkfir51F/QXlveWknS5HjXX+DwPc19ufpsTJ0X/No/1RhsKWIuXnZIFj2Uo1/8u4dz/q2f+BHeenWKFeuJeOvWS9w43SOOzMtZw/V3Gu7mh+DoSb9cFmxe7nN0JNg1ppfSm2I5Md5KT9SRmzLZm9FYHhcf17/n9n1u7824/FRDb2R8WPKAXs/CwNwJ/CGDSJG+l+Aaw9W0rrF8iWdEDG6+OkOJPo2zwjVk8uR6QLa24sM7HpHB6P/YX/oJXn7jyzx25VkADr76FYRyyVYlVa3/5vTExrHAthTY5kVXDX2/pmj9UZsGoXL6fRc/0IvtfKJIVwmOsKjq1rDPQaoVrmvkRmvFxsYGR8f32D7XGs6VCMsB4VAY+KDvAbKkMYRvT3hsre9Q1Euk0/pcrLFaauhSnuskMwq2qJRO/gDOn1/D9yzm92bYyuAu64pSFdSl5PmnNJzo0qVLXLv3Bleu6ETjOz7xAq++/A7F4YC3vn1dP3fXYm1jE0uG7N8z8LNeSOja5Ab6lSsbO1wy8kZsPKo3/lu330VUfaiLTl57NZsQEWOoIeT7ehFOqnuMI7OQZSvWNgfUj9jMbxpj4aYhiPoUrr4v7zoZwa7Fpc3z3HzHGOJZHs0qA9vjaJGb51VSr05ZmcRCuDaVUkxWCY7BJG9tD9l7+xbVMicwYg6D7Q0ObtxiaDbntO+jKolYSa1qAgz8CCuCk7ne4KpVTrjeY6XmCK2Gy8HtgkjECCtlOjO8Dyvg4F7ezfNzl30q5sCQRaoLG6s05ubNPQZDH7c2kNSmxHIcPGGETfKCZZ2Ql292iYxSJWk+RSpBVZmChOdpwYpDPVeWixVxHOP7Z9ATWVfky1z7p3RkWEGe513Qats2tut0EKr2M200LM+EMuoSzwsQovV80ka0TSU7DL1qGhzHQlqamwQQhCHzRcJXvvo1AP7W3/qv+Ae/+A9YvPUmdUvoEjVKWQh8losWfuIwHPU6aM1qkuM5is3xFidTndQ3KifPE+raRikjNtLXctUtt6ATT4COcxEEPgipYbrGXDlZ6SDNN7DrLM155513NCem5aY0K8IwJF2Wncm17cDkpODceT2nAl8gm4KmyhCNWWAaQV0VvPzNbxKY5GYYhNy5c5tnntOG4cNhn9/9nc+xMR7SayFARUqjwDVEFMuGND/GdV2S1IhHBAWXL13krbfe7sj5UtT0h30aIwCzt7enfX42Rygz787vPsbJ4YmR/DX+eLVgMj2hJb54Dlh2jGW7nWhBmi346pff4/B4jlXqYw1iB9sKOsljKVc6sbMcHOMR2B8MODo6od8f0BvqYHUymzAY9zUXGZhP5liWxWqxPEtkmgZhWwRBhHUfhCqO4y6IaRqpLTHSpEtabNvG9wKK8gxi2Epd3++94zgtUVyf58bGFrdu3SKO406Wuw3gZWt+bOZW05z5Kdm2luOWUp75+NhBdx2gA9o47lPXshOmkVJqqKtn8d5718zxR1CHhL5+xus7IU3j8U9/+R/im0Dx8UfWuH1vighsKgNXOioFP/rX/gpHt++ZY3sMhj6//+Xfw1ie4QUentMjX0n6fX38VTohCHxKs18J5RGGI5J02SWYwrI1/N3A4bzApa4s8nLK5rYOTIukxHEjpJSEkf47IQTLe/vta4btOWRlieM7NIZjUjclZVUSOiFStYFpbQxdTUDt+1RV9UAy7noOCm16fXig19gwDBkOh5zO9L9VXQOSuqk68+rlqsAxsvi5gRO3BYaDA70X1hqjB43dFWA8T0tkJ/m8Ey3peTG5XNGurgroj2PcQHTS/Y5tUcuKIAg7PqyFot/vdyIiZZEjVYPnW0QGOqzkhKbyuHjxKqM1wxd3pxTlihvv6lgm9HosZssu3tRz0WU4HDGbzej3NdRUKUFq/LAA+r0B8XbMfD7/F+B9rW8V6HdkcnqMMFc4HA7xPEcbmneQw8aYgctuX2lHK9zSNFq6vaoLaiOhnuUJjz92laZpOtPpk6M5dTG7z5BcwyLn05zRWK9vJ/cyDvdqNi7rmGT3g0MqNSNHEo9NTODcJQgH5HZGbuJMrx+yfbFmfqQLBrFf8ui5C8zLhuvXtDyDrHMEmhuK33L0Cpp5RmMS6Mk8B+HjuDmNaaZsrjkM10OyRDKdGDEQt0fl0AmNlFVKepwSBSFVYbxoN0MmJxOKvOlsB1ojalNfIlkleJGFkpzZ5DgOtpA4lgTD+7KlC5yZj
//z448raPEnMhQO197WFbC6sHB3H+HwJGWV6k3PEg1e01Ded7plWWNbPkHYdjkksoFGNp0Ske0I6trBsnQgC7CYFyxnDd/7/YYsT8X+bYvVQvG139M38uf/u5/k07/z93n19W/zhCGvZpMcd73gcKIXscXpiu998XlmxzP2j74FwDK/zee/9iqPn3uG2je/d/waru1gm05Scttj1JQUtkNllHceecLn3JUer3wj7QK1NJ9QphZPfad+8V/5dknVuMxmWev9y/YTDjtKUdQZrR1XPvfpj61ukQ7igDoVFMyRhoNUOS62qjSmG0CEVEWJVUtCMzEbu8JqamazGZalJ9Df/dt/hwtXPsC1ff1irG+HpJOC0rYRwmDDnYxnnnuE2++DE+jFbm2Y8PxH1vn6u/rf81ObSnlYTcbcBG5CRaAaVC3ABBYCB6kcjBUPvu+zymYorM7MGUeZTd8y/h6QLFf0h25XYaizgpOiYDSOSRJ9D0YXLNwCjk5nhJFRyulJFmmJY8QOrr12C6caEPd9Vkrf4NhzEI3AtWuefErzBIXr8+zWRX7/C18C4KPf+QQHey7X3/s6p6aa3uu7TCcJvZ7TcV+aQiGaFFHrez7eHDE5TCG2mZiFpTgK6PmCOD6PK1p+4RR6knyiX/IvbQ2YnrcJFgskGrN/tF9iJTOSXo+rT+pFsYpPKKyE6bEOYs5VI4KtPrvrl7i3rzeh6qBmtL3GfDHDyvXCafdsrF6PftCpLaAIUP2AJtZV8cNZQuT5+FjMTPXs6PacngW2qYYOt8Ys5ysGXkhqArBGZrhOyNq63pRO7055ZTJnvw5ITTAXihFKafPV1psG5SAbh41tfY6PP1mSZSPe+Ibk49+teZG3b+7juT62I8hWdvc9JWtmM50c2F6OUi79QUxpkmpkwHLm4XgpvUgn1afHp1R1zfPPPwdovkqWZezf3euCZQuJF3ra08pUSJtKk5wDE9A2qM6Vvt3QABM0qo5H0ppEDkdt9bNBSihy2TnJSyVNUePMeLNWkstXHuNXfuVXAPjMZz7Dy998hfH6sAssrNKjqRtWC2gq4xs2FEhKTg5NglnlPHLxMq+//m0qU6SIY58sT6jKM8f7oii6TR8wClkFlmV31XMppUnoFFK1yphKB3KGk+T7PrP5FIsQhX7uw2EPQcjp4THCadW6JLIOOT7Ua8louEYcWhRFiaz0exQHMa69NJwHEzxWJeuDiNN93aXyrXNcurTFvXunJKVOPlzPoixkx41xvAw3KKnKGtkYvsHmNnf39xkN+1213nG0KbRozXN9B8uGxTxFodeNZ575CKfH93Bs0ZncBpZOsqOBnvsyy5CiJssWDIx5dZInfOmffYUnroa885Y+1tZuwGq5YmYS4w9/5FEc2+P1N64T9vTzm8+nXWW85WF+7PnniGOfvb275hnUTE806b5V+ptOp3hBiOd5JOY9bpRkY2ODxhQaDg+PtPJZfUbgL0vt+7O2ttaZaldVhbLPTH5t28b1nAfMhN9559tEUfRAMuU4Ooi/v5ovZf1Ad7DtjlZl2RmECk93SO/3ufriF79IGIbU5m8sy8L3tOjDtklSHMdiMslow6HjuzPCyKO3YyHN5jNbgRfEvP3OG2xe0Bvw7Rtz/re3Pselq/reLVcTlumcrfMCYZIy26soVhV5taA+0e9yf9gnWeVd99V1fKS08Hybwpi7nts9z6XLm9w1Cq3HpwdUdUZduiznBsmBx/bOOkEQdElEXqSMRiMmE70u+4GLH9iGr2l4NZUxOxfizDMpDJDVmeBD29Fsu5vAfV1JRV3quR+vj8izs2fgeT5FkeCFDrlRqXWEjx/YFEWGa2Izx3HI04ymafl5NnlTsbU76lQiy9yhyFIkKaLt8ucK29ECFnoeOCT5kgu7u/TX9HM42j9BVpbmtbZFLtkgkCiDhgo8v+PiWbV+t30rIhMZs/kxN27q57BzfoePvfAC8/mbABzcNJ3vpqE36Jtzyjk9nbK+vs7hoe4uh0HEE0880XWykkR3nxzH+Rf8qtp7C3oddGy743MdnxxSVTVSWV3hqi3e3c/pMhdJYwrTnq8FLx4wq0eyt38bIQRReMbNWyxmnYLo4b0jgsBD2LC3pxNfJxQMw032XtPXktcNg76DKtMuuVu7MmL/bsmVpxXH+hYwnc7xg41OhXfnsUscHxSU1SnSQFCefeZF5tMly+MjMst0EGUDCC49pr0o33lzD2FpwZCybL32YhzHJl2lRKGOb5aLBM8LOgXUqpL0ejFKSnoDfb1B4GA5sHtuk7rQc+H09BSpJK7Zs3sDn7qygRJE60lqsZiVyEacNXjKM271Hzb+xAQt/jgj3hipP/fv/WsAvPW1faQL2WRObVxZxfwQS8xIcp08gK5ObJ9zUWbzzHOHui5RVoUn9EuWNRVWXSNkRe3pJGU6z/jZ/9DnP/6vz+lj+zkvf3XMz/30IZkJNB597DKf/OHLvL//NWyhF+Aqm3DrtVtI0xlYnJyys9knjLawau2OPqlPUckaJB7X3zSVHR8eveyQmi7RfClw6gypJIGR1hXumLVdnyTz8AI9WQdjm71bt3nqu/XG721UXHtjyew0ojBJw6AHDAZkRc7ALLjC71P6CYERtMgLh8P9BvtUdBVSGokSHk5LDjbVcb90qE1r1B33+cEf+CR3Xv4NHFPZ/drtCFnMMfs+TaI3qUbm1KZdu7YeM/Jc3nt3yZMv6Ht19YlNvvn5V0kKo/Ti6O/VVUVmoHUKsC1fK0q16kNWTVkrWg/IIAiwLCjzJYKWXF08sOnCmYN5W8XJ8xxcQRT1OuGNNC+xXQc3DDoFo+3dXSaTiVFbA6H0olfXdVcRtSyLSxcusneyj2egjE8+/RzffOk1Ll3RL/nB/ASRpFzYGjNy9Dm4W0/y1sEriGXNwARAp8uKOqkI24qNqklKi8HukGSqYYGXL18lCBzuvbvPnakOWnY3N8mrlP6oTcSXbOwI9t53qLPWYNDi0e+6wvvfutZ1LKOxT7poWJzoRfLq05ssZMWIISf39ObcpJIqk0i8TpWqKBMC18Xx9LFnUjB0IoIYJnM9F1XtEsbg1ZK5mUPrcYgoi04mvFCKZZZjo+iFpqfuVjS1IA6NAeR0qp+3grCFDlYVdQWOV2Ebxcl0JRGly+NP62Ty0o7H5auP8At/97N4vpnX0sa1PPIiwTGbsxIQRlZnXq3IUZR88FmL998wCkpTQdzzAIdSmtaYEhRpxQvfoU2hpXK5s3eTO3evdVXjJi+wXA3BUS2d1SiytVDJNElwXBdFSWUkv9fW1mlkyunpoiNqF/mSqoaRgai2Kmx1RSdt3QafpUyxWoiRcJGNze6ONii/desOjl3j+jWqVX+qCwLXIV1VPPusJjbv7d3l9HTSCWGIQDLs7zCf3+tgSMukJIwc6kp2ATuipixkt5YAyMbI5puPHMdB1nSJDuguXFu1ba+vrhs8XzAw0vaykvT7fabTeVd0ao0t2/e913eJejZV2dAYJdk8U9i2IXwbZENWpNhWyPqGvr71bcX81GMymXQE5dFo
xI1rp50EsRKSSqXI2tWCGMB4sM3xdMmL3/0RXvmGDriqZoljj9g+p6E8+3cmWobcLrsiV1loiJIbnEF50iSnrhtc0Zpe29iOoqkFddHCQRWVKPjodzzFrXfeBcD1Bww3NrhzTydJvu8znZywNR5TmT2zqBVW4yKkizIwa+lJprMZnkkiQjtgkawIQpeB6S5rJT8fsDpRhEbm2LZLXZsigqnmtgUA0EaxlmXRH5wZlCZJRl2ddUOwVEfEb5O53Jix+r7fBZSWpQ2lI1NIkaKiyCtTVW/FXCJsYegAtB1Zsx+1tgeSbu1plV3TPKGuJGGw1r0z0+QQFMS+PqcwVhR5TiVtctOZH/RHNI0iyZZ4pig5HMUI26Iy0NOd7R5vvzNFWRG75w1EPaooMsHhpCKg7TzAcp7y9LOPA3pOH95b4vteB+G6euVDLBc1H3tec+W/8NLvcnJ6RBRb9Aemc5W60ARcvjLm1nXdhZtMJviBh2gh8S6oOsRyKizRdid9sBIs7O5+DnrblPUSWd0frPOAoa2Wv7d0MaluSf0VjTyb57br6fltlzRGzTlPapAxtpt3qrtpWmF7ujugvyhxxMg8RqO+XFZUlY2wGuqmMM9hjKwEi6WhNLgWluOhsOkbkY26TEjnNl4fCnM/RQM2eq3QvyGRjYXlCEQ7p7yLLPMj6tqhMtY9YbjOBz+8w+OPak22W++/zbfffp+qjKilTgZkU7K5fpGiKqlNJ/Cxi1d5+903cdx2fVPkWYHrup0xbV2XWOj1oH2PhqOA1TLHN/vjcqFtA37yp/4Nblx/C4Df+c2vEvdCbCvA8/VaWTS6e9oUZ8+q7V7dX2woy1LHRwY5UlUuluVgh/p6L13eYnYkdWJbmCJJLXBFjmcsK/rjHYI1h+iC170LanaX5bRBVT5Jowvm4UhQTBV2K2sfxNx+75jxZsD4nImnmpBiActFwdysXX6ksMqAXqjX0zt3boGscW1HG0OjLQcsy0JK1cWv4/GQopp3oh5VCatVRhRFCNNt2rywxunkkMuPPMFkoqGQRV6xWiQ0VSseU1LmAsv2sBwj2ONFVI3EdiS1kbGXqqA4av5fNRF+OB6Oh+PheDgejofj4Xg4Ho6H4+H4/934V6Jz1d8aqY2ndGVgfpQy2rax3IjFvuZchRTIbB/ZOCSLtqJXMl53u0prnlXkuSaUex0+36dIa3zbITdV4slswXd8bMgv/qZpv4Qxf/Aln3//p28wNl4ts3nDD/7wd2LZp7z0hdcAeOyDL7D38pfZWdNEECFs9idz7hR3CTxdcRmFKbZfMXQCCtOluXe3ZBxKPKXbzEniUJQKwZn/yHyWM94I8CKP+UpXykbbitEWZInxbrjUkB7FyEBwZ994QywtpANiXRHm+jkOBttkdk6p9PdsG+qTdZbvrajb7o4NddV0HiWWLUE5WJVN1Wr9u4qf++v/Oi998Uukx/pYN6ce47VeVxl459ZNhj2fuC9YmRZpmf+f7L1ZrCfZfd/3OVWn9v96t957unt6mjPDITlDihQ3kRK1y7YcWQKsKE+xYQl2bCOwDQSRHMV5SPLgGEESZAGcBLEUB7EDMpIg0VooihyS4jYznH3rmd7vvvzX2qvOycM5/7rDwPaTH/jQBfRDN27/b/3P+lu+iybxJVXl0UrbVcxK3nf5UVzLpbh//z5KNTiO0+HzwQFtKisrA2TP64NTUlTKfrcBjtuymBSEkXn3dGnEA96LDQcjp7mSh/Y8j1YLijrvujHJIAFHUNS6k9hUrbBdDtu2LwwZu21bXMtFEUpRVxWR9MksEb0fj0mlRgrLq9EpQXiG4fyYz10043ln/BO88u6rBI4g823Vtj1h4AzQFs6kZocEUcJhNiWxhtYXNy/z2mu3EU5JPLA/V7cUpcuTHzZwoqs3tvjON7aZHqY4lmwdB32U59FWEVVm1kvs9vnQhx/hkz9pKqZf/JNnOT4+ZO9+hbRtBp1p5iclaxvrhNaUeb5IybOG2JplVijGW4rB1ojtt41EtSRAjjZwqpKlrUB5riAMJIFdd9PDSQd7Wxn9LSrjfeSuOi9Niy9dUHXH41lkOfMMelIQ2Er0SVniJ5KwNvt40U64dH3EbK+mrCwctHFQygEMbBTAkyFNW3YVRd/3QXt87ic+wbN/9iIAWT5jbcPnYC/Dc031TPoZStfkqfl/1x59hOPjCYfHx/TsOEVej8l8Rq+XdDh342PiUVvvvaatQPs4jiIvT6E0fqANSdsaoAe+s6LY2e9S2U6B6iTAw9Any5Y0tYvG4s4jSZHXOBZA7nmCtm3RrUNrDcnRPo4OoVVIu9f8QNLv99nbNWfuYDQgXaQE0iOzYiBhkBBHEXm+pFWWB6mMYafvrrqcRQd5XHE8hK2Qv5cP47qrqrn9nLomSfpcvXq5gzgdHR2xubnJyckJ5+zZ7Ps+N2/e7PhO47UYzze8m9KSlrUyBHvpOd3vzNKKsloShWbPxH3JxvgcVb1kPre2GXHF/rbCt53zPM9xZYLrgxAGMipFTFFqfDcGu/8nxzV/5+/+PfYO3gLgd3/3D0h6DiA73omUZu6EcNHv8cJpVd15jSld4/sSpQRNuTKKDTg+mfELv/yzLOZmbr76pec4s+V358Zs0oKsGY3XsRQd0qpAFdBUp/C6BoV2FHLVwShrRmsjZtNlBznqD2LW14ccHBx0XUXH1Wglu64/2nQwPF9y5YrpkL755pt4nsdomHRnetuae64zFXZNRyoMw++DRrmu930dErMuVDd3SjVW2j/sfOA8z0M1hhO06krVVUvb6vd0xQorJV5TWQiWEII4MTYuo3UzLoupIE9lx+1NswUbm0MaL2NpTdmrQlDmxuS7nxgIlSNdEA0Bdk2dWfD4jWd45fk3qCxP+XiS0hsIFpOsi0vAeCRdvGhiiXdvvYPQIcLRHbyuqlocp0cwtl2VQUNRehTLlrWR+X6HuxM8xsYrbGa6mI4w9h6OXHlFOiR9A7WfT0/3YxBoytxYBph/bKBVHcdk5Q+mtejmIc9zpJSEYUiRWcho29Lv9ztBrbZt6fVialWztjay8wf7+3tIxwgKgUGpSD/shLdcL0S4KQKfo0Nz3jhaGuEGL+q6qL1+SJMvqFt7F+kaLXKogRXPLXaoUTiNz4XzbvcO87RBWV6kROPXgmYOdWiRHb0+XgyT2YyVR0Tg9fD7mp0D8/0+9aGnGa9t8LWvv4DrmnsmXzacPbtFPHBQtit86+07eL7ohLoCv0eWT3ji/ZfYt/5/y6Vk0DP7ZbX2srmDFvmp7UFT8vQHP4Hnh8jA7NGv/NlXiXsug/4aQtv7L5tSlYK2OfVcE0J01hiruVmJYLgWxZTlDWVR8+kf+4idh4rbbx/gkJBltnNVgdIu0llZ4kjGVz3OPzlmZo23US21KCiWDZtnjAiMdCOOt+91xuatCgj9mqPbLVltxvPK9XUe+5Dk23/+ALcyZ3xZzKgWLX7PjMFonHB0uGctPlbdc4nSDUJ29lj4vkTgddwp6bW0jSTPMx65Zm0X9g2qqSiyroPY1pILlzbY37Owx1z
jyoI4ijvvuzu39/H8CK1bhtZ/U+mao3d2/42dqx8IzlWbNWxsrtqgO7iLIbNsSt+6ch/M5px3A5Z1RWAJu7UD0g2obeAtHGW8khzZtboDHBCatJrCKods4dxlxSA2/29BSZ4revGZlXgXjsz50z96lnPnIhBmYcwnX6d/rs9n/6ohRH/1i3u0h+q/rSkAACAASURBVDtcDLbwEzObH7v6Ee5mb7C3uM+0NAenco1BqpY2YYgcXO0yT5cIy51ynRpd99i9M2GZm58L3LOsjeHyDavC5ftM9xbUhWLznPmPeZXhiIBZVnLuvBmrH/2Ji/zeF+7iWpjeYCiZnRxT5jH+wIpVlBWhcHEtTMcTAtVqFmVFaxfmlQtbvPTKt7h985DP/txfBOAXHnucw6MdPv97XwNAi4RWLVEqpljYgLYVqMojikJc35ItlSSvPSZH293PjAYGm7+aFn8FJ+AUi7zCfV++ZtX1qoq7d45wBQThiuRsDhFXmj+wggXStdp9P6TKM6TndupyAMvlEsf1upZxXdc4OJ1wRBTE1uFdoy0/znVcQj9k3pZdu1u0S+pyyZXLJmk5e/YCz97cpsWBzcfM+qmnzOcNmwNYHhreR57XEC8JhAngHR2QtRmJn3TwjDv332RtzeNolrO0gf3axhCnaji05pXLzCGKexyIHUSzUmgqSISk9E+Qlm947vGAwdUzfO0FgxvfmxxQTDTj8xLHwki9ps/m2T7v3ryFszTvEIcuF86GlK1J0hY7OWPncQZews3aQGKTMGS6e5/RcNh5+yzrGm8QcWghjnEQIBUgXQ5sQNu0PgqBtIRz3/HxVYNDS5UZeIFEEvoSz3UprGtw5MfUbcvP/LgRFfm9L73A/ZtTQk/iWuhAqwWuI+E9gVsQ+njqNKn3vQR0wDe++hYI82+OA3u7c6IY1jfMRT85dPBcj3Bszp/JZJeyFAx6IcpCZCpd4WhMkGSTKSE07Yo0iE30G41wNL48Vf1L05zQC0k7/yjPmkWa9WpiUQeoOh8fKSFOQuqyIs/MOyxOKsZrA1aM77JoSOIBi+WUniVrlnVNXc+Je1E3LkXasDbepGcJ2ek0Q1PRCp8oMsGc7zssixzXc9H1yqhME/kerT1LfOl2gXLon3rheL77fbBAoYSFGbl2XIzwwcnJhMPDIzt2gv39A5RSHE8MHLBpGrQQZHYdlAc5g17MeDzGc836LIoKV4IUbpfkulITSDpfpsW8JUvvUhQZFy8aaKlwFZ4siayRa1EuadoFunUR1m+oqgUb5wVFukRapcUtf51oUPP7/8x4Inl+Q1k0CEKkXCWhRvGurlTHMwnDgDCQVO4KOiQQSHzPg44fUxGEgm994zlILBwvWWd0fsjEjlM71bSVw+H+MZtbAzt2ID3DR13ZrknXI08rZGwLlLol6UnieI3tbUOUUEqxs7MDQnUw0sCJyYv0FMKFgd45rma5NGfQmbPrnJycUNfVe4pc6vtU+lY8Edc95VwZn6QUx5FdodSsE6dbm1oLwjC2kCbRfXZVVTjiVK1Ta00UR2T53K6VFjJFq00x0byDR1FpYjngYNsEeHle4zg1RVvYd4LZQnPm0nnGQ7vfj2Yc7J4QBx69nlnH0lMsFwXT1pynou7x1s17NEJTtOYdNi76tDpjHITMj827x5HLeDxm1xYyXMdAKbVqbDHIJOO1ainzFVdTgOsQBiEnJ5YDJSH0Nct8imdFGXByRmOPxlIclC4Zn0nYvpMRWPhyUzYIHeHJZVdcqaoKrXwcG0CfFkM0lU34gsDDcyWqqXGt0bfvmXhrBbt2BXieSySNHyQYY/rJyR6u41HkK2EfjdtqxpvmbsqzEl1HBFFzyhFqApSe06qigx0uFxlhFNJYxZDYA5yATIMMrZCZWxOIinDdZTK3MHlP4Tt+J6jjBZJCt3zkZy6RXDBJ4OHdI956+Z7xerPrpcorHCdkPDLj+51v3ySIX8ILQvrDUw+9K1eu8O7tWxxZrlu/F6LFKQeqqiqqUnByVHDxwhUA3r19j+OjE1wn4smPmLjyu899gyLLcS00cjRc4+6dm9zb3qE/XHH0EpqqZXJcIO0+6o0j5vN9PJswOI7xxTJ80NN/6xIuC8+U0kcmCS+/8JpdPyV16RD4ZUe5aduWUrf07fj6+KQnmr1bx0irQK3CCV7kstiFk2Nzt/fiIaEz5dI5A2O9e09w6dyYG9czXnz+jvmsYINx8gTp/PPE0nKe24KnPvg0d+4b5eOT/SmqkTTtqY+bEkZFNA5DKgs/bUpNEAhKezfkOfSSmDDUTE9Ku4YdyrIhDDWNbbjIHhye7NLfsHDCo4IqDygnVSeuFPg98jwniCR5Yd7Tcf/tnKsfiOTKDRyuXTHktahq0GGGm2bM79kAhSF5e4J0BcoGuZ7ngiMoa8sRksIcSIXuEgu0QvoSxx3RFubiDXzBN56dcrxrLtTB+YbF0sENVKduNeqdoddvkeExly4bbtYwHPK97+7wR39kcO+vv3TAIHDZWtvizruG+PeLv/EPePab3+R3/ug/Z2AxrNXSpVYtia1MKEdxsmjxhcOqDNe0NetbZ3jyyU/z3Ivmgr7/zgFCh5y5bsfIj/CSGec2PHZ3bUIZrCO8OVvjgMImZdsHOzx+/SyLfbMAXv9uSRInxKO2q04Gro8HCNu5Eq6DaARRENHUZmHGy5zpIuJHrq5z9x0j2HH9/Z/l4qPnyOafN+MUh9R1jzzLcTAHjStKZNBQNYsuQAhDnzfeeonIkiiFEOjWkOJXCn9SNmR5hUZ3FdIw9kiXFbOJqWRrrenFMZoS6ZrDNkkcqqrAD/zuEGmahjAMO46A1prhIGQyXRDHJpHRtUsg+2RFhrCkxdDzaFo6YQy1qp6qlpX3cFPXSCEYJD0ae/jkbcUgXKO23cLj3GWrv0YUt1z85F8H4Hv/6g9IJ3ts9a+yFpu1cVS4jIIBi3pFHJekiwUXNh8hbcwhrQqf/d2M9//IFaZ7Jtk4mKY89vg6mxfMxn/5+W2aJsT3NjuFRM93aGvF5mbCSlCoF0ie/84fc7hvxmXzbMBg5OF4Da5jAu/BWsTJ5JDL4VkWe2b/zfcnLNsQb2QNip8asnNnjzfvl4zP2PEsHNb6ISkF0hJFXelwtH+MZw/EEgiShJoGvbqMaQnjU4KyU3uITCGU6Axmo2RI33GZpieIsVXKywOcasGdA/PZV66scW+3gdp7j2hCRatL3EDgvcfQtioaHNtpmc9SHDdjPs0YWUNrzwsYj/pcuuZzZBWoPL/BEaGR6wVmywW9aAOlK9LMXgqAlAKh2i5xchyHumoJglPDQum2aLFygQfQRMmANM25dPkiAMvlnLIsKcv3JmYBjnBp29MAyHF9lDKGtwBPffBJbt++S9PYDrQuGA+3yBYZc5vwDYYJo7jHfDo75cM0mltvv9uJObRC4vsSRwoaG9A6ugHHQTkSb8UlUBmNsCQ5TPKaZRkIl/Y9HJ3SmgY7KxkzpdBtS6NW/DGjlLi3l32fkIEQ4DinwXjTtARBiLJIgMAforTPndvbDMdmjj0poH
FoakVlu0JBGCNaTWXPJOk31KUDOiCzwg1xsI7mXnd5DscBQSRZpprZoS1srAsCz+N4UeHY5OqpD17gm8//Hq6tLPeCiGxZ0dQKpU7VbdEOSpkgFaAoMjzldp0y1/HQWpDnpeneYiTJpfQ52D/Gt8WcttZQnqU3MEnvfHrEX/6pv8pkep/vfPtZwFoHZClRLLvkitbBdSXaJneNblgsFniei/WApa01nozpD0ImtiiSpQ1rZ9bIUvP9qrzsyPQPHpzK2A/6I9LlvDPnLYoFWotOYAqUEf4QorMm0G3bFcNWDgOrothpx6SkLGtcV7xHujogDH3c95gU13UBtB1PsaoqI9rQaELLG5Z+yPHxMVIKtJVbNdwgySDZsmtlThi7zCcFleWwpssSgU8cJ+SWU6qUMVuWrUnGj29V7NZv4kiHNRuMu2VNU2uSccjJvjn31zc2mC+m75G2dxACqkrhe5b7Vi0IfUFrVc50FRD0GlpqMmuoLRwfn4Jef52Z5X1FwxoRZEZuHVhMC3Z3a7Rwuf64Kfhmyym335wTSI/aCgR4XohWors/jL1JSr+fsNrbTVWbmEG3JMlKHbRE6La7Mz0v4Nq1G7xz6zYH900xdfveDutbPm0jOjRJmpX0I8lsbu65Xt/D9WuqTFCvBPBUSZz0jOGyvVOUrshmJZ4tgNSO6fLIpiawCZ/WmqyEOCwQlVXvcyqyNOPieYOYIJGsn9vikb98npuvmiBeJCfoyiHPwLNCRnEiaasUbblMo7NGxrvf84ltsWOt/xjPPf8yabokHq7sNRS6cbtCiu9Jer2Y+azm3l1j1O5FMEo2SLM5f/7Nr9jv1/Czf+FnODgy7/STn/sL/NZv/TZ+IKnrVWe+grZBiKaTpC8qE6sIfWpNFIZGbXHFQfR9n7qujWKn3Wu15auvkvqmMsXkqi5wrYCW6zj0EsnixMTQPd9FzH0O9wt6I1NAC+NzhGcKNscOJSZOScJ1ju77lNr8nUHDsZ6jJxVXbph1fnx0wOf/xV22zg+o7PmymA1IxR79kVnny7RACRfHBccqevqeBgR5VhDYIp52BGmWESUmrZGuKdiXtaIszLwkcUuRZwyGPZQt/Eivpio8VGXW5uZ6wsHBgpqKwwOzPl3hInBoaugPzdp3pGZmBYv+dc8PRHJVVw0vvWoW3Eg2NJ5HNtXUCxPYqNZj6fUYOylWCAQpJXmancouVi2eJ2nd1rSLALRAI6mr5lRSORbMZ0Neft0E7H/x/BmKqeT+7QdcfcZ0SNLlAj8fEMWXmZZ3APjWH9xma6y5+V2rwpN7pFnB/eyEG2cMKfz3vvHf8+U/fZu18XW0bwimo/hxqpMT8sxUBh0nIAkFjuiR2cBpMIh55eVXKB6VfOxjpoLxxd9/lp37iku3LXxqXvLDP7/JPE85eGBVBq+OOD726Hs1mc3e//ALOSOO+MTHzUG6N5jy5vMNF6/6nQwqbYsT+ZSrPrqjEI6gLSoiexF/6OJVNp55P9/+wucp+6ay8+U//EM+8xOf44c+ZjpZ0+qYV7/5VcKej7LJ3Xh9SF6nLCY5SbSSWU/Z2FqjWJqFqLUycKZWMbKXUFnmKGWIzUlvBZ9wGG/ITi66rlt6fY8iF3hWjrqqGqqqQXpOF5Q5joPA6S7ruq5BK4LAp7KVs6woGa+vsTXeYDazioVKEwYBhQ34mqZGYRSohA3u8iLjRz/9Kc5urfMHX/qK+T5BhGoLHuzbQG4BQ18zGF7BiWy1x5dsbp3laJKSWs8czwuoygm6MQlKUU45u9ljLT5PNjcJ+8/8zIf55rO3uPDImLObZhyW31nQpB5+cwkAqeaoVjLuj5nMDEkzDBJKR+EJrwtyb39vwWh8js2+9UQ7nJsuiG7wrXUAznlUM2RZH7N+xbzXaCsg8B3u3DPBVnHHIRgERL6gSa0Kl9uSFw5ansJ50JpWeyirqlajOZie4IcByib2/XCI0zTMF2YOAs8jDlzasiHqmwnM6wWHaYsctcQ2uMrdCr/pU2lbkRIJzKFMUhwLWTtz7ixF0TBfLrsKetPOqSqNa+EpAgelc4ZrnlUIAtVmRJGmLOHW22bMw8ihqReo1grMyBAvaEAobtwwqpFNXXHz3be/D5KqlCIITo9Zz/PQykAAV2phrW47L6qlrbovsgVx6CPtWBZ5RZ5nBH7UXYR5bnxTylbx9//23wNg+8EhL3znTUZjSyJ2ao4Pd4hDlw9eNwWs8do56ha+8tWv8eQHrwBGjWw2X3YiMNM8RRQNcuAh7O/TbYtoGzwZ4mL2kY5CBKUpWWMUWkfrEY4IODk2+73RLaoV+K6kWhXDPI/hsN/9vr3DAzxfAqqDKjVNQxzHNI1iPrfVQsdBCAd35ctUFxRFYcfVkpY3zrKzvQcoXCsoU1UNDgNUayEqjRnzNC07AQvfXRIGCVlq9nHP7ZEul6yflQwGZv2ki5ayKVEqIbYZyf2du0T9lMHYdkfFGqpeUsuCND0l4mfpAlcqpJ33ulFoJbuilysDXDd4j4+ZEWIoK4Xv+5wbmUCxoeRwf4/3P30NgJ/+uZ9ie3vCuRvneZ9j3FFuf/t1kn7MdDnHtXL0VZUjpdftTy2UUcDSdYcWcBwHzwuYTE5lz+OeZjabUOUr8QrZdR3fizZIUyOhvkKOmKTJ7X5fURrxoaIoOo83kzQbVceV+AQo6rrt3snzgi4pW/nMSSnJsowwCMitH0Oa1jzxxOUOwbC/v2vEMVRDZQVJaGEw3GBtbcDx0QP7+YKmcjvo2WgTkoHmZH+Oa9d1GLZURUleNF33o6grgsBD2W5Pf9ynqBVV63CSmbV/aTRGVQm7hws2N8zZ0bQ5eZ51iWIQBGRpgee7XWFRWOjSxqaFtvtwPFkSRD7D9ZU4ToMioFaKzbNmjneOpoxD0XXhh6MtA6MPWl570XQ6q6LExadxFUKvIKlQlyGuPIWQmUKL7lQGZSBoqtz4ldlkyveNKt3Kp/Dao1eZToxq2/lLBj7V1jWLxQzPF53vW3/okOdlJ4wT9hoCD44WGmU7Co5sSJct0u11fqa61cjwVKI+LVJ0myE98G0SnxcOo8EZCnLm2qwNlbckwZDtXft31ZC9Oeflb7zBZ37FxFwbW0tCv6YFMtsZq9IS36sQjollNs+uI92aYhGwfceKHbW3cb2KXi+hXHmDCJDCQYiVT5LxJ2u07ooPWZkymR2RxAmtvcd+9FM/x6//+q/z7Nf/3KyxfMaD7X3Wx2u0FkJZlTXCMcWNVln4eeugG9Wt15W9xyqZAnOe+r7pkq6UqsEUp1fFP0eEIBqEaCnsHDu6RtQugRWYcH2F0CWb6xHLwvq5Tjx2Fg7DJMSx5+JMT5kfFVy4ZuLH3pokkDGakFsvm/g7GsPFS4LxaJOlZ6Gl8xmDZMC16yb29ZKS6VFN28iuuZJlCld4REFMWZl4yg8EfuB3xY/zl0bMZhPqSnH+goEc3nlnBzRMSd9jodLg6JDGegQGIuDiJcnt2/eIQquEqxRu49AoTdOuikzfL4P//38eClo8fB4+D5+Hz8Pn4fPwefg8fB4+D
5+Hz7+D5weic+X4OZuP3AFA70tUvYVbxeTWRbTfGyDbQ3S77KpgTaupa8OxApC+oG0KhPBwrLCAdo1vidaq49Uo7VE2C/7lPzdZ6y/9hEbVmv7GOeKeyczzKufO9gM2swG9i6Y78MkfO8syX7KwVdwz8RYiuIvXVvhHJpv+9nPvcvZGarDVlvw/efAOy23JuSvmuy7nJX1/zIIaV1uBgspha91j+/73mC8NDPHio+s8uHXC9l3zM31q+qOUduyxec2SZW/X1FVOupD0rfnwxfwMr37tJT79U6ZK/Zm/MiKd7zA7XBJ00tamgrgSF3CFQLkuBA5LK43+3MEdbtwaUmhBuTBdlPl8yv/yxjfZ3DDQpWq+Q28Y01Ru1w7/1Gd/lD/70jdwRUVhK52OCMkXy66yVFUVwoHI9zrCdxBYbLmqO9JrUWa4yuv+7vsCx1EkPYfUwiAGgx5RZNzLVzK2Bkp06neilEbj4ziaylY5hv2YxeSYKkpo61Xb3BB1VQdVckC4KKFxLNTM9yVvv/02h/sDBufMHB++mzHquSjbRtfLiv1iSVGFNK5Zi7Nqn7JuuPbkI9x6x5LchWDz0Zj7L5iKYuC6OMEYL9LIiVmvc3fGuacC9u+mXHvGcLpuZIq8zDi2xrs/+tMf463X95lPCtbGpkKjOMSRQybVMTIw3UFPFuweZkR9A3/ZvOBzvHOIN/QIrFVBU2VsriVsji7z6ouG4NkfxVx//4jK7quTe1PQgsBRrPBErVsROQ3CibCNMlRREXtBB+kqytzA5rSDsJwy7WXoVjDsWYGZWtAISaNjXMf822w2Z+PqI4i9d+hboY8wKrl23efbqeEtbExu8R/88i9ya77kj37/i2Y8z8Xk6Yyq1NQWcnD+bMzFC33euWnGvNcLqSpF3dQkiZUAbgTH+wVRXHDtMbP28hwWM0Vt+R11I/B8wWKR8/SHPgbAh5/5AP/ov/hNtFakqRUW6UVoWlorJey4gqY1VccVET8MQw4PFwSBS2WJvjIwHZ8VOT8MA6aTBU1bsxKB6MxYlc//8Vv/EoCdB3fYOhcRWg7b3i5UyuHGjcd48pypJE+XOev9AZ/9xMe5vWM6nfMsR/Yiji0X7nIyZC1Z4yA7ZFk13e87e3GD2zvHnBlZwmg7Je65pJbbUJUt/TChyDS6tSa3usRbCWfYTkfTVAgRd+aujqOREsqy7bhZw1GP+WxhjGitOIbSpmKplT33PWMFcfmRa5wcG9iKdAaE4YKinBF6K/l5QV0oQluhbRoYrdWMNwsK64HmBjluac4Ps16XOL5g937NaGSqr65/wNaZxzizlXTvfnfvOaJowKBnkA+vvrDNIAnwIknEyrR8k8GjV3nz7ZcoSmsC74W0zamkvqalaQqkdCjtOWwEUQKWWc6R9cx7/KlLHM1SZpVZP6+8c5eDvQcEcth5UTWOwpEuP/Thj/LyS6+asfdcKyJkTXelpFINaJd+38zV+x6/wgvPv4RqIgIrhS6cin6cMG/Me4NA4Ngu1QrC55OlOUnikuennfmVMMJq/Uh5yok2P+OZCntVsWJTuY6DE5zWfVXTghAkg7jzagMjwtI0VdeVHg2HLOYFR8cHq/9pzZUFycoo1nFJ+i233n2XX/ylXwDg8PCQr37lz4nsuTU5lBweTvFF0HUCsuWSOBoYSNXK0rFyyfIGJze/P69ntPg4NQTBKbe3quYkwsUPzM/N51N065zyzoqaOO4jhCZfccOcgFJVVCtoVuCiQ1gUDY9dNx1L10958+UjymYOtmHiKJDtRif80TQuTW041drysHxPcPZ8j7WNhNdfMvu/mAniOEDronsnP5CUTQ3tiisp8f3QdkMsPFsoyrpkzXbl7t+/yzPPfJReGXPr9k3zTjS4jo8AMoue0Y42XQAL/R47A4o0pCpTlFj5eEF/1EOgyKxghic8isbFt1D+zbHLZFqRplBaZQOpfBLfY3tngkU5MxwNKCtJs+LJ6BIvdfGaHu9+xXRR5JUtFtkhy6xgbc10l5750NMEPR+GFvrdVBzszjjY3Ue39mzRS0KpEMpFWIio4zVI6eA4Kz/A3NhTUJOmZn88cvUxHn+8x/deeIfK3odrm2v8i//nd/nn/+f/bcZAQhyHtHpJFFqoYtTn+Hhi6Q9WIEQqijLH5dS+5L1IHsCKu1TWZ24VGzkkid9BVD0Zdf5YwopMaQWO8kmtuJoQA2JvyO7+Np/73NNm/Rzf5eZBQDUvV968RDEM3IB824zT4f0c3y+o25rFgZm/j37wR/C8lL2DN2nsftCq5t7bGccT893SuU9Vaaqi6bypwEXR0OgF2nYHqwZ6vYQLV8xen89PUErTNJrd3V37ThFh6FPrOa2yBsCVg+sWlIX5+/H+hEffd4VxMupQBcJReL5LnVcU2cyOHf/W5wciufJ8vzswdnXCvZ2Wg/l9Qgv9cIOAZZuwXvoIuzmaosYVQQcBjEKHLDdqgauLWPsK168R+RDP4t0qVdCPBV/+A7NZX39XUumC/KSknZvF2080bTyiTjXTA3OhRGe3kJwhseZ5x5MdPvq5D/Hhjz5GfmAU037/T77LdE8TCIfV9XH2CcGduaa0CyWSAUfZBC/odSIbVdEQRi1Jz2exMBP39Aee4NM//gnefvA9AN746jFP37/McQqbG1aR5qUjRvFl4nHA/gODbw7DY5Ik5NkvGpjVsmw5E8e4Ww3HBxa2Fjk42kOs5MiampaKsmpZs+TutQ9cpZcFbF25hNuzJPCb22z2NIfaLNR1PSXQLu3oEh//sZ8z36VuKNMaKVpcacZT+hJXpCzmJsDc2FgnjnvsHex3eGBjsOdSlG0HjXBdhzyFIFzhln2WaYXnK1zLuVoul/i+b4OQUzNTx3E6eIirFIoGT7q4zkqtCHwX8sWcyKo2hp4kDAfM5tZg1nHJ6wo/DDqIYasMWV86LcVKnYwS2YvY7Jng9Xh3G60VY29MIMxnV9OabHnEvTsOvhUDuPjIGRqv5emfNcnq6y/fZlFPOLue85T1Kbq3PWe2vI8+3oR37AX2bkEykGgMRPVGeJWabQ6mt4g88zOO1szSGunBCqXX1C2onFCadvvhTsHaVo/jVHWEh6TXcvudXZZHkpF1X+8NKr779Tv8yk/9hwBc+xsh/+3/8D+ijivGa+Yy2dlPcVqNXJNoCxFdLEo8XAIbUEvHBccEZSuuT+EIJJLYYtxnkykzVaBVi2cPtqVo+aFkgHrfGiMbmGVTxezwhKElwVdrl3nz3Zu8cvOEvv2sydG79IYeRdWS5uacmE4bmqomCFcwwYymAUfENI312fBG9ENJWx8x6K/OBEA15NKq8lV9hAoo5j6/8zv/KwC//du6M25dBWVt05gCglhdOcomGKL7GWiIQp/pfMG5M+ZSL+uafNl0kNXhcEDSi1jMUzyb0Pq+bxXRNEVpMPp+kJKmNWVhxtzxNGEoyMopz901Y/erf/PXqJqSy1ce4df/4W+a8dzdI0J2RtUf+8gnODfa4LjN+PCnTPL4T/6rf0yDQ+PkHBzbJNCLSaIYz0KMqzonW6aU
ZctgZIpjcRKys32IUhrPXUEhochTMqtk6brGly4IIhYLK9SyXOJKh7qp8H0bZLoeZVki9GoMPJpWsLe3B1Z04s233iIMYTCMu6BBuj6tzDuomSDGDwRNq3GtqmlZL8iziHiwMuLscef2MVfft8nWGTOeL3/X47Fr19k7eoP7r5kzd/O84INPfZof/rCBS//Pi/+GWzdv4Su/SwLv3r1Nr58QhxGLpYWo6QYhZFfMUapGSg/HoYNLlmVJXdeEkUc2NWN+sDtlvHWeu7cNrE3cbRhEPfpDyWho9vbWj32aapaxc2+PM2OjbleUKZPFvOPVKQWOqimqijY1a+ONN940CqmNQ20hnLpQlGLeQYeaRplxVNBPVsJCLXEUYsyurSqdEN/nO+VKSRAERtHXngnGc8eoD66CQCkNVLK10Yt0XeIkQbU1No4yCZXlfa0+v9Al9azqILlNaXCAcAAAIABJREFUUxFEIW2ju0DIsQINcc/lwbYR43lwfxdHthQ2uIrCAf2eQmndGZSmy4JWZVR5iygsjDSMcUSBtoWpq09LSqacbCt0bc/qnZQkTjizHrO0NAdXSFw/pLGZRdkUOAIaVXXwc9d1CSOfkyMrvLP06I9H7B4ecnJkYU+XeygOiROXycTsmSiKmE+z7q7tD3wEDq4Cx7FnV+DjBorN8wkDs4Ro8oS2zpDOCm6vadRKTCSxa7EmXeYEgU/Trvz/WgIv6OBaAsWL3/sOZV10ME8/EEjPKPq2NuhxcJCBYKVdEQZ9prMZShfd3S79gKbWZGmGPTaIewqvcvCtKIv2FV4Z0OTgWE6pH3nc2X1AHEdgIemHu1PCcAi2wCtkjETikLN306h8DniKK1evc+bsRndOvfbaG5RKcuaG+X6zk2Nuv+iQ9DX5e7i2SbxB4xwx2ljtW5f5JKNdQRwdxY3HL+J5HvfumeR/PNrg6o0b/I2/9Xf5B3//NwC4fG2LP/7inzJfmJjSdyWe9NncOEeamvV5dLxPU5vi3MamuS+m07nhzHFqxL1cGph3Bzt2XcIw7Ey5AdpGGFi1PV/bpiYIItq2NIIwgOv0cKVPZAvMTXVCuN7jv/z1f8zuA0N/+cPP71JWglaXrJ81Z9D0eImuCpRrYlqUT7X0kGHN+oY5k96+eYtBqAiCDXKrsKl0QbosmFqRtCiWZGmFL91OF2Ew6COclqpIufaYKWqVuWS6OOzO88FoDdXkCBRttYKtS/K8pFWawdDMVZ7VuEIT+OZ8+/CnPsqzX/sKySDEsWvRxAktnhOhrSiSQAIZ/6bnByK50k3EvuUWTaeKYvseXgOVu6rwwdjrQ2XMNwFDAq5DGusCXjcl0gNVB7TVCmOqCYUPoqJWq6y4IfQ99nbMYP9P//SA80+cpUnfYfdNUwUv/IB+dIYkcVHeqnpeU+X7nElMAB0PYffBIf/1n9zj2nWr3pO53H92xtqjJZc+Zrkhkw0uX6iJLEdoZ/+A/iBC1UXHA0t6CV4UcbBfdsonz/7xi3z0s0dcfdxg6Ec/fp5nf+c2TrbOaNO8+2jo4Ls+g/4ZXvqmJWUGR/jA/K7Z+Eo67IgUv98SWrxzqyoaWgK56uZplGqJXMnVDZNcbSRDXnzlPq0TEuRmA7u9iPRkm8OlFRXxPR65tsYbkynPf+85AObLDKUUSZjQt4H3Ij2hXZgACmAxmzGZzKiqBqv8ipSSNF1ak0nzb9nSOGqXlrSZpQVr6yPSLEM4pvqjmsAGUM57SO8NjtPSWLPVJIkZbvaoiprlibkUiqxE1RCFIa5jNuzJ9IA4HHXKeXXTEIShqfqtEuGqZtQfc+OJ67z7rMFFB32XZaFxSiOH68mAS1vriErhi5V8uoeUCcuTgroyc1O+rnn/Mwn+I+Zy/uTFH+P4jbtMSpfpifnOPWfOBx69wZ7nc/vbZozf/4mrTNN7HNjE/7tfP0Arj/MbWzx411zg48GAfq8ichyqxgToznATyYhsaQ67p268jzuHN/Gq9PQQWY5ompZnfniLj3/yGQC+8IUvcf7cZX753zfiHK8+d4+dd/47PvbD64xttX5SCAZhSLbcxjYQCaSH0KILcHu9BOGajvJKyEAtC4TUzJYmUWypiaKQSmlaG3wkccBL995iKQsii33/SHKeUN7l7n3zfX1cJt5tor5Lv2/W56ULPWbznMlEE9vuXdU2tMrpBGAGgwEXLgt2t+cksdkfURjiuhOmJy5UlhNYLVksa8YbNknLKja3xjx+/UM8/8K3zP9LXOIo4OSk6GTUhfDQ2qFtVhLLDYFnldDsmprPFvj+kCCgUzqL4h6l0J2aXlmWCCUYjvpUlg+QZUsje95qsHyxQCYopSirlWy2RKGYpTs8sPv2f/vtf8anP/XjfOf5N4lsYPh3/tZ/xJ99+U946y0TaHzjzdf4mR/5cX7kp/4Sji1MffSHf5iXX32R9WFIbYM+7QjSdEESmCC0KQVlNUW6EcuZWcNNJXjqA08ynU65d8fskdGoT5IkHB2beW8aRZ6XFolgFbCSPlVdWsGa0y6flKdKcuAQhQPyNKPft8UVR5GXOW6uCaxAgNYOriy6M0FYHlgoz5PWJtgZyCFz3aIac+kuq4aLV8Zcu/o+NGavbZw/ZPf4JmHgdrLVJ/suX/vym9x528xLGPQR+GgFsS34KE/R70ccHR6ysb6SHM7xPI/UylpL4ZKXGUEQIThNJhUtdVng2c+an6TEXsP5dbNe07Jhkfm44xSRWEXYoubauctsv3Wfo3sGeTBY8xnEEfNsZX/hgwrwfafr/pS5Q5x4fPozH+CVV0zHazapEI6mtRwhpSAKQsoyJ8tt8TGKULpBKX0q/f4e83WAVplOkpSy4xvZycEVUNjuRGeoarkpbVUb8/iq/r7OFcLIra8QCq0ucGVgu7vGhNxwshty+55toxnKs1y5Du/eMjzvpo4ZbwQUVh3VcQRt4XEwzYgiGxgSsFjmeK5EW55SXS1wHIlIrBn48AzzsmVfVAyGZq6euLFJPnE5mNyiaVbFFIOoWWWKQRCQZYZHsyoiKNEQRg7S3nOOrqingsAV1FYt9J23FgjZMp3kXbdVNTVozeaWLT54EcvlnMAJuHh5y66Xiixt+NM/eMDaeNPOaU7baFp1KpvfNIokSbrktSxaPDckzxrGayuJcxeBSxRYUQGVIoRCel53dskA0mUOQhFbsQHHdWlFSTIw33fvYAdfrrF1ASZHK7XHIWWVI9yG2ha15xO4eL1htm/X1MIlkCFtr0Rp20lqXca9hDITyNB2vEXMIl8irNWFblw8vwSvhdaM1d7xbaTuc3J4ggX5GLXOQFPvmfNtMlvwxIcHXHlkyJd+3yjsvf8DV3jtpV3qyuHK0wZ5tLa5gUvE088YPv5jNy7zrW8+x6sv3+IjH7kAQNiHr33tXV54YY/NLdNc+PJXniUrD0hiazG0zPFwuXd3l+HQxFO9OMH1HJRq2Nuz5sGNi4NHu+LQqcZwq+q6E3wpiqLjXKWpFadI1lF1BTYpS5KEJOmzvX3vPWiiBSJIcJZh99lZ4eMFF/mn//s/MmtjluJ
IAbfnUTE3nCNQiH8vj3v5+Pf/kaZr9rEc13iWzdrbfeNeU8R685iROXHzJl263/fIGqL76hCYO2Wh2Zss8XLvgijxefm7Jt/0OOYN3ql7BSU5tKL1+xFFUWYDtlgyNOO5FVTz1c+70tkyVfEGO0e4PIOM4+bDFDSJgssN+xi9n4tIDzXnCpmMOOVs/kafl6NqRpcceU66/bPkzPkEdeLnId8+JUdrrs7i5hu8KpjZkZ1GKIJmEtdthPhDhqAiqj4jgz3cyarbtYqEBEFWuBpyTIVzV0Xxynk+Ll11/ETEVoU4UhVcFg1sJZvPVmFy3TxHseGixy0ofPYNtmEbHt3dhFdt6BGPE2nH7hVK/cGqBEVDZd+ysAGlua2N27nq3ruynKgJYW2Hz7yh8TbUhz2BJh8Q3lx8k0LKSkCkcuHjPRS2PYnkq8Uej4la8+yVHvOwWzvZmkJHgYccqMd/eTTgsDd+6Sgxh9aw8HHrOCgi2cK0uB2UsPo3vrDjpmCsOiVBjllaeexJNrmGJlmNaSRFNVchLyO3P+HLq39eBEA3RNEicQw/GLKDJYhWeiqoL0ovZeNa2Wjao7TvXGwCB0ZhAIYqWJT14cI2jfJ2fKwtBHkQFC4YwYhCEYE3A7qri+QzaZZFxGl1FCdFMnkNcPHA9F0WhqzLJpg8jyDe7uJRoxCEJvwlbFdR2i0VQNgeJUy7iBj25EaWsTWQYvLKMqOkZExZLMqoXxAmogECQAmmpiWAq6odRo5SuVqihD0FQs6QSGoUK16lGVML1QVTAMk4ip02w1y+NKRBULQ9NJS6r3XL6IpgQkJPy1Wq5QtavoEb1GEjI0MoZlRbES0i7SPaxkknRTgmEZoMg2pgjcNEODI0hODVRVxYpYlEoSZhaAbkbxXY3hARGg0BQdTdOIaHXIVqFQoLm5CTMmns22yxTGbBRfR1ElpNK3URQTx/UJZTlGpiGkkoOEJBU4/KSF6FaO7ZsHaW6WQYuPLKBxWoqqUkbXBOrFKVTZtq5Ia4f4d9UziFmCMdKMSOKdsoNphWSbY2jSse/b3U8y1lZz5l3HRtMU4gmVpibhqO3ZOUj/boWm2VmeWy+CUQfNN1l44ArGR0Vz7vkL5rK7bye2o5NMi3uo5BpIpAL6B4bYvUcS2MQ0KhWPWTNENnv7jl6iUZVkk4Fuivmyo6uPUrWfRCZKQdLkt6c7UA2fsbLQ654WEE010dY5g4FdItAeBnmK5SK6rpGSc8Hxq5hRHUOSncSbJRFa7xCuIr5TX/OIxRIoBYdQEjU4ioum+SiheO+mYZEfUTASFtGMhD06IxTtbgq2wXCnPJfXheWMYllChyuhTanqYFgZGpLyPQCCKU3MxVCrr/PORCJDU9DCqQFhFB8IUGWWWlFD8d5Co6YTAjXAilqomk3vNrG+3feL16acZuO2LmbNaSaV9nHH59RPn1QoDpQxTUlg4zpkWwzKjpjnAS0oGkQzGplmoauj+jxmdc6kd3g36bSY/D27tnPA8kNpbRXvpVTIU7WSmAM2vny+slYm4pmMjgwxf4EgmcJQeLq3bv+8XfYRWuyTfbJP9sk+2Sf7ZJ/sk32yT/bJ/4L8Z2SuFJWkLFies3A+w/lxCsOj5EZFlqi5rZVsSzuNmRRDgyKV5zg6Y6MlUo0iWhldGsMdHGdkZzfx9ETEO4aCQsR08T1ZcxHE0DQN2xZRv0gkIjNVGrokoRAF1Dqh79Uic+Vxh865s4hJ+E5pzGXXjj48Qm56WfTT8YoFbvzZDzn6iFZeWyciFo+ve4WRoS0MbhGR8WJ/ltyenURMBWQ01A88NE1BM0CRYTjf99A0Aybw46rYfzLVrRibKEP9Ffb07b0v1YqTjmLlY8/z6KNT6yS+/d8X7nX/v94p6mX+yrN85O+nc9vN78w2fegL5wPwwnOr+e2Pf8L7TzuSV9YLD/7wZfO49blBqmqVqEwFJhSbM49YtNfr/W/KKYcLiuNH1q2CMlRkGDAeJEHJ44UBg1umEjeU+8eJyz5aAKpWJ7fQozq6HSVwCpiWnBtBmjNO+SjtMx2u/J44rjjg8JMb7uUPd94AQENsAYsvyfDMc6sY7RPjYuf2YKWncdanP8GyeQJaetVlP+D0kxawa1jMxTde6+ZnPz+Lr/7PvTx8/zfE/akmH37fd/b6vEvmCTKCF1aJDEOgTKVvHw99DjzhVLItAur15J030rp4DoevWA7A/Tf9le/88ma++tgDfOCkH9eOu/b6Cxk+uo/bXz4HgOVb9379r1/xfrq3ief79CfO4vkV63mRl/e67/9J1jxT7601Z9k8Al/F+deM8ehelKpvoygSMhhG+NtfHmLx/ivY/+AjAMgmY8xLZ3n40Q2sXl2HK85fvIKUIaJy+bE82zeJnlD/81VBHtE3NMg9d0zNON17yx+55lc/5bILxHfz/dvvIhgbZe2q1dx7u4AKPvnAAzzJA7AArh69FYDzvr2Qm7/97dp53p61Ajj3vUfW/h4piHk2AZuakK1buijYIgv5kc/+t7jPrIAnrH/xDd51pGj4uOqFJzlmxbtZtVY8UyRIsWNrD907Bug8WDTQHSuO4IYiEzTnIBGF2/76OpoWdlCWUcamVhFJ/fVdf2Ij94khrtb1zvY36rVazz6xic47DwZg56rX2LS+m7iZosES31/fljKVok/SjFEpiYyZYrsY0U7MuMhkKVoRw0pCqJCSFNwN2QzOzp2YySZGpf72LAet5ODrIos6Xuln5pws67evpiQhr6O9fRy6/HSajmjmza5nxPNEM2x/5SVmtIro9vO9LzIQVGkLYyyc3QnAow/dz2fP+wAvdokWEk+8NohZNolrZQqS2nph57tondfAUG+Rvz0qMutOwiaTyaDKTF0sEqcURjGUAF3qk5H8CM89/jCf/e8v8cKLoon4nGmdlJIlZsoaQS9XIdU5B7VhOpos3yw544wFDhR0Dln+XgAKI2/S3/0AVU/okRnzMzQnDHZu6iaQWamxHp8DDtmfru7thOMSgqNCqFqokliBKEhuhhoJxduzVBMyuRHxREYrDEOCYGq0OgiYBB9UCEOldqwRi8n6K0U0nwWMiIoXKAzuGUK3xJzUVA3XExTxICCk8YRFbmSYQk42/rSiVMs2YegTsSSk0YBioYIbCj2oRxQSyTbSmRiBIpRJueCgqQqqpuJLaFI0E8X1yli6yL44TkCxWEIxdEETD5gxUS+mAo6EiiuKSirdQHWinktRKJeLghxL1nOl02kqtk2ghGRkVtg0TfK5IhEJs0pms4yOjmLbNrokEck2xtCUOCVJ8pEfqeI6HkYCkCUAu/q309rchJWM4vviPVSKFRRFIWaKjGmhUCQ3OoamqfiSSCGZTlGxbXw/ICdryH3fZ9AZrrV+0RWLMPRRMYhJIgPX9/EDj+NOP5BCWeTtX3liN4v3n8b8Q8RczIW9NFopUlmLtpniHvr2DPLSS30sXrKQqicm9o6dJbRykq3rhT0XxhpJmAGWmcSuiPeXSiUoOaOMj48z0CN7pflxSsoQmmw+bkZ1FHzKZZth2fSwUini
OA4DXTnR+B0Y6Otn7dpNjIyKLLW/NYBAJ6qEjIyJbZ5rYlVLaFGbXtnLQiEgEWkiJ1tGDA0UsawI6YZIzYYsVIbQVIuqUyKdzcqxCgk9F90Q31IsGqE4bqOi4MjMqhGJktR0HNfGkc3pYzETx3OIJcW3sPnFyXXcYp+O6c0i+xNEavWNYQhDfZOJp2zApuVAC+Li+yj4MVIRk6ffeHLSfiMc9sMYv/ja+UyWL//ol4TlSbaQatR1gPwuL/7xb2slKpqhEzg+X/vxb7juki/Ke5K2qjKhD1QgRNHqMGLHHmRkvJ9t29bz5O0iY7X/+6Ose6Ber+94w5QrGr3do6Sj9XrpvpGdbH1qKsID4D2fFFD3fD6PXamQTmexbQnBt3eTTc6iJbuU1pSoow18Hb9ardWUNTUbDPUMk1YsNEMSZtkllEBDDXXe6BKlCIb6/4OaK8UPal2rN63fyHj/EFo0wrS5nYBg88HSGCmOYzPRJyGB6Rap5IRyyDY3U/BLvOvdB/Da68KwMAydMFQFQ+BE/psAVQ0IfIkjRicMPAKFWqGvSkDgu4RKWGssWnHzjI2PoEvYXjIdJ6anmNHRyonvlkw92Pz9hcdJlWfyzAOiiDBxoIXppzAd8eE3JTwUwwXJmgIQVmTHelXDlU31xCIVomiTWALDQOLVJ9ifdJQQ2mfEsT0JpUkmUQtVIrL/QWmJ2HfJgoWc8dOLAZh/0LE89c8ups8X6XBFqTKUr+L4AU5OrLbJ1oBc5xBXXXss37n04tq7OurkMxkNhFEXK4c8+JfH+cCpx7Bzh6zRWAbP/PMedI1aY7yZs9r5Y24N5pg47mMfPaN2vj8/Korxjzv5NCq2ythYMAG9x6VAnAp5yfDTl3M45eDl3PPIc0QkxGLruhe45Itf4bRzzqPxOPHMyzyb9bxBRBUn0swxDDeC71SYPl8YMq7vU6166FqEGjYCasWXABElghdxKBOiR4XSHN69kW9yKXuTz3zsy1M3LAA+K/68E1Hvdu8fb2cyQPChx+pOxeNPPoRysAncy+Zu8Wle9JXvcNYFJ7D61RfZ+cpUb2PNi7JuLJJkmHFiijWl5XEsYZEvdRMdrSvJgbe2s+xXXwfgfv7KPSu7OHjRcUDdubIrs+irbGf54cKp+t1fvo09VOR/viSe4e5Hv8XZJ3+fH333Af7+5PcA2Lx7D51LO6fc36OP3sT23p1s3CHue1tXnuNXdODF8nzzUz/f6xgCbF//fyY+UYwAK5KgWJKEL76LFQ159rHbWfmMCAjofhrVdIhG6/VLR3zw4xgxD78oFqqH7xPwvbsfuhT1BDFfPnz63u9tcLD+rh668w46kyb33v1X5r5HOExdz61m+QlLWPPEBq782LkALD7mqCnn+MCZJ3P/3x+dsu2a667isq+JsR7reUZsDKbW+s1oylCelmCQAba/KY537Po+hUIdmvr1L5zH6R8SvbCaGnT2bNuAPboHNS6gH/vNmMMYo6T3O4CNGwXT5PQD5rB783ZAGBoTS5ZhxjDiotBX1QX86ZOf+wJ/uvG3tevNX9TMxo11EpPXH9iIOSNJWjpOI+MusYyOF4Zoshj/oUcf5pM/XUEge+Eoms140aUto2E5gowmHFtAVktjGcOkJPQjVYVQ0clVhMGwYOZyiDgYuspgTuhYvS3F0PgQnudw2H4fAiBqaPj2Lrq3i3cYS0TYM7aNlhkLSKaFju1oSRIWPXr6hb5pX7gIM1+l1NdDMhABCkeLsOLIsxgvDjEwIOow7rnvdjpnJJizQNahbeoi2dhAqTJaM2jjmoo/Oswjd9/D0uUiCFQZqZDIpihKJtk5c2ezbfM6wtwo6YwwIuxCkbhhMP3wpXS0Ct01mimyfOw4ogmhIyJWE23T5tCQeo31zwsDZcGCKrPnLWLmjDn8429/A8DUVMyIgaMJw0VVIwSBOoWUwvdDFAVUtR7EmzCSJu+n67rooSV9qyAIBOxa02pQesMQ/bB8WQBuGBFAOG8TjKuhC/ghhmERkb3ebLtKJhUnn5cGNSq50RGUIKxBKsMJ58wwao6a42qoRrVGfhKGcbzQRonoqLIHkmZoKAS4vlOzOeJWgljcqjH1Br5GY2OGku2TywunOh6Po6gh0biBGav3tykV80Sl8+F4Hm1tbTiOR7kgjlMCBV3VGOofoCIXtkQqjWFYWLJBcUSL4Dpj5PNVIhExVo2NFoXxcUI5dnE9xvjOIVKdaayoeJZMLIrv++RyOaKRCeZhhUqxVLMRUokkuVwB3/Uw5X1OvKuJfQACL0ANNEIJPUNxsCIK1aqL708YxwZoKvEGA1eWKxx5Uidz588jj4DENloZBva4NHWYrN3whrxemkxzlKcefpO2iRqvIIriezi+GCe1YFCo6CRSOnZZQGI13SKRaGZ8ZJyGjNThPji2hyIJGDw7gq5bNDYojIwKvaEYMGuewuAunbjs41V0+ij784k2iTGPNusM9w1jxCIossF0JJ7DiqVpaEszXhAaMLAVikN5YoYcXxWam9sIPEA6eJlmm3I+SjSRoip7wxWscTxbJybfsWW4+FbA9PYWBgZF/bIf2mhegB7RalBazxVlClvW152qmYsXsvyYo7j/N6K+yXcKqLEGbMWsNcIeHRyntVlBMcR5FD+gb8BlcK1N9HSht6tajGokxWHvnskrL9bhbL/4xiV86fs/A+DX3/qqGJ8Q1KDuHriKW/MWlKAOFw7kRs9VMDSjRn4C8KOLP8+l199c1xEhgELo+URk4K3qVFi7ZjWxWB1K3Jw4Eniifu2CwZtr+mhJNxFrqNdfb/3nCDMPb0eVzJWaatG1so9+6YhHLAclVBmzS8Ri4nqplCCdcaplVNlc2YyYuHaewqDQI1Z7EzNb+xnIUaudnNEaJ2IZDPVHicTF3DO9qWUYb5f/COcqUJW6MoropDvbKQ+N4cqC3eZpzaQbGxgZyxHWCrMDYgmTUlEsXuN7hrBLLqedfQ79srFu/+A2EnEFp2oSiYhJ7zgOeqig65IK0tNQFcHUNxGv0zSlxp40kV1CCRno6qOhXbykZGsC2/bwKiYnHPsBALbt2M0LL68kGM8zrV1+jG4jcb1M+34yKpdSmD1tAate34o6KdMQhiEEChNITU2TzRYlS42qihpOVdUY7pvKwjdVJswiMZ47EYWBG555hg08U9/t6H9zin8jjz3wR3zphAa+TiqiUfHLnP0+ccJzgMs/dRa+b+AmJGVt2aFoOzTud/w7zveqJMd45PHv8sLz69FjMzngUFEfozRO55pzj2PDm4Lp8B9re+FgeG7tALsGhQL+xKdPZf3I2fztkL/REBNRqjO/fAwf5EMUZC1FYnojmhYS+vUiZj8M0FDQFZWqUydJ90ON1nZJWqKDrVewEjp2dai2zy0rN/Pu+RleeUU4lA9uynP3JcfWfn+mv5cOM82Chjgnv09kfx598M8A3PX3h/jomafX9r3i+kf47sWniOfb5DFSKsEKuOgr36vt0z+2h2XvWsBO1k4dvKh4lpg1DdhFYyLF0KSfG0yD/qFdtLfPnXLY0/dK9+4
COGBahk8ffwr33icM9rPOOJmrLv/qlP0//+Fvc+ON9ftxh4SR950rP0tXWkb9Ag/bnUogcvLJIhr2X5/4iLjdeBM7tgxx9z13wKemPsqMGTIau0vM97lzOxkeEu84l6+zNba0y8aC+a0oRNBl0XtTcxbN1UllE8RkAa8dlkEtYhd6a8d7uRGWnPhhxvvq7I1PPPdXXtr6D664sE6df9VN5/Od82+aco8//85v6DxQOCgvrn2IiRbZXc+tru2TMeIsPGoJm58XtSGV4lRM9jB1dsof3iCc8dbpJX53yxV8/lPf5V37C52QG5kakTv38+/CV5ZxAlfw7avE4EVi8F4uB+Dya8Uc+hiiXujI48TcaGrwScbm0JRZzu8fEE7reWd9lDd2DPC1j3+pdv7PfuhM3IuieJI04dprfwKAio/nCiOiWJIU3r7GbbeLzN5/feKjJEc7+MQKUZd1I4Kw5KQDD0aV0fvR0Ty6ouJ4Id22GPc8YKV0jFA4iNmkglpQGFFyFCT72vSGFEZ/nqKSJqUInac4DpUwSVVSv/flxxnoH+LAg95FLC6Mqwf+dgv7L9MZGdxNVRpXg66GYvjEZohvYb6RwNyxheGRcYpLxPNN328Gm3cNMTQgMmCpxtnMndbKhoFedoyJb33JYYezdecg3UNv0tEs5kJprBuvModTz/4oANd86ypi+V6seMMEKR46Pq6usmPTJtSq0PFzjziMwb5Bjl8qsodmUzOxngRbu3eQaBBGaFM6iWrovLL1TTLbxfP1F7fhOz5aQYxdyS4yWPVYcehSNrwgdMS2PTZowxx+7Aoa/1tk2O/69R8gH6LKGhPRFTpA07Ra5mPC8J6cvRK1VeqUWqw6LfuE4R3WsjU1KnZNQ1WVWqbLcRx5Lb/GlBviEYYeZiSCH8p6sVkzaMzG2fyWcISjRoxyKcC1q7Vm4ygquq5TrlTR5TtuaGkmEi3VMgOlYg4rauC6VXSZlYpaMVEvpqlYkvhCNyP4oYMhCQPKRYfRkTFUU68hGcLQp+pUCKjUnjliWKLpr/RRylUbzwczEqGhSeipwngOVdNRQoV8UTyfH0IQKhSKYp5nG5qwolFijkPJFvN8vKBC6OFIYz3TnmIcCF2VqGzTUcr7ROMGc6bNYmhQOgOBh6breNLhHCvbWLE4SmjU2HvNiIFpRQgmZR4Dz4dQrTeh1Qy8aoDvRia4I3D8Cp0LZhJN+OzcLtbaTBu81bOdjjYxlru7ypQDBzOqkW0WbKFjIzbTZmZZtnQ2rz+zQ45xkWoI7R2dAFSKBVzHo1RwaMxINuJinmi0lagZoVAWAY+IZmLEY0RkYLqQt6lWXHQjAY4kfAh9Uo06qWabiqx5Uv1OFE+jIo1lzVKJZy3Gx/I0ZkVQ1vGijA6GpJuNWvPo0Etgl22iabEuWZZFX38PthPDV8S2RDKN45VIGo01R8JsK+KVoTQs9imVLGJGI4EbUJQ8AZ4X4DoBDQ1ZkK0QPNemf1sdXbPg4AOZsWAOZrSltm1g0CY6owKah2rVTXhFDVFlBkw1UnTMNOjtKbN4goStENLUmCWS6YRJtdIXfe8GNE3MlzPO+wr33fwLfEunNKl3jBpS++Yn/q8GYa0hchAIwhltL3XXk4+rNaCWjdOz6Vm899hz+e5Xz+Pyn/1anOtAiycnOVcpfTp2sJmqV2D9JEbkxUe2UNVVGtKiFjWZitBFH1ueFbp6+fGdJMxpOE6AmxPfR65SRrW7UIditczx3IWLiBkGrU1i3nVtGaHsOLiNKRqilnx+h9HRUdQwS1VWZIfev2+u8x/hXKlhSCwpJng5X8DJlSlXK4zIu/MNGC/kKY2WaZ4mCyLdkGLewZXKxzQ0ym6J11/fSEenUBBbtoo+FZoWoknaxGq1hO9p9V4fvuiOPbGAgMgSaZpGqPh4rswceR5m6FMeFMZBgIoVi7KrZw8PPyqyVPOWp+nevZ1ZTRbZ2SI6mauWUMYH6ckLw2a0P2Bn9zpi0VhtQVPQhMJWVALJalSplLEso+YEqppwuCYKAQGap0VkpEDFkK/SKcVYvHwe85YeKO7TKXHLjbfS2KQxa6lwgEb7FLLjVdqSQklr87LomQXs3r2TT31ZRHq7urp49tHHiVYTrFz1t9o1e/odknIxc5wiRksTimHx1oA061vh6x87n0w2xtJzPwbA0NAQMxJNdPdI6oSr6u/+mBMFtK1NreJsX8VbO5/m/juFkTu7bRlXvPkUV1wtjMAXVosxbJ9eIpkRH+zKJ15lVfMMVpx4GIPPCcMinxFKPTVNGChaJcAzVLnYSwPAFYaFqiioSJrXdIxQ1whkhF4JY1gk0d0x9HpCi95NXbym78cJ7xaEGS+suYVTPv99HvnigLUoAAAgAElEQVTdt8QztdUL/CecqvW7drBsxuwpjhVQc6wAfnrBGexNCobCyhvWvmO7LslqNu8WhnxTaxub2VD73dMgE29my5apx77wdF1xnX+GuOZZZ5wMwLayw7xJUaQJ+dznLufe5wRRid4pAgVXXf37vd7v22XGAuEsb+rpJtvWwUjvO1kcJ5yqAw45iDdWv86u/gHee+L7AHiIewDo6JhNQbKonXLy8bi+zRtrBSHDji2bOGrFCgZzA/TnhXGczigEtkoYq0dozXLInbfdQOBKeMHV8NyOFg59zxWAmHcLDzyIv7S8AZOQEu8+6UBefGwtO9cKR+34j5/K0O4+1q/rIRyrO0JPPzKVLbG/fypUc83munO1sUtE4caLWXp37oJPwfq3JGwuaJhy3Euv7cIPbDga3lgnFphogwVHww+v/yKDianXiSAmx1B/AXNWnL/d/yaahEb0Df6TlSuL8HH45a2fAqDYlqeo+mzeJAzAW/7+TT515g8474Mf4cz7RI+nUl4YZ4O5MSKZejb0xpv/yM2/F7S8N8ptJ570Lvplj7jp0zqpBoOM9ZXwE8Lw/R6/46k3xtAlQ1tkrIQ/7vCjG3426SnurP11xdXCuVfb59Lk+chacgwtwq49uzl4xZFMywro8Zwlh6CnQh77zRM8xt7l8t9+g1RGpbuvl8q4mC+vvrmZdf+sF1Nvo/sdx5tXpugb7yLilykO7gQgFU3T3DyXprQomr7sG9/g7vvuoqdrF/GkXMSiGpHAwgtdNm8VwaKy5nDSGWfXWPFyw6Mk4g2omk/MEuvcQP8ulEqVbEsTTzwpxkM1monFA9EbBshXi+RcF7/gMXuJGIOG0OHoE4/nphtuZ8mBIrP6ma9fygN//B179ggnrbEpTaXs4nneFFigQEmENSdCUJX7U5yrCSdsgoFhwvkKQ7/W43ECNjjx71whj2lEcBwHX7LZBYFDoPjE4wbtMnCiGxq7e3vINIh/j/aNoIQCTYLU1b7nU6k6xDMxko1irge49PWVWLBI9qKMj1EpaWhqneBB1+IomsnYeI5ss9AB5ZJLuVJEZ6JXVBzHLpGIaxhSyebzgnggYhg1avmqH2BoCbSIGJdsMkWpVGJweAArL6PafkAkEqGxsYmq7PuGGhKLWLiuGN9iaRTDMGlqa8SSTJm5QgVF8W
uZiIorFiAvUOnbI3RQVI/iFFR2bu3mX0mkMYqKghGJ1ByucqGIYZmkGzKU5PUURcH1HHyJXHGoB7Qmlr7M7Cw7tvShawaxBuG0FMoBVdvl/p++E541IYedOZs9O/PstMewHRkY9kzcssbubpmd1Gwcu0AsmqahQdhq2bRBfmycaEypZevGB6p4blhjxVNVSMQtqraHK6EyVhJKxTKJRhtXZo79kkaIjuRNo1q0Gdsdw6lqNSIzI+rgKUU2byzQNl3opUp1GEVJoSPWdM/ro6EhTWPWYmhEjGdAiGk1kojPwI0K/a6oKZINGrojnQ8tilMJyOVtDMnyrAY2hhkjcEIKJTGn4skEMMohEiqutMUwG1IkQo1zrv4uAH++8goixNDDIZxArJttHSb9A5PRDvUWLGZVkKnNnTMdMz6PaNAE1MmRynq11nN1+vw5fOWH11FVXCaDDJ2qzwR19sT3bPsO+iREmO9BoEx1rjy/Osm5UoWtG1ZQJjLPmsX3L/4cF15zA7ZEaRnqVKelqXUmmdQ0XCVkVBXPdcAprxDNJhgbizMiEVEaMzn93Pfy0K2CxGTNkzuBnVPOdfgJR5JKNpEvuKBNQPANfnD5lbxdFr/bpOyINcxV4sRjM1i98m2tS96JoK7JPkKLfbJP9sk+2Sf7ZJ/sk32yT/bJPvlfkP+MzJWuYY+LSIlbrWLoJqbjUd4lskRqySHTnMUvlhnbJTx8L9DJ58fIZgShhV0p0NTYQK6YZ+4SER0plC22b9SIJcv4MoKgKBq6bmHI5nnlchnX80WmSvqaIlNeL9oFUDULlBBXNoXz8xpuEKdMhEceFxH9I/y5WOk8/cPgyahiKhsn8Ks0ZEUEas2LQwwPjBKJa4RID10JqFarUzIrhmHgBy5GjdY2mEJrCxCJ6OiaSuArDA2J+1q6aCnf+eUtbBuRjQbtYW7hVr7+qxvp2igi3kM923lj/Xp6i6JGwelyUJR+0jGH7337egBiegpPyTPYPbUx7UDXVgbSIkpe8Rq57Ipfc/UVX6G/V0ZKWuG8X32NnW9tIykjspn2GdjRCtNjUymbAZIp2Vx15Wt8/JLrmLdkFqM7RDZC9zQeeOR5Vq0TfZiWyBqxdWu2kzZEZH/6PJWXX36StLeIFpmp6tm4Fj5ADV5khi56mEZRJqW0FR3XK0uISz21bQQhqi8b8+kqQahSIYrjTmQ/RpmjbaNrfYk9W8T1LjjnVB585nUW/FgQWrz13BAHHTefH174SU47W5APVK7J8D8/+Sk/v2jvRCIAF/36d6TUDMXSBq67+Ora9jdu3bjX/bc/Xa9Najt4Pi+89uSU3wfe6CE5t53mFhEpG8qoMB6wN7nuSZHduqn7JfjM3u/vrPes+Jf3/u/kmivrqcq/vu23aCrD4sXzSSXE3NjSvxMAp1SpZawmpLd3B2ajiGZHM8uY09HJQ/d9vvZ7dloH5375G/z0j3cDMLSzm/LwWuKJOqVsp6oThjEC2cyyG9i+7gn+dPObtWDe5rWv83Z58bGp2b/1m7Zw+iFp1j07wtWXiAztlT++k85jl3DK3CZ+83tRg/SJE/fj99QbWr//wFn8WdY1hY74tnZs6qYlK76DXXtE5LNvz1Ta+29d/DO+fa2AnYW++P7yspeertgUx+pQ4Usv+zilqIQu6A5jYza9uyscdKAo4B3Y2c/cDtkItSL2yw2q2LZNRFKcD05CM/79tj8BcOKHz5bHVNnd/SKTZfd4Hcpy7S8vpOD3YTUI3TlSfRMzVEmmTCqJekR8bqNC0pQ9ntC58LvXivu/SDROji87mvZyP+df8HW+e6WAP37s7KMwqxlIyfocP0fZ6+WO228kkRTwl/bWBv54ragJO+erIv1YHO2HtMlDNwhI7Pe+8EM+eekHyCxop8EQlNHr/nkHh526BFtmbOZ3NDOn9Vhe2vgIDRkBy3krWMfM1lnoaBx+rEACPP2PlfR07WLZQlFfd9KJxzMyOsTNW2/Gk5BjRddwyy6uEtLYIvTYWNcOHrzrL1xyvYByDr/5KqueeZIPnnQCOySUdbw8wMHNLfhxlbNOPAmAO+65h9M+9Dma2kSEf+2Grbyw5nWmtSZRZCh1V/coz7+8FiXqMdAvajzmL96P875yFTf/VGQBewf7iUbFOE6sc5qmoak6flBP1auyx9XkGqyJZsAT6EFN01CUEFVTBBETEz1PFbQadbOgag/DsFYUH4lEcH2HGTPmouhiW7Hgs3S/Zbz2sqjZqZZdQlx0VSfwJvSXgmlFiMQjuDLSHSiQbU8zAfBPp5rJj/cyUgpq9UUOOQLFIpFITHBF4bo+hfE8MUtkK/xqhYipUylVGauI79DzQjKNacJwEvzRA8fxSMQT8jwuiXSCZMoikJnAseERKnaZuKETk8QbASqu5+HKbFSo6pTLZTRNI5EQ59INlfyIN8F+jS+fu1qtoErEi2Wp9G4VusVqFMeFClgxk5zsMeeMVvDTPgpRElFZgB+ENDU1USgV8RzZuFXXqY7Ua3qjzRkCH9BcqkOSPGbHCKnOJDu37aBjuqgvnLV4jGfu6af1QKG/QqoQqhx74lLuvu4ZALZv2I0Vi1EsKiTkGDi2Rzxl09stximacolaEaqOw/iYsGUSqTSmruN7dSimZoKmgymzhb6jUal6hHjoqnjHlpbEo8jQHgVd1mrqMQXf0km0iG+9scMl39cATiMV2UfRMpuwohqVikoxL3RzMmlhKhoEsp7T1KlUfcbyY8QTYl1NN2gMDDhs3txNtl02U85XqRQNTF9cL0gMMm1GI0v2W8Qb60R2vKlJwws1qraLKslVPNkLrSpJPSLEcGyf4vgYriqyqJ+48EL+WboR1dLRZKPt/t4iLW0mqmzPgGNgGC67dsHMxceJezAN8opGRJ9KLqZqcVQtLt+fi2UCoUGg1DNIyUwDqiR3UHRxzUxHB0o40dvPxwhUNKXuUnz/9/dCtq43CHXCUIGIwwRv2KWfEiUDP73sbfXqk+TBP4n1/ORPLUctivt8/ZEch50+g6aGGIEiEEQRrZHB4altcVa8bznNLTP5+833AzBj9gIiZpr2BhNDQhp/eOk3ueSSi7A3CX1jhzluemg1b73YzYEHizHXowaOl2PpwW28+drUNjn/Sv4jnKswCDBls65ADYmrEULLRFL4kxsbI1fIYeKTl86Gqacx3CrFsmSIUQKy2SgRxcNBOGXHvM9ly5shhm6iyAJa31eo2gHxhPg44wmLSqWCpmuoysQ+oq/G5CJeJ7TxMVAk3EbxQnRVRYtEkDW2bHy9j4P2z9KTG6DbEfCsWS3NxJqjbNkklMjqF18lkTWxy45oCoww6kVDRoVqdQKvrhIq1K6vqGKfyXlIyzLRVJVK2UH3xSJZCD0+ds75VCTTSyptwknwjbPP+zdvQBjp/zqxXxc/8GhNCCPmhVdepyVb5oFnn+LUowVb2D1PraToWaSmz6vVFnRt3kUQ81jQ/s7GqA/ceRsAPTuHuOfOvxBrydDZJNheFi1vIu9ZjO4SH8yFX/k8F/AJfnfhpxmXTe96ChXOOamRM
mJhAZj9gThXciHvPf5UAB67/x8kG0U/F72WCjbxA1uOvXiBhm7iYqNojQCU/H50oxEjsHDder+oxad+ltT2Pk49QtRvfA0EecWEXAITrV4fvls0EX6YX8JF/35sf/Klz//b37OHiUBCkzIN1fcY6BULa/usGaTaMyj7OTgSsjKyc4iGeUkC22XPbqGojznsaJZ/aQFao4DNXPe9K7j11juwG9ro2bMGgI6GFs4494sM7N7JTfc9CMC3zjuXow/OkEl3AhCNaeR8qI7nUOVHaqSiuK7BfY90M2e26D3VZ/cws2kGpiWgC1073uDBP97OgYfNZu0r4p3GEwb5fJElC8UA3nbbbXSQ4t57niAi+9Bc8o3z8KoejqcyLBnzNm95ih07I0xfIKBYqWiUYHaRz559Og+0CCjmbT+7j8cG1zGtJcuRh4oFxWhXOOE9Z3LYSWLBOY2Duf3675CeNbs2zvHZrTTGYuzasIOGWeL8yTaDHsnKCKBHI/zhd2KRfHhNvVOYUlZZ011n7tw0MrUHlZmaBLaQ5CmGHuLI/kRjo+JbHBwVc/Lr3xN9tX50+V9qxACXf6teG3btdZ8ljCg0Ztpr2zLZFN+86DcAXPXd84hazejWALZkVmyOt5KRjWUv+oJwYK+4+lNkUiGt80R/Hi1ad4I+8pmPA5CXvbhWPvxXPnrr12u/f/nbH8VqrRf3enYRz6uSNMW5AnUEbXQ2o+VB9GhdfzXEwloB/4499R5aCWlY7FrfzYIlU+sFm/UOXGyKjhjHvj1b+PJnvsBzz66ie0B4hG9JQpT3febDjOXEu1k0ey6Pr3yW879wJgA3/fbvuH6W0S095GfWHYnGdITyoFh0N49meWX0LZojGl0bBTPgoYsOon84ZM7sxfQMiTFsmtVC+5wkAwWxFvX0xakGY/gRn4ghe9FVKriKTyJm4sq6mjBqMtS9g19fcQ0Ai49eRGJeG+udEUZ2CQNzTtsclFTAo48+wDe/9Gmx39KLqFRbWS8ZIX2GKFQr5DzYtEOsO4ZbpWfNECW/WnOcHr73LhYvO4Qb/ijghZ89+71UvLAGg4c6vC8IVBSlHoixLKvmoFSrVVzXZXJ5RR0KWK9dnswqCBCNWpQKRUBFlbB8PTQol0I0J8khhwgo+5pX3+TlZ1dhFyQ8W7UIvQDf9zB12cMy8HECB91U8KRzFaKTySYZGxFzt72pAyUw0RStdu8hAdFUHNsuUbHFnDONCDOmt1MqSkZhLcQPHIp5wRwIEIsmcO0AImDKpvNVAkxLZXRUXC+ViZPLjxONGWQk3LVUilDMFfE8l0pJnN8wowSAJuvOLDNGzIRKpVKrM03HE4R2yNy5IuiUy5fIAe1tGXq3Cftm+uxpDEjHwJdGbgi1XlUTkkwk2G/hopqjXxzPUciVqJSLaJKYwpPOS7xNYs01F8evYqgKsTZBTFPuHxeBHV+he4f4rna8KpyJbLMYk1w5xKtQc6wA5szp5M1Xd9KYacAwhPHvhJDLOSxeKta00fFRAj+OZgYgmUBV3aBsFwkVj2hM1sOVCsQTMTxJaGMY4IdlwiCCGRHO4/h4kYiZJBOFki1g1joaSqXE7i0SdpkLKJdHSCVamTZbOEBvrSsQT/qYUQ/DlM6bH8M0LdyybPgcVlACi1hMJ5QQtdERA80qkduTQ5P3OW8RjA+W6JONapfOPQC3Osza11/C8cSYj4zq+H6FMFBRAjk/pbPb171BjoHJaCLDNt2icaJ2Kpcj1AMULWB4Vz2oFigOrvSPY/EIJU/0mdo1INbaTW/uIdmgENgK1DnKWP3Agwz2CN2ZjmZRFNCsCEpEfv9fhsfvuhdPfvBVHzgPrj73LFZ84LO186gRQzi40tR85umX0LTJukWDUCfQdQxVPOeHXvoeqq7hqwGeDOyFrs2DN11XO2/nEYs49MhmdoxsoDJWX+deeehNTvrIewhUMT/NSIJn//Z07fczPnM2A5H1OFqdBOMvN/2Rj3z5S1imiWXW16JUrJGyN1HPCcv3m86ajbuxJKuwTgI/WWFg7P8e7Pcf4VyBguKIBVXDJ9EQF5SiMoujWyYRINTitQaIHi5BqBORqjwaeviVquj4XhQfXqLZYf4Sgx0bIdkozq/aaUI/hxeIj4CgIlJVoUEow0RmxMS27SlFvaZq4TgOiiomgFvRsCoGI4N7mD9bRIQjYYKh3ToL5r0LRbIRDg0NU8679PRINr1QoxoW8H21ZtSrCmiGIq4pFwrdCHFdydICxA0LxfVRjDpT0VjeQ60qFEsF9j9U0CxrbXMJdivMbJe0krMamH3Wf9H91h7OOV8YlKvX7yB0y6hy4jiqTmFogML2rZgyol9VVEbHNDSnwsyZQsGvfnklr61fQ5Nku7FLVU485mh+dtu9WLJLfW68zPjoCIP9vSgy+1dxXUbHy3RNOFfvq7/5l58XTYlnzJjF/PlxSiWb4ZygKn7+6QiVskf22PcAcPc/XoJTYTA+g/4+sQht6i5y41P/IHA9ihWxWHohcBMEhijk9rQyoZpE1SI1ylMzEqJIIpMJumQv8AkIIBBjFwYNRAOo4mBGJ7px5/j1HU9BmATB+M3rQ2XmJ3V8OV8efmkrth3hvJPn8b8pI68Ix2KEqY2BR6lnDaYtmFH7Wy9lGOqrsw49w9M8w9NTjj333I+/80KCg4OlUj3c8ZWt9K99iNGi+Pa0CvgBoCRrvn553EVRHILy66xZJ7I8mbYW3hzehiO/bVMyxbUfsIK1CIXvhiqhFfCn2wRb4Z8mWAs/Ur+dhZcuxNEjVMp9LFwmmgg2N00nN9hDIIuKGxKNSCIq3n+sfADJMfIa40xU0hxz9CksmDWfnb3inZ/4/c/x+F03kuuuR7xKOwZqrItj3ZJ2df+pRn7vyno2cdUTq2p/71i1nsmxs5UPTG2Efetv6zVZ974gshPpSohhilYN2QYRhevqF5HTlnS9C/x3L/szb5eIYqAqPl/6zE8BuPbHn+HSS35T/92MY5RD7v3T4/z0198E4Ivn/+Ad50k0aChukpGiMNxiXl3P3POHO96x/wXn/qj296z0fnzti1fU/n3Z127iOz/4JBdd+Mspx1x82UdplY1+AXqGA2bKerjeXjGHf331z2hqE/PlwacfIty5mR/uvJxvfFdkW3KFMaJmHF8R+nTZsg9CYh4FdSWvviEy3BPplIa2GWwcFO/v1bc2cfpRx7FnoJ5F/PN1f2DBUW0Mj9fTdPmKz4tPrpH/EkQlu4DlZ4ga2t7CAKpmc8aiTzIu6291L0Ei2giaWFO2bdiOXVJYfsAR7FgvMPqFCsQiIWHFZ0RG3VsdFas5yo7Nogayf8cbLDr+cGJKnHFP3OfmrteYObOTBdNV3pIkLIVhg8MPb6N7s8h4WU1RXM9k1/YeWlqFjjXKeTJGwO7eUWxJEf9fZ5+FZXSwfr0IEpz2X5/kDz+7iYZYAi+ccCx8QgSFeiij0LoRYuh1xl1VFf8FQYiu1+cJiAzWxPc+wSgYSMNRn1jv1BBV0omrWoSq4zFz1gLKeXH+bVs3Uy0qtQyR61bAMAiq
MDIu9Hfge2Q7o0TdRvKacPobEwmqbhRPEXNpNDdMJGmhB4JVDiDWUiWmKsQiyVqWOAiKNKQTKFLvVys+ppklkXCpusKALuwcpu7+C2lbNAdDc2icJow7t+qJGsA1OyflqoXoHT4TVRhBVaVaLWEPieuNS/3dMK+Var9Yfwdl0Xx1mVhDF06bz24GqAyNcPhyUcNaKYt7i7U2oZmyhrwUYIVpzvy4qNn5+x13QzzGujUbyEr0DLaGH1EJDANfkoZNm9PMVooE0tlSPI+UmqLq5PGUekCosKuKkozQNlcEIPsosf+pLQQTgWmngKYYHHRqA5vXieOG2gsocZW8X6ZJFzZHygwZd1zGRsXz+V6KgjvE9GmdNE8Xc2pk0CXZHqeQy+PKktL+TTYLDmio1b1lMgaVcoxA9ZGJQGLRJKqq0tAR4g2J80diLkHZwZaEFoqXwvcCwmgFxRHvb1pnifx4hHQiSqCJCxYqNmayiuaJ+3Yp4fpV/DBbW3sa2xP4gYHmR8mPifP3bfPQtSTpNuHolvJVPDdgxcmHsWq1WI2SsSZ8TUMLAmwZbAikMz/cIwkf5sco+yMogYJpift8/fFVtLZEBXsjk+qsfLWGfPL8Uq1OfJ7M1j289jaa2loEeckkmdcRBVlrl0iOE9pgBzahUw+YrX7hPlobxQJbqVRJxePkSyVef/RPco8AFJ+yHWBKx/QZ/1bQ8gTeBF2gy8SfpiKexVccUdOnuBDIzGqkAJO4pBobBhjoqRKNW0ybLZ5vA3DOhYewYSRHRRdrZcKc3PQYeraPkqtYuMEwh37wRABe/dvjqEGCta+sxpFsf4se+yBXvHA5exNFcv/YSp7YQJxGo4Ht9O5137fLvpqrfbJP9sk+2Sf7ZJ/sk32yT/bJPvlfkP+MzFWIwPcCC/Zfxu7hIaLTmvGGRUTKHh0hGbEw4kmKMpJkJFMoioouIybFsUESkQiu6xKJSspTx2L+AQrbN1aJWMKPdCsV/KqJU5WwuaSF77kEgTdRZiWoWsOQ0QGbVKPESbseuhqpRUXLFZsWtZVFB+xPWTaYPOLdBzPcu511L6+l7MkISUSnIdPJ7l4BujNw8Cs6QejguuJcE2yFE1h2qNPdxmU0NPB8Qs3AUOoQlojiU9EMErEs8w4WdL5FL8aGtzZiGwKW0Lt1mGqhxNDYKF394j7f2uMR2FE6posHbkibGEYDRxx7FKcecygA44M237ziSvyCy9EfFVGw1azkkq9OpekG4DR4nNv+L1/2VDn4CNHQdsOm9bQlZ2AmQjISemmXKyzbfzZrV4lo/09+dCMMwM9+fgcl2WF2WiZB6Iwy0NtFR5OIrOzqFVHt3V0CNpMKTELPn9LXw/O8Wo8URfYyi1lRKp5NwAQrFlRdBXQdZdK450d3kYrV4V7HHfxxjnvfoZz+CZEi37h1gJRdhJPrz7n8tJN47/6HceoHzqGYEHOqrb2JjRv7+eRRIvPZ2HkKF158Bm+8tonnXxTRcz3l0zxtGmvv/zNL3i9qPMa2dHP0EUeQHBBzbPOadcSTHfxjyypMq95kb/Z+IUNAYq6AZ7VaBkoYZ/dbIvJihyVSkTa0TAczJG3u3X/9Jb/45TWsfvEVXn1d1mFd/xPOOGYhpqzP8DyPUAkIwzpls6qahIqCqlskUyIC1djWjmXFGB4eluMrjk811bMXSmGUrr46g969Tz7GCUcexmWX/oJf/VzUam3etBmrMcXsmcvwJRxsa67K0hPPYuzvIqvSOSfClsLUpn63/OozXPvLh8iPKOwZFNd45tlHeIZH6judBJHmZpyhIYyoCFO5lXc25B5c10XrnHb8qshpDe/Jv2OffyezTxCw2R1P1NnoKusEtKYCHHLiYnYxTKkoIsItkmq8NS4in3fecgFGQ5ryeJlzz60z6l14cT1LBWCQ5A9/uoLGrIA5RuYnOfVMAS288Ev1jNW5F/83qUw7N1xxGQCXfvlm9iZ/fvBuHnlMtHPoHejl8b88+I59vnbhFe/YdtU3/8QHLhC65P5fvQrAeG6Iql4f2/n7ZUknZe1kVYyn2ezhSzjK/vs1c/IRB/Gdm35SO+aWB/7JaR8+mtkLRQazd/sIz6++nt27evj/2HvvMDmv8v7789TpO9v7qnfLlmyruFvuDWwwtoGAKSYxMRBIfhAgBBISEgKJk2CKAQOmGWww7rZc5CLLtmSrWc3q0va+s7PTZ576++OcmdFKwuZK3j/e93p1rmuv3Z155pnznHKfu3zv7/23HxW4/TcPPcFvOcrLz/6BM+eKiOPR3h5Q5hKJTgc/dzVGSR4TfNn46C4+9s8i6XDvG1tZsfxi7v7X73G0T3iWz5hTS9gPkslkGcqK+UsXB8jYnexZJ/K5PnjTlRzotelcsoT3XS3YOL9z979h2w6+ZhIeF30INipMOQoR6RGmWGD32mdJzV1EzTzhbQ5g4w1M0ByYR1Syy206/DqhYC1aTOzrgOailiYYGthPdkzca0aNyfmr20kWfXb3CHn487ufwGxp4PRZ4nPnX3kei+YvYW/3AaKSAVYpWChBDVfR0MtMgL6Lomh43vSzSdOUCuRHURQ8z8Oy7AqUXdf1Sq4MgI+Qwa7rgKxNY7sWNXU6T629j0JBeOFDoQCGrmFJEesoQQz+0RgAACAASURBVHTHwVWinHORgA8H3VFaF7Sw/62jJA6JvZKOmjiRHF1NYs7TiVF0U6do6+SVHgBGXoell3YxOT6KX462eArDI1Po8ngI6CFsp4CVL1Iar8Kugi0BVBXyw6Kf+eI4mUyOdFLI3HAwwmTfMG0LO4nXin07lUkxsneM3FAWs1XCCZ00M2fNJnapmIcdLwr0RvLwKLWNEto2Ic43Xbr6O+qEzDxr8VJaZH7myJCIuNbEIhRLAvpU0m1KXpZ16x+v9Hvq4ABtHTOZnBKRz7pIA6FokEIxTTAs9IS29kUcoo+YJynOiwZ22MVRFLSyYiRbuMbFzVQZZevDtaz/w8Fp1wwBkSbJEjnu09nayGRyglRCROl0LUgp7+DIfmuagZXXCPgddLWJ9bNv96u0tjaieAVymeo82K6FL2uiTYyX0NUwRshDM8uU/3lUVeXAHodoROwjVYtiO0U0mWvv6x6uX0IJREh7srCwpqGbYUpuhgmpKwXVZjTdwimJOfetIIrikkvn8GUR2cSIhaqXsAs+umQCtPMmOcsiGJQIpuEktbV17N3zFvWN4rVQWCWbKuBhU1snoi6ZTJ7GhVEmDojvTxyqolRGEOukc2YHnpqp1iSTbWLMBaZHpQDu/LqQ8Xwd6s5byN6N0xltf/u7h1i1REByFTdC0XZAM9GPY8ILBMSYB8MRLMsiTY58aXo9RoCmOqFLuEoBX4ngSwbs0dHqs4Q7xNipqo6Vt9HVRhwZhcuMTT9/t69NcsaVJtmEQikn1s+lH1zIA/dv5WRt6fmzADh4ZAPZkRKqHqSuRqz9+vbZJFYcZGDXHiYT4iy3nCxrFi/i4jMF0ujA4CD7uifY2ddPICP3dlhHC+sk0sUTv/CPtHc0rhRFuRd4FzDm+/5S+Vo
98DtgFoLr8Bbf95OK0LTuAq4F8sDHfN8/MTv8uOa4LlMS79zfP4hqBihmcxWaUNU0sBQP27EIyORRYZC5TGWEYHWKJdR6A0dxCUolsFDyKBQdNEOhILGsZlDHLVnYbpnK00DTPEqWW0nOVagqjbohjTLfx/URGD4gEArS29vPhz7yF5y3Zg0AK85bSYNmULAnKNoCf//G1i18+Yv/iivBsJpiEwp6YBmV79A0QcUu8O4yTO4Kkg3LkYmN0RiRqElqrIofJe/i2S43/fkXKQWEYqhjccv1s3ENmYxYAt0vkcllKcrCqStm1ZFMJmlvEkI6Eq5B72xm0+s7WPsPAo9/4coF/PO3v044rBKWOGL4NY/c/yCBmIT7YVMo2Oi+NE4RBCET4ylyWYtMRsxfMGwSrQkxcLQHgB/dcx//8I0v8M9fu5OWplkAvLphOw01BexSgbQvA6pujp4eheFeoRjf+v6r+D77Gex5tUwFQqJbp66tiVQqxehhAa0qZiVVbVjWQEOnRo6n61QTeH3fR0HDka8ZhkGmYOFK2I5uaiiqheVYtDYKgTFGP3NbGmif3VKZhtVtBbyJ7Xz3X0Qh2PPOuYJJfzoRyFf/8rO0nrGAvBNGQazhzTtTbH3iNZB1Zi+9ZhYvvLSL3t5eLn+3yDtramqhp2+CHUC8Vmx+rX0OBx2X+aYQ0jEfvIJUTqPCMG1fNp9BWcNkni7WRnO0Hj1QS6JBjF6RHME5IZyREdpnimte3vEKc5deRVaJsEXW1Trn2tmgBvEk+YGveCgIR0BZ4CqKRqHkYpphSlIhcAo22VKeQt6q/C/mpyqQg+EQUE2c/cFDb/DAg1spjk4H4fzZua3MmBumb1jsqz29vZymn4XVLPLHXnxxF6YilFbFENCogc+PEIrH2LfvCG/XrHGByz+ZUXVsGz16PNjn5O3LX70cs+wtMhTe2D/Ks9KouvmGFeRyGUq2Tna22H9v/OJ5jPIBNCX2TGebqN2RkYpNZ207r+1Jclp7kN8/8AVAQHCMeAt9B0O0dYjP5WZY6NZcNr8gwImNM+fzxo5uVi+v5pTd9sXPU9AHGBg/NK3fs5Yv5bL3CNKEzngj//Q3f4dl2WSLYm3lrCIXXH8Vrz7+LH/2WYHtHbWHGZssMiYJLUafFXtv0eVzOGKJv5deuZA9zx2gt2+MZXOWV75v3d5h7vgLcZ9OS+yJcy6+DlMVisam/SPsODhBV/tCdjFQ+VyiNExhh9ijBX0QVbepq6lFbxLKarBPjGvvtj5O/5DYt0tXdDKS2UNNpOp8AJjTOp9Ne6rG/bnXXkTpLKFw6sEw27YK+GdquzjG6v9sFVogTG//a/TvEesqMTZOIzkymoRLF/dTyo3BVJiuC6WxXNtAKRJgMlvg9AbRh729KUIhB0vWvVGNGHo0zEDfIYqHBHTvvAuWo7TpLF92EfM6RdmKF194hqef+W8ausTaT1g1xCMBrr3obD70Z58FYPeOp6nRszy691Hef7Mg9bhoZgcjB3byh81CudK1izEbFNjvEggJ5T1PGs3xCGouJYnFUjUDy7IquVNAha69mlelCmIg30WRdOn4qsxbFv8qvo+mqZRKLqYmjA/X0lFNG5QiYZkrpPkmjmVX5LKne+gm6G6Qc1euEfN06TAPP7KNG65aztjZYv9u2NjN4KSKUyMMukxahYBNbVMN0biAtqcZYmoigeeUCIeF7AoHajEVk4F+YaxYloVhKhUiB4DmmbNIWxlCIZW8rCaY7skQjEVQdUmpXhBns++WyOXF/g+GdOrmxkkeSTGnZRYAuXSGVYvOZNYsAQHeIZXmufPm0j8l1mJdSy3J0SnCshZXPiX0HBO1YqAETens8koVvUhTHdSARtQUsKssQq9atLCVwTGZ4+WbdDbXkgAmxsQzanKsx0bEM8SbA2TzRRQlihmablxpXpTcWBUatf4PB1n+LmHE1Mbr6d41Qu/uSXISktd1ToxgMEC4EKEgc5dczyYc08mXpH4TDBE0G0lZOdauFXJ2cjxEOBYmFImDWs4BHSObc6oJ1pqHh0Pf4ZNDtaxmMTet0TC+p+BKYio9rOCrYJiNRIJC3uzfvY+mRo9DmwaOuUNmGsQzUl+HqmkoPjiWNLg8n0TPyTPWWzrFHKfSo8yZcyHYBqWUkAmGb2DGIDtpkR6QBWx1FTXq0jC7DCoL4xUCYGYpZsU1JSNDJGySczxau2RqR7GIonsVPSwUiJJLF1h5wXtYtGwhAL//7W+gXWfpBWuwHNHf01esIdNzkLR8Stcq4SklirZFjTndPCjvR01RcTyPutpaklNTHN8GRkQqQmtrPb6noftCnsbiDk5AoTCWpCQxnIFACHyL5LEMSidpg/sg0T9CaIaQgS2rVnPjp64m6lr86sfTHYx7XuuZ9r/nFElQNoommP/19zN/xWnc/Q//WrkmvmAOvdKRougKliuuX79RQM0vOmsRZoNGrvinMBOI9qdErn4BfB+mhSa+DLzg+/63FEX5svz/S8A1wHz5sxr4ofz9tk0zdOK1YsGlx9PoKOSTWQxpyOjhAPhCOTYkW15AD5LNZojKwn+KYVAbiaPj4co8pZLt0d4VYf7yHAdEaRHqa1RKloYvjRjH9VE0FVV1KYeuRGE9iY+W+FTNcVBUF18y0mQmxMb6Jn8/7Vn+8vtPEPEdrFEhDN9c/wKjhycIyvoujh8k55UImib93WVlLs/JW9XrN3ueTjGdRtGqbtaRSdGHH/GNaZ+66o6/B1mR3nR1QoEAGEEUpJKLQeeCRjxbCOdsUcez8jQ3N7NkoVDgm+NNHBiNEWs2iftVLGtNVysFSwjkmBqmIRTCD/p4shioaSgsNQwMPVAtMKm6uNgYWo3s733889dEwuJXvyRZYr4Eb/L27XWZLzQxkiESE/fy/TQ9u47gpjMsWCo23lmrb+WrfJrXXxH5LhE9hiHx+5ZbzhtQK/0rRwt13aSuNkBe1kRRfAXVMDFVncRY1evywsMPoNRGhMsBOOeWC+nsamGyR4zL6auaiXYs5d5j+r7gsovZeyTNtt0J+vqE0fLSfQ/QWF8d2yNumLSTQWtsZut+0YdZVheH+8RanJyURRJNCzcT5KAhDKLoNXGaJnPAPhRXPFM9JoW86E9YEos898Z0jxXAJedcglOa4vzzhVK9c1+Ohx/5BaO7qrWwvvUXgrzguz+4S4658EBrmsfnPjM9knnJ09fw0rqnp712+W1CufPlXvJ8hTXXXAfAfm87kKbhLFGLYyD0AgsuqMWsSfCP3/woAP/0lV/yN//9IczsLrCEQjk4OkTY3c+BN4VHauasIAsawvwK+I+/FoZEc1ua8ezJDabf3yUKWt/yuRdO+v7/pn3rX57/o+89+Nix3rZqTbIrL5rJJvZw1Wkin2LX7vUAzAiKw7lLLfJ0z06CXatoC4kxaOicR19ujJ/+7rtsff4pAF58+XV+9MijXLRS7IX8+CArl88C4Pv3iZyttXt+ypDfw3AuzYLLxX4/+PxhbLWbHTtE9OVguJH3ffyDfOTnH4b3nfgcw0lxqA6N9xE0ws
xErM2ymZJITeFNivlevjrOos9dDHMLrFsnGJn4P7Bu4y/oqhdreE6XNP4Mh6RkMY2YcW69+WZmbmnlKarzVKKGqaRQBpRAmrERj+bOWu596JsAeFmbi25ezoYHd/Dkbzb90bkAqO+op7FfeN3nXriAXHgAt188yxntK3lx8Ahd111B/1PrAHjst5u59vqz2NLTw4rTRTTylX29jExOEgvLp5+w6R9SUNzNdG8VMkvRINxqYviwuEMoojX+GDtHAxjSu61aFrY5hRprIB6QBEjbdrB9b5Rnn/gbElJhT+STKJF6ZhmCwXB3Yh94YVztNB55+HUAplpCBLNpDLuOWl/I+Z3jPvMv/XO+uEYIrjvv/ncSYxmaamJYJblPDB8UX+ZTSdSG4+H609kDj2UOLP8+lhgDqDgMK05L1cfHRVUVHFecF7qm4lk+mhbBd8WFJbeI5xYxA2JcDEMUdW1d0MVkXij/e7YViPo6F61eyJgr8zeMAE8+mSHjCKeBE9SJB2dTShdQ3Gq/YrpLfSCIa8moUHsnz62t1v0BKEllTJHiecIbwtAj5HMFampFdDA9lUT3EEQXgKGb1HfUMTJ4cifMl74h5KeLwu63ejj/mkvlO4Kh98jhqhOoXLVuu2TKnXGJMA4100D1ZF20tJizdD5HXUA4lK6+9ip6RwZoahLG8iOIYt8fePf7+MxXRc5lNFLHVC7JrPY2TNn3UF2YYEOIYkIomKmxsgNs6oRcs3QizZnnnwPAm2zivI+0kddFJCJTyBEKh7nhti4eu1fs9VCtT/fhfnACsi4nhCIBXFwam2RSi+7hjWRJDByhWJJEI14AO1kirFiMHa32wtcK6LrYC3XxGvZtFYbVzAUiYlksOCiKxUjfMNkxYYhGLpiPhoKsG4vq6QR0MI0SiiujfqlheuXchdvF+MUjwrl8ZKdQ/nOTSWqbWgThS3mEhoS86ljQVXGOl3Iw1j/A6ID4XGtXF6WcRcDwKZWEftocjqMSQYulOTwgnuFEPERW/sDFV4m8+sN791MoFUEDVzLHKqoNvo8pw68KJpqSIxCCnKyhVSxaoLlYTo70lNR5COAUqs7OkuOiaA6Kp2C7Jy/i5NoWKkwzrDo7OvBxMVWF7n5Z7Hxkkva2Dly5xjQVPCUCJFGlPLEKGpY1va6mEo4wq2MO3Yd2V15L9AvZaslcuN1bXkTxFLr7j3LaecIRqaKze+OJjtT581dhSOTa3t2vc/fXv3XCNY89trby97suXIapTieFMSMhClMZLjzvCnrknnqn9o7Gle/7GxRFmXXcyzcAa+TfvwTWI4yrG4Bf+ULivq4oSq2iKG2+77+9y9f30SQdtutB0SoRi8cq1diz+QyK56MbPpqsZJ3P50mmpqhtEEaZ4qokxkY5Y8kMgmExmfksJIZLdM6BvsOSZtU1UVWfovTKFQoeuq5XPHGiO17FM1J+raj4BIImmWMKtdXFTCzLIVInBN/YyAA/+sy7QTVRdXH/WI1KJBKhIIsda6pDJBjCOq66c+fMWvL5PJPj04uBlttUcgwjUMPEcHX7zV+xgAUrLuepH9097dpUuhdDhvdTXg5dD2LbNpomPXFKPVOpEq40wAJGHM8W9KZT0gOdyak0NrsM7vQJB+XB9AFIKTUEZJFZp1DEd6ewCqooOw9Yui4xnvmK8VKm8XV9YTA83vcSwYDGlZdedNJnfaf22rMnJvaDEPQA5ZLH118nqMN3bj/KZDZHMBisHP6O4xAwQ9jY2LKgnW3bhAImZplZyvUIhmoYG+7jYx/7CAA/5sd87YtXY40lKZf89Uoxwu2Lede7BMQx7MK3frYejrE7PvjxO0lPgO2MUXCEoAjH8xSVqtE2fGA/qhrEc0KoqlgH2za/TjgsvJLjKXGYxmoCeH6JiaT4bN9QiFZTHFKJhPCqvv+GGxmb7OMIPWzsqUI2zj/vSj5y+w0AfPJjn+ZI9wgFo44dvxQGUSI9ydSBDcxcdS29m4XAWbnmGrasf5rPfvpzAHzv7u9W9sVd3xeQrc99RlAhHm9YATgyWdeQe9z3FdY//dS0axLbxUEcXXEWU6XlxMxGmjuq7JKnLxAQwR9+Uxygd3xFCtEbxa8PfHABnvSs1Um672yyhdyUWKv33bMGgA/fvh6AN1uEIv653+e565ZL+NyPH+auT954Qt/XXHU5AL0Tedpnd/HaHwQt7Mxr1jD21mE+9Z5b6TkkCC0eevrFyuc2/Fzc65G16/nvByeJLBZwJiWZoBQG+2iCMz8hojbOvhF+N9kLX4fnnn4SAFtCTjoWCyM0EIKx7B465l5JQ1SMo60rzG3voLUmSiYr1siCRWdwyerdvPSKiKK865qLcTJTEIN1mwQdbYIEjuOxpGUGXkkcvO7qWaS9MUZGpLwr9vPKjm089MRaXnldEMy8dXA/4LHuwccYl5BUIxDDDJtESmUngfjelsYYB/uFwpBMWxzpO4RRG4V81dhtj3UyIEkF1IDof8Sow5NRqrrmGAcT3fx+7ZMVBqp5K5fg1wXxZfTALMUIxXz0hlrsKfEsBatA0Iqw+JK5HJawmlhYZenCdlolM9jvv/sCN3/ycvarQzTNFtGt9d/bwKobZjCvTkTA0ulJsi/3wLwqqQjAig6DQMccXtkrolq1eom3dm7no+eLOcv1PUPP0CgzZ8/CtIViFIooxGMew2MKvpSLf/auONprOTbLxP+a1hqUlEIxmUMNS3IlPUYu5fDh9/4N7TI5fu7iDl5+fT9FCYOuCRpk+kf4w8N34Uoo/YyzV9O2oIFox1zGTDFW+7a/wf3PPcWKxcKA/9od/4cf/tddrDvSR11MjH/QtiiZCkXfwZQyXfUUFFU7Rp6XSSnUaaRP4v1qGreq6uD7eJJ2vXytoF8Xe1XVC7iWim0F8P1yUdYihhnBtoQcLqYKxBpNwovmMFkQY3VZq09brJPBkQJxqQjH4kVSpcM0mGJczGKa2Y0hhscszEi1n6vPWI5fcihJRt0HHhPOkJlnCdkSC4QY6x9nbGAUXx63Zp2Bpouke0+W1dWjGrbmETTFvKSmpshJwoRwvVgLtm2zZP5Cdm7fw8evF3t5wZxFWE6Y335HEJKUPRLRujiuJ/QLTTfJJqpnvSqJQ9KpLNGQdCxKeaqpKgV5NmzfuBtH0RkfqsKXbrjpszzivMhpFwkHRinpMNmTw7Fz6FLHCqpNXH3FlUS+KqP4vkV9Q4Sje46w8QnxWna8RFPHDMb1MXq7q2VARvtiNLVJiGU+Rayjkf6Jqn7z5jPDhDqCxKNK5VzTFBXFCTA5JBE9mkYwrmM4NcxuPl3MeykNfpRZC3UsRXzfYfLMXVDH2JCEZ09UIwm+JteUqYI7nU7ADCjk8hlcyUhXKPjomo7iFMEJy76La+s7W7Ec8fliTsW1Pc68RMBL3nzpFVAsgsEaUqnqmmqfNwPdMEhLh3cg6NHcOYMxaVypqkooGiMQcInXSdIJx0H1HYpe1fBfsqqByZxLSRq3hgmBeJD+vUmWLRcw6307d6OZGr4r2AABdE2rQHMBPCysko/nmaiSlToQC
lIqehimjm6IvnuFPIVUobLuAnoNpaJg3nOc6RC4oFyDjuvjONP1VNe2URUXJTgdmu8pJQzpJCkVQVPEIKuS5EaNatS0tjN8DElXOBRnODFKIC6cR6WUcKCetmA+AUncUnQL7D0kKdFHhTPMCCqcPn8huw8dmNaHQ4eqTuVlC2ez88B0uvbj25Ov7DzhtedfeZPLli3m1z/90wwr+J8TWrQcYzCNAGWMVAeCXKncBuRrJzRFUW5XFGWroihbffdEnOipdqqdaqfaqXaqnWqn2ql2qp1qp9r/l9r/mtDC931fURT/na884XP3APcAaMGAn0oLb3MwEiYSi2LnCpWEU1tVMfDIa34Fzuc6DoGAQU5CuAwjQCwWxVVdypX3DEOh/6jL2as1Tl8tLPMtz1sYAfAUWcyuZKEqGih+hdBCeNjKifrid9QM4VjTH9PGIxiLM5GaXlSsuSOM71ThEnZRwZD1OVTDwXJKHJPnS+fM2sq1tQ1yShSPqQmP+k5JXxtwcY/zFgQJnRC1AohjkSsID40WyFPIB6kxa3Bl+Nc3JrCKDmZE9KlUnMBQHZy8Sd4RXrhg7SjP/GqA5Fgf4bD0RnwbfvKL+1hztvAsLelqxtI1dNekvARUFFzfRtGUSnQjEAriWC6qvMZ3C1gFj2eeewLLFvOHlmFqyqK7Z4iphPD2HjncQ1fXTG64XkDWAjo4hSlUZZCwIbwVuqYykZ7CcVVUSXYQVgukUy7zviAiVx+9/V8YGE0Qi8axZBKo7Vm4vocRMKtRRM/FcS3MgIhyKq5PoZCjLl7Ly+urRVOvveb2aeP9ja989oQ54Djejz2/+/oJlxyPVg77GmqpgI5VoS+1jDyuJEeJy0KGVg+MFScIywhbJBLn8IQkaFGE193A5eprV/LAMWV7/+0H95B/v8EtNwgP6if5NFtfXsuMFTdilAsnSwz8QPfRyuciNVHuf/CXfPBmAdMrFEooin9CwjrAhZdfghYSa2j9EyLypUnniSJzwJ78wfe58joRtXlt/TpyuaqHrHfrdm76xvvY9uowcb3+hDErR6y+8x9f4a//9puV132jgeyk8HB94h+Fl/PeH7TREA4xCfipyWn3+eULYh3MNlS4Be765I18+aZ/EO9NxRjX4jjP3s76Z4VX+5xPfItoXEWkmkIxGsPSmll85kUsOV3kxz1ENXL1b/eL/L+6lrOA56mVybqDI1VP65s/E+QQF1y0moHJYR567UWeWS5eO2vRIp7kZabywms8b+4qgkaQUE0ThswDxSvxg7t+jdUSo/uorMEVCZDKaqx78UEAju7czKafPwcb4a0e4cHzrRINHbXs2HWEsCq8/KGgRlOwjgkZ+Ui8JX63tHVgWTInCAtH1tMxVSE7VtXO5JH9m6ltmI5FLykmLTNENCTuGnSGZrFntIdLVoh6Y4cYpaNzLq4h5mXLYfH5WbM6+c5TYl6f3fosP3/8x+hUvaFjI4M4JKlrnSXuc7CHxliAiV2HSMmchIYGg22PCUhwtEOsxYZ4lPHMJJN7qxAUJxzGmRqmeEy9k82P9bHgEzKnxoryF7cEWXegt0K++9H3r6R5kc3hnn0MDoixa55lMKtWZ0LuvdY5BvHWPGopTVoSdaheiNZAjCF/AjUja5qFIlx7AYzLPu3vtYjWqtTVB0imZY5NjUbAyDOeOcSQLPJaO9KM6il0zRHfVziQx1GTKKkCSPr0xJEeEr3DxJd0cetNgtCkkHTxS7vYsvlRAJpaIjz10no+fttH+Y0sFB2vieN7JXSt6gXXNANV8as1FxWRkyzku6TurhS396ZFs3zfP6bOTRkJouI65XvJc0OzcWWkDD9OKpmplE+54Op5JAp5+keKLJorojbXvPsGdhx5A6cQZnabWB9PvdpPfdMigp4Yp85YOys7OtnlpFEiIg9vF32UEha+oTGUmQ7HL+cEX3LFlcxdtJTHqCIkAp6Cqeg4vkJJIiRNI4LtKpRkNAS5N05btpBedUA+HwT0KpESQEO9TUgpoErK+jKIuCFsMJmTsHXXRwuHcPPi3tlyXz0PX45TRuabF4ZShOvleKYT6IqH52Uq35dIrEOfH6RFll4Z2jFJU7yOsK7SskDoHq2zDe7+1mOVz1z5mdUcPLifybEi77lNMDPdx2OMD4pIzCRVmR3w6ihJAohCMQCRNPufn54DVRgsEp0RRpdU7I7l4noWqaHpsO1Yrcvw3t5pr7W992zyiar8LuZ1PFeW26mJMM4oLXPbCdcLmZSYyAM2s89oo3uXiAEEog6UVML14nl1I0x74wxKdh5FnjNtc+YxzGFc1ycSEXOmqUU8O0xjtJpjPTWWpG5RI5ZbHeNCwUbxncq45/MGKpHK+0O9vVzwkVUkJ3KokrCrlHcIhWMoavUMtYoGmZzCxRcLyOjiBQv5z28KIqLvfltGOr8NsxbUkc2W0KSu67oKju0zlSwHKsR8PM39vP9zosi9WZPjyObpUZsHJTR9+QIR1dyyfQt/rB2VxFNz2loIIkm5JHJsODEmr0qw7CKB5Nq5YYiRoWpEqq4hhpUXsiESledIOE7X/DaGgVCTWBua4uKrUPLF2uhcMI+Bg4chaJMpSfIxNQ6MsPr0RRRk+aKAEcFzqsQnS+bOZO+RXhbOm1OJ8uJV5f+5EtbtejYBA17Zsavy3sL2Zg4MjVX+v/jspUSCQc4+ezXbtlVLr7xd+58aV6NluJ+iKG1AuReDQNcx13XK1962eZ5XUbz8ko3tW0x1T99gLjDjsnkY0r5ID0xQtC1q4+KAGz/aSy+DHPupj39dY8lZKormMneJGNSnf1U2UMSENLUFGOwtcGxrbjdxZe5K2UCwfAvDqOIwY/UaBDQKXolAUAyj1+BRSngUpiwMQwh8Dwc9YOOWk4FtWWhNrRpKqqrS1z1d+Su3SVkAjNRLFwAAIABJREFUtr0zhqpPZ2bZvXUn7/vwR3jovulMfc/+5vdcfZswAFw/R7QmRq6QIxIVkBgTj5xdwpf4Vc0L4bkqeS9PncwB2rfrIN5UgquuXsTIkNioWwClOM6G54UCmDujjZrGZiJmiFBAPG84FEfTogRidVhyTgv5DIbuoZTrGCh5dKMOT7HRFKGAKTTR3uQzs305ji+ZkowYze1RhoYltryQhkA9tdF55GQlc9MNY9bkyGcmCEpZ5pkWsYYAB/rFmOatfKX+Q6XYou+TzWaJx+OYpoQ5Og6up6Ephvw/i6KZBA2dsUmh/N1z95eJaJBNjPHJr4isqvd85us89/gLNHdK0ovkGCvmNHP28jr++19FsuXVt74Xzcnz0svbmLPoLDEujo6laASk0WwYPq4WoKiMoEulVynUYehSYKel06A0xoWzm9i2TwjKaGeclroGtiPqfgC88NKLbNpTCzfDv35TYP0HSmNMJNv593tFfsGCn9zMwf0PEnCGSYwJJTAScLjs3dfwwngV3rf+8QdZz4OV/zVNwXFcbNvmi5//0rS157ouqjkdQ60GJIytWD1E8pLVyDRNchT5wlcFScOd/3InjjVFNFAiFqnl+HbH3wlmuj0u3PPw
Y9x+o4A4Ws4czNo6oIq5vu3Tr8Cnxd+3/u2uafeJ5YXy07WwqmC9XhSHh5kexSnUEDv3m2Q2fUW89+IoNYYFkrhu9MEnUJuu5NCeHOdetoDj24sTIqerOXYa8DyD+4Rkuuqvvk1ofA+PPvDryrWvbniDW6+9jttv+TuC7WKdHu3dy8+872LNF4fJrx+9n4ODh9n25os0Nso8rMbZNDY3kHozQ3OHZIRsauBJX6FdMtAtWtRC81JZf+kpMTazzulg5OAos9ui9HSLfIPBbUlq2mppmFcmfBB7bu+BPahaGTMfIJUR11tS6fvD7l14PtTK/L/yUTB6oJcmSSIwu24xWbcPz5rEY4687mXCcy1SslbUyy+/yeprz+aNtdvo00Re2rzTa4iHJ3j4xzu56VNCQdjXtZ18oIQv4XaGmqFn6zBdZ3aiS5mazVcVv3BcjOehLeOcc+OZvP5wNbPzza37UbIJWue0Vl4775pl7DPFkVVT187BYCM5u4gAaEDaShBzMoRSSRqkUqbE5rBl73bGZEHthn6HXEph0eJWJiV8Mh5sxbMsrCzMWi4dhFaefF7njr8SfXztlSEOHA3S70WJzBKycuToCJGwianoBGvE92ULDmmrxNMvij4FwwFOu3YV7152PoYtXDYTmQTZiR7WPvcm//g94YA779yzaJ57JoFm8bnnXtrNg5cf4n0f+Dj3/UasR88sYDo6paJCOctKMVR8z0PVyg7HKpnFiSQXLpXid1L+B4NCHvQcLruTqpCxeKNONBohlUpTKggDwXF88DwyEvL0LMJxMO/GGRQmBVToG//2KhlUfvKf/1G5F/8EF277AK88WTWKNrCNL3zsL8l5VTWkvamNZ7e9wpHJKpHJyksuYKJlSL7fjhGfXjOnqb2J7kPdxOMxkPI6ny6hhwL4elkvEHvCxUCVBW2jwRiWPd0pu2nrEa679Br8yjiIfVnSDDK2uEedoeN5VR2hrlFApGPxOhSZGtHW1sGcuQWOHhlhxRniTAkHagkaOvc/+QwAN119CTMvifPMK9uYkEWvT5s3gxdf28aCz67CcsQ+33D4mWl9TIyM0RwLMnTQ5b5vV42uhRd20btzlGK62re9G95AlbqrGm/DmRzGqDWxp2S9s7iBk7IZ75ukqU2oh6rm43ke7TL/cGhQrMnMVJr2VrHOh0bE+Z2dzBM2ygCoI/QdsamrFfJmUs5hvK6WVEZc73o68do42VSV/OvVx7Zy9QdvZtUaUfzQMOMYeIxMHaowVy5e1cEwh6mJNlG0xGcd18B1i4QDVUMJoFBUiTeJ1zJkSCaTBFvqMGWNp1waAuHpsLqa+jCHD/WSywtdorWlGd1XyBeq+WQBq4sv/8OXuGiNmM+L58ypvHfZu64H4IUnHwdfxdQDFOVndUNnasyhsVkahQENw6xl4MgIv7vrt9P6cd6HFxD2RQrD878RzsIdB4UuseqM5WyWdSovkHnQr26fDpM7OjzKzFZZIF6e47NntNPdJ/bPzg0nJxdJJjI0z51FkSTxVlnfsMdGOyTOgMK4kJUFmXl2+mUCprv7BeEsfWtXzwn3nMj6HOk+OWHV3iPizD1w+OhJ39+0e9tJXwemGVYAxVKKgB4jkTqRHfGPtf+pcfU48FHgW/L3Y8e8/hlFUR5AEFmk3jHfCsEi5EtKx1yugJUUC3vF1aLw10Q+Q8+G17ELRfISV5/P5dA0jfGjYgDbliwipJmcc26M394jvJY//7rLHf9mcmS3W8mBOuvCANtfqQ7Q+HCJ1s4gIwPVjVD1ykHFM+d7aMdSX3oKgYBGwSqiOpI8wqmjRIJMJk+8QXhRRCFFF00r09MCil1lVAL6uiepb5LJwzLRb2piekTA8Wxy9on5WMcbVuX24uNPyT6VsHwDOwhIb0/ACKEEdAwpERtjMeK1DfgRg+GkWMjDuw+x5pJzaF+6jK7TxHVb2EzP7t10dAqM+8ioRSbdQ1AL4ZcL7wJoNbR2LKWxTWwO31OwVfCVsiDQcL00qAqqIg8mN49SDGNoNuVlqeqTDPRNosmq7mYgiucXGM6mQSsXqkyiO5BKF3Fkcrxd0mjQ43iSNnsqm0HTNAqFUqUwpaIolBx7GuNdoVCQ+QCSxETT8dFxHL+isA2PDrN84SwC8ZmVsX70+1+H7wvazHLbIH/K7ZlfP1L5ew/PnXTO3qmN7H628vex2Nspqvj3eJ2Y44LfQLAkniMljdWNL7/IWxv7aGoQB3W0xeCMc64m5xRBJq+ffe5iHv3tH7ju+g/z1OP3AXDZ5e/m1ts/xMduEcV5FdVD1yU72HFNVXU0uWeuve2TrL33xxUiCxyxpi+77Q5eeGo6w8+d/1KtyO6kxskWinSPnyg0rYI4hPVgkO/d+SOavyyiRpmOATxVeATfe4PwUgd9lU37gvQcOsLtHxR09/fcLxS1q1aKa7bkqvP42i6xPs85p5NlM2fjZApU0ly7/5tzb/4Czo3CWnvh4R/gjT/Ht//IXCopqeTuEUVkjcUid2t/WzvzF7YBv6b23FkATG3qYcLNo3OYVskSuHdokL7SGjY/JfIIDw2uZbCQ5jcP/4aCKryBpzfMo7a9g1Syn5KMVIcjcc5ZuYx7fiDGPGMVufG9d/DS1VvY+ow4THpeF4rmsfxMDWfUU6BAKB3n2Hb7h05SZBrYs6GabDzjrAbeeGI682D6iMXc98rcFy/CoVEHRWvk4GCV7GPt/TvIeUIhalJjfOCaJbzBNv7rLx/m+DbYLXOsgq2kRg8QmiEM7+Q+MWfR2iBF6dUseCWQZaDH9la9y8caVgsuaSA51kNdWweeTDA/8/LFbHz+WEWiSjgSnCOUwsjqEFbUxm+aRUqyyaV79tKdzaAdEuuvdUYASx1nqjQDIyiUR9uZJKi2MiMQpEE642xjJuP9R0WOCLBqVT3XrdF46lWbl3aIw709GiFZVMAslXV6VN+mKaxz4KDwMisKhGNtTLUtZOUKQTSwalYAPzPIh264hC3bxRoadVO0zj6DRFIoRm9kd/DxT3+G81fOJdYojBYjX8BVQTd1DBlVcmx3Wk7ysdHqY6NZruuiKH6VwEJR0HWDwd4q615DiwG+QWJM5l6gUyqVUBWTgCwo7SSz/OrBn3P//eIMUyJR1v76Fxx++BWWf0qcKUNDRQ50d9O6JMTI3qpz9JUnH+DaK0Vkfu1z4vPjyUk0r6ocD+eHMUI2M9qFAXWAAgtPW8Ltn7gDgMHeHoaGhrjjP7/LDz8vUAlHpCMrSXU9RRuCOLqB601Pa9i/c8+0/yeZjmwBSBdGeWXTdCLlkb6qqpQ87vqU3N/ZYh5P2mSqAa0NbRxlhGfXn5yWOh3ReHW4B9/U6ZojnDIv/kHIgfqOeWxcJwhXcvnpOeDb/nBiXsq8KzroWhLDIks303WR8hB4knXOnrJoahWOGq/OQJllMtEzSrhe5tHlPQKhhhNo3mctPg0vLm42a8kMevbu4OBgkYb26hh3tLVjSuM1XtPKm2yiVPRx5b2iNbVkMylcNzDt3nqwmdSUMGzcYpK6GpepUoqSNMImErKUy6E
9NM0U+10xhTE9ONY/7V6a6aMHjtHR8kWCi4OkpAM0EvXwj8u6sbwEruvT2SkIiRRXI2NlcKk6I5tmNPLq1j6+dttNldf+6cf38o+fvI3BoaqDsOdQgs6ZDRiSqbLMbqca4n/bMXC1PFd+9GM898tfVMf3imYmFJjRNp3G/ezFQqcrupNcfdUynnl2J8XU9LmZ2y5k2ZGhEUoS7VM2+suFw9+pzVyuMwZkdGFA1XSpHN0icsLrZ4i93dge5eDrO9n9gjj/F6xs5+CWkxtsR7oPnPT1P6nVimdWsPGn3p4p+I09/UA/nYtO+5Nv/6dQsd+PIK9oVBRlAPhHhFH1e0VRPgH0ArfIy9ciaNgPIyjwPv6ndEJRFDTp3cpLppGmpYuYqBWLvgw6HN64i/q5Inzpuxbx5kbyUgzlPIe2xjYUkvyFyH3nJ/8kGMoaWhWKebHQQ/HpQiRSA0pco6ktwLisX+H7Ppok+bes8vUK+jHE/7oRwLF9XFuvhHUdvzpBFSiEBo7rVWCAqqoKEaAeR2+qaKCWmByZblSVm225tNS1cJTRk75/fLMmhAJ1siVfNiPLR9Ifq9jzHI8j7OVqW3LaXBbMF96Ud737WiZGxzBUH08angXbwnVdEpPjRILiGQ09SsmyUU2Z9FoCUzNwvCLokt4bE1VT8JwSmhwbz9aJREHTxMFo2TYKMXS1hCLrXPi2jqe5BIONmGWHqacylvfYsb8HgEyihGJaaFoY1y0buQqK5+L7LqqEOOmqhuKAros5V3UTfJOSZ1dYKbu7xzFUhXwmzZdeFTC5ji6Do0eL/OwXwseQ8TRmd4W46dbbeGGzEBxOOkfQr0Vz36KpQUJSHYWgDr4kGnHsWjKZLNF4kE2vC0GaShp4KCxZPpOBEeFIaOw8kxVLz2fvtlcBWLo4Qk1Ex/NNDpYkq1HGZUIeyP/+94IlqkxsOXiMMXZ8e1RyNpYNK4BobQjdrTokXNfG9xXwjBM+r/getqRZrTc0bvn05/EDYo/aktjihXt/yOXXC5Gxc+sGxoemKx8lHRzSqOlWjm+GKvrxzMPPoqHTqIsDfPueXSw/W9B8P/KYkCEdMxsIhoTCe//66cJzYUSM+d3f/y78PbRc8GmineIAv+GWVbiFFDqRinF159pHMD9Sg+6Lw/m6Z99FfsrlvNkBHv2xiOp992f3ADBr+S2kg5LEIxylAERdoawkNn+RiW5B5KJGy8/Xw9PPvsTNd1zNmwNCqW6uqWPHzsfY+7IwPuZfdh5K8nUCMY1aRXhxP/Wpm9i09wkyjk9zhzDKHB8627toaBB75nDvJP0jA/hFmHGmiCTlixnMSIjEiEVEOhY01SESrmdSQp9nLJlLYzDE33/ta6zb+DIAhw4MEAgY1DQ2sXGzcBYYoSh+jcvcq4S3uVTM0l5bx44393G0VkQH93EEP11A8XVSgXJZB1BtlYCMViw4+wzGSo3c8vGrWbRIGAhvbN3C3kN7GZ7oYbBRrOv21jZKJZftOwXsMj43Trw2TLQzTCorWT5LBtrsZjzXISDZXSd6Rmmc34IREeOS9gucef4ZPHfvyZVSgPjsGHXhJjLZSSKtYlySepz7dgdIJhxqGsW9xo8cZUFjC/tePSqfV7QhqkyFq66aTf/RMZoCNbzcL8bqdz99+YTv/Nu/XMA550d5eKuY9+RomrqGGIYexJMS23cU/KJJXIbqHc3GslP86Ic/5KeuiFLrZiO2kueTt3+A81eJRPiLW2dTckO0tYkz9HZzJUNHE6Ry/XzpVbFnR/MmuuFgKD6OLeZG010UVHzKMEGtwrBahgC6ThkOGMD1ZQmOYB2jg1W4aENzGF3XKRarxlZyokjTzBiZiQw33ChYDB/jITZsHScp0QnxdJSPf+qL/PzufycozwK3sYSXnOTSFWv47TF162763HsZsYQifNF5Z7Nh4zZSURM3WYUnRbIW7TX1ZIPCkXCAUQoTaV58SsxH3klhFR1SB/vh89W5+dy37+RX99xJbY1cU4FJiqpCUJIDhJtipMbFGmw6UzhzCmMpoqpCZ0cTW1/fUbmXS4ALVqwA4NWtYg3WtdSTmZJpDopGoVh1+GYK4u8iHoqkyPbwMLwAq85aiaeIucnlU+iAK+dlLD6MkdOZHEwzZYiz1pylUhOPs6FhI5OScbBYsJh5UQfNDcJo6VrQQKaQZ913dzH7QqFAx4MBJg5MMqs5wtzbhDH+/L0HuPaTHaCKsXxjXYmAZzJ0dB/jI8fzDIItWTBbu4LkR3SSw9OV/FkLDWxbyIi+8RHql9bQ0OhQE6lqKb4eplNGt2wJA0uWkjRKFsd8agrHLhI8Do6ZLwwTCAm5PJEeYGxsilDEJxARfYrXmkTePZu3nuhmvHe6MbX1uDOzpsFGkYbiyg/PYst9PXTve3uihLFkgrHhCZpbZR1Py6ZolYgaVcKcxOQYHKg6eFbNXMWGGY/CJyEpo/wzu2L09mdEnS5pwJm6QUOLx9hoWV8Vc/0cv6jc67RrWkmPZBh2LXKJ6eyYdXPFWTQ+VSDdNcXcKxvYemQ62uPIMed0ICrOjQMHTh4VOrY1LxF7dmxvji0PiXEMSvbOcJNbKbIx2SeMqeMxXH/MsPrftqDUaaKxeiZ4e+Oq3BQ1+s4XyfansAV+8I+8ddlJrvWpAHFOtVPtVDvVTrVT7VQ71U61U+1UO9X+/9P+14QW/080RddwjomgRlpa8WscSmnhlVBVFaWpHn98kmJOeMGDRoBSycGQocr00GG2c5jjKxbH4yax+gLjA8I7cdoyhdeO/W5FoVAoEI0GaemsFEHAtWUYRNKVq7oPxyXqep6H79oihwpwjgtxl6/Tdb0SySp/TkGltVPi6gfyjI8VTvjstPtYGk2tAd7ZTyDaez8qcq6MgIluRFB0A2QIOWIIqFs0JKzwUjHLjp1bKOZTnLtaeI3j8Toee3Qty5afRf+Q8BwsXrKUZVevJCMTsp9ef4T6+noIKKjSo1gTa0TVbdrmLakQcBRtUEKxSrJ1TW0I1ylSY6h40vPilhBUnkqJkieu0zSVQMCgJDOII0Efz/bRTRvXLZNQxDBMSKXHMXTRB58SHe1dtMwU4fdAncJd3/oBnufhyCJ7hibmzHXdCrTFNINYTglXYuR1zyUaCaMoAfrHRPi5qf1WrrvxJnr7BvFlVoJTTLDgjHoue7eAo0w5UzSHw/z6gVdZuVjAC1Ys7SKXnWJe+xrwxVznixaGAhK9h6/n8WwdwzB473XCd7F310EMQ+PM5UvJZcX3bdq2jZrQBMvedz4ArZ21ZFMFgkaAc1aeAUChMEU6keTv2VtZE5/78/dTKJVIJoWXxvV8Hl67nmuuOhtLrvcXXhRe1HMvWE5Rek5rlvjYx3hRBTGJXolEHduKXoEYwovpmi5OcYpcv/BNaZHmynVOSawpXTG5+ppreebpap2JA1u7SaZz/HzdLwGYPbeL7iPCk7hrp5iHdMpmcrSPcy8SpCUeXWRT0z2liYk0HR3Cm5kZnu79+s53RJLw2W0qbwCjr/6AfNe5APziXgU17GKWPLhVXP+jH9
yH6Trsfaaaf3DmVR9n6PQ61q3fN+3eGUVHU8R46TkRYbl4jcCvt7sGv39qJzkgXap6bVdedAabR7eTD4oIVMIapbb+DD7zfREN+8aPtmEYaZzMPlJZ4el84Nle0hON6KrGT+75GQCheAOeaVErI1mb1q3lFq6jUjeg2ksA3g5B3ge8j/fDe9/mopNE0gckNMiSdOnVZlOkKudG91a9w+t4nnWV1P5nOL71cWDa73IrUSBFij5Ojj7PHPP3xKHpfR2RnzEkaVBzrIZwxODQVvF6qjtD5PQQ0WaDplohk946eoBcRqexsZ2JhBi9GXPms/GhVyv3bVnWScG2UMkztVesyc3PdrPg0mZohH391b6uun4uH7pQ1vjSpvjcj2Qk7SvVfkaUAgMTPmZM9DNSA04+gFcST6crAVTFpr6pgYAiokKWm8JxVO7+/q+4W55LkXA9jqawZJmIZK0+dwXts09j1eoVzFgq0Ai9A29SH66nUCrhlPOgNRPdcypy0vf940gtQNUUfE9ABgMhWT8xK5798usEicl2vxvXFbWuYvXiPMxMutglFVPTGeqv5jn89NtfPH4qxT2nJIQrO4mXrmWof3pey0TvMHZB+L5r6xs584pLGa31cUpVz/SUbjOUsEkkqxnaQ0ObOdoj7j3adzwoT7S7vvQF+NKJkL3icb8Bxt+syoQsMHJc6vnGTSfWX0uOVn32DtNl62sbxfp6feM7VYP809oESSaOe5LsMTnrW6jCfJuaBXwym7YpqlPU1nYx2S3W3qIrZuDMb0KVsPmVZ2o88+A+rnr/6UQ0IafSgxNs2jlCbipNMCL20fhUDiPqUjouL0mNKpQrv9SpLq1GO8l0kpJahfgl7SP0p2TZhbSkEHddHEkCQwGwfXLuMfL1iquwawdJpcRZZGVtDF+nuTZORhJ9paxRLL/E7KsaUCXNulJQsR0T1xO60sC+/YQaWoicFyCtiO+bTBU4/foOkn1ZJnvE8zXMjNDe5fDGk+LcqVkaZ6oDouEAGVkoHl8loGmVGq8g+m1kq4DtCbVAsihmxVPl8/g+M+c14WgFrMoZ4pEYtWlvElG/gg9tsxvRg6ex6xWRTuAbLlpNM5HCFO5xkMWxsEQepYvoSYcjzyVY9j5JTPGQ1P8WiX28b/9+enuELF1xg4jQpsaSHNp0Ivx18fK5BNuEPnDaB5t56X4p20tCVyumfBrntDJx9MTP/k/aowe7eY8k5yi38y5ZU0nD2fzmdmwpQ2oaRGrA2DGlat6pJRN/ej//X2Fc4Xrox8iSSCRCQ2szA7IKuK9AfXsTDRcuYiIuDuTMWAIzoGDLUGV8UTv2mM/qlTH02h4A1v3O4vWXSsTiBrmkGNzJIQ/uqILlDEOnWLIJBNwK45HjOBh6GLAJRYQBpKpUlE0QSrnr+hiGUYFIhIJBijjUNYUwZBK/54kE3zK0sVxsUVVUBgaEgGhoMlE1i/ERaJMHve9pjAxVFZHll3ex/uHpeQ1v17SQYAbTVRXf1nBzSqXCdrqk4GHSJ2/v+g6tjRE8xWD3W4LxLJe16ZoVp2+gaoru2naIF57+GVGZ3GkVbZHLZPuYpgxtaxqeq5JO5QjL4piaaeArKmFDFlJ2VVTdRNN9PFsYfO1tMwmEIgTDAfyArBuSToDiEgkJbGzA8PEdg2AwXMl/UzUIB+tQDAdNGky6rqJpR0knxKCfdc5SFixYwKHDRyvkFa7rouumOOzLuQT4WK5TqeuleFGK+TTRSD16SHxuqmizrydN36RfuVc8NI+C61GW5Wq0FTVcS157k9kzhYBafckNlJwSqh0hlZXECWoY3/KImqLf0WCIZDKFbni0SEa4My6NoqoqyWSSNkOM+6p3f4RCAQpFaUzoNqpioBsRdEncYRg60UCYDwz/F3PbhIFx109/d8I6mSgkCD5q40uDNoYY602vViEsb7KfXx9DaPH5v/4qAP9x150c37asf4PLbxTBbheNx3/yY85cJQz2WKRat2q0KOv4BILsOTJduL2yYTpcKn+M02LJQlEcd+MLgrEnHBXrbv/uw5y+4Fw2U2U7KuZsjhwH57j3e1/mtr/6Fofemg6LAMj0C4VnDycqPoefeuiE19589ucCRHncMNiTR1nYIgT3VFrAkR69R+QRfeGLX6Iu3s8E0K4Jha8PmDvvDC6/6GLWviGgGPXxmVx/7hVceo7Ik/jFP/8XTfURli1YU1l3g70JHv/Di8w4s4N8RsjK8dEBlq9czQVniYTlTazlrFWXE22OM6dB7LUN2zdQ19aKGXNJyfo4mhrFUy0Kshh4VLNI94X4uy9+m737hcI/fGQAQwsyOJ4iKOXiZddeypmzTmOmJHMJ1cewPQPLUJjMCUiYroJpeLiWQV2jYN2aHQnwN3feycEtYn40r8jpS1cxWspSKMh6SAENV81j5zU0TfQrHHDRVYOYXK9GKMieAzsxNAe/KOTpxOgEo4kRBkZ7MWTNqskDg5x+xRImc0KxiSouB177v+y9d7QlZZX//alcJ9/cfUN33w50bmiiBMlZUQTMCRM4ZtRxHCOKYRydURwVw4hZRAUZEBGUnGly5xxvjidXrnr/eJ5zbl8anHl/76z1c72r91ou6XNP1al68t77u79fcdBSFbEfFNKtFEdn4GMAY6OTZLpaOXaOYEgtBpvIew6j+/dQLUvGq5ViM2/rl0XeRg0DhTBUyK8SY768aZLqmMGQXiWsz2y7e/YP8ocnxLxOmTXOfW0/hmNzx5+EU371O5ZgvSli3Z4Ku58SfbV1b0DS6pDLiDmkOyZqkhATUZbvYmomtqVhdSn4ks3ONmJcp8SmJ8UBZ/1Td2Ok+znllFVNOJGZsggSH1U3sOS6WHNcFHUmsBhFEZqmzarDUhSFGLGHNj6T/BcUp8UhVNMhjmKSBGJV7AUdeQ2zNaYUhZxxtoDLPslDXPP1T7JdOhtteY3RwSK//dGN6Jq4rrMbbEXFzs6GlWFZeO1ivZvQy9QrY7S1+wz4M+vAFneaUr2CkZqBY5m9WVZ3iTE9yjTzjl9CSybDhvsFRGv+y5YyXa8zv7uPsf3iDBLbMVlbJ0Gs344XoKFhqypVSTSQymXAgxYtw4anRS3WgqWLKdfqdM4Va8T2p5+hd+Fi7EyaQDoWZmLhelUGtm2d9XpHn3QavTJwMr+3B2tuB6EXC5g2kM5mKFUdyjLSpJwVAAAgAElEQVQQFwQBbr1MSk0IGhUHBqTMHLpaZ948sc88+OCdlKujDMta0ZRu4dYSxtYXmZoU4y6VyjC3ZRnrHx1gwUrhEBTyCq2ZVoxI9NWvfibqFJeu7aS6RzgkC09pJdOX4laex5csdmuPXYkSO4RiOecuhPj1ts3bMFJizgR+iKLWUA2TsQMzYZL2bp1Yks7oGTHex3ePc+IrXgbAngND9PZk6cy2c58M8FhZG8eNmJKBxZTVhpGPKHsOFSmoGwYKI0+L9+g9RdR0agWNrCkg1A1TFI3RXQ6ahGfufU6KCB/VQ26ueM6uhRlqlZk5M2duG3EY0dk6F8evyL5x8GMdOzVTc1XoO4bl5
5/DM4hauLpW5syXv4EdPN8UQI78CFNRiFGabJtjw2L+1qUWla1FTE/VOPbEDhrgPmdAwatPE/g+1crssNqezQLS2LOwj6dvFvC8qjO7RMVqOVQy6albRRBh1RmLD/kbwJx5vWTS4kxRrScsW3sW2567F1+yUldGJynuns2bvOiEBexet++QewF0LBfXTWwVa9oZl36E+//w7ebfX+hYATx63/0veq9Inp+yrb209OYY2Lj1Rb8H0N27Gt0McIz4EFHtl7K/C+cqjuMmxTpApquVklelo0tMstD1iP2QUrWI21C3ntPL4NYZZ0NNLNpbNI5c00PUJgbHX4ElK00UJWavxCxnZtdrY6dMEiLqNZ9MRiwGqmIRSrpTQ+KpwzBuRu4AKtMeuVZLHtJFMzZJMJRYOlUAMYbRkOoDElH4G4YzA3Vy3EeeOdBNSQwhIyWLjxQL/tHv6GDB1e38/IuPN697w2dOwDDhV1fPiKQBvP+bFxItls+vWKiKgqGpzeyZbtiC0MEVA9SyDFTAdx1yeXGY03UNx3HRdR1fiuym0kKEt16XxciyObTIIpC1TJVaCcOw6e9dM4PH93M4VRNVEREDN9CI0InigLnt4kCy/vmtPPLoEwxPTrKsW7Tn6qOWks67VGoiWp0QoEQmxIpkpRJU+YqhU6nWm/0QxyEJEYlkfHz+2VZqTpUg8DCMBhNgSCqVolQqkZF1GKqqoqo6vvSSBENzTBB4pFMSU/7U/Wzdux6vVAZEH+mJAkrYpF6ddBwKmsWy45fxwAPCCbj9938lm2+hNj2J58oapNBA90PQxdis+xqVooNt2ySyWq5rbgHXrZPEOrmsWKT0gkPGbKUyLeULTBUlgSDWiOWJJlfIoSgJQRBzriFqGeIwIvQNgmCmXuLi8D0kkQ0y03ISr0JVLHQlTaNyet6CNCedvJxiQyDRsghCDy8p8a///hUAPvnxzzTH391/mBHae+2b34q5WPxe1ZkJTmx54NB6kxfal74s7v25XTP3/vF1IkNz4svOpjAvT1kRm+XcJV10zesH4NNfFJR+C3vmc+21n6Wjr4sH7hLO1Ls+dKg6+/+2lfc9fpCLN9v+7ev/Cl8X/73/PrF+fe1b32Tpq/rJZ0yG5J73srUrsclx3+0i6uhPP89mTD50xqeYHhfj+rg1Cgs6Le54ajNHrhHOVDY3h3sfuY8tW2Yi54k6Sm/PURyxWszt54efxWWcseGARBERYdMIMXWDVsnQeNYJZ7D6bSfTf8RqTr9A0OZXy5Ps3z/Arh3DVKbF4WZi606+fNM6WvoEu9Wc/gWEaowTpCm0iMPj9z/64kXAmx/fh2mKsWFnW3hmw2P8+aZfHvK9177lH1AkTWxoWZRKLtfd+NNZ3/nEl77BNz73iVmfveNTn+a+P/8OEDj+ieEKiS/arpidWX8VGWEPohKLF7QyOU+MU++AQ2drK8PTAc+tFxuvQx09qeJ6dWJdRImHxkSwpN4QZHbraJpC3spTc2YOMUMbB5l3/FzS1gwRTCE7h1qjnsYPKU9EXP6Gs7kD8XulwkIKHVNMrq/whktFoGbcr/PYRpsnn5Biy+0xWipP7BdRZBG/H8bEQUKsBMizP7XAwzKy5OS/PSXCqu/h4dv2oclsk2JBKEXfY0mgVEilcQKvub810BeaFC5tfAZCzLThVIWKWD98r3FIE06AaWpUpBhwKm9SK0csXr2Ab8j5DjBaH6SnRxIieB65VtFGN/3s0ADRwVYPJyl0iP6MQpdFi1ZSnnaY0y2y5tsYJ/IClh3Ry+DYTIbggd/Mri8J4ykGR2eCm7lsmv1PbGdTs0JE2MECBD0nLGJoncCWLDxRjPlUqo1cewsL5vSxAeFcdc5dyIJMDkvuO9t5hpVrRDZRkwFJ35umWncZYPaBb8UJJ6GpYtztGBlmassAYRjjy708TnxMU0dv1BGToKsRjmagyD1F1yxUQ6Ne89i5Q2SlFi44id/f8BMsSZKYbtWYOCDebuf90uE4IWRoe5GWtE48Ie71xM37mD6vThLP1NEBPL9hAnyxP+0cgjnLxP7ZlhFOWVgKSGyHofHZ1y05Ym4zuJJOtTM1XiOizonniPb5HX/Fr/mkNNGfvjvIghM72ff4OBOemIfti0OUKMV9Nz9D7/FiDIV9JeJEpVwUYzLXa7LjrzspHJ+hvVMc2OOpmeNwJGu63bqDX55kdONMVlE3NfJdOkEwm8o/MR3sWJxnBg9MMvL0TLbUqU+wuG8uk6UqimQ1zWa7cNwqU9Mz2ZDS4Hbuv34jSHWXkZ37+I3cMEZ2i+vmz0+xe8cYvfPSaNqMAwdQHD64PV02nDyzz+55Zoz+o9sZ3jr7uQEqG8U6te0gtl137wsIOZh93dmXnco9NwuB+TCanYEEWNk3n9H2MRbMF33vRXXcQLSjL5fFomQQbV/WzuQ2Md6OWn4Mu3lx58r3ZjNzHuxYvePNi7j1Vpfp2myUyvGnHo8l+/Phh2YQBr1tYgwVK9NYUeuL/l7D3v/B8/n1L2+lWDwUrfNS9n8qInzYDtthO2yH7bAdtsN22A7bYTtsh+0g+7vIXClKQqFFRGOqwJ7Hn6LvlLXEEqZXd32K49NEI+N0S9xnJpWedQ9VtWmZk8LVBmk7CMLauygmxqe1U0SzrFRl1nWBH5HNFHAcj5QtvqNpGuNSFM3zhbeuSLrMfJt4zvKU28yUNP5/erxGS6eJpqvQYFZSVTRVZWCvYLvp68+TJCocRMUOYNs24DZ/58BekXz8h0+eB8BkOaRdnx2l6OpoR9dSwOzMlR3EhJLSXdNVSHTiMCRwRbggjkOiSDAeAlSqIZohWAyLsvZL0QwSz5ulZaLqGr4fNjNghmFgGAax5+DJbI+d0SmWIQ672LVXRB+q1SKJoqLIeifN0NF16GrPs32rgITd9OvfgxZipOCJQdFWO/dNc8VVZ5DtbfS1im4YRGEo3xtUHTy/TCFsJyUpjqMoQE104kj0XUf7PJ56fLxZ/wZCCDcIPGzbPkgsU0NXgEjWbqU8Ik/DVDVcRzz7+eedzPxlKpVysQkHSUIVy9SJJaSqEru0mS08tbHMol5xr4UvTzM5VSRjZokDEckpFFrIqhDItjP0BCuVolypNalOexb0UK6XcYKwGaUyDItEjTAMEcnWTYsoSlBiFS8Qbfext8+O7P9/tV/99195Ubvphv/TK+Fzn/3MS/7t8SfuOeSzLRIA8dWrP9b87NrkF/R19vKWX4qoXy0uocYxqir6pS1nYega9ajChKy5iHSder3I+OABDAm9Ghkfw6mHHBgS0TU/CZgeHOaSMxZx3PIjANB6e7j55sf4wj9d3hT17W+fy2/u+AtWTvT5G191CmGlzt0bhvina/4dgAXfcyhXH6QjewTegJgzW2OT9v6lDE+ICP9r330lv7nzBoLgWVJyfSuPxPTOa2XyL6OkCyICeOcf/8Ta4/pY1COy/rcDl7/zZH7xX39ALwjYTLatA910CYbGMSXbYhg4+E6KeR2C2j6dPRq7sw9Hz7Jxt4DWVOsVRqaz+HNXkbQIPM/ytTH1
pbuaOnCT1U1Eno8S5bnhi+c0++G8N3yE5557hrFtItJ5+YevYtpy8UI5H2tT3H3TLznzwrfgSjiKqmk8cs+vuOnXP+CsCz4k+qsti5cMcM4b3w7A3TcKKYpvfO4TvPsjQrn7+m9fC8Dw5iFecYGgNP4+XyP2IgxLamGNz8B8Ek9EXc1UK31LW9gooYEeDsPbJuhYNZ+JKdEvmpKntb2FlJUmkax3ntT8MuS6aCltRLFLxQ3Jt4t+r0kwied5eFKLatGJS+jo7uDAgIi4p/M1/DjFP3/ke3Qs6Qdg2WfOZ+jAjei1EC8t9sOjju3D6GyjvSDrCNYNMjbokLbVpnZSJqOhxQqKZhAb4nthGJD4QRNCphoqYcogbWkkisx8RBD5CqoKDaUFHwXDMGb2gYPggA2LogBVVdF1jSCQtNAydLv+aVFjNn9RlnLJJfQjspbY5yYGHNI5i1dddDbf4ifN+/mhS31Q1k7levjxvwrh48uuFJDjbTvvwcp28PRtMzWlAFqokRTFu3iRy0AwTq0ek0nPsFTm7ISxfUU6ukWa5pQ35nnkxtnQ5Lkd7Tx71w4WnSBq0Tate472VV14XoCZEtdNbdhHx4p5KKbc1w6S9wt9sTcMTIxDPM4Td/yx+beNu3cRhBEc1IZ/ve1GaO3AkJItYU3l5aee2Pz7OZdcSc2tscndRmubmLNT0wfwiiGLlq6gWJNnAy2NkiRoqmSkjUJiA9J+gqE1WGqnSWKLtvZWQimBk0TtXHrp67n9FsECqhcUzj/3DP4s4WkAg+vE3iJmuqzXWgDV3oCWtgYkSHz+4A2zM4ENW3+nWBfXAytPW8DmB2dnKB747W7e+gnx3lOTJTIpC9vMMzW1t/kdM86gyFojO04RSyjsznv28kKLJStd5Gu0tUBWIoN2PiRgolbVIklLWK5t0dCzG3nkxVmZs7kOkkzAjq3DBLOTbgw9Oc0LK/JSC8S9u4/pwa15VOsesnqBUm0fGSONkZ7JmgwPboTgb9ff79/v0L+kjVAJCOO/nUnZ9+jsTOveZydf4puH2uDm2RmgzY/PrhtsZK0Atj10qJzt5oH9fPRT3+DUU0Qd+PDUCB94u2BYjrzZmS7tINr8W35xCy9lhixbOJRTEH52w4szEjz50IvjSJYfKeZ2ys6RTefY8SLlAA373Kf+HT4Fre1LXvI7LzTlhYvk/w1L5exk5UliU9+ybTfOvhenRWxduBjbliQQg3tJypUX/d6L2Ts/Ixatn37lUKrz1g4T0zSJmgWQCokCkyMOXVILIwxDwjBsitEWJw5NrYIQRbRtu4mF1TWt6VgBzF/UgqIouPUao8N/WxvgsvcupSrz9J4bY+s2d1736H/7ru/42gVN+JumRChRgG2kIRIbjGoKLHajDixOInQtIgwDdElfqmFQT3yBrZffU1WdOIyabSD+W8UNJ9ClxlA2Z7F3R5mH/1iipakBG6MoGposZo1CsRHkcxm2bxKQP6cakLIsNFMllinjmlPlyo+fRGSKxSaMDIIoxNB0Ak/GBXRXQBwd0HXRf7qu0pE/gppUtU/na9z80w1MjE+RtsTGJDStdEqlUrOGxbZt9ESjLItzFduHSMdQDcrT4rO3vPtEFq2JKJbTJPL30loHvlduUrh7CvTke1j3VIp8i3iGFStDKl6EqWpN2lo3dFAjH9MUB2EjdDBMQfERuLJuQbWJgxjPrzYPhr7XQqA4BNKZCxUf9BDTSCN9ar5w5c/44g8vRdOzRJFob9etC/iibLokSdBUG8WsNevxUpaBrtm4btDsJ9O0cV2XdE7qQsQGn7n8e4cOvMN22P5f2OpXvpYUIkiio/DYn37ByhMvQJdQbEW1UTV49t5bOOECIRataQYtLS3s2iXgK9sfF4e/k859M4GEPamGzro7buR1H/gII9tEkOyhu3+DlU2jS/hbKq0zsU84jR0rpcZTAIkZ48kD5/ROsb/0re2jt00Q0+zau4fFC7uZmKgxWRQHsChUqAzO1A1ku9rQzQBX0Zr00AMbRE3DwuP7KTmiZieMbNauXc7AFrE/uP40Q5snWHFaD4O7xGe5uJ2TTijgu1VWHCnWWMPO8Y4PXM9tN/1Y/Fu1qXkFfv79bzJVkkFBR8V1fYIIoli8s2Wp2GYKXWpWBLFDoCookYImqU0MXSUOUwSRjybPO34QYWmz47BC02oGnh1FEbZtYxoagaRQb0DmJ8cOhQw1LNVi8N4Pvp65HW3881XfecnvvZgtPX0J2x84VFbihIvEAai1I0N5QkWJTEJVHMbW/WkHZ759DaPbKlg5MfaevXvLIfdo2BLpXO1ct5v5xy2hWi0ytXWmLm/RKUuoTIl7W9lOBp58ad0ds11siNn2dmIFVKn7V69VcEcmXvI6gHnHnUF3TydzOjppbRNngo7OHPff/gB2voNcm9hDnFoFNYwwJXlUrClgQODpRJoMAqRy1OsutmaQkhDVKKmRK7RxYL/QwGrv8lix7Ai+9clfgRR8Pf7ENVS8AUaGyqRahNMwPTJNOrbpyopx3junF0eZZmI8pFvW+o4Go9SKCvP6eli6SkSGRkb2s+6+QXoWij1lquKg1lUMVDp6xTNNlnzmzLUJSiGj+8U+emDbOOe94ehmjfDw6BCJETHlOhgp0Z+2FlEwNXbsGKEom/WUUy7CDQeo1feKNtBNtm8ukk7lsFKiH1J2AVULmKqO0iLb060FBNWYlCL27MpYFS9KeM8/XEabhOlXq3Wu//nP0WyFMBFrUEtrGlPJMFUWZ4Jlx2RxB9OoXhY3Fu/ix1OY2KRzc1n3sFjHTDuDf5BUwcrjFhFlLba9gDSpf0kbgRvhSjH1ydGIOd1mc6/XdQg1m4kDJVafJ+DgBSNDUK8yusdl394ZsqEVr16AMinGRnEoZmjPBJ/92GUMyDH5sxsEtHDuClFXGyc+Y1vHOf7V3YRVWVLREjD0fBE30pjee6jj07Cla1azfcNGuuaKe42NHOqU/W9be0cn01I7N4kVkuR/Bu2758H7Ofu0M2Z91tUzn7GhgxQiFZ5OkuS4F7v+7yJzpWp6E83Z27+I+lqH0kSROGrQqKk4dYd6fpppuXlYKRNtUS/tWTFZK7Uai+dkOOWMKaZiESmbGjbItnjM7TXxpO7MK98FJAbzZU3SDd8KMSyIk4B0RkzOMIibJBWJzLZEoY+hm02xtFROEQ6DpmEYDfKKGEWVmZ4klo+usmBxoZkdiZNYZFAMha5usVkmSoCqhoSeyty5/QC876PvQj0zoeQ2sioe5ZLH0d85q8kaE/keSVIjiAJcmZVStYSpqSl8T6rFxy6a5qDrZXRFTAQtAdM20PRGXZlGpGskidZkVfFiDwsbgpnMSqwlaIZOFDcyWTpeFJIy5uG6DWbAKplcigOD25kqC4fWdQKy2RZqJVkIHBkQJcT4LFgsGP2mS2XiMCJ2I1RZv2WmNVIFi5qMhGq6gq7oxHFMuiHgl+RQ1BBVd7DMguwzj1o0TCgPaXZeQ9d1giAilgHMOI6bTIGNvg7DEN3UMEzZ5kYeJygTx2rzwDA6MckKazl
+PEk+LfrPc1w0CzRZc6UmBk61ndYug3FZDDw0FqFZKnXfR1Hkim9E6EmE54qxqSgKQamOpllYMjMXRhXiGCwzTVU6XJZZxDQMEldserZuEasVwqguiiak6XobCTGJ9Liy+RyaNpOJdOoRcZwQ1rLNz2quKg9KeRKpneK4EYliUyqLqJ4ge4Grrr0IEikiqoa0d7SStTswpO6bafcxuKvK+qfEnC0nVe7++a+5+rtfw8iLjamjbSWarTLXEO+b9hTI5BkJA9xYRnHjhCiOee7eO/jR14Rm10ev+hAfv/pz7BwWC/nw0BROfYB3vepVfPYawQT45c9/imuv/S6mHTM0KKJavb29pO1OXBnht/SE9mwbm4dGeeB+EQV3a3VeflQfmpUQSBpTL04w1AwFS7T58Pg0I9Vp7r3rId54/gXi3ov7KXkVDuw7QEM9zrIiSnUPXQYMht1pYtcnrRpYmUYARMeJbWr49C5aBsB1/3INt/z693z+YwJ837P2RDrnzWdupofRYbFazltZQQ9cWgtzmRwUm6yajVHCmDgU/fnd//ghZ1/2OkJ9kLqMclbLB4i8iMBp4cwzRT1VOt3G8NAYS5cKJ+LEi97Mlh1j5LoL7N0l2rg4Ok6k6USxguGJZw+iqshay3sHqoqnGdz+iVdxxus+CMD+Dc9y5Scu574H13PXz7/bHJ95coSyNrVngc0xZ57NM48fyhQI0JYXQZGUpaEpaZRodkF2GFVIpRpkQHDs6a/FWJZm0VHifR4C3nzle/jZ938GgGLMRIcTp7FOGIxPhhj27JC0YUbs2i3GT5BUOTCe4NRDYhnA8gOHloWdFKXQcXVs5nAxwOxi7cpUCSst6jIyaYugqlGT2mmjm8V1dcWnu19kpaNKlQefGSDTlsGRh8BWYzF/vXcL37pO1A9c+b4Psm10G139q7DGxAH+Y+9+I5HusHXnDiZGxPts3v40z28Yo1YXi6BmhmRthTC2iBPx2dR0hRdySBbaU4ThDGKh+S7TsyPsDjXau+xmbXIcR01HdZYp0OCocYoB1/JrXn/FK3jjFUKk/Mb/vJHXvPsCapItWFdz/Pm3t3Pmpa9GkYiQbVvuZ9Kb4KRXHMljd4gsydGvmMPg7iIHZG2vH4g66YKdIzq4OL8OrfMjHvm9OLCe/Jq1PPpfMwQ+AJe942i2DIwzXJoJ9EZJPMuxAtj9yE4WSobW3vmLqCyHMPLQdClEr4NXConcBEWyywYVh4iEOGicLUKstjY812XVapERXnXa2aQsm/vvFtmB9q52Vh+1lHw+SyC16IIEWrqyVGoOVmMsagZeGONI9orATQgTDzWOiOriOtOooOgRVU3Fl+RUppph796dWF39ANz5k19wJ6JemBExt588iIu5eBA3okudKXmC23oQK+J2Zus+HWCAF4aGp15EX+jgq14sF/GX374UY+JLaxU9wu0sOaMfbNEvZk5j6QktjOyKUEzRLkNDQ+i6wZyeeYwNizlpaiatdhYjEO1bjlLkMi2cdNqlPPyYqKGzs3MJ0Ai8Klk5t9UgRaIkdIlpzOTkGFa8mM5WG7Umzkqa14ceV3DKMyyZBztWAJufevFszN6dhzowhwbrxdzb+Jf9h3z3YNty26H1TV/+5qEETiNbZjtCT9724gytB9vr3vwuapbUFitZZI5qYTsbOeYVVwDgX+lx74/+hbPe/UnGp8R8N1WFTEuC6ov1/P5f/htvff9H+dV13/pvf+/FbHLihay1/zM72LF638c/wm033Y4ThS99wQvs7yJzpRynJLy0luNhewk7/qI3kQQKesqiRTLXTE4P0bcgDYaIqs7tLpDJ5ERmQpUilJpKHDOLHj4MQ2zTbi7cJDqJ5qBpWjOroaoqcRw3HcUkScRB3LSIZFbMsDy8eobv/8tTtHc2CqBV4tgmiuXho1jG0DQ0XSGVEVG4mhOgEmNoIbE8uIWqzwc/dQ5uLBmwLLHJe46PLgvhDU0lCGJIVExbOhaqIgquJUV+JpviR1+9i9CPSKfFIS1wPXzfFeQXcg7YdpqEgFAe6tPpLE61hK1lmSyJhWXZ2qVc/NYzKE0W0WXxcRg56FqCEgmnwzdC9j6bJckrTO0VC8vyFTGt3TW0KEMko1sYGkYSEySSnVCDhDrESjM7qCQRupZGQScIxQE6UlQRYZbQEzUxyWR1As8hkcK+13zkOj773fei6TGyLh3LzKAQYkg4aBRFxNSoezFpKe4aJxFhkJBN53D9RsbQE9ALSSajm1mu+cB/8qnvvQ2kAxbHAbpaIHE7mRgX95oYHcCzPY5afjIA81Yso0YKqxZS0URb6WENw4Sp3WJLndi0Ey3dhrZiNa4hIp/esMNZHROs33IXp5wuGMVOOO1s9gyON4lUWlMmuC5rj1zJus0ienzCymV4BIxN+qiK+L1qdZooCbAkRMZOGTz07DO86Y3vo9MVMIh7HrubvjVHU/MC0qa4v4FOoIDbKPI3DHY++wRrjzmNX/xWQNPe+trLqEcGupYQquLwkQ5ThEmdki+crdCfpOhPMz48wOiA2MAdd4Dx4hTF/R7X/1hEMOcvOps1F54ONdEujz9wF8Uxl4LtsGmXaPO3XHESa5eniMMali76NNTyeJUyfd1iV3/bm7/E2z5zEffdsw5Vjo26G7J65ckcfczJbN4kSDXaujo56axT6D9CQAcX5VR2TlTYNFxjTB6uymMjRHGWejyJqcmDYqSSifRmcMXIxXS3wNffcxmpbkE/f/4rz+Whp/ZSKxVx9wi69RXnvJ05nW3kUqKd+uct5ztfvIqTXvF6OtqFQ5Qt+Bimyi+++W1e+65/FO8XlqhWPGqS1eyxe4XI+fJTzyZlNor4U6BlGBgbZeUqwWJ1z6+v4zVv/RTpgjgQ3PCTX4GE+nYcIdiESuUJ9FhBzYnxW9tdpWVRgdauVhocFElgUynXMWydiQlxiDeNkMALCBwJC0xrJKikCiYVKQ/gDNbJ9LUQxyGFgiD6UOxpbNumWhfPMf78CG1LW/FR6JZF9p0tHagGVPyIrMzMnf7y03nsuYD9WwST3UnHH8e9jzzLks65jFbFge/Eo8+iXHdpb+1ixSIhV1LoGOf5DSNs3SqgQrt2Pcn+vXXsjEp1esb5SOUMTAtKE6LfCy0FYsVvQqrDMKQy7dDamWsGnVRVpTz10hmq/79Yx8ouFFdSzccJzt4BLvnAewG4786/EiQRmc42ZEUDlcERFD/GDQMUyeSmRoLKXi5dGIZBRrPI5/O094h5a5idtLdleOppAfm/9NJLUTSF4ZGpJnW4aoARFXnysQ2sWiYE1MuVIrqlo8rMVdo2sUwNVVewLbX5e9lUBttMkc2K/bClNUMQeuzaKc4N1//H10kd0YqzY5r+U0UgLKynqFbLeIHO3DlSjNsdIpdu5ZhVIng/p2Mpc3sX09PXTVgV+2o6p1MwC2hGnqIqHInJooZSc7Alm1/asv8+65IAACAASURBVCkGFo4XUi1J0pmpfRRLHqmWAqEMKF9/9T/z5qv+kTgW99mxbRMVfzdO2WVS+hC6FRObKfpXZa
hMijm679E6x168kM07xHoaeGnWru7AjGM2PCJlM7IJrlrDtC1yWbEG2GZArZwwPiT2Cy3wyFidLF2zlFJdzA9Vq7Nr81PkCxl6+0XWVFcSTKuI3iqy31u2lFjddxZzsu1s3Noo47AIEkjZOimZxXz0GeHAXnaxkHVZuCjN9P5xKiZs3SiYGKsORIlLElq4kbj/2IBHd1eaUPZxUFEoSkHqT3/58wBMTUaMDw+TZAr84foZR+Xqr3yfUUf0ww++/HkuuvwfKE2btHSJsfHHH38VgLe9830A1Ov7ufm3f+L1734FsTwr3fTT2/m/YRe+8T10WyJIsmvnFnbsamFoZLZDetkHP4kvHbUN+9ZjRwVaOvK0pMUau3L1AvzIJa2IwFTZq1MZGGaiGLFgoZjr+7ZvZN0D6zFSMLZ/hvTjb2WuDhNaHLbDdtgO22E7bIftsB22w3bYDtv/gv1dwAIBHpUF4HEqIZcqoEUZNKn140QB1bpLrVwBSX85POqyd9cgg1IcdHhkH3f88Jtc8oEufEdENf70E7js/QbdCwJG9ouo7chgQL4FFosSL27+kUkcxGikUBrNoTt4dYPx4QqLlojoebVaR1E0XE9SpaOi66bMeoiogwIoqgpKgCkjCKpiigJiifOMY4hCCReQkAlNjSExcV2Fcy8WsIDO+TEtnVl+9xMRraCm8v7PreHBe0a5/ZfiM3X8SeKMQ1ILuOd5kSZetaKXJSuOQFVERqiIgxME6KFGIOFuZpiQBBDJzI4QhATPc5uQP0XRUEONuhNhmsLD9/0ITVEIQ4nPNwwgJggNolAWMetQr9ZQjCJtbSJqHIY+4xPDIKnR05kMfb29TEyME0riBlOLCYKAWNVJpJaJGljUqxGq5LMoF2ug6ySqiuqLvqonAZqqoigKdQlDNGwBJUsSEY1JggXUaz7pdBrfF88ZBj6KomAYVjPqDipJZKNLqlTfLWGZLcRKhXRBQAB3bJzEnSij2g6+zOSouoPvGCSJiBQZmoqvQuJksFJSaFiJCYKIWjCC2YAvhhFV30dVZAaKgJiIJAnQJNTU9300rY4S600dr4SYmjMj6hlFAUVfwTRtkoOErr3QJXICDF1EbTyvTBiG+DIjpeoKYeRjWCmmXTF+VFUlRsdLZuAJmmqTBBlMKUJNQ3qgHmCYonMyLavww0782GbeahH1O/K8k2m1FlHTBVzDTqeYZxXIzk0oDzbEH7OYLd20nSiyHCsLaXRUnt2+ly9d/QMAvvGV99G16GjepL8SXUJr6vWIlb1dtLaK3w+k+HJEjLpiJht/y22bOPOsXrRQtKfZoqBFOapyLFZLZTY9cCdvO38xRx4vFIMzPSt5+tkhUmmDRMKJg7BKSrHxfYkrtS0Cu4tb7runGb2745k9uJHIJBoycuwTomgq+bSYj4oyH13pws6voPdoWRMYxVhKFs3WmNcv9MQevu8epp+6ncVHi6DYBSecxV/uvoWORUupxQKeuW/XAEe/7HgmRqfpyojoct1zqabq7Nw2U+i/ddNuPv2Bf+Lpx0TEbe/EBHMX9/Onm2/kotcJwodzX30JR+TmkGkR0crHdg7yu5sHOOKoNKHTqBFQibUicaDhOVLGIUlQqbK4R6wRhXyW8R0iy/vey4Ry8RFL+uhDJd3u8nUpFDxRLtI3r52yhEZ954uCjGLjtmHmtEktqmwWLSXWgr37xDvHUYVYiZtrUsOmiyENcI2uaHS1xCzsyDG6ZwZk9MiGDVx4qojwX3LhxdyCgL44MnOUtnIYZky9NoPJT6dT7NtRJl+Q0Fq7jdHRUQxDR2tkHqwM00Mz9b/JnBSmpVJ3dOK4wXJQx6tUxHpTEXPNmXDRbJc5bSKzNI4kNgpnxHKV2KBSD6hNlemVxa/+KoOXv3wNd0yKdu7o6WPewv1kjBpGWawRxbFBxiYPcM8fBzEM8QzHHXMK49NjLF0ufm/pBa/k0fs28uyGmXoORQMlGxAGM/DiUrFEvs0mlhB51WjA/sJmvSpJROfcLOMjMyow+S6NxFeaWZSSrFu9+PJ3Mi6zoUPTu9m77lGWvfwYkqr43vbn1rH05DOIG/uqm2HHM3/mDVdeQUeHgF0p+Q4mymVG9o1z/69n6j/f8pnPNskylEBhujKFnngM7RdZvvtueYQzL13JfX/YzLKzRJYhZek89+cX6EmdfyxuOM2We2ai4ItfvphaVCYyZBGDRDze8r0fiv/obGPBwn6cwKckNbpsLyEwNdL5FJEc6yii9rax91mmQVtPNyeedg61mvhOPq/z5B33sHq1EGn1Yo+BbXtJLJu4UU/lGWhKjoA6I1NivMzp7KDQWsC0JeRft+UeolGvNfY5m9FSSBSVCUORPQ+CgAXzFlBPZvqvPdXFANNM7hf9FlDDNhNMTcetidnWOWcF5176IdYcdz4AJgopPUWixnTNEc/pRSGPP7CB53cfgLrYnzy7RjrbysVnXSra1lC5+Y8/4r4HnyCRZEPpjEa1EpDNtpFqnZmTU7WAqCzKQU488WKmD9zP/fc9TPsisSZkdI3JakJ1rIYRNog26owP+/TNFdkJN9RxxhVKyRRqWswZM6WiJy0kBGRl/dbUlI9tZHn7O4SS+l2/uZUwb3LqaSfz4ENCAzS22pnf0YGrxEwPCQjW3LkrCXWbXXItTNwUuXSGQFFIEnFv04ipFCeZ372MWnU2F8CGTRtEH/SfQiltML/b4PnnxdhQFI0kzBIlHmMDM7DbSAND6tHF+gx8rdgj0B57lEHS3SvIxbPRatbyLibvmcnGnPmGs9j51GOM7Z+dezlQEmOsc+4K4E/87vo7OPnUk5p/v/DVF/Hn22YyWCe97m1UpwdwsxLV4Fp0tS/j/l9/n5Pf9wUAVvS3cP0nr+Lff/4byMr1zUtT9cb4yjuv5L+zP9/44xd8cihc8ubv/uvfvMeLA9Fn21e//z2ef2IAJ/6fqlz9HTlXk46ovfHHYTgYoTVXZ3hKpocrIRu2bGPHjkFGdgtsdOgXyWoBmbQ4XOVlgaUdR/QubbxWyJy+hEKrgeeIyWnY0N2bpSEFtuZEny1P2Nh6jlpdHAKTMEGRdSMNaJSmaXj+QRCylI3vh/i+j2VJjRAlQVUEKrwpaKtZ+L5PEDScqxhDT+E6HoaEHDluTG/fQoK4xrGnCxE0L3aYmjQoT4n7pG2P4XGLlceczu0I5+pl5x/JwuVtmGhE14uiw558L4WWPDt3i/S+lssS+BEGPlYgHARf84gUhQb5YBgG6PJQbkiKKAWFKEywVZ1AOpSGqpHEMVYi4SFehGEY1F232QYpXcE3IPGrDA7JjanBxqc22PwiBgYHUZhh71MUpVn/FCfSUcPG1HSCRk2UE6NbMaahojbw6oZOFMcEoYOhiYNvgg+Rj62LA+3k0CSgEoZxww/GttPUajU01cCX+jTplGB5bGgw2CmDOA4JQqWpLVKvTLN31xSrTpjPtHQoEzRUk6aulkoKS7NJ5QwGJ8RkN40spmkThDFRrMkxlaCrCpYcB36gY2oWcZygyQJy1UxQUUhUpdnGkV8jUYwme2Cia
U34pqLM1EXEcYKqaDSYS0QXJ1hSpNmyLOI4RlehEop7q6oOUQ01DJs1AZ5SI9EVIl1sTF5JQKsyhR40U2z+ua4lpFvbCBSTMJBwUM+hqI6RlXCt+arKwq48uVyaeIFou7FAQQ3K5GU94H2P7qalN8dHr/gQkxPid3738Hlsv3EAXYnREYdY20zI2CnS8t4dLQWUpMzyZf0E07Iu4hxoSce0ZQuEss3j0KJWc9EM0Ve+neF1H/4sug3FmhgH6/YOgq+STJcIqmIwlH2TwJ8i9MW9p9waKDZKvUzeEmvJ4Og6IkUn8hMC6XhrKYswDPClA6/pJqm0D0kKxRbvbCWQy6cxCypzO8U6+KarPk9GLRAa4vDTbmV4/XsvZ9OezTz+ZzHXn3joEZgu4E3PZ6IkxsJ4+QA9q1sZGJ5xrhYvP4W+zgv54f5rAGiZb7PukVv52Ce/wvK1rwBgWV+a2Az4lSzuf/TeAdKFDF7aZ8qTrHhqzPT0OP2ZPCtXCuheqDq0tS9mYYeYa7vX7+DaG34Jn4drv/tF8UwkmEC9Wubr/If4bN1tbHjZa8i+ADsRRxHTctwNjYwTyXqoDc8KqKdumbh+TO5gajbAH4vIZMTNsvkabWmdxfP70WLhdG4Ern7Pa/na98VB+JWXXMzylw+x9eHHsCTcLkIjDlTqw2LN7102F03TKJgJijz8FCcGicoBuflZHLk3VJzZcDjFV0lC0FMB1eGZOhDFUFBNcGU9rGkY6GrM2PRMTYBhKkSxi55IsXMn4KiVXRQnEy49752ACDbedfd9TfhUpVZDwaBehygQY2rt0UfTN+8iqk6ZkWlxCEzZJp2FiylKwjBFr/H+Dy/jCr7Ml74mavuGP7Kdn/7wHnRDJVcQz1Ap+QiQi2QZTFSyLTal4t9mNdMVnSAOiZXZB8dHnnqMWkmMqUJ7H92nHEvnghYObJ2pDZmsFElJPb6uXrEWdvb28fxmweqnmVO09lhExuwalbrqUCiINWF03wTFkofi6SyYd27j16Vj1UNSl8EwvZ0XWt2JSZkza+nqc47FMjoJtQNMlUWfHnPsSeTfMoc//1rWEY5Psb9LJVfINSHjPj6hH5DL5wlVSfRBQrlWJp0R+3HZ8Th97bHkCwViTbzP5P4BpooeKzvFeWBksoKStzF0nXJJCsYGKscd0c+ezm5sGYxryxeoFCtUZW1IrVrH92MUNSQlyVxGR0cBFSUCVe77mqYytmc3Lb2dzXc+ftEaBtiGJuGuft5G831KpSlWveYNAFx48ZcoVzS2bJHMclqIW3Jwaj5TssYrjmo45TqRYYIt4NFqaFHfN8hXvvMzAFqCOvrUNpb2riBqOI8pB6vfYGxgik4ZPLr4qs+SmmOQzog19+4HNnHaea/mVdlW/nKXYJB12lWSkg+mRlidCTZ26FmmpM5SZcRF6VTRUwEdS0Q/HNhdRVHrqGh4B2SduQddfYvobe8HIAgiLC3N3Y8+hCdFaPVQx8inWLVsIaeeJ9jw3D01Ht+wDqNLOHfDZYPaVJFU2qdrjgjYh14NXe1hcryI8wLqwe07xVxY+OoMOUNjaCQA6ZTt2jHE4qVzCJ3Z9T9JohK4ou1Suk5DC+rhO0Rt5qIlXaxY3Iutz65N27pxIyODM6yCrXP66V08ycN/urv52ed+9m22LxVr8PNPjvIPV3+HH3zxQwTezNwek6QRS088E4BogUumq5XuLhE4TaVXcutXX8cln/02q48SQthLV6W5Hvj45W/iw/8u5pGmJ/QvnM0G/oZ/vpaFPQG1CbHGf+eajwPwtmu+xd5xMR8662XC2l5uu1EwFL/x7W/ixl/8hnd8+j2k5PlmTs9CvnDVP/K2qz7H6eeIWmnH0ak5NBMittlOygLVVLn8nH4AeucuxKsHKOn/eRnV341ztelZQZfY2ppi6aJOgmyee24UePp7b/0LqahI38I+Tl8lJr/dtRBFt5oUpI06IDVXo6NvZtBt2xBSaDGb9JeKAqNDNfoWilc/8XSLvRsDqtOlJrGBaVokCcxf2EaQyA1E0chkLIJwJrMThsKpiKUgsGqoJEmEYehEzYLSusyQiAeIoogoDtAt8HwxKFYdeSTLlh6HkhohcsRCH/sq//Wre3jfVYLOeOOze9nzbIKuzBRI33/XNtrbXke6xee1bxcD5bqrb2XtuW9g9UqxGFVKkLHyjNWG8Vx5IInKaApoMmOCGqESo85IHUMIiqELsoYm9bpJHIEbNTJXNl4YY2k6imSWC1zQVR3TtlAlRWqiaOiqQRjNUJ4HQYBlmygy/BsEPiQJqqqiyPqYJNaJY9AlgL2jrZWAkCDysE1ZlB3FlEsutlXANhvMRzaWUWDBPLGw/eAPd9HW1kYUBVQrFfkMwrHwA7cphBmGIXZKJYqkY6wbhKGPpprN8WWnVR68+zlWHr+g6TAnqg6qjqGL9627HlGgC0cuEZPacSKSYgU/9Jt1SplUiihIiCWtrKL6qLpJEilo0kkyFJ0wDKk7VUIpFKmiCIIOOe4sy8J3feI4RlVmTqt+oAAKgWQ/1A1VRMYlT8zERJVcLo8bu1hSdFKNdOJYJ9ZUEk3W1gU1TC0kLIr7tJj9AOTaz6XrCLH5q1qK4cEpPH8aW0agujvzdBc6mV8QTlnONpl2YP8U1KQQ9ch0wqkLW7jhdrFwV7N5lhp1wrm9XHjF+wHYOTyJn9Ih0MAX7VINIoZrw5gyQrDC7sRwyzz2wCR33SmjWedAa2fAv/zbnZx1uig6P/n4XlKmzdPbxAbzyNPrGR6fwNI1NjwjghbF6RrFyUnqpSFKNSnwHNTwq1Us2S9JGFAuDjN/fg9f+KYg0LjuX77O/sFhCplCk+7aiTx0TWkybgaqSuSJSLIv+0FNdNq72rjywx9g43axqX7p1mtoS2eoyVqq+fOX0zNvPstXLORVbxU1Hudfcgk3/PCbPLvuCa74qMDo5+e18JcnNjN2YIZKeOfm/Xz45g/S0ivG68i2Gpdf9iGOPe4cokhsiDs2jXHnM4Ns2SVO3u89YwHbpwYoD09gS3bhtozJGcevpNCZQpUZYCufp8PKMTogDldf/tp/0NbZyilXvI/f/+f3AehsrCvZ5iNxzBnvZMvkAcKDPgOo7X2UFzAcA+BNif3BA6666otce+3Vs/4+Pfow1/z4BgBSSh4vqeErASRyjeMHjMewWBLobNu2nnNPPZetPMbU4IuzzroVB9M2OPbk5dx/nyiiDyX7l0qIIfeLqDabtMGbrvFCGofMnDwhDq7rY9nCEU008KMENZlxUiaHprBSFppM1zsxnPOak5ks7gFLOGH7No5SrzlYtpifU9MTTE5OM7/DBEksUCsMMKDuIUwpaNKJD9UMQ/EujG6xRiS4bA/FmlVqEZ28avViOu98nrHdE2Sz4jCnFGKxTjfOFYlCtejS0pZp7g2KkqAA42Mzbem7HrpmYkrZlEpDGL2nG7NX3Kw6XqQ4VmJn6HDcy0WgZh870OOYRTJr09HWxTPcy/79++npaJe/lyUKY3rmzKx3p13+NowFBtOTYmy2dnaRaauzY/Mu
nnh+djH+tnsPppk+lHZ724PPsvb0Zc1/b7z7aezuObR1zWXVSaeItjr+SB5+8L5Z1x2zfDnT09OMyyi/Zpl05lqIAF86U0YelqxcjmbIDP+xLyPf1kmxXGqyWW56bhO983vZv09kXqvlkLHBCcb27eHCV4rD69LVnZTGBomcOkMjMnPVJpjRTEPuoUaErifoptE8A7X15MimCijYpOQ+augK2bRBIS/G5oPAsfOO4BagVhRrZX/7AkZLMa/7xNdZukwQ4WzavoFY98nIMeaWfapOlXrogyQ+KuDjWS6JauPKujyv5rJ6UTfnHi0WgKcefgh96RLiwCUMGvuoQ7maEFQjSnKdilMqlWJAviAyVytXZLj3ljs57pQzWSGZ6x/6y5+pxh74bVT8GYHeaujhB1I+x1WplVxSTgtKVczjXAiGbeEHGnL5JjBCOvpaeG6nCOqX3CK5okLSZTHHFG21fdN6jFaVWjHhoSdEI/d2pMm0LCSHYJu0zCk0zSb2TYaHxHjLzWnFj4s4TsSaYwTKZzNbuO6nH+H97xTiuHfddiiT5cJF3SRa1Ay2NuzgrPHBtv6mL4j/B0746S/49DvfPuvvv7j6atae/qbmv+PIxzBtli0VLKobgNFxiwsuuQiAc47az3/dKvbrJ9c90bzuaZnJWyKdq84lfewbK1GX9U5/+errADj31KNpl5nB/Qd28Jmf/SdfeccV/MfHP/iizw9w0jH9mOl2JrNiffnM937HVz7wen75+Y8e8t13flqIka84aw3HfPti9uVrUJSET5Y4i5x27mnkJBqptcMknXMxDDE2bDNLxZliZGLGcfSo4zoBmez/3Lk6XHN12A7bYTtsh+2wHbbDdtgO22E7bP8L9neTufrEm4W36wE/+ONG/vUz16BqItp01vnH0LP4FKYnfQYHRCRnfP0o9UqAKrWFQglds9Bx6zOwwLk9aSbHXVrbhfdsmDHVSogqManFcsiceQW8mk8iMzu6ruO4HpGvkTSzOSGmaRFKKJimCRib73rNrFQcgaLGJInaZNgTdVwxocz2JEmCgkUYqpRlEsqvB2zfuJc4cXjiHhE5zqRs2vQU658VMKSh/QGRU6Y0MZO+3bZxil/+/CaWH7WGdEZEoIJI567/2szCfgHbqeIQR1NouER1Ea1bsqINRdcwVAmVUlWRhVFnMoCCkT2hpTWH7zaePURRVWxbQlZIsA0dLTbx/VB+J8JQDeIoEZTriNqsJOYgit4YXdeJo4QGu6+iKKBI7SUJFazUagS+S0rqsvj1Gn7ooxgJkUwFRFGIpSlEoU+xKkPsikE61cr2XSJqtXvvJJ1tOeq1GqZkVYzjEMPQmvT5AFEcEISgSgHGes0lnVXxorBZt1BoSTNyYJQN6/bwslOEoPWWHZtpbZlLJKFglqqTsmwC10OV4V4dhZxukegadRnpjuoRhqo14ZJBDFEYE0UJSeQ02ypJElQlISXhfJEboiUQx1IPKBT/MxuhSWkpPyJJElpTEg7q+1CPpGA1JEpI4ngkqkclENFJ08ph6wUir4aqRPK6OrqVJWeKSG4uJ7LHa45bxZbdIiJUq05jp1wW9mdZIOt2utu7yYU6zxwQA71u+RBEKDkHtyKzW2qen/5pgL2jUsh1fJT1TzmsPe/NjA2L6+y4QDrwCOISoYS/ZGyLdtumIKf6ZaetYsn8HhTN49JLBeTgCG5jeCzA1Qb5zx+LyOOaxZ9GtcfZsFFkIsZHd9ORTXPzL37Gkw8LomBdy+JFVbJ5nUjWlyiRhxGrqCkB6VCyaSw00oU0NZl51DIxqFU65vVSnBbwF8My0UhwJeTQzmdImW0Ylk+tJvrYKfuMT+1m19YNHP+yE0TfpX20rIol4VMP3nQd+a4UN1ViDENohJz0mlfw2nd9hEs+WGfXdhFB3LdhE1NbyvR2i4ziJkY4fe3xXPThY5mcEM9ppkxCu53RXYNovuj3kbEhFtoac/rE+uP5rZy2bBH9/UtxJc78wXWb2TngMzE0xKo1Iqtgxhrrn9zAD78nIq2lkf2cvOY0nn3ieVp6jwWgPevTlUuzYGkfN94g6pyOXtrLyUctJZWXGkyffxVqCmrVCrohIsJWOkUURVRqVYp1KcLrRuyojHPhZR8RY8r10JKYBT1t3LDrJvh/2HvPMMuus873t/PeJ5/KqburszoqZ8mSbGzZlkZOOIENxianGbiAn+eOmXmGMTBgDHewjT0WBgQGGRsLZwkrWJKVszqqU1V15XRy2Hnv+2GtOiXZwHyZex9/6PVFqtMn7L32Wu96w//9/4FzC8sUc2WymslyS2Lgfhbu+txfk5OivlHQ4JKGpMKX9kxxAiwl37MHqZWgGgqa4RPJ6q8zkMVzE2p4qLJqajkZ7EHBmgqiiqPpCn6Q9KQ7QiJ0QyXnOASSFlyJQ8LQxyiI/ZjdYlIojdGoL9JwxXWPjm5nfcnDjXRanRkAtuzaxovPv8KuXWIdtForlAsWI+M5zp0QNsj1OyRdlzCwyUvZg3q9g6LVeieaqhhEkoG00xDrdVVvsHvnANWpDqpkhBSVdKUHia+tN+V9bupcKSokP8A8bFgWSrzJNtsbmk5oCZsUDmkUMhkatXXyI5vwvDe84XpOnxfnlS2z3N/4whf42d/8qHiDk6XmrpLTBnqfSZoJV+6/nG99434A8vkikRbimHmuue1KALb9bJbZExGXXr2P/XvEWph75Rif+5+v7d246Ird+PZroafe0gr5qw6guuL173zxPrRk895+9Tc/TmvXOi+89Dy5rYIRst6s4zcbDAz3Eyjic8Mjg5T6ipw8Kfpcoq7KfH0Vw1SYmhLsnZ3Aw2m26VbEOpifO8P4UIFrrhklK/tAjz51DN1QGR7ZgpMVcLdOWKc4YKFIWZBYSVE0HTXWCGS/uoKA8nfaLSKJngn9Dq12Hb0pNTn/EM48L2zkm35BwFHTmso1193E+JZrePaYgPxv3TJIs9uh7cmzImpj5Ww0tQ9VE+vBSwJY7RAnVbZtEWv2wPY8Y4MOLz8lqOZriyHDQwW2TW5laUnAwRuVDuNby3SdDqoUmc0PluimbXaURAU6U76I/Ttm+fqXvse+ay8F4O3vGiHoVgnaFR68X9hFnwbrU16v79wuaahKFtUO8XxxnYXxLLWVDqaeoVySmpV+yuLpKQb6xZmXsbKoSULiBdQQz+od799NNWlR0Hzu+tI/AnDLba+nVFKpLUn4ax4WpmsUCoUeY3FB6aPhBLjtDl6vHw4qlRaf+FNBVZ4d0FlZq9DvHOSvvvB3ALi2SxzpBJHHyMQGykdF0yFRhC3xWy1qVZ///qUH6ZO/d+aVY/h9Eb/8yc8wtyj7NQfHqJ45znJms+I8/cgCuhIxPrIZHpS9iNPPij47NwjZcdkE7/9PHyOfFdWtz//+r/PbH/8TPvGx32JZ6tpNn3meUn4r2YywMTd86PcplfP8s/cEqYTpZxwNzVJ5/x99Hl3aErXZIqqu8Pef+2Tv9z/9ub+jdm4aSxUVy4tvuAp+Bd71sT9ksiAYNncNDTJ75ClePicQKOdPPsT
iKwvomRH0WOz1g9cKH+baKy+n7W+gS1JWqxXOHBNnYXVtmm5a49z8FNwofv/B759AzyQoiUXfmPAvDCvDCv+2Rt6PTHD1xSfFQr37L77Bw1/6B9724es4eIloxHvp7CL33vcisV4nI5vqzZxO1jbpNjegZ8IJ0pX0NcbcynbZ0Wew0eevmRF2RkHatgAKWQAAIABJREFUFHTVRlEDCQkUxs/12iipgW4kSOQXSZLQbDYxpYPreV2SRFCqbvQNCf0qFV3TCCUeX1ESIEXTJcGFqpPEGlHU5ed+RVBUf+Orp9i7z+SSK8ZZXhMLuFws0nFdHv6OMA4TW/s4eXy154gDHDjgEKVVTj77CJrUMtpzkckLzzyAuiLKn1reoNWtcPs73gAZcRD5kdAgSeRiTkhRtRQFrad9lSQJaqoTuz6pbELSNANFVYmlhpeWpiRRSGwZsHEQqwGKbpGqGrHUQNJQ0DShHwb09KWAHrROSSUZSJKSSoIATTExVBtbBkRRkGKZGXRLR5OU8XVvnYFSFt0AVW68VPPoK/bz93cKKFFSa+E7ltAlkzpecZwCKq7rbgYbaYrvquiG1CjRLBQUkqRLmG6IQkOx3MfX/+5R+jLC4F9x+QEW5hZ6mj1BqhP5MVpGJ43EOsjpAwxlcuiKSiANfJQExIpPFEv9EUVQNaOKdQSQxhFJIoNeaX8TXVDSO7KZPA5jSmZRBGHqZjG6lCkQ+CGqhEalqoGuq2z4QLYpoAW6ViZONzR7GnhehShUMSRsZdvAFXidAfZIyupDN97Ab/FzPPZ8vSdsPDyiMzk8xkjRZDIvGua7aYfpWkBYlIFGx2Sw3+H0eZsxmdw4u9RkZrmCLgUuvQZkcyqttSoZCamMaaOkKo6TQSIhKWRzOPhccXg3ADt3jqEkEUqcsn3ycG8OXjlxnisOXU2f7FPMFGJOTAUszQrHLQk1lhbXee7FIwyOioPCjQK0tESYJqSeuM6R7btIwghTBsudbgu/0SHdDmst8V2FfD+HrxrizCunQPaL0AXStKeF16jWMMY90lSj0xZzlwQGxDHHnn+Syy4V137NlVdx/2PPcsttwgYOTA6wa9cwaE2mn58B4MG7Ps0DX/wct77zfdzx9g+I3xsZ5uAH38jBq8X3fJfreef7fhFDt9l+hXieOjFL7SbdZoyZk4Krl1xFnw5GWSY2am1WFtb55B98mjkpxBt0VjCtGju3HWTmqOh9mV9Yp1FfY0OSsLX6Cl/mteQAdeAc8CSbEJIvfP7j/H85Vv+V12ZOPvKavzeuMvY2HZuIH4QItlhgsyfKfRVoUd4yXan54/XUGv/1EQDdHwIMgssmnKfD6df82xyn+B/8K8K0vwg/qPbzzKv+/zM/oDH0vxuf/ejXAfizv/0Ihw8YPPytVyjIXtDWSki53yJNX9s7paoQy+BCSTV6Da1ydL2ArOH0ArCN0arWGRyW0K92StQMueHaq9iz7Xr5jr9m2+RuphfF2XD9gUPcfu+3+Ym33EZHynR0whrrrS5f/Zvf5ap3fQiAmalZvv61b2+KGIcBj33lb7jpPR/h3AkhNjwy0I8y6XLu6AlIRQJiYte1wGuDq2YaY7i117y298r9FMsJYSyc/507yhSMHOd5EoA7776T0dIQRs6ESKyjwb4sY5cdYm21Tm1BfM6vNlms1Em6Yl4e+Pq9TGybRElDXNn3nehtjr/4MmM7BazsxptvZKA0TKCqtH2x3vrHLBJMosBjWIr4ri5XIOngd2SQpK+TJDHeuo/lSIkBr4vrdsjkHCwJpXe9jtBOHMj37vdsKO5hek6s61w4ROvxWb795D/QJ6/rlKnQaa+Sk1oFtUoXxbdIgwSnLH7PGciwffJKrtzpMLxVXOdaxeXUk89z5qSYu3o7Ze6fz/JU6tFsiz126NLL6JvYQUTMRltjbalGFCa8eFZoZsbZJltLW3jLj7+ZhkxI5tRxnHKWO974Jia3fwKAP+P/oWgbtCMpfrwWUsy0aHcibAnT7dQjhseKBC7suXgPAMeOn+HWW3+SQlms1yeefpyt2y7CSItcdos4D3ceMDFWbUYKGQ7tF2tv6vwqt9+0l/GsuKaGV8fzXLzKCmwkoi0Pxy/iqx2+9bXv9ub9d3/rr/jh8WUQKHkOXXwRsRoTRn7vvE9RiaIAXbZi5PIONXy+9Xefxk7FnLuhQdtboZOk7BwVwsKHtpSwC3keu/ep3i/9wxf+guJomUsvvb732hNHZgjbIoCPfRc1VMgWLGatTd23c/Ninci3Ydg2lahFty6use41WF5pEUQ+xbzw1bIDo1APGKwmWBLqWRybZGLLXmAzuMq5OW7+yLt7BEFqV3xnfj6gHgr5iRm7wPL4bv7XPaLXt69bpW46fPuf7+X8EWFXP/Jff50v8secPF/n7KzQ93riiWn2XbaVp58WJCKpYmEqGn68iVlPaKKQkNBCkYGhIuWH/q3xIxNc/cqHBU5ym7nGH37+I5yY8vjsXd8BQAkWKU6MoFk6bYl5p+thEpL4Uu9I9umgKhRLGzftMfUKjEyEtKri35cXA3bs13p9WU3fxe1aONYArmR7i+KETMYhTttsEKsosUYmY+NKlixd01ARwrqv7qfSdYUoCnssSoqi4nptdNmrEQYpzUaH6246wL4DgrLwq3cvsbTs8Y79u9lpCIPf6gSYzig3v10AiaePNVhp3Ecma3EcYfR/5qNXEQTDhNE6bizmJZdxaBpPMDYoNv4Vt++nUqvjtqEjWbEMTQdF6WFMfc+DRCEJw54TqCk6sRaRxBGy7QMv9FCx6Mp+C9vQcRydpt8WJBKAgoeeqFiO0esbiuMYVdusihmGgaqqZDIZPE9isCMXFQUlhSQRxlXTVFqtFonEhUexAkaKoSe9/qpceYJ2x0MNNSor4tlUqssUc03mFsU8FfrzeG2PNE0xcrLKmAhtLNt0SKJNp8DQbQKvtbGUSGMNVdWINpie0hhdt1EtlS/+veCZadRvY3ysxMROYRxWqi1cJcDUdUJZ/UktjXbkkvghupzQkJAg9pGkUbL/LEHTwZBOfCjFWXXD7AXxSZCSagobcXaUxPiJL1gf080m7FbUQbdMQhl4J6Skmt7LQFv5LEkS0Wl3SEJxGPtugpMrsm33ONU1sYa/fc+LXHr4Iso3C2aghx49Ae+ALXnIbxBKFDKUcgXaLY3vzwrrqigdDEcnbok5L5QC7n94jnxhgGxZfO6pmTWCpIUrHYY4TdHjFDP1CWRDbmwqpJFB4vlIYida7ZCOWeapc7KiwWmuuXiMnGJw/Lh0LC+Bptvike89zI4xkbnSdJOTx2eotITDrFkmK6vzpMS0u+Jg9JOIRAGFFE260HrZQSnncaQeWDg1jzq3QjaxUUPxWt0LyBXLlCZGqEp2uzRNUXWN4qBYG1vLBSrtOdxuitsWz0W3TYyMyeLKEvOr4nPlkUkmJmdpNMRa3HHJ9TTaHntHiuz6gNT6ek/I97/xXf7lq3/Jc8+Kqtv7P/xzXLT3dZTKO3vrwHRsfNXh+VkxV/V2F6+lgK9wflkkIJotn7jVxl
DF7wW1Do5e41Ax4Za3iWxfal6E53nc+/A0bU8QZhhqQKR65AaLvd+75dYPMH3qOGtV0VswPLKfjtEhCQ3WTj8KQGnXLdi61mPqvOn2N7BlYAI7XSMTCQdzJNtHtpQj3z/Iln5RCei3NRwcrBHhEGFn8NwW5148QVn2byZ+m+efeglbt1AVceC/95Of4O7/+LvohtTecYqEisLbf++3eFQ21butiDRuU5TMjn7QxvMTCqUBMjmxXnO5HKoZiETNRl+d28H1uiytivtdrVU4v7DA+aUFzsyLDP+xqVkuumIPTjHLY4+IsOj973sDq4sLRJL5VMtonDq1xK/+p/dSHJIHeKJRq3aIUFA2dAmDiJGhIp/9zL0AXHz5QW5900V846tHePC+5wH4zV//IEPjKYEvyH4ANNVCVdWeQxZFEVockyomP/MLItj9jZ/6gpjX/wEbXUl9Y45MxL22MlVZ+/eZs/xmSnZAo7b22oB17plneddnBYPX2RdPcOllVzO2dydrK5u9Zw/+y1NkNZGkOTvb5qmv3Advgbs/88NB+eU7RFJkd0Gnb2SIcllUGX7vd0Q/xhtv2AuuPMR0kx0T0Ox2mZ4TjuEf/dpHfug7bzhwBQ899GDv77e878Pc++xfASd+6L0DO8Q5XhjNkLNUlusVJAcMy611Ks2I9epyz6muu3UGB4YZHBbJTi9I6HSrNOvrlEsiuInChGK+nwNSP0rPllhYauGSksiepKTt0uzW8H2f0Bdz7GTEmaZrkixH02h1fNrdVfxI2Knh0Qn6B3cxtzDbO5OHR7fSbnZYfxUyZqYj7Pjsk8JGKBio+GT6FNTzIjWRKwwyki+QtUUyN7O7n1xBQde76DIpmjcGWOo+zJGns2RPiveZacDY9n4Gi6K6/ak/+Atufuu1FEojmFLYOE2LvHJ8ilRJBBkVUMqbaJqCJv0rSyuxsD7Py2ebFGU/jUMd1/d4/v4zXLR108WtpU0SeV71GQZJGNJqKYQyiMzn8wwM9rE4t8LzT4v7s+0SLz7zHDPnxT62SjkazVUayyvc8ZOiB6nVXkVLNLSswtYt4rx+4PllTsxsZXhA2MXE15me+h6qqmJlxP6rrCyj6ybxDxC+/Px/+yjzx0UwsLBc40233c51+yZ5xx2C2VVX86RUpT8g9Q2jGEWxeoWFyM8CdYqOTUcSUIVRwsRIifGtu+nEAhVy30NPUDKWGCiIa6gA73jrdWCFTM1uikbvtBfwArHfSzv6OH36NEHD4oprxPp8HFh5Ra4JaZvDVpuCWiCXF9dYHChhaZDPZdEl8UY3TClP6hycnOD0ObH26nMLTE114Jc252T/zjHc+WXmj4oiTGNdhT+G2eMP877bxXNo5sYZndjD+Snhy5wya3zuE39DX7DIre8XlcA//MJj8Mvwnaef49xp4R++7uYdTJ2Zp5gTPvPAUMT6uSp9r2JbymsQ+wFOXieSPtlG9e3fGj8ywdUNu4TBuO5tb+R/3f09FqfWGNkmWdQKFv5yEz3joEgB1jSjUe/41BrigRQMKb6nhDQbm9nB/ZfoWLZCPi8W3ZZdoBgRcSCCLcPo0qiC6wa9A7TTCQhCD1VTUCVERFV0VHUTGqHrBqmWkqQKzbbYnNlsljCOMC2zV2533S6WbaFIxjbP89i7Zz83vOlN/Pmfi2yFM7ROoOl8+lN3sWOvgPOl6PQPZxgcFgfMPXc+wTXXb8E0MhxHNL3XqjrPHz1NueDQ3y9JGGKdO955Lf/z/xIHw75LRnAck7YWYJYku01qEMUewQY+xE7QVAtHM0nkQRyGIZqTQ4li4o17tjW8KKBYFpsn8tq4QRMnm0fTHDkvWZSoQDZj96pEgR9i6Dqkm4KTSZKgqiq2bMpMwhDihDiOMXVhXP2oSazW6B8XjlUU6jS6AWvrXWrLYgN1quepLFfxOzotibNU0gyKGpDKMke2aGB6osoYBB15nQKuqbzKsKVpShx5WPbGpomwLRPXjUgVmaHVwTBVClmV0BcH4T3feBTdCPmxtwgB1v7xPpxsAS+K6MYiaGj6TWK9i6mrhFGnN8cAiqyYqnQhSUj8hB69v6JIwg2PVNKsKmkgRDs7klwFAfnbCL42RrMjDPCGwbVNC0txaFZbci1G6KpOf98oE5Oi/r197176B8Zw1BHqFeF8vPHW2zBshXZdzMvhPcIITQxbvfR96ppUZzokdChLFkPNyKBoBpmSeMZmWuU910wy39C4674Zce1GHSsJ6fa0q7ukXZsoVUkN8T1xJ5GZOo1wI3vuuWiqD9LgPzgbce70Atddu5sXj4oMFJeA7ujMHZ3mnW8VlZ0ggpmZV+jI7PZAXmdx5hX0ICBn5+TvRShmShp6yNwNS8+dItV0kpyE0qYxKSkaSk+GIEw8mmGbTDlLc1FCWTs+saqzsCAyrY6xDSvOs762zBYJg1ptVjCzDrXlNaZnxOFx8NKrGC9tIZCBuNeqMTA4xCutEHtVSgV0K1x8x1vYe+AA9/zFXQD8+X/5Lxy55fvcdOt7xIX/Nnzlm2dJkjaEwubVgha+10RRk57chWGq5IdszK5wUNrxEqYOS8ZennhBXNPqyhK11VXSbgtfFWu3HXoMKRphukmNfmx2Ha1QxsyKZ+Vj0fItsq+CUCWpSkKMaYqg89FvPcKBosFFQ30MHRDVyFYQ4XUSVqcXWMuLg3D/xfsIaOGfE65//+AoqeFS2rKlJ4RNGHP7la9nYHsfuMJBfC+f4M0/97NUKmLvtVotAkPaOoQ98/SuEMhdE3YkiVJUTaMZ+ayemxH36/u4SUyaKsQSLaFoAgpoSJHYQm6YoR3bOHiVhmNJyG9Xo9xvcezUcQYjyXz25v/A1PwMa2viOtbaDXLlvcyd9ynkhMPeqc6zpTRGrHio0jGs11K29PWTlSiK0fJWrLCAioki9162ZOB1fQLXQJdohCDyUTWIIokMMFRcVDR1k+b6c5/9b1h5hd/9vz9FvSaTG3qXBKNHeJQvmq9hd934LkUR4u2NithbxZEcURJTGJTIgCimXBhm/w2v5/m6qFhkCwbPPn2Cta8dY+uosCu8DwoDKtGy2Mef/uO/5v0/8R72/MJPoWtiDiYmDtJWIhpBl9QWc5zrK1Dr+CxL4djbfvKjRFrM99MlPElMgxHTqKzTWm1z+RWHAPip//ifqdaadCVs7qG7v8CVB6/CNor8LXcC8MijD9E3cRC/28CSdiIIu8Rxm64q14sbsRpEpFok9htQKjrUVs4wMDTGrn0H5foosry01qvQeL5HmsZkC1kaTWGb+wvjDPfvYWZKZNeX1p/GUlRSJSKWCd4kCCj3l4h8KBXEPkJ1WV1eY3hU+APtbhfV0Ni17+IeQkI1dJbWm7i+hWluOOc5xiYmGRkVVY5HWcSRFZ2P/hfBMlruM7EHHUrkKTjCThSyRdqJh5qRznKjTdtVCWIVV57HjdU6Z14KUIwsO7aKZzw4Mk4rCZh9UTDS3fHT72Bw+0HmpxdRZBWs3Q3RSwW67WWmzwoY267t+4lpk8bibPLP+FRq8/iGQlcKt2vVDn3lccZ2j
3Pk6U0qfVOzaMtAykz7CWKPkj2IF0tphFbIiRfWULWYXEHcz3BfjleOP0DkigA+a4ISeegFh3xeMqYqJRwjwEBn/04xny8cP8bUK49yUsIlTfKMDw+wOr9ILM99Qx8mU0x6SZON0apbfOTXfhOAYnaAi7aOMSbZWAFKpRLNTgvP84jjDTRQiqropLpMHkvkxJZ9k3Qk463fSiloNkeePE9BPuetQyaDhYPs6Bdr+hQvs7LWYaDfeRUZEMy1U1zpT60tJHS7BcZHSlQbm9X6jmwfmRwWVc2B/QZx5LMi6eJb3SU0Lc/ikkd9dQaAd7/7Wi7fu535l5/jbZcIP+9PPvsk695rIblWZo25s1UWlgVM1hkVsND3vP2NHLpCJBv33LyXgpVDkXbqzju/Qb9e48r3f5B/ekysg/Kg+N7VtQbbtoskTBB6zE432LVb2OVtkxNcv3eU+fnNinvUbqGSJ1FaJPL7E+XfF02/QGhxYVwYF8aFcWFcGBfGhXFhXBgXxoXxf2D86FSu7hCZ8+986yHyeBw4XGBRZhnjpkJESDNwsTagQpU+tCjLT39AUBB//+GjLAJJIaD+Kqj0zGnI5dVNMdcWrK1qbN8ntY0yBqXiMNWldSxZNtdUkzCIIDVJZB9NGArY1Qb1bByHaJpBmsQ9CGC328V2dDqdDrmMbCiNY6IowrI2sgAqUQQtd4Kf+YXfEd8dLOF1l0jSOtXquvyuDkG9ylMnRUl3eAAW1xbJD0z07u3BR56j5gbUKxovHZeQsVaLXHmcLYdElure7zyHWsqQZjpYkchuDfQZZHIOqazKKapOs90kmy/0KnOmaaE0AyxDoyWhSfV2F9V0SBIxwWrksX/3JGvVKrrUmMpmdVI/pLJeIyN7mdTUJPYSkFMQRRFJklCv17Hk3EVRhKGo2IYJksAimy/TbVjMHBWZnqXzVRZn16hVW71+HM2KsR0DR1Upl8X9BUlKoiukicjUx9SwTR1FUYhj2YRuObIhO+1h9AXxhkGKrCipCXEEAwPjdAORhYviDoZuEXsaiRXJZ2OBqvLM4wKSky06vO6md5G4HoOy1Lw4tcpafZpsNtej6dZ1hYypocZyTekdNM0giRWQ2WVd10mUhCQNellpTdNI0/RVz8pE1zQUJXxN9apST4mjFgVJOeq6AZWVZUYHBd76msuu4/CBy1BL20W/G9DtQq2TsBytYTriuqx0iKBtkSls9DWK//aVt5JIPThDCUFJSJJCTyPODxOs1CaVvXaOPUI9bvNPj57jsr1ibayuWswteYSq2NdBGKOooMQJhoSHJpGOpiQomCgyyR7HDqnms1oTFQxTzbNyZJqjK0tkvU0dj7QVs3N0gAP7RTbt7Pws683mhqY3WmozO7OCoup0EmETYickScA0Mjim7EsMPIxII6hLrR9bJVJD9FxKIkkL1DjFsiza1Spj/aIq1dJaLNeqZGS/w/TJ0+QGiuiqSbPalWtKQY2BBJYXZgDYt/8ghUyG52bF3yMTBzj1SoVWUmEgIyoWF+3aReKnZLbs4OEHHgLg45/8ff7py//I+rLsE/ptGMytENWqHJ0WsLX5Shs7SfGCDstSs8dsu/j1ddZkBdG0ErTIJOzcQ+qI67TMIjlDByWLI/txxvLD2P06L79c6c25F9TQGg6mLfbjelLFwcKMNuFhhlLHbsf4joTgsoaRDPC3jz2EdVQ0vSdKgp2qxF2vR2VvOn1EoYuayCw1JraqkXomsYTgZssmI2PbmNg2wY4tojpx1/CXeWr5mzTXRSVgrtMWtNe3wnc/K0Ro3UYN3SnQkOiI2VaDttWlFimEqsxiahla3QaKYiCPBjzPI1Uj2lJsGcNkYssk3a5HKkWvDb2P/n6LbWMj7N4ryBWmzlWIuhpfuPPvxe/rKgPlPr4yNcXFl4h+3F/60B2Uy1ncqIYVyOpgpkrOMFDk/s/YGkN9/RTzNm0J5SrmChRLCVEXlEQSEiUKQRAQy8zru3/6Y/zg+MVfkhT3HwBD/lzJydP1A1RZmVOk/UpTpaerpyiicqW+qi8rDkVlfMNuBXFMGIZkM3lMQ+yPMPZJ0g7jow753GYW/MoD1/JcTfTIlSZKFCb3M794Bj0UFzV3bJGOt4yhxKwuis+12j4j2/JEEiKnJDq1ZpvQ07BlxWtwvIylqxTGSgyNiz155lyTif4SYbBpO+3yENdeVe5VrvLFEdzuAsV+h2ZLoCbsfA7HLNN2RZVKCTzskoWm2myZED2P9XqTETtDeWyYUGptnpg+S2W1+ioq7YQo9HBMi9F+gVTRzJS1+TlsiaYp6TF9fWXiOKEjqxOBoZAkEXEasrYuzifD7mBZReoVWU1TEjTFpjp3voeYUHVIFNDQma9Kbc/YRVUT9h+6ojcHg8VxzgJ/85CAqE3kbTKphWLlSX3xOUW36O8rkHUkpX+7RnWlwY6tWxjeKs6d+fOzvOeWN9FcPc4ffVGQjQTJOFfvy9GZF1CylVWbldUAI9CZqYgqg6eYBKGCHtWx5Pk0fepZktTvnZmqlZIzFRzNIivPtcJoG4wsndAnk9/U7YorHSwpOr8e1bB1DS1eFdqO8jmESYtcLkvbFfczPVshUxijo4r79aIEQhPDNrjrLwV5hZGB/lKOddfgXf/hDQC89VKFB559iq2TYg6STkTU7VLsL+FKyFCSdolaBiavFUT/sRvfwkVXCRvxwH1PcfqlGS47tA3ES5yeeomMYxFHom9cPECVOA1JZXuILvvWRycOceSkmOOtW4fQOzO8fORh+qrCjwwbK2iJw+HDF/V+f3rlRY4f9ZiqCIh638QujqbPYtjiOqPOcUYmxzkxVePkPfeID90JyLPynj8VNuTDH/1j5qe/zyP3fBsAKz/GgdveR6tS4xd/Wti3bCnP8aNTFPwmT78gzqeXZ1uUh4ZeMyenzjVYmq+SyL4s3RPX8uaJK0j3i/64PjPPK8dn+M4TAsb6rYeO8+Mfeg/3PXIOiZIl3WibCHT27BJr4cmHpjmwa4BbbhKV5bHRAaZPLhC8isZ/27YdtN1vosYOiiKqfJrxv9H5+3f/9f/HUa0Io3zlDa9nvVqhU+1gSgFNP/RQooAoa1IqSBXunIOmQGlMrLid+/s5wv0YkYOibRpp1UhQtYg4EgapUo+p1VK2SEZBzbLwYg0l1lGkOK5djAk9C9IQRZIybGhfbeDVdV0XTnIU9QIwTdPYaNJqdYQhU9INGJy4nogUL4n43vcexLKE4z08MgFajlLfdiYkfnxgyMHrNjgg8bKryQJrZ44yP3O0d2/dtsvW/BB1r4bhSGPq51hr1hneIoy04q/jNdrooUVki2taXYoxDJtEwgvarS6eGkEb6g1xf8VcQpRCX3+WWBI8mB643agXTHYIWZpaQosdHFmS91MD4jyjoyqW3GxREmC0I8yseAaNQKHsmKRxnigS160PZlDSEA8Vc0Nwtd/hmcdfJqkK58AcKJCz8xRLGTISpqfGEa5t4MZNcllxDTvHdrE8v44ne2g03USzLWxdRdPFNbUbIW4YEyYO/RJR0WmDrmUgEYdn
NUwhdIlSj0A6FlqSkMYBZFT0jV4mTQdChoZkb5iewwtVjNBnwyusVJpUVpu4ZqcnSKwabRZ9pWfcw9Ank3XwfRfDkKLMdoYwjMll7B4pC0YHNS0RytJ/krSJQg0NBd3Y1Co7c3oWXdVwGyJg78sN8OGf+jnecvs7AUjtHPVOm6DrkspelGI2QxYbwxlCkaQTBjoJkGz0j0mDvm3EJpX4/zSx6AYRuqGgSRZDT3VRVIVsLHV2gjpPPlPhx27eR7Am4Lwz55t4UdpznvNKhB/rhCmE0hBGaYKRpERJiCv7PpLUJ/X83v7sGl3qUYfc0Yi9ezb1P1Y7DfZN7qS/KEr+Dzx+H82ui9RoptOu47dqKLbSc1bjJEWPIVGC3r5NDZ0gSdDl79mxRpDoaE6hB+/sL5UZzvZTMzNM1cWB1nabZC2jJ9gdZrK0Ww3Gd25Dl05npqXS7Lo4pQznwSeeAAAgAElEQVRnj4vG++uvd/EVjx+7TiSd9l5+mJnFJfqsPPMdYRcndh4gckMWjpzhSZkAUQ68iUtvc5l6/qHeHLx48lH2jO7HqwhbktM91pdXGDIdDl4nbNDP/sRv8OVP/T6fu18Qf3gdjYxjUh4rkQQCAqRqwkHXNQOvJR3KQZt6tUvY2TyIMq5LHFdRJUykrIQQZfHjTTIIvb2Om4ZU1sWB+oFrbucXf+vnuetb3+DRR4Xo5cjYMAE66wtt+svCScn0Wyyfn8PvCMqKMFIhN0xg6rQbIqCMwi6VxisceeYkwde+Jt6nh2yLfK4ricDNyfQxsFP8v2Jv9KLmeMZdpybXvbY1R+xMEntVshtQZb/Gob4dxEULJyf6KdLEZ9/Fh1mXDu6Ljz9G6ClkBvpprwsyinymQb2dsnz0FA/dL64pbwxx8WVX8+F3fgiAerjEPz70dbZtHefyA8JR3DZRYH7hLCoOcSxF5zstkswM1ZrUiltc4v7H10m1gLe+SQSTp84dwbHzGIaF00vsRdiOxat5KT7/hY9jNOv8zG/8CQCf/Zsvs7q6yn/9nV9FU2UvsaZgm0YPApiSyMRUIhpT5YjjGF4lvGugkqQxcSheMxGsieeX6sgWS0qm0IQs7SoxX9nUonrksePUZTAyseVS/ukf7uJ1113H2ZOiD6S6WsEyLJIoIpF22NYMGmfUHswqiELK5SJrnSW6sk9itpZgajZj2y5i6pQ8M/FZret4raXe7z/wpS8zu9oFISlHszZDVhG6hP0FmbSL8mixL2HcYFo2nTBhx4EtVJbFur760PXs2DVKuWDQ7Yj5Wzn2LAt6jftfFFBhz0kZHxxGUxPOLAn2sSwldu3ezbmz4m8vSlhYWUFVdSJplBIvQkkSdN0gSMWZFTQTVLXeO3csS8ewfYa39ZME4txZW11FSxWa7QqqPA9N2yF0PWr1zTkolYXNtOaPAJDZOcT0whzTx+cpZqUBVX1ss4hTEBC52FB589vezJbxCXTJRhykVe598CGWps+jVyUB0tgylfkUVRVwt9is0FwRUDdbJo+ySkTJMSiXx1lYEGvDNgzS1N5McqsxnueTem3CiriXI3OL9GcGuOGKi/DcTWhX4ETIdifM1ERJU1LNIiNZdhueB2pC2HEJZRLWyKvESUIie66NyMJXWmQMh527JBFGq41lhBS6HXKJgLZtPWRySdgmI/s3G7WIkyePk8vliOV68YIaenaImr855wCf+fPP8cUvi8TCe95+M2+74xJmZzeJI7LZrCD+SuNNWK6uEkcamiquM9YsoMO9X/46rhSBPpc8yfrSNLu3D7C0JPqSdUOhWltl/vwmOcPc2SVMw2JC9i6mSkqimj1YsF5w8DsNml2fg5eKoOwYR6m1xXN7z6/9NgArTZ/FzjCXv/Pnxf0Wt9AMUyKzy/ePie+qNRdYXavTrtZpLYn9MNLfR3Vl7jVzst7qYJVsHJnM2ejV/d7yGmfvFsHbiFFGG0t58gnRf3zo0NW89PQcutmlTyaYzj+3AB8BRVc4/ZLwQcqGwo1XHMAyRdD06HeeQ1F9vFfB2L/9L48xMFLEKWg06jJJGPyASOMPjB+Z4OqsFKFcmF2jWWvTCbqsr4pFt2NyErtcoLrYJA3EIVddjWi6Le57WDTgZqU/5UZdxjaLO1z/BpV2BbquMDbbDwOpTtAVD8dttzlw8BbWzjyDqsnmdSMlTUFRNTQJ/Qx8QWu9ITgrMOdKr4ogXtRIkhAF0KRhiaKIOIoJZHVEVwz6+wYpDw0QSOrubnsBTbU5MbXEqRfEIjcthWIhQ6YkNmfZPkx52yF2rD/Ms9wNwMfqBrNzLpQg8UQwNe93OTiWI5EUzqvhML6hUSm4XJMV75mp1ljGpzkmArlt+iDZs4v4N/dTKUv6dNdDyVhoqUlJKleXMhkqGnQkzTS+SqnPYHVdxTdENSuPzrDqkNXG6NqyX6Qa45ctFCk0uLi8TOvUIqqj4hUnAVBVhX2OTinbx7Ilrn2t1WbE7CMnWbim/QDNV+lUV1jtbDAWlqmFAduTPlZkxWuxUyPKhqS6pCBPIW8auKGLLw1EakHUSmkUaxSluqlmWdSVOobsQRi1NNzAZpWEUiiraYpNUwHdjEgTYUzUqE2sljEMWQ2NYxRLodMROGgAXTPpHzLRFYNUChmGsU4mJ5rrAQpZjVy2xOparVdV9MIOmhrjpimexG9r7ZCM3ez18ZEqqIqKokX44SbzWeJ3QC1ywxU3APAHv/f7FAv5nvhwlHTp77NQ+3KwAR+2EhIlJg4UDFmd8JI2muqgxxvU/cLo9CmgZjaw0SHEGn7s48kqZkktsNiM8ENxnUem15k7t04aeTz4gqAhthyTiIBEBsudMCXyurhhF0eyV6ihTZzGRGmAJoOb1FOJUx0kLbFfa1LEIS1FJLnNTODU6TPcfv2lJPKZTs1UUJIURVbf2i2PTsdDVUxi2XBumDYksSSokSQJaSrmmI1qb4qiimx9oyGyms72UZxt4/Tnc9RkBboyu0Di2HRkX6adzRClNrWmjy4dhInRbWT8gLMnz+D6ImipNRvksxmWz4ogbWZphW1bJrFLBRxpA89+/2lcTScKFc7LKn9leZ1rPvghto6LoOnr3MlX/uRTvO+Xf5XL3ioSUZFmMHVkimtuvJqzUkD32NEzHD0yg5WW5JrqkLouK+s1IukJK4pPEkYYhkUsK0dzp56iVCqiRJtZvDBeIUXrMa1qhk2S+KivcsQ7RpNw0eWjHxKU6h/4pfdSHdzLluY4t1wkspojSoBtZqmnYMmejnzOQk9jzpwWwdzO/ixbyhovnTzNzHERyKx5Xeam1mgma+gjklXUVUjSLBOTwvmptjpEXTFnvnRWn/RahIMl+gbEe3J9fXS7Vc4cPc3+KwX1fLGwg04XciOjtBrCOc/qMZWZtV6y6vDYKKePzlPXIoZGhfNR61Rw+k32TFyCL6tLx557nocfu49jx4Vdfvc73sttB97Kvxx7jJdfEX21F51eZ9/uSSrra5iamHNVy1JwhjHlGbN7+w6GR1V2bfXRLxV01G2vTZx4JJFHHEsKbkXFbwWbchtAs7H
ExPAlvb/PLMzz1tfdDNCzgxYF2kGrVxW3bUHsFEURurnZl6FpGqq22WmQKqDqOki22SBM6cubfOjmS7nn+0IaoaloaBmVxbmpXu8pwNatOlNfkTTatmC1s42AQCIIkrQjbKQeofVE4FMUDJQNwXfbRlE0SqU+PE/MQRAEmJbF6TPHyC4J52pyzza0xODwIRGY3s+XyJczpJVNxsWBco7OcoOLrnkDB/eL9fGXX/hbLtq7q0fl77o++4ZKVKfXuPZNrwfgY5/4BN/+1ktYSoPxrEgIjGQUqmunURMRMAwPHaLaWqezdB5NqmrXjArL8+fYs0dU3Nv1GqEXkqYKurVBlgV2oYRhaAyOiTWUyzlMbBnbfCaqjesmRFGHM6dEBaobBGiqQRAlWFJiJCElURVa9c1+9e89JERh610ZuJ3usj5/nqzfwpOoDSMtYjkRtTXxPK+95XZMxeaZZ18ikPIz08ePsXLuDFocsm2XcNjnFo+zslijlBUJjq3bRnD9LsND472Avdvt0mq2CXzREw7geyHEek/CJYhMbCvDtp1ZXjohiGLazTW82grTI2Uuu3azIuN2DUzZw9rttkkJMTSXlgycVNtB1VIK2Tyzs8L3zJh5FCVCk/1qmqHhdSFrF+krib6ljCUIwHZP7qJRE0HB0vIshhbQaszIazIZHZwkxacp0RZJbJBkAgazg5x5FcNq6/xTnHxaVPiU2hwvPn45B/eZcJm857BL1i7guj6bDJ0pKSnEm0E1wCsvfpdcRvh5+YJNzrZYX13BMoWdv/qaw5w4cZpOd/O5q0aGUFF7foKqRLhBiCnPQl1VaDRcsvkRDl0uqj3HOEqzIdbz5Kiwq/NzNd5yjc3X734OgLONV7AyeTTN5dTzwlarSYeMUSMOa0xMiLUQRU2yBf9VHK2gKyYJEMXivBofGuMk8PLcM5ycF8nGQxffyMj4ddz0biEdoKgFpl98kXIQcMkVoiJ7zfYm3wbeuH+YRFb3Sn0GXitgcVH0Fo+M2yzNVLnzk38JvyZ+f3lhleGxflyvji59SNP598OnH5ng6sgTggqyW63gt1oEaYj0o6is+vQ745RyefDElJecLEMjGa7cJ9hmsk6BO3mAqH4FZ44fkd8acOKliPY6WFL5vBBGJImCFCJnfb1EGNpYto3rC4eokDfRTY0g8jd1PJTNipX4exOitZE9UJQUVTEEnbasYMVhhG5ovcy8YTnousHaSpsN1mxdT1A0DzsXoWpi8SRorLfW0BfFxl/IOpRXiyTlLb05a66bbHNyKO0JXpZNvOOqTp+VYzgQXz43W2fbSI6r6mWmpH7EqBZyzWQJNxEH49yZMwQrPrsKQwQtX86TyZn1CK3Q5mAqMliLS+eZ3VukVBPvObjcpO4qKCULe0g49c58QrqsYzhd9smmyTNRSt94Fm1YbL5bmyUWulka+/KoiyIAGrZqdJ/WWJ1f56IbxbM6ODBOdO8pvGEJxRrI0E663DQzQt0Rr82yws52wE67nz+Va2Mw8bnJLhNIooHjwymH+wySoxHzEh76VBJx9WQeuwH/rEg2OyPlXaUMR1fEe55gmXFN44PqDo4aEiqkJ1hRkwk9T1U2inYy2+gPWviOOBi7agHNT8hqJvVUOFyqruG6LjkrRyBL96ZZwskadCRrXDuO6bSqBEHQW1+GmsEAAi+gZIqgIVQDSHQM2fhrWwaW6RAnLpM7xbN6mXlGR/roz2/nY78rtGGMjEIzCnDkgWqlGZIQVjwwNxqwbY0QB8tMehluW3VIE4NIHmiRn4AFs6ttjA366Qwcm+qwZXI7cUM4JY/c/R2WVuCXfkOovy+u1Nk6VmC5XmX/XuEM1NpdurM+piQWUQkYGBxg6+QOjpwWWdv59YjQqGB5GrGMHUMlJVRjVEnqYaY6Hb/DWD5Pzt3MOBUsnbf+2A2cnxMB0MziPKESoMn93PWa1Lotyn1FArnXwzjAMQU0WCKgUFJNVNCkQ6uoBmmS0jcwgCcrNKqRI9YyKFoGS9K05pQ8hpUjQGS7I9dHNTXcptvLvs64c4xvmSCXydKqCedxefY8ew4dZu7ctLwPi3MnnuHl5SojewR738rsNIdvvI6vPXYvnioC6IWZ42RHIs7Nb0LwCjtGWTn3PHmpd1K2LH7jve/jy4/dT6UmDtmvfO97PPvKFJEt1k/qQKtRoa9QRitIpkUlIU0VfC8ikxVOkpKoNBp1olfRbUehgm2bm+Q/QGJr5FQLydKL1nD47G9/lBs/KIg37ltTefzxZ8nmSgyURYAQF/OUy1nKcQ1XatGkrsXRpfPsGhYH8fved5Bz1Tp2NMHO3TcD8LrhDDfvn+Ds88/QqIh1/fXv/gsf2Hsls98XkBEtqfa0s+ZlhWRRd3nrrbey3hZr+sTpRcpaAT3tR3JHcPHFF/Poo0+wd8cEzaY4RNTUo+0mNBoycZMrUA5HqJ49gRdKaLuSoukaHb/J0IiY46tffwMnTh6lKVnr/uLuz/LO22/nva9/C5//6j8AYJQ1dp/fwqEdO0gkXLnbXccZGcSXG3TNq6G3LVqNDqkqNoiu+VJLUEXTZIY1VfHjzaotQMHMoSql3t8//rbbOPPCMbiRnlxJgoBMbwRlG0lGy7J6jnCSJBiGQeRv7r04jomSAFtW5k1Tx9BMFuYWOTG1QTlu0m2cJ1lbIGuKebn29T/OWn6VoRHBeLlYWcBSDUzd2Kwkqxp+GAldREnKgpoydepltkj2vrbns7AszryNvaYoCh03giTC7Yjz4rGHjxN1VFbnLxbf898hPzCEMrWZPdfNfrbvHOaaN72No89+C4B8PguxTkNWzkZGS6zN+ARBzJ6dQq/uP//qn2EaDVRDOOQAZ068zMunlrGHhF5O0GwSV9bpy5V7c+4pEXGSsFIV1xiqKTsPHmBoYHgTPSNtk+eJBBFAZaXF0vQZGpJMIlF9oriD142wZEUoXyzRqNXp6xtA6dEhJzTSFimb+9ixIzygel6Qj6yGYBoOTqkfXd1ggB2mf3SUK/YJaJaW6ePo2ROoSkRXknOEXo1cNiJv5FBiMVfrKw1sK6IiiQ1UJWZ4YpDZ+Tna7bZcL4I4pd1u9+5ZVVV0VaErK1JR6hP4JsdesJCFY/btP8zMiVNMT81jFzarC7u35yjmZZUxM4KmWhScBFMmRW1rGNOMGB52aLREhr7plXj8yQW8WGpM+p5gO9ZCRkYFtFUhQxKLCqxuiHW9ffswjjPZe89nPvWPzE2vsWXrcA+BomsW7WaHsaERXjMKJYZkS8WLL3+Zh757F4cuPdyjYvddFU3roJshoTzrVNVG01JCeWAFEoqfzyi0myKo9z2LQrEfP07pl+yu+YEChQGb+tRm1XigVERRNFKJnU/9FCOnYJjSBkcGlfkVtl3Uz/BQdvO6QxHkbGhYDg06hPEcXVe0S2iujZk6hJGGKVEbtqWjRlDIl/EDsV7W1xe5eM9VzLxqSvoKLmHLpm6JcyecF8ITxdw2bvxpUV5O+vqZf+Fp7IJ4dvYOm6HLr8ettTg5K22XJKQyB6/pkZRVwyaxHhFKI6
8NlZn65qPEzWrv99O0wdxsgKEoqBIi7sWvDv9+eFwgtLgwLowL48K4MC6MC+PCuDAujAvjwvg/MH5kKlcjEsN79R3vEBhvI4dpiKxDlKYkYUgzVogjKVTW7tJJE1Ylnam/IqLmI88Po+qyfspT+KvvInRPsjwnca4n8wSsY8nelCgssL46jW3FaBJn67YSlFRBV1Vcie00dQ1VVXvaUIKoIn6NbsgGQUK6iX4BRLVrQybJiyLOTZ8n0tZA2RA7NLHsQSzH7FUjVN0kjFxsSSpgNEPaC8vMDGxCJ2bDLkt6E299lUcqAkayb5vJx58JOSyrI97WHLftKvPtE6vUZW/n5ZkcLx5ZoOKKORu6ycGZ7OfsPUdoSqrSxkDItYP9ZIIij6yIbEzLrmOuq7iSOvgpXeNMbHPZcot0WsAXXppaoXs4ZWTc4rFnBXTnZGqy4/gJ3tEnLuDuE9No1yV06xFbj4gs1fmpJm0l5EGvwdvOiUzqxN+fpTU2TOky2dswPcPBdoZvPXWerf0ia3zN7hJm2OHEqscNRfG+d0xs495nVkX/FPCTToHGos/XTi5zxzWCtvPns/Ocbpu47jATGZGJmBzMMLjeZIskRLns4kl2NSrsiPrZL5XrH7SXeP3YGAPVmCMXi76Ite4i73JHaDtijX1zsUVN8dB1HUtC6LaN5hkb2oLnxnRllrHjVug2VUxdZkwcC9dtU+zL0e1KsgM/IJvNYloaqsy0lDMlWk2PWFKsB0pKu+Vi6A5HXtjEb6+vBezcsoO1FZFJOj09hwdkJWTgwPZxCjmd6TNr7NgnYGRtH1oLq+TLGQzZW7e6WiEIY7KDYj/+4zeX4KfgS987ysS40GVrr63x+GOPYg/0cdgV8Az/5Xt598fvZnhSVDAPrtmcWqkSznUZlNCWF188QVtXyGREpm7X9jHa7QoPfPNpzs+I9TN2xWXYTQc9l8OXosVp2ED3PJJY3EvTj0h1lVfOnafgbDbDvvmm61Bsm+enRRUsDtsUVBVXwicMVfSqderNXlUq8QO6figgv7JU9v+y957hkiVnnecvThyX/npTvqqrutp3q9UttdRqqWUaBEhCAqQBNBoQ5nnwgwAxD8sAO2gHJ7QweAa0rFYIJ4sQQsi32rtq313e3Kq6dX36PDZO7IeIzKzq1gLLMx/0oeJL1c2bN/OcExFvvOb//v9pnuP7IUU6fOZG9yeULtri34498QSL3RZhOeDCOUOcUGmErEZbhFarTZYleaqolWojUc/VtQssW1j0UI33wtkldl1/gOqkmZfpUpUrrr2KzmATbXsgg8oB5iZnuf3qa6hWzVoP9u4k655jbt7sheeAg699I9GZszQsxvmDH/5Tdt1yK4P1M/zd//glu/YOkrklHJt9DIsa2yfm6LZ7dHrmNUfmVq5A0Fyz1To/YG5ujtQ2afcBJVw8vzwiKVGFwtMlmvkY1/8H7/4h7vyut/Kx08beHDvWZ+fkDK7ISa2xTDfXuGL/bl5xxXYSC1vpZZIDzywxt81khN1Ucea5I8x6kFp4wL2fvw8nv4ubb72L579kes9uvOU29k3M8amnTc9O49qDxCesfbKQ7YUrr+HRR5cILCHLrnIJHUWUnCrK6smsx9CNHe69535mZkzlIY5jtHQIK1a3KA+Z3b+T5kafzTVjO3funaFZNAmrNaYmzLUvRz3m9hyEwNL0n23x8Y99jDvvuJP3vPunAPjwZ/6SY08/Tu+1d3LshIHS7Di4nc2kj0V+M3Akp/sr5JkLvtUIzHJUJJDCHZPciBzXc+j2xj3Jx3pwcHFM8xwlAxJL7OJau5RTEATByCZlWYbjOJRKpREMOcvUSJh9OKQDQjukw/4VUjr9Hh976NgYXt87R948hfRjupkpPTzw1cNs37mPqW3mvHBbJfq9DK0lesiKREiuCtJcoy2hjLQw6RGxiCPxLNW4YzPlWZKCG+MIxdaGOXuCMKRwC5576snRtR8+fYyVzrjPZf3CJmtpD/7+yzzxqJE5qQQeK6fXUDPme1dXQq6+cTcHrrySr91vNMhCX7N2LqPZd2gvmwpCPzmL59aJrf5EFK8T1spEmYdft72vmaIXDUZQZRfJ6rlltlbXyez9xv0B3b7ZY56tzElXgdS4rtnrviyhxDSlRkJuK51RPwYcVJohR5pnKY5kZIe7QK0KTeCNbzLIg6AsWFtbYz0SxLZKu75xmlanx/PP2Z5LR+P4CqVyhionQq/g5zmOX3DyjLEle3dfy4037uUf/8Fomfb660xkkyilqVasNlRRUBQF0pEjORGkIE5dhIXDSa9MOaxwYeMce/YZVM83f8sbeGKqyqHHnmW9O17rv/eHv4oeSs2IMr5XpSRcsJWINJGUAp80Stl7YA8AX7v/Pj7y4feybcFU5gZFTLfT4SUv+XZ+8keM5tozTx+hGswy0fDIrdRLnMUEfh3PVlW1+nsq1ZzuoDlCQy0sTnPi9Almtl1auZqbDFhrmzPl2utuQe+NafXGGnOT0x79fkwyKPCt/IQqMrJMI4ZV3Ny8f9vibs6mpgKTFRn9fp9uP+KGxf32Gnbx/PPP4/pj29yN80sEwnOlUJkD9mepE7qJwg+mKFfGZ23WMvvpD95nnkvhZEgEnq1uZ7pKXK4T1CfAQpX77ZgiUbR0RKlRse9zOL++dMkzeeL+I2hf0u+atb/tzluBJ7jmbe9g6RkD7/XTgh233kGnZ/od3U4btMvMfInA2qmBhR5HrePIYRVcVckLjZsYWxGttVg/v4zv5yPJ+CzrUi41UFkfZauvejCek683vmGCqwfvM5pPD/L5f+Wd//JY2VxFx+OG9n27Xs2xk5p+z0zA+QsTaHcCNYRr5C54MdJL8K2hUbmDA2Qqo2SNsyo0eZ6P+gaGLIBCXKT1IT0KrVG6wB0GXNIxi3MI8/JLZIVgsLFKpi3DjxeQD7bo6ALpmwUWBvN4QR2nZIzBpCijpgP8sDy6t+Zt83i755jd5vD+7W8DYGZ+D1sXNnFcw9KwsG2Gw+tHUc8dRfrGEQ68kOjkaa6umO8q723QWy8Y7Osy7Rvjd9X2BoN+i7V1n9AGCHsbNYIdszz/tMFup1s5103mlALJIdsDMXnHfva8fgdLK33Oa0uksLnFS16zi888aTbfU3qJ1902h1R1np42TujxjT5rFxR7v2mR/AbjjH/mmccozc4wt898/4nNEhce2yKdjAhuN47+Uw8+yoaY4fxGmzu2G6jQ//b8Ke5pJdy21xjpzx1aI0lStO7woSdNo+gtcw1+7cnD3ObWuX7GfNbjR8osZwM8y/50p7eHVjfhPYPTXBGZ+fzeq7cTnxjw+LrgqhtMQHm7mKL78Gme3TAG4WXfcSf6zbfzxXseY+mocR4np3Pa3TUGgyaTVWNMp2cnSROBttjfLI2pBhWyxBlpTBQyJdYxnlcCORSrzvD8AmwfmHRCSiWFKgZUSrXR+iiU5NobDtJsGgevneR0dJ++xQyXsoLp+TK6rnngcYP5lgPBfDUhXnKpTQ9JYCJilXHuqHEKW4kx1utpn7P3GVHYuhvzm
tt3MV/zCOvfbeb9nT/BTdcsoKzA9K23XMmtheRcawO3b9b+a189TXMQsrZu1t0jjz/A2ZPnuWKqznWzxnlt0aa8f4H77/saqZ2HwJsg1YrCisSWfUmAg5dHTJd2jZ7Bf/jut7KR5Bx7zvQEiM6AraqksIQMlcDnzte+krXz5+h1h06lS5on5HlOYlmjHMclTVMK68AZGJQk7nXxLBZ98/ApeudXKFwHrAObxzFIiGNzWDq+R+B5xIMeWW6c+sAvkSeKIs8J7Z5srm3hZoKpCbOGzywt8eTqKrJcZddu01PhixKf++zn2XHtHCUL8d1956uplgY4ibmmr/KnvPWb7+SJrzzAsXXDwnXDHXfydx//c247cDv/5weMmv3v/vGHOHFY4lfNIbswO8HG5ipZGlGpWJ0iBEL6OI5PHJvnV2QFrY11pqamRs98ZqJGMojp22fsug7BRI2gOWYUfMOv/mf+5B+eYDU3e3ZyStKPWshKmekpc+p9y3U7ufbANAJJwxJKNIRi4Q2v5qTtx/37ex+kXKpz42sOgGVxK3kp89Me68unefB+I/T9vS97Hc/e/cjILsftNiVLLCMtlHbX7D7q10wTb5r9UlYB/WiDI0fOjAg1WpvLFI5iY2ONwDPX1B/EaFHg9+39epAlOY1ylTXr2PSimJk9u9B4HD9jIDhaZ4Rhmd37DcwznWgzdeQoX737C6ycM/fyI2//ET72mT/h0IN3I5bj5M4AACAASURBVG1wfPJ5WD19hsz2DZ89dZiZ/TlJ5uJklhk0b+HKgCxTRjQeA3cVDoiLAqDOluDpJ0/Cd5qf19a2eO2drzH31TNnT5+IuYXGCFpXFAVaa+v8mr8zTKyKtBjDyig0rivJs6G+khFSJ+5TshnIrc55UAX95pjspFFfoN2O0GXLShtYUgGpaPfM3PT7iemH1B6FhS8JG1A4lk1P5QWFKMhVRtcmO77euOGVr+Dxxx9HXtQTuLo+IEvH97J3+yRPP36c040voXrmrLOdx+zZa/pOemnC1z7zMU7f8DpcC8tqDbbop23cfAoPcz+t9dPUJ6bxLINh4VbJVMTC/AID26uZKYUjJKkNrsIwpNfvGH3IYc+HdJmYrpHnyUUsxgW6kGQjAq2IXLWQKkCOmGQLHKXYs38/8zPGpu/cs5O/+uuPQDGGeQlpbI+z+FoA5nfWOP/lj/Dc/V8d92ops0Zqtdros9NuQRwptLX79fk5QNPpb1CumMTp/oPXUCrV2bvPQD+PPH8UpVPiuDe6Z8cZa6cNWygcx0ELhzyzWl+OpNNtUWjFzr3mPD6xtE59ag+6OIInx3O6Y/42Nm2iOHc9FBlpkiKGJFBuSOJ69L2IZ88a+334VIuFHbuo1WywGlzBVbfdTmXhAD/5C6bf/+iRUyydPsO7/9O3cb0lonHSFq1mZ3T2bmw2qdUrdAddsJDKTr9DgSJW4z5pgNnqNIefMeu1vTEgp2CqOiaKyTMPKVMDC8yG9ydxpYsaMuVKy7B8foXMEspkaR8ZKgKt6ayYZN6OyZexY7rB4/c+NPr8c0eOYNqthmQZgkyDss+8UZtAqIwsjekPxkLimW3LuOWlJlkjZQUhNKH1WUsTJU4cXeP0qeeRlpjiwM5tvPzVL2FmMeSzf298iaPPNZnYsY2Lw6uf/YW301zfpN82z+rO7/gBvsInmHRBXWXs9/7JCr2sT2bDmqju4wjNIO2SW3K1idTM4+K8Q9nqDSbxOl3VQ/XMfqyFEikU+qK14yhBFHWM/qV9LWTcb/r1xjdMcCXrJkM7OTmN60gKOY4KhRAMspwgU1hdWpRSJFqhM9vki0DjEjttcjnuN5DBJq7rM+iZ3dkdnCUs1ShsZadUbhhjnAlyu8izXBA6IWFYJoqM0S9X6sRxPMq4ub6HkEYMd/ia40gcQKXjpuGh0KJr8ZxT89sISnOEu/s0t4x53twaUKBZ3NYgtzTAUbzJjh0TFMo4/pks8AqfrD9+LsuLe5lNZ8g6cxzq2KDv2Ba+P8vqGRO9Hzv2EGePPkXU7hIumF407Xk4RTaigtWxwhcB1bk62A2kH/XIKJifXSAW1pE6HeEf7tGwh4LwGkSORKkJfJvpyUuKI/e1cMUM173CGLuSdNkqSszdYa79O8rQjmLm/JCZXcbBuf71MQkRKtGUtMlYXvWTd7C1vEGwZhbxVdfXOHBjF6k0c5bJqHZ9h+3tJjeVCmTfLue0y3eEir1TBnu7dKbLIFmm4QcsLlhSj7Uu77z9IJ4W1BrmtcHqKqz1eelB8/3rvR4PHj7N5MKrmZgw13kybvFMM8bbnvLIkqnQrC21EIC2CvHbF0q0v/JJ7rjyam4pm4C22VmlHe9hcuaVnD5j1sb5rWPUaiVyG1y5aQmtNZ4XjBjogsAjz3OiKEFah0SGAaWSRNjNIJ0yg0FMozFLfzCmIX/Vza/hh/7j9/P4oyZwOruxTml7DddmtToFVCpTRL0tpm1TtvAzZNBgfkLRsY7M9NwMqpjisdOnAeh1jHP4momccLfJFu6+ei9OGJKIOpurZg0tLPog5Ei0VBQ5ObAwPYs3YZzxXEoO5gW/89u/b9bYQPGBX3sfjekJcsuycebo43z/9/0XXnXna7jx5aYqXa2VaS+dY3vDBOKv/aY3MHCgrCXDlOn7eA9h1SdqxczXzD7SV+8m02UKafaZylP2XH0VJccls0kSF0GmC1SWIKyDoJShvs9zy9RVFCRpSrfXNk37wM/+9I/Sbbc4t3GBraZ1kgYx0WBA1DF7qJUMyOKMLO4T9SzbIyCRKJWOKPEv9DpcOLc8qqI4cczTn/sc0gnx3/AGAI4++yTveu9PIoQk6Zn3DZpdyo0rKJyxnRhM1jnf2aK7Yubziut30H2+zXH9LKcic/D90I/9Rz76oQ9x4oQJWk4ePU+SdgnLVRj6Hp6LygvifotyxcpPoHFlgCrGB02RB8zMznHbK/cAUKmUEUXMf/vvP8cVXAvAe/+Pj7Nw4GXU5ZAeuszi7E5mt5eYnrKC5NMCcHEVo5Oq3++gNShrl/Mk5sjJDiut7qiXMO77lOImzRNHuelK4+zcfsuN/PKvvB9ZtxTgqz3kjCVHsoLyn3v4Gf7vX/8Lii3jePzzhz7K9MwMQc2l1TfroN/KKIV12qubo2qa42oct0TZZl7jfIskiqgvTqMumETU6kZMjKIxUx+xoTWbCQuTNRZnTLCsDrromw/S+Mr9PPWsYW38i79e4Tu++x3ce++XiTxzXmzE5wj8WSLrQK12VuisxlQmXIRl2KyEdYSTmr3gGttVkiEFGmeYsgU+8ZlPct3uO0Y/f/yvH+LYKz34ES4ZGoUz8u/MmWaqVbbp/SJh+NHfaEGe5/g2IZFpiRYar3WEpm3gzLMeu/bs5hhNXv16I6vyUHIP5Yog6VgpBjRCShxXkNvgzQt8/MChSH0KW5HR1mHTFwV8Qgi6q+PAambXfvIioXVu3E/1yld/K3v3X83H/vL/Gr1Wb1RoXRiv6X1XXsvTPMJm+yQ/899/A4AvfvbTPPXwIU5nz1zyrJaOHKIyZMJy
NI6v8XSXVnPcw5E4AbG18TWvQX/QZq5eYdWKovc2ImQQjvyIfhIjHI0XeqS2x7NQCifJULkYVUOgoNAJvu/aOZB4so52shEBkoozPM/j/OoKBw6aCka728H3fZqdMfKh2TLP6NnHP2nvK2D56MMUg3Wcig3iY40MHTa6xt/YtmMvvnYpZdGoKhb1c8hB6QzfMldq6ZHmkpkZU/k4Kk6QRDFSjoW3pZSGJMUxDv5wONLDFcZfDGsea6sX6KbFiGBgfscUG+ebzE77vOQ6E7wdAs6d3WBjzbjs2gtIVUq1BO2uWTeNusupM2f4m7+5B2V7rHbtrTO/czfNjgmMs2bKiY2jHHvyKOtN42fWJ30KlZJGKzRXLEIqUnhBxqoluFjcvotut0u3F+PZHshuN8V36zjZRdpBwKc+80leOLYu+v/R506+6PcvHja4Wr6UibCPOQOe4isAvIevwE9f+pc/+FPvQEqBNae4eYmg5tFtGfvzx7/3V6B9Ot0+Z5fHeytJTUD68KPH/w3XZ8ZhVjmM8aP41fHrT/HPl7zvv/3X91/y8wf5SwC+6aU7+PeMqNel2TZ7LRlEFCIlGiIBVMLK+grCGRcyUC7lwCUhJ7fCzeMw6+uPyz1Xl8flcXlcHpfH5XF5XB6Xx+VxeVwe/wvGN0zlKrCUzv1BC5VrCuSIWcbRKZmT0ss0jk2faa1xtRzhybUjUGpAL+jRCGbsp66QpxopNI5jsgOVQFMOY3xhsnnCj0iUouyViGxJtVQq4zshuXIIxbhELaVE2QyRg9EZ0JpRj1WWJUjPxZPuKOPkSpMFH16nlB4qF3gTszQ8iymvDiiEZnOjBbaHxPVg+ewGltWauhdSmZ5m/ew4K/Dlv3uCXGboLCa0FK5KxNRKPluZyVhMlaq4eHRFgm7G9hnMEoQ+XQt/K2cuvXJBtBoT2OxWnrfQTkBzdQU8k6WK+xu4JQ8vMtc4UAOEFzJVzWnHJk7v9CN2T88SOZIt22+g3JygGEMXRKGJ8pgpr0EnMM9zfnKOQZQQbXVxrFZKpRyglMKiX/AcgQghT30alr2rEvj0ijJ+1aFs14Y3MU27eYHHpMm0TNQnEUxybKNJduG0mWPpURWTROWcwarJZm2f2EVlZ8KDHfucagvsuG4n9YkalQlzf+e8gnK7YGahgRyYydn1+pjYnaBuq36DKcnE8iZX77+O6stNpnxmZobF+e187SsPc/6syZC+7VteRbuzSVgy2bxceOgiI0kiHFuV8p0AB8nc/AxbVhNove8yPRuS2v64XMUEYdmIYFqV3X/kQV73ulcDuWG1AtqbPTYGGSqyIpgy4MkzK9SKEguTtvrqpZxZclhcFIC5v2hjwD/+w9coTpvn9OPv/WZ+B7j+dXcRpWa9rLdTkq2IUPQoLJ04+TyH4z41bfa25waoQlFNHHqW3j9JoaodvvdnfhiAD/3V3/LgM+fwqgW79lvBwPJOghC+/0d+mEVbKQtFQU3KkZBrAUxamblhbxqAJ30SlbL9pabipTeWkSsJLasR5rvSMJPGySirKnNwHI1kLBoupcTBGfeKSGMP4jgaVYB9t8B3HLQn8TwLxYhiHASOrXhtDPrIfICXa1oWxhZlKSpJKRxB3/aLiDhDKk3T/rxt1yQve9nLSLMtNiyt9K1770KfbxMph8lwCP1ocvreFfIhY+Ib4RM//34GrTU8C0c51G9x1fQCCzMBLZutO3roOD/x4++hFpq9PTU5z7kLp3nwgUd485u/HYDHDj3EqaMnGXQznn/W9KdsZm3a7R7L50+Mnnk/6iF8h9nthrL6meef4i1vuotaMjN6z1S9xqB7itOPGTjK2vJ5egPTX1i3mmTXXHkFr/2W2zmwOMmuBZPhbroh/UGfIxfM/JUnJrhmp6S/mrG8Znpa8qRDuXIF937pcd7zA+8y93zvQ6ytn+eKBVPJ6tQ94opZu4NNW9nxuvzij/0BB/dZ+uutjM2142x1B5w9a/Zap5+TSMOIla9bGGmtRL+/xW4rLjmzuIP11cMsbcZgq/CealJSmposjdAXc3MzNDdy+l2Tlw58F1GX7Lnr1aN19uhzR/nkZ+9m355d9Czl8GtuuJ7nDz+Ba/vc1EZCu9mCuUlyKxQ7KPkkaZ9KJSSKbBU1BSkDen3bh/IzcPPuA1zojWmgl9ce4NE/+OcXVa583ycdimVfBJ8b7g9dGLihYLz3cl2ghR5ROKtcUQkDQgHKMknmXonvedc7eeQHbubauwzL35ml42ycP89sw9jOxFFIxwec0X53HJdCFaDdkU6gGhLfiTF0f1hJW7AMgo4Pmyvj6j7A7//GL/G9P/5j1CYM8qBLi9OnT9PtjatbaWFs2Pf9wh+hrBBtqVIFNOVZc52q3yYZ5DieS1+beXAdQT7wiOIm4ZT5/HirhVSaYghRlRnX3PoSWq2EGUtHvW/3Pu6//wGCkrHdEk2eZmRphmNz4tL1QEMYOCPZDN/3KbSgUrFMlq5LEHp0k4Rts6bas768xpHjJ9h18Eq07fs8f+oU0zOzbK5Z2nU2Cb2QDrD5tGnVOD+Q5E7OxNQucteshWolIAgCtm/bA0CtvoAWBUkRE1lsolh6nIo3ycpaRiYsjHCyTiWoWCFccEjQKeicEbW+VgVKaHDFSIDZ931y3RutsqXTA3y/RNxr0rf2tN2apNXa4srrruKO1xlI4wf5Qx57/llUbsWPCckSh2ZrjViZuXV1Qqaa4LUoV60O03rE8plVAlup70drqDzFcaExZV4rT+Tksc+ZUyvkXeM7tNoS6aX0BxYJVISoPCHwS/T7xpZMTDTodgd4qswPv+v7ADhxYRnhuzzzqKnmDDoJsYrxKx5795kqTX/QRquMJIlHWmJ5rigKEMKcRanK2Vq5tCL2bx2//1t/8i+/wRRueZLP8OTX+fVNL7kNAAeF44QId6gDJ3CEYmJmesRSGfVbZp8WwagnsFxzUcLBt7p+//CPX+C73vbN5HKWG29+BQDrmy4v+aY7mJwoc0Vg7lOG23n0SI/ZspmDtCGQsoYcZMwumOciVZNDD5+hPjnDrqvNa/1MkfYzepH9/kbLVHqLcWWqUA7KSRCOQhTWgHtjyZevN75hgqukZ3owhHRwZIGf5aOHXUgfXwe4QUExbDp3CjxVENtGR4lHRQb0dEoSt0af6zhQJAptncDO5gWSjh7puURkKKHwpgSeLeUneUZeaIrCRbpjnLn0XBzLD18UOUIIpHQY4mbiPEcWGt/3yVIzwUppKBgdAEmU0I+3CMuNEQWuG/iUQgWFw6kTBkYyUauyvtnhztvNYtoxOU8r6bGlxxjXvdtCJsNryUvNEda90ZgkXh0wiYFLlSqwfGqZuQmHHTcbg7ttooTQIa1V8zed3EMNBmSpJk7NgqnWp4jjLVAlhNWrcP0cXRJI29w9ISYYxJsUeoEgMO9ZFCGRKPC1pmHFxxynRt0VBLb5ca29REVO4biCOft8m5vLOLpMfbpG1fZ4tNsRWZxQrRmnMEoVZd2gGvToJMZBKMIqRQSbWz06gZl3dcoHrZBWV2tFnUTkbRD
TWDQDa5nDoNticWEHExb9cfj0WQgcGtIqy+uc5moH7WuK0KyfchogsoQBmtRCL/zUNH2WLPSzUH26GWTFx0cU/rPTVbbNHWRjucntd1kHPYypMIe2mkE6V/h+Hd+fILXrWosCRcHJlVVczzwXz2vR3NpAaHPIC2qkvYxSySNLx1CWTJQYKId9+42Te6dOiQuo2oO/7Ho8dmaTq/dVSId6TqnD/p0JnjsNwhyg6+e2+LZbp7n9l98MwFLbzOvXjsZIC2N18xqOF+NVPFYOGfKItLvOseYWYsvqOd2wB0FIP+1hNf3QDog0oFo3z/KlN7+SB+/+Eo35G+hhHKKFuQn+6G8/T7O5iWvPC4WkcHJCSw9b8j1UoihLaUWdARc+9bl7UF6Z+/7m0wAsr2xx1Zuvo2T3/1p7i1QqfO2QWjhhBZdEK3zpoK2JzPMC3/VG81lYIVWhCyKrb+TWp3CyDFyBN5RucCRCCHILARJhCNKhJBycsk2uSKhIl0wX1GyAFxQCx4Epm+zQgy71oIHAZW9onasUEtXD0Q7COi1ZJtk7HdLMx9Dopw59lReOEy96Bf7s67zG98Dv84fm/3d9vTe8eGytHWcL+DMOjV57gM/yC7xn9PPv/dL3/Kuf8xUYfvOlowrM/xsu5K3w1/wP8//XYgVhz7/obbsc24cR+myceJh7rF5OUA1prZ+g0VCcuWBsS7fngScIc8HRo6bHwGvU0MQsnTVByp59e5iZ2kaz/RCBMoFNZ63H7PRe3vLWt9NrmyRJkRckWcLxJUNU4fp1Kr5mvbkM20wSYXqpy5kjh2mvrbJ7r4FwLR9e5Rd/9sf59Q/8PwB80523I8UK3Z5D2zqhk9VpdCGJBimUzHp5/wf+4kX3/s//9JlLfnZ6Azxv80XvcxyH9ZXeJa9NzdRGScMszSmKAvdiLTPblD+46G+cRp9eZR95ZvqWrrn2On71vT8PwD/Z9/zwyk/xZ3/0R/RPXQq34ydvG33fysnDvHBs22/sRcnqKw76BVvLJhnp2X6sPM+pVCr0uJRGOVWSxW2mx7NL60Wf/08f/Z8APPzAZ1iYMufY8tJhJAKs5qLySkCXhl9CWTIQlQ2olhR6coZ2c0yQIRUMObkd7XHo7q/xitvfwgN3f/qS761sM3aqWq1y9uhhbrnj1fjWqfO8EmE1pN3qjgLfPM+p1+s4lngrTVMGUUy9XqVpeyBzDTiCoFYhtwGeV6ng+iG9ztHRd69tmXXQtP2i89PbmJmvkrdTYtsXFasubiAZDAyUdmP1OeJBRqEEWOHmVqeP5ywj3ADf9v9cuLBOVDtHt2+eSTmYYse2MnmeU1ioablcZmJigkajPuq58nxJya+SWOKIR594lMPPnsfJp+k3zRzv2lfj7NL9vPLmm3jiEQs5ezs899yzODZ5HfVbFEWOpoxvA0XhuSRBitxeZ3nLipR32/jTDnlmRdPDAaHwQGoKa+PTIieOfXItmZg1voNb6lKvbufkKfN358+fotXu0O5sUK+bOS2HZdbUGRav3MWuK8za2530CKTgW95ooN/NZkQ3XaHuz/OFL33KPs9VQrdE4UIx7NcSyvZBmnnxnRLbd9bZv+cApdAkKQuRk+YpjqtHvZLSLVCpJE1sH5/vorVCumU8z/oSbo7rBaNAbnF2gaDkkzoeWWECWk830D6Efkr4HSYIlEFhCguWtyCQMRV3nsIdtZ0hkbi5RjgFxahXUuA7eiTi+w98gRvf9D0c3LebL3/G7I9Wuocjx45QCq/D32f8oL/6mz/jxHOHWbFFg15/nbozQbOT8+u/8l8BqO0Muer6XfzZB/6Bh4+ZOS7PxpQzj8Ceq+WSYn1jgO9aXQRABgkqCRAKfJs4jfJxn+jXG98wwdUoA6ahyBxSfIQ7ZOEzzkzhjC/XVRKNxMJzUUoRa/BxyS7CQnqBT1Zk6NSspgAIdZnEmny/EBSFQjpVcmEJJkQFjUue5tj9Q0FuWP8sk5WU0vbF6JFuQV4UlAIX4WiiIcuP6+KGksQKVm6tnsDzAo6veATTZtE1JqYpuVcQVgpmajZb12kSeD6PPGaM3SPuY/S6MZ47xoEqXWNlcIq04zBpD5TWoMNGc4ueFVsUfofGfMFLX3YFV19jMrJR3zChhcFue92KPE9xPTnCbhcqo159GSsra4jCahKUAqKoP9KhEEISDRooXZDnZiIcx6XINEEQmoMHCHyfjfUt2rZXZy6okSZmI1eqlr0lk+RFahioNi12ertDtTpLp28zExLcwCVNaujCGKhSWJDnBY6YZNAzzyZLC4KgRGvTan0UgmQwjVKKatU06bp+wKYv8ANJz1oaEXiEQY14iHPvDlAyZtvsPMIG+qFfQuIQoUYq4Z1Ok1AWhDYA63Vi/Cxix9wc8TDIjiOOPneKSs3B0UYDpbUVM4jWqVZN0OtqgYozcl1QtQ3CBYIozXG9At9W+aYmt9HuNNHDTE9ZkiYms+xXx03YazE8dDLBt1WiK6/czZFDS3z8i0YsrzI9TW2q4KGPPsYrX3UDAC991U10o5TM8dlqWVaqvTuZu/qlfPIhY0gvtM7B22Bt6QSRZatSuoCiwEcRPnkPAOdmX05VzpFOmmdw/NAzzEyW2TlVpt4wgWJaKAZOhLCVT5m43HzDjTT7HcS6ca6WTwWcqFTxS3D0lCGmqVUWqTfKNFvGQDq6gV922UravPIKg7NnJ3zwE3fz/bdcTfmhD5t19swp7nlwF9W7TJ/JFS9/ORsbmwRZgWvtSw8XdAYiQ1rctdbK7n9LTOE4ONqwkMWxzZS113BdF51rCivAmCQJfhhQssLGSincwhk1a2NneagJNdLVEthsPaPnq5SiXK6O+lyGOjCOCMa9L65LnFygVDLf9xt//resra/Qbm6RDEzAtW//AQQeWjiUwqEOjEMURaO/S23f6FC/CCDL8xGRwZC9K6j6TC8ssDBlkjnf/dpbuf/Jo2ycPcHv/8EHALg5nuEm7TJ188188+/8DAD3/9oH0Ylgw9rFvB0jCPHDFD1jIqddb7mJVrfF8sNHcG2fiVSQ5indgan2dFOTUKr7AtW32jdb6xROn6sWb0HsMWQDv/1bP4dcXWKiauyGXGzgDhSlQcGgYbUFdUhcVuiqFUgtQW9iEVGqULb7MXJBeh4qLRMNn0G9Si4Uyp4Nnc46FXeKPTftIolNJeLUmWU2W2d46L77mdtj7K7bqBBOz3DHHvOeZivjVHOLHQt72HuLmftveuub+cT7f5dWnNLYa57L8uPPc/vL/hP7dxnH8e3vfi+62SZJ+0TW2YoHKb4jyPNsVCV656Ef5KabX8XF4x3f+Tb+7uPjHo8zZ09zYenFoffZ0xts32kqiq7rcubUClsbXSYsGZAjBQoHLtI7q0+VQeYjsc7eZkyvrZi8IuZs01znK1/5rVz/86/lT37rF0d/d2GjQ6U2R7B9XDVeXzqNE5ZZOWnOw9nFvaxfGIv8AiSWUCSygt2OGJMAtDaMEx+GIXE04IWjECFHnxnn4bdt30+zt0XUHne7OI7PYf0Fnh/6F9JBeh657XPL20
288hSxm6Dz8XNIFWT9cWAFkOoU1+7vPDLX+8B9n+aGW0wytd/vcuHCKs2WsXfDGsS582tIS2zkSEmRKpIkGSV4wKBqcsuAGXou9VqFlc1lUluxLHkVJspTPPXg03Q3zGs3XbWfc+3n0O5YFF4WOQoIbTJ1x7ZpojTDq5eR0txfRVVwEKNrqleq5DpHOOOqoedMsLy8wrmz53EjW6lKOiSizMASDLzmzW/CcRN83yf0hiLJNrGNw7CDRemC1JX4ZbOP3/mul/Lnf/KnHDtyhmceN31EZ089zRUHr2TXtQfZ2hrP3+KeK0aMl3OlHWjHIBNc258qKEAIVh/7Iq71D91SmVRHJLYsWjguCrPMc+v8h9IF2SGXgtndRrQ4X2/SmGnQesIkmJwcZubmGUQZSc867VMQBJNUGtNEqUUeqCpxrkakJZXpOmU9z8ziBLUHrE9QFBQ6RxSCYigGq0aiqQDoIqMXa+76rncxaUXRzZoo0Dg4w+qgfTbjs8gxjNdWyw4gVVZEvhhXhIuiAK0v6vWDQkvAGaG2lFJ4F32XSkOagz6FHle3h0MgR99XFIXRz7uocrR17DzPb5V46w8YFtXW0hLHH3uYs6ce4Dc/aHS0LqyfJ6ePY/2iYKDpZgWdnuZLXzOizAff8Eai1XN88dN/zEpsUWmYc03bKmqlXmNqoY4bjOMNUQQIR6G1GAX//rgJ9euOb5jgauhcOI4cQfCGAZd9B8VFD9vRxugOD37HcRDaLBJpI8slNtBamQyz/aisUEiVoywTSmFZAJMspVw3RiRPBRrTRDlmB9RE0RgCJIRZfI4jiSKz+FzXtVTFesTQpAsNzvj+tDbZpVQlpFvGAEatAVLXCGfKNBZMxnK5F+E7BdpWD7LUo1bzkK5iqKW9e7/LFddOcn65hepY+uI8Ytv+EsIqkUt/nsZUle07t7G1YZxjVfhIKWl2jEGuVqtIGZps1IjtKaDVTxB+eXzPToGQih3bLQxCFyilibMeoRW9YN4p3gAAIABJREFUk45H6AfmWVkq7zxJmb9ygroVI82ylDSNSXND3gBQaIEjC7zAIx5YWF6lghDQts2kfugZB7NUH62FQbdrmmAvYlGUQpAkySjo1VqTxsnIaABkSUKjto9uu0PZ0pn2ej36W6akDqDyMkVRoDQ4YiiEGdJut8l6GuEMmeMmQbtkluY1iWdJBw61eokkHZOkCLmTpbMnufdBA4WaWSzxlre9cUR5utnvIaRDvV7nzJrJ7JbLJXSg8eo+fSuK3B70cYIMzzWOTeRpRJghNaT5ONvS75/gzDOQZ6aC97gOON9cRjVMMFcgSTpVtJvyFx/+BACrnRpnVzqUfY84NSutVK2QJx36m+aalDXJa8eeoWzFnLMiwdEO3VShM7POFlRKd9CjF5n5W9wxy53fcjudCwOesxCnWjDJxERGtWwOjpyYufkqHhrPCjDHHePkb/ZX6OfmXo4eXeGx58+hR4G/DyjSnuTuZevIfC/cdHAP93zkD5GOabzd/4oSIl7i2b82WcD7n3qKW9/xdrqdHoVdi4mIKQqFK7IRxNAwhTqjDKrWGm0b+IdrKu+kyEASlgLcwLx2/Mwx5hcXEFhZiTghlZeaXWNHnFEDPkAhADUWLc+yzAQ2UW9k886fXKe5sck1N15/UaVTkCvFoGcMXrvpEicRoCnXzDpf31hB4+B5Pi1nXKXI8xy3a76vFPomsHIDinQMxRZC4jiCkpWICPKY3tJpDj1rAnZeC//4+XuYv/oA//uffgSAwd9+jHO//leod7xx9F3r8z6N0x2q8yYo62XLqGSTOCmz8YQR+o22e5w59BylExsEDRvkpjn9QhPY7uOSKuikh2mXysSZZZJ0NFOyzJFSi5Of/XMAJpIzTPvQs1Cl7vqAStkndXO8yASUQaBw4oR+3wZ82qHquCi1gbS2bNKXpEWO9Dy2TZhrV8ublOsVcltldL0AZ2uAkiUKe3689MAt+JUSuSMQF2wFaD0mSi7Qqpt5KbsuB11B1Iw4dMYkFiq+Ytap8NzJI3SWDEzt+muu5Zd/5ue40RL2PPypr+CWfWpTE8zMmCx1bX6a5to601PTWOovbrjpZbxw9F7gArz1Hf+BQuT8Ib91yesHrtpBYSHqQwd1uCZgCJt3GKXEMWdknmU4llCjNlOiuxEhhGB60WabP/qnHLz2lku+a2pqhiB0RmLH/TUTfPzZ+3+HwAq1d5wOLxxR27wvtkykSo1t4RBJQgG5Vep963f/KACf+ps/ZvfePZd81mZnA/GCfvWiSClV6qP9V6pWWD23hLKEzbNzs6aqyrgyYPwEyQvlRrUojBMDFPbM3XngGs72zRzP1qq4OiO0sL2ZmRnOnT3HVLlAWJ9Aa01fpVSnQiYnDNy1Wq9dwihYb1SpVCp0+hmrSwbWfeiRR5Gqz0R9igNXGH9jq7tFM+qwb5cJ/I/zHMrCj4dwu4d54EXP/P/vGGDm5quXcMHBUe7993+oJWSIWBn9u8Iz3McnLnnbieNLI38jy7bwrc+CJeiSjsYLXLRTQdlzpdfrkEU9hsHdOKk+DkgKLXBdn14/YsvirAf9lA3dYmPD2FeVZfQHfcKST9kyQrc7TSYnp3Ecd+RDqgKkFKMkW7fXJ45TClngaJvA1sLuuwJlq4OuFAgEyi4835VoBL1Oj9SK3cYDs/eEM0x7W+ZPuESkefT6aBG/OIiQ4uLkoD0b3NTumTGVw5Dd0XyO8al96Y3sxuhvLzr7tB4WVca2SZY9Dj/7BKtbxnbuvuGl3PBtb+FNCwtklrk2ThKypEfcNkgEHaVsrl2g3UvodmyC59RhHl55kru+8+VMTptkVd7NiKI+UWKeZaQynnzuNHF8URLGERRZARSXsKb+S+MyocXlcXlcHpfH5XF5XB6Xx+VxeVwel8f/gvENU7mq1002fRjQaq1HDWVKWVE6V44iXqUKtC5G0CgAbWnR08647OhK8zdKjT8LyShrRAGe59n+KROR9pMEzzWReZJYoV9lGnmH0aqpggiEcEZVmzwrMPGqGvVYxXFMIB3KZZOtENpBOxInSUgTWyXyFKeOP8seuY+5PdcDsLbVRjSXcfxhliowmWQ9FntzVInpyRqlYI5qaairJYmz/piyXivSNKPd2cK1GZpaLTCZePv9RdZF4qHyxIgsgoHnpSlpmo2qfHmRoXWBshSW7V6XLMsIVIgj1OjvdBERhj6tgclSNRoNVJ6y0jIles9z0CjiOCZNTJbBCypWnBL6halY5PkFwjC8aM4zXNfF82LyISGB7VFJ03GTcrkcEieDURZGSkHhlkwWxWZxg1KFzSQmqFSYmLe9L50u4awyzSxAoVNypfG9Gto2MYYlnzSrUvICoiix9yMJgmA0n1JK8jRFF2JUvet0OoRhSJouUtg+JV1I2oMuKjP3UCnXSNOUtJ8wXTVZcUeaNStSTWihF+5ElSL3wK4FpSKE0BQ6Q2TjLFOoJiiVXZQt689ISRr16GhboUlz8qRDPZhAatP3kSVnkE6XohB4NoM02MiJu20ym8r1R9XcFhsbJttjK
oWCmfkZPFuFOji1iy8cP499BHgln6QZ8Ys//XM89YyBHtVmq3i+g8Bk3KWr0EnM7MQ8dVvVaCxcxcxkFb+SsrhgsvU7dm1Hhg6nT5pMbyXURImmvM1j+dS418fvHOfkqQeZmDPXHnYiqtscpubNRbWOnuDYvQ+y/eUvIVq34BvXI/WBNENYjTetNYUaZ/aUUkjhoG3VCSBOBvhumebmBso2ldXrVQa9Lu2myf7OTM+h3NzYIzW2XYXikmyeWUceyRC64ECepoSlMqvnTSayXA6ZWGywee4IfslUAlzfw5UehaW/T6JilKUUFp7V73VwXY9MjiEcw6p7ZCEPaalmRNFzjS3kEgQGfhj6Ab6dVFWq4niK0uT4KEnyTZae2OTk0wa2dvvrXkPnkSc58Uu/CT9u3vPsRz5P2m6yZStutU5CeftuSjMu84umpyW67xjTRcbktip9a9OTqYAwjanVDEStvdYmvGKeW97307RXTHXywtFnePa3/4ld0S7WHrzPPMtBxoV2xC5l5mFyepbz1T24IeS2R7ezBkEpQFoB5mBmAs+RRn/O0qevnT7NwoH9xFlBNNQuizJWV9exyW7ibp+sn+AkKcpSIud5TJxHBGEJbeEkleoE9cYUmdWTCmJFMSXIKlVunDKViGP3fpHFzgbfs+8GZm8zZ4PvBqSbA4qOsSNPffkRvFziVitkU2YdzDVCenkGoaY6bCq9Cf7kj3+TH/nR/zKaq89+/KOXrLlPf+ILDArFCwpXZi1YGoEL58dQq/amydTXJktIKWltRhf9bvj/8WsAS8efB2nO+29765285OUv48t8ZPT7ztYmW+fP8cLxjne/my+U/xGAydkazlUVjh8eV0B2757meZr0u60X/a1wrMRJElFtlOlhCEWG43d+/scvfT8ZKnsx1XKuC6629OW1Wo1VlrjtVXcCoA+M3z8ibxKC++7+Eq95/Tdz95fG9NLzszOsrAzRCVUSwHWgUTW2c7PVI0oF0jU/77/2pZzjHPW5faNKgnYEDXKKYqhIBO3YIcs1ma2GrfQ75PkWU5UGbtnYWCco0Y8GdKOYf/q06WERvsfr7nodfmjWyv7vvgnfk3z6wx/m27/3nQAM+rY3WI9puoUGodXIdhXCwBU9zxs/A0eTpil5ZuD6YBBKWhcjO6IFaOfSCsYlFX3rTwkhyJU3omZ3dEGeZZTCCsKWOgUO7V6XsOQx0TD3vHL+wrhSBXhemWRYgbJyG2SKLM0JpEPJollEWRg9RWuqsyJHKQfXdZHS+HR+ycMRLoNBypkls26zFNoiZWrKrLGzp06gCkWaJpQsskqrgkGcsrm5SWIhacqSn40lDRzyPEeR07PaiVprS66WjZBV0hHESYayZ4rr+Ajt0u12qQpznSo1SItCCBxnSE7jIJzx3s6tn+y6Ll4wJMu4FFUxctJ5QfUJgWaMDhpq3+mLYICO45Cl47NOOOZe9EVrSGsDO7x4+51YOUtKQrkwc/zo3ad59sEKE7PbUPZeVOEYeQJ7D0IVHLx6EaZcdMVcQ3nQZs/CDVQbr6JrAS4D4VKmP66cqTZPPPtiUo8hCm44M/9y3eobKLga9jINg6qLIVxDnH/guqMmUNxx+XL4r9baONDF2MH03IA8K0bwoVK5SuCWKLQV8PQc8lyR58UINwwZaZqi9Rjy43m+EUm0MC8TkDkMBoMRZAxMMDfUxIGh06JGpeRce0T9mJpXQlQtUUPUIxAeK8dPUJ0xuki79u3l9ENLCGF1IUSGEC5BMBb589wyvpxjs7+BnxujlWQpnjeBHoqdonApCDx3BJPobKUIIfADcxCncYKjHIrcwxfG2AVUUO0uE8Ek2H4qjYMflkb3UnZnmJibpJ/2yeymFkIQeD79fp/JKRO0ZEmEEAGO3cC+rAACJ4gJXIsNL3lkysxByQrT+V6I1mJk6OJ4QJZl+NLHGSqRS7M5nbI7mpsoTdjW2D7aLFmWIIQ02kT22n0R4nk+3W6Xk1uWoU24eOUcrcwzztMKOlcUnkNuIXJRkRAGVTZ1j8KyDDnSJcuSkVheoQYkWQvf90lsqblcqtFaz9HaHZXbPSkoigRt12J33QSng3iAb5s7s8zg6SdqE7QtHCcrNFIKotT2ihQplXA7UZTR61uY1w9AtxMTViI+8jfmUJ+Y3E6a5mxuftF8Th4R1l0Gm33mFwzU8+mnn6SfCgLh4tqDrz5ZJXFaaNsMPOwja3U32dq0zEe1KfYd2E8/7/PskwbisfHcEa78iZ9i9ZQJpKLZjHvu+RqIDV7/xj3m2TlV5hbnqITGiWiuLbG+ukZGH6wmx9qRp1lPQmTN45C0z9gt87o3fCvVqjm8NtfOU/ID9GZGPx6zJJ29+0vsiaGzYg+0Sk55zQdt7mEh06wfeorwlquIXQuTJSNPBUIrcuXZtSHsAWKXnYUEOo6DUJal0nNwpKBcrZEmY6ip60ka1uktPFBxysWmWavCJnfgYp0gV2hyG9wFnk8nihGFoF4x89DtbrI5GDA12SAcQimyHJ1HI0YxMM6GUgphyWMqjsB1HNIhVAoQyhyEhd1Dihzf93Fdn9SKHWdRh0IZVqUhbj4WkqrykeHYBlalz8CJSSwL3iP3HIJ9U4hPHxu9p/zUgzwf93jYfvatt15Pp3eUk4eaXIhMX02jVCZJJVpD2TZFb5uZZd6doCXNXO0OGuyvX8vHf/l3wUIxw6THqdYpbl+8nY6FQiZLF9g+uYdVuzZOqSadeAKpMso22aAnazR7XcSWtfGDAaFXoj3ojeB93eYWJ86fJS9gomb6PkLXIwxDMmvfvLqPU5b47jTaPs8ii6mjidJo5CimUrISdUxDBjBZC+npnN6Zs/RPmP5UtyKIwwnEQHH2s2ZfCddldnaec9aZDKemqVVqFHmKZ+e0v5GRyYygMQnZwui5SznNxeMDv/I+3Gmf//xTJuA6c+IRqrUyLxxaa04eMwy023fOkKYp66svhuZNztZorncveW1qrjr6/9aaFe+ummf+5KEnRgxnw/GpvzTEG6FNGOzYPcfxw6fxpMub3mRIdVInJh/0OH4RvOz55w15xdu/7wcB+OiHPjj63Vve8TYA/v7TnyS1dvN//t77XnT9wxF3v36zejbocYiHLnntwXu/+v/5OcNxcWAFcPbUeC+0Mddz6shzL/4+++9X+XvzXV/+3L/6Xf+eoYEv8Xdf/5eBsc216uQosJMW6iUdcC+CiBXCBEpCCMNWBDhCEBYF0vFGOoHSJr0z2xvmSol8AVxsdG163BIihMAT6ag/ptAOvlsjTQSOGCe+SrJGqRRQZEP/rWR8OPt3nuuBdtCOGPkl6ALHL7Ftag+Tu8yekbrE2uoF1ldMQmvz9HGKTCEKTWrbAHr9FgiPQE5RtSLwAycj6fXZ3DR/NzM/w+mlMziOpGv7TIPAJMwRAsf6sw5y5O8O7106HirLiC0U2kDRDVTetcQlRW6Yq0Pbe+9KSZoW9Ho9/HBMduK7LoUoUKMWGzV6rhc/7zTJGJ5RQ9je8GwaBlomMHvRdI2G0Fzixw8FoREuxRD3mBv4
odaagrFmnkZQ9sZ2YU712Ug91q02bNn1yboh3fwCOrSEcllMHPdQ6XCOqyzuWGRtq4dr13AoJylJD9VJue+fvwrAqbhJIMQoB+QU5yiKHPn/svem8ZZddZ33d621pzPdc4e6NaRSlUpIQgKEBCJq0EZAkQcfEBukfWycQUFsp6dFaR+7G7XVdkCwfQQZnUEEQSZxaAxjEplECGROKjXdqrp17z33THtcaz0v1tp7n5sE2k8/vuBFrbzIPafO2eecvdde6z/8hgUTYfdYIaxt+M1KfmXg31dNcrVIALWw52LW+OGyKvdUNR466u6TFLXBZW1kqBeg4Iqy1Au/XCBFgK50c+MpFVIZx7MRqq0yWGtb6XfrJkEUJg25WwjRdF9qZZUoCCmqjAMHHKnwm572HE6dOs3tn7+DE6ecGVx/2MNMNaYq2LjfKa09/sYb2Oz1KHxAW9qJT47aoKzUU+Igpt/roH31JU4SJ+DgBTyMDog7CaWusF4SW9uKOIqbRDWMAsCijSXwJanJLCfqrjLNc7TvZkkVUkxGDHo+UDRw/PgJKlEQRS6ImUwmJEmHyXhGL1kg8QdBo8a4qwu0kVirKUq3YBTljDBy0vqy5o8YhbCiJetjm2PVz2FcYmyMQflgQ4qAWW/OBa92NJ/POXz0cjYvnG/PnzIEsSTLpqioJVJG3W5L1g8jlgddzp3dJfGa+MVEkgSaUm42Xc3l4SE2z43pdOsuWs7SsE+lt5G5q9CmQcZguUdRTZzWN2AihdGyUX+K44g0zRkO9ze/r9OLGCy5e2Fp1S02XZUQxiVB7JOfpSFB2COb22bTexM381MveT733Xec37rDEX1vvusuZLSPrp/7wijiuIdQE85/0c3X2z74t8ggpMh36a24qt9PvPwVbIzGjVLn7jkXNM+3R1x22OGWpbZsHL+T1UsOs3bj0wFYW+kxOrfRBOeDbkw/1ox2ZiytuOrvvquvZu3YZeyO3bXakpLeldcTqJKwlhPvj+hfkXDJoWuZZe4evee+E5w8/iBx4BP2ICSvCqL4APG8vUceOL5JaNdQWz5M2b7AaRvQ6fprPOxR2gw9Ose6N1fVZYkJQqQKCHwX1VqBkAtV1cAZpEpks07FtsBkKVHcpdbe6a70KI2m8ny5LJ0Qh12UChC+ulxpg9alr/r6ayMseoFzNU1nYAxF4XgBAPu6K8AKQtsFrHzVJGvuOF5URgmKhoOiqKoCYW2j5KQrTYATUAGIhGG6c4Fut9tW6nRBrCzGaKzfCJe0RRMzKRaEVM6cJCUm9pXNaXYvhw5cSn7powBnjntWT4mQXLd6FQBP+IZv4fgD9/BvfvB6zmx49b6lkG4Jp794Hxu+aLB513GuPrxOVQvFnDrHXe95L0pU7NiaK6kJbriG6DueyeePu87V0Qe+wFae8ylq0ZKQ4MHjxMWEqXKdKh27LresPE8yz6iCkFwLlldcoaiXdCjHKT0ZUHmT25kQjMoLBD5gsNISRIpdUzXGxoEVBNIhJCqvDjrXBbM0bc5vFkSgEqhK8sidz1kQIjsriFHGzHMLyAommxv0vEy3zEq0gM6+1YYfa6IVVi49gIp2qUSbgMjdvfvmf/zF/8xDx3TycMGHe+48xWVXuI7ivMgWCpFuTHZSlvf1H5ZYQZtQLY7Zrgs6Z2xyitsf9u9AIzxxL8cB+LM3PaKe5cPGYlJVj7e/6Y/+Re/9cuPZz/tO3v+ud/Ktz/2O5p786798JwD/5/Od+bFSiihMEELwjj99OwDP//cv8Ea4kj//o7c2x/uBH/7hBvkAYKzr7tTdnjiMeduftOqO3//SH6XUFTIQWJ/oSwmqClBh0FClhRCEYZuk1ImOEoHjeeE4xcIIZxlTd0yERovWiBljUEI6rqefi6YyCGGRMqCqhXe0/wzlA2hbS+LbBhxUmcJ1eoSi6bFpDdbSjUL/+yuweVMkr39LvX7Vqs1KKUwpm6Si8t2EIBRN8TiMnT1JpDoUXlgsDBVJEjV7O1R0QpfI5D4uUsrFHw8+eBdfvPsL7lyNC0w6I6qL3AGORyjb+DSOJEZbjM6pZXDDAKb5vClEz23FkcOXsr2z2zR+lAUVxXS7XeY+lpBKNYU8d5osViiSRDbBvhQGo0uklE2SpLUhinuNiqIQEhk4RE3S9UiVzGKswVKymBcYAaKZU0HTLWyVsYs9SRK4rqmBplBsrSV4CDerlgkRsk2yrbVYCmSduCjpzK2tRdr6OwgqYwlkW+AYdBUf+7uPMPKc6+5qSNxdZnT+FMZbUiRxDxWFZL5Rc/2NX8PhS5aY5lPSzH239fWSjTM7xHGPyht298ScfhSifeE2KxN6/YLZZO+65QoGrdDHXk2Ih4+vmuSqzpXqibV4MQ2AsUi16P5uHzHBMsaQZ+2iVQtj1JUPa4Qj1vkTU+QVSsWA3ZMkSSmc07upCbuCQadPqVuCu/PRCJpj5VlGFMdusix4bWitG5GE/fv3c+/xE1x/09dy6YOuiviZT38SZI9SFZjcbU4qm2KF6w4AdDqRv5kXoUSKc+c22RnNkaHx33OGihRjD43o97vsnJtiKal80DDorLIzutAkB8YYyrIkDNrfYkpDEveZ51mjINbvLRGHCfNtL0u8O3bVJ5PS73vVqKKimhtiA33/vnkxZ6W7xMCr4s3nc6rKMFzeV6u1EoQCIUuyfEKl6/as5cCBQ404SFnmzcZRb3DduM9kMnFVHP9cTbi+5ipf1TEVVhdcc/RYq2AUhuBl85VfqLXW5FWPyrrgrtS7CKHR+0L2rTribycasL6+Thx0iDzZOIl7BEHUJESBipr2vOpEzTmuq3X1vA3DCKniJvGXws2Vxc1RisB5o+l2YaNKkapDmfs5bHK0zgmjNjgH+Pit97CyfpQfe/nLAXj3X7yB6YVNpiN3rN35LpvbZymyDspverom7krNs5/7fH8eBFFnwPKau8Y33HAdf8ar+JZv+5YG5jEcLNPpdfnMP/0Tcp+D84nLjiA2Sjq+Q3vbrR/jzL2nGA5WsctTPxe7HF7bz6YX8NidJcxLy2VHk4asW62uES2tIIIllHaSvzddewnXP/axfOLTjmT94MYuhZLsG2Rs3NdWhdOTJ9jqBQyMm/urZYAREu2TtBkFoyVBGVpCUVfAwKqAMDdUYX2/uWSnvsauCtcWdAACq8nnczqRxdQFgiBEEBL7YLTXWWE2niBt4NYJPNzAVghsU0gSCBAC68VOBoMeo9EIKaMGpltWLrCwxt0r9fcS1jRVXOW7VlpKZFMY8r/ByiYg0caCVM06lZSWXtTFlhrt1xwlDJKAOI6a+6hCIHXGkmyrjCIZsUwXlbvPG0vNNBjtEfKI1w4gzpyl45YEhrpiaabIk8t47BNd1VgEBTpZ5sobK056r50P3HEPRw4cY1W44Hz7wRMUVjHuCtZ8B+TM7iZ65zTb997Jdzza6ce/Nn0/5+Jxc68dySQr+w+QTQMq/5tjGVHZnJB6LbVUOiOwgnLs5dNNj35cEgvhFDKBUlg6/QjjhRIqYyiE5pIgovBJNSJEF8YpDfp1dyVSdMO
6f+fia8due1Mak2tue1e3eZpslokihlwCu37nJ5eYmaHTbJAVgunFDEk6f3mUwiBgO7rVf9ymY8fvIOiI7Z0iX2D88v0bpF+Xeo1878++rqjPtP3dw4EnE1PpOqqskmCVW9HTkCxkASuIBrT67eVFuur69HWNLT80fcuHEDo+UIQy3LgqvNlqapRiEMlIMdjAaQTLh7awEmxDSehxk16K5lOg05u3RBymx+A2MMc08OljJAGHj0+D4nR/fcvLNFEdJrS12755JkU9bbB0z9s0zTIwZTUjcVYuQfNaiwp2quRngvYkvTaaYzb9bbhxhbI5SAfk8YTujakjg5CPbUdcN0MRshwGXdEoiQ2XQxksDd/eQU5YrO/yzL3KE+8ep9q+2OobfkkzkDHnLYDSBDVtc7VLDnmVrW60vyifc70wVNf02rO373My7Zmea3ePjwEkvPyYkn7D99SJIuubh08N6joyMG09D1Bz+3sugJZEQQCKeShoN1V1XBznO+7r99xsnpgqYrkfsEgYC+0SgZkU69oIRxJGblvZ/CxNLpmrrsR4+u7Xrl15Pn7AYhg+mJjKTYueeplPO021UlnQ8QdFkzmcwOqnO7jkhJrDKjeEXVbambAiEETy9dIpokCW1XIL35qQojrncOJnN+7d5/y8ByfsT1o+sRthKoBKMNSerV+3SFtYbZfELh/ZWMhGiEEbqxKyp0HWOMm888n7Ha1CQppIkLgLblNWVV8hX/6ldy4Q1Xy+Ka2TTjU5/+bcDB4aIkpSm7Mbma5hMGo2l1SbVfe42FNeN5BQeF2T3/tqmdx1MQRGNQrZRiPltifVI2y3Imk4zpLOX2TZeYfvLXfo3Hj59ydHTCF733PQDMj27xRV88GcVPiqLg/OKCi4srzr3oRFkVxCohi2cjN3vPT4rCvVljR1NX454IEEYJm82GKEpIfdK6Xl+RpumYhDpIvYMGjklnP5AkCUNvx3OsbTfk2ZTOV1fiOHUFvM1mDM5lZCDoCHyBaRKE47PTvkA4dIY4yp0fjy+ctIMmFIzJXZxGbg9WMXnmzqeibmiHjuPFfBQW0G3JfHaM9QHVo0dP6HRHmuZYv877tqE2mqLYUnmPsDh2vlT7fTgMQ6Q1aN0eRGgkxLGgs9DqA1crjqeYvfpvKrGNBdGN4jF6aAmCACUbjF8v0/mcrt9weXUQG9Gtpo/r8f178rQiCCWT6ZzBF8esFWyK3YEaISxxFLkkvvbmw9saK1YkScLveCikYWC+mI3qlqpuuXnz1IkW7RX2hogsy2i7s7Hoo0VKUayJI7efvnT7hK49Y5JMUV46duglgRVEUTqqGvb9lvl8ytoLWl1cXDCd5djI8PSJi8OMMdw8fZXtthqhnu7VaQgDt9+cHr9KP3REoSLwBVARWqaT2xhjCE3l5yYiiXMmkRcbiyPSowmRTEbBJdtVCCUxPiavm45omqLCaCzwRJOYE3kDrB6fi25a74Hqiw/K0A8tXXOYKxFm6EEydAO154H1fefi0cm+UNQRxzHNbkej90XumOvLC5eweepMHMc+Bve8tzwlm2ZIDnSX+XHOohw8dcervcYJXVvRD16EJpqyqUvm03xU6xz6lpdOb9G2DVHsPisa/n/AuQI7YlWVCpzc6TPGttJKpIBOMxIGgyDw5rA+YFCgjUEKwdyT49nVbM9XpEFC6z9rEBIVBRReEGEiHE7W6B5f+HCO58IbFvvyVRRFNE09bhgH9cHDtTdN45Ovw7UHQUjT1M+QXi1JElOVNcZXvNtmQEmBlYLGcwSUNK4y5QOwMAyputap/vnF27W97y45BUJw8tN5mmB8deugZHh42mW35vy64q0Hjs+hvMS9EILcd3HyPGc6P2KxzLl9213D0dERm/U5v/br/7u7d7Pj5ddC8v6O767A7eMjXv8TX0ZdtDz53D8F4ANf+iV86au38RQldkXJ+dNH7JqK7cZhoO/dvIlNM3e/fiK2ZcFVtaXs3EaQpDl1vWU+ndF6crWIBF3XIGUwmg83bcNsnnD33rsAOHu6Zpo72Wnt8fmLxSmDrlldVGSRS/BMFyJMxe2bLgBM4gnGGFYrzdHUcXa224I7N25RtpcUW6/oM5kQhiG9n7s0SbheX3J6eoz1gfDDx48JVMLp8Ymvgro1lwY5pe94SdlyvDwmzxKOvDBFGgZMTo6dAaCvqA8v3aKrCjLv6j5JI7JIcevWHVb+eXZdx707t0nTdExEJ5OJS7DNobK73lwTJTFPHjuCaZRmaAybZjeuddsK9GDYbT8DQJaHXD4JQDbsvGz20AsWRzFVYVkXrhtp0SR5OHZ/jQlptOT0zk0ePHTcpbK6dNeVuHspS0scJtT1iq5zCZjuLDbNWe22zL3suWFgMp2O7//8+ISL1ZrjI0OS7PmNkr5v+I+/7W/x7PjY9/w76NCtqVZrql3EZBpy++UvAqDWVxwtFpRFz41b/v0P5uh2S1W557Sc36GuKhYLS+llgQkGlMp492vvhsCthevrK1p9hah9UYYJvTa01UFQptGaTAb0fUMUufdYN5o4vEnfel6fEQShoW1qPvpXf+65+/nO7/5q1n7ep4uc9eZ8VAEUwkkerzZPWcwOpoddI0iSBdutm6vj5R3+ow/+CH/Q+Nbv+rMApElGFi84v3QcGHdhA1EgSScuQDm/KJnkS/6r7/25P/Cz/sy/9zrgDrBi147Fqvv33+G3fvHyD/ybf+XffPfYXa7rgsVygvJJ9nF0jNY11gikf566GwijgLbvRiNcITJ0X9PpQ5I0mYbUZc/EV42zvPem8J5H1FWAoO3tKC9sraUdtohBHALDOGJXXI3/zqIUM0SEQtL7gAErmaZL2rYl8e92XVcoG+MdD2gb6FpLnASc3rzhn3nEk8cXLKYnYwe4LFqqqqHw/Mo0C9Fa83ufe8CsKcqfAAAgAElEQVTEV6ClhG3zvFpguWsYOk3tub4WZ6Q+m03Q/Z57VzCbLZjOl/z2p37df5Zhs9kh8MUrrZDCdTcuLtz7cOvWDay1PHrzjKlHUTgTTjMG+hhLljkz1DB2c3WyjAgiRT/o58SqiupiTFrWl4ET9tAtR0eHjtdn3/znlE3JL//j/83NqTzmpZduj4T6KPKm6qI/dB6tpN40bNqLUX11L85wde7u5eim49jsTXABdoXjx/Z1O66pMBTU9Y7OHASRTDdQVdtxLVhrGVZOvGKviKz7lqZajWfzphvGfTrN3GJIkhlNV3N5fek/OwUMYRgekDHWUrU1IJCe/zedT6iqhk3hFSi7gDxPSeKY2gsnIS157pKyneento2mqFZjd6TrneJwEB3sZ3ZliwoEy+UJx3P3/hdFgVKKpnXPsLeWQAwoAkzrC8pNTRCHZFlE00h/fzHWMPLltrtLJtGS1fYg7x+FIQYNoh/PMBUUKBGM53/XdU5B2f8HIGJFmkXUtSYM99yw0p3R/b4daWibBmuCUcq/a3uCKGboD2rKUsFqtRm7YoNVPN49ZDYNRxsYqwemWpJPI/Y+JHpX0JsSXbp18PC8om1r6spgfPU4jY/QHc8lon2n
OT4+pvVJhLWW/mpNW1yOXf7pdMqDR2dYNL320u+hoa5a5l7g4rd/83OAQciDnVCe5zTeDHjuhaiCUFBVa1Jv6pskMcJcIMShsJ8kEXmWjv/O0wzZAaYhS7zAU6lRtgAC2sC9D1keMbQt291BMGS7W6HCjMxz3y+3pZNnL5tx3rMs4+l6B8WBWzidptggo99zGXVHEEdoMzD4eS+1pq/rsVuZxwHt0GHNgVtYbxvmi2MGzFg0bHVH1ztjZLdWWqbTmO3mijjI/Dow2KFBBXbs6KX5Mf+i8QWRXCkh2YtFCmCgQ0kLwsue0yMEHMfDQVrXuk3xsAASEA52p2u3sfzJL58ymX05XfcQG7oN39qQpobIy3Sb2h0wURhR+aqK7gzZNKHTA3ZwE9D3wpEm9UGVJ45jlArGa3Ju5eEorgHugHEv9d6R3qm6NdVuhCEkgUC3JXEeIb0sdygD132yB08qawYCKVFje9jQtU48YUy4dAvY56qF1joFwf11Dn3D8fEpfe+mXxiLxHlYicF1bS6ePODyYsfb9z/F48efBeDoaMHx0RF57hbc8dENVk8qntYtp17F6eJa09YlYRhz+qpLSH730X0my1OuVy5YvlqtMcNA37WEPlle7a7J0ogkCKn85tPJzst/e2ETMzCbTbj0FVWA3WqH1jVJ6mBTALoP6PspZ97dfrW95vz8nOkkZjpxh/Ojh+dcXzplvH3HSSnF46dPmc32kpwBx0c3KHY1V16W9+TkiKvVNclEIfYqNdmCpmwIPel2db0iCELeeecB77rn4G9CFNw4jTH9Fcq4tfje196g7SraibvOahOTKkUWxrz6khPxWK2vEUBdFpzcdBvifDFlPp8f4H4yYBgE5xePx6A3zBIePX5KWe1oag+T25XIIHom8XdKkp975z4nXuK4rmvuP34bsFyt3CYyWyyBwMFpgPVOM5kuqRtIFy6QqZsdHRHT4xmmdYG+7kLoEhLfLZCxoai3PLr/cOwkD+qa8801YeX+3TQdw+aCOI5JfMKlh4KyviRNU84vXZext5bF/IjGewSFYUwURRTlZiSiW+ve2R/7xEdGcZNv/fCPkkZL0j0MYhjAxmxWO2T4JgBZcuyk2buezAtRXK9+DxBMPESurNYYSrq2RviOqTWKOIG37n9qtAFYbd8hn0gQXiGq2DhobWiwfn+LooBQTTDhwdZBklPVKyIPWanrGqtyjIj5lu/41wD4ke/+PwGIY0uSuvkr65auNRjjqpyvvPIKbaVpasHV6vDe6N4iVcS9u+8F4MPf+P0AfPwnPwLAtjhj6FK+89t+grb2CqlG0dTX6K7An9dcX1/TNgbtS+fbYsMv/f1/yFf++fePEO6hlxRVyT/5+bf5X3/aPeM//ec/wG59CCb/n198hz/1b72XIGMMNLbrll/9n9/kV3/uc3z5n3WFkiicsNkYKg8BDlQMOLGK2cR9VqdrZNthrR3FDaIwJEujEYrlvOoEWpcj4RoMSoXsEeRaa7IsYxj0AQnAQBjkBKFg7TvcMlKuy+p/JwwjbC8gMqMUXV2XzsYilhivoprNU5QKqdfu+9MoII8D513oCxLr7ZZpNEHXDVXv9om+t0ym6Qij6bqGMAwZesXQeeW8LEHuszY/kiilGxri2O3VdXGJNYpdoSn987RW8MYb/xIPHz9g48WGlsslV9dPR78jcO/NdndN672+Ot0wGNBtTy3b557fGJzvSsq6IY5jygt3Ru+7PJPZdFz7+3N2D5E/Ok1IUqequs+/ri6vWZzkLEVG5bvEUu84e/yUdz7nq/nK+bLN5tmYNJydryAN3XPyojrhXlnTe7AJaUjijDQ7iLso4eDfwzCMypEoydD3oyJk33dESYIYJMYc1Dq7rqfvexofCAsVghj2oBuCJKK3FpSi9cgK07ggcd9JGgan0ucCUE8xCAKEEujuIMK0rpwwSOjVGMMw5Hqz5WrVjEJKzhuzRwjBrlr7ebcMTTdCgKMkJQsCR+737/Hy5JQocnY0+yLlZLFAYrlxy6EV2rZm6LtRvRbAtj1hZGnqjunMrb1dsUYIO9p9pEmIEgG3Tl6m8MluHMduzq1l7uG8bdXSC03kRcSSafp5hXYpBAQS2wl2XvjmaH5EU9UHAY/QvWfbqjyIiMgBKWE6n1Js3LsWIpFI9F7hNzHEcUzdOtgxgO0rqusdwc5SVp6aEEYk8YzQd491J4nUhHxhSUKnvjqbzRCJE28ZCwtWUlU14T7Z6g1aD0TB7VEEydCy2j1iV1yTJ8txLURhTlF67zRlvFy5HruKunBKnQ4ue+XXR8zq6ox+6gsijYsPBGYUZTG9ROt2fIfiOHYNDylHr00JBGHmLHX288BAGIakXj696zrSLAEZjsWqtm1omobJxMVgAFxd0bQVSTQf16aUVwQh5BP3d3me0ltLOj/lxKvLFnVF13UjtHa9uuD4ZMlmUyD9Gj6+tcAisPTI8ICWmS8mrNd7NM0xVvZMZvmoWJjEIb0YqNuKzsftkdpnLX/weCFo8WK8GC/Gi/FivBgvxovxYrwYL8aL8UcwviA6V4IK3f4q4DpCXeewoqMXrzBICXUvxhZnlmX0pqBtPU7TOL3+MApovYxlHifceU9O07+D9u3oWEbEStD5SnY1uKrHJLlJf+26E4PZYK1BWDV6fWSZHPlU4Loc++7QHtIE+zZ1MMq6Syk9z8fzuYSrplfVCin2fhWGxw8esDieoKQXtOgbsiwdJUCLouDWrVt0Xc/uwlVH5vM5YRggpSIWrpu03Tnz3DjbZ9WGpmmcsaCv9phBkUaC3nfXFrMlwlh0VzOZuu/XTU2cbXjj1bsjnlt4OEHuPZisNyqUifcGAOgFYaCQEsrKQ3AiR0B/2cML/3jsCOEqlM9wvDRN40RCYl9psdYiMSN+XXe9kzsWYsTjy2FDFE4ZBjv6RRkvob8XAxhMRdsqokTT+Irw0AW89urrCNWMhOSLiwtefe19NM0BT35+eUUzbFGh+53VdocMJVG0JPS+SJeXl8zyxVgBk1LSthXb7Rbp5/21V99Ps6u4sTwaTe+K7SPqRrNcuueZnEQ8ePQmd8K7PD3zkJHtFcvjU/L5CVXtuxjliu2mGDkX28pBKW7cvsWbv/tZ/zxbwtyy2+wQHr5QaokQ4cjrcxdrPS7cy67qwQvI6NHbIwpDyrIiUL6T1F/SNIp+aDhOHQeiKyJQDbvNNcGePxiU9H1HP+w7HwMqUui6AY/DzoIlTbUl8X8ThJYoDFjMbrG6cvMZkBEnW4SE2XQPd3MSzHvj0aEHFcWIoKaq3DsTBAH1bv18ZRAH+6Pcm3wu6W3F4jikqd39GbGl3fVEKuNq5XhD0rzEydExF5fOq2Y5v0vdDsRpPPJxsmTJkyePWCwW1IV7nnm2IIuWbH13ZJbPKPUZoTxsvb02GCz5NKHzojrT+Yzt7ozY8ySzfEbVXGLpPL7+MIZe0fq/C5MQy8DZU1eRDoOctqqp6gLdHZ7BtnjKSXKTJDlABX/ob387We5IxOuuJfZ8oR/7nn8IwF/4yJ9DSsfzmPvOX9t2TCc
BTy7cc/mlv/9p94xLM8J0AjkhiZ+HqO02BUGg+OV/8Ob4M6UUpkpY+y7xndsvA2/6+3JrNs0ExvQkfr+TUpLEkjgJqT0PJIxDjBBgFUruBTNq+t6OnJbBCIwW5Hk6djoHM8DAaOgZRBHrtTOvtsMeBq6Q8YRNfTl6PHX9ADZC+0pvIztMPxA1iiz1cuZqRl85pEWvDuaxKtAjz04QoJTzdNHNHiosaboOKSKUF1MJ4pS6ahj8+TMMA4tljlrEI8yq2hWjGMJ+ZGHMLAvptfv+O3fusL18SJZOuHXXe0MFOYvFgvXuPu9+n+tGJGHOvXtz6sJ932//1md4z3vew2vvukNRel5rYzBDwBtf8q5REKTve6bTfOTsHp8eY6xDULx813UihXD/1n17gFkmmjRXY6W81gWtMeR5ht5Dr6eOozVfTjDW7Qm9UASBoddeeKNUPHnylJPT6bjnvnJjxtBv6VuJ9Hwc3Rmesh4hqruiQJDQ1Id9cugDimqHMebAJQpcV3aUIZeSyku+Ky96sV6VHoqtwKMMAxVg6Q4ejygCGdBbRjElK6EZujHGqPsWPWhCeejQtGVJEAjsYJGVR88ohYSR57reOXGNgZJib9itNWEQYa0YO4FJnNE0HXXt5qpuG7IsQ4iDuavuHcxz6O0zXOKYYdCjVcJ0mqO7DmuHEUWRpIqqKRHBgO495yqywOBlwMGIAmJDrS2V56bJOCef58ThfDyTpdbY2owwPWN6qqpjGAaMjyWM6DGmQ8mMxPNxur5HBRGhj3ka3YAUKBWOgiYiEBhh2HU7jEcMWOn5//5+MyEZOg02prj2MvmqIwglmJQ0ce97JELqXc+q90JN3ZajowVtV2AH9wwsD+kqB1dMooMvKfw+S5wgpCq7MZZIMkEYTXj5pflhrbctVdkRyD2PL2a7dfBt4cP8qmzcOzT0DH7im0YzXdwYocpl0TKZJgymGeO7JEtIRcrGi3NhB3rTMegDXLquawLbEwZifG+rqqPbaZJ65e9XIXYR0jbPoWeUjLnebJhMZuO1az2g+71HqOv85nlGUXlkVxy5rn91yRNPj9i/F1XjSftYNiXMZ5MRFry9XFHuDJPJhFbvDaVDdmXFbueROpOWUDXM51P2fkXX2y1RlCBVjPFCZvr/I336gkiujBlIY5+MxIJgucASUvrNIE1ClrOMsolYe4W0fDFHRTHKu7h3bclgOqIwJfMbnLCGWEkmgaTyUIWqWiOFxecZHMUJdWcR+hzthQX0UNP0EVGgqAtPpJwFWCsOfBk7YIxlGOyI/6/rmiyd+Un2LVyzF6bYBzYO7ti3W/LIQZy2+owPvH4E6pqdT/BEUBKrGdmR+z49EcymCiEl69jDrvqnzNMTkmBK4AHdk6jC2Cs6T9yc5EtmUYQMzJhsxNJgWsFk4SByN++csCu3VJ1i8C/U7PYt5CCIM4XxxL1WD2BrrNjDElt6U9KsB6Q5JI+6q4iigM4H1YPtESom87AE1RouL3ZeRfHAKQuDmK7XGG/4GKYRJlCjGIEQir7WLinzyZXpJmy2jli9JzamWYzYq2IAWk+YpBFJkmDn7sXo287x6HQ3qj+99FIIenhO7dGiEYGg6/b+MT1IgdY7tn6zSaMNZ2ePUYGbu8k8Y7vTvPLKaxRrL/4RdcRxwCA6rCe0lnXDdJLx4IELSJfLl7h9892cPzknSvyhA0R1xXQ6Z33tNpuy2mFCRewT/6KqaIqa9bak9JCKSteYesvR0Z1RYSeJM/QwUA3u2UjV0w81dWW5OPftfeXKAFEYju+WbjskUGm37rJswma3IgyPGDxstRnWLIMFXf+EpvOKc1GOUgOZFzvQrTsEv+Q9b/D0qeM36GHDqy/fpvDY/9n0LnXdUO06qnLln12AChICOaA81Em3EcNQc3rqnd6bigcPf4/FPHOwEEDXtfOHCeSB9wFkMXzkwz/Fs+OHP/F1aL9vDM3AYj5js1kxSx1ZPskzPvj1P8TvHz/wo3+B3Kt+fttf+vj48499/9cC8J3f/rOf9zc/8uNfz27TMZseVJTaoSVWiu/4lr/3eb8P8H0//C1oAbXekD4DzwJXFPj49/2j5372bd/51QBsr9ekWcA0ewmTHtb15VnPxeVb/NTf+rvjzz75G58mSR0Ps2vXnD8V8KHDZ77z1lOCUFIUBVni1nrPBU27ZTF99bnvz8OYfcJubEmu7vCnvzrhF372UwBEck7VrJ77m1l2g9X6gsDD5p48fYev+Df+GL/yv/wWkXLBf7GrCcNghGw1taapB/qe8b1ta4MKI6xtCdODeayygffqg6Zdk6QBu0LTeSGT06ObNE2L2SuTWUmsAnTT4n9Els5Zr94iDLKRPG5sR1FejsFB1/VIpYiilI03wjbGKXBZIyjqPSTOFZn2SW+SaqRJwSpSn5S53y/J0pSp9yDTesD2agxw08WMpi3otcQY7wkpetYj3NGNq12BlND5QNWYHpknVKJE+Gd37+R9PHqy4WJ7zmzuPqsyGtu3JMfu3/ObEy7Oapa3IhYefnb+uEZLw+27c6R0xSIRKNbra145dvyx5XLOdlsQBNEI3QkCibUhdavGIp7jvBnCfVA/WIzo6bVkfuSeeVsErLc74rTBBj5paBwEauP33Dd/Y83r77/LbCKofOCURBnRSUrVSJT3N2oe+OLIzD3Pq3VNoCSB3CvnwqOzS3qjUUoQeVjQMAwEUh14L4EdYaf7xAkUumuQktEXrROD42544R0ZhARKIKREe5glg0TIAWsP8H6MS7KeNRZWKkWq4FCAHLwg2HA4M5t2QIpk9I9TMqI3Aq0Pn9X1hYdl+utXIbuyfi656ocNBuvu24vAjEb2nsd7vpZOUdEcBLQGGxIGHXboR3U7jPNki3wSGAQpQRKTJGrkV06mKcY4X83a8wulDOltReg92ASSXncYzCh6IUWAsBHWBJTlXsmxQckE0++DegEaVGpHXruQirYbEIIRcqvbjiAIR9ilkU5AzBrAKx+G0sHm2mI3Glhr0SLDgNyb7A59i+5r0mRC64sPQWTI0zlFUdIOeyVlt5b3ipvG9EglEEKi/TPutgFpPKUqNcJz7buuIQgiev8Muq5hfuOEYrPFeP5WEIa8ff8dlsslaeSuM4yg3JU0lfLfX3BxcU2UxmPiNGjIJzELr/DZ9z3l9Y47t28yGDcvKiiQdspg+jGmS9OQOFbPeKBJlBIIkYyG5DJURIFEENB53l6x2xBGETsvSBZHCdksZVNds09ZYuWUGXtT0DRekVm3SCnJvMF03/dsd5qz8ydj0jtYp1NwvbkY1y44Xug+PrWqpreatx485PjEc1gT2NVnLGZ3ET6h3cMt/7DxBZFcSaXIvPSssobeuiqQ8sHyJI1oNjtq07KYukXfFtdU64rbPrhalzWTJCVkGDtXfd8TBQFt35P4QDHK5myuV0xj/9CCGCs7oiwdq2JmCEljSd2sMaNYhXW8p70UvBAEgQLMMyaCgrouncSnr24NxslRjjhU6aTYK6uJPUfg5HRJFGaU7Zbbt1218HJ9hogiQi9Hn8aKWe
6M0jKPFQ2yDBUb+mHFrYV7+c/O18yzlGbflZpIOt2i9RbpP0sFOXmuaWtHln3z07/DfHLKan0xVpsuLwxlozk5vY2POZkvjinLYdxAAhURqzlB0iC9I700IWlyCyXMWNltG0vTFigvDmLMwHyR03Uddq8SaWokPVFgCTO3IZVdhS7b8cUIrKv4GaPRPnFL45Q0lu6w8FWxcr0mi6PxwBkG7aT8nyGOSgWb7cqR4P15plsJwhzU/KxXWuwhjPeVXXe4iCHjpZv3/Bx3vP4uge59QmSchH4oFStPLN5urrDKcr2pqCsvwpAtqKuBPHXBx9XlxUhY3pveTRdz1ptrdsVBYnx+tOTJ+RM+98gl58vFgjSOmC5i5qdenawp6dYZQy2J/HMIY81skjL1XIYgCOj0jmbQ3LvnpWfLBq0H8umEsnAbYF03BEE4dq5UENMpS55nVBv3zI+mJwjTcTy5S+8rQov5EavVGtH5l6gfWOQZbVUS+a7GNHqZ4+kS1bu1mKiQUm+YJAk33nXPf3/N8fFdgpDRQNOisXZB0TiMeRpZbp/eYDHPx2qoUi4AvV5fUbeHxOIjH/4ZvvcHvsF9X6750Df9XT70TT/Jd//Q17j1WvfcP7uPlJbEq6h98Ot/iI9+79eOh24UBXzrBz9B2/Sk3hbgJ376r7DdlHzkm3+Uv/bt/zUA3/M3v46+7TnyCoof/Mbvx/Qhea5IM7c5l1dXpKngwx/86fEaP/Z9X0eSxXzbN38CgDjtKGrDKy+/htqrmvnx8e/7R3znx74KgHwq+I4P/Sw/8J+5pO6jf/OrkKqnqwO69tA9SvJmFEjYj139Dg8fuq74yekcPTwfnFftU/pyIE3saJMxn+Zkcc5gnk+U4nDCbuuS+NkiQ1rNz/93nxr/v7QNaZA89zfFVY1Uanwf9LDl//gff8s9o40vDOWKstiiO185jzJ027JZXY/qkoO2jvgcGPD8Rt1VEMbUO2+2miqG2pKEU6LMi49cXTjJ6r15drkhjARhFJH5QLGqKuJgihIZ2icpKggJpKHYHgo6aRY6dTkv3GAH43hevVMpA7cPVlWB8Sqguu9I4wwl1civsAxM5iGmL3nyxIsUqIy27TDCBR/dZYMgRPfPWIMYwWSSPfd829ZxEvbSzEGoiMMQa5wAAUASHDGITzKZGGz/TBelbUbRi5PTnHc+fc31Zcx86dZwXYJMQta7zWiuqrX2al7usy8ur8myCdbWI4G+9cq6cZQjvQpfFAS0bTueF4GY0ekdgYzGDm0QJ+Sp43Pt1dBsFGKs5Z233Ry/8WV/nNmipG7XRLE7Q7a7FdPQGcjXXmZ530UfJeODmKE1GHFIbOJYkcoQlB05rJiIQSiU2isB907komrGwEsphZTSKw+6uYnDiKapnuHM9fR0MHCo6KsYjCLam+fWNRjhzgf/7kVhQN/3CDGMaASlHOclCPdJj+PGtN1BVKvrNcb0xEmI8OvF2oHedvTdIb7J8sR3zvxaNAaB607tESBKSeI4HJPlLElJ05TVakW8F+dIY6I4JY2iUfzL9pI0i0e0SVWVBFHPdJKz27k5brraB8AD82OPIEonWDlnt9uLUEBEQlU11K03SVYBUkQIsaXTPoE1A4JmjAlEb8EItBUHBI4MRmXnrjvYMWDEKHKTJAlN0zHJp8Q++eh9l08JyWrl1fOiCGt7ur06YuQ6lG2jibwCZCAzolhSVRVbL44VqsCvCzPOQxiGSGkIQx8XFQ1VvcLaA7e/GzqU0ePe1dYNYVAwny/H7mfTVpyentJ1LUa666q7fdfVX1OgXJJtnf0JQBJbqhaU7/pHYUyaz1itSnIfw4bBMcXOxVnGc08dusuM5sNd79VWhTODB9htWtoIrO1GURalBIG1o20GyknGd1130CDIBabrSWNJ6REDRbljlk9G3lnbtkwnTpBlNN4OlDMy7w7xgBKSpi2pm0PXL4oCzs43DJ5Xn8aOU/bgwcUz5+/zZ97vHy84Vy/Gi/FivBgvxovxYrwYL8aL8WK8GH8E4wujc2UF0bCvYhqCUFHrHl+IoKsbp7DTWlpx4DypMGDt/QdaK7xaUsfUYzBVGFO0La02BKG71SBOqIctj85dK3GSzZkezYgjSezhBdPsCGxFFKbwTFfeWuvNIhm9o6IoGjtXYegyYiHtKBm/ryQ820I0xtBtCoonzuNFloYolgjTgZcdv50fMShBt/L4YwyRLrkTJeQT1627XK/YXJ3R6IYzX+Xb1AWt7Yl8FfCts7dhMKgoHCtX5eaMSKUceznOXtYkix5Ra6bH7rMfPHjA6ckpMnpKv/Vyom8/QAaa3ZWHh0xvcX39cJQsB5hMI4ZpztW6Y7l8FYAomSODCOWrcGkQuvZtMh35Dl3XgZBIGKtbsYwIIrB77x8k0gpilWK7vcRqR5JkzvfFezXEIqHTDcpDNGZZjggdj2gv6SqlM0Tcfz84tadhGMZKizUD2AGpxGioC703hIzoeu99oQTXq43vZEIaJaRRjLWW0yOnmDifHDOZJkgpR06HUsp9rsePd7alKkqM7kcVniyLnfHd5prWV5mCTYPpQXq4Zh6klJuCSrWUngMRhDFZPiVdxCPnIQ0XWC2ofbu9rlqOj25xPIswe1PBbU1RFjx8eDmqZw12cCbCvksVJ9A2JYt5Rtnvv2/KarXm+HjJbOrW0NXVNdPp/MAbqBuMCNDGknu55EkesN6d03lPjbaqGWyHikLa/fONLRfnjwlUTj5193x5vkaFlsG3lmfzmFl+i+12y8RLQZdlQdnWZEmK4aD89Ykf/waUhyo2dcIP/uC/z0c+8lN8x4d/BoDv/cF/l1maE4SSb/2LfweAH/rEB9mJDUXpoJG38tcA+Kvf/hP8+E/+ZXfttWE2cap9/8XHv9Hdc++MjEuvygXw4W/+cX7443+JnZfgj6OcD3/wbwPwX3q1viodEM/4FH3oP/xxPvaDX8PV1Qapnjcv/O4f+hpOX3Wdx7fe+txz/2+Rv+IgF+Eluju4j7/vi+9QlvVzv/sl7/+iEWIh7AlHRxd8w4fu8Xd++JcA+OL3L4nikFl2G+GVDvPkiNVmjX+1eeO73sd//l1/j1fv3hslh4UQNL1+7ruObqQUW/hTf/oDAPxfv/AbpFNNHGXOgBew1aH2l3ozzmGQBCJAWM+LDCVpmBGGB/hSEjkYjaGnb/c+gQddwigAACAASURBVBm6q0hTb845OcLYnmHoRlXM6eTIKYH5PSJQAkRPIENqr0q53RRkacTV+owTD3ebZAm7Ys3gW/zLowXb3SVxGo1mwGVfYq1lMplg/Z6TZROSNMR4n6uiuUIJ12Gq9V5CXhB2il6D8hw4rQdUFIycIRVmBEHCZr0jy71XlAiQv690euPmkTt7fAe87waiyJmF3rv7KgBVVfDOW/d53xv3RshPVWqWyyV1vZeenhBGA+t1x7tfdx3Zpw8/Q9EYkmw6ws+iJHacYH/2KiUoyxprIPReZiqQo/pe57sYTVt7xICHcA41aWDACkrv47WczUi6gEaXGA9Ra7s1bTUhidw19axpj
aXtB8LEd6WGhLY0qEATe2hp59+zvcT6MGiM7QnlofOXRimdcf53ez8sgULKyPOHnAWAU60LCP09CyHoe0P3jNpa2/bPKa1FsUOzGCsYfHcQJdFtN0K6h97xRHa73VjR77oGGQqGYSCK9+pyPXESUFXNeI3D0BLH4dhFcZ1CBQzUHkae5Ynvyri/i6KIvu8IwpDYQ6plXRPHMcYYJt4/LopC5ospm81q/H4RDIhgQIZ7yGFN14OUYkTGxNOEMIgxjXt283zGer3m6dUFWXLo7KogYrOuxu7S1XWDtcPILWybDimljx88JymX7HYbwgDyqfeeNDWhTMdOTxBKyk1NkoZj18Z1WgfXTfO8mjhNsIMZ/67paoyVjgu/9WsjDjA2IJBi7OTkeQJItts9YsnxylUgxq5Y3ayZ5jPCJObeYuHX3kBVHFSwlVK0TUcQw+BhgVhJlDooqPImvpFNnQm83nP+AiSSqinHDmkYO0Xdtu8c5wxPxwjDca/I85RuMLRtO/La6m6HUiGh9YboRY2SIcYY1qWHhyJAWpQ4eJ51vaU3EtMfTMTDMGa3vR6vKc1zdDdgrSXYmwJbTS/6kU+mmworDIKI1KOmemPou56y0FiPglnMlxRFwT6ki+OEbVWSRjFl6TqD2XSC6QwM/SEerit2u90I/bTCmRpPZlM++1nHJUbWJGGOUgHawxynz6PzP298QSRXvem58nyOuna692Ec0ewPgUEzn0/prKTd+pZjXBOElq03+crznE27RoYZV1fesyeQhKFiMAOVN2ULgggbDSzn7qUz7YanF9eYy4ai9Ca785DNtmGwPdIvsF5LhGB0wLbGMAwQhvLggTDocVN9NmgXQowys05WUlIU5WEDrDvqqufk1oTaY1HLXcPZ+ozUJ3PL5ZxaWQgs68p72hxP6Zs10+mc3Eua9kIyDYIR8pNlARKBICLyyUelQsqyRnvI2iKPsV1BJCyXT5z/0CsvLREypdh13LnhMPSb9QrbW1JvWNXrC+7eyTGNRfuD8eLyEWdPDCc3X2FXOEjPIs4oqoHtY/c8bty4SbGrSNOcrnULdTZb+sNJEU48PCScQz8ZDaabrkSqgaFXo7xnZBp6bTwk0wdFoQRpafxhsrlakYQ5KBfcAGyKDX3fk4QH88owVIRpiPYQMoPA2ghhD7KyhoHNriVL9YF0qkKmk+VoUNh1hiCQBCoese9R4JIqR7zde4/1IIORbJ0EMcfTE4wBj0ykxxJPLDK+Zj5395yKHm2sw+vgPMx01SClRq2db1GSKrquZ1WswOOrHzzacOPmCfhDj7jh6eo+289cMZ3t/XEC4jRH6AF8sliXBcUwsLzl5OEvr9+hqRSnMifO3NovqoEgjnlyfsb7vReN1tr7xbhNeb44omk62raladwavnk0Yxg0M2/Ou15fc7o8oio7dOc3zcUJRXsFyRqPVKQX1wz9nH5wAeD1SqO7HU03cOTl4YWC5XTqRGaeiTSVTgi9fOsgC47nr/LsmE1u0umGD/3Fgz/Wh7/px/hDhxem6W1N9/sSlpPTOWdnTwmD5yFananAWyHsD6Pv+YH/gBXe0y0JkcrysR90UMW/9pGfoW168jynqK6e+6w8n49GqEJ1/I3v+bf5T/7K/wTA08dPSVJBkolR9hhg6CRJcPf5z8lijmZfAoCUmpPjOd1w8J76k1/6r2NMRbGrUT7wPDmdM9tGY9Fps3PE9h/5/v/28x7TX//+r+U/9XDJ97znPWzXPWfn98f///P//a993t985Vd9Bb/4D36FLHFJa9WUGPX/svfmsbYs+13fp4YeV69pD2fvM935+t3nhyfkZ7CxmcEgMAlBTjCOjTCOEmScCJIgAv6LJAIhlBCH4AgRnBghGRzMYIQwtsEYYfOwMcbDG+9w7j3jntZeU89dXfmjqnufC8/kH6S8SKf+O2ev1auH6qrf8B0K6N18aUpJ27YkScb62iWGaTr4HAXE2Y14hLXzkUew3j4iUAFhOGXh9BCQQqMDhfbBgBbKk/8lajhOqICC2eJw5BZYUaMCycGRK1b1tiObTplkMTvPEQ59UCqlBDmYY3asd9dozzsJIheE100/Sr9rrT1EWBJ4EQ8rK4QyRN7ioCgqmq5mfpChlS9S2uckw/243qzAauyQlMmIMEqxMkcJJ0xzXTwhW7T883/2ae7cc2vCvZcO2G9S9ls377JZQzKXrK62dN4rqjauoJimkxH2VNWlE6LyXIpABFhlCKKIsnHzta86x0fq+7FAp8OAPM9HaKTOYsBSVzmhh9IW1Z6j6TH52QrrYYdKC9arlsWB29ujdEddOX+9MPXQ/drQFhZaSZq4tSsfYEza/77pEQkjTcDd4wKLQUc3IVPbVkzigN56n6tJ6DjYgaauBo6ucZB0Y+laD1s1zli4HKB0E42UPRDhc2ra5srBCT0HSkqX+LqCoBd3UBDoyImAeWiUkNZDKgfT7ZA4CjG2RI08sJ6+B4Ml8IlTGKd0z4l1FHnlOMqIcQ47zowiL3YIf169banO9jc+VMoVMfu+G8XHXELf0xlGTvDV9Qrb65vEut1hjKA3lkCF432q6/pDwhtKaur6Bv42XE8YBW4/BZ49u0ZrTVVFFIWH188jWtMPORNKW6aLKX1XUXvPU4EiTWOEjMZjNU1F2XWDNSRKOUGdyWw6nmcYapbzCdPpBK2GJOya7SYn9XzcqiqQ0u1JA+QvCBXWOinwyPMnm6ah683ouWqMwQpoSkFTbv1ckCht/fvh4xLj9hEjbuJh07QYC3sfU2opOL+8IMsyhPWcsg5621N4CHigBs9Txwt09xxM11JVNw2DpnNCHLWP3wIdIazBYEY6SNsaVKBBDUIjLVXbECaaunK/Z3pNHMdM4nSMg6TsabuStvUJfBRipWV9ndN7VZjO1EgJFjmKlGz2OxTiuUS4xpie1uRkvuC7L66dKEvbjQleHMfEkxuIet9b0mnEZn8N/j7NZwn5usGyZ7Zw8XCvPmzS/m+OL4jkqsfybOU25ZPTW5yfnyNKp+QEkJdbqnaHDlNKn3DFMmIWJOOCYeoa1VfU+wLtF83eGq59cDfEVmVeMUlT9t5ZWukUoRRRHLP1ZMLtpiKbHVCUW3rPfdHCumOIm4pCYzsQN/jmruswxuFg5XNqYNba57x3nGLVrmiwfnM8fvmQIjc83a+IPR9GKM3ByYIsGAiyirJqKMuGyPtM7doNr731GrcOb/HOuy7DTmdTduWeNHbfmy8PKPOCroXa37vZYkqUBqN5XllWrK9WZJnmzp1b/qRDqvaa0FY8ftcF7NNsQZTE9P7tieIJl9cr4kRw667bnLOjI4pyR5IKbO/JxxtQfc0SV3XsLlbcXh5i+xLtlUXOH72PtYI4jtkVXt1m31MWlsNDN5nfee+zvPLGKwgidsLdp0k2B6mo65Zk8KsKFLaXxIlLKjIdELQtZV1QeR+KLIoIp1OKoqD1m3/fWWTQj5yEum5puw6tM1Q4qPcELGcJjWluBBCMQVhLEnkndO+5YWmxg1pZ1yJk7z7rV2olBJGE+dKdd9F1dKL0
G5P348l3TLOERdgRDP5RagqKDwl9pPMpbdVwetv5FgVxRCDdQjksWuuDFVoK5uK+f36OB9A02xtic+sCuaquRw7Z/OQe+/2eiUcRn7z8Zbz33nucP/gc08wFYMXlFdncMJF6NJROE03TVGxWLihzZoBTdNST+I337ML5V63Wu/Fenp0/Yr6YcXDggtVNfonpdoR6Rj5sFP0dgthwcuJ8xM4v36UNSo4P75LnwyKtqCvLbl/w1he5+wLQy25UBoqikP2m4C//H3+Mb/+Df9Z9r4dp/JzvBvA9f/mPEyeavRfZmM4OKfcVUrecnLhK+X5rubp2ycLgc3VxtuNw+dLY8RpGEGbUPigr/ca22p6N3Z6pnZKKWyNB/rv/0ndSpiWHh3OE/HDnar9tR6Gd5exVLDfcqqNbMUqmRFHA6vps/P8iN0TJ7kPHwWha69bFptthTcif+KN/e/zzO2+/zcFhhhUbtpfu/uz2a549uWZ+4Nayusn5lm//dWz3Z2jt3r+yrNEy4E/9wF8dj3X+8IJ9vmYxc+/6b/jtL2OtJVnEmNZX5ruOH/07PwlA5QseYaRAB0Re4KI3EiEldVtxcOTWiap063LVlBjPzQxUhg5atns3F63VtKqnKJ4ynZz4Z1JTNVtmXjShaSwHkzl935N4/59Hj54QxIKmqxA+sOlMg041Ze05UVqCaCnrjvC5tURJR6geOoiWFh0qrB3I6yXGdgQxRD5I2+9ykjQmDBJaX7xRuicINLU3swyiwHFmumZUqa2bEvlv8K3bvqU3djTZNFaitCbSMaVfc6/zD7h1J0XpiIcP3JzdriTHJyHTqTeAnXQcnEw4P9uw2XhDYhGiicir/Sj0g7SjxyI4s05jDEVVYXyxKkkiMM4ct+9vPN2CQNGZYU0qiAPX9b8eOJ7ZhFC7zljs4wRhTsgmOYvDQSgCurZECjmK8wRBg4hm1Pseux0M0G88Kd2BJGmkiaKbAmkYSaxVYBljCZ2E7Pd7ktQLb7QtXed8kOJkWPc7sK64OnjtxVHm+ODxoGTpWOJKSpTnUwvpOrTDHqNUMIpjDYW+pukoq72PN9w5xUE87gsAvXFCVsZ2RH5/KsvSiVKoeEST7LaV80uTg4F2OyZ6o6qhtHSdoetuVJO32xwpb4Qt2tYQBBalFGfPtuN5dC3ku57IdxDDMKZt+tHLLEtTus7QtgZ8JzdJQ2y/cfx2n8gURUEgu+c6O054qW8h8pzgaebQREZ2SDkkai1K2DHpjeMUawBhhulJbwymtwgrxuRKotAyGJO7MBIIIbC2HZUIlVLkec7F5TMWvrAXBSFN3Y8+fnWzJ4gVVVkxvJiyF3RdiVKayivmNFXjijb+pKqq8t0+PSqmgut+BsGN2Ne+KNA6HrnoeeFMeZ/vJJVtx2Q6Iwj1KEQlpCuCDB3LyWTijNLremwsBJGk7zWVLzYsl3NM797loTDUdQYxaB3Ug/kv2K4fjZyrqsH0LVGgKWu3bkQk1Jua3XY/xjxB6HzYjEcwaB2wWl0yXx5zdXXu72fBwcEBRVUz9XFmFAdUVUnT3nTKVBggtaIY/HGVpKorIh2MSoD7oiQKk3Ge7/M9QWvRoab18LmyLMnmhzT1hr1Hyy30h9VY/83xBZFcaaU4veUkgJUKOVieeLKe+3t0a0JR5UhbMb3tN96iRwUS4zsmUTKlaiYcLEO2uau0YHomU41pDLGHZ0QBmLYbCbwqiOh1Tpg1RBO3aJXtOfNoTrvb0fnsOY4DhLS07UDkdA7WbXNDKoQB0naTPbtF6MbUVwhLEAaItsfU/jPGmQArGaH9Ay/2e4JYs839hio0UZBwaz5H+epZXuU8evyUq6sdUw+zCFTAblMzOXBGdU3Vs92UzLOM0nilw0KQhRm98dC6ckcWR0zTmLMnbkNdzE9BpcigJ01cAN13gkgbLEPLPGFmOsptxWblq70yY54ckufXbDcPAMimKfN4Qhv46nNRcLXOfdLr3e5TR87dltfcm7pEbZ2vmKeGrnDV8zuLjqBZIQi49obEK9nStZbZ7IDVU78CSs12lzM/cL83zRZsu5A0jkdYwtnVygXvStN7CGecprS7a7a1l+Q3LfP5EinkuDH2WBAdXeuIs+4ZW189T/y/jVfJ6TB+0ezbGqQlDsMx8RbWLZ5VNaj3GYQMsJ0ZA4vJZE4vNEGSji9/1HfkxQ45ONJLQ2tqgkjTNO7Fb4seJWbUdc7Uq+AcLDPapoPBsttqtA4IpscfMr221rAMJI1PrsIwQN0RtB4CoLTla176Us7OPyD3C83t+x9DKYEUIa2Hu6yuL5kvNGnqjrPbbeit9gGBu75lEhKEPYkajEb3HB8umUxS4nFDzUnUnMtnT5jNh7nxjKN0weOHnwYg3yq0TjH501F5EEIm0wNOb73Ge+8/YxgiClh4KEixt6ig4dv/sxslwEni4JjPj3cf/BL37r6K9qTif/GJn+VweUQ2mfPu2z8HQKBmvPTyqX+27nNp4iArwyY/jN3+muNj3xHeuPt1/6U3qQrfBW87bN8wSVwXsCqhqS2r1Zq2/TDeq6fk/j0HU+xMgWlvumTbbU4Y7UmzkzExBMimB2x3Zx86zja/JI1dcWU2PRk302G89NqBm6PikNhv9Jvdltc/ejrCHt986Yv5A7/HGRL/sf/xtwNOjtol4TedqVfeOERFUOTu/fjev+g65t/+x34t16vCf++m8/LSa+5+np9dodSE3g7QvZCyrOlMOZLXVeDfPVsBPoDlHNM5qAi4YkfbOKgV0t2XwkOUGg9hTLOMoijQUrL3ENXpNKWoAkRvR9WvJJ25wHqAw0UBdV1ihaEblEetoO5q1yUXg9iBJZ0EDKXeosqxNmS5OGa1cmve8a0jdrsN293ViJCI45S67Vj6osx6vcYYF0R1ZrC/ECPKYhhlXZJNDrGDUIORoA2xPuV6536vF2suLjq0jnnjTTen3nv7irOLd3nzDV+x7RKSSYjSHatLHySlPasnOwwWPGw1CBRdz5jwtY0hTJwAyVCUadoKIdQIf3NDUnc3CUKsp+S7Eik1R0sXJ0QyIoqmLJcHFH5fi+w93noz5PUvds/hnXc/y27rkzYPYy3yAITi+qJkmbn3aLBz6Dsvhx04uNkAJQQnblQUhYdReXhfU2L6ls21O9c0TVFasb7ejZCjyWRCb3BBtT+Hpi08lM9X4Ttn6Fs3e5Rf87qmQwRq7DYPtqy7Xc7UQ92iMB1l/IfAtO8NCMZ4Aysx1qC1vjH1tZYgiDAdiAF2aCyhDrmh4UuqsnOWKb4Q1rY1newxraH1iAwp9YeUWLV2glC2t8hBCThUiN7SdwZfx6Tcb+l7xj1zfb0nCqeESoOfn/vtmjSJyPMc46Ecs8mCpr4RwhBCYRqLluHg4c16nbvA26xHjoFtoKir0V6kyStM1xLFwShxLuixncHSj8VwrTWxDsY53FQOGin6nt3GFWqKomK5mFEUOcV+MOOWSKFHuxRQSKnRQjIoMpqudO9sa7i+dmtQGqUgBK1PmpquQxuDFQbDjVCEUK4LpvyaECYSgUQE6fi
MrVAIJdgPyJg4RgSSXVmM8yNOIqSWKA8r7aVyCa6MnNgXUBZ7rG2Rvru23RcI2XvxNo8gqHv6tiMIgrG7K4TAYkYFQyfuosiLZkTv7PaFg++tN6OI18nJMUpJjO+wP724GiF+k7nbB2Sl6KUknU0xfg6XXUWUJgSD6EUPZZMTyQgdDMIbC5J0QRonY4K+3++RVgx+7ywPj2iqnLavR8E1JRPW6zVRfGMf1NT/7s7VC0GLF+PFeDFejBfjxXgxXowX48V4MV6Mfw/jC6Jz1ZqGTe2gRGcPr0iilEU2Ycgl66ZgMk1ZbVbsfZsuSWfsrq8GDiMX+wt0kNEVPZobrKiwIANJJTyJsKuJo4jA47TzXFDWDbEtUdpl4baXVI0hjKdjxbnv90RhMFY0pFSEQei9rryvFgFS2hEWBg4q4DCsPssVFiEsZdvwzItq3Dm9TV232D6k9r34MA3pBcS9lyGXEtH3TuTBG/auy5wszkjiCOOrd5vNNWkm2eceikXguWKMxNv1fkff77EegCxSg06nbCqB8JLDm/aarnCSxvOJqxbUecmzh/VIxF1v3+Xg4IAsMGweD3Cic4JEksZLbh++DsBuv6GoFfv9tb93Dv7VBx1qICPXro1tq4LPeFL+qy/fQ9BhvFTx8WRBXrZU9Z5X3nBQnqbcUxY1aRpwdemO//6Dh7z08mtcPnbHOWs7JlmGmCTEnmvXXJ5zce4gFgvPEerDkEIqNhv3XNbrFbP5AXfvvIHp3TywVqD7gFBYBxMBbG8JtKLxHS/o2W4KpospkYddzGZz6qah6603lQYdxiTT8EZkQwvXXo/0DZ9LCtq2QiFGSIMMI8IkHbH3aIntBatNxXzmWtVFURECgYrZewhc3/dOnnkwYDQdvQVZt6OpYBIEBGFE0zSjMbWpHAxEeBhrayouzrZM4ntMkqG7IJFMkKqj9VywbPoaUt28H9C7ij7d2PGSpsNaM8o1B5srOlOz61qerp/6e95za3ZIr4/IveBL3Uo+eHJG5SVWwzClLiUHhz1FM+DQe2eemdfsdjcQuN1lz8OVe8aTWcBidpfnx2fe2XJ0pPnTf+Y7+O/+uPOuEjbml37xU+x9p2W36Th7uuVvfv9P8Lv+w68DIIqf8u57n4Vvg0/+0mcARsniszPfJfK+UVdPBb0/z8rj1v/wH/xf+a4/9U0AbNY7Tu+e8O5nHdz3e777B/nO/+YbKKv92D0fxgdvFzx7+El3r6hZHCzHv603OelEIGVOFB2N/1/XlsPjI/6H/+nbAPiuP/pX+O//+D/gf/srfxiA9z94xsbLHf+BP/JxAD49ueT87JIw0nzfX/hxAL71O34zRfmAqce0/8KnnsDvge/8rv+EC88N02rO3lf5/tP/6msBeEc+5OqipG1u6nu/41u+nH/95Am+qcHMGxV//Le+xIPoib+fzocujIYuau8l92Pa9sbMVcsQ03akiVu79vs9YSTYXnsIsAnIsjn5djdWzxFOhrn3aIVqD4Gast9vR5+7JIno2h1CB0jp9pRdvkIpRZa5Cv7JyZzVumCeHJKlrtuz3e7ZbM5IkoTCmx0fH85JMzt69iXpPbbXmouznNNbTqAky1KUqjFGkBde6KO3aOWk3sH5AVVVTZpE41q52+3GTskwogiiUI6w7jCKCcSSvtFceshoetgQVBNCvojV2kFc3/ySjrc/nfPuZ9wzvPtGQhi3ZJOA1ZWX2z+cYdmyvt6PpuHGhh+ST07TjCLP6W03znkHsTJeQtydZ1VVRGk0ctqkVSST1IkL+fZEGk+YzOastmfUXmxkX11i2ND6TlRe5OS5Q0gk3ixb9DGNyZHCoKybs6EXshr844K4Z1eUUN90ZNrGEEWO+G99lx1jSeKURg78I4mwgjBKxs7A1WrtO0UBeCPqvGkIQj36m0kxweKQElIMvk+uK+T/OcI+kyQeeUoOMtx7yesbZEzdVaNZtturJaINRuiw0pK+ax0SxXdWpHD8KR0MDuUBehI6PjPDOYXsN1um0ynGDobEjvsiniviO5sajRlEmZKYLujojUNiuOuxbDZXdJ7vFKkA0Rcgo7Fb19WCwrREYUzu4fxVtUeL2cgfb9sGYzr69saYFirW66f0yj7nIRegdUjt3z0he5SN6CuBlQO/McSanq5raP37LqKISMejT2mgIgKhMHRjp2OapGihWc4O6H3Xu24qhA3Jr937kaQRpnceWoM3lZQNdW3QQpH67l+P66oPEM4oCam7EinUKESjlHI8vt5JsANe6r8du7DWWpI0om3b0VC6qAsa39kWfs0p6oIwihCeX32xvqTvLLPZ7MbcXCrqukYPpttdQxhq6r5D+rsQT0Ns56TsVeghjf58K985GyCnSonRMFxrSdnVVH0zdkD3deHiZ79OWRkQyoReGC48r3Y6nbMraoypmXg0gkLQ1CWNt1iI4xhjarbbfPQNLHoH66zrG2uC3W5LEAQj31AJSRjGTjhpMvjoGnrb0vfRjRT78HL+MkPcTMj/78ZLryzsf/0nfyMAvQiIo9QFnH5z6GxDS4dtuxuDsLpBCcXUm7T1tqHtK6RWVKU38DWd83aSciTZOVduNarbZOmc/a5mcZhy8dgtLP/7X/iHzBYRUhmKyiuW9dKRyb0am+ksSjki941aYOAhQHLkYQ1+HaOjuG0Iw4DVec23ffNvAOB3fP2Xsl2XpMkhUnmDQmqCJCNqBnx8Td7ukWFE4Fux52eXRJEgizXrtU/wZEeYyFEsQ4qISTTHmh4P42e3uSKbJuz33phPBARCUuYFqV9c811Fuphh+wrNsMkodNCCh0XkdcPBcUZfKK58IDxfTCmL1kEvPfwlVCFpotl6mFEcx0RRwmqz5vYtlyQVRYExLW3TYHwL93g5o8zXbDyc6fb9e3RG0TY911fuJetomGVziqIi8yqRcRwyn895eubwuVhJ0EGg1Ziwb69XTCYTpw7mg1VjLFXdjIF+ZyrOLq6YpAcY465ltjig6zqebZqRa9M2PWEcjcIYA2dAhwHT+X1/7BYhLLbvbjDsvUVLNS40SiiEcOqYQ+IrtYMTYMVoGmpsQV21I4/A9hodRAhpaQYfGGVRYULX6BGv3po90NP7TV6qHqEqWjMZN+OmqhHSEkXRqJCodei4EuXgxTFDyBat49FTKog6pNRYa0aib9NUWMF43kKFzkw1CkaRkt5GRFE0bp4Kx9Nw3nDu8Ukp6ds9rbHj5x4+PGN1lXN17ZKPqrpyfjFtwmbtIE4vv3KKVi1JJEm80fef+Z//Hr/c+H2/zyVJTVdR1g1ta/iHf/8Xf9nPA3zTt34tJz4Q/vN/7vs/72d+9zf+Kv7WD3zi3/r/b/wm93thEFPXLf/3X//xX/7c/sDXUpRbsJK//dd/7t/6+zf//t8EwF/7v37s837/N/22L2e2cBvM3/r+f8pv/91fQW/kyL9p2pwf/juf/1q/+uvfAtxeV1c9OSzUCAAAIABJREFUQQj/4h9+dvz7l/662zcqXDrgZ37kwec9ztd9w5tcX7k5NZ8vSeKM7c692//iRz/1eb/zlb
/xdaRihCZJKSirFqmGIlcAVjrfIl+kCIOEKA6cHouH14ZhiBQB6912PI7jwt6o5yWTBCnV6BnUtk6MII6CsRgQRs7IXamAyosyxEkAShH5opcLjBrm09noDVfXNVmqQPTs1i4wzWaaIKoptu65HJ9ErNdboigeeZJF2blAQ2ref+QSzJNbt52R8AAnLK85OEoJg3j00AsDyIsNP/r9N8/iP/6OX0VdaqxXWqyqGLN7jfX2DBm65OqlLyo4ezBlf70gmLoCQRgbJmnIT/2we9dkkvCxL7/Do7ev2Fy79+rO6yEffCrn5Y8tmS88J3h/Td83Y9ACgHCc47LwsC6JD8QNPn6l6zqSaTzumdIIUAYdCCahhwV1Ia/c/Wp+5l/+c77yK78KAKtCfuKn/hrbdTI+8+NTBVazXw/PoSSbGprrgsor0C2OS37+J8948yscfzOetKAtlphf/PH3APjY191BSM8NYyiqBXRe1Mpdi2W3c95mzxeUwHOtlYfr2tbTHjw1QUa0piMIxWgabIwTFxjmYpyE9LalKPaj51JvLEoFXrXwuXhD23F9lVLSdR2BnIAYzGRdsIxVPiiHsnLJuByO3btjY2+uRSlF17QOliZvoF8857cUBIq2rglDPSY7fWccRNUqppkrNrRtS2+r0Sg6iefs9+vRPNfdA4OQlqqqhtfYKeTpcBQ7kcLtoRjGYFnQOX5Y34yxYBAoqqoZ17u2awhVhtZ65A1JKTFt4wrR/nrmiynCSq7OXaEoTWdY25IX+5Gjd3hwi2fPzj/0vKfTqVMGHZRPTUkUJSgZjVyfvFizrwpAMvEiPNt94UVvPDy87xBaUOQ1UeiNk3uBpUYGknx/ozyMMjT+eqXyJtOBHBsQUrr/k1I+p6gXUhT7G4+3KKKt3T0YxH8UMUJaisKtnSpQpOmEtr0R9XK8czly1dwPQtt27poBIbSLDTykEGA2T+k7VzAb4oS6rdz19MNziR0NR9YjxyyJp3Rdj9IG4cWVuqakrgonXgYIa8A2aAl2UGTNS46ODgi0HhPmMi8+tEYJIahKw3SREnnuZF26mAokTTcYkr/O93/fD/5La+1X8nnGF0TnqsfSBl5dp60o8rUj4/mzq8sdSksSPaX0SZHUChnChd8s68oySWcslinad3GKokHJFGMKFl5prW81UhlCT6R+//wppydzpjF81qvi5P0Fh9ExeV2jlFcUiSxNW42VARVKqqoijiVSDguiU+2ZTCajE/gQSI8crE4jhMbIGu07Qoe3U3oDOuh5eu6lUFNLt78kG/gyRyfcSV5mtyvY7LxcchRhQkETh2RTd81V7fC/88xVr8/Pz5HhiiSeYwYeT5twkB1x4CvDCJe0Pnj/barW3YPl/UOqeg9YDo7d5x4/uiRQkvncTfrD7JjrqxrihtBjYSsrebo946s+/hWcPXU8F03EZm8Qvgs4WWTMpinQU3kRkeOjGeeXT1kczyjywUCz4vjOKSfqTfes3n+HJA05XCyxnVdHa3oC1SMjS9G6AEFlB3TxnNuvuY7EvtzS5YqqKfDFUILsGBlNWeflqDZ1evuIZ2cXlO1QyUq4d3pEU5VcPnadVdvHVHkFl++zLt8GIM1SNhc9qb/n1+s18+Uxt2/f44P3HM/kzu2XETJGajVWhHprQWeYzlebVEfXN8TLGOW5InXe0zYlccwo4RqmkiycUOzd3Or6GlqBUC1Yz2IXhmJXoWRK6xcyqRuUvukW7vcFYagQRtL4qlScRFgL5XY7blatcaaYU28B4LqyAdZ2zOZedc8Yh0UXgtIn7ZGe02PovcRx27YEYUpV1wwvdxqBpBuTuzCMXSKqFZmXHDZti9RTmqbkNHI8uldefR1j9aiYeHn1jDhxBszPK1fV5Q4p4Jbv5vzKv/rrsJiRKL7LL0G0PHt6QXXiu0znT0m0JUmm/L5vcklL3ay5e/clqmKo+G2RUrLdlry/c0Hvt3zbr8d2TupRi8EQMaCLOr7597qOjZQSKy2amM4LTwS0SNXzrb//V488DK2W1MVmNFY11pKXmjRN+dZv/Q3+nrecnB5xcnLM5hV3z//QH/mttJ1Bi0P/ezUml2yXF7TPmaImQURRrlG+Wp9mc772t32Eqd9MtIixQY+OeqqZ7w6qDIShMjW/81vcOeyuHmKzG+J221i+7ne+hUUzBBqtaYiCFK0U84Xyx+ro2mti/+79lv/oqymrHhltCGvPaelrTBohG0vs58euqUhTOYpeBLrDdJJb8xMq34Go6oJ5EtN3Ea3y6wQdWlheves4O11X8fTsimxyyHXv3u0ut8igIvSbc162TKYBfd8yiYfEKaEsWury5nO601T7BrQ367QlRjRcPs7pvGyvChVRpDFNx3zm5vB6VdG0O3rrnt3Pf/qaw+MDpvGSZj3weDRn6zOCdErvRXyeXV6jA4i1ex+laHn/sxt2u/fHQPjOdInUQxfCjYvPXXF2/nBoJPErv+z3clXWVHlNlrnCyX51h/31AfH8fZbeliPf7ZHdki//ave9f/aPn9LnKYvDC84eu/OcJYeooELWGu2Fi5TsUXJC5a0n0iTBtAVROKPs3e/1xoLQhFqNsuBaQWg1re/sGiRJIDBVjPEIlDQ5JU40J6cH/Ppf+/UA/MAP/Q3CMOCtt1ywdXF2zdmDgN5Kkrnnx80hmwdsbEd34Yuw3WAa74M0vaWlIVbP2acoCwiE7bFD1YeWui7p+ht7ljAKEb0k8AG0jhQGQxSFY7LRttB1AuXX174rfIKSYjzvK9CSrq1HBExV9TRNRZQkYzE3jELi2BVmhGd4pNME01fjPAiDiChKqetyVEcTQqCFQArt9jKcwI6UEtPfqJ2apmcyidh6HlEYaiYHKZv9lnQQlOl7lA6pao+O8Mq9XWNHHpiSlrpsCSJNa7xwkW2o6gLwCWfVg7AuyWNA+bhOspAC6eOoJEkxnaXwqqxaK7SCdJaO6/4knXJ1dc0ki9HpzR6WpIyG01IJoMWiiQaxKiLiZEJRbm6SvnCKEpK7X+Y47KuzSyaT1IsnuPl6//593nvnc2itmM8GNeKWotwwm7p1WKkAROsK8PXAvQvRUcZqtaKq3BrbSEnTGU5uO+5rbfYI2XHrYEbgO0Kb3RolI7JpyHbnhWHSJVKXNP6c2i5kvdmgZDcaUSvdu2oG8VjcMB10XThaBwXaEgYhUThju3Hvre1rFwt44YimKlguAkzbUfr4TYoJOq2QBKPKYNd1TOJstC7I8w06nNHSE/sCepxBVVqU1KMgxjxdst1dYf05md4iwhYpQ1q//4aB8WbeEhn4TnlbEyca4RUT2wbiJKano6m8Lc9yipEdu/01SezWt2R+C9N3I49XSKdqqJrpKITRdT1hOChI+r39/4VU9QXRubr38sz+0e9yC+TB8jbvf/A5VFCO0A9sQKBmSCnxSucuSOkitl49CFuRThRRqPHG0qTJEms7jOk5OnKTtdzVJPEMfGAjesPyIKOpr/n5X3QH/1+++3tZzkOqfiwg0PQVgY6w3SC7bmiahjS9gWd1XYcgHNu2/gcQgrGV3
/c96SRme9bwu36b69b9oT/0cR699xgjGibZoFzlJp01LnnsjSabLDm7eEY6cRPz6NYxptfUFWiv/CXtgs72NF5Jap9fgegIZczy0G3ObW4dUXsgKK43vPXWW2z3ORfnnkh9fMx0oumNHCu7UkFZ7YkDD5U0ysmNW0b53SfPzsnzHcuD+U2b1UasLtfoyAdbvZMbjYMM4YmqQSi43l+TzSao0WdKU7cNUexm8dXVBUfHB+T7GuvhL7ePj5BBSNPVvP/4ofuegJNbd6g9/C7f7lh3W78RDTC2nrwq0UGA1O73wjDl+uIp0m+MQeDkk6dJROyD3M1qQ6wSpnHAzCe+GElb9xjf6TmcTyjzkkk8RXkJ7svLFYiAZDol8OTRfVFRVQ1LD0tMprdpbUNR9ESDbH51xXZ7zsHBnbFrGwaCtjGjL4QkG4P9Xg6boyQMJgQivfFYE06e2WtzMJ8tfavbMmxoUjoJUyEUxgeG+/0ea+0YKLat2yScOuaNf5GUEiEhm/jfsxFN297Ak2zAar3HGON8QoCrq2vKshwDjSiK0VqTxsno2RMHIZNZgtKWboBd1A1Xl5sbrybRobWgrBXGk63ruubP/dk/zYvxYrwYL8b/n8aX/uY7DsZWubWsbRy0MEki9gOCINBYY7FWjLBAFUiqqhyLuoOIwGABM/xf27aEYTwmfLZ3VIbBwkXrkN6AsDcEfhVITF/T98aLXzgoljFm3Ott3zNJp1RVMyJJqqpy8v/yZv2WUpHGNwptVVUxmUwdJHRQNawrpJTUVTsmhlGUEKhy/D3T9mBgvyvGbsx2U/Dyq69jWI/+WO6aK6wvek2ygO16TZpOyT2ipqk7rDFkWepEHQBrnBjZdOqSJiNd18xiWC5dwa6tG7R2BYIRoVQ1aC0Z7DEDnTCdRWx3m3HPnM5ClrcWnJ+fc3rqkrcwSNFaj/YFIjBk08RBFX0siA2ZTid0pqSqfCLRVqAMyotVdaZEqJ6u69hvXYHp7p2X2eU56/UVge+yCwKePb0Y4cTGGF8o6yhKF1MWRUVThmMcMZ/O2BdXtGZNmbvvJUmCZEJvK7KpO/Z+V3D//n2WC/esnp09YrupWO9aTm57mC4SqQSryxwp3PcODmLKMscOsustCEKOjo5GJeC8WIMsaWtN6WGATd2hNCM6KY4maBX7ueUVWbWLPaS0QwPR2w5MxmMb03EwmWOtIvAQ4Ko0aO0UQ2OPUMqSl/ihH/gHv2zn6oWgxYvxYrwYL8aL8WK8GC/Gi/FivBgvxr+H8QUBCxTo0bj1yZO3iSNNlt0euz3WGuJYUO0FRXnDb1rOJtw+dh2pps25vrwmUXOW97wIhErIizVdVxH5lt78dsJqdYlU7jiny4/w+MlDvvRX3ONTn3Rdm/1+z8nRkjLv6Qb5S62QCloPrbPSopSgbWue7/4NsqqDUWNveo/vH/wdXEWooebZysFRglmKmk45Os7Ybb3oRHXN+48+i5SuYqJkyEkQ8/LHPsqnPue4EeePPsNsvqTv4WTpuh+b7Z4gUCjvH5WFhxRFwcUqp/AY04M0JpgorHGp+8n9l/jg7BHTbMnpyw53XpQb8p3z7VC+2rNYzJhl2ehw2/eCJMjQhKMnUTw5IJnEFHWJ9RWa7XZFkEE69RAAldJbTY++cXbXIYvTu3zu3V8g8fjjg+Vt9vU1Ky/zHCUzWmFZnE653rgqzoPLB/TCiYi0HlM+nc754Ok7o9v90eKYhU0QpufysYNwZVnG/aNTtkWF53iyW10yiXqymcc2C8Vqfc0235F7XLYJW1Ta83AnWHr8dt9Kpx7r29hlU9ALSaMn2MbDXzKBlJZGVez3uZ8rPXEcsN05LkNePKbpWupScn7uMNyzucDSYvInYxcsiuYEUUyYuvOcL15GaIFQIWLAuRtFXnTk+2djl6huKqIkHUUWmqah7goCPRmrhZvtFRdXF/RGsLpy3zs4OKJtu7GD2bYt8/mc2Wx6I0vaVOx3O9q2pfHGvqbVSAX9IA8tE7rWjhVXACvd+6N8S7ppGoerF3KsHkoEWOXw54McdZoghR67jEL2dF1FKIOxwtZ5SM0f/o7vGPmFfe+gJkP3MIqVfz8hjmbjZ6IoQinB1BO+J1nK6voZe+8N57y5nlLXBdnEVTCTYEZRrNGiGk2SQy0JVcR+5wUR4jn37gWcPbsi9pC8q9WWr/qqr+Inf/JHOTp0Fb26smx3FfOFg5CdX2yRWtGJlthD8JRS5FXNJE64feTOPW9bVvtzkgESpiUiDgis5dkT96593//5T/nGb/pqTo6PR3+TWZZR7ra0Hpo1O14wWyx4793Ho+BDnIRonfHe+w+x2lU1911PGk5JPfRke53TtAVhrAn8/Wy7jkh1VHU3wjhRmjzfMcnc9/b7HUIIpsslnbef2Jc7ZDTFNj22d+dp0QShoPc+KVl6yH5foGTMfO4hlFyDcDypaeb2hzjWFEUx8lV2uw3T6ZK8KEfj97rrMG2Ab1Jj2oLBVmOAZ2WTBRJBENxI+XZd5bsCHkqUBGx2lwizJJt6/qHnQiopx+55lqT0RlL5e3frcEZX1tRlPQoU1UVOuSmZhCl3jp3M/9HpksXymEnmuH6n917i8PQ+q+s9e8+B1NLyQ3/3B/ne7/1LDOPv//BP0XYdJ7cX/rlc8sM/9CN84hOfGAUCPnhwwfTwgkkWIfFzsb3EWDNWyZNU868+ccXB8ojc20gc3ZqwXq/pbMxHPub2ot3+it4wQkYdd8zS1D3SQ1TDMMIK3/V+roshCBhqv3EYYPqKLJuyWbn19Mt/xW9hObtFls1GT7nv++t/lldevcuzp27dePa44Pg2tM2a8yfuWb310deJF2t21xWP3nGfWy4Uv/iv3ufVjzrp+eVtC6SU9YpP/TMHbX/ra05G89/WS0aHoYP6DbwoNx/T8Z0CsEJQ1zVC3aBu8rz01jDP87IVcRzzC//4XTdfipbaNjfQOmFp64ooEGjhrrfcOw5RGOmRg+S4zgrj19cwCqm8OMOAphnWwKa5EREo6xrTdkjvs2U6i2k7mqYb11NrJWEU0osO4bkvXefg4IEXRDCtpalLpGA0wu77HoTzQRqese0VXXvDRU+SCU3dsVnvkP5Yk8kE0wkm2WTsukmlSCZ6fObWdCRJxLK9gcBm2X1koNltbiE8X3y/31PnhtNT9w7FieLwdMkkmXHlbV2u11dcXqzYNw3Kc8qstsgwZOfv4Wp9RRg5/8YPnjieeRzcQD57M/i3JWgt2GzcviNQHB3P/TN39+DRdUX16QcIAQ8euXm9XB4QhCFX3oohTWM6GpRV43tblSVxUtC0e+LIxYcHhzP2mzXKz814MqEuLQrNYu7WwItnJWG0IE1iIg/dC4OMttXjfdntV+T5jjRNefDACdq8+sobSGXJ9+4+LRYZm41gMXtzjAmapiNQPWVZEioXl8h2z+aiZLdya8Sd01c5WYCIei4u3HuVTZZ0XYedB9y54/jpV6tHhHFA6Xmn16s989kp+/xq7FjeXhyTZrC+3jE4hkipmEwmIwyxrmuaSrDdCprO7X2n
B1OCLiPUC/AoHzXv6Huws4X/Xsl237JYpgjlrnm29Py2foLxXeLnRbI+3/iCSK6wlpnnq0gp2W1zMCHaq3E0poY+5ngxG1t5dV1SNzmPHrvJm00PeetLPk7b7bl45siHeb7jYHnMZlePwdX89mscHces1y5gP9+cc3Z9QWEPOF+7gCiaZTR02DDCDF4/MnBKUoNBnwChFd1z/hxSKhrTIgON+BDRj+fa9IFLyHTIzr+I103PWQ513JPng0fInFc++hVceCLlPt/yrz/3czw4f8DSO0Q/evyEaV2QTSPKhx4uVTmI3cQ7Tj98/Gm6PidUR+SND8C6nKYqqT32N00ymrYg2F6OaoXTWUrXCoToR++Np9sVZZmzXHgPoN4iSUl0yiJ3xy7Kkl5IZ+Lr4YNVnSMjxezQLQQXlyVNV5JMFVPv1XJ1eU00kUxnC6Q2/hlfMV+kXJ67l+zq6Y5En/Bzn3yXxEPG7tw9ZDaPnbHbxE2OOIw4nE/ZeGWZuspJpkvqcsurb7gXWArLLJuSTRc8eeYWspOjU6Kopy4Gzo7iKD1guZhxvXFzQ0Zu8z+9e5e1ny9939GYdiTdX+wbFvMTmrDn3Ydus3z11Vc5f3ZOEiZI6167+SyjrHNU5KGmXURrNWEGx9IFNqdHd9A41/snnvdViSvSxQlF7V78d3/pn7C+7jg4PuL9By55dGR9S5olpJ67tFgsaE1LL939vVg/45Of/HmSZDomDUEQE8UZSgakM3cOq92V83bxIOM4DinaPdtnVyP+P4oiBM7UMIw9edwCwqJG1TLLfDmlqXOsX9yFSjzB1h0nyzKvHNaPMMSuaYmiBK3lCH+x1hGubwIGw2QyQSocFBFovBfKtt4yPZj6zzk/jqZ0f9uvN8xmM+Iwo9hu/LEN21VLHGXkiX9vryKHJfeO7cLOULajys+ZJt7/x2qSSUzf1SwzN8+Wi4zWdMQDH2AyYZpECLXB+uewbd7nwdOn3Lr38gilmSYHFOXTkWe3ODjCEnK1PuPw2D3Pxw9XRGbCNIoIPF480ZI3Xr3Dzq+BJ/dfYdfsuH94yL1jt3l8H/+UL3nrhKPDGeeXbk6d3Dqk2yUon9gUleUwS7n3ZW+ivBhIZRqKRrFaS1TsiikfWSa8++DRyIHIZilZdpd8X9D6c8/imN22RlhLNnNBfBhHpBM1Kt4FPpg5mEzJfRFomh1Q1wGL4yW73G3GYRhyeXnJdO7VQs0lYaoJwwqZeChNn9J3gYNnt24N2pQNMggIPcdrPr9Nsa8JdQReCdR0jtg8eNMFOkXIjiReIHCfiRNFsc/pbYXtB3+cGIGkM26dquqaNJnS1yUb74UDGq0zeh2iEvceres9q9WawAdmD36+QoqO115/hUniAqJXXvkIH3vry3ntpa9Apu45HN865Hv+0l/kb//l7wHgv/2T38WD9S8hVYD23ky3JhnX2/d5fpTtJZebKx4/cdfysbde4fjoHsvlu/wX//l3AvDTP/Oj/MzP/jjvvPsZbh27ZxXH0HYC27l5J4XgIx+9zduffYISbt2fTyeEseWzn7qmzr0SYCfJkgjt98JNviYMY+jFSLJfr7fO41HrMelM0wzT9wR64MeAkAFNW47B5PHRbZqq5vTkNj/yY3/XX2HBO597iuhfAuDu/QChd67w5426P/Vz7/HFX3EHSUkUDF6U/v54WNCJWGJExXI2BZ753zvwkLWI3K+VSjnIfOyLbGEYgjDEkRzXRR0GpPGEIAhGXo3OQmwvxqRFC0kUhjeqlcBidogxLb0dvKkMUqcoGYzeaQqNMV6NVQymuk7AY9gfoScOXaFwiEHc8bxBrOdBd8oQBeFY5KrrGh0lZIkaubd1U6JQBFrdiHMELiZqm4EjOKPx/Ksh8AZBHKcIYWn856qyoezK5/xANUVRkSQTav+Z3bZEKElVt2OhXSnFM9mMnFaQLuFtLGbwqxJbTNcjZEc78ul6LIZNNey1OYdHM3bbx84bDDB9TWsMtB1p6mGPGnohaPxelEaK+XyKtRlXV94Y3kLbtIRhiPbBf2cDyjInmc78OQk2ReHOy8Pd6qZEK2eOvTlzx3p6fk1rujExDYKAKApQMuHt9879ORmkcMqGU895TuKM6/UzFpnb59quo22cce9sepNghrHzLQsGAavaEgYxn/xFlygK1dB1LbaXaO2+d3b+kK7fEfikfjavKLYd9a0FUXQjuHJ86xaB1OM8v3vf8lM/9U9YTF0RqGkVoi9pqy114ROgouLiYkWYKDb7X/BTNkKHavRzm6SnGLtHqprrtUtodvkEvQpRgUUJt2dmkwhrFQNTYT6PqaOO5cGU1vh9Ry45vFWhA3j2xM2pg+VdEPXIx++aI+JMomXGZu3W7zfeeIPPfuZfjhBJANFN+XeNL4jkSigxJiNhuODwcEk6uTFE3O/3NE3L5DCj9OTY690FrdmOaogXj59wtX2GFQrlCfTWWmrTgWjHxe6Tn/s0YXBj8rutKtLDjLcfPqDzJHsZRBjbYzpFErmApGn2hKF+zlDPQB8gxc0t7FqnxGKtHTcPpRQIMTrSu78pkkDw7KELbB49vGC7v8DYaMzMt1tDqF9i7rs9tn/K4rU5TbunqFwWvlxkIFpWF+d0XogijhOiHtbeSDWbTVFqwX5rCJIBm1ojrcL47xR9w2J+QFntMV4J5fzZhsXhLbbb1ag2lyZLZCi43q/8PWjRasJyfsrqzPGd+t4wm82RCHZbl7SU1YaDZcZTd7nsdjt2+ZbuWUleuOAqDudE4ctM04Sd/79koYn0hNfuu+p9EMImv+b0dELpyxWPHjzm9p0jptMJ50/d4vPppx9wcDgh9QlmVfZ04opybzhaeLNqoTFFRVsXvHLqkoiz80tEEzP1/LFnZ4/JZhPqsiDyCo0qjolS2K02dJW7f0kacrpcemlcqKqC8/NHiEnOR994BXCiZXdPb4OSrLfu+TWqolA3G0yYpjx5ckaUwEsvuwVpV/Rs13tMX3HduWTqWL7EOw/OsdJd32pzxid++mcRQtwYYtoKSNnnG46OvAS3DehaQerlcIMg4PDwENPpkTNnbU9dF7SdZZ+v/bNJ6BrjBU6cKpPWmkk6ZeoNptvW0DQtaTJH4N7RW7ePUYGg8fNstXLnH6fJiKHvjXaVXjmYdYc0VUXTdB9S8FHK0ttm3HQA+rYbjxPHU66urgiiGO2DskEkQ6GwgyRvFNGLgMZX4ZdH9+n7jqItiLJBvGZB0zSue7t286wsDKavSb3c9iStaduaJA3ZbN3ENsYwX2QcLDLS9OZ+pmlKEruF+NHDpzytFZNZyGAmGU1fZ7W7ZjY7xvj15SrPuXX6JlLX43GSKOFOd4JS7h1dLCdMJye8/+AZ92+5ZOfOvS/i7PwJu8qrd3UBn/rsY8xBj/CG3X/iv/xNTG6nQMabd1xiqOuSw9t32XqV0Xq3petqDpYzrJ9T0fwuV/srFr/6I8hBln/fcThLqWsv9NFsePmV1/nMpx6yXj/x9yrm6DADqUb54jSb8O5713zJF78BwGazcWuSEbzxxhcB8Pjx5+i
bHQioa/fOnNx6nTTRBH7j3xctZVljjCG/9onFrduUdkOgG9ZeYjyMDaJJWV26wHg6yZikS+pmQ+c7rQhLVeZkPkDZ7q5Ruqeu61Gspqt6wliy3eTje7tYJpRljfWB6vUq5+BwgY0j/PSkKSuK+pJm59YcAGsMb7z+UT720kcAuHf/yzi5e5t/9BP/hFD+AvTAAAAgAElEQVS75/k7/oNvYFtd82S3pfNV8K0p+Jt/72+gfbf5V3zxR3n4+BHWSEzn3tmqFDy7fIfnR9fDfHFENyimtTU/8o9/kB/70Z/ga77mNwPw8Y9/Pb/m13wDb7/7k/z0z/4jAN7+3AMuz8+Zei6FRHF4K+T993tKTwMp9oZ7ry74pZ87J1IuoFxkCb1pbpQkZ5peSKYiwPReoTF2ycH/w96bxeqapfddv/XO0zfuee9z9hn6VHfX0LN7djodTwkYO4SYdBLbErnhikQxmIgghdwgJISEkAi5QEAcLgBFAgd5IhgUG2OcHuweXHOdU3WmPe9vfOdpvVys9b27DU4kkC/64qybqtr1Te+an+f5D0Vd4WoVWtt0SPKERovOtJWkbWuC0Kcq1Aza393jydNHXF1d8fob3wZgujXmz/ypf5NvffefAnAd/yGyUJzU1z6uftPy6gmnH5R88au3uTr/LgB5rM6XPFHjMh0/IM2vMcwbKfa2gtAbI+mYatXNpqkIAkX4B5V0UzzUtg+cqqJECIOqLHFsLdDTNdi2c1O18ZRtQJosb8aqVdLihlbcrRtlRNy2ab8vDocOphmRFzHjkZov63WC73t95appK/1bjO8zGpaYlqktYnTySAgc2+6DCEfYeF5AWzdYOqnmhEOyLFFc7e4m8O1aE9/Tth1NC7gYhnFTwesqbNvtLUhAGQubptnLvEsJw6Had237BuWTZDFZnvbICse1qdtwI3mB4zhkRYlpCiodgNW1qgQq2rn+7b4NtFxdqTuJYQjqVtDUHaG7ES7pKPMcyzSI11o8yuswDAgDdc6ldUPdKbGOwUTfDUuJ4dQq+c5GxdBE4FKXeq1VHablafVGrZSJibpaSVxzY/nTYAsHIW7QCSr4L2l0oB0EPk3TYdkWiU4ILBcrgtAlz3XlERBWCXbOKlbfN4xGLFbXCMvE1rz2tm3J6+qm0mkHdB00MsdxteiFlLS1wcVa7aWjpMW1bN5/8rZKTgF+YHB+GeN5HsORSoAIkfLyJ14l9LQdxUoFy0d7+xSZRsosM9zQpKgTLFeNVRLXCAuKQp8xVYntSqqsQuc1sNyaulJWMjtT1S/zRaz61NBqof6Q9XpFGIaUWhwnrSuObu9SVmtsTz3ffJXQNoJIB6GWU1OVNq1Rsr+vklznZ5fs7R5jmQ6J3idW85tkxR/XXnCuXrQX7UV70V60F+1Fe9FetBftRXvR/gTaD0Tlqm0bnl2+B6jIuW0sZNXhazhT02aUVcI8nmPZKjMQhj5N4tKUGw5Uyyz+ANmVONoLI3DHZOtnKkOi+UVCWJSVT6erW3mbkqQzyrrj7FwrpgmJJSwcQ9AWGxigRdvccKdkayBagRA3ajpdJxDaP6XT3IKu69gIfYLyjxDCYBAazK5UyfHk6Rmj7Yy2FqQ68yisknUKy3jjnwCB7dPIlvFEw5Ck5of4U+p6ofvPxTRtZKNe01GTpQsMw0UrcmOFNVlRcHCooDWr1YrT6znDSdRXe5xaUCQZkTui1pkW2aRa7nRjomZhAGfX7/YZDMt0WSxLDASOrgR6jk+yzCm1ul1dpfiux2y+5vBQYX2NziWNU3a2h0hdDVysZsSrGXtbCgvvetAhmEz3yE4VHtgPwbI7iixmqqWDw2HIcBQxm6sKm6hzqmZFNBiR6Mx8U3TYlkHoO2QaSyzrjK17R1S6IrW1GyKlZL26YjJW1Z9svkRKELbFcKyy2aPBkOnkgPmVqrjt7x/RdQFVVdHoeWDZHrITWK6Dr9Xs8iJmOB6z0DwCo0txfUlZNZxoj666rrGtCD8aEoUbb6g5VZUwHqt5vsgLfuTHf0gp5+jK7nIRU5Q1HZN+fuZZSzAd9Ko4q1Wqs0OSSht/+oHN1dUVtuUSRerzLy5jhsMh44EaqyRJyPIa0ZZYGqecJgVIwXgQ0jSbz2+QsqXS/BHLUPCYvEh7f46OWkMKVVsvEwzDwDQVlwVU9k4IgeeGDIdm/zdD3HgbdV3H4eEtXNPAsG+kkQFu7x18X4bWoJU1w4nKtAohMIVF00Rs1mia5AwGI4bD8Q2kIi45OXveK1d6nkdVmRzs79zAeWQOUmB0AldbODiWUDDdQM39l259mM6ReL6FZWgZdH+LIPCwtDeJmosN6/WSttMQBBzyogIkQmeSD3YFVVUwGK65PlH75wdnzxGWyeiWVqAMBvzpo88Tz865vlB99cnPvMYHT57QFga7t1XVJJ3HrPOWgz211trpjLZuSNI1ta4SDw2HdZHgOg3xtYJnDKIJW7tjBr7KnG9tD4mThsEDH2+kKlBxXNKZDabl9aaeXSe4dzgl0NyJq6tLDAOm+wdklRrj8QBcsyHNMj72qrJjaBpYrJ4SBDpD3L2CMDokBk8+UJnV8dQhy10Gg12ePld/M+xLilRwrH3nhFmAtEBss9Scx8Od26RZS5qq9X9wdMzt2/c4eX7K5UxlvI8O71CkhTJ5NTdnQYfn20S68nn3/jaPPzhjMTvvx7MqGu7d/giv/dBXCT21b3zhh77C7YOXWGgecVG3nFw85j/+T/8eP/Hjfw6An/yZf5XVrMAJPcaGWmt5kpEsz/ja1/6a6pO2Jk2V7PuGm5ZcPes9fTYtzypMt0VqBTVMn6zo+PBHP86DV1Sl/OGT98jSNWEw4ce++m8AsJ79GvfvlpxeqErYO+88JPAjwtCj1hCgJFZQpTCAeKn2M9e2SdKkX4dh6NJUJbKrcF21/gaujxQQtFWPQqnrCnvo9Dw3xzGxbIM8qbhzS0H+Do/2efzBO1ycnZGu1Vz8b//r/4nd7dd47+/+NgD3X3nA22+9y2hIX+35zOc+xK/88nf56Mc+y/0Haj97/Zvqe8pc9ct6XtCQ0pDddJ5sWK1nDAdjlgtVqXZcl7ope/SKY/tcX52yv7fX/3bTVj5sSZLQuRsuliRNbrx1TNNWvkx11X9dWytPrQ3sWRgd0SDQ/n/6s00T01BVWKTuYz+gbbse9mVZAyzLoutED73e8P481+3pEhYGXSux9B7o+h55mtJ1Xe8VJYQgiHyKoqCs9fofDKiaira92XObWiornQ0SyJDaLFp8nyx4R12XPbqgk6bi6zgWpqYT2LaJYYaA3z9zniWIzumroelihet5tHWDq7lavj9Q5rlWi6crakpeu8W3bwyRy1WCaXjEGr7oui6iszDFDceyKTuE6Ej0mZZlNfE67/1M1fuU6mErJY6rPTLrQvtKaVqJ6+F6Fuv1ulfh9XwXUyh+2wbqJMRG2VFXcS2f5XKN43YEnoYTForDVjUSQ0Oag9CmriSlRom4fkjbZICJ76lKUl03ICSuG/SaAMiGosr6qmpWZTiOR9XkfRXTcyd0Xc
l4MtTzriUrCqUQqO90WVVTi4Rn58+xNTfcxKdpS4ZDdQ+bL67xXYs7d26zWMz0a0KapqLt6KGmwmlIlilNuVECrvDLkLKEpV7rt46H5OU1htzl5FTfmY0KKSWOrSrLyVpSNxUGdT/voijn8cOneK6Pqb+vTmJMq2NxsdJzw6LtBnTEXHknqptaEyFNHDtke1vt3/v76o79z2s/EMGVMAShhr8ZhoWQBk0jqQt16Z1MR9SlieWOeiOzNKsJorCX9syqCtfawjIDVms1cEUjoWtZxUsCf+PHU9PUAtO64TYZZquCj0INkmWpkrZhqUULMBxvkWUJjcYDm51FJ0wMYfalYMtSJsgb6CAo0YIO0RO5m1oqQqlt90HEN775kJ/52Zc4e3YjjiFkS1o+wTVuHLfj9RLD8ChstRmkcYLsKgyzxbI07GG5oq4L9nbUZaQqOra3jkiSNWjjTZMRNQlnz1U/+a7H1mTAer2m0tCBpmlp6oQsl71vQZZLHMeh7B2wA9JkqWBe9rB/3vUypm4ygo1ogOtSFgWOqw6rwBxTNgkPHjxAthsJ9w8wjY7LSxtL8yL2do5xrBnb+xqOYhjMrlPyMmFnRy2W2XXK7PKaeJ3feLwc3uIsWZLmurTvuHgccX1xzkhDW4bjAMdycEyPUEO2dia3qcuUzbKI3CGrOObo4D7Xmr9VScULGoT0sIciiZlXFmmhYazFmgaBP5pQVBoCVBbUjSIo1xqS5nke85Oz3ngvt0wcS5kdbrw3ptMR63VFWS1JKrVJVd0O773/CE9zN9rSx46XXFxcMBwOdb9UNG1GGHo9iT8aR6TxFWiombBzPM9mGS97L5qmirBFyO7WPgstuOI5LXRLnj59DMD29ja+b9C2SxoNVTStluFgSts97423RRdguRa25hZFkYVtCRzXxtAbW57n5HlJ6KtAw7Ud6rplOp0qnhf0ghwbHy01FxM8z+1lX7tW4rqOEgTRsKtYXxwenz7vbQeaSsE3bD3GtqNMKmVr9eRu2xckcYxjRxT5Jkli4Lo+ldbWTdM5dA6OmSEMTUYOd9nZ3WJ3a6xMM4G0SCnrglIHWzgukTugqdZY+tC7OE9x3Jy27fqD0MBgEoU4loKjNB2k/oqyznooTVW32I5gOjxmZ0vDgso1jYyIQu1ltpSEnWDr/kcZDfQzuyavvvIqlxdvMdeHVdyWLK5moD3Qms5hFLkYgYutL4VXy4dMxiG27bId6SDTt0GYrHXm5o3XH2K7IXVRUsRq3LenE1JZ05UFu1OV0PE9j6oqeH6i5tTt6ZC2rTk7e59gpGBat7YjdoIhed3Q9CalLuvMYlEoLoxthISRDzS4gfZACgLKfAfHCBlp2KrrbxO6e1S1mhuOY0I3IcnXnF1qeWYnYhK5nFyosQu8AWlSkWUFu1sqWL08m3F8POHW0UdZLtW8PDjcYblcEvhq7KJgxM74Frenu9y+rQyYt7fuc3K+4O//l/8VP/UXlGGUs3XE77z+HTx9aTq+dZeH73yHTkp+5mt/Wb3GH1M3z/AMi/GuCga+9a3fwAk7PvTgZQCWywxvEFIVJb6tDv6Ls9/ncsMJ0a2jJUtiBloI5/zyjPfeec7nPvtlJmOdOMkqhiOX9Vzw6KEKOr/9vd9gb+82f/6n/ioAL3/4OY8ev4vz7JSTZ78PKBjy+YnH4dFeD//thInvOZgaVloWNejr+8bsdHZ5RiOlErnRvidFmeHbfs/1yZOKaBDw/OmcH/vKXwCgrVroBG+99RY/8xf/CgCf/+xX+Ot/46/z7Mkj/SwNLtvYOAip5ZMHFV/88of5xu8+5GOfug/AOlZGwU2rg5vO4fjehKvZxrYDhNFgCklerJXsNQqm7thGv3eahsHO9rbi4nVG/zchhIZKqWfe2d3i/PxU8c+Apmjx/QgzMoEzPT8dQOK6mzXr0TTq7rD57CpvCaNaeelpyJ3n2nSd7M8mPxhQlhXb063eUDvLMqLA5fpqxkSfF3Ec4zgOo5GaP6vVCjsKFJ944/XTSeqqwHEsXE/tQXWtxCtufJIcLMPE89zeJLkoJCAVtFufdaK+SVSrJvF9l7ouGWrZ86IsEaJDIAj0nBUtOI7Rn7Wua+LYAssPqHXS0jDAcwN8z6FuNl5JQNcidBBhmSaBIyjLmkYbzJoIJTIjG7JCG5I7Ho7j9tBI0wtp2wbpuP38zNICx3PBlEjNF22aCtc3MPR/t11DUbWYFv2zVFVB3dRUZc1Ae452wqRpKvR2R16khKEPXd0LfRgSDT31sHVSu5UrXNdDe5ZT5DmOF5GnGULv6Z3oGA7GtHSs1zccYDoLQ0uOW0joKgxsfC1I1LQVnmdRVzpw7DpkJ0gSaNHiVW2NVQ1pGwtT+74m6QLTtEjON9QPlzgW/OEbz6k0xYBujmUIDGH2HC+sUhuLZ3qMXeazWEnia8j2/HJOZ1WYVg5iQxUwANkXYJ6fPWcYRso0W8MCk1iZYpvWiixTCbO6khgi7PdvYWbU7QVRuI3ZB72CQWhAV3Gm6Sda4+yf234ggqu2Ae3fi0mNY1s0TUOgsw6rearMReuCSB+Wi9U1z2ZPabSLc9s5rJIFpr0g0INk2xWW8BEI5qnaILbGx1hW12dsKrdmvbrm2jSYL3VgYzgIDKLBkKtMHVDHtz/E5eUZK82XaatWO6SblJsLNJ1SHenEjbt8p4LHG+Km8o4AGI7Uhv/uG6fUzQFlfcVooPwO6naIJKHVz9eZBVbQEa+vqVZq8h7sH7NcXTEc2GQajOq4irCYl2rirNc5y6VL3Uo6oSb5eHCXpjWwdYSPYRGnCY7r9eRewxFg+BTlishRk84zh5gYOBqr2rYNVVyzOxxCtzlAC0bTLerK7XH1qzRjPN6iLtXvTqsZA3fK++8ucLR5pbA9trcOieOYJFEXJ9HtIyVcX6nsQZJklHmF6FrGQ3UIVHnBMJry4JMf4eLssZpDZsVwGPUGpUW5pKo67t05Igwmehwa0mxF2Sb42qW+qgrOr04ZaCNA2/eZhvukWUWl2SJu5GJ4kgZB22dkBzx9/oRCV7y80MHyAtK5cl8HMOyO4SigrBoMfYkvO4EZ+QSahD4c7HB1FeNjYekLV9NYuK7JydljCs2Hk8z58Ce83iPMkAOypGX/zhRXV20Fe7zx+jc4Pj5gPlMX6OlkmzffvGRbZ6BWS0Ee5+ztbNNqoZYgtEmTNW17AVopZzIYMplMsI/VxXgwGDBbzPs5DfDs6TlBqFSqTH0hQLZczc6pGy00cPGMNKuIoqAPZNrGosir/sAJgzFt0/H8edJ//sZTKwzDXlBGypaqTHqVrOPjY46PD2nLklwHQOfnaswO97f6QzbLcsqypNGqeF3lAhZYDYazEcdQKlbCKCjLGxXDqhZ9JrKjoiprzooa21LfN3euOJ35uJYisYMSuamrrp8HddVSyoZGrqjzjdv8CMtqsRwLobdk17MQpkmtv28QhAzCiL3tPUaRmhvjsYOUFU2ZInSmc29wQNcMyFtVsdkaB7S1Q2embN/VCm2WTye3uXXvM31VyjZKmlczYq1ql
5cZoefRlS2l9tUJ7IZOCrIs4WqhLrB+NsT3ptTaFHb34Dam1eJ4HZahgqTvfu/bHG3tc3x8jKkvYYIGacPOR1SAYNmOUs88O6Fz9SWpXGOlLluDCOlozHyc47cDho7q31VyxcAMkMLmyFFzKFsKXBqq+l32dZXIsUNEXfHKyyrYOT+NefjBOYMwxdc+c51n8ujxO/g6KbRcXLJ88hbhIGStzRN3drb54P2HvPLKK8znilN2dXnKdLjLzh1V9dsbf5hXvvAZlklGmqj+zdohv/ab/4hf/fVf5t/6hb8BQFOV3Lt7m6xT67NrC9L1E37ur/4w944V1j9Zzjja2abJoNTJokfvv8GXvvwxDg9UtUlYAkNK7IDeoDiN10wn28AN8iHyfVIpiUJVAXvznXdAlnzko/d7ZdWuAwOP3T2HWiteHt3yGEZHXGn10CQR3Dq6x93jV/jkx/4UAG+9+S1mi1OKTGCbY/08kKWSsQ6WJ1OfwWDE9dUFueYNe54PGNRt1V/iHcOjriWlrkrndcN6EZMuJZ/7rDLj/s3f/E2SNMa2XH7x3/7bAPzSP/iH/M+/+g956cMqaKoTg6xaYhtT0ACU7a1bPPjoU/7wO085eaYSZqPRkIQ50UCtodn1mgevbDMe3wRXg2iMZZYkecaOVm0ESZ5n2DoRphQNR4qLbG04M4KyLAhsFyk2Kq0Zh/s7fRU3oVCJSo0oAAhCQdsavY+nKGyCIMK2TPT0ZGs6Jk6WFHneJ48mkzF1k2M76mwqyxQDmzJPet657yrvH0NIOi3mYBqCssrIU7W3uLZFmsWal6vWUF6pS28jWwzNuWrsBtezb/itpkmWpZRl2asjjkYjyipFdg3mhgNlR1gSFSkAmBIhwQrc/u7U0dI2Da7jE2shA6TAGFiYG29RWWP7NlmWMtTndtd1lFVBXnU38xpJUXc49s2VVwhJNPJotCptmlTYtk8Y+b3f1zAcKU9Qd4OYALAwRNd7UQUDF8NWCKYNF9wPLdq27o3GTdOlbUsMS/ZJw7KpcR0H1wnQw0AjW/zgxhC5kwIMC8NoKVItHhEKpLRo6o5Oai+qPCVwWzItFCGsFGEpnmKpzzrHjOg6i65L8HUy3DVtXMvu54YpPDphEo4cVmu1/m3Loy4tOq2cLQwD15OUVUzTaiN1y6cpKwI36rUSLLPGdXwKLSIipUBgY1oNjqOSgYZoMDGp6xyxUe8zPGTb0GnudrIEy64JQxNzk4ApwLfGSsm01Txof0rVxAjj5i7TmWDZ8sbAu6hwPAPDMHAatU/t7U2om5RSK5YbwsfutC8Wak3awqEjIEu7fr6M/sV6Fj8YwVWaF3zruw8BZQ7aSAM/sJlMVfBhOa2SPc9synIDAzJZLbMejuY4BrKW6rJWbVRcSsbDkKKoekL5+0/eoTMkVbvByNnIOsNji/W1Wui+XWEZNvFq3VfUHr33JmVZs6PNiLOs6GXYh4ObzUdmKUKK3sjUtl26rusnrxAGpm0hhKTTsINVYnBxFuFPatJKO5hjkecNhqUOdVO6dHVNONzl4lQdmO8/fsTB/m2SzCDTMCvPF6RpzoY7ejVbMIjG5GVFoM1dL5evU7XX3L+jvM+arObd917ncP8jrHWFZu/IwDIcLMfuFXdkm1IWyz6wyTMQJrRtR6ZlM1tR4Qob24nwdbZeGNcUNchOE06bgs6NsSYOa33IyrJlEa853L8PvurzyigZDCJMNgo/DbcPFOTC0QH01nQXz7HJVhXbQwUZ8aKO2eKa6fhIva+7Q1HNCFyX5UoFna5pMPAUcTW+Vr+ha2FrNOJaK0kGqUljV3g2+HpzdUyoipr19ZJQq+klMiMIBxzduQuowD8MQ2RbE2+krYdT4jgmS2v84AZGkueSVJPC22aubAnaJZdX2nQam7pu6cwGDDXuFkMuL9NeuMG2BJ1RE7DNKlXP53sNn/viV5gtnhE1aqPwA4NbR9vKgA9Vad25nTEeDih1Rv/g4AApx1xeXnP8kg4QLBvT8BDaoNC1bLa9LaaTIXmm1szO9h5lvSRLa+KVrsy5EeGgw9QH/ac+8RneevM95vM5y0Wq5wJUzYq80omUeKNQ5iE1ydZ1Q7qupWxcTKEO0GSdcOt4mzhVffKdN57w9ju3iAY2Zb5JuOh+bUt29QU6cxxmsyXO2O+/JysLyqrF05krx3EwaKBrKDTcRnaCus57UnhRttiOxf7utM86GrQkSYJtWgixgeVYNE1Blqt1HMcxnbCxHROd+KSoVliNhSMdLEdnOlsBlUkU6cqcrJTiVJJwvdAy76cWw8EEy2xp203Ql9DIs75aiVFhmS6N7HC9jaJYyWpxRZq+raAiqADWMG8Uxcoip6hK8qxAaMNJw7RxPBsvGuBPPq36sxzwxruPiHWVyvfWCKMh8gPOzr4DwOX5jMXsDxlPfUqdgBgOxjjCZaKJ+E2VMJn4HOzskuuq1CA4IHFz9vwJ68XmfRNu3RmS60UTDXzOrs6J44yDbXXpDWjphE1R71Mkqjpf1x5B6PDeIzVfbNdhemAjGVAVWg319Dk7oz3eP73Sn+0SJwaB0zEM1EEc+CHNyCIc2vzoV/5lAD7+8lf4g+99wD/59d8E4Gtf+zLvXjxFNh2mFp0ZGQV/8Pu/zWc/c5/7d1VA+ejx2wS2wLfV/LlelTT2JV/48suYthLeKdoUx2zofJtEQzGdacZf+Ym/hC109beWuE5E1axYXqm9zAgbfv7n/jX+L95k04Rn4hpWbzRKvuZnf+7HeeW1Y2YLVaUaRxF1JTH9lscnKkDfGb7GJz//xb7iZblKqGGZxpxfqErLszfe5av/yo+yM91itXpHzeE85/TknEstu/zu05iiaumsDqFVBk2rwQ2HmLaJY+l9ypXQudRavGbHdVitZ3zqY6/i6Ivpt775T3n1lU/y5S99gUcfvA3A3/rbf5P794/ItNVFnlUYhsWjy/fYO1Tjd3F9ws72gON7IbPLzSVQnVN7h1od7STn5EmGZd9IrGfrlMCbsHe0368/Q7g4GJiaiG8YNrKtMYwOtCLscDBiWV8icHAN9cxbU4fBYMD1TPXdcOTTNg6jUQh6vO7d/jC1XPD4AwV/39qOtD1HxCZR08oMixajy7G0imlblwhaTF3d9r1awQCzmi1d3a7ygi99/kucnLzPw4fq3vVDn/40T58+5fRczX3LsoiiiA6QG2NcJGmuKuedTpxOJiNtYqwDUSPDMD3qpgBTnSlpFjOIpjSN7KGfltcBJpmG6SurAgUF7WXXDYM4ycitqq+oFUVBvKx7lIrrBGTrjOEkYLXSggvDHWhCDPtGBMIwDALXpMi1rYsV0jYGthtgWLpqM8px/I5Otkw09aKtOqo6pdDKwOPhCNlZNF11Mz86i6rosCyboa/PhwpaaZFman8dDB26xsAwJFWl9iSnm+K4gixf9LB4A58ya3orBlCCOm5g99VCJEqR0qgIfTWvTcOlo2R7T63RQXRIsr7CtYPelqMoYxxniBATsk7tpx0NVdHi6QS6Gyjxk7JqcLWgS1YUSCl7EQwLA6SNYwz6
eVYXJUXZUNWyPyOFcKjbEsPazNca21Z39Kbd3IcFTZvTUfWJUsfpMAwL29D3R1tgGJ5SQOypAjZFXmLZvoaOQppcY1o396my6PAdk84MkHoviUIXKSFwLCKdVDdkQ+hMsDZojyRBmA55nuG5KjEkJZR5jG272K6mZ8gbGO8f114IWrxoL9qL9qK9aC/ai/aivWgv2ov2ov0JtB+IylXbtiTrTP97Rydq6hJWGs7keRGWZZDmMbazifElVSXptPlhsi4YhJ42ptOchKZlmS1pmoZVpiL1shSE4YBSl3Zkk2CZLZ01oK41gc/xqWtlGLiRqJQSonDQZ0w8zyHN1gSBh9QlZM8R2CJQpoGd5i7VDRg3kqdSNoqH1QlGI51Nu8z4x7/8O/z4T36KdaIyHVk2I8tr7t5XGcwkKcmLhGHksow116fpePjkCaOxjyVUdkKaLlI4WBqqcHg8ZWCuCOEAACAASURBVL2eU2Vruo0yahGxf+uI+XwjS2rg+A2r+E0cDUezzVvs7OxzNTtnrfV2jU4RZZexGqvFMuOll15iNjtT/hBAEI2ZXT3HNLw++zIe71LLtseBT7Z2qJuMZbLo/WOgoaoWnF48ZqRNfC/m58SZy3SsTS87g3XeKuKrrT57Ek1Ji5TOqik0qXZ5KRiOdnpJdzqb7eke8/mMWMM6ZTDE6LYJgy3aVs2zMHKocsl2pDIhWyPJdbzi1tZdLlKV0avKlrJIcbYHWJqM//TpJdFowGMtsrFczRmNlW9JU6lxXywWSlaVpv9dju2BaHri6Py8xLQrTMPFdHUFIU2YbO3x+Olb3L+nxAeKMkPKhjrTpPDhAZKEJEnxXQUrXcxPWM9fZzod4bhacj81OTjcoyjV+6bjA7JiwcnZO2xpTkma1jS1ZHtrhyRVfXV1uaKpUy4vVaZ1f/sOphHw/OmCYajL+6agbhuW84I41gavocSLPBwN1/rd//N7fOy1T4F4n+FYzYX1KqEoxjcQw87E932y4rKXdB2PXBaLGecnJz0WXUqDb37r7V6K3fUEk+0EN4+4vlLjKdH8s+UZy5X6Tbu7H+Lo3i6DYLPOUkadT1ZWZNr7arlaYlkOnuPBxiyzLhTfsNJ+TtGANI45Pz/nSgvTWJaFYRhsT8b4WsAiyzKqqsDQ63GyMyZeVVgCBlrSOU1Trq+uaKXsvTZs28SyHORZo3/3PstlRVFUVBqGXNcpQeDhe2NgI6qjZKArzUXN85K8VFy/8UCtoyRJkG2NaZoUGoq0tT1ia2tCFGnT8s6lTROKeNnveVlakWQxpgWDoc5iupKDvfs4Uksxty2yLZFVzPGhyjYfH2yDcYc8azF01SReJSoTK1V/CsMiCEJqw2E4UdC9waghW8MiWZDqylhdLXj27AQptTmna3N2dYnowNMViO3xBMfxuT05ZDFT6wHLpqbi0WNlGbFaZnzstZdZL84Z6ApwfjhFVoJX76hs5fHxHb7jf4PtwzFS84b29x+wO/5ZBsPbZGsN7yla/vP/7L/geqk++xf//X+P06sVltdga17N9eWCl16a8OUf/tofMZnFstG8f2ZXT+nagjA46NdDVzfUjYE7CJidKN8qz2x599EJn3z1C2r+VAltYxA4w36er+bPmAUR399kbeHaTg+/G4wtDu8eYlsRrqUy9VV1TdsK2iJka1tV1L7w+T/D5SxAap8rLxhAZzAQBoNQVQJ+7q/9WX7/zWeY2y9x/HEl6/6FTz/geD8A/X2nH7xHvpbMVtdcnKtKwPsnZ1zNM5bX55Sa51JJk7xoMDr1vqvcomwlX/rRH+F/++3fU89chEy3D1llC/6dX/wPALhz/wDT9pHNxkC3RMqWo6MHDMfqWYoiZbF4wnQ6ZTnT54OGph3fVZXP9995l7b1yMsbzlroToCWul4QBur1BhazWYyhKztppeD/wjB6HvZyuaYzBI7vYGtI7LMPFrj+TPlvocROluszwuAGZ/T82WNeffVVHKUJQ1XVyPoWbdOxvaPet1pfM965jWVZrLVMf1XmPQ8dAGHjhUPsqKastcCTcEjTlGjkcPe+QntMxlus1tfsHShoa1FVzOdzyqLD0Ea8NiZVW+J4BoE2QG/rCtNw6DZFnGaA49QMw238SAu1FBmGEJim2UPibNthe3tXVfmA56ePWC0bRqNRv6dXdYHj2diOhWlu4PUWSAWzAzCEzXpVIDq/pzkkWYohTNruxqPL0F5qbc97EFiOSVbEBFqQqGugzjq6ruk91lzHZjKa9jz6lhLLaJGt3cMCDbPCMBXCp2400sgc4Ngtln6+plqzXtUcHByQa/iZZdtqvftDTF27si1FzbAszb01BWma0kn6Sq5AaJibZKltQAxLKPSWPq8uLy/xbIdlPqfTXDtXV6eapsDz1H5W1x1BIHousVEbyhXFKPE1D9p0XFzXpa422gKWhr5avWCP6dqYVonrulTVhlMe0FR1719nGxaWaeF5giq7MbZ2XAfTcvrKVY/00pUlz7eJ41RbCmjqjpC0bYvr+cSxFnNzLCzL7sVIDJFjOS2uayGlmj9ZIkHUjKIJjUj02AAk1NoT0TDAcxxC3+rhtmWlKreONehpQKH/R/fX/2f7gQiuLNPGddUEr6oK2fhIqbylAGQTU1UthmUQLzfBjUfX3pDdBwOL9fKSPKtZLDdO0mNs28FxB8w1XMINHWJ5gTA0KbXboylzUlqmkbpgrlcNW6OIxWJFpCdYa7fKQKy4MXdzLIflfM1mwF3XJs9zhsMxhqGNTKsUU5gITd70tGqJEAalVokaBBHnJwW//iv/jO0ddRloyTi/eM7TZ/p3ux5VnSLbCwLNV9ndnZJlMc/XC0y9mY/GLYYNUpuKBlFHmrbkSUjnbnggBlcXLYuFmiTHx8c8+NAXubh6m0Dz7psqIM8Frj1lsVKXBj9wMc2QWJMhs6Lm7YePGQdjskodVMt8RtMov4NCm9ymbctke4jUQhjz9XNl5tpaWJpIaZgSb+iyjk8ITAW9rEiwJFwuFJQnDH2eXF9iGg5Oo8mHzhLZuARBQCXUxeJqtmRVj3rfBKiQ6wTbj2g0oXVRJcwK5b8xGIT6d1XUbUeaKYLzuttBmIJ3rt4k1mROhM11fg1Jyd7uXTWmkUmSzilrddEYDA3ilaStbNBBYOQZXM6usMwQT0MHZosFiBpTbyJ+NFHeKd4BC61gFg2m5JnEDwdUGmpapQ62ETHaVofsdBpxcv6cwUTiaPUgISJC54ir2SOyTF1SxgOXrnUYaYPbZ2ffwrIijvZfxrB1sJHWrOYVRVawWCs4z90Hd1gtajz74wAc3Z7yrX/2XUZjDzNQYxOFE64uG1pKjj+kIU15iW3bLNZL3b9nPDtTalaL5bWew7us4zm+DlSz2KGVObfv7ZDr3/3s/HUG3pQPPbjF3q7imZw8fx8pJVWh1tVkK6ATMVM7Yjra+BSp+V2urxlM1noOCZpqyDvP9UXVc2nrHCkkI60IOZ3ukK5N8lRwcar4fpblEURhT1DuGgvbCkizmFGk+ryqWyxTUsuW+FL1S5ZlJFnG7q6a0+P
xhMHBAMsyei+co1uHfO5znwOMngSepjG+beJr49ggMlkszxkMRj2O3xAeZVaSt2l/YRdSMB5P6cSG49WxXiesk5gqVa853N1mb2cXuhpzYz5qwTAKCQIVJEUjD8+3MMyOTl9W21ZQVx15XvSHatEWTLeGtBsxAEMon5Ta6PljvjegayU7e/v9+6KBzXJ9o6bnWCOKzCSVVzTVhiy/gkZ55liaU1ZWORcXZ6BFU8ygxd3aY3m9Zji5rX+DTS0Lnl0tSPUhHoY7PD+d88bbam2DxcXim2zvjHE1f8sf+tiYXJ6pJMnl8wUf+8KnkOYAibqcv3z/x3l+9pTF8yUb4Edotzx69w1++i8rYQUjCKnqx5jGGKHFgJazOfuHLtOdXbJC7VOe41NT4+roKlk+w3UsJtMPI/VF3DQkoguQdFxdqD1hdfqU/a2XsbWapzVLEKLDsQZcXiuIXLp8TrX1Sf5IEyWidei0WuD/+lu/zic/fp8f+eq/xNVM851sB9e2yMuW08s3AJhOjwiifXINcXJw8dyAxeOHfOpTak/4hb/5k/zd//CX+N57jzg7U3Pot3/rEfcOD/n8Z+4CcO/eba7mC1750qf5PGqNGlZNmhes12vCSK1X2cBiVvH+B2qs0iTh/OKC5OyKR2+9C8DhYcB0O+S//x/+MZcz1S+fePU+WVojtE+Sa1uITkH4N+Ij0/GItp7iOhajifq+XM+RTOOJ7t47ZL3o2Ds8ABTUzI9qFosFTRuyNVX7W5xccXAYUmkusZGW+KGFH/mcnWpRrVwiRIfv+Eih1t/2dEzbZX1y07ZdulaSJTcwo8P9Xd56/W0cnWRranWf2dmLmF1rSKyzjyFqHNMkDHQwnnQEvtsLR2Rpg+eA6QTqPEIlsN99703ieNV/3+nJjMFgQFPrtdeU2IaL5QuSVPsr+T6uK3FcmGko/Wg0okhhqoVqynZBUdSEUUeoFURtw6WqKszAxA82fk4lyBzPU3P4k699huvlCcPhkEcfPFbP5zmUpaSq6j54q+sSExdT3xtW8SXTnT3iOGY80UIRdUaSJJj2CFtfjqtKeTn1cGmgbiv1zOVGHMOjrEvoWhot1GCYFp1Z0W38TDuLMjdwXRcdn2AYAn8YkGUZhuH3f5NdQVlv+Okdt4Y7NE3V7/uia2mqFilM/FBzfFpJ28g+Wb5YzLRqYoenucxl3UAjcJ0A01RjM93awjBMFgvtEeq7lEWHaYr+s00REidzPD/oAwQDi6aWOPquJLuK+WyOaXW9YbfrexRF2u/Vnj+mli15dXMO2LaF53jIVmLr/awqckzLwNGUCiFbDKOlrbq+D1QgqZKSGz5c27ZYRtMr/DmOje8GCGH0dJuOGs+LqOqW4eBYv86hbVs6PfetbZOiKKjKDlNDHF1XUJRLFbxtJpVhYrkORu8tFiGlpJY1uU6Et40JSKzA0DxRKKsbjuQf134ggqumaXtBhqJc0bQFpggwtPOyZfo0RsbWYEC7keBFYJsdQqjL6/YkYnltkZQNoy21+Vk2FFVOOl/joTpuIEYkK4u6Vf9dGzm2Lei6mlbLbY4GE5IkxrEccs0lMkxJWcZKNhNwGw/DMDGFQSM3RFX1z8Vi1i9i274hC4KSTm+aShmz6kxER0u6zjm4NSZL1PPsHQSMBncpSr05FDVGY2EJH89Qm8izRwvKosO1bYQmyyarNZbpkesNo24TBoNQyciPtCt2C1V9heurz37j3TeIng+xTXD1ZcBsMy4vV6RZTKex062saHKrj9j9yEXUJm1nk+rKYCWXyNogy9fs7yiD0LLOefb8lIle5HW1ZhBNKLuOda4OGNM0kGnH1taU1VKTVVtBjcTXl8m0ENw6fpmzi3PsUB3g53FGml4hLIGlSe8FMYtnD3nwoVfV725X5Nkaz40YbCke1mw5w7AzruMzClNlbc+vrvEDG8/UXCocTGlRV4JwpL7v+ekpjj+i62CVbzKfFp1Jj88NozHr4gPwwRLqs2fzGGG6+OGgxw27YcRqfUmolddaMyUvK6rsHNNTh8cHz97ntVc+i1cZXF5rQ1Snw7FcDFddMFf5Asc1SZI1tqFeEwZDTLNlPJ2wvbsxauzouCbWBNPhaErob2MwpNWk+svLR9y9+3H29vaYJ7t63DPaJuG1V1XlLEnW/Pmf+QnSbInYZGOfnhNNAvxozDsPv6eez/W5desW+5po7EYjhJ0yO5+TZLpaWEEw9FnE6tJ05+4nSVZTkvyM6bYW53AmpDOL4Zbg5FwFO6998iXOLh5jo8bFcivS9ZDdWzvMrlUwN4s3CQfYGavXuV3LbPWQqZaVf/L0e+zv+7RNx9NH6n2GuOT52SmG2fDZz6kL6ni0zfnFgna+4f5sIfA52B9zfa3mcNfUWLZPXqyxtbKT41jcmR6Tl63upwuqLGU8iTg6UomUqlrw5MkH2LbNUmP756s5TeP0AixCCMLIZb1+mzLbEIvBMBuKSqkggsr62bbidIFKQvm+S9XUdPpgiqKIk+sZTdP0dgkbdbJN5WowHWFaHV3bKMN01AWvyFvSddr/LQwFVdn2QVnbKcNXIQTxWqtS4uC7FWVZ4elqimFYhGHYB9We72BZAtsUmJoDEQZjKp6ynju4+vK4Tgpka2K7KsiukgrLuUXt5zxbq/dVeUGa1RQFXOkg3hQrBCW1oZ/Pn3J5ec3J2ftI1J77zusPuX3rPj/9038JgD/1wz/FzuE+P/Znf4JXXlM8qQf3PkfWrAkGAaHeg/7gd36PT392m899+TMAXM4XBE6A7DpKvffPZifMr+a4wRa15p66tsmqqZCtOiveev2bnF885POf3aeVG24BiLbCME0ev6c4bGZWsLt7j6TeKG51WpHWJNOcktAqGA5UFWLTTFxcJ+J6roKRqlkR+fcRhkFZaXGH4ZSu67i4fMLXv/FPALi++Dw/+/Nf4eRUJdkcTMpCCdt8/Zu/BcAv/LvfIG/3uLv1gFJo4YtuRZae8p/8R78EQBAJXvnYD/P17/wfnGgl0qPtQw6Ptri8zLh7rIQork7P8QLJ7r5SvP3pP/dlfu/r3+FXfv3X+MJXVR/XVc5sdsGrrx7wtY9/BYBnH8xJszmV3l+lFCAFu3sTWv23usgx3EP8qOHTX1TrL01q/j7/Cy+9ooKBz3z2LpPRIZ1Y82uoPr/30i635R62GHLvvpp747FDkddY+uI2HA7pKDk7f857D1Xyxvd9ZtcL6taiK1Xl+M69fbYmh2T6/MjLJUWuLpP/OyqgPTg44kMP7nKuZaaDwMcLBIawe5GUpmnZ2tqirmBnRyu71ZDnKblWWt6ZKjlsDEGtBZCydMHHXnsVKTuur9U4NG3BIJqSaOSM4wRUVYbnu+zsbLg+LWUl6GigVt93dGvK87M3sbXsf5lk3D7YZzab4egAaDANmc8qus6m0lWb0WjEfDYj1Qmwp09Stra2WOQpkaP6abVa0bYSy7IRm8qOYYJR9AJFvu9TFSWDMKLUKo6DaKTsMaTsq3i+b9JxI7GeZRlVnWJaIZ3YiGrUWEZHHKcMBhpVkBWYpomlK2
51nmAYAbKrmGhhKCFMBBLXCSk36KCuJU1LfI3asCODqqqIIh9doKVuTPan+wjR9fffON7oCOg9wjMxDKHEGXRiz2kMqkqJMgy1/UOWxthWgKWVAU2zwRQdlhURBhtl3hQv6GjrEn/D3xItdOYNckQ0HN/do6k7ZLcpEBSYRsVEW920bckwtLiarwi0XVJVKWuLpiq+D4XS0jVG/5vqVvGGTdPrK4Gm4RBFA+I47gMuz3G1gq/6GMsU1FJiGXZ/ztmeiWka+KaNaWzOWpO6lqxjdbcITA9htJiGQeBvzkMX2dnIJqdtNorBIBt7UyijqVUBopMmhlDP1xktpiXIsnXPtfPcf7Fc4AvO1Yv2or1oL9qL9qK9aC/ai/aivWgv2p9A+4GoXHVd10NdoiggGri0bUuRqChVthaeYyHbDG9j4mkNcCy7x+dm6xZZ+bx074gkVtlfCTRyTBKXlFr2VLaS0HbxNZ/DCyPC0KdIZR/JBn7E5UVL23ZUlfaUaAoC3+mN/1T5UWCYNqbG55qmgC6kaSRpqtIT6ve5fcbE913atqZD0sqNfKlFUbrEV0Yv055cFERRgDQ32V9TwWwMel7U7mCMMbKoamUguelLzx0iWpWtaMwh0+GEpkuRutSdXoETOFSJLgW3BpfLa4TsGA21/LU1o7i0GQwigpGK3qfhFHMY9mqBx3f2SbOC69OM4y2Vdbx9Z8LV1Yzl8hypDSZFU5MlMzxd6m7qCNtWJdzdrQ+p500Xqtxaj7Ed9b4iWTOa7DK7UBCEt999xNUs5+Gjd5lqI7emhmBYsLu/RduqzMrpSct4fMh3vqchQLLDNnPG4xFVtcm4hwShySAcEWs4yHjo0siYvNA5B6NU4yZDKl0hFdaaxXXFeOuIutmo4uQYlosy/IBnFx8wnmyRVUv2tGLhZPuI09Pn1K1PozNQCPCjLUKtNnd6OqfuFljGkjBS45fmaz54+g6mabLQ0J3DWxPSvOFCZx0HA58sbfC9fc5nGsY2b4mGC1wnolj4+plrpEioderMtXZI2gKsmH0NcfzwR4bMF5c8/oO3uHWk/jabFYxHu/zu73wbgL1Dh+vZUhlj6n65OM2J1xXreE6kLQYur+fkVcZkqtd2OMWxPKbbDeOp9p2qQ6pKck/zycbhLpFvU0uTShuEf/Qj97h3+EM8Of06Ukt+r9OMpjFY63U2GhlIbJ7ML3sfr8ltlTW89+GXmW6p7zs/OeezX/oy11eKPxYNX2IUjbg4X2JoH7jlKmEQ2gxHY05ONEdntSJe51xrOMx6dYnlmHS0PRdtGN1ivWrwfZ/5fKnn1ARBw86Ogs0YomR6fJfnJ095+kwpdVVNTeBHjMdjRlM1r4NBwHq57D1DgsDDdisGkUOebmwPlKyv45p9pTyJldH3prKUrRdUmYXjOFQ6E5lmMXVd00nR70u2rfZcoWGJTtPSdArStFF/6qEcXYO9gXDY0DYGQqujVWWDaXW4jrHxxcRxHGYrB8NsENmG9yFonyvMv2oSyzKYDoe4rtpvhGUyHIa00mS+Uv1p+wV0Nk2q+mUdZ7Sag5CmGioooK4aLMsh0P5/nekgnJZorKrLSSLB8Whrn/091ec/96//HT7yiY9wrTmDp9mc//G/+VXefu8NfvFv/QIAlmNgWY56Zs0XjeOUj3/ioxzfUYiJsizxRUBSxgw1dG82e8b9e58jGh1QVWp++oaFBBKdva+amE994k8znhyQNdqiojVxfJuT0zOE5nN8+hNfYDrZ7zP1tuVR01BUJWWq+ZwM2d6+y/e3v/iTP/FH/pufh9/gO8Df4Y9tP6v+8XWe8N/xj/741/x/bN/mnf9/b3ygfs+fzK/449sv/b1v63/79v/r/y2zhrbtmIyhlGq8LucxaSx5/Q+/Dii7lr39LQLP5/5dda7t7W1xPbvg2dNTdnfuAGCYJZcXT/n0p5XiZtdJrq9njHR1HeD2/Ql0JqHmSfqh5PGT9wi9Xe58SFfcsiXDYMj52YxGI2qWyxTf9/nIR9R+2jSSvFig0Fv/N3tvEmt9mt93fZ7/PJ7pnnOnd6yxy1Xu7nLbjiF2Yie2EwUICEEQsTBCCMEOKaxgh7KKxBKJDWwQEhKBKGTHEFh4iO0kbne7uquqq6vqne58z/yfx4fF85z/20ixV1n04v0vr+6ZnvE3fAe119979wnHx8fskjumR+q9yrIGWh6/pfZHkiSE4UNu7y4R+nVxPKZtpmzWe861suLp2YzR+GeIQoXQEP0YP+rYbCaMJgdYl8l6rYx/D7xWKQWjUU+v6/tVFbC+WdL1DPz0URCw2e9oy3rokGRNwWjms9tqRJEwMMwOQT140e13BfQhtlsO3PfF4oTtZo+tDYpDN6KJatIkHTrH40nIfnuH7AVtpb5D4IXK48w6eIRGlGVOGB6hAUu4nlYuNEf4uvtRt2sMYRBGWrmWDt83EKLBtF7D3wyzoe1TOq0c7To2s9lo6Dy6oY3sLRzHwtDxheI7dURRAKgzYBT79J1JoekLtu3j+RZ0r3lRx8cn9J1B0zTDWb7dLWlbczhzs3yL73rg2MN5KmwlSX+QMze1psFCw+gBjMDAMCRtaw5KuW2rlB8PsNtxFCueb5Uq7y6UvU6W7fE8B1/jLNu2xfVMAl+dnZ5vkCQJXcuAFkDUJPs9jh1iaaRDVe6xHQPta0xTV9iWh2EbAxWjyJaAge2I4e4JRhFpUuBq7l1RZLS1RAgTRyskNnVF3eW49pROc8N2yZ4/7/mpSK76TnL5tcIoT6YLLn68xbAqTs+0od1mg2NNMa0527UKbjbLWwxpcTRTAZRrRnSNxLTuB4LiZrOhqioCPyKKDxvdwfePB1+GurSpq5ym7thr8+HNck9Z5Qgsqlqb8Tk+RVORaSnh6eRIOWl7PptEm5G1LUWmFu+BRyP7mqIoBmJl3yljuCgKMA7t0qbGMg122zVINWGe45Hs7oYF7TgeptXhOjHJWkO/fBfTcAnCgEgbhNYyIR6ZCA2RS7Oay8sXxPGYTh+SbVnSVPmQ0LoeRHGAIWcInfA1xR4hTZK0I9EeYe3EY+SbXO7UHKyWKRJI9nseag+kuxvw/GNCL+QAb7662ZDtKpKdGsswDMkzEKbJl1+oAPP4eM5sPOJHn10wPlIvzJKc519m7DVnJ88LXjz/HkEQsLxTY46U+L7L7asdrpb83m07bu1rSm0KbYsAJ4L7zQpDqN8ShA5l0XJyusAPtct5MKKpOmxfzcG28oj8iG12w0zzwMrGAStF0FFkhwNxDKLifq0SGylsjhcjvvoi4y74gfrNzoJRPOOf/rMfgnmQ5U5pug7PVes8TVuCKCeKRnStgu60TU+ebmnahKZSa/h+VZEmywEm4NoOtiMJPMlGr+Hd7oIonNJTsBgrVrTp3WHZBSczdcmvNxWmyIlnMZ/84HO9FjuSZEOe59zcqHGoK5jPU5JU/d79Fwn73QuqrsQ2VZFiu01wPRVor5fa68M/Yr9Ph8OuLjP2uzvGcYSluW+r+4Ro3ONt1EGdrrZgbqjqgu3S1N/zht//nU8w7Yonj
9Vv+frZPVWRDR5eCgqSc3X7nNnkANtRn/En3/vxIIQRhDY/+Pz/wJQ6ODg54rvf/T53q3sWJ2oN94Ad+3QCil2v1/U10+nRwPHq25bpNOZ+dYujpbSTdKmw2rWLrfd216e8vFixXD8HYDabc3V7zf3qlqY7eD5ZLOYP2O9TPO3tF0cjlrsU7X3OLi/p2obJbEwUqs/bF3uyvMSuXSpt4ZAlKbPZjFBztdzIo6gKGtky0/xR2zQJw5COboAsl1VF3/PaMsJ0tNmiM4j6CFORfZu2HqTfaSIManp9yXu+gnrQKSNNANl1CNtEYAyBjCEklmsTjV4HlJaljLg3G/XeZXND91wlWGMt2V7ntpLF1edyKzuEadA27SBaIKWk65UksJQqmYuDBWVZstNy5l1r8M0Pvsko/CbvvfshAKPTBf/0Bz9EaJuHx4/n/PCHn3A083nrbQVxvtlmCiZkmqRaRCDLEhZHfxEhNY9I9khR4YUupT4j/uAPfp+/9pv/Op47ItPy8FmdEYYhX3ypEg7XtXjy1jcRpiDXHKFROKPtJMvrJWWlAsrVbsxDYWHqs6zvLUzXoqlKrr7+BICTeMF8ccY/+D//X0zNh6v6hOPpE/7R//I/AHB1+zl/67f/SzwroBcHf5yGkR/xD//X/5Ef/Oj/AeA//O2/y3h0TKfX6za9Q9iS7/+TP+HFi/8LgPP3fhk3ekKb9lQaYtzZDsUyZXej3ic6OaLsjWtZbAAAIABJREFU38cxPB4cq6LT7Ejy+ee/R57dU2vPnra3kVIMxY7AjMHsaGQ3FAO6TtmgWJY3BIq7bIVrj8i0LUnX5gShhezQXnPqMakGbhFAPLJIMxiNXgvM2K6FaTpsN2oensV31HWOba14/lwlEm89PcP3BG6sEpIXF/fsspLpZESkrV9eXfyQR4/OePutD/n6hZrn48VDnGDHDz9XSZxnTbAdcwjaDmsqS5RBNcDd7ZqH5++RZDc4WrhhPHoX21K+gb7mYZdFSxCEPHigYc+vnrPZVkT+DK1LQxAEbDZLongyGLDe3X3JfH48QGTjkc/d3R3n5+dsd2q9IgTzxTFt42JoWNfDBx/Q1CG2q9bYfntDUQnefu+YrtZFtrs7ppNjyrog0QXz+fEx8fho4GXaToR4P+Pi4pKHDxWH5vLimmjiMx6P2Wp7FMezcF2Xj95Xient3TV+AFmxpmvV+hnFDne3axw75MGpKnjc39+zW+84OlJzN4kC2t7FtU203zth6BDYYwVZDnRg73kk+Z6mPZgKjxA2ZGmObavf55gGtmNi4jCbLfQMnrPZ3tFJtWe3mz1H83PSpKHQ9hdHU4Oibsj3JYaGPXpeqIx9hbbWkJKmNqiqgpE2fB5FI5qmwaDDdtSZV2Sl8uPUkvgmAZ0Q5HmuIJLAelkQ+GOSbIPQmgC2GVG3BaUWCJK9RZ6VSFkMlg2z8ZSmeb33eqH8zPblHlMX43zXR5otwhXDHVZVFW0n8TXFQQhVBIvjeBC5cOwew7WI4xGHx7Y6LNvA1udwWWVE0Qghu8FU2/Viyspju9lxeqK4tkWRUlYpQme4202KkB1VUSAOMu+2IEm3hMEp4xP1f8v7PZ0sqDU/zrBq6qpksTgj1YW26SxEGIcC92vY+p/3/FQkV7YlmE20+3O9wTIq1suE9bWqwk3Hp9iRTWmVLI7UoeF7K/oup9HkYGyb+VFM1yX0OnDyfJ/F4gjH8ahKNSCbdUbXgKEJilm64fbugqpuh0pLnZusVveMo+mgqlKVe8YjH2sgk1u0neJQRdFhYbTEnsN6vR6UgJqmYb6YDEnScrnE913GsUdW6cukFXieQxgtOExclrYIKZA62TGETdf2JGU+BEDdOEbKgvVOqewAzE9GXF1ueXmjMN+L0xm9bLlf3SCMg4u6hYmJq/HAceDgeJIsFWiuM1XXKDO33qDVPlfPX27x7XS4zNq2x3RMhJC8+OcqOLdMF9dTHLEDgdeyPequR3Q64EsrulaRfTvtt3B/n1BmXymM+k6rPTYJVZkMfk59Z+N7IVleIwy1EcaxS5H33H+1Qhg3einYuK4zKBPVTcmm3rLfdhyQsLYhqLuWm1VCpqtEjh2xiI8Hkmi8CFhdPmdfloxslTi10iKKOzo+pdfqhEn+BbJzBtJ0EPn8wR/dYXkGZa2SQM+6VEFh3tBpToKUEmlIKm347HkWq6RBdGtm09eBcCErsGC/U/uhvn+J7Qjyrb6YRMzxWcB6eYXQl57jSZbrVyT7mmx+6ARK0jRjOtbBayJwgj3TySmrtRo7yzIwDZcwjPED9X9tV3F5vR4O914WLB6EzI7mQ4d2ujPoO4O2sdlttWmh41DuHL7+SnV2XA+aGrJdq3D7QNMUyP6EfK+Tg/RCdT5ch/1e7W3RLzg5jakqm+9/TyerwZS23zGeHFSrBKvNmnm84P5edZtSPV4XL58TR7o7+Kphn21xNEfgxz+WyN5kMo+4ulemsEIIPGuCRTiMi+d5OG45qPlZlsFuv8Z2BIUWV7BMh6ZpSPOW02PdqTIM0jTlxUvVRS2yhL3mTBw4j8iOr599juv6gzCFbdv0HdxqHqdl2RR5heNag6dMHMdgqEBUavVT23bpY4/t7rUfyPRoQRRF2PpiCoIA0zBIkh2V7oa4oacLUWo86yqlrEr6vgMOPndSFZSEhXswIDc7wEH26iLuuoZe1niejau9d/oe6j6lrCVSHjxsgD4bPNeqqlIFKdHi6eqyK0wMz6VJKnaVKig1Ndhujzh0t3qLvjGgk0MnRxoSExPTCbAdFVxdv7hGdjUff6w4dH/r3/0P+Af/6H/n7/23/w3/9d/9ewAsfIfF8dlgJlmnPU1d8+//7d/GCTVC4nbHeCQRdCRrtfYvr294651vDbyzuq5pRYfrhlxfqHn3vQlvvfMz1G1BojkBsml5MJrw7JkqMP3x937EL//K36YodyqCQXGGRW8im4r2YDA9e4iwLHIdcI68EY7n8uLqknfO1fydPXpMLRvKsmWsu4xWOCPNGpKdKtx8+M5jwmjCanXJdKyKRz2C7Tphff+SJw9VEWY+X1A17VCcc2yfvMm4u3lGYB2I4i6OBdKx8fQ5bzou2+1XzEPNxylt3MDGkQlf3z1X89nN+eDpX+UP/uj/5v0PVbfn9vr7NFWPYarzJrWW9PWMTkKn/Sn70uLB2QNu7i9xIm0U7W3wXQct0EYUjmnanPF4xlIrAwoMzL6lKtthvnpS/NDn+lbt9fHoCNvxaZodo6n23QtipBiRF3vutuqcMC86HFdQlGqPjuMp692eNC8AlZD4vs/m8+cYwh06CKv1FY8eTXh1qYRTHp157PKcLH2tIrlZGphWr8RbgNvrlNvrjMnModZdlGR/Tdc1jGOfqlKfF3g+P/riGd/9ruJuWZ7BZrNhFG7pW7VHl9trjuYxk/EJZaXGxbQdvvjylkCv89Vyg+8HJMk9mtJCFJlsdjdcXd1i6YKSYd6y3H4xxEDXVxdMxjPySmJaB6U8GzeMuLy6x7bVXHVywja5Js3v9TiFlPsWw4747MvLYewc
3yItK6ZaIdFwTMo8B82Tmk3nRLGLl3pMpuo7jUYRSbomT5QRMsDHH7/PdrdiopWHHctAmCZFURBY6m+GYfCjLz7j4cNz0lTf0UbL4vSITguGjeIFy9UK78mIRN9PTW1Q5DvCSUihz6m6aJmMAxo954F3xC7ZUpYNlt4zwmi4eP6Kx0/Oh7VRFT3ZbkujA/0wjmjLkkcPpqz1Gl6nWx4+PGW92tI2em+bAe+++5j79Ss9D0useISQ9aDEvVgcAxWLozF5rsXGiorJNB7mar9LGU9C2rZlMVUFkPv7WzzfRMrX/Nyu67CdeIhrXdehx6NtW5rmYL7dUxYpRwvtMZmr2LFtOoJQG/9GPn2nOpmRRuuUVUrbNriBvlN6wXQWMx2HAw/s9HiB4xrsdtuh6BMEJ9R1PXyn3W5HGMb0nYEhVAwZhTYvXjwjHk9B6xQ8fvKAstojsIfP32w2uJ7J3f1Sz4OkaVpsO+D8gdZ00FzLP+v5qUiugtDhW99RG6NrBX0VUKQmppa/9QOb/W6NNCw6DXeZnsYI0wUNBSuSDikLLBFQ6wpGGLqcnY6YTaYsNcldtgl3969wfRUclHXG0SJgvlhQNtrcTcw42tr0tRjakJbtsFgs2GxU1yZNKq6vN/heTKxbnJNpjJAd45k5mB1bbct4HAwX9mx2qhQRO7Ddw+IJSPKEvhEYGtI0msxou5KuOcD9WpA1tuMw0gGQpMJxPAwTet1W3mQNvTSZaUNNxzPwfIs0aQaJemlJHFsOLfp8kyCMHtlH9JqE2ooav9+BIYniI/1eNnW7ozW1eEbdUO5zxoGFaasORtUsqVObrpWDAuRye4swXDpdsTFygevaOI5PlWvlvO0G14ZdtqXV1W3LFriWS6mrHG1bUbc1lmUMG9jxPXAa/LFJqdUIszqh7FzotSqP0dHVFdPJZFAiy7IMx5bc3ywRhjawbRPu7hMMvcawBZFlUmOx0mqIZiDg3obWHEiZrexp2nIg53etgWlJ9psEUwfQy+0tti0YhQsMQx02ZSXojZx4pKWgiwpLguW21JUm0BotmK+oEgMh1feUXUhfSdAKlFK0rNd3HB27yF5X3EKLIOgZx5Oh81AJwXQWDfKw3qiirg22+z2uO9djLNnudkgj5+Bya1o9wpJDULhal5hGy5df3A4G2km6J4wMmsod/s9xJVJ6eK7aa02TY/QWeV5iuqqjMJ2ecvHyYlDRDGML+hijDfGsg76vQ7qzcH0TV6+9uijxI2iag6R7TlWUpM4GOch966THcWg6NVZJnuA4FpGvJXoDh6YG2QosDibCFn2dk5cbWq1AaVg9d8sdi4XaV7Jt2W8SvMAdDEMBLMvBdX322i5hvd4iux5b74/7ZY7tGMpaQsMu+h5OT08pi3pQB0uSDb7vq/EAfN/C88FxDaS+6A1D7QfT8Mm0pHpZbmFV0OpzAwySakXTdJjaDBgp8TyP/XbHaKTOzyiKkL14fUYIQV0KwshGHExEm1q91jUJA3UGIRqEJYbPNzuDurEpGwZzzk42ONJnHAevO/h9T9s3rzvzRyq5o7XodPe+rW16s8cMjUEK3fYMpGhfqww6FrFnIEQwFKJc16UqM1brK/ZL9X//yi/9Cr/+m78xQHdvkhX//f/03+H5Nt/+SFXBv7r8Cs+x8G31vZd3OxzX5cnbHw3nTWAJqiIiHptc3SjD1+12yelbI1ptPSFrSS0kpt3y/NmPAXj04H1mJ1P2u+VQAZ5MpuRZRlGotfKrf/nfYHH8iIvLFxxPVQV8n6yYHp3w4tmn3K3UuPzyWx8gpWQcqrnru5quN7i9vuSpNhGdnz1mXWYKYq9VNz1nxsuL5/zCL6ok5p3zj7m6u0MARaHVZUcGy809pydHPHqqgqu2bWn6GmFrIR5jRNXCbnPBdz5UHb3RyQPu7jMM00EvF4p9QRyY/MI3VWfwd/4koSXGkJJIb5m2Nnn+6nvsN58ys/9jAD7+9b/A51/+Y3b6zo7yBfbIoO5b9jv1wifvf4hpw3bzDM9TAVBgn7DfrQn0GrPaFqO36IqCSKuhWY5HustZnIxotE1Gnrs0LZwc645wuqOqO6SsEbqtka8bTk4fU1Y5pqXGIUlT2nVHLdQ+fvbyBePxmMDxh/fu2BFEijog9Rra7Vfc3J0OSp3XV9/FdcJBvAzg4uIFlmUhtAy6MEvS9IYsWwwFGNeDthFUdTZ0UUdjH9O00VPOflPQ1AbLej0o81V1Q5LWvLr6Ib4m5TuuSVlW3Nxoc+DJEftdCaIdBApWq0IhI0RHqy0U1tsl01nM7forAELnnDR3yLISqQPhpjb44ecv6PueLFfBv+s6xBN3iKd6TEzRI4QxfF5TK6Ec0xI8vyqG13mGxdXNnf5OK+I4xHE8Viut1hvW2E7PdpcQaXXC7a4gjiZU2rRYOjYPHhxzXVwTTg+iEBUffftjPM9C3GvIWOySZAW2FlfrWpNHjxbs9zt6LezlTm26LlDq1FoyXvaW+v6RKrI9e/aMJ2895v5+Seiru1ZYCb5j8/bbb3Nzc6Pn/Qbfg4mmE3Rdx2hk8/TJHFPLrD96+oS2Szg+PR7ETfxgQtcVPNJJbhDl+F7I0ezpIJbheC5CVHSNxeWVmptvfPAA240oCm3hkFxzfnJKltY8eqQQIPOljZRyUIh0HIcgCJBSkqavC6lpXpKmJS6HMTAZT04HERP1GoHnN+z2as9GUcj5wzNevrhAaOSR7/RYtkng6yL3JGA2iXFdG1sLEmVZSZFbhP6EfaoKEE1p0vXGID7iehaeZzM/OkP2ah/d3/+Id959TN1kQ4wVxTameMJqpeL/4/mcs+MzqqoaUDee5ykknHCII1uP558vWfFG0OLN8+Z587x53jxvnjfPm+fN8+Z587x5/iU8PxWdq74VHIWqAua6JleX9wSz0SDz3HY5bhCSFQX+WPs3JCm2OUJoj6le1AjTQHYdgatFJ1qH1XajJD21MMViMadHGcUBeJGPECZVk2JbqjoxmXq44Yy+lYy02IBAYV4XJ6rqUOQV8fiCXsDpuaoyNHXLW0/e5euvvxhwyo7jYBgWOw17chyHNE3xbZtSkybrShLGDr2swNRCFL0g8I+HNmie77GsgDiOByGMuuuRsqGnG0igbW/QdBVS+x2t1xmm6WCY3SBZ2bctZetiuqoCXhY9TdkguRukJztq8qLF9QyW96rL4Pshhtkg5UFG08M3bfKiw3MK/TeXvlPkRNNRYzyyZ1RFj6UzfdNwKcoEaakKOkCblUghKKsaW7dwpdnTyGZYpb4/VhCjpsLRkI71KlFjjD90qoJgRFX0FIWWM/VbLN9ivU+GcQrHPk1j4dseByim3VmYhjMY+iEabLtj4gV0jXrvfZbiRw5CyEE0wDZMAmENxP+6rsnTliAco7v7TKOYri/ZJtuhWh+4Aft1RqrnxXYMqqpiFMXU2pha0tJ0AtO08DVkQ/Q9ddcQH6mxa8oON4hIi4auVJWk6WzOOB6R2hV9q777LHboe9hp3mLXgW0YtJ1FqonwpmniBy551hy8P0nTnMlkMviiJEm
GYSSMRiPWmmdmYNI1Y25vr+k7XSW2DCYzjyB47TXiew6y7xmFCift2BZvv2MrKVeUIWsn92zyLb2GF61fXKqWvShpO7UfoiCibDx8V3sw9Sm2Z5PvHQxDi5bobpjneUNnZTKZ0EuJab02qjQ95c3Uabl02bvUnUkwmeDpblZRZITxBI3So65znKhH0iB1V6XpCyzHoWp6et2ltZ2W2WxG2xzGxKLrK3a71/j/IPB4dfGC89Mz9nvd4Z4G2H48QID9KCQcGSCNwfKhaSrGkUVRMlhSjMdjLMvCsbQfiJR0zZq2yUG85jeVZcDx6QzDLPR331MUzQAPcZxzelHTSRvb0N40nYlpmqR5TZJpA3LAMPoB5mnbNq7lUaMk2QFcx4G+pzManANm3vYw5Ot5oWuxBXReh+8qCJAyYO6wHGfYM76rBYS0mExSp9St4Ch2iTT3pcxKJqHNh9/6Dj/7bQUD9IMJX3z5FfJgWtz1XDy/5m/+W38DdLW5ayo6x0fTCNhsn2GIjvl8Qt8ePAIbehL62ufuVp2LXuBzfvQQqSE6OAWysLD7hqs7LTozDYmCKfvr64FU37YtWZJS6H119t67aGQKhZYOj4KYYrfmfrnj8fu/oNaLYZMs77E9beRsu7R5x9Es5uH7vw3A9SZh5M6xgVob/TZ1SWS7TD5QFhXJ3sM2XHqzJtPQVqMP2N1t6e0ES3ezbdullR2m5p3irejuan7pF7/Fe0/VPr5cWwSzOc12jaHP5s1mybtvLzg9VQdJZLusm706pjU6IenviKTNf/6f/nW+vNBiJ+bP8e7bv0ny9tcAPAosnn/1JdKaIzWfqyhyPnu15/13v4mhpa2LKuHoZM5Md+/u77Z0tYXr2syOtXXH/RWn85BkB85BcIUARMVoov35QkEQSTbZivv7g/iPi9EV2NLG0DAyz/MYnVp4ujuR7huk0XFzc0M0UmO33d+yThzi8Bih5bWPT89J1im+dzgTeu7ulozHCgUAcHF1SRROyVMFMzs7nzOfn+A4DkvtcyUME8f16Opu6C6XuYEhTISl44Zig+vZNLKns3SXIRLUsmBxOsHVpM79LmV2ckSSqruh6BKsyMEyTApNvXBsD9tz6fucKNToEiMmzVIqre5QNjfkmaRtW3rNNzoazbjfXGFZ3gDHLrqCeukMXXjPc+hlS9MUTLVNxrbZssvW+n8OxrALGrkhK9VaaXuTpjfYrVeDL9PtKkNKG893WO90O7A36duSo7n6n7vbLceXO6TI+eJLBd1t2hz6kKPj16b2Vz/8lNBfDIIItmlSfL3Hdc0BCrndpUgpaFuL0VidE5v1hiCCbKXe5+k7qoP0jW/8Ahtt/FtVFvFE0Fst732oBEi+9XMfc3P1akCbBNFYCYLsr3jrA7X/fGfEOrnm8fnPsLrXflVGqdAI2vf17bd/hbYR2NYRlqf2/9XVBU+efEjf+jx+qj3z9hVFvSPS0N1H5x9hGAahX5LnGhLnzzHtgvFErfMkv8EwYDF5hNR+qr61YLV5RpotMB0td18usc0I1znALntWy1tc74i6UXG158yYz44YByY73X2tygbPrdlq/jh9wCg4pshWg0T+0eIUxwbbHONpu5Km3SNFz0YL2lUyY3WzYZOVLBbqrs36hpura8LAx9ColGJVYBlram17FIQ1QZBRVS3H2tbpaD7i9HiG41oUOlaR3ev79F/0/FQkV/QwFmqhlmmFL0P264pEw9bCkUmZddSFR1scSOBjhBsgdCB1dBTRdS0CC1cnSY5rMp4sKNIOQx9sfmgQjqdomCbLu5UK2NtqCAaU6ZhNFHikiZpwz7e5vl0NsBbTNPGjkKbrB7PTLK/5008+wbLB0DyhRnZ4P6Ek5wU+WZHTGiaVhpEkeYGwTDzfoW4OyVRGL6XC36O4E64X0XQ9lSZX+r5LXvQkSQ46uKmqBs8PafX/CGFSN3ts237tgN07SJljFBoTLXv6Xql8dXqzVHVJ4Ia0XQvaXK1tLYokG7Cxgh5Ei206lNWBG2JhmZK8Suh0O1gIoQ4LDTlqpVI5Q8gB2+x5HoYpMQ2Hsjg4ioe0XQ0Hv4Uq14ZzYnCkNzDI85KyuMfS5pFBaGMIf1CtMQyLIqnUwd5ruF1tgKj0Iaq5L9R4bk2pzZ0n0QLbVsIXqQ6AwsjBMCuStCfw1QGxWe+IJz5pqjHYpVSk4dV6wPomssMPbPqmpdPM4l3Z4XkByU69rutbHMejPmQ1oCAghjIDlHp9mpbANAV5dphjm+0mwzQFvX7vzz5/Sd8GVE09rNmDq/tBYdPxY7brHbQZjqsTWqCqazzPo+3lsGarugGdRBwdeQShhTBanr6joAOjKOb6+hbXn1GX5rAWdvt71ht1mcymJxRpRplX7HfqUohHPo5jDx5MhinpG4u2dgfDwJOTEXXd0LZa5Q0Ig5h0v6fVRHU/8Og7iTOJhrkaYJp9+xOO9A2mbQ1O9qYpyPMU07SxdTImUUnAbrch1h5dnhfQtd1gbO44FnXT47oCqaGJTV0rsQPLxvLUGdQmGa732qPEssExfMVv0gpYfd8TxzF122DoufI8F8MG0zrwBjO6rqP9CWKxaZrUvUXTSsLwgOOXTCajIQFLkpSyKBmPF+xLbUjuhAS+y3pzO5Cdk3TDZDKh0aqqVfYSx7FJfwLG5ro2VZFhOjamoZPTWu3vgziHVfuMRwscx6bQ+6hvW4qqJCtSdokOrvIawzCYz7XC136PYThEnkmpv3vfNZhSagEeLd7g1Aq+qINHwwrpm46L/QuuV4r3+Rd+/lcwXYfl/XpQI/vTH3yCH4UcjdVl+Ye/9/vM5lP+xr/2N1mtdZLkeRRFgR+off3iheKtzufK6BogzxNG0wllmXN5qQoL52ePieOQVItXNH2HlC1VZXF1pfg5v/5rf4XNZqO4CjqBKsuS9WY1/M8HH32ooKBhOMAeR1HMF59/Sp7nzLUIk2kJktWOAHVn+raDLQz++I//hE//9PsA/KW/9tcpmyW9EdNqfvEkcvn+Vz9GPFNn7kcf/RIWHY4bs9seTFMFN/cXvHy+4hd/XgVTUkpsw8RwDt44MenuM7a7Neu92v/j6Zhkt8IwwbbUmhJ9R9uV9Bq6nxRbvKOIvmo4iEQ2u5qjMCDbC5ZLtY826fc5XzxgPP9LAFyvviArP8GQFZFOTG8vrhj3c2Z+RKF5u/OFh2FNMFDBuTlaEkYVZdFi9mr9zNyQ09OnbJzdUBBw/QlhNObZSwXzLNsMYS54fPodQmunv9PXmKJn5I0GvnbghZjWHemN9tVxp+T7Bq+2MVCf9/H7T7i+uifwJVKo4Pjtt87Zz2+INKysrW3u4wt2h2ASePfpI2zbxtRjuVgcYRDR9BmPtR/Y1UVC03WU9ZYP3lMiEPtdQVl11BoGfeQfY5g2fSe0YiB4rsU+WRHE1gDdk3TUlaTSvlCu5+A6PmXZDMGkaRoIs8XzrUFkyvcgjAI6DT3r+5bJZEqapsO9XdWS6XTBdl3Q8hqGKDAH2KVhdjS54Hhxxs215mFFLq5hqgKz0Gp9ItXeSfo8r3
M2mw1RFJEXal33nYHnmZT1GqmLMKF3BMJis1VwwiCyyYuN4gUfikCeMo+9X9+w1UnKfH5EUW1ZrTW01lOFnl62zLufMHz3fV5879nAfd9uch48PGO/3en3fonvzqiqS8pWJS2W5TGbnPGHf/gps5k6ByPfo23z4Y7u73eYjiSMXHTdFG/eErkn5HuTiVa9FiKmqmoiV+3Z/WYPRsZm+yVRpPbfKI549vVnxOEZni4QNMWSo8kj2l793rzY07cuYeRwdaXikvnCpsh6DBo9lmoMbq82nByrz39x/Qmea3N+/IDlRkEcF9MJd7drTM2znU0X2PNHeBOXXMMzy7KmNWpOTp7iuGpubLehqU1OjvVBKRqqsiEMJ5SliuNXyy1x7CJEPnDdRxN1R3e6MG1ZBp5vU5RbLq7U+T2bPMIYmTQFSC06NxtPaBuB7LSpeNZDdMRoZqmGB9AT8/jJ+yzXl7i6QH98phK2P+v5qUiuhGHy6WfqgvF8Uxs0CppGbeCTyTl7M6MKU/Q9j2XagIHrHiagw/Y88qyn11l/05Q8f/kZo9hjOtNYzUJS1wwVm3jqcnV5rWUx1aAVZYYwJNvdHedn6vIwTIX3PxiGZllKHMcIw+bVpbqE67oj8COyMhsObt/3afsdhV5MjezBMsnKll5/nh071HXNPi+Gzgq2Sd5kaCVRLNvibnOvnME1X2R9vcJ2PIqyBp049X1LXbt0ukLruSNsy6Rt+6FbV1cSIXoMXaIVRodhqiTkYOCJcJVoR9sOogFpkhOPg0HZpmk62rbFMHo8zcEwLIemrxCmpCkPxqYqGSh0MoA0cV0bSY+jJQX7piVPMxzbR2j5+V2SUlfdUEUSRkvb9rSlpNNV4t4sCMMQxw7otClzlrb0bT6slTRvMCRstxW+xp1bVUfdZhzN5qSJVgJyDRzTwtElzX2ypq4kruMPBMw022OaAsMQA0/B9Wz6vqDWal6eO6brGlXt0hMYRWN2yRYpBb7m+xV5hYAhyDY8IX3lAAAgAElEQVRNG8/zlA2Bfq84HiNMhb0+kFwFLkVZDIIkbVlTlCWOHQz7oWmh7yosyxi4IL3ogZ5OJ/C9CXZkI8seWx/mWZZhWQZJlis+G9B1PXme4uvkwzAdqhqQkuWdCmxsw1OJdWQhdILnuD1x+HAI9KPYY9WBZ0dEgXqvspbItiHQAW2elzRNTeAL4lh1MPoOHMMkHoUE7gH7DpN4Clr1SHYdbdVhmwzCGweFwL7vB9y3aZpkWUbo6wKBEMSx6hAdTL1tU2HMjxbzIREWQpCXu9cV4qbFEhZ9J4c9Iw2LsiqwTCgrdVB7vs022RB5B0XBDX2n8OcHLHyWJxiWRVFVrxNhYZAmBZ3es66nSMRN3Q4V7ru7GybTMaOZP/Dc+r7l4maDzkvVHjcl2/Ru4E4awuXy+grXNQfRF9+3keQ4eh10QhL4iodhajfHvlUJbtt0eFoBKk3usSyLXJP6w2jCdl+y3e6HIkwQ+Limz2a75+xMST+PYpdXry4pK1//Poumq0mSHtvRFX3TRghBUaQEmpfY9xVZ0jLRVeqems1qx8n5Gf/ef6TMf48Wc/7SL/0i/8Xf+a+Ggk4QRzRdi9SL4uLqml//jd/AC6PBGqFrJcKQ1NqocnV/y8OHj7Etj/ulup8c26QsS9L9mp1WTX369C2FHtDmkpQmYWRy/ep6OLvOz8/JsoyqqoY1FUURX3zxxRBIHR8fkyUpXdcNalpFUbBZ7xTB/EjNu+u6YBg0h4JEVZFVNfvNnvkTFWTPJ3M221ts1xkCTNHDJ5/8M87PngKwfVxjOhWmOR2S7Koq2GxvmY4fcTRTiIyy7bEsi0KfSZPREevlJavVinfe0xLcjsEi9mlin3SrxrMpcrabHZ98rr+n49O2FZYwsX31t3Zr0DQv+ezHazpTGQTbvk1SlKQX2nri6QdsX5U8fuwitIrqxP2QVtj0ZoanJc7T7ZK+7emFihs8v8G3J3z45D0aVMBeVx15VePNTAx99yRlSp7t6TQf1zB7kuKG+/X1QHI3DAPHnJDkCfOFLn51DWbnMx0fuKHw4QfvcXn9Y0bxsZ6/lF/7V38V1+/ZquXCybnH7W3J6eIDvaZ73vvGjO12xf+GSvD+6m98TJm/tj+4uHyBaddMgzHadYUg9nBcgzBc8MWXivPkuGOmizPuNyqYLNOEtq0p0oxOS8hPJgHSari7B904om4TantNU2suepnhOpIgGOHqToQwHTzHoai2nJ6qfdw0HfvdfgjgkzyBDjzbw3IOQjgmQRgisPC0kptt2xiGQdWo77nbbTg6OmM0dhE6CV0sjri6uqJtg4GfOp6c0Xu7AQlgWhLPU936gYdt+1iWSZoITC1aUNV72kbgOVo0oe9p2xTXf404aRqDrNrQi5bxVI2DaVvMjiZstQLueOJQFDVFWVM0urjp9JhOx8mDObnm2k6nU3xPkOjzx7ZCqjphu0s5PlP3WpIuuV9tieIpW11gXa+2WGZDrwWKwniENDqMe4uHD9Q63673uK5Nma+Z6vNb4JBlN/iWmpf57IjlOuf8dESV62JqP2PsBbiWORTfTUxsu2Oj0Un0Hb4fsLrfDAJvTVkxigKSRCX/oRdSVRWLuUFRqb326PED1cXcd5wfK06n5bQIGQzc9K6vyauGbPt6/exfrdndXWMxItTIHNPrsOwJda6+44OHC4pMUhQV51r9cb/PsG2b3W6HF2qOFzVN2w57Zj5/yHq9Jgwj8iLRv68hcH2CiYOrxY6aKue9dx4NiKw8q+gaQRh37HVsaAoTS9xxNrfIMy3YU+nc4894fjqSKyFpNdRtNJmAaMnzLZPZQS49QbYZWZIPPlNB7FA2xSCj7Qc+s9mMIm949qWCE4RBRNl2jOdga7hLltfgpGwydYFauBhuhhA2jfZAStMU3/cp244Xr9RhbrkC27bJCt2GtSzyusY0O2otdWt7Lrs0wTB7fC1ysU8S+k5SH6KdZE/f9/TSHIJzw1CQO8ezB/hLVfWUVY3rqPeRdISRQ1EUFHofeF5AkeXMj6YDsbCuS1zXRgyk0JamrHEcD6ErboahkjWkDsSbVh9OcqiUe35IXpWMxzGZVpYxbeilHDw1TMPR7uUGte5cmVJQ1HsMw8A0D54SIW2dYw9yscrnyzCMwXm9aZR6k8AaWuK+Z+G7BpUmWwahz36/JZ5FQ4W9N0xcx8MwLJL9QfxDdU0OiWovJY5rYzXh8DdhSBwnYpekBDrQz9KaspL4gSbGOzaWEAjLYLVXN6NtOfStg7ArOj3vhiHY7pIB4tjTU9UFjg29Tnpv7+9wHAfPjYZ5932frm+GTqSHS9tAmud0utKKIRFCyc0fPLqSJCGMHA4qbpZrMfaPyLNmUDUSpoPoD8mpVhU0LXoknn1QWuswO0kj5VAp9zxPj1EzqG51XY9r2XS9ujjqxqCuOkI/ItHQy+XtM4QAyzY4m6vLMd012G43+HOs7kuqskGSIbJQf56L5bqD4ItsDXzXw3EEufYfk52lO3wppj6xDEON00H2tWuhzDp6txrm4fCM4
imtr+WomxbTfu1Xp4LfHsdzf2I/mvoy7wdFsTRNcRyHVsO1uq6j7UxMs8PxdPVVmnixhyHsAZ7h+wFN01Bphb8g9pG9gRBiSIhmsxlSShVQGweVuIquB9c7+IGoAHsyng7+GqZtUdUNwrBJM/W9HEcpc/XNQfTCwDAEQggMXSXO0h3CaDiaz7S/Dci+pW1NmlrtNT80MJ0jjNbB0HLNiJqsypjNYkqtdCrwCQIfy1ef3/c5vdkwmbqDUINBSJY+Jw4idisFwbFsySQ2qBJFcDdsB0RJPIsxxUHcqKPvG4JIIAdPmYiTd0bsVuq8S/Il/85v/RaPH39Ab6vf/Pf//v9MWdb85V/71QF2KHuBa9tU2nMl2W3xgxA/GFG3rzvsURiwutNWE6t7vv3Nj5Vfixb6Eb2BKTquL28GeeazBw/oOjlU84UMWMyPeP78+aB8OJlMWG62uvOuYYhty36zHqrdcRxTFAVVVQ3BFSjVK9M0OTvTsLWyxHEcLB1ExKHPZ8+fIWXHA61kJaSAPqTpWqJIraHLr294/71zPvrZbwNQGy6+b9A1/QEZyfXNPW+99Ziz06ek+q7rpYEbuTj6HFnf3nN2dsLP/fxvsdtqUr9r0LUZr17eMI3U/ncMweMnb/HVMzWeVdviCpe2rQfocF81ZP2GuqrwdECb5A2mtx98b/abLc9ffMlybfOND7WXWP+Ks8UxVh3y6qXqIJ4enbNNntP0hw5/TBCMeP5j5TsI4IQCmVp4VsKrS5Uwh6MQ0RU8PVeJaW8+4tNPP2URh8RjtR/vrleMI4GL4GyqgmNER+CeIhwtyZ/UzMYTTubfGRAZjh/gWg5CmAQLdbem+y2eawziDnUDebnCeq0OT9dLPv38s0HuuSgKpBTYLuS6hZEXazzXxbTNQcxluVyz2WbcaquSVijIoBf0+Fp9bb3ZMImPcb1g2P+WExCPXLZaWS5JC9K8QNIN5+ntzS3T0Zjp5Agh9euI8Fw5qPLRujRdj5QF+40+k3yL3dWW2dEI2z4UwwSGYXPY2B+89xGBHyNFDWhxHEvy8NEJfS+pKn2+tC73yzWmvgjOzx5ye7vCdV1GI9Wx3G1Vgc42I6Shz0qnIwymSqQDFS8Ks+Dm9oKJVsqUtNR9STQaUx9sB65XiE5iG2oMxuMJ+/0lpm0xmaq1v9svCYKAvpVDN/L89Iy8SIk1rWQ8CVner/FCi8nM1mvYYjGbIo2W3VbTFfw56+UtSX04twyQkl6a/OgLlXg/PH+HydinkzteqFCXo+kD0rwmXqg9Y5kugfMI3zEQzcFbrMI2bXy/UzBxAAyy5IYTDVEvigLTNAn9iFgnVxfPdkxHDxhHap3nec50NOHFq+dYlrofXadmkz4jDo64vFdQViGU6u9md68/qmabVCy8R7x8qeJqW8SMI5e6KBkFR3pt1NzdLYkj9flff/2lKlJLF0d3z6sqZzQ6pm1i6kaPVdfjWAL03RcHIxzDZzQasdmqxNCzQx48eEwvM24v1OuevveI3XaJ0F3VeBThNBJhl0j9+06PT7DsjrIsqW2dsJc/oUDzL3jeCFq8ed48b543z5vnzfPmefO8ed48b543z7+E56eic+W5Lr/yiz8DgBPY1F2CH7yN46kqwH6/xfU9tsl+INXXdUldm4PT83xxRtM0eCF8/AvfBGCXrJnNxwijHaQui6Lg8dMHdHeaIGnXyKxEokifALYfU7VKlrrQWbCocwT2wK8ajSbYdoAQgDhIs2bIDgzEIJcaxzFpkmEbh4y7oqoq4vHoNYSrUTAQ06ypm9eVcct0BmnkpimIohDDMKkrDbtoWoIgoq7bgyo38XiCZQh2u4Ncq0C0Nq5jUGmp8k7WWELQaXyBYUpMC5qmH6r3wjeQXU5Z9ZS6OxEEEXXZDF4YXdfiOSZ931Nq6XlpKgM4z/OU/wzQNQ1pmuMHB+6Pqr76vj90NWzbxjQFVdkMkM2+7ej6hnikhRuahidPnrDdbg8q4Th+BPRKtt0/CCe02LYzjFPf2VRNje16Aw+nqguSrCAeeewyVVn1/RGW6ZLmqkLTdAZ9Z2IYHY3GGLZthaCjrSDUkpz7ZI9peINsbhh6NG1F31uvZbOFBYZF2dQY2nemaissg4HkaxgNaZ7guC6mGQ6/2Q9CPQeWXlMBbVdSaENrxzWQUuL5IbZzcCuXCBkwGUXDWvR8h6rOybVEueu6tG3//+N4lEVJ27aKOKo5VwIIfB/rABlrWgwBdbsbsO9RFBIGE7bbPbrgTduVNKkYYBdZscYyPXx7jNTvLenI85+AQYQhApuuMeh0lygr1qTaxf3QkS2KAtd2htfZpo8dexiG8Zoj5B2I9TsKPQajcYQQrzl7tm0PPi4Hn6QgCLBdD9t97U1TljWuZ3GoRxnCohcmdV0fkGeEfoRp9/SdGIi3pi3Iy4rIPwhoFGRZgWEY2Frndb1eY1gmi8WC21tV9TNNk3gcsVrd67lSXe2qzXB1JdlsLcoyxwsDEr1mjVpgFia+tmvo6g7f9dhu10SBN7yXb5js9tvhDKrrmroyh85g10OSLWkbgXHwJGsTLMegbgoM1O87PiupagnVgYPlsLxaMl/M6PXYGXaL24/ZbF6LeLi2Q9u2hPFrsZNdUhH7Pm6gzyDTIssKgiBgrXlRril5+eMVH33r5wD4T/7tv8PlzTWff/0j3n1PcVH+9J9/l8ePHnByfsrzFy/1+hAYwuD6SkG4i3zLu+98m1E44U4LvNimgZCC1b26K2TfMJlMdDVXCwZVLV1bcn+35vhYVbzDyGO7SXC1SEpZFux2O+7v73nwQEHrpJRst1tG4zGWrdZQliWkacrTd9T3jqKI29tbXNsZhC12ux2b7Yr5fD54teVljeOGA/RUyo4s2fHg/JjjUyXqkec5huWS5XuiWH2v/XbLg/PjAV5cUGqbAEGhIapN0xJFirMnNU9ZiIAkzQj04sh3K26XG1qzI3IO/CrB7/zO7/Lt7/xFKv1enuNiWwGWq+9VL6MuGhxHIlu1Fh17z7d+9le4uFiz0tB5z2nJK3fwN8zSHdd3P8IpHf7Nj/4zAH7vd/8hP/qjP+Tx9Bs8eaR+82gKRR9wojtgYRhxfbtiMhWMpuo7Pf/6lmk8ZjKacPVCVbMfzU/40Y+/4Pj4qVqLXcavfeeb7NcVDx4qLkxylLJPNsSPH1Lk6vecPZjQ9TWZhqPJaMQ0sJFdw/xIzTvSoGthm37FxY36DuNJgO86Aw9zt7ri4uIl7QHvB/zuP/4njKJjPv9SdStOTh9zf5vjhjvqdqfX64T7LMMPXA74vqLMKasNroZPB6ajJOOtikzbM1iM2S0TpNwow1r+P/beJPa2JM/v+kTEmc+d7394U+bLoTKrMttV3abbbmO7kQdkCQsLCYmWWEBjBtuoNwgWIECwYcECLNksQJZYYMkLEINACMmyGoywwBPubldV15BZncMb/vMdz3xORLCIuOelJbe7JXsBKENKKfPm/d97bpw4Eb/hOziuVhqcEXv/yOvdgctHT7m9eWA5d92z9XxGWRYI0Y4cSNihjXjj
gWgMxaFCSsXjR+5ZPx46qqFgaN542FVNRaTOSAP3DG3vOrbBA5M84PbWPetBsCOKA4RgFNqY5mfEcTpa3XRt5n+DoW383qwsWThFxVsaz/81WrCv94T+rLXqiO47Li/eIvPdpuvbG6aLNV0rEV7s4Gz9GCmgrd3+en+dsN/V5JMJxdHNgRITDjvNPM+YTdzaC8KMeZyM0O8gUmSTlKZraTwCZZqfIZVBKsHZuZvjPJlhhpaZh1kiQqqqIMsmYxy02WxQKgRRoXt3/16/2jBbZSNn/8tXnxIwpzgmI984TQuSJKSpDZEXLptkktTMqDy8b5LNyNIlm90t+wcvwPJ45cSTvEl7Ek+5ub0ly5IRSl/rO9q+5P7F1Qi5f/rkbReHeE57FIWs1xMCGWJ7dz/PHj2lKHdkc0XXntAXMd/5+COOHjFlyGm7PVYnZKmDRmfZlL5vGHSN9NSLxXxJEFoKj3ipDxVxnHL14vVIy1muM9q25dMffe64VThIeldLZODWVF0KkrgmGJZsvQ9bXC8Y9h1VuSEI3e978Fzy32r8vyK5GozGTNxiLjpFWe+4P/aEHpeUzTR3r+6ompYTFOpQ1lRVwatrt0He70uMbVHBQOZdq7thT5rPKYpixKdbm/LyxcB25wLMjoKyrNGDGM3IAAKpUCqk8wFCVeyZz9djYHy/3RAcdyjvzu3+yHoIDiNmvigK5rMVXmuBrhNMJhMG3RKfHnRjaNqGOAlJfWIhpaTvB05CFVmW0XcOSjd4I8V8kjF0A7tNxcUTj+M9lmCH0WMqiiz1MCAEY6DvoE6GxsNagkCC1UwnMY2HZ6Wpou0CwiBhNvPcLEClMWXpjUfjCFBo0xInHpsuBEYqMHZUOsQI8jzFI7GQWIwR9G1DeAo6hx5rDYGKUP6+N8PBqYN5tbKq0hwPHdYq/B7Nw+aGPM/JVDpCxJr6iIwFQeThISJjkTn/pV57zywryHLlglzlPbralpZmFMKo6xoVJPRtO4plKCU57Pek0YKDhwqqwJImc3oPjRRTy2w5oWuh8vCQOImwQtI0zQg1a5oeK+y4+UkCjLX0pqfwm7kx0OmB4347FhJCYyj2xciTGNqBsqxJko7MqzhZE1D3hs5ux0O8PmzJ0phs6n7vcVsxm66oiv2YAAnr1v7Q9WOinUTOuaIYFS8TsJI4fANjtVYz6JrVeoLw2P4wykmShMbDWIRckicJQRDSt6fvq5nmEwJ1EhWRzoi3KcYkIs/OyXOJHoQ7VID18plr0XuFv/u7O6bTDG0Nq8XS3093/5erM4xPUrASGUDoFUWbpkFKiTb9yNFyRoSCtm0JfPI/nU4xxpkIgjfQljFah2T5SU0vwJqWsizHpOxY9MznS793QT84sRDHNzjBib2PyMPDGOxUleOrnBLFKIoc76uqyLxYhggU2XJC0WyZzU/PUQuYkXOZpjn393dcnJ1THf1BH2jW6wvMYGk7z2VYZaRxQlm5wEbagNVq4FjsWC0c73Top7RtycNmx9QXWIq6p9MD0mP/MS1hVmGkQvln+1Ae6IyBvKcSvjg2ZPRtS+w5bEmasEwXHEqJjE48iR1CKq7ut+N++tb77/LuN57y3jec4t2PPn2JDiqyWTZyZr788lP+xD/9R6iqYfTWE0qwXDzmb/2N/xWA2/svuDj/4zRNhzh5LvUDcrBcvXYJ2WI5YzabUbfVaB4NYIzmuN+N0NLpNOdYtGNwHMeK8lhxe3vLT/+RP+rW53bjuJqK8e9urq4pi8MI99vv99R1TZIkIxTr9csv6bqO/PLyDYe0KEizjMgnH7rtqKqC1Wo1ruuirQhlQBwIag8ZFeHAYnqB8PudVClKWPTQkvizqDwWbPev+fjj3z3CnutSO78xv0dsN7d8+pufI6MPePYttxZfvXrFz/zu30OkAnovPjDolk9+8pJD51Vc4xhQDJSjoExd7zFW0aMw+IC5iYikovdqobvtDe9+45tMV5f86t/9mwD81Ed/mPnqMd/91f+ZTrnCbP9ii7B73lYu2Xp1f0PXH5n06zGh/cb775OHUzpz4Od//gMArJV8+PwbRPFJVCcmECsiDly/8LzaaEm97zCdZb12Cdf9XUeaZQTS8UemeU4W5nSmOeXGrC9CXr3cME8fw9p/Vj5wfVOSZe7MDoY7Hq+e4vKFH7nXdMTh7kiCF8YpKmR/JDEp0iemi/yMvr/CdB295//ooWI1yym9uJLpKy4WGWUXcyzcs3d5lrOvKqIwpD4JC2UxfbvBeJGGWXKGqQXvPnlvFIrQveZitcTakmji5kCbmqYZ8DRJHj09oygadMebJFAq3n5riTUJF95Qth8Khi6l9gXQstkTqYCXr15xvjrBXztMLzDGqRICdK3jPJ6SJGME1lrquiOO3hSKQhURpxHHnQ/YswxCPfI594ctcRyT5rNR6Gs2z5AqJBERBy9kNsSGvu4ZPPSst0598ljsqeoTXzVlMplgkDx4rttuXzNbKDYPbn9drmZ0umJ9fsnBn6MqskiTUFZHYq/8ebPbIKRTOgR4uN9z/viC47bkkeerrtclcTin6cxYjFvML5nNlzQneWKZ0hvNy5e/ycW552ElAS+vXrCcv4X0z8PDbsvl4wl9eSoeH9juSpQMMV6JGwVpEvH9Hzpe3+XlE6JsRldq5h5yPNgNaRTy6L0VUfCmcHrsG5bzlb/nNavFnCiKmOSe31TfkmVThr4fffQm05yyqUk9z/3lyy0ffvgNtpvDKDqXZwFt3RGpcOSGC6Bra1Ifiz7cFaRxxiRL6T3vWwnNzesrHj9+TF2dYvQDZ2dnXL12ucR2c8XyLCfPQo4+Dqtefp9QZjx+/Jjt3vuprf8hBS2EEG8BfxG4xImJ/QVr7Z8TQqyA/xp4B/gc+EVr7VY4wsafA/44UAH/krX27/yDvqPTPXc+APvis2vK45G+jch99VUFPaHIubm/pvQHRd/35NN4TBhe3T2QTxLSTGCtUx25u7tnOlsQqozGiylYHdEOG6zyfA7j+BwqCkcOhBCCOFJcXX1JFrtg6uzsgsPhQO8PUCEtIghQYcDEK4ptH1rOLi89PtonCHVH2dQjzh4c/6FteoLwJEMakmUx6u8JVg19374JoAeDCgRWmHHRKSkwAazO5iNHJ04kRguUV1FEOH7DMHSE/oES1gCSicdgS+k6H1KBpx+w3+8JZMzQ9QSR7w6EkihKmXhy+fG4x5gAhHqjRCgEoQw57vfMTkpreYy1AsFJBWxgvlhS1zXSl/3rpiefpPS9oSxPJELpjdzemIMKYZkuciebCsztkiCI6Ds9Gqe2DcTJwHKV+XlKSCchQdRSFu4apkFG17gEferFFNp2oCiPxJFbd0Eg6PoH8jSl9MpnfSvJkhwhhvFBT6IUPVRjV+X6+jVvvf2E8rBh8IeeMa7LGEpF5CvAUT5lt9uR+IT9WFYkaUqn+1EgRAhBWzesL9YoTkItlvnsbCSTChTrxSVVfaT3OGCjB/rOETNPIgl6sBz7ltQHwoHKaGqNCuQYvDqunCKKIjqfECyXS7quI2Puf4tFa0GWz+g
9lyhJQoS0WGtQnsDRdTVKTdCDT85FR9u2nrPnu4rdgDXQernfST4njATzdYg2bgNO5Rprd3SDZpbO/HrpUCJkNvNWCSLE0pFlb7q9rS8CSCnHjsm+2NGWNZFPxFWkfNcvQfpnr6oLwiBxQYNyh95kGpHE2ThPfV/TmOOIqQfXndhsNyilCH1HSAUBd/f3rFYu4MviHKxTk+y9geZkMiHPcx4eHkZRnSiKaNuWy0vPMWlL+r7n4uLRyJPYPDijYSEN8mQxELmDabd3gU2wjrH9QFvXTHxybq1mkqXO1NPzi4QwnJ1lhJ7j9fTiHfbFFY+fvU9VuPt5c1XQ6oEgq6mEU+YT4WO6vkH0nivW1zx6ekZdlzxsHO8jTXMGG7gOa+fVl/qWoTcsT4boekCFhof2Sw637t6cX6y5ev0lwkz45T/zHwDwl//yX+W//R/+Ev/pn/0v/KRLLAGzeMkPf+QC0+VZzu/6zneo6w7ln61+aKiqlrt7lzjFqWU6WdJ0LUHgu1KDwfSG6yvX3VotZ4RhSK2bcU0FgaQ87tjvr/noWw4hoYRAoDF+vUol2WzuSZJk5FNtt65L0HXd+NrN1SuePXs2KoUNg0FrTVmWIzf07s7xmy4uzqjaEwfKUtc1gV8rXXUkVAGrszWVXxsmgCAG3fbMffL/a7/+N5nEKf/47//Dbv1sb4jjhDxWIyLj9dULrO2ZzRaUBy+K0CriUGI8N/V+uyOKJWerNZWvNgdxhO4ksYJbX60XCvLJHHWy9xgsVdHSGUtT+ADTJtRty7EpEIE3V9UJ5aElVO6z9/srPnr+DXQQIb09y4uXn3L2/F3+ifN/je/97e+6dTabYPo5rx68gM4sYX5+TlcJjn59Hm5CyuYnzGcXtH4tfvH5C5QK2Nw7M1KLxvSCxhiatvBr2HVLpJSsVi5YbDvDblsS+sD/2VvnJHFIGoVIz99YLGZUlUSolok/t28+36LtQFG6DunuoUcL85UCk5N1X62mWHMSoaqYz1P6SjAM/hynILQarSXTqZu7strRHGrq6lQkjakOBYOB55duD1JK0YmApmiZLU7iSgekrQikK0yt5wuCICCJAxReECGKqMoj02lE4xE9j88/IEr6kYMZhwsu307YH6/pGvdsny0saW64OH9r5CW+eL3DmhblBYmWi4im6bhcLxA+psvTkGEYsNZi/TlTNxXYhK49nXMldV2j9cDR88XOz89pG832rsbqUxGtJowE+52Pi+yULJnQttVYXJHSCboo1THxHajN9s6hIbRbi9tKup8AACAASURBVLkCrEJbxm7vbnfPfD4njlNXeASULCmbeBQfskKyfeiYZBGHo9tzJTlVWYEaiL3W1zC4IuVYEBGKYHcAq6h9sfBhc8+LL3+DR4+XYxf1y5c/gt8wGF88nmRr0iygqgvuDi4eXm6W3D9cM5vsePvCq7QWDY2o6bqTlY5HbkQT9l5Z8W5/RVnKkXda9BvO1hf0Q0Dsu0ZlU7I/FiiVsyvu/W+uKMqWyzPXxe26lpvNDU+efIDwyIfF2YwffP9z3n3+PrWX1//y1QNxnJK58JGiEDRVQt9WWM/H63Y1i+klx6+g2c7OLjgWzSjSdLZ+TDbJ2e3vSHyhb7lOuL7Zo3povapgU3ektSWbuPVadwdCmVPXd5TeouZsdU4gUl5/uSWfnxBZ3kbktxi/E87VAPxb1tqPgd8H/LIQ4mPg3wF+xVr7AfAr/r8B/ingA//PnwL+89/Bd3w9vh5fj6/H1+Pr8fX4enw9vh5fj6/H/6fHb9u5stZeAVf+349CiB8AT4F/BvhD/m3/FfBXgX/bv/4XrWvB/HUhxEII8dh/zt93mGHg5Y8dbGfQPW0Z0PY1g+9AbXcDebpCyuMoQ6rilLJpMF/htBzLgftNOcIuumFOvSlgaIhDl7lmWc3QdyhvDiql40AZ3Y4SjhKB7jSr+Yq5r0wXTYse5NitCKTrLrVtR+HlN40NOBT7kS8FIJSEwY7Y29lsxnbr/BVOfAete9pBEzEdZaul0l5y3WfJugWliaKIvvNV6unMdX8kHkLo5CiTJB31+aMwJwg64mjiPKMApWDohVecc9LlTdMyDAOCzN+TjvMnMUMvML6iHwSS4ngYeRnLdQI2QOsp2rejt9stkgwlAjLPsdK6xaJITv5KUYK1Gmu1x4tDEM7JJwll0ZDPfAehKlgs1txeeb5FnKOt4ssXt6OvTxS4itegKxLfWs+nEhm0ZL5Sb7Qz2q2PgtR3LOr6wMXZOUW5JYp9N6JTXJxdjH4SBojihENZkHmIipIZXdcQRhmPLt4F4LDfIoNgtACYZkua0iBRPLl0vhPt0HPYlwgkQ3/y7dIsZ2fUXtEsiKFuDsgwGJUdj4eKNM3RvaU88WqUJU/mI75bipChl6RhMiot9l2P0oZAhDQHtz7n87kzCK68ZHxigYEgiN/I0eqB+WKB1QOTmed9DS2D7jlbut+y2+3Q9FTN9o0pZNNQFBXTyYK5N1dshobrmwcCr1I3n2ZeRUuNOOyuOWIjRpjHYHdgQ4ZegPeKaYdr+q5CEHGo3TovDg9Ouj7wXj9pTt9X3N7uRvx//BV5+ROfCqFBCYLoxAMbCFVAVZVoz53I85RABcwm01HyF+tgoye51qatiWJL3ey4993QoVNEsZvnwcuXB6ElCAIK//xHsaI6HLHWjt2JruvQWmOtZfvg1vp0OkUqjfG+c11fEgQDTbMboZCTPCKOA9J0zm7vqoV1VfL220+5v3d752KR8PjyOXmecix2fg3nbPevybP5qJQlpeFhcyDPvGJbrQnDt0iSFV+8ct5Ju2aDUguEeor1PNN+ODJdvjHLlkHG/tDQ9zBbOjiKNSDFDjvE1L4CrURHkmWUtfd8229IctcN6TyX8NNPfsDP/cwf45//xX995Iv+Z3/+z/PP/eIvsvLQrC9ev8RqTTLJ+eLK8VNEGPDo0YeU1W7kF6lY0dQdP/nixwC89fYls/mKeigZPHw5EpKHhy2N//633vqGg2K2FdJ7tZzN13z66Xdpu8PYVdSDBTFgeg91jSKub16TJ+mIPKg7p2JZt80It3316hWTLB/RCofDgSTJMGYYTah3ux1VVfDRRx+Nno6D7tz55M+m17e3fP7ZZ2STKeeXrkp8aAdM23C5nnP0/MLt9oEPf/YXCLxkvNU1QiQ0XT1Cmruu4unTpwgh2e9P5rGZk/P2yM/bzYEsjsjjaISREwr6Y88siSjL1i8GS9f3dP4Mm2Q5ARaGOb2vbq/OFxT1nsPBEKS+C24awiSm8r5sw9Fwf3+HjnMuFl7ePxfcbK55tHzEeu32qb/1d77LT33rOb3vMhTHBfujIExCNr6j3zcV1p7xxa1m0CdT+/cxwwF85Zo+JchbVmqg8F2Gvg2Iwi2IgR//yPMw0p48n2Kkm6frbUG5bzhbPnO8OeCLq5+g+4QwsSO6IwodxzWK3YRWrUKFEVX7RiFSywEZJZw/cs9j3/egJU2wZ+7Nh+sC8ijDhnK0iJhOYiZ5wGLhIXKtJZvm1MeO3u/71VAQSsn87Bxr/B
ktBQGS1t+7+SKlLXvqQpN64++ibImijGpXsi99dynZUBzteK5uD9c83GrWq0uePF6M6+76akOx+4Km9ep9cko7dFjh1mZRCCQhYRIxNG5fPD8/48XrK7TuMR7aPnQ9WovRYF6qlihImE3m3G9u/X3vmM0mlNWO5co9I5aKwdQjmsbFNQN1c+TCy3s/7LY0bYmmJhCuO5lMIsqiRvp47uGwIYmWSBOSJa4rFc6N0wDoLZXvAKeJoChfc3XlPdCyhK5v2G1/DF59tWkcHL9uCwbvS+a4uAL87wujlNdXdxTbjvPLk52AJZsGvLj+Hkng5NnDpKMfKhJPCziULzm2IWmyomnc3zW3R+J0ztXdhusr18F/+tYzmusG5U3nu6bl8izhy92P2Hraw2wROwi79+x88brj4UEThJZ56V4bhgojW3ZtzU9eOHidVQ3Pnz/n7/7QSRqmaerOuOMPefrY2RCUXcvVzQ3b3ZHHT9ye/tlP7jAUnC2d+mOkMh62R6qiJsvcOs/TCTfXD0Rx7VQncdoFeb4ckR13dyV5PVD3NYuFW4v7XUkYSaJgwe2tOy/yfM6haNGeC5dnU47HIyruR52AbtA05oqHzRVL/Q2/pv4RmggLId4BfjfwN4DLryRM1zjYILjE68VX/uylf+3vSa6EEH8K19kizUIOhZuQrndeKwZFXZx09hW1HRisRHo+Tp6H1FWPsm7RKxXSdQ2TZEbk2/S32y+ZziO00kh/MAVyjhLH0eCuaTRpFlAeixErGoU5Q2+RStN6ye+hlyzWEyp/CIVxjtGKYeh4ePCCCNEckyqathshRlEsMLYdoWZdC1JkSKmJQ3fDD+0dxaFnsXzDKVMyASrw153mE5IkoW1rkqmHWVmcrwcQe/8Gqx0xf7lc+N+rOB57Z3DqF2HbNVjTU3k5etOnpKHCipTAwyWX6wQzPLBYPCXxwiK7ww1CR0wX3vdG9CRZS3lQHF3cxvJcE0Yl5+I5Brex3N/VpLlms/Ut62mK0S0Wzd3DSz+/huPReSXVXoZ0MlVMF5o0c0vrRz/6IUmeoWQ6misPEXRNwnQ6H83jrIZJsmbvvSkkBqkSkjBB+SW/nITEseV4ANO5RG29itlu98zXHua52dN0EcYkTH2i0XSavm2I5cD1Kw/LE4Yk60dPqziOORYPLCYLpl4CuHt4IE4TyrKmLdyBMp/OORbbkRs0zSOsjREDSOFNC+mJjQZh6eyJ8L0gVCXaiwi0LcjwgJADM+9ldDA10WRJrzfkPggzQ8qxKMZkpyoa0nCKUIrQQwezTFEWFWmcUXu8sY0gkJbNzkke932PkiHCOG4OgApDFosZZXnEDCd/M40K3kAArXDJcayCsbCwWr6Lph3xz8YYDqXjs1jjN25doY0kyy3Gy8GHWUQQBLTDad/oaYd76jbA+grM9JQo9DsC5YLcsuzo2dNp99sgxArJZDbFGDefAifuYlpD1bogcDKZURwHFiv3nsVqSls3hKFk7oMy4b1bpIxpvTRx3XXMFpejNHOvdwRpQKu7kZR9fn5OXTUcjnfMFifRCUl/WNCdvFRURZavqI4BUp18xDRSKVZn6egJliYhF48TbqduH7m+K1DJCiM1eeq4U01/hxVw9/CSD77pDjCjA+xwyXbnDuIXL0qapmF2vWK3dYlhvshomh1NFZLlHm7XRghjqRv3+4IgIE0nDhrpg3gpEoLgKV0/YKWflwo06QgrDeOAMBTcXW8Q3lD+T/+Z/4iPvvmP8eLzOz754Q/c7ZKGn/vZP8DWz6eUFqMDelPx4x+6JDCdCuLJgqLY45F6xMTc3L1gf+/26p/68NvESUBTKqQvRNlB8XD7Bb12cK0oPKdrDYkQI+RXo3m4uUGJA2vvO1UcSmwaoQNvdGpaHh7uOFunCB+UySGiMYZ8HlMe/NqrSqaP516UBwatsX1Hr+DoIbldXTGf5WTpYkyqAytAGuypAHPckeYJMgoJA8/ZbWvCRNLqms+/eO1X+ozZNOaw8wmuUig7IIRi5zfwpmuJooS62o9+hoNuEHFM6xON4+aa9focFUbsth4OFiVY0dD2MeXB7c3T2QRtFYE/d6I4pW4b0gBee1jgO+dztJF0yNGnUHc1k0hQ33tz16DDBFOkha73cNfmHKU0dXNk44sGSWiQgWL34NbGJBvAGNrSclJ8El7vXAmBFKdCVIm1dvTQ66UhCHK01iPk32gw9pyu63j/mz4QDjw32u8lVVGSTDRFb6h8EVabEBgwR0Pi4af3uxuWyyX33iy361vSPB/lxcH5UyqlKIuTxYEhDiOyZInynxMt3LWnaUrnjX7BnTEXnsfnfAN7ivqGhU800uQxQSAYjB6LuUJWdO3A5drFDU3bM7SGIA5oG7efPnr0jJubG/LknIsz931D09E2hkh4n7SdIsszZBexfe32jVZHzPIVTVdTlW7O98cvsVaz8gU72UviRFLvDCt//u52t/R1wSR/gvDCF4ormkGPUHPdgRlaJDGpN1yOlGZz/yVxGo3n6O3tjsV8TZq5veXYl2gdkucZzcE924lYoKKQpo/G8zANJkTziKMvBkzzR4ShQhgxGm9naUgUS7A9gy/iDXFMkqVUfp1LldBXPYbhjeWHCBF9T99bus57WAoFJkH6WFR3R4wwdKpkW7r7niQJYZCj1Fu0g3ttlp2jbcHMy5drs6dta1p79GvQS56YhnRqOW4973NzSxgJtP+cNEpB3XF/d0fjqRBXd3fowY6Qx7rqeLgvEEKwXHpKxdAiIos1nzB4iGHXdVx9+b3Rf0yGgqruWM9XvHzluGLb4wNJnKFUxCcvvK68dhYor72dwO/92T9AUe942N4ybdx1t+kZYSxpdUpTu7m7f7jm0dN3aPw+8vr4QxZyShbO2XhjY2sFYQSbXU3nryvRmv22GRMpbSNsVGHshMBbBzTtHYqc+fQJUnqtAv4RmQgLISbAfwf8G9baw2h2C1hrrTiVTn6Hw1r7F4C/ADBbZHYYhSMEUaKp65baZ5KTmXKBiA2YeC5KIANHqvcE3nySImWGNh2dJ+c9e/YMYzV6MOPCGKxhPp8jvfFnU3XE0Yz1fDaS0A/bGi1qsnTxFbJjQdMqpPGO2H3ALJ+w3RyZenLeZD4Qx05kwl8Wx+ORJLUsVqeOV8I0O+P+/p615wRNJ0+5eLSja+HxU/daVVXc3vQkJ6EIqSmKK87Olxi/eF6++JI0mSJlQNe56T87O6Ou65HAV3UVk8mEw+FA5wUswjCkHTSx7/A1bUnRH0jiCVadOCWSRxfvkk3sqN716J0FUp6NG83rl3uMSTm7kDx77jaoNHvM9euBH33vNc/edQfZ6kKjdUzjyfNBmGKDBNqOhQ8md7sNxrZYHEEcwNJzffWKQLk5+cY3HjHYiieXU7Y7t4mszxNuriq2m83o43O2uGS3O4yCIcMwEIVu8xY+6e21pTgcsWag81Xxw4OgrRLqo/b3AJIsJDD6TbeprpikAdWx4OLcVYl3+xuyJKOt/GdXAyEpuhfcnjaR7ZbpdMo0yWi8OV5bHunq1sknAtKsELYgC
hThyW9smhIEAVmWj9ywIHQO8Qe/qei+Y7FwJsq99xZaLnKkSYnzt9l5n4mBhvl0gfLsfJMN7HdH6IaRl3XY15ydXbDbFgyd98fKJi5B9TyCQRdgpfNU8yTiUIZEUYBCjL8nDCP2+x2J5/EkaQQm810y7ylX71DKMj1VFA3oY0/X6zefE03IwxQVwG7rgqvJ/Jz7hxfkfq0IpRA2Jcmh89y77mQqXTdM3ZlKkPVEYj0aPpdlQ5IoRCCx5rT2GyaTOcVxQxCfFC57kllB5ROiUKycD1BVcnHhExRcd7frGkxw8lMJqJp7tPeFSUIIVMQkVazeccnO1ctb+iZiOX9M27vAcF+XBFGPGdy8ROotbN+xXFnq+mTY67hwfbPl459y1zBJpxgDtnH3JU4CPvnxF/S2J0r8dm9D1mcT3n//gstL1126fv2abBFwd+eu+/x8zaEYSOOIw9Gbeu8bZtNz+q5gvzsZQ8cgzBgY7vcFiJIsT4hOiYXSdL3FYtlt3PwtFisOx7tRCGc2WfH6+ic8e/Qz/Pv/3n8CwMOm4W/+7V/now/e57vf/9tu/jJ49723RiQASOIkpKoaXrz6BIDvfPtnCQLHI228qtk0y7m+fk3tg/P1+jFCCJq2IEhORp8xX3z5CYWv8M9na+ru4AIPb0Y6DIa73S2Xj58TnIyMbYkipPUB9WAVt9cv+PAP/n72nhifZiEm0PRtxO2tqz0q1bA+ewS+g9KZihBDmqy5f/05ALfXn/Du7GPCOBk5QYEVWClHpc7tww1pErJaXvCwdVXjNF8hZEkYnnH90n3fbAqIhLp316S15XgsmU5zNneu6p/Gyei3dTrjB60JrGXrE6k8T1mfnVG3PcNw8klzyVrdlM6tGIfIUKEk8hyIYRhc0cRaQs8XkyKhLHsCaUZezaAtVdFQ+uD1+UUICLpW08duTxgijTDCJTdeFGExPyNLZ1xcuvl8/foly9mUunrDjZDaCebEcfxGVTSOsdZiTt1mJTF2QCpXnARnkgyuKPTG6NtijCHsXREnXdg3Hmb6jWm5RbuA2heiAnmkqUtU5JLzrrlHdoI4fhOO3d9XVBX03clLEZbzHGu1Vy2Fvm+JwxXT+RxjTnFQwmLOWLRYrxbc39zy9Mk7Iw/UmoDpdEpZlhj/vmeXa+q6xngxEmsq5k+WLn7x4irlcQO6cdxCP+faSPIsofP1AqngsG2wek/b+KLhIqWqDzR9Q+C7H3k6xdph5C1lyZKmqAkIKX0RtmkNs/gRdWHw1ozYTrozxvt6db0hzQK6ph05iBhDnqc0rQAv+rJenGG0RPf+rEUym8yo64ow8kU1ramqikm+HJOdquoQwhIGJyGjgECFWDEgQj8HtByKlq4zTL3Kn9aa4lgRx+7Ci+KADCRtM5yONYzSCKuJkpiF5/H1neZwKMj8D26GwflOZdMx0bZG0PcdWjNyJdtyQz80HB58cUUpVDSnrQasOBmwDxy3FWkaI7VH8JQdXWsYPIeuizoetq/RuicM3bm9PwxkceS6p0CWx1gjgICqPKGhAtqhQOt27KIqpajKhsjznQbdEiiF7Wbsjw6hkWVrlAjRfQ/eXD3LI8rjntp373/t+/83qZrz8UfvU/q9q2xfM5uuMJT0xu1d0STjky/+d9LshE5qiMOUQd2M3UIRWuq9ZDGdjD6BXW9IJ2oUUtttN8xXKfvt7Yh+GpqYLLd0zUDl0Ra6d92/32r8jpIrIUSIS6z+krX2v/cv35zgfkKIx8Ctf/0V8NZX/vyZf+23HMaYsfKSZTkWt/k1fnKNMQRhSCSSES4xGMssn45VgIeHB9I0RkiD9sqA2WTG4bAnCGL8n6FkRFPrEQKYRsrJdBjLNHeHpR0ks/kZQkh6v+jyyYzNZkfqA9zNQ4E0hmdPZrSeaBxFEVXVkEURMnAb+tOnKXFwznzlHnwrtxy2O54+D5nNfZWji5lMnnNz+wIhTtAkze/9Pd/m/s4FW21/z0//zIfs98dxA5cy5Xz9hKqQY1foBF1qPKE1yxxR3smnu81ABR1RGLNcecEQseM8XVE1zShnjjXsiyNhlmAa3xrtfOcs9R23/YHLpymrs5As8oHi1RWzheW9bwUs1r6DeLPDDIrQbxh1WTCdndM0zSjb+/G3P2S/PRDHCXHmXjscNMv5N9l7cn7d7FjOn9DVCY8uPAOUgedPL7wao5eWF5rz1Zr7e5cE5tkUhMEMPUN1MhoW5NOU9XyK6d1aqCvLaram8G7wjbGIviUKFIOvcsxSRZJEnM8uxvVrJynTJCTy0CFrIowJsVqPgh3PH7/lNlghybwIQ9M0rOeLNyImOsUkht705KmbO2PAWleBDXxyEyLoho63n7kNebvZE6qQPJ0z+K6RoSQJJrRliW+6EccJVakQXqQB1aCiBKsl2m9sk8mcYeiIYknklfEm0wRMT555dbROcn9/z6PH6/EQ6poaMwSEQeRgDcAw9FxcvFE522/2DL1xAYl//k6CNKfnrKgLDofCi7Z4Q+1yYJKsUcFA679v2B6YL86ovXJmV/UkWcZQ91RewjXwyfVieY7xMLYoUAihyP0huD6LaNsepQTWV+onYUZZ1sSTaLyutpdoE4xzsjncMp0EpMmaQ7339z2kbn33WXrhG61p+pbnz53ctqQnUJa2sbz80iW9QZCQ5AGDKZGB9HMn6doa6SFH0STB2IFjVeA9ten7gjQfiNMZbeeyx7ubI++98zYzb3CZLWJ6Cxern+J7P3CdnarQDF3E5uFIXbitWciOMDZ8+ztOea0qGxAd+/1m3G9iBVaUroDTnOwKdpyv3+H+1ncw0pYkDdAmQftO4L64J42XtI1m8Cpxm+0tq9WKpnZdh5urV/zCz/+z/MIv/Ak++4k7SsqmYDrL6AfDr/66Ey34+d//ezl7NOd2641GhSAIJC9evODuzglRfPjev0pVlRTlAePhNnVT8uNPf50496pxH/y0298iSesDFIvlsy9+SO73+Ml0waF8TRjMEJ54f9jueH33kvfe/X0Ibyxa1A/EGib+UN/fbLGmYDE/J5u51/qiQvctab7g9Ut3KO+Pt6T5jPY0l32POklV7z0kZ2LIsylhHI3QXSkCwiQYBZhMX5AnK+bz1UjSxhqsEg7G6gPo8/MEFaY0PsFMIwfT6ZpmhGut1+cEQeAUMwVfua6Qje8IyRCiOOBwKMYkwlpBljjJ/7FT7aGvoRcVMVYTqgA76NEwV8mIru/I8ngM3oyW6E7T10d/3R9yqHuKaiDyIlAybOkwRCqhq99IOA8WHj97x81h13N99ZpvffAutfeHUBKvLvcmedRdjxCC3pyEm0ICERAnarwmaw1xlCJEPO7XWmtkIEdqgrWWpinRWo/7qZB2/MxTpTybzdF6RugLEvP5c1QoKCt3XwBUtKDTgtzL76tAsj1sEcLQ707JokGoe370ecli5hK189WapmqJvC1AkqR0dUcymRJ4ew8rtT8bnQw/wHZ3T5qGKBuOf1cVFdN8Ou7xtrOsZ2vatiGJ3fl0d3dHpBKMV4hdLlbImaHrOqb+/O+qhqFo
CWXI1F9nIyqatvT7pfNOX6ZTqrpE+Gf2cnFJ3fSoaED5BaOJ6JuatUeEMF3SdCVd34xFvH7oKKuWQE6Z525f3Gw2JFFM5M+dSCqO2y3TaU7vRVnCJCZLUqwxpF7IrB+cTU3rA8i+P1BWikk+Z5K7e1NWRy7P3+Hq6oq6dPPQtTDJFiM9pGz2lMeKPJsj/W9puwKlFNoONA9uH8yyiRPw8A9f32mqoWKRXzJN3d5VFAfySULf1lhvRWKVQQWWOHrTSRrqAW0sVXUq8CRIKxmkpvfzHiQC0VlUeEqAFEEwoetLhmbw9/Qcad8oYMfhHKykKKpxf6ubkraXGKtGmxwVSIIwRvnkbr1Oubu9Jww6Go8IM4Nm6DRxnBBELp4xtUYMgiBwz3953NEHHb/+6zueXDhI3upC8v0f/AZWJ3z4gUcQFANvPXuHq2u3v9bFgO0Liuqe8wv3nmNVE8gFYdRhfUHCtAOPLqcjiqqtK+5uBWnOKBSXpXN6faAZBJPcxX7C/oNhgb+toIVX//svgR9Ya//sV/7X/wT8kv/3XwL+x6+8/i8KN34fsP8H8a2+Hl+Pr8fX4+vx9fh6fD2+Hl+Pr8fX4/8P43fSufoDwL8AfFcI8Wv+tX8X+I+B/0YI8a8AXwC/6P/f/4KTYf8UJ8X+J3+7L1ASGl9tnkxbimNNrw0ff/RNAF69vKfte4ahGFv5UgRE4YITODEgpG807dAy9dDB4liDEei+Hyts2tROcEJ6fLWV9G2JlJIHT4g8P3NeBEHUk2Wew7KVzGYLjJdGfXKxJp0GdPqe48EbqRWKfBLz8PBAKFwFoS47onnH1WtvSnc+RfeG2xtNcZKVb/Ys5pYkmzCduOx9uU7Y7/d885tuDpp+Sj/UXJw9/wqcIeT88imf/2TLYE4y4LfoQb6R39bWSYCHTs4ZXCXgow9/muEEHbh0IhvLdcb6bOo/Z0eahQy25uaVq44cDw+cXYZ89pk3iYwfU5Z7/q//oyIO3xhaRvmWIAjYbdx1TiYph8OBULjPjlXEcdvTNpLUm8J9+uNr8jSj2NUk2Qkaabi/fk3sCbyYmE1bgTgQeThaX2miOGCWTsffXNcNXduQ+nZ/FARY0xCogN5DBwQBxaZADDHg7p+UEW1Zonx1/dkjBx3aHUqWc4cNt3SuIoscjVozJRl0xaXnYNRVRxAour4aibdd17KerVxF2Fc6Z6s1Zd2S5afqzxGlFEXR0nroYBynGKs57rYjOV6YBGElxfHkH6VJswm319vRxyOONPPHEx5dvD3COMMw5Pp2T+kN9VQoyPIltjPUJ4lsbcmzlCy1NI2Xkbcl2H7k3s3nU9LUVZoXy6V/j/PaOb+YU+5P3dcFWr/pEm9TQVEUnJ2vKTwW3XQhd7c75t6b6smzp7y+vieMJStf+UTG9E1PklmuPQ5bypAwjNm/9PCCyQwhB5Jpwgffcvdqv3d7yubwgugE/RACS8eh8B566ZS+sxgzMJm6+6CNRMgQJePRhgBxJFAxwrh7HqVQVlui7JIHL1kdKEmSpRRVifFwKWqFCgK++z1HLIBz8QAAIABJREFUnp1kU9I4pG1rKl+Zn02X3hJC0nhJ+iydY/WSzrrfu6u/4Ke/83MMfcj3v+ckx0UE+3rL7vOeqwe3hr/9u97jk9dfEilHDt7tewYCXj58QnkSCBA1Ml5x9mjJYunXlE0RxKPX183NhiRZ0Q03HHwnd5ItKdsD+/0B4b3nFvMJ1zefUVXuD/N0jZY1XbtH25OlwZS2K4hShfUwq0BEFPua99/+OQB+6g/9An/1V/5PvvPTB2YTB7eN44QgUFxfX2Okm4ePvvXHaOpghHBra5nnK37zJ7/h/PqA9fIJXduilED4CrAdBD/5/FdZrlzVMcsXHA4FYQSZJ+PfXt1yKK75+Y//oJsTaccuRegruz9++X36bsdbTz+k8fwKqQIQeuzMv7p6QZjCfP6Ih9J1J5WOiYOMrut48PC+RxePmc2XdCcDdqtoe0ESG15+9j13I4aOJ0/fY1/uRnSHkjHz6YSXL12ltSy3PHnythNcEie7C0MeTCmKguLgvu/J/BFxqii8SIsxBiEUbdtye+s+693nHxOGIceyGJ9bISx9245m5NNpjlKKvhuwHm7T9xZhNJvNjhNBIAxjjHnDpQqiBGGkg6iqE7ROYRBuP/edI41g6PUI61bCQaUGk1J4bqYICqZRxm5zP/rvrVYr8mzCw851AWarM6xQ/PCTz3h66e67lgaBIY7jNwiQOKHve5KvUB2MMV4SX42v9c2BwZjRBxH4e7pUUgYEQeDPIXcfosh1HLuh9QJCoJSzPbH+nImThK4byJI3HI733v0OXdeRekieNYInj50v4wmtEwQSPXRo3TF4EY++LdjsXpEm7r+PVUMUBRT7mrg+Sfm3TLMF2HDsrK0Wj9lsb5ku3HMdCkGa5gghmGYnj0dFU7Vk6eyEtkMqizUBIyfEWFRgWcznI8wynilUYKmbgdZD2YUMmU7nnJ153nAzkMYTkjAckQ9SBo5XHgecuOjp6i0ehXB76+XFl8vRmFh4LtqhOJJGOUqFWE+P6KsGGxgiv//EQUAjBiJl6LWbl+39hslk4vYNeUJftFgsrRedsrYjS5bUh2YUfFpOz+lqWM3OOBTueV/MlhyPNYOnn2Aks+kCpcLRiypJUoQQaK2JvF1CcTwg5RtO8qADLJqqO1IeTkJUCcYMWCsIvO+jUophUJStN0SPAqTUREGAVCcuWoCUIIXFh79O3CFKHAwfh8Tve4sgGM/MAI1SodcBAN1HGOO8tU4xpZIpsUzQVtD3HskVZkSSER6+uduTRnO2u2seXTghjqIo6PQO1Wmk93RL0xSpembKQdbbfo+0EjtovvhNJ0h0/TJFyYRjcUXlzyclM4qDoqxOkFVLpRsmk3O8Kwuvb+9Y5AFdF3Hm7VH0UBGEMyI/TyoKOXsbZtM1t3cO2WF6Q1VHBCpl773LZrN/SM6VtfavAeK3+N9/9O/zfgv88m/3uV8dcRzy3jvuQG3aI4t5TpJkDIObEWFqkghmy9moBFIUFX17GLHbZXVACEE+zbG+lWetZRgsygZsNy4Aunj8iH6o6dqvGKIKR4RNfCv9Ybshz3N63VOUO/++gcGEpKFflbTc397TdQ2brW8rBwFlvSMJ1nQeKqiCiOLYE3ii8e5esd3tGYDq6FrWs/mE69t7knjOYeIezvtNjBUNm81nAMigp64G0qhH+Dbrdt9x9/AZ93cH8sgtjMPuniSejsmW0RIVStrWGesBPHr+MZIU402Tu74lCuYMneHVKwdNyNM15X7P9Zdy5J31emB7ndO1XjGtukffBuyPLXnuAtnFbM3hIWGSTcb3MfQk6pzGczfa/uiUCQOF9DAEZUP29wd3EHlOiZSSoZfjdZfHAWE0USzpvdqb1JJYCbq2o/dJSxTFDFgeeS+HpmkoDjV907Jcerid7ZlkE46HdoTbQE3btlxcukA/TWPquubR2RmFT6CN7enansk8oq/dkSIIESbGDG8w+4FStI0ZD2cp33hJnQJMoSL
iWCD9/VzMcnaHkvlsPcJRgkihxIRACbSHXvSmRWtF7tUCLTFJFvPs+aNRpUqbniCJKduGz790G+75+hFRFFD6AL6uQ6I+I44smceGl3XrREXqelTda9oCaw2vblyAO5vNCIIAIx1sAiBOMuaLBCFD0pNHlxKUxZbh6IKBLI/JZzH5JOSd973+jdmhh4tR1anvWxbnAXVTsVidyOSKslZMpgnpzG+AMmKzORD5hHoSRUSJYl+03F67Z/sUzEzymMbfq/l8xn53RHhRiLpqmc+XbDabcc7b1lAcK87O3gguTJYtwkYYLyqwPW6ZzBRG7Zn6aze9ZjJJiTNJ3b3x41NSMl+6uQxVymAaVBKReZGbdJJhTURdV/T2pCAGIjwQe4PE4/HIX/vr3yUMJhjcOtjc3ZNlOUI+0HlBoL/yvxU8eXrO3Z0zWzXEJNEaSzdCfsJYcHtfkqRzWm/0GaiYumxGlVGBZWCLNu45cL/5HkyKxWA9T+iLL+4IIwjVzF93RzfsiRNGpUOtBZgAPUCauPcVhwPPLj/ml//0fwjAn/ylf5lu2PLuu/8mL7505OMgsSRhyo8//yFp7u7NBx98m97ocZ031R6r4Qc/+Bu89dhBL+fTKXXbegNh79H1+oZh2PDhe/8k4Li3IhBobTE+2Hl4uKLrDrz9zEEj6+aAFDFdX5Ekruj16vVvopQmS5dvRDxUAuKI8LDdTz/7VdJJDiKi9EHZ+XwBw8CxOHB/+zkAFx9+QBhktN6k1WinalhVNa2HSyZRSJTOqKs3nod2UFgL+60LMGezjPPLJ0610Ae9QhqMkRz3B27uXTL+0cc/Q2/fmGUnYUTd9WzuH+g8LDCQysHwg2CEzQVBQNu21LU7Q1frC7p2oOlarPTrxUYIE1OX1VikiMKMXjMmwoEyBCqk6dpRICiMA+JJhtQK6YuG2hpqq1kt3FqZTDOq37xFhCHGn0VdXwEZu+2Gxdx9Vj7NiNMYHZxigobZYkFb1SOBfrWcgtUYo8cipRJiVD0F+H/Ye5OfzZI9v+sTEWc+5xnfOeesrLpD3fZt99zYbizcxjKIFUJesGkJ0Uj2wjJbViyR+BcQa0BYYsPSbNpASz369nCnrqqszMp88x2f8cwnIlhEPOe9Rm4sJJDuImNTla+e4Txx4kT8hu/QdY4bpoKYYRQaEMRRQiofINRWuqD2YIiMlRhjsNaOhbeu6+j7hjAK6dsDP0V5tdwDp1QRxWrkNgGosCNWButpAlZYdmVH1w0Pa7/RaC1IkmQ8Z5Jswne/93K8d0I4sYSu2bH3nJ2m3dBUA01bUTcHBdGY45MZq3cu/si80fmHDx+YT905k2UZ2/Wa58+fH4TjiLMZ0yKh8jz3MBLoTpHFOduN9w2NDEEqSEODsMn4WU1b+vsIQoVIERME8ciR2+9cgp9NYnrPKQ9UiLXtuA/X24rpfEE3DLQe3hcHOev1mvk8GwuuTx+/YLfeoD0fV/cdtresb1fYgxJh2bKtB4wZSLwHaBimZGnC/NQF+ruNJgxS+qyn2rtzNUkS5rNjdpsVR14pd7tuXALkYwIVxk7xd78aVWyjIKWqKqy1zL2Ix369oWvbESocxiGBEHRdRTHxiW+owGqSKByhu103uM/1Agxd55LCpm1G71KFIElyuqal83tzEOYMvXngYVuDEgIlI+wBEisCtvWGLPNmvaJjsy7p2oIoPPxNIugQ1pJ57mBVr5nkxyMHq0gjmrri7OQJ+90BbhswzeYMuqH28EU91ARxi21d/BYEPYiGIn1MZTw0WRksJYvFgtZTA/b7gbubisXSrdfpPOHu9p79LqT2SaAxC7Z6y3otuPFKjmfHj/mz21tePncJXzaLePws5y///Cs+ffWLbm0MA+/fX5PnKbO5e95vP7h46K8b/6/UAv//GsbYseqY2yXpJHbmsT5zfvJEk6Y5q9sds2OXLT57/ISbmytCfyOXy4TBuA3rwNFRSiIFPDo/5ukzFzCv1lsCFVB7JRRjeqeUFbRg3AaVpylxJBmGeFRMEkSUW0EfuPft9u85Pj5CkDOfumuIE8l2E9FVgv6gahYPCHNO7xV3ermmbzuS2UCaON7H0DUIG7DbrKi9IkwxcXjjrnOHwvHxKU0zsJd3XB+q9yIkihVRHNP4it5nr77FF3/1midP3ULZbHbc3FyRFymLYzcHt9cruu7q4XAJNPPpGXXVEHjFndvtmr7RpIki8kGg0XOqqiTwCo35xG3qp2fHlJ643XcNUkvubm6YpC6A3lYVSSDw8TuTImN5NGE6nbJee/x/MmG93nJzc8Ni5g7n3c7QdIYkdw9LfixY3a3QnURbtykXxRFGNyRRzNUHd/gnaYyQir72G75MmBYTtLZjlTgvQnQf8fTxY3alm8+223J6+hQZeBWZOKaqV0RxzpNn3gS2sPSdZbu/wnjFoihYkOfpGGyBZNCQZDFbrxo3TWcYYxiGYVRo0oNBSEno1U+G3vL08QX7agc+aLFGsLrfUxRzkF7xrtLEkaD24gfGatrWCbeMMssM7N5fYcWWKHD37+27hsGUTKbu+wJpsWZP05WjsWnZluzKxgXbHnO932+ZziZEB97J7g6jBVGU0PjEtLwuWS7n3H71ZiToK6UcD9If/Fd3G4oio21rtHEk+9OTlMXsiPfv3ToPgoBy36Ct5vXXng8kFFIpkiQbJb+32w+kqWR6IBC3kt5ayt2e4WAce1AWqsQoijJ0ijjOENK9pix3tP53HirC1lpUILi6uiTP3dxtvh7I4gHrqaVdrRnigrAICX2C2Q2Gpmno+hrlA21EQpFnbL0am7YaIQMnr68Tv+4GjOnoTY3xleS66wlET1+7ex4kOXWzYbDVaJJYxEsCMcfoNdYXStLccnNzNa6xLM1pqhWW9oGA3brK5OUf3o1CP0IojFZkvmucxYYo24GeEYcusZgvJbfXFUIsscoFFnEWIMjGThayQpuWoS9Q/m9x3DHoDciAq0s3x//gt/8R//Df+x3+6A+cCuAP/uyP+af/9J+h+2BUl0IOyGzOm7dfsNk6oYazswuabkPvMftpmrLd3fPmmx/yy7/4twBnGLpab4hiReGVHL/46Y+pqy1Hc2efAJaua0EbJj4Qfv/uS5IkYZK7Q72qtoTBBGgYOr/v7y757NWnJNkMbfw500OLGQt/dfkNj1/+ElVfE3pcftU2xEqwX6/GTsDx+QVdyyg+JJRGCLi+umXmA6li7hTMhspg/BnZtS1pn9L5xLg1HXUnyAIBxj//QhEFii9+8lOOzlzgNp2d0ncC5fmAu52zLVmv7zk7dUHhfD6nG1qECjD6UKxKuLu5Zb11qoPnwwlaWTrdILwco9UduutB9KPQRl3XhHGB9d9XNjWzLKLtagJ1SKQGoiTGNBbpBZY0A/v9ntRX8ze7kkEb0kyN3TsZCExv2e02FH7POwhTdIegUAqasmM2m/Ov/uiP3WuiVxwdzQjDcDToDYR0Cro+2UmSwBVarBhVBq11RTqlFCo4ICmcGujhfUoFrqPX994o/WDKHlDtq7FTRjsgUEi/BzVlT9
uVGK9gDFDuatIkH39LEASEYUwgQ9quHv+WpJJhqMZrGgZDWQ8PEvl+xFHIwgfwcITWfp/zxemhd+iDzDdahqFjGHqm04D7tTtX7+/v0aZn8xd/xXTm9qXVaoUxw3h+WDMQSkW53/LkieNhr+4aJtOEoigY+gNHtibLYwJ/VuRJjAwsVkgydbDXCB2vTQ0Y435zWd5zfHzMs2fPAHj79q0TXhCC2iMyhJIcz88ZhoHSI0CWyyXJshiFcOI0oQwzrm8+0PmkTBGTxilSQpa657btDLILwVuc5EHkFGEHQeKl2JGSPMjYdHcjv1AOlsU0HxOp9bYkK1KaQIzF3O22Ik1dgrXzYjHTYkZdl2NBQlmJ0YY0Ksj889C1DUGgXDJ5aBNb7QqeB1RMETMMA01jmBXuvodBzG7rmhCd5+RP0hi0K9CAE3MSVjAMA60Xbuh0RRhYBu1jp3zC6dmCzWaLCt39a5oGowNXlJCeqzXJ0b0eLzGKFCKOGPqePD9YzbT0TUBezFlMPBetbembmHLwStJVTJZHrLcrfF0aOwSosKPrynE+o1iQ5ymVT0z3H0qiMKdtLUFwEE4qkGHJdmVHQbeh3zAMmp/8xHXFinTCv/rjPbttOV57kkZM8glS2rEIEqj/ZxPhn4vkahg0lfdNqMqWrDUoJVDqcFAErJuSatcQe1Lmvt4jjBhlXq0ImE0nXF3dcHbsEov7+xvOzubkRcTNtauGLhYzJx3qJ7soCm5vdkRhwMRXHSCga/coGYzB1Xq1o297at89ODk7wWpBFMTU/hBCS06PnrDZbEgTV0WtmzVNd8t85g5sY1OKaE4QG7LsUMUtGbqcNN0BPtjpLKaVDK1bOHcfVgShk4cPvRpTmqQIFJMk4/zUBUB5nvPo9KmrnAI//OEPOT/1kLaDR09VszyejX4k8/mUrhVkWUpXHWSeA2wYoWRPXni1t7jgw1XL0LlrOl9O2W569mvD3L8mjA3lJqKIMtLY/b4km7Ba7Tj20K/FYsZ8WmAZePLqmZ+njufnL/gifsv61gVSpjacLMUo6VzkSyJZMpue4G19GGxNnETMp3OkT/reX73n+fPnY1dFKYHSCXowh9iDSb5kkp/y+MkF13eOAHl9c0W1CzjxwUhVdaTZnEEbIq+cY8yevu+I04jc3780mrJcHPPVGy8+0gxEac5mvyPyAiFXt5cEQUSSJGOwYZUliiIGc+iKCa5vr+hMyzAcpHUlIghY7e7H6mqvBYieKPIQoMZgcclM27lr6HSHtYZiEhFF7keX+z3KPMBcZNAibE0xzTE+mXv6/JjtpmK12nJ85EU7RAwMYxAxmeR03UDXtgw+qA9iTWfWHJ/nD55r1jL0jUsMgbhY0LUGi/OHAVitNdc31yS+8zoMeiSRhj7w74bWeVxtavyZw3R6TN/VaP/9wkgUMednj0b4kvEHdxTH49wJAWW9wWr3fCSRq0zW9WbcSLu2IwxjJGo8jCUZJhpovHjFJH+BaVP6RrFdu0NAicxV0USP4lCdzKnLBuXhIXW7R/YpQlQY7zafpimBLBh0NAqSWBPQ9A2Cgz8P5PGcPJuMHdAosdzcvuZ0/oLerxcrV4RxSpwu/W8xLM8Dsnjm1LNwZOuuGwiDflSgUkoSJQo/nezrAb1VqGBDEntY8H5FnEWUuyuC8KAWuHDBpA9+pBZOZXXQoyRvEs6oq5ZBV/yn/+ifAfDJs9/g9es3vPnmJ34xwne+8x2q5m6E/Bod0TeCL978Ps+ffebmvZhxffcQxETFlB/82Z+QJPDYd666tkb3DVrlY1f43bsfEVDw9OKVuw97JzwQSjXO583dGz598TlRcBCq2CPoiOKI27trP08tjy6+S6egLv3vMyDTjKsPbh08u5jx+PRTNtWWiQ/S1l2HDAM+vPuSvHCTnE+PMIPmgDLr6oZJseD6wyVl477v0fzfQQQh1uwRXm1uPp1QbrbsPdz3+OSY07OXmMBiK58wJCmmaykmEKROllvFCWU1MHjBJ2MtzbbCmoEkfSgsDtJQt90Y/OdBSNfWTKcPATzS0Lb1qEQWSkXdttT1HSr0HZI6JSVE+fOqaweKZEbftwR4IQMZkKUzlKwPgoz0JmMb7JlNDwIFwgvJmNH/TwhBWbYYY8YEOogTqrYduzZxHMPQc/fhnk8/dWsjSkLevn3Hs2fPRgVYEyoPX/Yy7SoY97gHyJ8ci2MPSpXuOoQP7qw1owDOz3bCrBVOodjfPyst8NA5kzIgyxfUdTu+J48S2q4Zu5W97onCEBkHJKmHa1rneZYWBU17eK8kDKPR69MY45TdjBiFvoyWCKnp+x7jz4MgFCgbgPUQstAihGUuTnj6xIkIaN0zDE78o6ndHD++COm6gX3l4a8younWJGbO/cbNxc3VPfcrTZI8+HZWVUOeRUjfNVouj5kuF/RDO3ZD+t55w7VdQ+EThEmhaBtL3bhk5Pj4mDjOadqWEw/L35Z7lJRoIfFHK13t7tsBjYGGJJrw6CxE+G7oduM8JfflepTzD1RCJGLW3l4gKxRFkRGEakRDTKYLZKc4nZ+PZ/n58YRdtRvPogBBXzeEMmJ15xU9k5wkjDHBMJ49gVUsiuUYd96vV2itfWHBI0AihTWS6eJ4VI5eb7fkUYFUY5sKG2gYDKGXdbe2Jomd1+Ji4mLCvrOEYYjyif1kllKVHZN0ChNvZVKtCZTrcgMIoyjrijRNsPhnvbtHmBlS6NEuSbcQqoDTE/ddVbMiDpwce+RjoOdPzuhajTU95xcuQU+SiO2qxCqngNu0JW0Pm1XH0bG7f3e3a7bbgeOz+ShcppQiTmBy5O/ntqTVLdPJgmHwtgBtgmkGAv/Mu/ves5zNCfzekhaGyTxnVsV89doVgUM5IYok+TQn9Pvb3e3BzuXfPP6tghYfx8fxcXwcH8fH8XF8HB/Hx/FxfBwfx799/Fx0rpQKCdTc//+KSTHn/v4evAFcVRqsMGRxwvW1y/onxYyhU2jfDo/ChA/f7JjPnhAkLsN/8eIV2/WGb16vUNL7U8VTsjBEe97JfrslDiWhSCl8tWC3bsjjJffr92hfoVGhy9ilr0jv1g1hkDC0A7mX+8yzgXovWU4nGG/4mscnbLdbFhP3/dt1i7E958dPuP7gpIN1Y0jTnklxjpUHiEiHHgSJl35WYefwpbXk0ZH72939B/qhxA5wd+2qWfdiiwzhwwfXqVMI8jRlv6tHL5rl9BQrt6T+s20vkUPIdr3n5Mh1kqK0Q1hDGBQMHqe8ub3mdLFESC/32d1zND/j8YkZW7Nt15FZmM4jKg/LixJLpDICL/uqiBjagOsPV9woV3V49uIpb16/I5Yxnzx1ldbuTLLrLglDV5GK4ilJcsHQCLR2MLLzi1ecnCYMg2Y5d+978fIpH27ecaAKBhE0+5Q0kYRxOq67dx++4eruLTtvjmmGlCxPuPzgpaD3A8VUcbS8YFe6SlaYlBTFnL53UsQAt+2a1eZmrP4aEfD+/TsavaH21a26rIjjmP1+O85V3/dkWfYAz1QxWreEUUAo/X1Xiq4vSXJGTxkjB
0KZUmRuXlpb0bYthj3nFw6+uFoJFouaKJqPFdE8MygV0ns+oDEdWTJhWzVo7fF2tqbrOuI4pKwOBogQpwGRh2tZ2xCHCq0ZRS6GoSPPMuq6HEVgVCBo25rGQzH7zgkEBDHc3DuS/XQxx4qW0nMbQpnR9g0qiKi2vlI3jal3LTKoHnhRqwph7Oi8Pgwdu3IP+xnKE/G19pDTNB2J213XYnSE9ITzOIqpyg1Hi/kIJ06TCXGcselKpPSdK6lJophAnPn70lPW7wjr5VhdnhRTNutrptOMJHBVP91nyCimFw/QqEDHJElCNzz4/9T1nmEYOPfY/kdn3+aP//T3uHjspYRXN5ycLHj/7sNYSb69vcd0GhHEHNxyT05P6FrNvvTy8MKy30IlFL2HVMVx7C0b9INxotb0bUfkYSwqDOj6miTOMX5PSrIpQ29JU0PpBSzKqscMmqZ13zefOenmptzz+JHrfN7drsBE/Jf/+L9BGQcVevv1F3z2rWf8j/+Tg2tNpxMuHj1ls1lhfbcuigq+efeG7eYdv/W3/5Ffs8IJLXg/nqF38unPnj0j8cIUm/WOIJYMg2Hn19Bq+4aXzz+n8N3mqis9SVtxd+sgm/vdDZ9//lsjEd9aQT+0pEnC6s7tU4tlwdHynKpvCT3OOehAJTOqvetcHU9S8mQBcYr2WP++1wxCIWTFha/QBvGculphlffiyqd0dUs37JBR5+/VMXE6obp7z+BhwRZFudtze+eeoU8+/T6dkei2HhEZCMHQWX70oz/g+acONdFbQ93s8Gg4kA3WCrbb7egNpZSi7lz359CpHgbD+u6e4yMv5W0BJMYMDF6wRwSW3W5H16+ZyIOc98R1NcUDJ8la6yD/iXuOQxUhVYQNm9HXzoiQQIZMCvc5m51lOp0ziH6E6UdK0TaDFwTwFXUCrBEIjx3SfUccxwy6I0wOctBHnJ+f8/v/x//Jb/zGr7nv0z1KMHKL27bGGPfZD549gjiMiCYFxl+D1pq2bam8GJDjq1mUUmP3TErp4XcPkEarLVqbkQ80DB3GmLHb5F40EEhBWx84SYqmb5BSjp9trSAIFJv1foTzhpGkbRvCA1RKKbTt0VKO3DBjArDafY7vptVVhVIRxt745SMAJ4xlO++1GQSoIEYphfRIB607x6OdPPdrSiPFMxC+YwY8/wS02bPb36M8NHLoDW2zZ7NyZ+++zzCr1sG0W3cNxnYkSURRTCm996R7/tUoChUEAU3TuP96Hn2WhORFwaAFuedOTacFNzf2oXMlHCSy6xpyzym9lHdYaynykxGWJ2XEbl/xySeu8xnHGe/ffYmSls53C9++viOIFEkxQR+ExS4vCcLUWxPA/HjCtJhyd79i4nmnloHdevOvias0dUO+KLi/cfNihaLIJgx9xyQ5GMwrJCF933J85s7ak6MZu21DU3uhr7ZnNp8SynCEUH64ek8QB+iuRfjfZ9OIxeKInedT1tWGtrEM0jCbues0bU40y+hat09aaUiTmLZvmHn7ojTOCOOEpip5/sztN1IImp0litzZ9+rTT7hbrZhkUyoP4ZQiQJueLA/ZejhSpCRB0JJ4afbj+YLZvMDYkj/909cAzIqULA5YLCKS2MU8f/mXrzk6PmWzcbHhfPaYm5sbVADCx+3tcEmez9C6pdx5BEFacLN6SyDcM9R0EUV+RFPrUSvh7nbNi5dP6Fo95iBh4GKPv278XCRXQohxMc/mKULH2D4kjN3iXU6n1PWeOAyRmQ9M24HJJBsnaNfeUuQzbLcb1VHqsqWvB07mp2MLUHeaOIuJPOk1i6cI4cikUruFeX684G69QRDs0ES2AAAgAElEQVQhY3c4F/kRuluTxe5gjOOEMAzQeYv1m3ssM4p5TjdssCMZeEKR5UxmfqrNjjyfoZAUiQvUJpllNolQASSxC5jv77e8ujhllnloVtRwv7llOp049TZAzI4Zhp4kz9j5QD8rIuq6RXkNfxUo9iuXEKQeTlDXMfu9HL0jhhYCOmaFZFL4Nm8liFLhFGY4GH86vyg86f149pjLqy8xScIkc4IkUvQcHRdEUcR26+B2XTVndqRIQr+pWMt6d0mj3T0F+MM/+iFpFhDHijByi3ayzOjvF/QH6JfYgRhohpap9+hal1/z5o8s02lB5iHlUrkN8XCg7rYN0kLfBoTavUibHav7HYaS6cTNuYhqml4T+01seRIBktv1HYvFwfAROnNHnk9QnisVKQtqy+3dwXNthQig3VuOZi7Iji7OudusmU1zPrx5DcDp/BklezSFnztD21SgpyO8TwhBKHMseoSR9q2Di+z27uAdBkM+LdBDyO2d96IIUoL4Ebt9i/aHXLmpQWvSA1lXhVQVdKVg4OCr0aIHyPMp+70LmKUEqyNs74MWqUFIjBUM9QNUaNv0GKno/XXpoSfNwhH/HyURu2pPlmRMvI9IMMQom9J4fx6DIQwlTbPHenZ+wJy8qDGEI4a9aVpUaNnv3IacJCm93lCVKwLvhZN6k9ey2o1k3KZpQPSEoVtjZbMiUgXNXlAkXmAiy2i7mqcvYfD+UWkWYXVG45OKySTn5l4y9B14D62u2fD08beI04zUB95ff/ENFxdLru4950qHHB0tef369Wi2moQBg3b398qrYN2vNhydx9Sdn0udsNpekUw0N/fu8BBKc3r2in15N8IjV+sSawTzuTvght5y9WHFZGo58YqXm/UOoy1JGrL1ympFURClMYFXhPrm+kusHBDK0Hc+8BYRWRZRN9UIe+q6FqEasonfc2sI5JqzswWX31z5+z7hd/+z/xolzrm6dUa/0SJnu91yeen8q/7Bv//rmMjQr0KIvS9Larm7/ppPXh5z6iGqZrCIrkd6M9ldpdncfMWikKPa2r4qyU1ItEi480n8Wag4efSURh7UAzWiV5AI7q/ddarYkOcnDB5OagQwuP2q2rtrmuVTAvGESEm60s1Bmw4o06ArJxwxPzql0QLVVAzeoDxJe8pKIboteJ9AqUK6zmIO/DwMQlckcs/ae9HJfEm336LiBDxsRaYB29Utyp8723rKWSCg07S4pCydL9h/dYmp7smC77u1YBMGfYNHCSEF3LeCPL7j2YVTpd00hl7XpOIIG7i1V7cV0jacHbvr3NQF+25D0Ec0Ho7aA1255nRpsb5osNOKtl0z86qRIo642d6RhgNPPP+nDDKqdk0iIrrIB75DSyYrjBfCaXRBNFkSVZq9jxNMkmFuNswSTeTFjQhiOluPPMKUlKEM2d1vOT1x5/Zus+X4yTMevfqc//2P/wSAX/0b36Pve/TgztVAZWgRYAY9cheTUNKrhrrbO+EQvIKkDEbop1Iujun1qJ1Hr3vn41l1hD5QQ7gi09AdoJESqzUPWS8YGRDHAmsOgkgKqXq0Fhgv7qBNA0Yh0eNnuetXoyksVjh+lTSkfn9TqcXakL7vRy7K0BfIMEKYAzzbJZfGGAYP85ZWMAwtRsqxyCVlQNmWaH7G+0tKrN/bwHF9hBCk0Wzc8+I4IC8WnJ6/9N/n5qhYiDEJtFZ7/lpP44UvEIay
6Yj9dTf7gf32ju16PSYR+01FGGqCXIwJbNv1hFHM3colEWbQ5HnOMAxsNr7QXlYsFkdYqynmXhlP96g4GXmRersniSYkYUCsvOH6ImC92xLKiMTTI/aRpCgKTjyMrSxLYhmitB1NhLM0pWvuyNSCq9srP1c5zW6gK/2ck9APiqE3lF7cRJmIxUnB7foe682AB+FUmA+wxPk0JQoCnj1+wrp0n32yzAlVhB4Ugad/HD07p2wjtDeFPlleUJs1u11LFru1d3Za0LcD3/3E3aub1Q1DM9AFhnDi/bHWIbGAaNLQeSrLyfSU5aOa2Me+rV7x5NkRbbljMXdnkZCKXXlDFFqWcwfZ3jc3hG3CxHPMrm8uSdIl2lS8fO657/mEt2/f8d3vfJu6doloeVMzz3bcvfewy+mS3/j+Z/z0J18z9TDIvW3ZrWusFGRzHw833zA9XnCwCNRxx75dk+Yp+43bT4/OYr7+5gtMl3BQAl0uHwRo/k3j5yK50noYlUKKSUFTrQkjS+h5GFkyJwhjklSN5PO+75FCUkx9MtBkqEDStxatvcqgEEySjCxK2PvDMQoClNbkXlq31x1RolxXwSvCtc2GJLI8f/qYcu9nPBg4O70YlQilssRRiJTpWOEznSCJJUlyPvI9elNjbEOzd9/37PETVutrlOyY+sUjRUyoBNYMo9v80WKJVIb1vZe6jDR9B9Z0COkrq9oleV3XkfiEcnO7oqktx6fu312rsJQ0u5Z6+yCXPsnnCE8qRmh6oxBM2JcPQh+RSamb3XiYSKnoe8Zq12I24dHZd+mqeiScC2GJF0esV/coz8s4OQkxtqDrDmpsA5tNCwQ0pZuDo9OQvtfsyh2tDyjv1wlDL0Zlqdu7miCQRImm3vtAIxr47Lsn9N2A0e5vk8mEo9OY1cpd03YXkWUJfa9pOxdMHs+PyGaCu7sGGfkAoZNo2x2eHe62HZPJgrouWXm53yTOCSN4ls4ecPVpxf1NRZ66gLZYuAO1mGds7ty6290OqDikGXrSies81NYFrh52zmpfkk8FfW/BV4SjFDCge8HNletGRqHDuY/YfzE4bHqTjEpSbb/j7naLtQ9dxeXiEev1mp1fB2EkUcrSNMOIk7YBzKZHzmTTbw9t0yG0GTtsYRIAiq4rCXySZNDEoUALQ9McJIYDtDC0vuOl4sArePZjpa7TJUVRoP2kD8NAluZ0w0DqlfK2+/dEckYYhyMvwkYD01k8rqkoTDzJdoc1h8DfPatFPsdwwJ3vSNOUbPT/myNMTBTFhF6sZhg6JpMJSXLM1hyk3w1Nu8McKr1th7EtYRiNMra3N1A37+k7ffhwjO7pTMu1x2erKOb6nXO0OASB9daQJlOu1ju0da8LQk1Tz8g8z0WIiCQ8Z7tbUTf+mpRmkjzBakO5d2tdBTGYmP3WdcWjxNL3PbttyN2Ve00chywXMU3TEHn10/PjF9zcXtH2ngA+nSEDS28s2t/P46OCutmxnKZujQKrbYfWAuHX63wekYYnvH79l/z6L/27APzC5/8BXZ3S1G/IfQd/COHD+w3PX7nv//4v/ApDF4PaM3Sem6Il19dvyLKEo4XreFVVhRAW4Q2K692WxSxgPi/wTuL0raauB8LFgtW1S662mz3f/cVHdI3vMijBoDXCBJSVC7geP35MkuQuaAX0YBECuq5hX7oD/PQiZTKZsN9syLyK6WAt1a4iL9w1ZbMJpZWgDVFw4LSE3G3uWd1f88zzf0QQIqVGiINoQcJ6veL95VekExeULRZHaK0RhCO/AjPn5vYd+PfNixPiKGBf10iveDu0A7d3lxwfL0blurLcOWlrH8RbLVmvv+LxMqXx545MItQQQKrHwPT2duDiYo40XsFMFVizQZtulH4vdy2TLCMJM6627rlrMAgxUB+6/iqmaStOYosWbu1XTQNIgoRR1Wy/32IYkH5vSc2EQUgIQya+SxyokN1wx0kuqA8qcbIlUynqYH+BRiSaWZozPShe6oHdvubz770ag7I//cGP+PVf+x669XxnA73eIMOA1BPiQmGo2x6pEoxnU7RNR2AtveePhGHglRbDce+JAoUMAiIb0h8OFgRGKJQX/pDWVfnNw7aB0IZm1yEOnHIrWG9umM0WP8P/lQSBIo7TMdlJ05S2rYmT2L/PEsURnR7GpEWC51sZDiGgigKXSPizQvnfKJUaOYF93yOkQEhGDpS11gk8eA6k+hn+2SEuEsLx1bTWP9MVtgzGjN30tm3prGWaFaMYkNaWIC0Q1o7F8TBUSMQ4v33fM5k+4+KpYbdz6yxeONEkeon1naSqb7HmQYAgSSI2mw2r9T0LrzbZtx3b9R1ZkjL481AiEMOA9DGQDWsuTpaEYTiqgxoDRbmgaYex+PfoUcH1B8Gs8PYsm2/Y3e85W57TN97gOWuRJsNqQ4JXe75fk6mQ2BcWw9BQbm8IiwxVe6RRNPDmp285OTnh9NQVnUKr+NEXf+b4kMA8P0JXPT/58x9B4Obu4uKCSTElny+ZH7t5ubxrIFb84i99y92rIWa9viR5EY4CT3fXd6xMz8znEhvbc3x6RBoeU3vesC1CFpMCIefsfbceuWZxMuHuysVcR5Pn0O7RZsPtjbvOly+/h+4rjLin9SbaUZDy2ctTKh+LPoleMeiKZfE5beTu8fTImcefnbzgyy/cnv7bf/+3uN18wcXZC3/vZnz62VOS6ISra28s3AaUu1s++fYFu62bg0BESB3jgWsI2TNZFmxWdxyduGRuMs2IkoGhD9ntfA7g0QV/3fjIufo4Po6P4+P4OD6Oj+Pj+Dg+jo/j4/j/YPxcdK6UFKMBa7Wt6IeWJM4oJg4+0TQDdV3T9Zb1ylXd5/MCM1gunjhcpm7h8vKSPF/y6Nxxb9bre6SUhJGi89XkUDkFwIPa1Ga/Z9A9cRBjUi9/KWNAkkUZtXHdguPZgrbuaGpXaZlNcgIVOVl0r8J3PL9gcaxo64B3772xaLIljhQnx45L0fU7IiWJgpgo8d0XEXF3f0WaSazxSlmdYN/uCXwlZLvfEQQSJQVN4/kUJmS72REVAd3gcdFxyDSLKXuPnZYJBB3Vfk3huTBddYmSCv9TMFZRVlssHbud69A8ffKS6SRn6AWD9xboGokIoetdJeLqg8HSEoUFg/YV4QD2uyuMbHnxwrXp82zGaneHt68gTqckecr9+o7UVyc1d+TplGKecn/rrsFKSWfX7LwpnFSa3u5QQ4aVByn2iKywDL0ijg7eIhV1syfybe2XR0cMNmG92iKCg8yzIIwijo7P8FQ04uAMEzSu6gVkzUBVtswXR+z33lBXw6AFP/jBn474bRn0KJNRe45SlAcMQ8/9zQZhPW7Z7CmvJFneMvUqPP0wEOYp4GWXs5Re7wnigtarfoVyQBtBmicjzj1RC7RoyD0nQQ+R6+QGHYPvfCAs9/clxSSm6w64+oEwEqTetHiz2RDFrnuW5G6d5XlK1/UYY/GNKqbTOUom1LX77KOjJZttRRxnnJ87RZ8PV28IU4lpNcsjB8Gpqoq71T0T361oW+cp5uTWDzD
5UJk5NjmsMuFVv4hLV+k/FsRSATwxsHW4yuznGGm5iy3eWb3/keceLhll0+llB273zEevMOxGJPcb1LFKWLU53xkx98Jm2jxvV4xIffFQfHydExg0GFx09ecu9tKeswmZDmY5Jgzt1dEZupVsLk8JioKmJaO9WYzDzcWsA8Eb7ECFL6jR7Vgzq+J5L2RVjHU8CQJHSaXnBx/ZQvHl+w8sRn3/zmv2RnSydZZdTqEnqpq0TxElfS+09mC+xqilK0Wa2kTI5iyoPSb9j+NE2TByop11CWOHaNKIrIZXwTJyFxHKLrKnkh/u6GAv63XW8ILd5cb64315vrzfXmenO9ud5cb64315vrP8P1u1G5UkoqdZnVSRPSPKFah0w2y11dj3GqBb2tHi9PROaj022gqRmVqsy8FBZR5GHrTR5LLYN2q8bOwR38Vcp2RzQItrstnj15zkr2ROiOhmLVsR0FLxYn0YV3iWpZBNmKaktAGtIip1AiAklZaeUFVlVjulpgSL0a3c45O7+kVumyuyuw6JbV5/J8wflEPPe9B0M++/yaPM3RJM42NX1qtV1Mx2Es+4vmiym6sWD3QDz3aHSNogQs3QuyTMJPxnOiaMZiprPyRPnZtm3a/dYt9GO2uKZVH3JxccXmusCPL9wjsqBGbInM0traFt4yQ29EN0lG5sucYW8bxzZuKXJ1VWU29dAkBFC3M4LQp9aqoMqsXK835PXFK3a230cvxAtOZlPaPYtC4uV3tu9xfDrFqvkEmcje77fuc365xKj2qUks8+eHxzS79xluizk+vjwniGcMujZeJh708nCKbkYcujM6UpDnYnJBzzFwM5Fx3+3ex/PPOfeXZBKylkwvePn5a3bXmqwNxVxNw4DFSiGVVdRae0BsTCj14BYythhPMGodOuvbuLInIC1SXp1M2NwRmVdFs1E0ePX6kFL2itTqPZazFZppUMrs/cSd0DIaZLnM5lslqzjiIjrB0sW93EXM0EyomR0K2ctQra3T7NZ48kKI825v7nI9TvCXJ+zvi0xSHjSZldcYWo1qTVTUjk8u2b+zx03CZXadkHCE4q/jyEpA6PvEQY6i5ICktl2rEPsRzZpcQ42M0dkR1ZZOzRS2cHE8JS9hxgnDDZHp9EMBfZguhO1XbBWDPovlK379hVijD+7u8NO//wdUKWq4Puxz/+4nXE88ItrSXh384IxOY0Ai12hByNmpi6EIeEiz0WP/YJPryxVPvxbNzm+9LTKEj7864qOPRYb05WzG9dWMalWs67X6ENcfoZkmg67oW/DchPffO+Dhw5+AIfVOSBlfJeztvQfA109+jmPXaTWdW62PLIe7e+9xenzG5pYkTjg+YXd7jSSUaygv0FQHx+oykLDS0fWE4aBHkmR0+lJvbJSieBE7XTGfw3Wdh18dMhius1wKSMfeTp3jYxs/muHEQrBTYxurWmKYwqYsy8D3fbrNIf/j//Q/AEIQsSgKojQmiCT0K8/JsgJLirTHvo2qB7zz9if43g3hwxLHWZLmCsj17hgmuqXfIgHiCGzLBCXGXYkMX0PJMJIh7uoQuYwIkxCtluDHIls53Oxx/qJEU/VbeFhZ5iRBQRSPSUIxnnlik+QFmoRwWGaNpT9hc62GJW1x5Y/pVhx0tbyFOWZpgqGb1BxRDT2NS/QK5HmBdkNupPtsbfd5/FTMp2ZUiFIfVJWKJElJMpfBUOP1UY4uHbimFaRFKDnShQD0cK3D8WGBLSExJSWaVlAUEZnUINzZtxmNftNYHSUxqmJSlFAUwnft36nx+a/G1Gz7VmJE6MkU+IEYu4ODIY2qxoWXoZo3pBMqpm2zmHtUHeHzTo+P8JY9dEmpDjlhLMQ43ZXYd37/+/dZLkqCcEWtIX6wyBXx3HLMWx2FZqPG64sIR/arpkVCHPmoZo2FtE+1LAgDm0KmcPMkx9BV8rK4pWv+6KM7PH4JpaaTl2LckySkNLnV0Pv9D+8yHLgkkYfKjYZOiq4bJHGGKnv7drd7qJUqK6kbaOkmqe9j2jmRpD03tYSKEROv6qyWEgHSV4nLAl1WjtI0x3JyJpMJhSTaMi2H2M/RnQhd7od55rG+1ebsREBbFbWDCty5+4Djsx8B8IMfPOHbv/d93GiEKSdwc/N95osF/pFYVx9/fI8z/QVnUigXQMm79OpVvIEgoQojiAqNSqUOuaiirOIZWQNq1ZKDlqhmjUYjVNvGkFpRy0nGoLfOy+fPKDSx32exzf29b6CZPqOxmHelCNGSAjcTvbf+ooXZrNHb3MOWsNXhnQ5HV+d0N9ocRGId9dbu8ejpE7rdHTmWFpZhcj15hTsV41uzh/z5X7U5vnjCbC7mqtdew2kEzBfi9wbDDu7Kp97og0TBpLpDu/suzGZcXYvnPA5Dak5JHInQ9eT0CQt3wfe++SGf/upXAMzcLpcXZygb+9QHYj0sF6CkO9y9I2z/81//jL31dzHbCU9fCqKmzft7XF4+I/YVdrcFJH4eTTGtbdDEczcadV6/egiFeotiqNTr+MEMPy1ZToWulhuavPdgm9ATEDm/KDkd+9wfDvjHv/33wvb/4C+p2iGnL8+wu6Lao3FCOzXxTWHDejHBWI0Yh23e2hbQ6zwMqTRCnp8ds5aJPXKrU6c+sFjIqs3hquB73/8Tnv/6M+xT8c4b+7s8fHHExaHL3QciFuwrJm74iGYq/GucmxiNDG9m80ASGS3PXfpbd7EMMZal5vPk8St2dr+FIVtae501jh//CmopV4di/0WPmb2ccO97/1KMyfycyfEl9967y8cfCDIgpXS5PDlEi3WiqbB/3c5IwoxGV8Rzg8Fdhp23cOM5viQfq1bWWLkuvWYfZG/oeLzEqbbQZftJt94kCQpUTRViWUBa5liGgW05t9IyeZ6L6tU/EyOeLs/pdDq3hBbtal0QBBWCIwK4/f5vu35HDlcaVl08Srs+4OJqRLMxJInF5lwoMfXmGs9ffH3bVLtcrVC0iOgG4qBWqFTrjOcRo7lYiEW5g23ZNHpN5rIJ1I9V2v0dCkRQn2Y6tq0xm+s4bVG+nEwX3L2/zuGzERub4rPrUUKzukEUCviLn3ikZCzdcwpPOO6Z52HbJWu9u4zHsvwdnjCZzTFlc971ZY00PaZQU7oS9mRVSlw3YLa6JElEWfe73/0Tfvnzx1xLzaBKTSNPIfDT276Fl6+O+MbH36XIJnS7omQ7W52TpTllIRZUt19jOfNwHJtWSzzneFyCNScOhfHWq13s6imT6YjtDWH0ZT6hVDLizMCQsKc4hdKExprUYFmUxIVOp9GmLnH1q8AnSuBifC4MH6jUm5xeJeysiUDx/Dxk/+49Xrw4RtNFID7y5/hZSLzIKWU/x87uAZfXAeFKKrZvrXN9ktDR28hYBz2JyGMN4pBZKP6u1JcUawecn0qtr+5zlLKG06hwdC4Z/pSM+/fvk5AxceWmo2UM9xv4gfAYx+cXqE7JbOaTS9IJu1mwClP8+TVZLhZ6Xa9RbbSZSA0ty+ig5LByz+n2hUPMMoNFGlKptDiUDe2VdouxmxFKCJnV
G+IGLgZbNGTwYdVj8jhl5J1QtcThZmOnTxgblIVYM2eXD8kyi7oz4OJUlPe3dtfBNNGo4kWSlTK2+OqrY9pNGYSupsTFFb12i8VUkoYYDkViYZt90lLYi2EXLFZTjIokk3Fz+oMKFXPImYRs1ps2JycprZ7KeCzFh60KbnhBIZudZ96cIj+hWd0AyZ7Xqt3j3ncOGC/EQTEv4Oq6ZLQ6ZemLNWrbQyr6OovFBaot1vHr1zP+8A/+kotzsaHmTAndPlGU8cn3BFxjMhYHuHtvtzg6/yUAa8OPuBi9pJTELfMA8tKjYnfpS9HEi9GnNJU2SaGwmAg46HDtAWnic3wqYIidnoMfzOl37xBNhf34boChmgzXm7dz0+qWRNk5mTw8TuYZG9t1FsspjiUCoqS4IgiH6LrOkezx6jW3WU01Gg2RBHr5+hI/DkjKa7Sb3pu4Q5ZOUM0aoScSBIrSJAz9W0HNInfpNLf527/7Pzk+FRt/p9tn5fuouvrPNggBISukP02LEa1Kg4TnnJyJw8DS7VFpr1FkJYuVeGfHDli6EX4oBWA1gygKsWzlFnJ4/8E9vvzsp1jVXQrlponYIHIrNKqSZWz5BRP3DqbevNUWMXSFwI348MN7FJnweRdXc2x7QHrznCUohYZuznn0a0n4UDFR0EjSFXki56FdYX2zzi9/Imyq4vSIyyVxkqBzw5o6Z2f3Pp9/Jv6mKDIUTRBW3IplKz61mo6SdbgdOS1DyXMS2cOzMezRqAcEnkJW/gZyqKgl/jKjURe2l6Yxq3kDXb+ZzxjNzCGqoUj/vbW+z0k7JvRVCglHKTIF01TxpAZTxSpIQwPTcEglgY5SaKRpQhwlfOvbogfZXf4Md9WjKCVjqhIKPfPSotsT+5ylQxwYVGsmpbQNVVcoC+X2UNhsqrSqQ2yrjpR8oiTH0C3UUkWXsDk/ROjs6DfNTBmao5MmObYj5mUyfcHJ2ZKiHFDK4E1RSygs8nQqx+WcxaIgyypkUo9LKyCOcnTbIgwkC206whubrGRbX6MRo5QKRW4wlczDO4MWrYbDLx8ucGQfZhy5aJYt8JZAGoU4VYU0y4hT8ZxlqZBnCVHskq1kMhUdRY0IM+FPVaooms/O/gP2T4TfePHikB//6Iz/4r/8hFdPvwDAC3LiQiFVZEvDrIqSWnQkYyuAO17Q29lh/y0xB/PLKVv9bYKgxsWVsE/VDKmYDieHU8r1G9bGIfVaj9WF+P00XnJ9vWC4toUXiue+XExZuT6asaRREc9+enJB1XboSjH5tB4zc19wfBwwXBcHjb9+9m+p1Wq0KlVOZ+LZ53kDrWET6OL3lucZ+5vfode5T5ELGHJRKrhZC6VZp2KIPVJxErLMZCVh3laSsPX2BotpwbU88HUKBTfwCRdLbNnerOY5aabiywP8ux/fQy8NVrFOsyXm6vX1IS1nwGJ6iHS7OFaL45PXVOrCT25v3cFbLZmP52i6GOPUHWKpHkEYcymJmoxuzGTiENzAgps1BoN9lFhFVYS9LGYug80WebHkdOJKO7vH65OvuZ7IZEejyfpeyeXsKXd2RNIvHE9p73bx4jnLibj/W7tvkxdX/PCrI/F7TslW/T323or55c//XwBazQGdjSqhDzPJtrq1XSf3L/GuxL56FRW8eFnHdGz6Ul90NnmN6am89+EeDzZlAnR8SZgsefZIMF5HRp/3P/l96r2Q3JZEO8MOl/MrYilMX2o59eYBaAs8CSd2wwir0mTlGpQyKWvqKm9/8E0OzwQ0M13p/N73PkRRQuaPfiJ+v9Kl1qgQXL2gUxHwvkCbsKuqOFVh02fHL2jUNmnaCUYqtehWVxhaTOJGzCXE2LbqhNkcTRX7OEqFXJ/gBSmGTChVagZJmlKkKVkitSfjGMMwbpMrKBrtzhBV04himWgrb8STy1tCsjKWrKG/5fqdOFyVKJiOcO5T16XWbTGejljMhBPR9BTveEGjsUanI3H1J0fU6nXOrsQhJstG1Op1ikK5VWhGjdEtm+vJ8jZTfb16xmoW0m2Lv6nULGbLGZoO47kUB7Q05vMSw6xzLcVVS83E0KtMluKwUy1qzCZzDvbXmc3EBNRbKe1Gn8nsiCv5XJV6gW4qDNeFMQfJc9Z6feqVXTDEvVdhSFpkKJpCvysy4xtrfwz6MzTZ4G6VbTrrbY5PnmNKWvn7dz4hTVPW1vaYz0VQbRkmilJlbV1ikqcR9+5vEgYZnuwteu/9b3B2doEiN2Kr4vPsq2vu3X8fwxGGGk0jFMXBMGG+lMKmyZx6rcWTr2Vvk6PzzjsPePTwIYUU2dNUC6duECUeisSwz90Qq9rgdCScZqs1YG19yGQcEkbiXmfjMYYpsp1zV5Jx1BR6gx0uMiFmt/InDDY7nI9PKVPxWdWyMRo1puGYmi4Ocy21z9F4znQps3KVPkrpYa9U2rKhXUtyfN/FUyCRTfWuP0ItTVpS3Z5kjrsK6LVqRLGwxarZBi1gMc+pN4SzUTQdw1Fp9USm5+Lskrpdod9vY+ViUXcaVbJKjfPRFblkpVHMGlmmcD4W/T9OdI1m2KhWzotT4Wy3BvcJVhMcp6Qoxfx9/fSYl8fPee9tETT1ux/wxcOH5OmUXUlCUakN+PkvP2e4hmQ8A6MSkysFG9vCfsqTkt3dexxdX2EWItDwlgnddp/x7IJKS2weK88n0TWmN9TzSoI39ul0Ghydi/WwqayhGRF3t77D8aVofN3Z2WA+TbiYioPT+rBLo7LHfL7g3fcFzn0Zp1S1EbkU2c6LlOv5a0ol5r1335bPBEG8wFArbMsGZatyhxSXdl+Mk+deEq0yWq06qgweTSmaOZuOb+cKZcXdewccnUoa+2iEU1GJsqdcXUvmo3KDMotY6zuomhjPIjPIihGbspE7z1RmU5er8TNsedJ3jA5xfEW7s8V4JDbZvb17zOaX3OgNbmwMqdYcvn74U/b2JasRPq3OAJU6h4fisNjrQqRHHEu2Kd/1SMuIkauQJpIuffSc/mATz98jy8R6t2suWlEhuyFuURROjq/5/OFfs7MnDuevj69Q0CiVnFAeGhR0lBLR2wPkYQNzEBMGS548kgQv5RpeGGCaOiVSPLY6xzD7rFwp6uu4QExZNAkiYS8nF8fUjDZRamOo4vd0NSaMFCyEfxsM1jl+VqUoMsr8Jqi38Pw5rmvTaogDOkTkaoEuyQ6yIsFQVFTNx3NvmqvfIk4n6FRIQuHPwuiK8WzCVFLGa0YAhoaumbd9ZrWqytXVFUohNueSBMqSIofVXMzng/tDNtYiPvMLFEN8L0pCTN267TELowWWVScMYoyamIckTzFUnTLTUDSxjutOjyJpk0mKc8sssXSbwI1QFTEu/irHUNqssgg0sV2rSkmJAVI8s9UymVzHRGGJ3pCkEJmCbpqUhUt/TY5VXEFTa7fCypSFIGoIfVoV4XPr9QZl5pBmOXkhniFVExyjRuCLJE1ezDg/alPkLXQpHh9GCY5ZJVitqDTEmC9XGUGikMiKVEuroZYFql5hPhU+wl0WGMYmBertodoyTLy0vA1O7Hr
Bs5dnXFzWsXsiAIt9F1Vz8P2QtiR4qLdqfPHpMWkh7DwIfMo0o1ark0lyjm6nSxKnzBY5rjwstiqKED/VbjLSEe+9+wCiMaFkMHMqBlHuURQZ4Up81qnBdObhJsJvlJlKVibM4oKtTeE3Lo4v+OzzE/71v34fpyq+16jDeDRnd0ckgRQtQCly9BvYCBAaHuRblPMAACAASURBVFdnTxnKPmXbUFmkU1w3x6rIQ2hSYzGO2X+wzkKSekRLg9HqOakvDj+O1qQxqHF29pq6syfGoF+C4eK6KY48oG/vDEjyBmOZTLpYXFGrbFOvN6nIZE59rYfvX2LbDh1Jpb84f8HW9lusprLqlyRcql9QFCaGJSvX1Q3mixf84tEpB3tiHRdek/tvbfLll4LZdflqhFa0CMsVjTWxFzUbbdLsBK1VwfUXcuxMsHVqsppmKDU8Q+Pq8UMK54YIK6ScJuxtbHN8IRJsS++a+SpnMh7K78HGWod+W2MxE05hNn9Gt9ajOwi4vhS/1zd2yPUj8lDEDfPEp3ArbHRtRueSpGx9k9gdYYQW76wL23v09BF7O+vYsrIbhHNefr2gWmlhqTeslGfMjksMWweZEND1BoVi8C++J9b23/w/vyI1nqCM6lxfiWeyrTrRPGO9a5PJtXV8fohapOxsifixNblmaNX59PARzwOx/v/gj/+UytaUq9GUs4U8DC9C+sM7rN+RyBzD49Ov/pFOq0siCZf2Bxu0uh2OX4lDUmdNwV/MCc4zTGkHQbKk2mhwcTXFlARPG/02jw5fMgmEbZpah8lVzMKb8fWXRwDc+fgDtqMute6ApYxPg+mSrfe+gSvjjUGnRZFHKIWPKQsujpbj+VOmno0juQOKuKDR7JMrUsLF1FmscvpDkyQWY+AHOaCimQ6mLAgYVo6iKLe9VGopeAE8z70lQMozkTzSNI3RtYh5kFIDv+36nThcxXHEpaSQ1DWLSs3BchpU6rJsV7o4RpvL6SGzVBxaNKdKlCpoEj5VqzcwNJvZdMnGUFKalhplgSCOkGVdp1pg6U16dVGGvVydUWoFQZwRFyKQyYolSrpEsSoYlizPatsUrChkcBBkGYWS0m5vcSaz5zEJK2MJRsD6llhkuqGCEt9Sjl9dLdlY75ClC4Y9QQd9dPxPZMo5veYeK18cCP7df/yf2dt7izgUBmA2daI4pdnq4DjynWsK8+UVafYS0xSLo9XqoBsVrmXjdhRlBMsVllXFtsXfvDq6xDQden0RhF5dzun0+ijGjEiW24eDTTxvQlmWrA3E9/J0SOgXbG0IR9rv9nnx9BGtTp2FJ7WbjD4YfVrNPrFsWly4I1q9DpGET5XVMX//oyc4Wp1eTwTAL05GOJWUrV6XVS4y7HpuMp7kt4w0q2VIsYoI4oJhS2SgkiJmOV7Q7e6zkEaf2COUzGS9Jxy5UmhE8SVmZY1rWWVAj7FUF0Wp3rJE7W6+xfX0EKcu5mo8zmmt1cmLOaYM/q+uRrSGJf36A2ah1FgKfe7fvUcomy2z8ASnpTF3M6yO+BvfLxjPXOyGgi+djWPZaGbE7sYfADDLEnrtIZ9/9TmtrmQw82ckUUihlySaZLPzQxrNFoYqSvnTcUKj3cNbFDx5JhzSo5ev2d8cYJo21yPhbHrrDmmm8uVXIkv1jQ8+5Oef/hODrQMGPbHpKNkZ1VrI1v4mv/jspwB0e3v0du4Qh8JB5UXM+fVTCiNmbVMcNhTdZnNrjTTO+Pid3wPg8OQx89UUU67R6WRJVAtQFA27ITKmRbzi5PwYTb/JiqkUlkq1ohLIoNfLp5jWijLvot5AsdQKs+mC3pqsNl/HVCxQ1ALfvakpSJIOR0GTUMwgHKObDXJZCen3LU6PTlnMlnzjA7Fmq5UacVTijko2diWLkpfT0hsEsgI+m8SsDdsEyRlLyfBZZjm22mPpnd/KFyTxgDRsoWgi0+qn51wdpbT7LY5OxQb31v3vYRo14iShWhfBlOtFaIYGN8yZrQZ5FtJrrHN5Lb73rfe+zWraZBWvUa0J+4zCJQoxeSk2QsvY43/93/4t9x7s8O//w1/fjl2Wl5CmKBIuoSo6aZqiSTKSMLlmbbCHRps0FuuqNCL8MMew9FtW0299ckC9YVPmMsOfppiaRRwVFJIO+p1373D+skqQudRkBZ/MIQ0WSJk7LHPIwstp2PkNgoM4yjHNkrVuj2dPD+VzWpRKSSx1derVNWbBKZ2ewclLsf4KXMrcoiwKDF3C1rSUlXeNbT2QlqGS5gWaprGaizW6PtCoVqt4vnhuvaKQJSVqKZgFAbJ0xeHLM1TeoZCOo4xNMjIKZFXFSLi68LGsPlGmy890SmC5umJzS8IeC4U81kk0Sc2uWqiBIIy5e1f4iHpdJ4srlIWJJuFSqAVFkaLIstHO9oDZ5UhQL4fCD1d0G0qLIFjhSqINyxAaV0VVEimlOabeoCyXt4m3JFUpCouiKIT+JGAYCrGf0mqJQOo7nwz4+nMHpbRRVIkLQkHXTZJ0TrUh1l2r0+PoMqMmofXZKiEnx0tLKjXxfsvFjOWiQ1EaKIgxLpKMssgEzhZAL2jU1/GjEEtCExWlRFMhjX16m6LKv7G1ie+OBZQT0DMbPS+JkxVheHO4MvEDn5WfYzYlvC8PQVMo5OHO1sFbjvDnComsXJlFIenmSwIp6taqlawN7/DLr4X9BKGHYYFqmty9IyDGr1884umzZ/y7/6PLvXflXh4XdE2L89ciwbO9M8RuKngzmYEBmnqdjbfu4C7lgawwODt7hlLt3JJ/+eczynqd6QRKOVdZtkAzFSqO8CN+VHIxPqde6VPKw7iiaMR5wdq6w+xc3sur8Hr0kp7U/9td30ONW1gNi9NTUU0f3rnPoDmAwMCuit+z622ceoAi/TdqQZqolFmbXl98dvrqmvWeRbOV0usJv98yazz64ghNvyFE6WI3FcpzWMnE16qsohmf4CZPsBzhKJZBylu76+iSJe/4xRjNLuk/aBLfwJDXvk/cfsrSnTBaSLr7O/exFtcEsdgfAz/GaeyRZRmlKmKQ56dPWO+r7N9bYzES/npyOme7v48km2S1OGUWTEgiE98Xh2E7vcTWa/zhn/8JP/zZfxLPMNgkiTP2ZHvID3/1U+48+ADKEkNqOs5XLqU5ouG0aG8K/+0lC7AUxithK8ONHRK1xJvmDGVybHOjTjGJ8HON+EYLLihYX99kNhZxn7Xe4On51/RaLQqZoP/yy0/Jg4L1zQFX0jfXOk3G7gXNdWkviwinUHGTGR1dvN/57BI18KnUxd6rWDm6Omc6WrG5IeLojqUzH49pV2tsS8bEshjhUWF/KOKw6qDk+BdX7B1s8ud/JeYzKTXy6RmPLyYMJGGXO/P5+7PPWdsQNtZodZnOznHU9i2rcMw1qtpCq3hEspI7Hvts7G7TWxcxSep7WLmLnik4ppjjV8cptVqNnIw4uGFDzTA0nTi++X8qqlhFSVaRqK0kIktSbNuke0PM9v/PZ/GG0OLN9eZ6c7253lxvrjfXm+vN9eZ6c725/nNcvxOVK1VVmcum95
3dLU5OX1Gr1bArIvNxdjKmuQ+6aiAh11RqDnmq0WiITEtRhliWjaIkvDr6Wt5XJ4lzNrfWmE5Edml/f5dF6nE0Fo2UZW5iWnUqdZNOT2SyLq8UsjCjVq1RSCrfNJtg2L1bHRFdSWg2axydXvLg7W8DMJ5cUqIyHs/56MMPAHjx6gsCf0mrLnC2DcvBMhOchs3RiaDINrSAdmOAH0wZrItM+Xr5FnmR3ipLz+bXOBWTZvUuV9Mj8X76KeQNIEJRZAZIUW8F0ACazSp53EAzElYrccJvNWtYdsnJsaSC1ius9wckUcLUExmbvDzjYPcbPH7yBa1EVAeajSqWXbltWI4iaDQG1JubvJwKIdV2q0IJnByf4VTlWJkBRA0qmsjGxO4cS1eI4/y2F213q0pahES5hi5hXYGvo2uTW5FdL1iwNthi6Z/hFSLTahoGemLir6a32bSKtg62waWkv767u894cs3p+ecMuoL+FqWGZTaI44CVKzLzr19fU29UuTwRGcSN3SFlWXJyGHBwR2RDfM9jcWGiryVENxTOdsR/+sd/vIV55FbJZ8+esnvQ5EJSVDvVNuguh0eL2+/Nx1+gmzlrTdFc6icTFmMFnR6lhKzc3X/Ai9dfcjZ6xoO7ouR/sDXkanKO7wvbiPMrAlclD6Hekno1eRPdtBmPprdN9fvrH/Hzi7+h2xMVsKrToVKzKYOC00BkvOarY7qtXeZXo1uh1igJIcrpSEz78fkJO3tVJlcaSigW5F/86R/x008f0r5T58nRcznGLsN1i709kcl69uw1Ybji/XffQyLbWIQr3OWC3e1P5BhcoChNrq+O6PVFxm3lhlQcA63IbqGsSTZl5r7ACqQuS+2AJJlRN6soMjO4kgQgmtojl6QMWTJnnl1SU0UG1ShM2nbEcGsbfynmZTU7o9vooxkB85FYI81Wi6vZCVkq1tXa2j5ZPkGlekuA0nB6zN0J7njMu++KzHUQzWg3NqmWUmdP0eg33mHlHdKyxby0WyaHx69ZBZfsbIhMZ+Bd0B++zWokbHHljSkNhdTIeOf9bwHgBgYrZZON3TalxIargcM89Li/ISCqP/6Pf8dHH9R5fnZ8W9FrtVXyNEJRFBSJM8/zFF23bqmu1USj2oF2t00oySr85ZJNs0PumGRS/y+lYHStgqSxLcwSt0hRYwMrE/du9lr88scXdIwmQSL8sKXbaGWBn4lM8mxSQSNHsRyKSJauVIUsztC0Y04nEjdT7qIkLpqs6IVZgqqn6GpCnAg4UVrkkIFdsZhJOF9vLSacV4nz31SyVHL00iSQpBrbu1WODq9JZe+UYpcUaYbRajG+Fj6vdaDTrOwwD0c0bQH9UooZmlklS8T4DrZDrucuXtbGlgQhZW4S4ZHFEfe39wAwdZ2AHE3uMZRLslzHdyGNBMx7saozXfYxq9GtHwyDnMxQcCqSgrxewfcNEi3GkNCyMoJADTAtja2+2CNfzBKCMkGXVUaDGlnm4QUK3z4QayVLwY0d6rpOiMjkmomKH/sMdsTYKb7KxWKOZvRRZE+ZY2jEeUwUKmz2RAXo4jqn4lTJb7RhFAdF10lWS+4/6Mh3KQkzG1OHQIokl4pPiklFUjh/670dfvAPDykMlTgUdlY1dDJdISoKhh0pRJssuZ4GlIasYCY5QZaiY6PKqs2dvXX8wMdLC5xM9toqGWDg+1L8tFHj/n6FH5wtbquvCgpFUaBZNUIpvL33fp8yhPlMVuoaJppWRdUDnKaY07WNd5kvn/P1kxO+9S3hBx89ecp3vvEeJxcCGumFF5RmDasmcaUI3annz7+gookx6LS7rDKLe9U2r5+LNdN/+4DST1GMBFsKhFcLlfmkRJc9l/2eiesZJC60+lIbTtUJlxmRYbBKxPsl4Yo7OxtokjBIVxuU1oKCnDv3xL6z9FYskpIvPnvKnQMh4ttt1Lk6idCb0tfaMZZtM7m6oNcT/caWDYswZ3/9PqFsiJsnY/Sqyu6BQDm8OPwaLa5QbzYIZO/yxL0mWV6CGdBqCZTGZDbm2VOPlbxPpWKzVt3g8vIaCuHTvfjX3LlzwOunR7TWZKtA32F8DFZdfG8RXrGM1gnHM2p94TdanZKJd0l5omI3BPzUcyMqdZ1nTwS8sF7r06vX8YIrrJ6wFz8qcJWUz54c0aiI+TqanbJUalhS0+73/viPWOvW+OHPvmZ1JJ6huamxO9zl2RdHqIbwQZ1eiOeeksm6x2hxSVXtU26Z9CwxBhW7x0Q7IgqW6Kb4zKxrHJ6cUkr5hPDlmG98810qTluMDaKqOc6nVGsWa46wj9OTa5x6ztMTAd1PI5vuXpuq08OTvYy2rmJ1WlxKiYpgruKs1fnm3QMWgbDz8cJl491NtCjj+kqM1TRasX+wy1T2wquRxfa7Fa69JUtJOle6NrGdoGd9kGiSyWxBo5UzXop3eX24RDNdgthnd0+s7ZrWwtIVvCBHkQRr3/7eR7x8ccjXJ2IsTTWkmEGlcZ9lLMWVq23c1QhDL8kz4U+NwsIqErYGYiyTioqpWASLM2JP+Lf79/ZYRKdcXF1zdSkh8DeMdL/l+p04XBVFynBNBKZHh68w7QJT71CThrq3t8f4esF8GnPvnliwo9EUywa7ItnDshrT2TXbdywST5AyhPEUZwjeKse2KvLvLChTqhJvaZo2dlXn4vIV3pnYBLa39jk7P6JUgluGuywpSPQZnbZYrCt3LLR/fJ0XL0TjpmlU+eDDjzg/+wG/+vyHAKwP29y7820efS0cqVUraNT3ODuZE0QChri5uclsuiJNQjzJ9a+SE8UBjiEOW1o5YLk8YnPTZtgXpdjp6hFa1qHZ3GWxkkQb/px33r3DxYkwwtC/piivsFQHRwZzpt5hsbomkhALXasQxTlploFkQqrVuyxnCpubm/jBDSvemMXMYF2SfIyulww31ri+fMz6thQoHL/mo3e+z3x1SibxsesH29j2HqNrQSqg2T5keyRZwdWF2ODi2CBML1GVknpNHGRW3or+UMfRxME0KWaMp8cE0Qgku5UbOGwPBnh+hKlJIWPLxzJbdFvCgcyWT7FqPXa7Dzg9Ev1NjXodQy05OXrMxlCKFtoZtt1nIvWdHOddlosVpZbw7JU4CNfsLk7F4vnRp9y/JzZLXR8Sx1csXeF8Wh2d1WrFdFRDioCjxgm5ElIqPl3pzL2ZSqej4iVSTLrb5Hp0QaNjcn4hxtx98gsMJ2Nj8BbXI8FqtL6+jmkbFIVkY6qso+slzT2DOBbOYDa/YrSYsTbYR9fEnLrxHKvmsHDF5vXq7Gvu3P2AxfySwP0NM+DS00iLCnf2RPBoahbz8WtGMzHHFaWCEWcYVszOA+GQvn70GVWz4OTFIZqEKswXS3qdIVksmcE++CNWwSvKUqPXFTDEHItlsGA0F2toc+OA8fUKy/Fwl4l8vx667rOaL3Fs4bKePHlIq9NEsYQD1qsRttHj4uw5ra4IJn2paTabvGLQ2RNj3N5jNr/Cl4FjEiQ8uPMenrvAroqAL89z0mRB4KlU6uKdjy+eUq21aNdEUBEES0bTM
4Zr+9iOsOHx/DH3736L0Vgh9cSmujO4w6sXL2lIkcZ6vcXrs5d0OiaubJLOEgMvOmHQu0uRiTXq1GvMlydsbonfy8992v0Bruvy6kiwMW2s/dd0exUyJUPTxXgmyQk7GwPOj4U/ePjy/+K//a/+gh/9L0fo+o1QrIqq6JT/DDSuauLfrR6Q4fDxlsVA93AX8rBTbeDGMbkf4jTFxvTW/S3+7m+foqgC0pGnAYph4vpLHrwtehfDeEqWR2hqm1IKL+Z5QRrl3L0rYHpJZJGnKUmakqcSqmhkVKoane4aJ+fifdAgL1RxEAQWqyXVaoGh11jMJRysZuFYFnGWkstGqCC+wLb3b3XEsqIUYuy6RVVCfj3/CtdNUDTx3HGWY6oKYRCgSEY/w8x48cLFsgeE8lBWFgZmaZNLdtI7u29x/HpEnueUN3qKpkoYqTi2zttvi3f+hx/+iDzbwrRvhI4tEnxW3ikffiASdkcnz0hTg4rSIkqETzCtOp4bU6kK5zJbeBSlJt41vwGj5CQR6EqKWop5Pzq+wrB2fzPvSkaWKyhlRn9NrJmZO8MwNfIip5AED4UCeq4zWgkyl5n/AUWiUast8Vxhr5WKjqYrrMIzDqQuYjp3iOMcQ8IZFTWjLAqyJKaURAadVpeXZypxEZNLWKWlV4lXcyx5cLucnDBbJqAJnSmANPMptIQsyvn4fZHIuLh6zWIBSiOTNlYShRlqvqJdE8/Z7JeMvi7wveAWrlgkKioqgUwYdLoqdmXIxdkYS5ekM1FEnimYjkq9Ksaq3tR5/uIxltQ7KkoNRQmgKMhTce9KTUO3Vxy+jPnqiUh47N/tcOaNGdwT+9pkMqNVs4g8uc6A0eg1CjqxTD42G30e7I6YjF7T39gDYNhocI1HFEXoSv/Wzrb2EkYjkTzWtB6Bt6TutOhLxuTHj3/NannJwrbZ2xb6UfPVBa1qnUkmfOb1+IpWq8V49oxCHlpKLSLyC7rdNloq1uOXX1zTaBb4Y2GbH9/5kJm7otOu8uiFaJeoaw2cuobtNHj++rEYz9xnrb/FF78S2oLf+da/4vgXP8JVl7y9KcZlESTQczk/vCCSz5WnFTqtPTqyZWR7e5dXRy+wzBZeeBM7GTx58Yr6zhqJTIaVHPDeH/TwluI5N9TvcHpxTKQVhBMBoXY621SVkjgDRfbtKF6FVZYxHAj/Fikek9UFd3feuWVRNu2EiX/GV8//gYNNEXve3VpHd7bpSCjd1WjKj58+JcwuqUth843Gxzx5+AqUhED2yE3mI/Z3N5hJ3bJSaxKkY67PhiiG2COfP3rI9rCPYdRv4754tcKfwcEDsQ+gKLx4fobvHVJxZCLRDlHQefLojD/5U9GKMJ59xeR5wP4d8b0gnHM984ijBdyw7jU28cc+zS2xh5k1F0cdsnRDWjXxvX67wtK7ZBX75BKGWG9Z9Lpw8lwcpLTZGllU0mo4VGtiDWlVk1J30MuY0wux/g7eWkNNa8zmknPBSlHVCrW6zmQiYoJT75yKUyOMltzfF3b9y19/iu+HFJl4zlY7QlFi5qOrW6bDMEjZaCs0ayaTWMx7s9HkKhgxXoj935oEzOcmg3s66U3y0TOIxlOsQvRpAzRlHP7brt+JwxUKhJHYmGq1Bmmac3U5odMWk3Q9OuPB/Q9oNuvMFiKjl5cu7V6HqiMyDK9fjqnVasS+c8tu1zA26PfWOU+Pmbsim56VEe1u61YALMtjwjBA1UqqMriazzwcu854fE2nuSfuVXcIozmmKTaASqXGcjWiUmngStKLWi3g66c/Y23Q5vREOIOyGOAuy9vqgetd8OzZ1+zs7dLJxCarayq9tZyL8wA1E4F3EM/Iy5QoFgH1cG0fwxsyGXtMJsJx7u48oGIM8YIJLUnFOr4OWC5CdEuMXeBG1Bshg/7BbaUqdyLi2OfBW8KJTScrDo+fYtuwI6svR8eX9Ls+jlYhLyQVc1DBqphoUjQ5iXOePX9FzeoyH4nNxHFCHj3+BZpaYlfEZ6dnM6rNjHAlexmCnuh50aJbityN9QMuL+vUmzZpKuZmc6dGgc/phTjYrA071CobGLrN2rr43suTEWF4hWm2SCPxnCELzs9dDEf8zcFdi/k4wstOMW0RgCWRQW4bNNtdnr8Q/Rx37vVZeVfMZ6Kf46c/+TmDLYtaW2F8LRxNf33IbH5Jf7DD4bGwxe3BkHfe+oSjYxH0RvEZtqMReApOVWwC08UZJQnvv/u9WyawoD9mOjvk7oGopoWey/r6JqbZwD4QG8XzF4/Z2bmHohbMRrZ89hhDbWJLUcGj4xfU6hWytEUkx6Bi9+l2wFvpWDeirEnEeu8OUSqcCsqS8bjAtusMepIVhx0m85BqN+VIjsu93X38eUwsg6bdjR5Xxws2d/cIJWPi6ekVezsPsDTllszFUh0ij9ugZeU9p1ozSZMal66wYbuyRrdzF0Pa6/n5C2rVDp3eA549Fr1hSpqyvb1P4l+RyAD23fd3CT3rVgjw5OwJG4M9Ot11bBmQuAsxj73OgFUg3llVmpCVdCUNepwHzJcuUTijsMUcL2Yhw7VNyFYUhVhXOxvfpFB9rkaCWlcr1ujU38X35tiSyqo9/JCDrftEnsdAUut6K59Wq8WTpyKo+OSTTxj261xMj6jWxb2fvPolb9//PdJ8zqGsuneaB7TbfRZLsWZmC5f2Wosoidjq/wUAG1vbzMIFWlHBK4W91Dptyqjk03/63wH43r+o8+j5cw6fxpi2SLiUpYKqqmRFTin9oKqqlEpJXkpWLC3kv/mrHY4OX3J9LuZz7Y4KeY1waVBBbOqvDyPiWCORhzLyCNO0yJWYVSw2oSdPVxTlPuQlpaywF+RoRkJFYuhPjlxspwsoKDe9ReTEyYqNrW/y45+IYKdUClTVIs1vRGENKtUEb2mTy0pZ1bAJwhVOpcZiKXzzux/3WSwzIomrV8wCRTFRSptSVs8KZcl8DppMaGVFimIolIBsDSEvZhiVAcVMJZBkAFpeUKu2iDxxyC4Tg4o+IMu839DKaxGtWp8FJ9gSx1+prqMq1i0teV4EhEFCWZi3+1yaF2RlSkZAKmVADLMgCBZ8+O4N/b3NfBait1WSSLI4WhViN+b9t7cxVDHGRV4nTnMsW/ZO5omgfy2WzGX1fuoJZsCiyMhyEchUnAZxGKDWxL2fnT7CML5L7Beosnk9Ly3ytIflLLAkYiEvLGFXkjkzTXNAoUhi2g2JTnAT8kwXvX6y0b8sTHS1pN0UY2c6GbrRQ1VzkJXrvEhQCxtLNRkMxPhdnIREcYYlG9PjFMoS3OWM/lAcmPtrW/zd3/ySorCJZUVNKyqoZkEpbV+3M/7xxz8jTVtI1mWKuMDQbbIsxZCfDYcNlqOcKBS2qJJi2Aq6arFceHKuSv67//7fcPhkSd0S71x1Sg5fPSWMRIKw3Wiy3rVpSEFmgNzPUWsOVTlXP/77/5vNOwecjlX2dkUyYIHCaHJKu6vz+KUIYL25w8HdAWZFrPXR2KPM6pgNjbkISTAr0DGGbG28xXgqvqeYKl8+fM5gQ5AkOe1z
rsfnGGqbQCZXO41N0mxE1arQlExy3/39GuevX7O/IQ5p/mjK0fgYLTP56JNv3Np1GimEwQJH+pvFNGSlLkjlXvjrZz/DrFh4QQ3DEoeB81c/p7N2l/2DB5yeSzHgjR6GBo+fij4wDZuKbRGtIgbrsifZzXEMG40YVzLO/vrZ37E1rDO5FPtVs91h6p2xublOJiUNrkZ1/vgP3+MXX/2KpixKtCoFJ2dXNOSBqGFYeEWGn/1/7L1HjG1bet/32/nsk3OofKvq5vBSdz+ym6EJ0aJFwpLoiSXYM9sDTwwDHtnwwBMFAwYEDTywBcuCNbHplizBFgXGZlPke+yX4823cp2qOjntnDxY6xY1ECkPNODgrtG79fbZYa1vfeuL//+CpQxI1ktVCnaHZju5Dsalfo0kyjGbi/1RbpgYY4t2bpfVlXCknj87IlDnGJpJOSdkOGcZqElMMRN6Ks1WSzYyFwAAIABJREFUNEsWQZxD5hqwjZi19U0uhhOMggwkBDqtNYOFRCLudXaZjE9J0xB3Ka4xQgXX87FMlZ/8+F8AoGsazWqdTkvY0VeXY/rDYza21yjKIPfZ5VPcWcpsJUGMkpS7d2Mcb4jvCP1q5+uUShorU7umVNi6scX06oyODNxoicn+e3d5+eVPWbkSNZY5Dx58j9ngjIffFcHc42dH1EoWxZKkTzAKZMRomcLBq3P5vCKdTomNjW1efC2cziAtc/veDgUZYCpWyvz0qxdEbkK1JfbD9PCIk0WTQlXl/q13ADi5OmS9d5ev/lgQY290NiC/wh0ZnD4XZ1i0GaMYU/Dy3O0J2T++kBU6f8Z403P1ZrwZb8ab8Wa8GW/Gm/FmvBlvxpvx72D8hchciSiqiPT0evv0z4fs7ZUJJI781uYeppURxybViihDWjkmOb3CaiajVKFLoWwQB2U0GR29eafJF599hed55AvCC48TH0uxOTj+AoBmt4ZmtNA16zoLNp1OUVUL2y5c1/aORyvWt4p4rvBHHX9ImnnEic7uTZHun80muEsDRXG5cWMHgMHomLP+C7pt8e+bN36G87MhhlIm0kR0QlPrmHqPYjGg3RUe9tGxj0qJqSTdszQPVVNQFZ12U3j4arRJhE+z0bmGNFfUSz7+6IhH74g0tmVk6LrO6ckR1bLIig2uhtiFIhMJFx14MbtbD1jf6LCQ0dedzQIr94Ik0clbIvUbKwvytsnFiZjfre0eg/EBnt9nMRVR/163hqIvidWI4Ux4/WWrS+yMMdSWfMeIwfQEULl7S6TRw3hGHMf0rx6zu/seAGcnC8Ls8rp3azoEzxmQy+X49lNx7/msT6teoNoK8ST3zsrPUShtsVyJKE7/NMfobMrWjQrjmVjPi9MF9+7fIW9uU5C17o6T4+LyiF/8oUiZz8YZcaJg2wVsW2Sl+pfP2L1xl+XUZuuuyMz1+33Ozz3astxvOPF4950HnJycsb8v1mo4WFCr1JitnpPIUtNWdY/JNCNLRATVcyK84IxO+xFIsry797dZjH3mq1fkdCH7qR9SLnbwXBEV215vYuVSzk4uqTdEhDZv18gin41O7prH408+/CM6ndJ136AbxJRrDllWx5GRViVRqNpNzCxkpysiiO1GkdlQYyH5xsJWjZ2NdUbnBxwciejr9oMe89UpjUabTCKkGWbA1eCETZkNXXlzXG8JiYPri+idOk/JF3LkdcmTlEwIwyvC1U0e3P4V+Z59Aj+l0aqxXIgo6mIxo1BIsSUvC1kP3/cp5Bo4kXjPXFVERaMsRTUkZHQ6YzCdUE1EhDYkQsmu0JQKqSWzmmpEfzin0e0RymidG8xpd9dxLBHhj4OUvB0xX83oNEXUNlNtPn/6LdWayUlfRNjK+QJKWuRXf01Az5+dX5AC1WqXRkfyD4ULFEy6zW3SRERIJ9MQDeu6v3Jv9zZJskBLtqlVJBqTklEubpA5DkupS3rdNf6nv/W3+dW/Inq3zmYfsRwc4XgqliyNjqJAQB2lgIQ0V1DJUq7haLOkwG/+ySWffH5GIIlUkxAcPSBNA9a3ZR/fxg4ffnhOnEriVkMlSQU5cSZJS5v1bY5DFU31UG0xn0EQQuaTSgLYxUojDH0M1UI1XxMZxxRsnfFkQOALmdU0Dd/zUc3XqSSVStkgn+ugKL68d4CiKCwWS3o9IVf1WsjRC5tM9lxpqkeWGQRxQIzIXJWKCvNp6Zr7KwUSJSEIA8oF8bzNnQb/6o99cnaeNJUoeNmS8eTqWk8VKxk//ZMTdL2NqksY9Dhi6S8oFgwePxdnT/88IW93SDPZk4SCqtjUqksabbGeT18sQNnH85xrqO4ky9AV/Zpo+OmTDMVYI82Ca1TMOMuIwxhnfsDJsZgDRSuhZyqqLHFEyQhDhZwR05YIu0efe2BopGl8XaGwcn30Qsbf+Ov/BQBPvv2Aby5CavXiNYR0GhgskwmlvIYjz8z5NMTK+ZBZcu0gTBJ8f4AnM+yjSQ5VV0gTn0xmytByuE7ASq7LaNBhOjXRVBtFkX1ZqoHvZUTh7FovTZYamWaiy97e0PPIsowkDVBNkWl9eXjAYAZhnF2jg9m6QZpmeDKDUa6bON6cRGkj27dIVY3IyYhjBzUWujKfUzieqoJUG7AKBmkSY9ll+oMjAN57932+//4v8d5b0TV08+XVkp/57i+jygxttVLgd//lP6dWkjwBQDx12d+9SSSRK+/ub3M4GbPW6zDsP5brt4kzX6IpFWoFSbg+PKPTfXidwYiCMRFTrq7yVAsi+zMZL+h19slXTaZHUldaFaIkRo9ltrKwhqe9olTOM59LaouCxWb9FoPREU4i1ubqMKVdbpJJFNWjy5/Sn0353oO/Sl9WnIRejlI5h+MtiFIhU9vb9/CCJZki9PngdIrRatMpVPn2WFQs6EaONHDw9TprW8J28Fcez84OrrO/Z1efEsUx9foupwcyi1K0mM6fUzDrNGo7ANQaKxajIb01saCzxYBOs4Iam6SmJIqtL3hx+IqNjkLqi7Mv8ivs9RacnYnz37PLKEqZweiKkuz1S/G47AfkShWQWeLVaoYfH1KV3E2hNydvZAwvFhQl7UGUwWA+oVLtcHQk5r2xrqMs5tQkR2irvcarj/rs32ug6GJNtazEbBEwmc3J5SVUf87ganhGuSi+b3hVoFatoDDl6lxk79arTXK5AqVSjcVQ6OZqucXezTv8seTQmg/HvHXnJp4fMeuLNb3feUC2ERFJXaZGGsvlJeNBwt66yPIvpi8wtCZ5w6LYE3bQ8nJFvLRo9EQVx2KWsBqvCAPvGhMgTRO+ffEt7WKNaCXkc+mGFKspXZmhHY8DRoMjusUNmg1ZlbJKeHX4EWcHBufHQpc8fH+Xk8MFexJB+Pe++D2M+hpOPGD0Qpblt9bpVSzyVsrxmeCdiyOVpWLxzvd/Wc7vktHLc/KlFhv3xfO+/ugVtpLj9oM8H30u3vPKk2TJf8b4C+FcaaqCmoiUrrOIyLIFilq6LifwgzlNK8fx02fX9eoX5w4aoShtQLDI9/uX1Coqoex3+OSTJ8xnGbmciSFLhYo
lm8hXqJVFL8Nick5OT3FWIZOxmKxuZ53l0kHRNUzZkJwqsHRcTo6Ekt7eLVEoVgmdFmcSOrRYqmJZJheXx6iqMHLXu/cYT8/IyxK5crWMXTRYLSIqEj75m6+fYNkJ1WqVSPanlEs9XG/O3q4o3XP8c1wnptPeQZEG0eXlMx49+D5nFwcs5kcA5CyFm3t3OTkUZVeWnVIwbuMtp5AKg29v5xH9y3MUVZJ1GgmWYTMZRGjG6xKSFVlcw8sGBN5rfhOHIJ9SkFDlK8cniysMxmfs7ohv+fbzQ27ffESna5C44l5X/VNqFQNDll2lvka5ajAaLK/L7bZ3S6Bd0Gv1OOoLgrkwzONHY/qnQmGUShoaGYZuYhnCWW7ZLXZaNa6m5ziSb6xWzdjftRkLvc3J4Qm5ShPFMqnVhbJT8dna6DAdzyjnZXlPbPH2g/dx5GHiLHwwx4RximUJo6VS7DAcvUSLu6TBjphzO0JVE2YrIT/FYomriwWVSusaAvhq8BSCt9BzZeJUKMmj42fs327iy96i+cxFzy8ZTJ6gG2JtKrqN54Tcv/kOJ2dfAuCuMlI7xlBl6edMZevuTSx1xJEs/dx8cIPIn+N5Hs2a6FP8K798E8d/xrPn38hv6eIuZqiFc/J52SxrgDu5YLNzk/FKyPqX33xOp7nHcCb248UgYmad0ao22bv5ms05YDg45+TinG5FKLLb+/fpD07wPaGkLeq4ccjKOyMvD4XN1gZJmLFwROlZ5Cjs3bjJbJawWoh5aq3V+PTLT8iZKa5E0t+/1cPzZ4wk0ECz1sSwYibTi+s+LCsvZDIOXVpN8U6ea1CudsiZYs5zhkEcRRTtIifSUWx3WqymCe5CQ5W9BVEUsxyNsWXzLLpLEk4pWWWiUML7TnV6jSb1hk3/QBgI48krtrfvcCFr/WdxSCmnsxzoXHwjS13WN3H8M+rJLTIJ/VwqahwfPKHcFt9ycP4FncoPadffQTWFTKlalSRMCJKUd++KEoe/+7f+O27cvsCVhly9vk3Rfg/f/xH5mjD+g0jCnSvZNRQ7SKJUaSiuranU9jsMPhwSyV6fjIQUncmFx84viYCLplosptk1caypFkiUmKJd4sEDMf/9Ux+UPIahECODYYFKyTIolyX4QKKiJAVIQlIJMR66KT/3/buE/gJZXYcf+ZiaTqC48j4atWqVV8+v0C3xvCgOsAyTJPavexAb9TJP/QjXFzJl6RGJluKsMjpNIfvL5VMWi851r5GqqxiGSpRk1CvCGDg+fk7k3SNRY+JI6rNEYTFfsrst9MZidcnKTdDzGl7wmicph6ZqDOcDXOkoGvoN/CS9dhgUxURRIhQmDPqyfDJqs3R8Wrk6QSR7SiIDzx2z1hXlNoO0wOKZSyGn/SmghWbhLS+pP1SuyY6dIEY11WvgDV1XifyIrYbNwwfiXP3i60OWQYytK2iqdGBVi6vLIz7+E1Fquljk0VnHT+fXDljeVpgOXKptm9h7DSRTRNOVawoODI001qjWNV6dyFKe1T0wYvQkvSZlRYlIE4Pb90SAYL29x3z+BbqlCrASIAxT4tSjXMkYjcW59u2zEYmiE8XSsch8cmaNYKqjykb/+WLIwfkUjHVCiY6lZCtMzcCXpUrddhsynZevbMy8OI99LyaIliRpRNUSfzs9Pef8IiUvDego8skZBVRVw5Yl2z/8xX8PJc0DYxxJdpoviXLJ+JqD1Obv/J1/wN/8j3/w+g9oXZswnbKS4E6LMKK3t0lRz2NJcI5kknH//nc5P50yngg9eGNng8uzQyZjIXfrGy1cV0FRDSLZz71ebZOGMSevnpHJ8ylwDfY2Nzg4FaVOjUqbcrVI4IfUWuJ5paLOdPGcgpUjU2QbQG7GeLViRzrn5XaZn93fRLUPmZ+8LlFr43kZG+u7zJaCZiVNAtwgIZH6rt3soJkZzuiIhQTVsKwKuVqLiX/Ik2+EPbNWv0Gqx5gS3KGYt+h2Njm9/BZVlXQXc5Wryzl6zqEhKYm2d28Q5lVeyO9LgpBSPUSzoVsXzkAcFMmpChcnU3oSWEipGMRRke2W6Ou7XF1ydvqUbmefdlXSiTgevWqdpb5iNJX0E5u7fPnkUxIJUT8bHmOVCthqnoIQH0qllPe/9+ucnh9BXbyon5WpV7oocs8skxz33oVvvvmadlM4XDmzRa3e4MXxS3KyPFNNdO7ffoQmbbr+yYzpdIaGTqMmnI04XFKpFlCJKRVl6XPm8unHX3HzhgDVqpYu8cOQolFkc38HAF33uZzPyFZCpuv1PK4fU7R9lrH43kqrzehqStGs0pXw7Mp4AGt1Epk4mboTrPmcVMmhyP7KyA+JFYUZQxZ9IZ+GZrBwfZiLc8Cu2Pzc3f+EwPmKD/5AOND//q9/j48//hRntiAn5WUy0ahUdb56IZ0mo0hGzNxZMl6Jc6fa3eJyMKNZtslCMcf+co4/XTCS7SCZprDWqrNYZSyW4vt2bmyTZAt+5w8fM5dlstX6n+8+/YVwrtIUWk3RlHZ2dkKmZoyGC+Yyy7B/c53TkwsqNZNIIoqouodpGSwXQpiKhRopBuVKntCX9fiKSf2GxWzmU5KOzMvHY5J0QrUuDrhaaYP51EchIZRGROA1MLUiiqpRKonD/2JwzPgKbt8UmYhyTUSAnJVLrSw24mx5RJaO2bmxh+eKxQzDEDtXZjIW7+25R8wXA1rNDdLsNQnlhLXNDbLE4vBIKJ9ur4aCyYO7PwTg88++5HT0h5Sr59ckl7dvv8Xn3/weoNCQHCSe5+A4Ax7cFfXOCjl836NQtDk9FAasUz6HLMSRlmqjrbNYHBL6ButrIuM1nc6pVIpk6TrLSB6EQUAQHmFIslwja+B5A37uO7/CaHYEwNZeFcdNKCxD3KEQzE6zimY4OBJZDgWIdNa7N5lMhIE5/9pD00oszi6p1IXiVtHY7/wqL5+La6p1j5yts5p7dCWohp4lLGYjGq0mKEJr1SsFLo5PUXVxjeIWMCtFpsPo+gCv19roWo7VKqQo1891F5ycfU1Dohwpho/jjmg1e9RKsmFYy7i8itnbbzGV/Di1ao5SYZ/RTNSBD0dnpIlFp32bwVCs543NO9hqgSBRrqOKlg2eYzC6krwp1TrlapUgCFhJUsjFJKCUK9A/mVG0xSFgqRqGFfKaZjPUTFSqmIZPpyd+t1w6pHGA4zo8ffoHALz14Bcx9V0y2RycqRm1+hozt09ONr1OJyNq+R6T0YhIGsJ3HnyPZ6+O2K5Jfgc7wGfBcJGjJzOt56cj6rpBaipkCAfvvB8xn4bkZN+ZXSrgxSn7u5u8OBTycnGWQlpg/6Yw1pulGkvXoVWrcnYher48t8ut/S3mE49K7nUWc4lm5nBXkqB0p87Ku0JVXQxp/JiWOAxMTeXouThU681dTNOgoJTlO+Up2XWmswEm4m/3dt5nYPb57OvPuX9bZJcq1R7L1QRDl6SCUZ3NGxucXx6IkDzgEZIm0D/yubEp9lH/PCZausSyMbaiGYRxQLFoEGdinsaTkFJuEy8IGQyFoVjIF8nnY3xpUNv2Pqb2CC
NvkkpOG8ed43srtm8+5B/9vf9NfJ/5lDu3b5EZQvazdMXzb84pFvNoktRbNzSyTDhWr3vWQEVRNGIZua7aebx+iDJTMRUhr1m4Rqwn6JGHqYqD7/TsisCP0QxJlhup6JqBtxxeZ7PdVY040UR/qyRzjaIcsTIhDlpSXhMMpUQSR+h5IYtKFlEqZoxHc6L4NWG3SpaBIQEt4nCOaRbJ50ssL4WBUqqUyeIMz/O4ufaa4NlgOr1Ck5mzOI5RjTymaaFmYj9US21OldK1w6mpkGYqcehzdirAHFQtJVN0lBSSWDjeWZxRsG0CX0aIu9/HME5J0vT6Xpmi4C5XdLo2nnTwojiHqv9rTkWm4ixX3NnvgQzAOF6MajnEcZHXVfyeF2LnPLJE6MnhIMLOFzGUBEWSO4dBiGUplMo6oUSAdL0QQwNN6knDNAnCGbf327x6JtBCF8sMM28QpyHI/pgwjKiWYipV8fzRtIRGBJlPrMpsaKaghh65gosjg5uKaqLqGabyp4Qwi1VEq2Dz3ffF+fT4qw7HgyWZp2JZEvk3TTEMDaSxPB5aKIqNpoO7kISkhs18POXGfoW27Ct/+c+PUNWH1zaCbigkWYySqbz7tgg+VCtl+pdzWr0evgRcyrSUGJ1SUQZ4Vx6rBVi5Iol0TfP5Anlb4fRoyp4M8P7ar73PV1/9iCCRTlPBJlNTBoMB770rAEk6vXUuzwfoRpGiPI/iaEroJ9fZwj/8ye/RP3/FT367Bv+D+JbO+m0uzk+xVIly6qoMA5OVPqMiUZRTGxZ+gF4wubshjONy0eLk8Ih1yU25Gl9RyNVIbA1N6jeYMJlfsX9rl/lcBJRMVaHZbLGcChvBVOakTsLwfMx33vslMZ+5OQdfx+zuF/AlB9n5aUr3Vnbdt9zbeQDhhCitUq2JdXh29JS9nV1eHXxOFop3r9Y7ZMkSXYJsqXmD2B2wVEvcuSv27LOnCxLVoFVukr8vebX0Fl6cEHpCDg7PnxHFRbZ2HnJ2LuTz9OiU7d4esRKB7M0MnDGXpyf06uKsXwUjvNWSd/ZLfPt/C+CNt7//l7k0puTMGldXRwAY1SY4Ho7U+2pY5eF9C9tcZ21NBHjPn50zuVyRVS+4OhXfbDld1KzBjqzMefUqwKrbBOGINJMgN5nF+eUlF5djqpYQ4sAIyLSMwVDosqG74AcPv8fGRky9JNa0379kvoLeWovlXOiutd5d1EylfyJ57go6x8fPaNb2qZbFuX3/0Toff/Ih81mfVBH689H9d4m0Vwym4ndeEKNZC9K4xMIV+ixSMrxQoygz7LPBghvbW1QNF1fyx54dzqm1CozGQ3IN2fvmzil0CwxlL/oySblR7fDy+IKcKYmp7QL5ZovV5BhvJc6GuT/C9FMMReyPi6OEp588pVyOKDeEU3h8fsDc9dnc3+H5leiDvnfLYDp8jt6UoFBWA1XV8GYV3v2uSHbMH3/BoRdx5RXZkNVsK9+nXKvjecIOS9McSbOKVnS5uS70VKtV59MPfp9H+13smrB/L+bX0ZF/4/gL4lyphNKIuHl7g+FVDk01WZcpxyT1KBWr2PY6l1fS+ehU2dzcRFXERF5cXLBwIoGcgxAmTQ8oForYxhojKaw3tncZTl6Rs8RGXEwdCgUTVbMF4S8wnozI2yX29+6wXAoH5OaNe8xnDqkEW+ifHeO5CXduPWAmketMXSNOM9xVRkWmddPMYTCcsLEmMmUnpwf01nv4XoyzEp7yrf3vMJ2PUI0J3Q0h9IvZFMdZ8k/+2f8MQBh5/PwvfI/+2YKroUCzm877vPMzW3z9+RDNFEqkU9nm/PwUV0ZMAzeiUFDI2za5goQv9pc0Wz0OjyWa2mSBZdmkmc9UbtZatU3/bEzOTlEyMS/7u++QZCvGA7HJhpPPWF9f57J/xdm5BJMIpph6i1pFpyTR+mqVNb58/Mfcu3tH/u6KdmcTzw3Z2hSOjG6tWAYHDC41FkPxO2cVcvzy/6VRE9EZk3fJAh9DLTFdiEhWt1tifBSxihdYRSHOR8cT2r0cQSw2S2/tFvlKgdn8grUN4aBoao4Pf/oJb73z6BpAI9GOKZSa9DrinQ5OP6dQrKIZIUEk5Gc6m1HMtQhdnUpFzMvzo1NmhkVZrrmhzPCDEYvRAaaE67w4PYHkJY8e/iKzsTiwLWPB8YsR9ZpwAhTNxXF0JqM5lmx+NgyDYlklTiKq5S0pn32CYIwvSX09b8Rnn15RbeTp9IRhOhlNKeaLhNGYMBKK89vHH6AbKbYtS8EUi3K5jjfTCGQgI28rGJbJ7OrkGrWt03mHyeJLGhL5aLZyaK53WWgJ52NZymO0ieIj2oU19Lw0rvQId2qih+J386sRCREvn6xor4s59idLbDtiOhTyGscLpuMhrtWmJ5vQ53OXqTNnZ+0egQQR8PwcuqGQyUNe1VJMtcxq5bDREc8LfAkTnevg5MQ7HZ+8pLtR5mQsjNfcqk6rUWZ4mdCRZKTPvx1iqi4Pbt9nOhN7LXBVUAxCWX6XhiF6r8b5yYRbd0VgyFM8Dp58TbW+TWdDGFK93jqF3CaeDNyc9Z9Trio0yptUTTkH0Qo0myi9oC7Lnm3LAN3neCAOgG7rVyiWyyh6ykJ+VxQk7K7v8/f/9t9lvyf01MNHtzi8gGpXEmEPnnF8kqdcKxPI8lPLzJHEEUkUo0knKUMQs0aRuKZQqrLZbHF88JNrA8iNZ5hZjlUc0+yJtZnOLnBXK+yCOFBD1cdbhuTMgP0dYcicH7tkCiRRgG7JgICSkM/HzKQDlmQ5dMUlS1P8QKxppkZUqyknz2Jimc3STAXX8a9LvzTdYzSeMxrUKVXKUoZCstCAVCGIRcP+k691LH2H8PV90gIaNoE7I0Vc4y3rOE5GqSgdxTDANirMp5f8B39JGEkaES/ORxTsOgU5L3HsMxk7PLq7I+Tnm0MB2a8qZNprEAqFmRPw8GGVnESqzVINXVcIXLFW+YKN547odS1KJUkiPnFQCw0cb4RpvQamKWLEMZsdUdXw6SdnoPukqUEqQT1UDeJkRanS5fBQrGmmFYmCmJwENomyFE1NMBSHJBayryoacRJhGmBK4uT5fMFut8noVBjew6lBs2wTxzFJKJ6nGDZ+7KHnfeKlDI5ZGUmSEKZCR2iKipoo9Dp59FTso4v+CD1fhUghkMGcJE7R04TvyGzaBx++JKYImXpd0gwQ+RH7N/ZpV8X36MYGYZCRl86H7zpYuRRV9wjjI/Etk210tYDvrsjkfOoFhcD1qUi9FSQ+abJOQkIos5OGbhCEoOsms6VwJP74w4TlykSRwaMwjDHNHI674NEjQcVydXVFmETYukGkiDPSyBcJUx/TFnvmH/3j/5N8UefrJ0+vv+1s9C1qvkbVFPtlZ7vHydRnvpgROsJWWsQxSeBjaFVUCfRh6iXsQo68rGc0DYvZ9IrLC4V92Yg/H6d0NjpM/TE3bwnj8fPPP6Tje3z3tpCpl88PBCplzuHglQhMqWaZztYGWBaX58IZf/etn+FF/zNCCSIwmcwgGJJvqNdothuN24wGfeIM1
m8WpUxN0HJVSgUZJPEC6s0N2jsl/IU4a3dvl7i6OqVWsKlJAKLT80tOTs7odcQ5XuvYLIILXh75VGQAdGevyvDqOXquwMQRumQ7X2WtPMKXAZBSMU/banH04y8pReJbnnzyzyi9812MWGE2ErKOvySYjal3xbxstm5Trxqc9R0mY2G/Ld0hlUKPfK7FLDiUshDR6zR58lhAnL98fMLt977LzmaZx48l8jBVFstjOvUOs4n45kLeomQVqO+JoF7+7JgPfusPeP+H36d/LILc/ZNL9KKJ4/qs9USwP0lmmNYabbnGgb9ga/s2apKn1RFz/vHnPyVVcmimSiADPJ8//jE5o8xoJOzqd976AWkmbKXVXALmxBf4yw6774ry/o8+/QSzsEQNLTRdvHe3qjGbOShmwrEEQEr8FU8/fkJDgtcsZjGjSQ6jmGdrU8zn0y+/RLPOiVYBOxL1b3o5Y32nxtGZKH+NHJu1tQZWPo8uyy7PT68oFqos3YR3vvPzYt1vNNje2GS0EOv5xRevqJdLmHWL01NJiVNRqRdhvtBR5Lx4zoKBf8Z8KvTPg2YTw5wzH4KaCNl/8dkSq6zg6DaOLAuJX1NM/BnjDaDFm/FmvBlvxpvxZrwZb8ab8Wa8GW/Gv4Pxb81cKYqyCfzvQAcR4Pxfsiz7+4qi/PfAfw4M5aX/bZZlvyl/898A/ymihfO/zLLst/68Z6iKynL1uvxFod4oMp24eJ4sjXAqZW7UAAAgAElEQVRnlEplyqXGNefSaulx+HKK44kMRr1eZzr20UwHRUZM2s0bELfIF1KCC/E7L3QoFmq0miIN6xZc7LzFZOQQyBpMJVmweaNJGC5xHPEORatCs7TNeCoiE83yGknJ5KJ/xGAgyhfeevsei2UH30vRVQm9vkyxjDqHR+I9O50mgR9jaDrLhYiKqUaf5WqBZRaxTQkRHVxSKKnc2BPRgpfPBgyHY1AjSmWR6TjpH2A9zuGFcy4lyWarkeIFPk+/lSUyNZ+DoyU3djdBE3M8HGWcnH+JZomI4uXJBEurE0cqmi6iMbVGHVXJEScuW5si6xb6OklWJAjFNevdu8S+wsn0glZbiNJybtBpbJBoY1apJI9zi9y+s4Wmiujaje1H/OEHv0mntc9y+TrSmhBmVdbWehw+Ee9eLpus9R5Sl0AcfrAkXyxwcnbK+oaI+k+Gc7Z3bjJfeQSy1r67m6NQ1hhORYSh0bMIw4BmuczBiSjd63Z63Njrcnz8Ia2WiMK78zn56pyB/F0+n2c2mzCafsFWR5R5VApd+uevGE8GKJI/6u697/Hq+DnPZa3vo7sPceYFeq0uw5WIbqwWr3j7wQ948fIJrwMeqpWxvd1hciWja1vbvHj5mG63SyI5kDrtCpP5gGqpxvGJSK83yhlJVMKXEZRGSSUIPbJohhaLaOh6u8rhwSm1aovCnsgEzmdL7LxBTpbLFfIGSujTawTXmdy506Z/cczbt/eYHIts0rD/Ee+9/QOG34r0++ZeizAoMx98TKUmok1aLYeqtchlCp4syz05GtOub9NrivmNMwsnnLEMc8xGsnkq8VnMZzhzsS57NzbptbdwvRQjE3tof2uPp6+ekMYe21si2nR8NOVyMKYsSyXchUq7uc1qJ6BYEZlrZy7k6HT0FaYsQ2h262R46KaI8IXAi9PHGEqNliH2mscJK2fFTvsu3kyWjHpDUu0UdynWvGJvs3CPaLUanJ3JUr6Cxf7ddzi9fEUuL7KMhfKK6WDKoC/eZWOjCVkOJcywc5LENNIJEofnh19RltFs27AYDWwK/Bog+se8yEdLbBJZ577eWeNHP/oNLD7j0Xf+MgDLpcGdW/DFZx8C8M47v8Yf//gDdFMnkRlay3gNI+H/adkaGVmWXGeubt/sspo6LFcBmiYiwloUE3kRxZJBXfIGPXvu4fs+yPKpTFkRxinVXEggdWcUakSxi21pJKkEU3FX3Pv+XZyl1MvLHKV6BT8ZkPKauH3FZO7grjRkBRdWloGqEAWyb8mMefutO/zoR4dEEs5YU2Lyik4c+nRbYk2X8wMyVQHZp2hoObJ0xWo+5rvvC1LmQF/iuSF27jWZa0oYJNimRVvCCR88u8AwKsRZiJKI59k5C00JuHdP6KSnT38bhZuYVoNQ9ggFgUcaq+ztb3AiKRzCOMZUNUxZLpakPpnioml5Zlci8xAHBkoOjCTj9Vu5fsBO12A4EHp4Pg1JjRxxpAjuLoA0o1LKczkcMZxIYmitialHxJLwOTMVXG9Bmii4S5nNiqBYMohCB03234VJTLNepaCJ3x2NC4SqR5p55CXvVByGpNYCx1dYueJ5iSrIdzMJoKHpGkkyx3Of8vILscZl+12GkYtuAvL+kRtTzhfYksBN/yrwSBhh6uZ1qW+apqhqiG2qpEuxRxeTb9Ct1TVMv6qbKORJE43eWl5eE7Gah9QaBqEUqsDI8MMQwxDR7Zt37/PxH6U4TkChLL4lcFOyzCTTZvzsD0VFzVef/z5u8JCi7EGOoojxYMTdu7evQYQmkwnlch0/dihoQi/5zpz1dpUvPxcZjD/6yW+zvbnN5fklvlzjklXDCWKGjiSOZoJu2hi5Ct2akDv96JjOnTWODq8Y9IWuzhk3yOVzxLLl4Gw4pVKtsdHUmV8K2ei2u0ydAUrOQM/E2VowqpwcPyPbEv++Upbk4zxGeZv+QACwdHu7kOgMr1z27oky7pcHn2CrOR78rMjsPnt+zP79d/nyiw/QVGFjbd6qwsjF1C0yX1bmjA5RTI+yLHXv7dZxv1U5/eZz5pq4ZntnH109wSpW6Z8J+6Zg19i5OWQhKybKlS6BsSJMrvj6pbDNbm+/S2ujTDi22bsjsi+r6QW1ap2rSHInVar47grXrlJ4+B8CUF13cUcucTAnXxBnlmGv88GTA4KvfweAvbsLHu7eolIuYEtMAAKXQOuTxV3eevA98Z41hfnUZ6sh1rx2K+DJk4/Ib+3SqQodn8ubRMMOhDG5/Ov+d43j0zN0Q8hUp7nGzq+UOT58gS05AG7sNKnVy0QJeL6w6XrrZT794id0GyLjZZgKKQn1Vp5vn4lz+/JqxsbGLaxCQEPav7oZELgKwUro+IPn5xSKJvliG0WXFDx0aTZrvDgV2a3NnW0WXkgaB/SK4jx2koye3mHCOZOF7GU0oJNvkMZizsuFiMPzz2i3usw9sdfyuQaGMmZ7o4UjYfnbnYzEneDGElCrV8ENwNdXhIHI3qVOnVLzJU8fr7h/T5T8fvnZCc1yjkJLnO2lQge7pJPmcrQLQk+Z6ITzK3BHzDxxhkR2iKkbbG5LIK6iwSBYcnh8TlUCZu3fuMXV6BUlPYdeEGu11RNz/WeN/z9lgTHwX2dZ9pmiKCXgU0VRfkf+v7+XZdn/+K9frCjKPeBvAPeBNeB3FUW5lb1mp/w3vYShEb82OFMTd7UiclXWZZlO4NWZT13MdAbycNZQyOfOqMo68EQfkDOqVAoNPEUIXP/8Fb0NyKwKliZx7scuCRHduuScKBQ4Ojyn276BL3kndBXGlyPQIEUoc4wO
g9kLglAoMbQSw+EZVt6mtyYUTb8/w7BSKtUGI4ny5fsuqhrSaIimV8MqouoK7WaH5UJcM50c0O7WMY1tRlOR0px7x3TNe5wfSxQuO0VXcizcKaYsFbi9f5vlLEXBxJE9OtPBEbOpQ6Eo+49aGyhZgcuziIJkUVc1kyzJUcoLA+no1QmmAbXKOpVqTq5DmfFszN2bP8vhS2H4tlo1nj4+ZXdfKJ7I05mvLoGU8Uz42J32BmQKBWuNgeT/MCsGVwOuyVYtt0Sluk9mTQhl/bFtlkhdWK4mtBqybEZxWS4cDE2iM5UiRqMBmppydSEcN9OyOT1/yvr2HWyJEhm5lzgrnUwSCCa+w9nxkJt3OuRlz87ZybfsbrxLQcuTyWbHdsFgsvTRpUypXsTy6AW7N9+jUxKG03xyTLpyqTUbPHxHNCA/fv6CLE5Zb4taXG+VMBqvWIaHFGzxTmutdwidGpbWZ3NblpF5DleXh6x1hPx4wZQMFZ+AQG6Xk8df0ij0cKYjNiVXQ/9iiF1IUKUxgm4QxwbeZI6VCWNVzTtsNPMEoXqNzKNrGbPlFdsNcaCNRmOS9JiafYPhSjrCSg4lVRjOICcZyy/7J6SXr6hKB3o+WTFYXNAobTPzRYlMhT0G8wFTNWBjTRgfdqmCUfFZKn25fg2uXnhsrW1gLoSiztstpsGctx6+D4DiGBQrVb56+SHupfg+xVDZ3mvz9OtXZIrYR+eTKXY+j6EKx9GfR3jmAXGgs7wSjowpgUrUwEaTZLKNaovzuUbzdU/CKGCWRVSLFpYvyxeyCMNoEU4TZpLcsVQKyNnrtMvi+fNFzNODr9CUMpWqUNwHZ4fc6BZZ22oROJIAuVpC1RfcuSvmPJz5BKlLqZhHV8QB51gzXhz8Id+7/R62LHs4GegcDaoUyxI572xAqVggVSbXfZG/+U9/i8HRP+Nv/mc/x/ETAapx/zv3WIzOkS1XXAUO3kojZ2oQy5K4Ug7F1XGcFa/hDuJUHMhZKvRGrw6ZmrByDHJl2SeVRGR+jlbJQssk71RYJI77OJ4EbrDaBP6CarfKaiX+Nh7kyOXzBGkMSOS4LKakzXAD8XzNjljhoGopumxQjpgzD8rMZgmK3EeKH2JYBgsJtrJeMvGdCF+zsGS/EX6Gmw9J4oyNdRnkCrd4eZ5gSK62IFPI2QZ6ZpLI5vGcsoFpBySSPFPxFRI9IjF8XNmLMndClNgnIrkur/PDhHajzbYsR33+dY3Q0lHiGCsTe89Pbeplhyg8wBkLmdXNHEkSoMhysUSro2Yuy9kzrJrYQ5FqUCYjTDJUQ8hUEiWYms/FpShnXER5dC1GixWUvLjXdOBze7tGrzrl6EDMeRAHqKRIdUA4V+g0KlipwwfPhQGU5R4SuQsSy7pGriSNKOdzyAorvCQlnyrEUZ5I9vGlqcV6XiM3uSRMxfmgKglxGKJL3skkVgW4zFqJxaU4r0ajmCw1iYwFpi/BIzINrXCOMxPG1Xg1xbQqaFFMakmeq1VKwdDYXS/w9EgQ0XrhCs3aRJElT7oZ43gDNpt5nP6xlEUf08iItfTa6VMTi5U74d6ueO90GTA8V4iNGnEoZD8jI0pVcprFXlvoqlVP59PPXpLPZG8vOqvVnF5ng8VcfJ+iaIShSy5nk0qOTC1TaFbb/MN/8F+Jb0kV/CglfQ0gAtTMMheTFT+4K4OrJ1+hpzpheEl/JeZFLxU47s+IVJ2c1BNRdsXjz4es7YgzxS4XSbI5vmtz97ZwVkfTgPOzEwpVG+dEOE6d9TqJ4fOHH4newv21NVBDqrUiqCK4aig52vUdhosjsuh14LKBbdtcygDz9k6Jo9MJ+VaDZ4ciGDh6PGd7Y5fQ83CmQk/d2t7gp8c/xZ6IOW84Fu7ljErlAdWWuNfC79NuNwiWPlEo+U3DNTqd79LYkg60EbNYzZjOYnISDVW1Y86OHSpFg3xTBEW7ayGnZ19SyUsgMy+hVeqy+Z0bzCQ3XJSEnF3MuP/gEUfHXwFQVcfstd+htJ3KaxYYSUjswOlK6Amz+YBf/vn/iJ/8yf/KM2m/FUYWRt6AlZynVp29gs/Lo3PuPRR2QrBcsrdb5fBqyWQi+7cTWMQRW1VpU54eoJVbKGgMJOfh7k6XwXjActpnc0MEhgbDKdVSnXxB2FNoIdPpHMVNURxZol5t0KrUyOdr9KW91n+2ZOtm/dreKJQSXgxeoJ+UefCWKN1bnAU0OxaqDF4vl0O8wGU0H5OawqneqNe5cAZMRyt60qF0MfFij1gCojW7uwwuBixGPllO/C2gwVapx+xsRq4l3n3pphSKLe7IAPp4Maa6ruCvEmxpLy6jI54/W5Ar5zk8FkGKKEqYBZvEl0KX1WolDk+H7O1sM7wSstHr5Fm5V9Srm5hF8Z6Hr75mvVVnJfEbri6+5m6vyc72tgA1A9RCirqscmv3BqOZWOMsk8T2f8b4tzpXWZZdABfyv5eKojwB1v+cn/w14P/IsiwADhVFeQl8D/jwz3kIyEPo6PCCu3fXaTbs60b/UslmPPH56LOv+f4vCNSWxm2Lq/4Z+ZyIWiXKJonxgsHSIQ5lbbNuoSoGj794fl2jmwUJUeRwNRWbtVbtsLnTw1mlnF8JBby2VoNEBwK6HXH/0XhAEsdUa/JA9R2azS6mqbL0RZ9Sq9Pl+PCS6XTI+rpsnFxaLJYnXF2K59XbCZoZMxqMcWT9bxJoTKcqcXxFvSF7Luw7jId9NtZEn1KGhbsKUNIKWSCU69fffkS1lme+mpH6Yimz1GRwkfH+z4n7PP1myo0bN2h3SgyHQhhW04jx8iWrlfDK29W3cd0VN9Z/lm++/RSAbqdCu95iOl3SqAlDdD4bY9vadabu0f0fEh+lqMoZW+u/AIDjrDg4/pq3Hn2HYk6s6Ww0Jp8rEPsS2VHxqRYqRKlLvSa+5eDFlFwBspzOeCLeM0lX1FsVFo5Yz8uhQ76YMRmlbO2K382mPqqVkqlzqkVxePSnEXHi0KpJtKm1Hr5TZTUJr2HCd9bexnXmGGoNTXmN3tVkveCSvY6uh0u+8/AdyvUeH38uDp23HjzEyoEflfjiCzFXl9MBlWqenGyyP+8fUGsVCNMZi5VQkqbuUK6OsIoGU0lsqKBRrusCGQcIxx57vTXGowG+jOjf7m7y6vAlN261uZIZtUKhRkZITkZj0jRi5Y9p1fdAgg/4UYRf6DFyLq6jYrodsdu99ZqHE1NJQKnRKhdYeGLu5u6M9bU8qRtTr0vDtFDn7LLP3dvCqD99fkqESmWzSewKpTW+nFKrNzE0i1Bm61rdOlaxSf/yYwC2zQau0ydTu3S2RdRndBazu1lnMpRIQYpO/8UZeavCnftC9r/44gs65Qoba11iXxhEW4UN9u7v8M0LURNd6RX5+vELbuzu484lEa40Vh69/T6xRAJbRgGsLlEcoVh1Y8bDtZ/HyuUZXwqlWSq2aa93iKZjWpIgeLk4JVwW0Q2RLdDzGqWqwdVpSsMQuqWzYTG4OiMXqdxpCAd6fLii2coIZULh6cUJ6+s1JsMpy1QY7JkV8jP736X
b6vLJt+LdPztQKNsm00DshdCDwaLP1tpt/uk//A1xM/tjfvWvbvBP/vH/w1/7678u5s+PmJ3MuNUQkcgf//6neKqFnk8wpeOUEWOaKqqqoSLRAtUMhRTLFHu2VI/44ptvccOE/J8y6BL5CkY1ptUSMjWZXhFGhiSIBZQpvpNRLlfI5cU+ylk6XpihoIJEbcvnbH7uF/4Sv/F/iWMhiQPUTEdXKsyl7K93bXY6a3zyR2ekiiR4NRTCOCGQ69m+Ucf3QwIvwjZkUCaLUTIThZii7F26PPZIEhNTOq9kCo7v0GjZ3H0o1u93f/+MyM9RlMiAqqng+SEaGaahyXtb+KFFqZjiywytpmZMhsc8eSacSdetQmaRpB6ZhC+eTubsV6tstYu80sX+iJIQk/QaoTEMV+hKylv33+Xpqeyri+YoSkYYReimjMKv5uzd6lx/i+sEFCwNXU0IpWMauAndjYCcVSIJxXW5nAWxS5JIqPlUxXNmlMsNDKmrs1UElkaWJWgyK5WEHsvFgCtZZaCpXVQVFBLQxL3cVUjmLcEwUVOhm0NFQdVVMulcRW5ArWyzt9lhIvudfvp0hmXniPwQTRfyGYUZvc0Gim5L2U8xdYijECR6XhxH1Bp1eltNfvIHIjKfZCV07U/RJkktlMwjizPefVsEwn7843PcGKpxhqq8hsReoWYG1aIwQkt2l0ztkyQucSR7yvSY2IspFjxOzz8AYDAISFGus73z2Yhut0ulWqXfF3u71WpQMPMEUYhhiu9pNZp8/tln/O7v/ra8pkUQeJTLRRyE/r5yYyL/nC8PxHrWcjb19SoHH39NMy/noGwyOx7QatQ4lXDbjdYDSs2A0UAirTZq6HYdZTbiG0m2ato61bxNodKisyUc/Q8++SmYCRtdoRfXt7ukvoHnuNy9/1CuS4ptWWh6jbOBMM57vU28lUNOOr1Pnx1zdHTJ5s0ua+s7Ql7mNskqIZcvUuuIPTroX/Ko9328ubjPy4MZ93/4iNFwiWUIh2s2vCQ2bXJGHiWSoDPVBMcbY5oSifgiwrRt7t3p4TlirYbDQ+qNMtVqiSdHYq0uDl9SK7SwG7J/uxRzcDzi4YObjPpC71t2ynvfeUSShSxcEWiv15vc+45OEsq+6LhDeXOdwfA5V8+Fo+is8mzsfZf27k0Of0sQ0Zr7BTJdu3aybc3Ap0W5M2Mse+YCT2eeXhHOS9za68rvOUefFAgkPHyzs8HBySsadh1N6s6D0wvceEGlXebJYxGkVLMijVqDw2MR7CyWmqAXiTWVQBf6VDdaZDmNSE3QpTNVKabMxn0qZbFny9Uil9OE23vrjOfCzqtZJlEW4IVCpg5eHrF78xbrzTKq1F2zyMMjYL29TpyJ4KY3iKh2y4S+cNLm0wBDz1MvmEwlBkK+WUbJlVHyBk4iglyxVsAqFnj+7FvxLfke3tTBX2nolpjzk5NLutsVNKVNTvaQ//SnH9N41KZaFXbLdORTtFWIIwZXon+r1XvATqtMmqtzeCB6HDdrNaJkRiidq3opzyhYUsKk1BDnzgeffMTb7+3y5at/QachqFf+P/beK9ayLL3v++2898k53HNzxa7q6q6OM8OeGWoYRA4lyKRhGLZAWoIN24BlWHzxi54tgIBhPdqwbFpw4IAQh5QtSxbDkBxO7JmOVd3VFW9OJ+ew8/bDWnWHBDT06MEAbdR6qnvqnB3WWt+3vvj/P7ddf9z4NwK0UBRlG3gN+AHwDvCfK4ryHwDvI7JbI4Tj9e6f+9kpf7kzhu9HZGVqfTVPEYcOy6BLPi8EwbJMzk4vuHqtxdmZSP2SFOic1jAtIWTt/glaZkY2rzMfysMk5bC//xnrzW1KNXEtO22gqjEHZ6JMbzhaUCzWMeyQzW3hKedSDsNRj9nUJUrEApTzW5y3Lwg8CX1JlkTxUTUTFaE02+1z8rks4+mIflcoO3eVUChl6XTE/c4/2efWKy9x3h1QqwvFNvNdeqNDKqUtliuxmLaZYzldsJKgCZ4XkLKr6OqU/lAoms2NK0wnK65tb2FI6O6DvXPu/Ow7aLo46CvlY5aLMb5rM+5JA3Ma4q92WCGuYxltPv+5d3j84AQ9kYhCyRQooagu6bTYrJpmcHL0kNffFI3Ge3ufgLrCTJd5eioUd6NRo75R5qx3hC6jzQQZzi6e0JSwToo+IY5nWNQZj4SQ3brVojvoUS7lsHWxLfvDI1TSrLznRkyaWnmNjUYVPxDRptPxQ5x8DGGa46fCOTbTK/IZgzgSzvnHD05IGSlKuTrZlETc04uUdqpMBjNOz0WEbXf3VRajJYEEryiWsoRuhtHQp7UhuRqCJa4XYNo65zJNXm5m6Q+O0Cvi2rqRYjBcsrZZuOQImbsrHj57hG3ozGXEq1ioECUBioQExtZwtQVOpgQymt3rd8ikixwcXpDJi0MnrRv4UXwZOVksbXzfJExPKcjMyt7ZCYG2h2q7OBnhFE3GYw4mBzjykC8X6yhKxN7kPn/0r4RSHo19vvqLr7BWrPCD94XjUmleIZPT2T8XGUzb1hj124xmoJhCtBeLGYqmYTgFVr5Yv5W3wu8/w0gJeXx69DGZfJ7z/iG5lXimi94pK8/CW4jDpLVeIpNPs1rCyYHkMpsPKGWLuPMlliaCBqPlgvtPemQt4fir/oirV5oMXRNLlk9caQkHZ//okKwtFGG93kBbrZGqCoW8GDbw3IBw3saUXme+kKV71sUwfHK2OPT2PnvKtV0V2xCyMJ8rqJ7CX//yL/Gv/uDrgOBJqmQzaPmA7kToqcQ2COLCZZZja3MNDY+l5lLPiWuPvANmsxn37iW0FTEviaOhKiqq5Gpz7YRGYZP/4+v/lJc3xP78d3/xDU47Pn/7b/4SkUQQHIwO2HzpGnoiI27qDE3XMT2fRPLqqHGMrisQJ5ecTgkqoRchbV7KtSqdQUAYHRNFz+VPxVu5bLQa5LLCaDg6viBRssSyoT1JFDx/wXrrKr4EXFm6PdLZJn54cdnUH4Zj7j/4FgtPcqcoDmHoYhrK5bWqpQa1fBbDSRHLAMvKj9Edk9VCZvRNkZ1NQpUkEe9i2AYLNyCbstBlxUIYFvD8BQV5XgRBxGzq0iw4lKQjOl7sYRk5pB9HREgYKpiJ4AoEuOgOsDJpFpMAOyMMRW+h0yg7bG+LiPS3v3dAYuTAhCiWZYiKhuWAN3Lp9MWaKmaeKAixzOfAGAsa1QLuMuD0YC7fLy3K+DQIJNJiPl1gd2ud+x89k3vRxcrZxKF/ibSYhEM0bcxwWGK5kAAP+pKUaZDJiznonU9plFJsb+/Cgwv5DD4LNUJTYC7LSHOWye2XCrR/IN4ljAOSGJIkYemLvTGdzPjqz3+BcPqAvadyTY2EKE7QJCDSYragaKssJks++lDcz3Fuo2ohup4llnj7Kz+gVs1wcCafybfIZixmc5eVdOJVRSdOXI4vHrOQJaJe5BNFIisKoOoW7nDF61+5gyV5kR4+OcBMp4nigDgSaxoHFlE840tfegmA44NPGfQT7MoC15MZ0zBLEAzYKF/jl37uvwDgO9/5Lw
mJLhE2fd+nXq8xnY1BwqUvFgsURRE0K66Yv1yrwde+9jWmkrOvsdZiuZyTy/3IWDMCn/VyCtMWz53Lp/nm977HRr7O8EzIf2UnB5HK8CLmmtx7B/snFIot9Eg4d3nHxkonrCYJ6YpwnMoFi/7xCZGf8OmnIuqvRQnVQotIETr37GRAq3kTlAn374vvbO3cZDI7wQgC7twSGaCP732CYlhoKclX5yr81E+/w6NPPmCrIZ5poK/Q1ZjZQGNn42UAJvqUomYzknrfqJv0egPuffqYak2WRqcClkudVFFlU4IdDCdTZrM+tgRbmc88tsvrHDx9TOxLTrtGFsuGSSckMYSdcPet15j1R3Qlv5MVGPiuy3jiUiqKsz2OY3KZGhe9Z7wpy6w9z8MNx4ShzKZrCcZEVFYp0ohPF+fsP/sj9EQh25DZM83AnYek5PqdXgxQjABIGHalfGgh/mBEySleBt+8CGazGY7kq8oUdDKxxuHFAbmsWD8rbVCytphMl5f8ppnUnLPTJRUJaHF6fsw7X/o5Br19FIl06ORs2r0BG2t58jL4V6rMOe12MRC/CyKFGzdvkdEMWuui3G4+bvP4k6eU6kJP3nzpDrYRolo2j/aFTZAabPPyKxWO7h0RS0CZtWaJFQopCfgSrNKsb26Tcww+6AnZ9hYTxuOQ/rRN1hDrMJv0WM2H5NIbcl3msNTJWnnydWGfqnqZXC7HbDQHyW9488YVQtdGotNjKLC2VuPg4AE5STUzH0wJlRXLzh6GDNQWWxkmU5eNjNCLmVqF0+Gc4XyO7wqdm06nGYxXDKcxni/O9mdPf4SC+q8bPzGghaIoGeB3gV9PkmQK/HfAFeAuIrP13/yk15LX+08URXlfUZT35xKC8cV4MV6MF+PFeDFejBfjxXgxXowX4/+r4yfKXCmiaeN3gd9KkuT3AJJEkrSI//8fgH8h/zwDNv7cz9flZ39hJEnyj4F/DLB7pZncuLkNwN6TDhfdE1prDSIZwTw+PiFrN3rpshAAACAASURBVPji517is0cihbty+3jKklJZRAF28jmODrpU6y1UX0R2lr5HtZhHw78kZR2Ph9QqVWo1ESGeTZacdB4yHffZbIlSpc3aywQrl7VmndFIRBn9cIXnBsxl02uSKOhamuF4QFU2Ox8+OSfthLTWXmc4EBES35vT7XgUs3JKogmffNTGdODpUKQ4m+smzcIdprMO+7LxfWt9i+m0zemJ5J3J19g/beNFYzIZERmwjCJf+fLb7B/e59nTh3JiLVKOgSYj/KrSo33q4ehpanUJlzo4Zmf7Gl4gYJ6zeZP+IGBn+xqJJiJX9do6YQKK6vP0qchKffUX/ybz5YIgEe+WymnMFytazSajkexl8CZMpm0MNY0l+WVtO02lmMEuSNI2NyCTK+KuhhRLYl3m/gA/CLCMdUiL75UqrzOfxhTzsik87KFraRbzAU8l99XmTpWL9pT69TLr4nUYLhaUihbIaJOdrdI/mzMYHJGRMMuObTDsG4xGM7Z2RNlDt98h0ldEsg8k1rKs/DHrtRK2KkpGuqMuGdPgYnRGIjkecrkcw+EYxxRZIx1IVkOm45BI8tWslnNspUihXMF3xRy7gUu3s+DaNZHxGnUWpNarPD15SCyzile236LdOWXNbtIoiqjfaPUIA5PQE2scREsWvSVWo4GhiehnNlNjNj7H0tPMZRQ1CEekMyaOIyJgp8N9dCNAXTR4/RVJtppvoJoJhcoWt9/4Jbn3rmI7WR4+EiUWB0//kFtXXsbzFCZj2ZiacnEMg5QGEwlXHqwi3NURlikybprmE6Oja3OOjgT4R6HUoDc8IyOhgweDBFW3IHSZ9kUIaq35EtPRGWEGUlkJfKMMeXB8yu66gDzeLu7gzQbYzPnCG28AcHwi9q3iRmzelDx6ex0UrcpQZpLjoIFTtlGDNJGMwne7h8w6Pq3yVWJTlCpttl5lGTxk5YrIpKE3WAUxJ919FJnuWdk6GaOAFi6JJHltbTfD04eP0SXHTKlZxF0YmCuVUlWUGAbBOt/ZC5lETdKyJ8lxFvgzwcMFsHBs/vB3/hfeaq34+//gPwPgcP+U+OIxS83BkqV7iZvCiA36Q6Fyz7oJfuKTU03msrdIVTQgJomVS0ALXdEAnXxBgjSYNpNRAEqArsu+1gB8LyZt2UjOUtxVRJIYeJ5Y8wgXVbFprWc5kbrMjzX0cEgcG3iBkJm1cg3bqjMYywspAbphQOzghiKqmS/kmA5GdPoBKRkl9pZzlCTG1EWZzrWbdQ4PpgJ7XGYeI0Unijy82YiNdRE9/+D+FEMP8SWajKLqmKqNY6lMpyL6O1mG6JqCtxTf0WxROnd1s4HriSzDZJaQ5AIs0vih+Gw61blzJ0skM2erWMXQdVS0S96p2FuiKBOK2TpzT+jPxEowFQdf0gksZ3NeXi8yHnaIQkknoqmiBE+LWS3EWhUyLoYzpdOT2UktRRhGBO6ctCwdDIM5hXSZ6UAhlISdhmoSJbBcin3g+xE7mzn2D485uZDlS04a1bRIXBdPZoCyhuivbXcklL9j4fsrkshAk7Dymtal030X08pfZrMytkLohvi+BLSIFVJpg6nn4UcyO2Hl8OMxnh+Rlvx0oT+kXs7gOLJiwtRwoxDTNHGkxTKej6mVCjSaZT78UGRpXH9Ks5JFIr8TBAFhEJDNG4SSa6s99DGdGkrioUky7jBOY+oJoaR5aHf6LPw8TqJelgUaZkwUJfQ7T/je935LrPtyjqIbzCVdi6ap5HI5RsMJhbw4L1YrD8OwcF2XmgQyODk55mtf+xq1qqRrCRQ8L6DVaPIMERXvPPyE7JUaDRn1ny/GaJ5PtuyQLYrKEceNqVyrMFt0SWTmOuUYpB2LWSj0iEqGci5L56xLIMu1euca2VSafDOLJs+QrGpw7foNFPVVAAbzYw4P79Gs1ilVxIQ+ffoBW+vXaPfH9GREf+Nak5ODM/yZzPrbaZLVkJSqg+yhe/nNm6irgOXY5/F9ofdnSopKzsOXusXTQtbyDXZvLInlfsmqNmXHYTweMZYluOXNBsNRF0cCMJVKJtm0jaaWSCQQztOn+1y7eQNbPwVFGAWaYuNYBlvrYs6DaISZ2ATuBGQlx3QyJPCeki+B7z/v24uIIodAgsDY3pLJyqdcK6JGEu7esUk0Czsdcf5EVDqp2gZqNGXQkyTb1YhcpsS4NyUQl+bqzVsoyjrT8RBDFedDLbPL9l0NKyXWc/9sH8XWqZYaJNIezqUyTKcL4nhOuSSydYoGmuVSkBVape0886hLe9JnW7Ttsbf/KY69xcJLc3NHtEzsPf6QrJ1lIXu+IuDlrStE8xHf/v3fB6B54ya1Rpac7OfKli3+5I//jHS+RLMi9uLR3n2ePamRzlnMZa/m1B0zXSoUc5IvbxaxmBkspwqptHhfM+UwaQ+YDi7o94Q+zaWzpJwiu7eEbnn04IThaEkqPeHgVGb0swmr5QaZjMNkLOQv9gx2dtZwPeGaZByLybCNt3TBFs9weLTPy6+8RPf0s0tAsumwhzeds9SEPp3sL
1BtnZxpYieSKsBQGA/GrBYqi6WwHfT4OW/cv378JGiBCvCbwMMkSf7Rn/u8KfuxAH4F+FT++58DX1MU5R8hAC2uAT/8y+4RRRFhJF5C0RyqNQPTtjg8PBQvOwoplZc8enRORxJF3nm7hWVZdM9E6Ze3vKBVLTLqLKm1hBI76/QoFxroiUH7XDyqRkC/d4LmSHSkeMZ6awcvVwNZcjDp9agWS9x7+AHpgmzAVm1iJcCX9eqGoTFbLhmNZpgSJCHl5LiydZPZOCGRqE03rl3nD37/d8mVhNHbau3w0b/4mM1rMfOJmP5wpbH2eZ2v/Y9/SlXy05ScTR58fM4bb0sBLttU6zrbu5toqhCgJ48O6HXaXNu9jbsU98sXTMqFCvc/EUqsVb9DtdKnUi5fli98+StrRAzY2RF16PvPLpjPBuRKK1xfHKD7J4/Z3LxJpztgrSkEaDz2MXSLel0c/L3uBEWziPw0w4lw7nTDIIwj8oU0niwLCoIsTlZntRTrqWoG7e4eChHZzC0Auudd8vksrt+/NHbMVZrm2jq9gXCoM1kNJfZorlXIpCTNOS63bt0k9sacHQtnNVQclpqJItdKMzwSo0eqEHN+KgTv6lWNdneEF42YHgkHIZ3KEccRtZpQyO5qhZHyGY4v8CXnUzpfYNaLqFQ2KMvDcTLv0qht4cg9FSUdMrrBbKyjIvZUMWPhzcFWalzZFuvXmTzDshxcTxzq2YxJFNooZo5iVSL8qTFqyWC0GLGSiHpWALoJY8lJls4XaTTrOCkYSO6NWnUL72RJo17Ek8zqreptInWKI8EO2uMVWtzErimULGGElnJXGA0XeHGJ23d+BoD5aoGqe3zxS78KQKFQ4Nt/+NtkbJeqDG54NKkUmgzaA7yZUNQbm7cJ4yLjvvg7o+RRFYsg0kgXZb8YCSldxZalBKqZAg8MXaU3Fe+X1bKUNouM3YSRPATq+QJv32ow7Elk0GIZO+uR+Ck+/ET2YNhCvkqpHL0D2RC9CiCrEUti1agQ0nAKDIIuSxmUaRXKTMsKbucCG9kzZ4CmNhlJBLzm5gaYId3BM6qSDHQ5CYgYkk1nyFRlD9vUIQyfooTiueezmFQcMlctfEQZ8rcez5goJbIo+JKo0XdN6qU1Dof3AHj0e9/m7/1KhTtfeJv+qXDGS6UC4y2dnK3CQuzFRD9jetFjOhEHRW8yRtdTzKIFxHKOLYNotURVdRTZE4RiErkhhZzQdzd3X+Ybf9wTPEWyHM000sSJS7FqgOSZms1ASymXxLh5u4Rt6rhum8VS7g0/IJ3RyWQNuodCtt98500KlTnnPdlLUdohCicEqkXCcx4vn/6owjIAVfINKaqKoukEsrl66fboDARHF5eEvTbL1YAvvv0G07Eo3T07u8C0C0h/Ac1UCLyAZjNFpyv0S5iYhJGHrABEVVXm8xn5dIqCRLzULYg1jThZYUmjTFGXNGsbPHoiyvQWnoZtnmMqdYy07N9CwTR93vvg4aXB5weeIHJ+3mvkxWyul2k0VL75Q2FoGHYN31+g6iqRDGoRxwRei3ZXcr4Fc7RliZSRYbYQ81su5tiobXK/M8INhXOVSWdAdYll0ElVVQxlxnF7jq6L4INhqCxcFwcNX5ZjN1t5Hj/6lBhhkOm6iu+HGKp+yT+WMjXyhZi945BAAgm5S484jlG057xhKzRTIZ12mEsHbJmsUJQQRVHx5fmbsjQcLaLdEWvsJSZWysKdLwCxF5UkJGMq3Ll5k9/7ujh71MQm8nzi8DmJeA5TW9CoZzk8kaiUcY5CLkUSxKJnDAijFZbhMV+IvagaVZYscF0PVRGbYbqY4S1i3nwNHu7/7wAMhhU01aAje3a+8pWvkE2lCYKA50VB6XQayzAJ/eiyd/k3/uv/lslsTK1UkZvRwPUS3nnnDf4MEcAKijFZTWd1LPb9MS7V9U0Sd8lsIObAaG1zdPhdhqsp9eo2AKPhOcuxytpVWeq67HPx3SNCbUTTEVZ2TI/RrI06qbK5Lsq6jw/bPDs+JSX362arQtEq0G0vyeTEc5YyMOqPWCYDrjREP6ytrmMkPs2mmKdnn40J5ymuXtnGlfxtq/6UQb/NbDmiUpLAW9M5M3+T+Uqc0dV6hsHhexQzxcvem95ogra+SX6zRSID0Q/f/5i7b7yNKdH04sglCGeUCutcdMR3ctkyUaAShiG6PJPbFxOUaEldOh/TwYJiMU+omNjSaXj0WZtauc6wBzHiHHXsNIoaEMVCceRadcJpQBykOO+L8+K115qMLvZRowxb5S+I9UtPGXUWvPZ5AX7y+OQ+k5VFaKjUdsQ+wNHRYwO0hIuBaGkwKNNoVelIYIxASVOq6YSJTrUk1nRvbw9Hz/LK3VcxZZl6u32GH/sEnghWPX10jKE1uX5lh6m0kfVgSrAcM1ssOZa8j1GiEYUarS2Jnuu6nBz3mV4cUJc8UCnNJrP+EhnEGfbetx5w89Uv4BgelgQti1o7qFaCq5mYskyvVKxjamPMjAg6q9kOSVal5OSYBs/leIoa6mxubmJIRN9OZ46dshi0hfzvXr3OcHzCbBKzVhQOTTZT4ehswmTisb0t9n7ouVQqKdodsRcVS6HfibmyfZPpVFzrzs03OTl+RhKa7O7KvujTJUmy4OhYzN0br7xOd/ghvaGFKeU/TGycMmSyCou5mPMYqY9/zPhJMlfvAL8GfKIoysfys38A/PuKotxFwLMfAv8pQJIkDxRF+afAZwikwb/3lyEFAgRhyP6RMBwVMhTLTSaTGdmcUJK59Dr5osbKVylVhLB0T8dMZodErtgkGb2MpWXo+R0GT8XGXN+8wnnnlEzK4Uw2q6uqgpIoLCX89Xw1J5hfxdR8EhlVrRWzdLuHDNoho5FE+HHOCIMEYrHB840q6ZRPtbTB4TOhuIMgz5/+0X1arTXWt4Sx+s0//oBCbo3VTEYLFw7zscLoAu7cFoJ3/XaFd9/9E7Y2c9y9I9BfdrZT9NrXGLTFApp6h2q1gu/HrJZCiSj6gItOn0r1dUxFCEcuC4dHj1kshHJ4+/Vd7n16TqSe05DoK6OBT7/jk8sK5TAYdtjcLpDJWLQvhHKdzE+YLEOW/uIyAzWeKWiahpIIIchlHHQ1R6e3RxLIelW7hpPKkkwjPv+aMEwtbcG990ek1iRB4dMzCpk66YxKXzb+lvIxu9vXOTrosX1NPGeCi6L6qIYQ6iTK4Psh42GfmTS8C3mbJ589oFzMIQMRbLduEfgzun3RxOh2A/xQJ5Oqc/WqMBAuzo4Zjqe0tssslxItCB/HyjMayh4odUW1mcNzo8tmxyf7bVrra1RrOfpDMcfLWczV7QadjniXpTfkxo2bXMRTMrbMZqlLnLpNHEVkJWrjSdunuVZFk+hkKTVFFLg0y1ViyXBgWzPmPsznOvmGXAjHZ+UGvPHaLwDw+Oh9DCfh/MSmmBPvctZ9n3JtnTBZcdY9kHujgGFCyhaGf1bfIUpGlAOTfEYcsp/ee48bL30elBm/8Ru/Jr5XLnLr1bewLiGjl2xc3WTcO8WVh+MiAU2fEqlwRRIg
Hu5dsLO7TnlXHCbdzphM1saP8lgr8S6trZsEg2OmQjyYuwFVp8Czw8fk1oQiXSvqeOMl18rXGM2Fk6QEKYIgZqMp9thgMiPOZHBdF6sonP/+2aG4x/o6vb6Qf932sMKE1ULcf7h8QteDha+S6MJJGq7AcRL0bBo9Ep+l7QGGkbsMUDx+ep96o4FtrxFLh0i1F+hYLGca08X7AFi5TTYyV3FdsZ6VrW06h2eE9jb/83dFpDVWCuS1gCCJ0eUzpPIZfvDD36M8EofuP/mvfpnsWooHn50z/OR/BQTFQG9+wXi6ZCVVuZVfZ6OcoigBCs7PZ6xvVImUhFgCtyho+L6Poqk/Ai2IwXV98jkx5+edR+wfnKBbYEl0Ut8PUBSF1167y3lbstmHKRR9hS6jlasZEPZJ2U0uJDy0auhEUcRqoZFKPUeue0x/oKFJBziI5pBYzJIR2ZRYm1La5KSzwEiZxBLSXMckjBJ0VfaB6XDWnqDqxmX/GLpGEiuoyopqRRygi9Ueihldgh0kSULg+RhqQC4rqQJCD81UiRKxxkZso6mw1khxKIMyKzfAMFJoakAoe4TiyKczfe+SNN20iuiKRZi4l9UXo0GfrY1bTPptFhKMI1PM4K5G6IqUqyDGsEIUXUP6NaiGSqLGJIlKLLMR5fIS01JYyV4j3Uwzny9JnIilBHMqldMoScRw4uP5EobYczFTPobMwgd+l+Z6hv3D1eW7JImCEifotsVqIM6Hnd0WndOYUIKfxIGHoWnESYK/FEan5vmknSrLZcRznl8vcIkVLns8x7M56XQGQoOVvJ9ihCRxAsQkEqzC0BKquRwXZxIqPfSxNJ04UjAl2qOp6mxuFDg6PKTXE4ZasZTB0Q1cmbqKEpGJjOM57Y7U1Z5CPuuTJAGqkpJ7eMHNGztYaWGAfefd+6jqbfwgQFPFtZxMial7RqlQpdUSwY3Z/IhIW1LIi3V/7e5dPM8jk3Yo5MRnvu8zdVfs7OwwGIgz63d+9/colEp/Yf8YhkXKeQ62D1uFDbRYoyMRPhu5NG1/SWLVyT1Hs122WS/eIIhOCWXvUsVokssbhAthkySBzs2rJaL0OqFs7NHMFGpgk4SLSzCnt175Mt968BGKrGqY90xKxYjR9D0URC9MWsvS2Nnmo2cz8pHYQ8/2v8/K9ei0JThWyySTsqjVtphJBGMtsth96wYHh8948H1xFr35hbscnB9jyR7ID977kC994fO4gYIiibdvXq9zcX6I6oUYCIO2UsgTJysOZSB1a/06y1WbT+7t8fnPCVCtQjGLoqpc9KuojpAHM52mnN3Al+TV2XKLKFownYb4kronm81SqlosJxE5ie48Ho9x1CaNpkRoTBRmygRVgUJGEtOfPMN0cszcgKNnwjnOVPL81OtfxUPojfksQnfGZPMp0hmhb3qDA3JWwrDXp3VVnJmxZjHy52Rk4PZ8doGpFynaFvO5pNtZq7Kcw5ODNrpsEI2jBdNBwkSiKt6+/Yt4/hQnZ3EuwU0WK4di3sGgwKGkD2lUysRazFBm0zXdYBLN2bp17bKqYXDSR0k2KTVEkL15xcWyLKJxnnxFzO/AHWOqRao3qpw/E0EmRa8RRwtRkQBk6wUibZ3Dow8YLkVAW/cS0mYBxVIZueJaaj6NXa4SuRIQybFZnWX54tv/FsOB2D/f+e491naKPDnY41gckaw1s3z80QN2d4V8esmcSnGNcjlLEok5OD9/hmWBWUnhzyTwhu9hp1S2m0Jmp/4IVU+TqzbpdcU89fun1IwGaDFrVWHHRggZ+3HjJ0EL/A48h5T6C+P/+kt+8w+Bf/j/dO0X48V4MV6MF+PFeDFejBfjxXgxXoz/v4x/I7TA/7eGqkZMZiKq4/sdZosL6pWbFCSKS/d8wnxcY/2qRUdmOo4PxkznU5o1EWnJGDr7xydoeR3fFymMdqfH0dEBuXzqskTMtjKk0wWubItI/fe+/z5WXWE+izk7kr1aS41sNmFrI8vHHx2K35ViMk4Ldy6iKp9+vE+jvo6WROw9FlHxh5/u8Xf/419mPF3Q7Uuy4WZAHHoEgZjqw8NPefm1NI1ylduviujIdOrTbGzy1psvo0gSWN/3ufPKTUJZTrR38IzdneucHvdxRMCNyfSYSqXC2fnHJKaIYKQyu9TqFRRZPzp2vw9ml8PTEaihvHaEH/foDcTfN15qcXp6jLtUKZRFtODl9A2mk4ByKU0i+4ZG04cYps6gLyJglpUin0tz2J5z/ZZ4l2EnxNHLRP4FnbHIHL16N+HfvtXk61+Xdf1Wjt75grAcsS57IkJXIQwW1BoZDERUbLQYYZoBz8H0zs87bK6vM52OqFeLcj0L4EcUCmUGIzFXJ6cH1GoVNjZ/CoDTo6dYVoRuaPi+mJd8sUJ9vcB0OiSfkQiQSYGFNyWbFdF7d66ynGfBDBktBHLe9pUWxXqRo4N9CrKGvlZqEIYzdnbq8jkVpsMYIp3ZUuypJNTJpKYUCioXbZF+3t7YJZ0p0OmLSE+ghWTXdPrdPouZyG4N+y5+4ILmkkpLJKCzI9Dg8YmAOB/Oxmi2BsYIQ0Yew2HMdDJH0/RLpMP5YkhvOKLbf16TXOJzn/sSB08+4pGMZGXXHLrTI1zX5eqOKCM5OTvh8QcfcfOWiNhcdGcE8xhsi4EkA3ZyBu7Cp1DMohpiXurNDPVqnQuZNa63WgxHA7QEcqrYL93DCWE8ZjYRUblWo4WtJVRqDrYst+1ddHnp+hu0BxPMvIggessJxCGHw0MA7r76Mo8/fUym0MI0hKzX07IkZNTnpddEL8GzvRMygcrIFZnyRSdibC8Jx1Nu3BGcKCeDEd75hPrWGhdtgaJYr9Z48vSQr/zMXwPgz771XU76e1TLG6QcoW8eP3vAv/MrfxvHrtO7EGvq6wpJb8C1vOjx3O9l+daDPIswxpSlGYblEXgmTiHLqC/2xpPf/9/461/c4W/9hz8HgJbR2D/p4FgeN2RZqaWG1HMvM0xOUeri+tNxn3o5z5/8UKyn58JsMUcxYiQiNmEY4/s+um6iSK40VdNJEmjJnoQ33v5pVt7HqIpGsHpewqlimhH1hsPBnoSxDnxsJUciy/aW4ZIbO02u7lznd/7Pb4nv+EXMvEMYR8h2H+IQBr0YLxDraSo6mhViKCX8iZC1RqHBcUcjDHRsScoaeC6g40j4XZ0Ui3mPbKZIFIpniBQPNU5Qkhn37olrBZGNoSeXPIkBIWEcsXKnnJ1LdMtlTMYBTfLlhV5CFLpMZmecDefyOiZaNCPCuERD1BKbXCnNM1nBkCQ5wjiFZUYkyfOyS5/19QpH7TbPS8YCNyQkwbbEd1arFfVGAcfOsXBFZjDjxCiaj6nk8BOR/bxx9RaJX6TblzwrtoKq6sSorGR5+EDtMx1nOD4aESVCHhUlIgqTywyGoXoMhnP2L6ZEkVj3KAKDhCj6EaH0cHxMKtdgJcmBUwb4Soij6qymQr/dbFpousVgOiCU2R5LhzgI8GSfm6WbNDeKjKYdIlmWa+sqcWQQJRHT2XNkLhgPBhyfSnoGo0Toezi
v2c8XjM1pYIHl9eXmIaGlkunLvtjds8ePAcR6/S92XWRpvRWKtil9wFnjk1Auo3zvnv/tf/RMxpfMHacoViuczNWyLzUSjY+L5PfcnGdoXD3Dk+pVC2ebonbIlb29+h23+O4ziLYLWqqty5fZejw2TRJmMwGKBnOlOZff3sy5/QbC7jEgvqUMAxGhwcH1OpQJr8v+y9R4ysWZbf9/t8fOFdRmSkz5fPv6rXVdVlpqure7o13TPkUByIEChDcEBBECAIHC0EkpAAacGNtloI0IJLghIlSKLI4WAMx7Tv6u6qrqrn/XvpMzK8/7zR4t6M0kINbSigF+/uMhER3/3uPffcY/7nf8TerFSv8Pm9T6nXZN33x3/F3Sc/4dLmHt/6zb8HQGszxvfOWVmpMJtdBOhXKZfXSBUhi4dHLzCNKrqpYco9zhcMHj/qMx36rMtGuHGY0G6fYsm2GbuNyxwdnxElCz74psi0nr14zov9czZqNY5lhjJWbYLA5cp1mfEKxsxcB3eWx5CogjTW2G7dZjpro0sWY3cekrUyxFKXKUoRbzGhXCoxW8jsq2rw8tWdJRFHGMeslL7J9t51PKk30iDPV997j9LaCyx51xVLKfefRlxZE2sZT6ZsbxUxzJRuW+jKZ8/alAsmW6tXKZWFnfnw7nN2L68wi4TNbqgWplLFLlTQMrK2uGgx6CaUszCbHwBgWS1q2W0cZFsgdFqtFe7c+ZxKTchwtb7KrebX6XUHnJ8Im+7b334P33nAwhfOsqaVaK5WqVVX2dgSNuRkKGqlm60iL14Ie23mSATArxi/Fs4VSkwSS+Wwegsl0Tg+f4RVFAcqbyZ4Xp927z6zqYy6WRrlcp125wCAyTSi2SowGk8wFPG9QnGFqt/i5eHPSRMh0pc2v0m3d8xZW3wv9k7Y2SuwWMyxpSL99rf+Jj/78SvOR59z5YoQ1rvHXdKuSmNDkAPE2hr+NEe9nGNlXTgD7XaHTLZLIV+nWhQsaqedu3TnvSXUZa35Lpd2DX72s1+QE4FAIj9PuVFjMY+YjoWwetGQeqOMbUinZdplfeMSUWAvDeiee4xq5DDVMm4i3ofUxnU8HF/CvJjz7KlLrbrGQ0lk8ODRx1QbNpolWaPSQzK5AoaqLMkPdi9bqOoKqhKRKiJzVC5eoX3SZTAVn9nYuEm7E3F+NieNRcTEMIrM3D45u0m1IX5fU8r4QY+CJTIh5+fnlCoanfMMt94RNKHH+zP8cE77fIxhCoVUzBmgGAwl5Wk2G3C036ZVvsVqTTiK+YKHO87jBw5tVzhFQdjDc2O2dsQFNxmnvHjss/1dg6dPBczza19/C9ed4cyHmNISNo0Sjx4+R1eE4fjLz37OpUu7TEcqQXRBdWvQPe+Ryxbp9J8u1/zGjWt8cfdnYk75HJXSGtPplO5MMjStFvCCkWC4vFKW6xLghgMyBWFcFbRduicH7GzVOToR+7l1pcHR+XP2dpo8visOtdEzqLRUMmVhVDx6dkKltoKeqlia2Ktrl96k3z+hN9ynINn60kQjZ66BJTvE+wtW19aYtB2mrshmra6XiVOHbEGjJjNlKkXSNCWRLDnlRsDJYUi22FsyCpYKV1hMx1iZCFvSkB/uH7O1vUciGfbyuTJ+MGYwbENy0YPsDN9fUMyLy8t1XQ72TzhMFhQLIqOoJCp6PqZRrNJq3JTfO2Fz1+BMFufvH3fx05+wGIMmSSc+/skn8F/BZDqnP5TQy4zJ6WTERktc4KgLdDVDMbfJ+18T6/SLH4hgSZIkKJrsoaMaJGmPf/Bf/6dins4J00HIpO+Rysv55dGYVwcnnJ708b0LNrSQ8dQjkga8aqjkrAqJGmFI+JmmW6RpjJpGpJLmNY5UlNRHUy+cspgojtE0Y0knLjl5SBVl6YTpWhHTLpNIgoJ8TkVFEd/3hSzOJ2PGw5T4WLngRABNx9LNZRYgX2tRXylRyOXZllTl169doVkvkMYJiswqdGYiI9sbivNx3h4wGc8ZTb8kptB0HStjUK2WqVbFRasbOnbWwJZ6MUpDptMJ+NrSEU2SmCTRaDRX0eW6aLqCoihLGCKoTCcLJpMZ7Y44/wcHRwSBh6GpmJLZLZvNUl1psLYmAjXraw3q9SrZjEUoqe2j2GE+XRBJEpPZxCWMYxJgKAMpagqWmUfXVfSMZI7UiiQGmJJBUUlAQ8NJRoSBJMvwHAx00thlLtEJQm50YimviqJgGBpbm2+yuyfe76sfqGiJwmIxoNcX35uM5sznc3o9YQwM2hPG/RMOnz5CJgvxQxUtCciX8qiWWIN8yWS1tUuzJZnICg2q1So7G5sYkoRCN13ms5CsGvH3fl9k5l1vTm/eZS57p40HfVbKVXrdHiNJwpA6Cm7s4U2zLMZCf8/mfaJgnVpV6H1n0SfTyKNHOhdJGj8sUW9MOeucMRoLvWFns6hqROjJoIWqE8Uuo9EYPxJGvK7tkCqgKYKgAiBOQ0xbJUkl2cpghB+aJKnHelO2qLBtQjclSEeosoedM60SJHNWNsU8e8Mj/rd/+i8YDlO+9TeFQfun/+IvmXXyqAWIZHBDTQKUMCQjmTq3dy6zmD2htpsh8WXGWw+xDJ32yYLEE3O/fO0KViYgkgQeq6sqL/fvsr0rg4pAvhSycF1GfbEvpXKG0/PH5Kwa6ztCd42HpzhzBXemc/WS2KtnL57j+cesrwumvk9+cZ9WY418xsBfXGRaeqSUMY08viv0Rhh2uf/giEAyL+ayBQwtpHWpgGlJhERX4dKly8wWp3QkgvHt27/Dyckzxkfit1M/x95uA286ZK0u9rPd0dm7cpOXBw9xZfpci3KcHr7ClNCveimHphSYjtxlts51FXRDI2fXseRZs7M687lCqSbOVXd4SqQMyelX2dgQ+vv0/D6ffvoppqERxGJeOzs7PLjTHgQ/tgAAIABJREFUo9XcEXKQnZCxskxmZ5w+FoG+m7eusLlxGc0ICGWrkMMTh93dW6TKRMp0lu985z+hWPD44S/+ULwLFaoFm0ItpGEIObNzFiftB5xJIqNyqc4773yDJ08/plIWe0My4aT9hEq5zv6hkI1i2SBUfGLJlHnaOWDh+Wi2xcGjffnObVxXJc1kmMl2N8WaSTwzCDWp0JUctjVgMjhd6tPALVCw16hVdzjtCTtoOg6pZNcIJHFSvpEAJoqaodEUjvdAH1DKlxhLJsmMnuNPvv8/4jgG26tiPfPVIj/4yZ/RqqwT2kJmB4uUnSJ4kRCWqJwhns1JdBVVFfdAc1MnYxskfod2X2aO6jMWjkmjIRBhC/cRUTImm13HlCR3ujnDnQ8oFE08SaqVLSs4E4eKDLj6SkilZOKHJcau0J1xP2Fjq0YYz5f9zXqdOWE8R0nE92qNGpbp0+32uXZFlJokwYJCK8+jJ1+wKt+5pkkCrF8xXhNavB6vx+vxerwer8fr8Xq8Hq/H6/F6/FsYvxaZqySJKFck3Cf2GA2G3L79IfsHBwDEWkig2WhphsqqxPr7Jt3BC2JVFgxnigyncwLGPHkh6ik0LUUzXZJUxTBl9GX2
glib40qq1Frd4LQzZTSOyEiP+1/+yb+kaG9jGcVlkWnOukqixPz4ByIS8f5HBvV6Fc93mcj+Sq6/IFspkbOz+GMRCWjmdhlbr1gEIkKj6U0ePvgZnfFTPtgTGP0BUybTAyYDk5zsCXBt86ssZgm1mkhLZu0ip8cn7F2v0z4WkdVstshndz7BturoqfD6r13f5N79hxiSir1WXqeXnuP7CguZPt3crROFGt0zkXa17AKVqs3hfpftbRFVmQxn7O2t4kwquAuBc/3k+Y/Y3Ssw7It3mw2fQBLx1cu36Us8sGqpzIMcqQ6+LPSNPI0Il1iXXdX9HK4bs73xNo4j3qVcNwnDCkftu2RMEUF8dfg5m5vXyMpIT7d/SBwvsIwQX9ZqOKcel3YL3L1/QKUs5lCrbLCxfpNTSZH/8MEPefvtj7h/9wVVWbjZ702YzCb4yYC3viIoascTj5XmKtOJJLiIVNBNMgUDPBG58icmqhLjB+6yAHIyGnN4eES5JCKPr172GBVPUI0p5bzIgvmLCWfTI1QrRyThPa47Q4lCklhE85qbJkq+RpJq1GW2IFFjitU8o3lEWWKg6+Ump4NXOJ7Yv83SFmY25eTgjMVF1Gj+KVYcoSjr1CWkYtzZJ/U8ho6I4lSrK4SBim4H5Asi4uZ4Lr43Yj6e4M4lNNHuUyqt0OmIKGC+0KBWG7FW+yoHx4KoZbZYEIQJSbjKcCyyYFeuXOOk/YiMzE4cH/ZprlYZjk/QZbawVGnQ7w8ZDmbLffH8EYp6iKGKSOT25lXGkxlq5oyHD4QMp3GWN94qE/lCb1xe38PQFDLliPaZOKONli7Xvkgs++PFepacVceR+9lorDIba9ilGVXZf0yzJ8RxAcNQSGUkN1HmuE7En/yZgPf+3b91g/WsTv2j38CT8ICvTyd0unvMFpCpyFqmkUXobHPv/qcAHO2f0e50WSx6S0p+d6Qwd1zQHHQZDSsWbDQjSygj9YapoaYmappFkbWSmhEI+JEak0TirEFMmiyWPeyCMIeq+5CEKFInGGZIEqcYqoWuy1RHZJCkNqEiZKrXmXFy7JJGKoEkAyqXClQrTTJmhbUNAU1qrqxSWymT1YX+WWnUWFltgqIRSLn2fZd4MWMymbD/9CK7DK4TkpX1MWvrKzSaZS7vblIqC8iKqoKiJviut8wu+b7PbDYTBS+IHldb6ztkM5aAAcq1cuYLHMfDlU2SHccR8KihwMkPeicEQYSKTl5CCAolHdvKUiqV5PuWUA0dRdfQ9YurMiHyI8LQx1kImRr7fUjVZZ8tTTOJ4xjdUpZ7pes66DEo6jIzJmrQYqL4IlOXEMU6fvucUGaEw9DHCz1c30WR6xkmMagKq5sis7tz+QqaoYv1klHqxcLDWXjMJtMlFHPY7/Py+ROePrpIV4IWqdi57DKjmK8UKBSL5IoFMpJ62cxYrK+/gaKky3nHYYjnLPit74jMTuA5JElCGvosPNliJI6pVHf5q+/9AADLqhANHZwgQDHFPDuHI1aKJj3vjLOOOO/VapNEDQll7ZaRhRpNEv0BsS/kxUAn0UKyigGyd5oZpCghOKHQb36YZ72xSn0zj+dJCn4zg2L5JCEokdCnpRWbYnGD7//wewD8/JO/YnfzFpYaUGsJWew9GENSwExNiMQ+BzgooY4t03Cd8TlenEMJHeyL/kOKgRJE+IbNXH7v5Gyfr5or3Hv4xwBsXf6ItfolhoPD5b5Mp3OGk1OKhd8WvxOOOXzm89f/xtd58OovxBoECmqcpVGtLdsCkGrs7u5hWOLMbmyt8+rwlNVKnoIt0DTbGypnZyfUqld4ciD6XNXqq0RJjUJGwqB9FSMxME2D4UT81nZuk67TpVpo0B4IyPbx3T71lUuEqyLC3xs9wg+zLPwXNAvi3m6shxx3v888zLMrSacqVgajoKFIwqd0eMrhuEtrrUEwEme70WjghV26nS4fvvcfAhB6Nnt7l7hzT2Re7h38MRs7TY5Oz5Gtt9C0Ffr9Q9658Q5b2wJu99kX97h1s45lXJSRrFCtWMRBiygQ+rvfdSkUCuSzFqpErxTzPQq5PLFsFaQ2ElTNx7JKrDWvywdO8Zwxlr6Lql/QlQcUqxUaqwL55DsLJuMZxcLKsr6ptb7Gjau7PH726bLBu+PpFMoWriv0fm/gks+VWWu1lnT01VoLUg/HSRhPhT4r+XWaq1VCT9ZO9o9pbV1mVg4pSwhgNlOjWNbpHh+xkhG6Y86AQtlmEov7d9SdouhNht6QdC72vdas8LL9gmpO6IgoDzdufQWPkGguFv3S9cvcvPYd9i6vspgJhNTDe5+yf/pXLHoiQ5vJrTGYepDkeUO2RvImY9zI5+TkaNkDLY4CHHXAs1PxO95Ip9HcYTTv0tLFvDNJg+vv1Mj7Bk8lOVXnrEOluI4uEVmYLk+eHTP3dTZ3xX4GQ5XpbIGGijMTa3ccTrFydZrr4v2ePHnIztYu4PL0sfhMuZLDCxdki8UlBL5SkcXWv2L8WjhXSmoQuZJIYVVDc31Oeo8JJe5VTTUu7RYYjTsEUrlq1oh8pr4sSrUyOl4QoxsOaSzSe/VGnYePf0lztcLartjgLz57TK26xlDC7yLXR9FU0mS6NJbPusd4xTFRmJBI+Mv2esLO1i1MyYDjOgELfwBKSCphTwomSZhnMDrjorG7ZWfQ4y38QBihP//0T9lobZKZb/CLnwv40e6lGpGfZ22jwaHsSn16/Bk7O7sEisDULoZrjAYBgadhSGKBybSDqVeI45AwEPOczT10U1k2VhwNHcLQx865OJKtSDV1Mpkili0ORoqJqeUxrUPWJFxqmrPxophM4zl2Ij63f3bOq/0QFbEHljnCzC6o5X6P4+dCCD39BSvrm7T7Y5prQvhq5T1+/vFLVlfFJbRwHLK5DKPRMbOJrHNJ5rhOghblcGWxbMas021PqVbFmvvxEYPRCX6wydq6uIj9AFxP5a233uf5SwFXHJ+nfP2jLe7cE5dsrRkzHLVxwwnbW0LhP37ymNH8BQoreK6A82UMi6OnHSp1cSwKZpZ7vzggW/JJJfuTbds0d2M6x/Nl0XBtpc7JcZvhRMKuAugch9RXigSJMOKVJE+o9lhbXWG+kIZi4DLq99hqCAMlVGeM5+cEoUqKMFAGowX5TItKvUwhJ4v/h6ek4yKVhtiX2UJhceqyW/uAxBIO0JPHp2ztXkIzXRxPXNp7e+/SPnmOnYr1LFsWJh5jN6C+Ks6DnVnl4OWQZstgnIrntc8esb12Hd8W7/Lw0QtWqnssnHPcQFyySbJCPmeTRDGFrDi358cezeYOQ1mLkjLBMrbJ2CnTuZhn99zBzOhM50J+Fq7HdDolVXSuXBEy/Wz/C8IwpZAvo1viEkh8ePhgRr4g3m1n8xrFUoZ//s9+sKyT3N6+DfyQ+WJEUTZ8DTyflIDihSGMRa1aZerMcVxRb7C7s8bBKxdLS0mQRdmWRRRYBLLwf//8kIxi0b7/KQXpVEdOxHSu0Gi2WN2SdQoPRlRXrlOrCthj+2qb/qjLcNDmlx8LnfDGt3Jcf2O
N49Mz+mdCcX/vL76gUCqgyTqN0MugKAswQubzi9oCDdNM0a0U3ZB1LkGEqmSwLCGbQeyhoRIlKqiSLTCyMUlIY59U6rc0iVGTGTnJRBYYCnmrTJL66LKUMIkUZvM+M6XLqwMRuEiYg6KiyAIdP1LJ2Hmy2TzFktDpK/UKX73xDl976xZrsnjctrMEQcR0KnTSYNBjsRD9wAYS3hfHIbm8jZnJks0KmV2prbK5lsfOy+bHEhqpaRqGccHQqKKbGcp1Hdv4ksVQUbQlPDOKItI0xnGcpfMxXvh4jsuRbJa9WLwgDENUFAxNwi5VlXy5Iuq8ChLHX1zDtNSljggjlyCIcKbiHQCCKCQIfOI4RtG/hAHGcbxsJq0qOrZtY1kZUsmAF8UhURIKqJssb4qiFMdz8DpCXr0gIE1TNO1LJ1AzFSzLws5nKJaEQXL5yi6GYSJtA+I4xk8iFosF86k4V4vpgqPDfRbTxZLpLAxDNONL5zFfKlKplSlXq0sio8Z6SxDvZDPLs6YrOkEw5d//DwSEO/RTAs9jOu7jSIbNycCh1SpQbSjc+1gY+s7skIUbkKTiXc48h0LW4m++8Rv8xZ9/IuQsjVFUE0XXvmSzVGI0U+HJ/kUD7TJWJs/CDdA1KQdpjJYUqZRtitJ5PDx4yT//n/8XQlm3ePvmW2hKnuP2Pp4pgg2/83d+iz/84ucYiYoq++8kuoXnjHnzmpDpyE+JojnEdfL2RTG+QpIoGKZBRhI85Bsxid5hZ/t9AF7uf0Ja0cnlvzTWtta28ebPmI+PpICHbK63uLTdYtAXRuh0dkDgr5CpzVmMhD7d3Wsxm/bpyb5TG1t7lAsmjY01Akds/P7hgEphT6yFZGn1RgaVrIUhz3+kxTSubnD3k19wdVXs36UbAy4V+qyVVqmtiH3XtSqvnv+MxedSL04DcrkC67VdTheiVKAR5Zi5PoqfwRkLPW+VbNxFn0gWeQepRjEPnaMpm/KOJlBRIoOc1uCXv/xMLIOSp3vexZmJwMZXb/zHTJ0umZyCZohz/OjuC25c/hAjl/DilbiTEyVhpXKDSMJ0N3ZV/vIv/5wwcrnxpmjefHbaI0r7TOYJaSgCLrlcmTSNefzsFwAU7Ouousdg6FKpiDUolZocHh4TRD6dUxFAvn5rB9dLONoXgXdFBdd7xvb6LdLkgnUv5MnTz8hZV8gVQ7meGUxLZa6LuzbJJSiYjHoR+YKsow3PSRSP7nBGc1XM07ZSBsMR7kzot/XrBeb9EY1SkxAh14dnLzjvBqCWCBE6rlxuEkxnpKF43nzuYBfOKZY3OBqId5mfdDk77lG+JS6C4alDrbaNpo9QDFkbun8MmTY/+OSY/TNxRtXIwlId6gWx5m/e3KU+DxhNzjg+EnauGpS4dfMa7rxHIoNMplUll7eZnh8AUK9ajObnzMYpo6l4Xn0V/Jcp169+fdl8++6dR7ixiysdTCOTMJsa/DvfucGD+8IBiyOdsKdz0jtnqkhCik6BvFkm3hBnPXUiHj89pF4pkZFw9KfPDvmt73yDMJrTlfWNeCLx8avGr4VzpakqhbyYShxkKZdvcnhyn2JOXDCFQpXO2YiZv6AuDe1Ou0/OjkhicTGt1m8wnA6IUDg6vWB20nn/7W9y5/4XnJ4KQ6Zor9Bvz2iuyY70HYX+4JTWRh1LFYtVKUakSszG5h5X9sTGffrxJ3S6P1kWxjvTDG/cvsmLlw+xs0LoV4o11tfX+d6/uUOzJTGzvX10K0tTRjCS8CmO/4JvfuM3+elPfwxAJlPF8yf0BkOaKzcA8EpnVKp5uuey5kLtkCu5vHj5dMmmlaQOt278Bv1zH3NFHKBHj5+QqkMsWzI2aT6ZbAHbtjk+FMpne2OX9vmQXFEotrlzxErhbWyjzXgglFEQRByf3seyE7REKLuPvv5t/vRf/wDVEgfDm2fZ26vxyZ2/5PRQ7MPt9/a4d/9jWpsb5DLC+O+cvSRjZrAl5bG3SNhcvUr3dM5Z76dCCBKb3Z0r9Lo+kSQNefZkn1bjCt2uOBiVepGVyu4Stw2QyTVYqV/j+PiA8KJJauLjBwsasnmt7Y1wFi6VmsmrA4FbHs2O2NjZYj6f0++L/2lpBsuOcVNJUZ+4VJol+t3FkjVua6PBq0dT9PwINRXzPD0/ZjJPODqVzZ1LJfwQpkEfVwYNCsWEYk3n88+fceXyDgCuGxG4GRahiNCcH0JzvcLR4RmpftER3sW24enLAzY2RPSlfzbjg698m5eHB2J9zx/xnQ9/i05vzFw2c11vZCnrRQq5OhnplIVugm1XGPTEpffs6WM+eO93iToD2ufCoM3nHeqNBq+etVm4QgFbls2r9IDxSCjbXC5H+6zP/osJN98Uhc3d8yl2LqXduccb1z6Ue1pkMnKwJWlJZrVEtzPgzoNPWdsShsRvfu336HSPaLU2pNwNsTIVEmXE6ZEw+HKFPMPpY/qjEzKmMFoMrc3+QcJKU0QUH92bsre3R3OjxtGJqIWbTEV2pVYv4bvCsFBVhYXTIVHE8xUytNtfcGn3OhtrIqraaIx49mQfO1cgls2OFRIyuZjH98RlWWlV6bVdyhmdD771m0KuJ0PWN7ZI0xlPn4q5V4tvcXx6zGAkdNJ4NGc8cVhMIlRZCH/lZpWvvLOLYU/5x//4vwDgb3z7v+Ply30sSYSma4Ci4zout98SUb8ktjjc7zLodyGQdN7VlFQf4bqy/keHRFFR0zyJIYzQFBcdhSTRiWQ2AlUnDqNlcXeSpGiqQxzGSP8ATVcEFbtiUa3JJqlUMUwNL1jI52mQqgTunHlP/G94csJnP/kpWbuIIVnicvkKq81NdrZExnRza4PV1XVuXlulWpXNOTM6cRIynU8YSydsOh1z1j5esvDFUUoQBIKYJBX3RZjEqIqGrpvUq2Jd7IxJvmCRy4l9z9kWmUyGXCFPoSA+s6NHS/p3EHTtSZwSBBELacz5bsBk3mc+69M+F4avs/CJomSZNdIMFVVVyGayS/bHbDZLrpDHNE0uyOGjKCKKEhzZFHrhjggCXzBK/j+cQBQFNBVDyr5pmui6TiqdSVPPiO/EKYH0NhU3wVn4BEF/6VBevNsFIYpwviwKhQL1VRGQ3NzZxjJNNNVYfi8IIjxngeeINXBdl9APGLU7dPal8R8nZCwD3TbJZ8U9Wq3XKRSLmDKjmK/kMDIatdUq65JcJZezmU2GOHP4h//tPxRr7IU47pz5VAjeYNhFTSNKuYhLm8K4Onil4jk+jutiycCCpphksytIziuCMCZUXlGultFlLWohp1OIm3z+4BP2D0QwbjAesblzhWJF3L2zecB8MSBjJCB36+d3j9B0izBJMaWOVVSTWNEIAnGvnp74ZEwbJVWJ4i/JKQzFwvPn+Lq8n5QcOc3m5Vjo/a9e/bvEXpusUll+ZzYIubF3jUFfOHK6mcEwfNrHR9QkBbil5Ei0lMGwz3lX6JdioYClNHn7KyKr0h2/QFey9I8dGk3x+wOvi1HLc9ZZsLktslknpy9pFta58LzPH/6Ik7MMW60rXL4mlNDqnonnbx
CGFZ6dyM+9MhnOnxDIbMHO6htM3af0z6e0VgSTnK1qhPorPvrmO3zx8QWx1x6Njd/A9cRmffbslDjQWNtw+fxTYRd98MFfI5dfIUk6TGdiHUhVpjOVrDzHC+eY4WDAzs4lKiWxLtGVBJQBcdpEyUriq6zKy+O7XL8s9vj50y6xn6HRKi2bsjcbGywWc3Qz5MEDYS9+7f3vsn/wCl0R99PqpkL7zKFSzTEeid/ud3vkcgU03SOM5V27sDhr3yFvi2C1YnRRkga5UrJkro3CFvOZw+blJoEkb0gJcByNsiQRe/H8iGp5jdmsgyfraIejLjs7O8y9I847E7nvcHT6kFJJzNPfz2ImOaqVIu2uCIDaORtDz6IbKn1Z090/9thZzWHLxs2tvV264zHj8QA1FHs69Xze+co7zAJhI4w7Lra1YHUlpCsZExfOhDDs4c9V3rz6tpC9ocNm8zdQfaG7XT/gRfsxRWON1BC2RKT43HnyKe++9eZyzXv9Cev2ZbI5IcNfuVHh7otzUquLK1FF03GG22/vcefzn2GGQq7rLYswHZMizt7oTONr734Vd9TFlTTy62+s8PzZM5z5gta62Jtcq4G3GPLqpdjPQrnBSrZOtZplPhFzrzds7j94SblqgSZ0wnAudd+vGK9rrl6P1+P1eD1ej9fj9Xg9Xo/X4/V4Pf4tDCVN0//vT/3/PLZ2VtI/+G9EitxxUxLVoVRuMZ6KyHk1v0scKZx3B3iyH8964wqGPVxGhLPmFtlsnrtfPMGZCm/65q2rGGqdRv0SDx+LtO71G7s8e/aSXdkVNgg9zk5GZPPKkj7VsFS80McNY5pNkfJ3piMSdYCe7ACC/lbVT8iYNQ4PBOxJN1IUdK7svkUkw73t9oju4vtsNL4OQCFrspiPaLWazCRm/+DwhGIxRxRqeJ7w6OvVXUH/LCNnipowXRygG/llvyMUl2K+SjZT4bwrsi+uYxBEIwYTke3JF6s0apeoVNY5kCny1XqZWJ3RORd/K5pHxqxgGiEnByIyiH6Mbuc5PRpSroioRi5bZD4/o1YRkeXYt9naeIM0iPAl5bhuqqDkmc09NEPWndWqGOk6R13xmdZqAyVSOT/tcvO2WN/OacD6Zp3O4IAbN0QN1P37d/ne935OrSGi8JpqsrpWYb4Y0WiIFHVW3+G8/5B8wSBjiv/NJgFZu0yUyt4bno8bd1C1Bb1j8Zndy+vcvf+IiXOE5w3lb63ipBEbWzL93ffY2apwftahKOmMV5p5ep0FimWQyPqU9tmQldpbGLKp8P7hS8Yjg0JZIVmY8ntFXG+I53kEss7sW9/+iFF/RhSISIuWrTObH3H1xirjiYgSRVFIpdDAmxm4sglkQsibN77OLBB77DhzzFSn203Y3BHZmkFvSOKmtBolUlPsw4vTZ2zubBLORcT7/NTDytaYjg6YiwAYV29bqEmD4XhApydZzeIGjjskXxC6wrZzBE6MoWfxQ1m3Z+VQ4ix+OKKQF+9cqVTIZ5ucygi/gkan0yGMYHNHyHAlv0saZ4jkWupKlXxF57M7f0XGFBEpQ13h6OSYjK0SyRoaUynjBm2yEtoaBBFeELG7eRlfRg/7vSF/8r8/4vf//rv02iK6nM1miRmRxDLqb5roRkrgmty4KeAvf/iH3+P0UMcuWIQXn0NQgxsyKvbbf3uTKDXIEXPrtojUCVrbMru7JU5PZYRt0EDRSrRlZLndPmE8HjPsT5lNBEX+X/9bu+SyRU7OnvHhR+K3nHGDP/jP/3vKJUljmzqQ6oxHLn/wD34HgD/4R9/gL//0IaoWcnog9u9/+h/+L0qlFr6kEoySBZqmkUYKk5Ej11hD1zKouo8meYHjNKVUyuN7yXJd0iQkilI0VcKVtBDPD0liFV2XGDViQEWTsMAwTFEVHU3Rl1mbNIlITEVC81L5OQHJC0OZKYsErbypmGQk9LpcatJa3aJWr1OtCfjL1vY6zWaNUjkr9zOPH8aMJmN6PQHXOO/2GIzGTMYzTiUzXxylKImCJev/cqZJPmezUitTq14wltUolnLkJYzFyBropoYEd8m1U0nJoOnqkrFQU1JRg+VI6unZjMViwWg4ZzQSEffxeIzv+4Thl42UdV3Qz6uyXs3OZSkUCpimiaZ8GfcMowTH94iDixYOMZ7nLXuEJUmCrgtK+gsYaSYrGsSqqsryjlcVwfqZfEn3n0UhimNRxwWiz5mmoGjqEiGhmxo5NbeswbJzOSw7g6prmLZYq1TOIwlmeFLnuc6EfvecQNZgqalOGmnolkNGu2ganqHassnk6jx4JGDylcommhUtoaA5y0RJIQzGF6V2jGcLTs9Pmc2mXCSJojAlCGeYlsjCBUlMqkyJw5B5T8KsXJ/zsx4338rw7tcEmuTk2MLxSjiBuLM102Y8PWNFUfnGd0W24MlDh599HJAUdQxJPx1nbeZjnzf2xPvNnR4//1hj93JpSVs9WiywVQs9q9IZCjvhr3+wzm++l+Gv7gn4lG03WKltEE5H/KN/KBjo/s9/9Y85P33Ctasi+zOYLohCoc//7M/+FQB7V98km80ShC6jodBvrfUq46HL1rZ4txevHmJnq9j2iK5k+KvXy3R6h2TtbfxE/HM0OaNevo4bine70SxhVYqsrrzFs+eyT1loMZ94tIcv+cUDYU/dWr9Oba3KNBHU6Htr20ynWQrlJgVdyN3J+XNApVYFt+fKOagcnjpsbYu2BLff3OCX9x4SxzErDcHoGUUBjx93qNcrFGXrh+nijOlIRZNtAVIMcgWV7lnKxo6Q17OTNjevvsdJZ0StJgQmcBPKKz0Oj0QPpqyxx2iQ5exkxJXrAgmw0qhweHBKs7GFJ7Mt81mA4w5o1ESGzwn66LrPykqek0NHfi9PEM3odMbkC2Lftzd2aayW+PhjkYXrj/a5vPcOk9k51bKAE3qOSrVmoGAym4nz9/z5Y4rFMpbsTTmfO6y1dqmWVul0BQRe02OmU4dsNrPsh+X5CaGvcvW2yMy9fPSEVusGmjnm8b3PxZzW32W6GFG08hg52coiLTGbP2IkSz02W2sMuhNm/RmbG+Kdb9x8g9PTXzIZifMfLs6xcwaHLx+QzYq9una9xhvrNqZu8ennYu5h3iRKIgzZ8uCkd0poFCjmS4zGwr7xvID19RoZs8yZhFRqtkecpiyGAs327fd/m9bKmLvPu2SKYj9ZMbwSAAAgAElEQVSHR2PqGy1myYCMbMUw7i0o5ZtkcuIsTLsZVN2j056hqOIcF5smqW1jaBrBXLYPiWxaW/DyqcjwZa01bAN2Ll2h15fohHhAvxfTWt3AkRl8XUv4z/7OP/ksTdN3+X8ZvxawwDgJ6Q6FI7Xe3GGxiOkeedQkVEHPpJiaTtLvLGsXgmRO92hOviCK0DNmncV8zBtv3MaRtMDOfEysRdhreT56//cA+NGPv4+drWNISM54NsCLhqxWijRqAhaUsep8du9H5PKCdABgpdkgjkRDQABF6+KMM4y6LuubEq51Pufalbf50c/+D67dEIba3rU91sPvLvurdM5PKJczTIc+I
WJz4zjC932yVpVCdlOuSQpaSBSJ56fEqEmT2dijUrtoVBdiagGHR5+SBOJCs+wEzwFbUoK2Dz0eP/oh65uXWMheMf1eDbSE4UDWA6kJGWvGpd0NTs+EMIVpm3e++iZnacSLR0IB1+oOxUKOYk7CvCo1omBOtmxy8FgItGUWWVkp0FhZodsTxuOrV0NWq3nek7TrL1/sc3L6OTp5dE1ADtc2i5x2T8nnSzyVhCRBoGMXFCp14dwV8nWmi1ccHJ0ThRI7vXFOEE+59cZ3+OJToXzWN1qcnD3DCyUFaJCl1izw+IHDmtBrPH5xh8/vPOb2Wztosknij/78gHc/3COfEcbO4/PndE/b7OysUqiJ/706bhOTUsmV6HfE/uXzayTKCGcqG7laNT74nV1+/IP7hI40HgMFf7JCfdWlsSeU+csnQzx3wPvvfQOAe88/xzLz7D8/wZaY/cU85Hz/hI3WFrWGuMTz2W1eHOyj6UI2MnoOo1Cn1YpxF8KYRFV41X6AYm6jy2bcrW2bNNSQJXs0axliY8R4ErB+STgpkV9nNO5ysH+MLvvoTEYvwRrgyxpIc6qSzSuk2hkSYUSx0uLJwzPsXEJWEgQcnR4wnz2lXhcXjqlr7OyuYCqyBwEwmXSYLaZLKBH6K7xwi5vXfoPhVBCpTAc+9eoOhn3G0UuhuBPNZn1jD1Uak6Zp4rkh2VxKZ18o6Uu7G8AjNDVLQRLFjCbHpGlKxhbn0QtUwrmKZSXcfyjghNmCjqoVAA3kGuNmSAxwZI+bXmfKQe8V71x/m09/KeAMb7/f4MWrO5z0Ddaq35ayscr54IQoFvP2fZ+UmOGow5Zkg6+UG9x/8EvqtSb37oh3fvMrMzbWNxmOhMOg6waKqmKaOn/6r4Sx893fvcl4MmB9Y4WNbbkOVhFNM5aOsB8pBG6A43j8/f9SFsebfe7eOcCdVEmko9/v9hh2hySykLvnnmAbBTQ9JSPh2UQWmYxBnHgkkipY1VSiKF428DVNAVFL8JeU8SgJhDpRHC/lRVNVilkL7aJnl2oAKoIDR+5NdMzTgxeEzyJCSY6jYECiLiF4diZHpVZndW1teWaiJKFYqLK2us7OVWEgJGmEqoqeggCzyRR37nJ42uazO0LfuCFYuoZhXtA+m5TKOUqFPAVbnG3byqBlTQxDw5a1UqalkTHMZc+uXDZHPVeiuRGhKrIGSjPQFJ0giJjJ2pux7PvVlw7gcDjk6OhA9OCSS55EKapmYNoZCmWh82zbplAt05TP03VjCfmTPhL+Ys586hAEwXKt0FSiKPpy7WybIJMjn8+TN2TvuyQhigMBsZRGi++4zPw5QVs23Q3DpeOVkZC8bFY0fDazGvmsuKMLRZvWxptosl5NUU3iUEfVfWJZ3zSfJ0ymPRbjff7Xf/ZPAEEtrxCRkS0dfFUjny9SKzVoyBqT3uiMKA4olXJYsl2ClsQEQYCuSWIIVcUwoVQq8c5N0cJhY32N3qTHd3/3NvsvxTm6//kTNE3HlzWXSmwyH3TJl7K8+8FXAPjpx3+J72cw4y8bYQd+Suh7bGzKOt6Hc+bzPopaxA8kkYkGJDFR4IGso1vfbXJyeMJHt/4aAP3ZYyIdhpKqHWDiP8WNPB4/EzBkK1vnyt4bHL28z56ElY/n55j5PHMHCrKxqW3pZBo12lIHXt2+wmh8wMTN0JOBaDMbUyjVCd0ZkWz+a0YZCrqBPxdB2lPHYD7v8fj5Pufn4h5dX79FLVPCc/b5/X/v3xXPy9uctB9g+eL5j58+Y3fjtykVHZ4+EaQTZqaGP3NxlSEnp+Id3y9+l27wbxgfic/4kUGz8Sa5bBU9L3Tej75/hytXvsJkdszLQ+G81atbrG1mePJc6MnNjR26nTGu72FowhFtNW363ZRGaYeTE9E/0dQNyoU3UTxZ1hHOSdKIj771IX3ZeHc0CLh14z3Ou6+WjoypblKtrFNvCjk/76ZMxyE3rm/wxSc/ACBjNVnMQ/KZFrubwqmdu2Me3D+UwRlYqV0lDBJ0HV69EnNvrGwynvgMhwNU2cn8xo03GY6PsG1JZJYtcnJyiKWVKOTFPerME4q5mEotw8mRkNmsZrNxo0i3I4KUa2tlomTCeDhlpSrm9JVr32Tv8iZZVcOWcGlTV9h/+AMePBSlHs4sxrWgvnsZ1Rbn/+j0HikW7Y5w0irVTUy1wu1336FzJux2PzS4d65ghOc8PxB7vPpGHifOUDCFTVuo64SqStHWKBry8os99g+O2L2Ro1yWdoGWMO0HVOvi73vPfkzJep/xsI3uCANurZnj8ZMvKDRaeIZ43sibEiYWwWAu184jY9nUtlqkinCI5rOQYDZAVbPYUnfVszn82YzVDeFLjI5jclWbybRLhNiXQT9gOktoNl1Oz8SZXF+T7/Arxq+Fc5WkMSqiSDNfrJKzG2Qyc7xQGOyd42OgSCFfR03F5W9aI1obG/TkwVBmU4yMR6W0zUZTKLvFYkJKFiOX5fO7ImKh5CZUm2v0B0K5X7/1Aab5BW5wxEhmvJJoTH2lQoqPqYvnReGcyXRIuSQZf176bG/XKOTsZaFxvbjF/qs2H37wu0xnwihrn/Uw9SrFsriY9nZuMZ9PKZdqpKpsSJwoTCcDNle3GZyLz7nxE47PDri8J3CnqiayEX40J5H2s2XZLIIRzlxnPhPr4HoOKDqVshDoMBkwHk/QrRNcaUiduseUSgWGYzFH3/cp5LL0R68oSnY7M9jk7p1T4jBDKqPNCzcgUSPuPBRRq2vXL5PVtzh51CejC6XVqNcJkh6vDhcMe+Jduv1DMu+WSKRh+stffMr21Yh68QaqLDh/uf+U87bD2trasjar2+tRKa1yeiwzXq0supVnY2ODmuyYvvDbWGadF89OaXeFok6YUanXGPRl3Vv/Dtlqk1QN2T8UF8zl62vo9m0yepaVunjnxSimkG1x+Eys080b24yHI9RY4+5PhNLKFvKkRgfXiQhdcanvXLvMZHZGtSGex7nN83sjKhULD/EugeczGs5ZX9sglI2hZ9Mul3ff5fBQREwySUQ8X5AtGrSq4qAfzA6p1ooUCgXOJHtOq3lEvppnfCpksb7SQlPyjIZPcV1xZhSjyMbaFU5PR8QIua4uVBrVDZ48Fs5AnM65ees2aqjjOOJ7R5M2Tjwhrfk4FwxJ5Qn5ooG7L9SF4yXM5wbVZgbfF+cIdYxiTzjp9nFk80Hf98lnLdpn4n1V1efq3k389EsSmIQAOxdj54UcZDObvNw/4FrpPVxZk5DLFIhDn9kgRV8SFMBo0mM6EHOsNEJq1RazWZ9SSRh+gSzSXWkUSSUCejCJydmbLOT7Fgo5AjdiNpujyDosM6uSqiFR7BJLFaloJqYVMJXFsqdHLvlKFS9USX3hOP3sp6/YaF1iOvFoVsW8nHAKSoJ7ERU3QrxBiqKk3LglLtCHD+9SqZRYW28xHstMSxzzzW/f5p/90z+We5wj8i2KRZ2XL0XQ4t/80VNuvqPy/MUjcpY4754D5WqCI9nm0sRAVRMsy+L2hwKP3/Uecvn9y1xufoPxqXje
rctv8MXDe3z+8L5Yc/Ov8Rd/8udUKiu8eiXOzHmnx6IXoqsGhmRIS1QF09LJ5YRjk6YhpqWRpgmqvLw8N8FQXRRU0kQ2uY1VVE0j8IWOUHBRVZXEVC7KvlAUAytjkc8py15ipBEpMbqWWcpYt/eMs/PHS6M3UVRIVZJEXZJcZKwchWKVfFkYKOVKjWqtTK2+xu5lcWGnpo6hasvnL1yPyXjGSX9C4Mo+PjGoiJquC4KHrJ3BsgwyWfGsXM7CzhjESrokwjBNC10VhBNZWUin6zqFcpFsUTgRzY2WzG6FXzbG9QOmcwfHcXCn4qyNuxN8z1uSZWiahmXZ5HI57Iw8D9UctUqVXC6Hboq9UlXhXF0E2VzXZbqYMBn28GWfG1O3sE0Ly7Io5MW8MqZFvppZZiJRFTRNIwzDZRYuSRIWgcdikdKXhA9xHKFrNpbsTZMvWDQbLdIUKrJhaHWtgNcocevWLX7xsai//aM/+iMqtVU86aBkVR/HOyEeTNiXTsNHH36d3a1r+MGczU0h+3o2y/rKLnZGyEGpUKBYrbCxs0sqncFFOECJE0ZDh40NYSf8R7//dWbOiH5f/PZoMGE0anJ2esz3fiwMNz+tMPM6WJOEJBB6RTdsvIXL/rH47Siu8NG314nDZFmDmBCxcCJUI8KW8nky6qDFB+TnYn0XxyblSwFbF0QOwP7TIeX8Zeyy2OPGmsaLl79AjUyytnBeMcY0WpcoV0JC6ZhVV8p8/P27JJ5kHd59Gyu3xuD0lEuSXbIzuM90pHNt7xoq4jxE3oTx7Mmyye9PP/lzbu5ex1A1VmpChj3nEEt9m0trTXTZN/Tw9IBsVqdzIOa5vfYGvjPj1cNT4kjWG1dLrJYr3H8costMZyc6Y2/7A4o58fyf//JTtvrbVCoVVFPohPrKOmedB0znU2plWbuERhimZHNiTs+fnrDSKFNYKxFIeVFUE8MeMxifYpvCiWg0BBrjgizjyYsvKBVX6XTPmY0vzsOUldoWq41LlMqS7OTwGF1fwXfE33EUsLpylXr2Pf72790ScpAZ8+Mf/5BO7ynPnglW2L3rb/K1D7/KD394IP7efo+j9s+wrCx2XpL4jI4IY403b36N8UwEtbujAzKGucyK97ohcQznnVN0TeibQqFEmiTMRgk3rgn7cDxtEwZQlOt7cu7Tapj4+Ciynup8+AnzOz/m8HBMJZWEQJpFqhdZ2RDrqzsx7vmQpn0fTRdO2UQvUiw1Kdji7rv/6JSVvEqiVdja2AHAymmcdIZcXa9z+y1xPh4cfMLYtfngffHbZTVHb9ShWrtGJO3joxdnlEp5xlMHJxR6o6S3aFYz+IYMHns63aCNE8MlybmQKepcv7bg8HCOmRfop0u7NpNpiBFLBNPijEy+QMoITUI0Ll99C5uIq1s3mUm4TuBNePLyLrmssANbb9lMJx0ePHrO1Rs35R6n+P1TDvfHqJL1s38mU8G/YvxaOFeaqi2LyU9Oz/D9kPpqiVQezrxdp1DIo5sRw74wcs/OHFS1S146u+edu1RrWYJohYwqFnLmzTg7v4MbtGk1haBc37jO06dPyUoDfjzPomgaRHUcTxzq6ajL6sp1kjRgJov4q82QnJmjLJudLsodMhlIIjAUyfaUhUyuwMLtMZBMK1vrX2Gx6DHoCQOztbLKxnqd/ReP0DRZ1Js1yapXMBUXTReXuKmo3L59e0mHNRr4hPGYaqlFriQu3sePn9NsrBMmI5KLMumkShg5nLdFRKG5niPx94hDuNQSUdzHj17gmwGthhDUuTMgcIoshiofviMgeXc+f0E+n2Fto8DZoXienhkRe6v0RiLSe+/eEVqScmlXx3HFxds+yXLSeUkmD0kqDnqxlKPfP+Qvfiwb1W6bVKs7HJ8e/t/svcezZMmV5ve7Mu6NG1o9rVNnVWVJVAENFNBodE8LTjfbyCGtzWjGFc34R9CMO5rxj6BY0DikcYYzPc0eNlqgUQAKVSiUSC2e1i+0jrhxNRfuL8DF9G4WWKTvMjMy4l7348ePn/Od76M5FNA2L+hTyr/Ly5e7c9jK5uYW/b4/h/IoRJweDEjnA4YDASHxXJ1iweLFs6+wHLF+pXjCq4MzchlxAasurWKoBeKkRbEsqqGv9k5IlAFxqoiJmJc33i4zi1U0KZp4el6n4KxyY7NEsSCCcXea0G5ugDFgFopsSKPepVmPWNsSzx0zI50tU3J0sjvisHr5tIWuDzk9GnLjpnDw2zurPHz8BUVZFXvj3Ts0LhqMpzPOz4Wdx7GKO/ZoeFcYiXAi52cNDGvGRBZVup0+2WKAmqioilyH0yaTXp+llWWaV5IsxnVIpkOyWWHDmr3A/vEJqYrFRFYiXp720FI+kWcQRyLwLuYW6VwkBBPh8G1niutG0AErJZIix0cXOFmFDz96h15T0l+PsySxxlhS+TqZPLt7bXxvOA+EKwt5rLTJoYSs5gsxdsbgq0c/5dkjkUVd3cihJgqQp1ASDrDXneLoC1hZyarYCVH1DhfHF5Qq4tLiumIdX704nZPOLNZWqJQXOJXJbT/osbi8xNnZGF1eGJz0Aip9FGxMybAX6x6hH8wp5L0oppjSODrepeSI35v6HqNByNryW6RNYVOtUZ/xdEYgaXQ1xSGKRywvZefEDePBjMk45vTkgs0tyUCV3uDNdy+x/qWkp41mqEaA71tzsppPf/YEK7vK1vYNTONaON1mMopR5CU0iqfoqk0Yunz9mWDcWlq/zac/eYbyYZ7ZQKxVNI64bD7n5pbIQH/w4U1+8MM8i5V1dvfEwX/V6FAqrbD79IjzU5F9Pbyc0W0POdgTyR3TyNJtjQjCGENerux0hkATMMFrMXVFTVCUEE020KuKCagkoTen1lZVhSRKCGJlHsQnCSiJxjQW/lTXdTLZPPAbQglFUWTVTEWVJBcJEaPBxRxGfnAY4/shJCqODD4MRVRfykVJbFSqkM5n2VhZwJaU8alUCgKPmRvgTkWAMpl6DEdT6m0xl2HoEyUxacOZX+4cx8ZOm2h6jKqJ9dMNBV1L5jBvVdeIASVJfgPliyMMS6fg5ClKgp4kDuW/S+hnGOK6LrOZz0gG9b3jNkmSCMFl7ZqGXFTXrimPHcdhbXkFXTfmsMBEzrXneVzTBk1nU5RRf+6XwzgmJYO46wqW4zg4dpr0QgndkOeFqmPo9m+e03eZTHtEocaoKwLMlKPj+RGJqxBL0eBoEmIUwZesalpikdZT2FaaVlskXO69/S7/3X//P/Do6VeMXGELw86Y6aTPVV8ETYfHDYgVvPATkZ0E0k6WlJOjUDBJ2WJedNNANwoUKyKALlUrzGYr3Ll3n2ZdfPdKLU/pD2cM+yPGnvj+yJ/QaUwIIjEX9+7fZ+SP0RUT15VJJy0kY6SJ9YBHn4jM/1J+ATQLDCkifMOgP22zsHab6zHqHrK5skEmK3z3+cEBS7UNTKPGUF70Yy+gcd7Hzo6YSPIPf5zlrXc+4NVzkUyut9rYRoCj9qjl3gRg3LnAzvs06ofcuinkYByrx+pGhuN9YQf/7V/8KVGzTnc
6IpHQ4fbgCtX2SbyErx/9FIDq2hLdVoHioljz1dUap+ceerZKXsZFhhEzGHbI5SfoUjz+6PgpevlDthaEP59557Q6NguLKyATkiOvgZ02KRTW6fYG8nMq+eIiGUcyUtYsVpfuEXIAkVg/z3OxU2W63jm7z48B+P0/2MSwevSGoiqmGT6j6T7LtTfmsNVsNk+jcUUcx2QkwVroJdhFh1u3RbKz2DZxpwP29vbYXBZosJ0bt1j4s032D58wc8X+mwYx036PiiPmvFLJ0GwXCEOfyUSc9zs7G+wdf81l/WCepAzCGZl0jWZLJFxrtTUyMwVTzXIlBYnjKCCih5XKcXwoIXixh6qYpHMiJslYWZaKd/mdD+7wf/3rvxLrEBg0uMTzylxKsiolnBLrGrtTiRLZu+A//+hbuG6dXVdA1JeWLY6Ofsm4J/Z6rjglSek0W2cYuiQa6emUSgXaox5HUlC6UnqfzVp2Lu9xdHhCtXCTvZPnzPpir9+7fZeXu4/YrN7GkRfoi6MBm3dKXJ2L7x4rMboJO6srNMaS+MMo0T5o0xpPeGNBxHREFsF4xtKSrHiPTfQwIYhdDg9EtalfHxInFXZ3X5HIZJEbWCwULTSJMjptn1AtpCmlDdyhFE6vqyxWV3H9IyYjYeuW/Zv9+h8arwktXo/X4/V4PV6P1+P1eD1ej9fj9Xg9/iOM34rKFYmKlRY32byWYzKGy3qLQl5k2LJmHjXUuGr3CNVjAEyrSm0xz+mRuCW/cf8B/iTFF1/9I9/7HdFboKkxhgnvf/tbPP1GZCxevNjFNCPaHXFT73SvsDMB7sihkBbVs0rN4ezsgBs7m4zkDdsdWlRKZUY9KVRbdlit3ufstImXiKpGnAxx8mlaFyElmWFPWTPanSHVirhNz2ZNJq7D9vYNen2RFTs+3UWhTyq1xqAn/i7UfIZTV9AjA461zq0bS+zt7eGHIoNw99a3OTo6oNOOycrSvdsf4NgVCgXZg6UuofIcJUnmpAzvvH2DRIsZDEQ2yLA8hhjcu32X3kBkoIfDOm9vvYs3aZEkokSyVFvl88++5tZd8W47Ww84PDhl5/YqWiyyvb2Wx2S8g57uE0nRyxQbxMk+fpCW3z2l0ThGM0ISOZ/d1pR88XOiaIg3ltBEzWIwuCKTSUtDGZPJZHCsLGubwjY+/flzrmaHuF4XOyvKmPVWnfOTKcWaWJdIneC5O1xdzuh0RBY+ky8wmQ5ZKd3mqiGyGpGXJ5UGNxJZne2Nt6kWN0hmPkgB5FQxxu8mlMwtSkWpdxCUyGVjZr7Iqlxc9ilXDCrGGkPZdJ7NOiyurZIz7tFsiKzt6VmfXM6h3RZ/PtxrUyrkOT8dUl0R89KfDug2VRK6GKrIsKdTKv5kSior6W938swGMbo9Ii21d5wZhEGO89aAUlk0nZrKhFFnRG1NZMAbrTbBTMeJ01xI+YK3Nx4w8Pqsri/x+KGAiAWDFKbpky5JMgJPI+PkCIMRkSQ2GA7HjPp5lHhIck1HG5oM+jNyefHcvSHk8wq6rpOTVMz1Vh3fC/FDkXm18hEZM8NgMGB9R3wmCmISstjpiGfPRUYvm1vEsHz8UKxxqEzodiJu3rtFuy2qYEEgsoHVchEvEOsXByUaV2dkZHN+NnuTvVenZNJpLiWH863KW2TTAaPpBMsUWcYZESYO0fW7BTq2btNzh6Srwl51p8FsPKBYKNMbCdtwgwF+5BLJfgvP81HUPvcfVIilmHSpXGBpaYdmfcIn/yia3L+wHvLR926xvC6ya+2rCuhNVJV5f9qzJ03+5M8/IJtP0zwX3z9xpzh5kygSe0bRRI+N7/vcuSlkJVKpCfe3H2BqJRZvCHs5Pb+iurFJoyEyoV//skk2bTG4vKS2LDKDy0ur1JtnfPjtGr/zbVEVbod9SEzKeWFjD79+SRhoDHo+p8diDi7O2wSxzsH+KR25NqqWIiGaV2hsO4Wm6FiWhSKrWZouqkCKmoCEJieJQhQlKOq1rlZIFHlAjKpd6w2qxGGCrpt4skKqoKEaGSypd2SoGqoiBOyve2Hi2GM2crnsC1s53X2F53lEJPPeCcMwsDIL5HIZSnLd8+UiCyulOZxZM1IkCsST2ZzkYjx16Q3GeF4wt/XrapuuXcPMDEzTxLJNHFkZskwNxVBQE1BikWFXEkCJ59TzKBGWbZB2UvNqoYqA6qmqOte1CsMQz/PpD8T7tTt1fE98RxRf98wJSJKua+TyEoJjWagZG8eSsgSK6DvyvNm8mjWaTRiMh0RXTRJZ89I1G83w59WtSmkNO5PCylrkcsJ/J2gEcYBqJXzwkYBZ/fVf/xV+5JKoUoYkmQIp3NmUcl7Y6z/+3d+yuXaL7nCAnRHfpaNh5VRMKZeSXXLQjYTYi0Bq6HizCd6wx8nBiNCTFPWJhxeAKYlbFMUjRRHf9HFkBaOayeKmHaqFNDNN9Mwq/hjlvkIkIc7T0ZAEk5gJjiN7WtIhq9VF9o72uJDUzZryJt1Rg5msuJkZlSJp+hcSigCslVZJp3xcSTSkeBati0s2b9RotkT1ruDU2Nks8fnnL+c0/XGmi5XYtCcCOqz3DdR4xPryXS4boq8mny3Q742opheYdkV1oNdqsphZ5DtviX28ePt9BoUG31v/iETCIH/19BdsrK/wxU+7NGzhW9NOjvPJPht5ETs1L1uUCnm8sMCwK/Z/plhhMO6BusKG7JnTNZ9UOs+TXQFRX6guUalm6fSvyMvWi0brlJXVCkqySDYvKxb+IkmszmF63jQmikY8e/WQ+zdFn2unc0Ucd4mjhFRWzOnx+VOKxTxHxyLuU5WIjF0imE2I5L5CMXFnbTIZk70DsVaVSgHLsji/FHPX703QlCwXF39DhIA//M3fjkkCm9WNNGvrYt0tRSOl2CiqoPt/+XhIsVbl1dEvCWUcdHU5Y3mlxuHeS9bWNsVcKRUGozPGEzHnQTgmZVTpzg5Iy17Gfr+PaSVsLd+i2ZDSOUGXSqlMty/8TSmt0222yDsVqo5se3ATckv3MKMhi44gvhiPTFS0eS9s536Nx+kU/uA2M4SfaDw5JG+tsnNbwMrP90+IC2OSMCGUtORhssfpSY5cPk2uIj5XW87TGe3i9sQeSikVlLiHN5yxvih6GY9Ov6FWWSFva0wlVNCb1dl/NsaVfZm56iKPvtgnmykwkLaY+Fe0Axc7bdCSbRaGZ6AYA06PpbZZoDNUxihWQkZqkhlOxHR2SSOYEoyEz8tZq0zJcNIWd4mdpW38+oiSvsbpgbDh2mKGTMHgybMrdnYkHNT+jSTQf2j8VlyuzJRJuyEeVDOq5ByFwE0RuqIkd9w/Z+oNSGYG731LlLH7nRndy11mXVnaG05xRyr3b73P6b7AQn747fe4vGxy9GpEXqpU2+ksk5GG6YiAoVzL4o41ploX25G6LHWfXKZIEHZYkMK3YVhhf/+KYlkcOHpS4bJ7RGs4xpDCpqbh0PUaGIEAACAASURBVG43MW197uyavQtWb9hMh+I5M45KqzWmr5ooutjUhfxdnGxEtzPBLl5vhCl6CHZGXu
5mHRx7jbt3HjAat+W7pAh9g50b69y+IWCPu68OefDgTdSUeJeHT16ypG6ScTQSeRAGYYBpt7jaFYZjpSFtpeh1L1laFoFbdSVHqJ3TvIhZXhUYVs+d8vHH79OQulCnV6fYZZiOQqJAGLSXDFi/ncY01zk7FWXs3vgVZpLFkAKX/f4F5WqGKFRRZTNiGJio+gzDzPDtD34EwL/8P/41GztpIkOsp2lvsGCuMQ1P6I7F+y1tVFkpVvj5r59wLB3izlt3MdIa9Ya4KG7cdLga1Mms2rx3R8Ae958e0RpecXnxjMgT0cfGzjJPX12xuigCx4zhcHGwR3NwgiObLbVYRbeOSWdKJJqACvSn56Ts3JwBZ7GyjTeLOT1uUV4UxCLjgc9i8Qa10jITKdi5s75Grz/EqEl4SuwzGHo4uRR9SZahp20SdYQ3SmHLICXSdQICkEKc9Xqd8TDh3v0dNBmolRdyuH6DxJ/iToXN9mc+ZrrHgmTqsm2bMJpwVu9gGeKdC9kCmuaybLiYS5KEZTyhHmcYNIS9GnZCSvUYdsYoiwJGli+UGLcs6s0RqbTsKSFEwSBKxD5OYg9vrBPGCu2BuNBWiktY2YTOWFxoR9MUx+fHJMqUvC2+ezgNiEIPXS9y+464IEynMyHgLSPHXH6Jna01grGOqQtbXF0TB1257Mxhs5cXI9JmiUpxE4BHjz8ll83hzzTKBcnM58eYKR/Vi4lU8ex6ohORcM0NEM5iFNVE17JzIdxEVxj3E6IkR68nDgZ/OmbsuYwky2HgQTAbMZ4OQRF7rVzJM/NGVBc1CqfiGb74bI+7b21y5y1hY/9+7yXVWgE/mJLIJk8VhRePOnzv4xuc7grbG40GpDMbMiAFNVEJoimOU2QSSwaq2RXvfu87vHzaYTYUe7k7O2LJ+R4rC8JvLRQz+KHL3tE5uuwfO32xT6Vq0G0FRFLM3dUGNLtXrK9K6Kka8db79zCMgNlEMLKqiokfR7RbE6yU+K5PPvmEdnMCsfjzwe4Zqgq9/oSJ7NkbDSeYegrdsOc9SClLQ9MSTFP4RdtIoSgKs9kMebciISBBiAZrUgSaJIUamyjab9gKE5iL7wKomoaqmKQkpDIIItJaCj+YEkfXpAwR3uCUq4nHxaHsa4t0PGI02bOXcyqkciaWnaFcFv48U8xSLjvYmYKEQAoiijCA6eyaoGjGcDDm/KxNJMmOFEX0NymKQtqS/Vu2hW3bOJJQI52y0UgwdZ3omglDtzFSHlE4JUquL50aKd3EkeLOJCqmps8hunCthRUzGY2ZyB6vdqOP54dCyFgOVRVsgtd9Z45jY1kprHQWJdHk+6moanouiNztNaAn+uQUKXKvmiliJSCXc+bweiNtkiQKioQqK7pBEE0wDBVN9o9d1btMp1PGkz5ndeFL4qmO63nMZH8jcYJpWqK3TUIhLdvGyqvYjk1KQj3tdAnD0OZ9LmEYEwQRxkxh1JOwoKBPHE8Yj/vcvCWCKyO7hBdOUCUAqFap4kqfbEobIgn4yU9+yr/7t3/JR28LZrwbtxb5+qtnqJHwy4aSpt7psZxrzed3/d7HNE4fY0o9ueJWjqOnHZxRne1Fsa+GrUt+9bMDgiQilRGf0/QMvSuDTFGc2TMzoqDWGHVcqmUBY3KNDie9GUZmhuGIOV9ZWOfTh19xFUqY9bOHREOo5b7B8cQF7HD/GfsbH6IvReiI89DwLd6+9TaNpvCB5dIaz57s88F33iLhWjNzRDqtoqsqliKe8/b6HeIwZjAQv//t9/4T9PSAw6MT0hnhA8vlMsFEwXJ81stCS/DZy1+DmcORfuTps19w5+5b/Mkf/Df89Kd/C0Cp7OCHA8LYYElC/s8vd5n4q5RkEmjof8M0VumfDFgoiDN61G5TLuYI/RHbmxKeXSpyePiIDfn7+ew5drZKs5fj+ELMy+b9DRbXbOoXTR4+FwlJM5UhVgNiVfjcybjN1q3fxUhO8BRx4Zr5A/r7JZZqa1SlEH2zPmU0bKIokiVaCYn8Ga5nsSoT9GuVHLc3P+b5k4eYCD9/ctVkZXmbtZJY98O9DgsLWZ4eHhLLWDcOdEq5Erv7Z/Qk89/m6ionBxcUKmLd+60Lhv2IJAkpl8QlybbLxGqK6UDs9Tg1odMySGc1saeBIIyoFgsYtk7OEXOsGgGDrsW6TIQP+iOOT3ZJJnA6/VK8y723CHsD2u0GmiL29rh3hZpfIZMS7zJsdkhlIFTGWJLASg8d8hkfd6yxlpUFiKrFZNrn5TfiXcqrGdxBF9UtMfFkz7y1hDfxMXUdR/a6jkcDnJHJj74j1jidrfH00f9Lszlg8a6w82y6jB9E3Nh5l7ffEfv4i5+J/rp/avxWXK68WcRVXbz8jbs2mWyB3mDI+ZkUMU17qGhs39qZM9cYahbHrmBvCWeWxCbd/gVWNo0nnevjx8+olBe4uHrF+sqm+K2pQbWaRlfFoffk5UP++A/+lF5nxOMnQuDuzdvfJp2xefjka3RDGEalkGZtdR3Zn8x47DMYDrFMh9FE9mWtrDDom+QKylyYzh0bHAxcBgMRxPS7MW8/eIdQaxCJfUEcQ6GwTJJMyebFYlqpBUq1iEcPxUasVldIYoPziyfEkTAw21zgz/7s93n0+Bu+fCgW+s69FZrtCT33FwAsLC5w8OKYpGbxxt2PAPjx3/97VjfzbEs6+nrziM1tm0EzQ/1KihGmC6xtO0wmI4ZTsTarKxV67RZLUp09n9ng8nLA3tE+lZJwiGEYknYMOr0GhinW4eatNM8eNbhxT6qJO8toRkCr1USXwqbL6xlS6iaXzaeMfZHNipUuhWKVhGuxzBKG7TIcnPHJT4UdLG/cRIv7eGGbgqS/bba6xH4GLRBOpf5ygpYPWd9apF0XNnW0f8Tb730X3+sRTsV8ttuXbN3RGTWv2bssgjhkefkmVk5sWN/1WFv8LtnsIr/89HMA1jdvcXp6QsYSvVTFYpF6a4iBwlVDONve1RhFMThsfMV1D0LsNRl0XOTrEZQ0DCeg3W7P+5TSOY1ZEGCkI0iEQxyNrkibeUYdyRCn6jiWzu7TExYXRdb27t1toshg7NaJZjKYS1l4bpqTK2GLgWtiGnlIonmPzuHhMRk7zZdXV0w9cQn0kzyzWMgPAKTSWS5PGmRzFUHoAgy7JkurOuncEodnIqPn+TMKRRtP0vvqeoWJ10dVdQxTOLYoMhhNPDxX/H7dbeB6U2wzjSVJS4ZJHdvSWF9b5tlL0Ws3mkxZWl6cZ8WnozFXV1eYhsPSirBP1RDzfHbW4O49kVnVVhR63SmRZP28cesGidrj6GhCRjZw1mo62UOVRjcgbYgLWpxE6EaCKvsBppMRk7HHjRtbLCwsyPd1OT0c0+3OSGJx2VGjGWPPw/OF/bSbDbZ2svzoD7d59ljstW7/nO9+9Gd88/Wv+NZ3NgGoLmRxUnn+6I9EMulnP94nCEKIU/MLgma4PH18TBT9gI0tMVfptEUcxySI4DUMQ2zbZtSdMJkKP1UoG+zuviBXUZDkeWTTawx6HpORmLNiqc9oGkPiMWiKZ
885Bbq9fSx9jX4oeiV1JU8QzWjIHoGVlTV+9fnX2Gl9TnFer9fRTZ1bN94llBWS+w8qpLMlVuRa5dK/R/2qw3g8ZfdYBMvd7pTZMIfb8Wk1xHwenJwy8Qy6dVnFYUZCgGVrkIjD37Is7DQEwQzTkBU8VSdOvHmlTMchiAMgQIll0EAIygwvFBsyjnzCSATbqnpNEAK6rRMlNlZOSnfoKj4xcz1mhvQmEfrwnPqlrJgECWYqTRwJIiKxVmkyaYdqVaxdNp9jqVRgY3kBW9KQx4rKzPOYejNGA+GHZ35Iq9nndCoz7hGoSoKVTmFI1j/DsrDTKpm0TSZ9XfFOoWohiir70JKEKELQvF8rG6Oi6zqpfA5H3lYrmoqqBPMqVZIosl9Nw5PEKX4QMOr08U9OkYAFYiVG0VQU2YifzmTJF3Ok7AxFWXVP1BhVcwjDhEJBUq+ns7iuP2d/jOMYTTVIkmjes9fvTRhNhqwsr1FD0rrrBq43I5T0/lEY4k19xsMh05kkmLq6JDhOoekgc30YKYuU6cy/O52xsdMpirk0zsq1oHyEZljESUIcy0RJFKLEBoqkHHe9GaZuYJgapyfChv/q3/4bzo6OScIQpyx6ZiPDx9ArICtn4bCLFw84H3vXi8D50a9Zqr5DT1YZj3tjFtaq+G6XjlqT6x6Szpd559Z77F6Ifspev8X3v/UDXogCDbOozWLGodeZomZEoH+4e8zaVoHzkzopU8RBg96EUlWncSjiDVNbJl3t8/nDX3J3XbDZ/uAv/gUX+7/m8aM+b729CYBuWbx80cApivXsj7qkMnBwsMelFK9Nm2XeePM2F+ft+f4bDsfYdvAbYePeJcOTLulMhoLsCVajNIZiEcQdXu6Kc9TOqkwjl6ms6P3ugz9BH7m8OmnhjcT8ebZKqXSfzuAUdyzWoVwo4eMylILP5dwqZ8c9yqVVVFX4Fj/qsrx2j5fPvsCbXCcEdaYDUBfExffqIqRcUFhfus3jp+IsMowVguklg6sxO+siyd2bXqLpWRRT+J/Dk5/w1ddF3n3nh5yci6Saao25qh8ShwZPHgqm2p2dHRYX3+fZS8FyWMreoj9osF26w1VdzOf3fuePyS68yb/49u/x9S/+bwBaV0MOGg3KlljPSiVHLlvm7OyMQlH2IJkjdM0miRzWq1Iwe3pOffiSquwf+09//8/wgzp///O/JEL2gXkQJSa2ZA90LxJUdYLnuTSlnM/CUo7B4IJ2K823P3xXzGcwwh2maDfF3HXaYwqZNXphnVjagaMukF0w8ZIJvY64rN6894Bmw8OXpBsLi2tcXjQxUwqxdC7lcpnJxCRyu5wdibXqPH3GnY1FLE0yA/aviLwUupOiLG2q645JpdMsVJdBldIdl4/IWw6fff4z8b6xTa9xQnX1jXn8v7d/ygdvf0S3+2sGHWFni+tyH/4T47ficoUCsXrdHOwyGA3pD7q880CUDhuXU7bfWsUwdR49kvAe65x+a8b6+iYAvj+ltpAnkzGZyY1x486GKB+rszkV43CYYNoKk4kw5o2tMg+fPGQwPkNLCaM/PD0gDEPy2eqcwrnTGTIetcnlhPE66RxK4nDVuJozj01G+2yu3yeJx3Nl9bSdZzabsLwksiN5x2AWTBl1JrxxVxjh/sFLvHDMLBjixOJCoBkzjk/3MVOSi9/vcdnoM532WVwSDv/hF+d4Xg/HTnMlKdTv3rpPoWjTaIqFnypwc/su27ct/IkIRLc3t7j3xj3Gcp4K5SGWnsNZ93Asselsc5mj3V1WlrL0h8KYNNXAdSesL4sMWJyEOMUORSPPUMKgarUyL18+YmGpgC3JP0Ivw8qGQTYtaVCHHYbDAWGQEMzExr+5cx8vGNKdzNjfPwbgnXfvMXGn2JZw3Jrpc3p2RHfUYm1DXKQG/RNm2RVu7uxwJeF1b93ZoLiyzdljcZH6/Jsfk1EiTl7FpOTt+I/+5C3a7RT5zCrWspiX43qXctFGk3StgT+ikLfoDvvorsjibG9U2Xt+RKd3SlZCd2Klz3vfvsfJM3Fp8Wcqg8mEYWtMcUVWAlYtmu1LdGdGVRKJDDtTHCeHFwv7CeIisZtjMhojUU+gFEkSmAUtrmkps/Y6mmaQSou90Gy6pJ0xuYJG/Ur8x37v12zerhAFBt2xgIgUjWX03AWGDHCXix8z6s24NIYMB8JJdrodHl/2+OCj72MVxDsnrkfj6jFvvfnHAJiOjz+dsVDd4vRKHHqTWYihlZh6fYiFU7bSEEdZdEkQYqdDes0Jll4iDkXA58djmo0hZaljZOhpdEWnkCmzXBXBiGOHKGGWXm/A6rJ4piCMGQ6H3LkrssjdzpDzi2NKFW1Ot32twTUYzXjxShyylmVi6Bkuz8XpPPRdwnhAyrRIyWrzWcvDLjpo+1NU7VqHKQQ1mjMKTsYhlmUTRxrNhvguM6XiZJbo9WdzcoXQm9IZdDEkg1m9XueHP7pNuzHgzk3RMHxw+JLJpINpmriTa3Y5k3AyYuO2uLB/9NENPvnHfUqVIr6sgqUslcP9K54+umBjW9hUGHlEYYwq7TyKPHw/RNM0VAryu6c8f17n1r08jiWCm9a0wcXF13z4wccAPPrqAt2csFja5uxCZBlX1tYJAxslN8KTGWdDcYgCk5MTcaFWVR1F1fG8gNqCYHGbTGwy+ZD942+oVMU65wpZvKlC4gs/1Rq59DoB+doatiPW6v2tJdyxwf231njySESL927+15wfDgk0sbbHh232XzVpNab4sgJ9enpKuz3FmyVzPbNc3kZBm7NNamYPK5VH1zJzVslIMSER1SkQhBNJLN5pJhkhARJ8FEUFRVbvxjGqoZLIbHMYuNiWhmmU5lXGoqkQxb6gqQ9F4i3ypnRGDc7PBcvpNYNmoipYlgjKUqZFsVilWCxSqEhK45yOvWxjWdcaUxHeLGA8neDJi8VoMKA7Cbh0O/jXMbsGdlrDdqSulpUhnVZIWcZvoNdxCIQoqAQzMZ9+FBFgzm06CCRUk5BMVuzjfErDNHVM3SJOxA9GoUIURPO5m4xdxv0hg26LS1+Tc6WhaApROGNRXjKtVBbPb0q4J3NK/zjR5lTzcRzy1Vdf8OY779HqCN9lJjpoKtc4SFUVWmSqqVKVGf0Ve5m06WAYNhrCV7rTCaE/nSdEPXdEo93m64srFmSz/Fvv3iWaRqKSKJM2um4RKrN5oJjLZRj1B/z9j/+eT3769+L9PJ9SoYg7mbJYFXvU93qoms32HZG4eXXwS27f/RHt9t7cxqZJj+P2E5Yrgg1uOPDRdZVklkZBMp1myqwuViB2cUfChnPpMoPR5Rw6XMkt0W8cUKjdYeyLmMfJWsSJy8ryJnsvxd+tLW9x4/YSrx4J0g0j7bJor7H0B+v0m+Ly8c3nX2MWDZR8mxe74qxVNYc3332bzyQt+cbGBsuri5ycHLGyIuCT5UKZQX+KlcpgWyJYPT+rc/feJi2ZJCmUVRIvj1EI+NUvBRnH7btr7B+ckclacwkcXS2wsFIkEwnyg5VyyCef/Jj+SOXWexJxkrlJr3eBEuuk
pZzI+tptjrs9JlI79fjFkGqtTDaTUJf6m9XqTc7qpwRRjoUFEXsmzj7lxTyy4IZq9ZlOAy4OG6wubQq70xP02Tp/+IMVvnkiktru5ArNKNK6EvN0/+63mHhDXux+Rm8ifGW1eIsHd77Dl19+ydameJ9ysYKih2TnrRAK+ewSmYUQXZ7HR3u7/PIf/oZyYYs7D4R93Hr/TY6uXs3nKYnSxPGYweiCQlEkFoO4wXBYolpZQJGkM5pvs7G0jNxqrG9u8NWjXXQW2KgKG7pKTkjpOUZj8dxZu8R42qTba4KsZke+DURU8hsEMxH7Tr1jFmopUrrYe5GVp1jRiGPYkXG7qlzQbk9RrRqmJRIgj54/JmMssCrnJNFmLC6s0Oufsr4sbCpIjjk57uCkU9gS4r9TuEXntM5l4zoxXURRfUIvYKEgYpJCeonZNOTi8jkbNZG43Fg0UZwWL5+L/b+1tcm7qwt89P3/gtAQz/5/7v2PPP7yE6Z+wJklzieJtvwnx2tCi9fj9Xg9Xo/X4/V4PV6P1+P1eD1ej/8I47eichUEAR9/LPps9g726Xba3Nr6nXnDqe/12Nt/iKmuzNXfVT9gfX1d4PaBXMnD1DYZ91Q0XVRR1lZu8Vf/7m+oLqmkFJHVWLhdxDYXOLkQJdZmp0Uma9HqgpUW/y8MhxQzBYbDgFxGVpL0EHSXRJYqA39CHKksLpTR5O2WRGPkNginbSxT/F2lXKPTHjMaiexatphmOOpgpbZpt2RWs7jC2XFDaJ7YIrP75a+/YWnNYXFJ3MpTZobziyNsszqn23558HMGQczLL01++IcC53rvzjoPv94jnIl50XIGlYU87rhCiMj23r97g2bzct5Qv1Z7k8GoycJSjrbsqxm7Z2xvrNOZvmJjS/R91Ot1MvmEl/ufARAFE9bWa1y1G0RI4eZTl1Kphuv2UCIxn3E4ZG1tjVimHRUs7uz8iHbnjGpVzO/pSYN0bsLm1gqTkVh30+lQskokEptupRXsTIpV520USS+c0yeYSh+rPKMrm/pbJx6PH/0dNUldfPPeJrcXb4KhsLkqhAafv/wxw1EHx75LGIrf6/c9Uqk8PZkJHU3qOC602pC9L6ooP/6HH7NcW0VJPMplkdmxcxle7D4nlJlPO5th+9YKv754hSuz6SlCFhczoBhoifjcwkKB8cglY8tKVl9hFl2hp8GROOnBaIaiafh+MhcRHXs+4/ExKfOasnaGZtskWomzc5FlXFtf5Op0zGAY4SWq/P4WpcISqibW5f7qAsPOQ0jq3FgQa7xVdnC+/yGnlwNmYzEPhUyEtfYGTSmDkPYCUHJM3YhYEicUiiaWXmEaXGHJChBJirXtEpcdkS189uqI2+s7JIEtYFxAHMBibQlVk/aKT7VaZKG8wrAvCUnikPWVEi93X10jKsnlCqxv3OXxYwGfUNHIZdMEswGB5A4OEqmbkV2mUhFVm1bnkMkkwUwJ16crJqOBhpky6A9EdiuVK2NnXVQtBln50FSNwJuR0kW2O2U6KJrL4eEl3kzY9db2bVotHc3Q5n01gT9j7A5xW6Jyns1ZoLVRwvvUr8R8/vM/+lN+9cXn1ColnJyUiBheUCyUGbXFd9+5X+Inf68QRGOQEgeWlWeQdHn01RlvvSeqYJatoyQJYSgx+1pC5AdEcThvWm40L9nZuctk3OTeTVH5Sxll3OefM5PNwTt3VrmqP2bihdy8K5ufxwPabcgXF8hIYWFT1wjDmPt3BQ69XKpgGDZXjTPOLiX5SHYVTZ8wnnaIpDTIxvoO3qxDX2rttZodlpYz1M+O5zBWP+6Tyef56pcvOTkQz6WELyiWapQk1EW1znj/228x7qeoLUvtm90DfDeHH58z7AhbfLX7nKuLIeO+1ChpxfT7XSZjD9uS0C+ljq6bpKU2jq6mSJKEJAlxbFGhgYjI1UCfoKrX8LMIRQ3n8EJdKRL6I3yzOxcDnvkqiaqhqQaK3I+apqFbCnlH9hbJylASxajqNXnMkPpZn7PDYH5ah7GKoqbmMghpO0O5XCWdTZOVJBRLqzVMM4WTys97oAQVepe+rFL77ohGV1Q2o7nunIpppbDtFHZaUunbKdJmhJkStq9ZOqaVJgoC/DmZwwxVhSAeoMj+tCCYkIQBKV08p2k4lAqgqw6kxF5TtRhvBpGfw06JeVlaXqT5sI2Zk3ICUUSMgqr8RjxaUSMm0xGu62Iast8oFTGbeai63LO6jucGhH6AOw7n32VqKqomILQAhXIJK52mJhXmFU1ANqfeTIhWA5o6xZvFqKpOrAhbCAMNJ5cipYjn/PKLX/Pjv/5/GPQ6c6F2004RhiGWZdFpHAvbawbkK0PO5P73Y4ez/QNW5FkI0BpcsFGr0fOE78xmApLYolzbInSF+PHuy+ekuhkm/YjsgvAvjrPOi+dPuJAVoXvvrjHoRiTKAF1KDngTk62VBzi5aK55NvMGzNwaPYk6qJJi4gUMWgeMr8Q8lbdszo4e0gsD8orcf0nMyVGT25LSvdms48+m2FkFXwoLu+MJtu2wuJDm+ExUoH/w8cecnx8zllWclFUmMUccHvZY2xDVmEFvhqqFLK+UOTgQnyuVs5yftehLSYVIX2D7gxs02iPUtIjx9HTCQrrI7m6DalnoFLXqDQadKRsrohVi++0dXux/RswIQ9sEYGOpzIsnuxSrZYa++L3puE8hu8RgIOK+6TBgGvcwDWcOSQ/CMT/76lMO994gLXXJuu2ExdUshZJAGVw1umys79DsfIUjoZhKFBN5FqVMDVUebP3OgPPmEwxdvEsUT7DMEoqbolwS+/HNjVuM3ymxd37CoC4qpGtqiOOE1DXxe71WxNpGgdXVCuWS2H+aUabVuiQIAqrL4hmCTsKDe9/j5ETY5l/97f/Ck+dfUna2cAdiH9nmEp4XcPxMzPk7b/2Ae/fe4Jef/wMqwqZWlmqcXrQZeLt8/mtB/rFYu0G5lOFcxtrlSoHBYIyja+i6sNerszoZZwUsj2Ak3q9QKJFJQjzZV285AcNBnVJmhdlMxAROPs0b98t40wBdF5WqvWcvyRhVvvt9cRblcyVeHj5FSRlMJI29lffYKBdp9ftcnYrn2lhbpusOMBXhz5snD4nUBeKvP6E1FVXNje0ik9mM55/9isQWz24Nxb74p8ZvxeUqZav8+gvRZ1Ne9ilktnn2aJ+dW5LRy9Gx0iVm7phCUTiNIMyhKAma1LTSlSyg0x/tc+um2FC7r07IZxeZjhuEinCu2YzG6dk+l01Rgk+VcgwnTSrlZUxZKvX9FIapERoRE00s3Mjtk8mkiXzxmcGsz/273+L0fI8oFp8xDINSJYtprtPvCshWp3vJyto6k4kw8P6gQdraYmmhxv6BKH+XylkSNaBS2abbEs5ta2cR054yHYsFrw/qFMtlzHLA8ZGAed1/UGL7Zonf+/42/bZw8K/2vmEy1nClqGAxdw/PHzMc72OkxQautxMmbhdLFxey3qBOt9/AcnbQzeuN6NHzT/DNMftn4l0G3Tqbyys82PkuANXcMqEf8nx3D3SxEV4dvkQ
pD1A0B10yrVn5mP36PmV5iXC9C6aTJZYXV3i+KyBHhXJC4NawrDKNoTD6SSNkeS3H0ycCurC4WCBlB6TTJn0p+OiYDp5qU8plUNckfDHqc2+jzIYrnN9O7QGfdtpMZi1+9StxCdxae4edWzWePn2OJgkQ0CMWiuukZJPo3qFLpz1EN1J0emJz5goVekMPR/W5Sa0OOAAAIABJREFUtSPK9JetAZquYKfF4RypHaazAb//hx/y1a6AWUSjHFGisbmxQ4zUoumeky6YHO5LmN5ykeb5Y/KFIte4QNdvoCk63tRhYl0LU7fZ3lkhJZt6l5cN1Ejj4qQ/hyr1Rx6NsynFhRyq1NHx4z4KBWYj8eevvzxiebVGRouot6+kvdyGcJ3J+KesysZp03Y5ObtiGogAN5no1FtjGr09bBl0Rp5OtjijfVinlBWXzpkb8stPn7O4JX5veaWMauiMRi2MUCQfMuksYTyltiACXCVU2djYYDryabfFnKcdg6dPn3Pz9g1aLRGQdDt9wkAhL5MfV/ULgiBke32VobwkZa0a0MaxHDot8V0pK4uvKoxkH1gqBj2aUe+CYcqL8OwCSy8Sp0w0Ca+LCTFVB1UVfmQy9tFVk9t3tqnXpcBsktDvx6TzHonUQPOCiERR6LXEfvzg/SVyhSyuN+Xg8LGYp8mE2/cWODocz3ugarVt3OnVXHvjn//57/G//a+PME2dmdQDcqcqacfg058/5M//QgQkuq6QROG8yT4KQsyUjabFqIq4uD19dMFHH1apLZn87BMhCL6xfpPawgpXjbq0zW9474N3SWINTeLOVTVLNndMzrHIZzYB6Ay7vHHvAYsVYftX5y20TEIuU+KyLqDKS3e26Y3a5LNLvPeO8B2/+PQfKRZyXEkR4+XVCoE/xUl5vP/9fwbA4+e/xnVjvvX+d9laFkGDU1J4tvuQszOxLltbb/Ly+UucbMTBZ+Lg3dm+R5xzUVjlzl3x7KF+xH/2X709h94dHTSpVdc4P2vw6oV4zn67jKIo7L8Q6zkZdfC9mMDX52QHUagT4VMoWPiB7N/STWEXUr9KMWcYakzsp67bKYmUmDhMSIwE15vK/6eiqgop/5rgQiFWwA8D1P+fYK+ZtbA1B0PC48I4IFESkkQK3Lptzk+PmbkByTWpR2RhGAa2k8e2xbrnCjmKlTyZnAh6i/ky2Dq2nZrDF33fF4LFU4+p7AHqtDqEs1j2qIlzTlVVstnsvE/JydjoukYmlcZ2hO/KZ0ugJnOokmArDPCUIaHUj4kiBYUAVYVEXlpypRKJ6qEgzotEcVHRicIIXZJ6OI7DxcUZK+s7NFti3VPYKIYOkmVQT5mYpommKaTkhctybFJ2CttKzYk8JrOA4cjjxYs9aVMbWJaJbqbm0PkAj1ngC1012TOnxgrH+yf8/Cc/BeD5k6coxOTzWVQJ2QzDEFSFKIlB9plub36LT/72Lzm5EL9378Fdxq09Tge/YQv87ht/ypdf/xzbEdCoWrVMY7TP8VmdjTXhK41KFpUst9cdmk3hm7vdIbXKNouLwsbOL04YdNtctD0qBQHnr5UKuGOXbmtCJi385/HgEc32Ipsror/KMRTi8ZSVlTfI3xOfOWy9Ytt6wLA3xpZwUMUIuKhfEiRZaXg2b7+7w8GRQhz/BvIXzFTiOEZBzOfB3jmhZ1KSLMf+GGrrVWYzUExh11989iX/7Pd+yMV5k4vzupxPn/XiFotbYg5i3eWLL464ubOJJfdfMh1QWcjzxAvIrIqkqJmEjIZtFvNi7cxUQqFWJfGHlDdFHPTe7VWS1jmeqbJ7JYLqpdoKh8/3WVkT0LpSyaayVObFi/M5gdV47HPnvS069TOmA3G2Z4p5Ru4AJSXhjEbC5fkxKdvGlXqRS8UsF5fHrG+s4kgB8sPdIYZhcXvnA7F+9YeEkUerO2LSFv7m5GgXJeozmA5YcoStR3k4nVRZXhRrpVRc/JlGSq8ymUjtrZGOrscYWoWy7E+fjC84OW9TlMnHq8se93bukygxT/bEJf53P/4+3U4fdyyFjfunKOpdvvXBdzg9FRcu3zXZWFrCcw1eNkR8+l/+ye/zcv9n1GS/09XJGe+/d5/dkz1OT4/Fek5ndFpTOu6UBckO7lQ0gt6UgRRWLpYMKvk8k8mIekMkTGuLWSZ9l8VaibzUPFuvjShvbvLySLQqpFM1fHWGNhywJvui3bTJ4fEuKys7lJeEvYz6Z5hUuL0mbMPWfE7OfA5PDkhlrrVMYfPmOvfefJ9QEu3o0m//U+O34nJFopPJCiNM6+todoRtK6R1EbxiXRD4U9LphKwiMJfZXJlOewSRDLLtHMPBmGppB00Rwdxl60t0o4rqLXHVEJvF86foVoJpCCO0Q53u8Jz3vnUbXzJgPXp+yKTnks3Z7H0j+mhW8stkMkU0VRjA9s5N+sMRUy9Gkc681amzdatCr6NwciZ+b2W1zM9//hU3b4gM8WRikjcjzi4a+JEULZ70idQZJ8cp8jlRQUi0Hv/z//Sv+JM//SEAISq93gwFnVJJ9GEkhHRaAWPzkmJJ9HT1Bzn0dERZE3PnzSKG/RSxeYAtGeGUTIWp5xNK7LY7cXGKNpHi4Uvw7dX5GX4YUV60sGUvWmDD2WmfN1aEwz8/mdHqvKSwkOVv/0401Da7pyyup6hUKii6uMwZU5vxcIom2VmWFm7jzoYYsyGmJL3o9wKWqkUmbpPaomSXGWU4vzhhFonnbDQUCkWH/f19qguyajOckkpyNI6PeftNkbEwqzdo9DxSt0TA98lnP6NNl8pimazwo5zU97AnQ5yMjqFLZe7EZDodk8iqQ6WyzMl5QqJ7PHt2Kdd9nak3Y9r2uMyKCtCTF4956+3bRNLuFCPHctXh4vicjZtiiy3mtijmipxeXjKQivBJakzBrmGci0O12WmTMZeZjCYEkmLcNsrEXshKdRkjIxs36y6zkY7viv+XyltMByOWl/Kopljj/cNDUo7CyGfOyKYZCQP3FQslUb377NPPuT1ZZ31hlbFkiTo7f8HCpEHa0Xm8J5qbF9cSVEPD8MR3H+7u485c0rkSSSADGctkOPTRqZEyxO8F/pBqzcafyr7FbpvSVoalheV5A703HmNoAbdviYP/ydd7DAcdgshHN8Uz5Qs5FDViOG7gy2ZyRZthphICSQyWyxukDANd0/Bk1fb73/8h/zvPsZwZsjjB5WUPTdMYdMVnVmuLmKpCHHYxZAP9+tIGhpYh+OU5saSaTdSAhGge9Fq6Q+inUROFnMwId4cefmSiBwlIyvYojtAVjZSkyF5cVfnF57/i4w//nPffl0mK4gat7j6TWYNpLBIJaauM7ZQ5vhQH3K0H2/zu77/Pv/lXn1BdFHYWBgop06LZ6vP4kci0ptNphn0f7bppL9GAmNFoNBcy/973H2CwgD9OEasik+yHE9ZqP+SyJfp/DD3N0dEZhWKZ02ORiXzjjbfY2C5ysPeS7c1NsQ6Jymjs8qwunrNUKPLNo5+wtnaDrCP28Xg0IvACVCXmxXMRUBayNRxHwTBkQkTzCdw8GX
uB8xOx19JUWd3c/P/Ye7MfybL8vu9z19j3PXLPrKysqqyq7q7u6p6e5nCGM+OhaJESJQiiZMuAV8EG9CL/BfKT4QeDNiDTfrBli7YgShChGdqcGXIWcqanp7fq2tfc94jI2Pe4ux/OyRwBNkk9+IEG6gCFyoyMuHHvOb/zO7/1+0VRDRTZt/fyxRHVaolQSCxorzsgGb5BJNrENC4i80l2D16SzaV5tS2eLxW9gTOaZ2dfQN1HYqIpOmTGSCT1y2fZuPompyciuBMPFRmNZjx//hhFFRUFR6d7dFsBZ3tDshLZ9bzRYdgH2xPzq5ojdDVCLGShSadM13xChoKqBhhybXxfQVMNfNmrZfseihqg6zqGzBI5jkPgO/iBiyONRxeVwFNRA4mcpYXQDINI2L+UT0+dAD6K32UqAYkGfZfDPY/AvYBrjxCoFrFYnKgMiGTzeSKJJLFEnKX5ix6WFRQ/wJGbzXJd+oMRs6nLcCiN+tY5k8kE11EvM0mGCXpYvazGiEdN4okIiXiWsExu65ojqAI89zJbN7+0iBlKYdnCsNEMncAX83KBPOg6PtZ0RjwWIZEQ+2HqjvEC/xJQIwgC3OkIz4LpUDqiloUfWEQiMapz4hwtlwsk8iGW1+WZ6Tiit8udYUqqiUw0g+c5NBs1Ht4XZ93h/iu2Xr3AkqjGoVAIXTcYj0aX0P2e72NoCj7wwVcE0mm/NSAVv4WZFOfA0G2yMv8mcb0AfE/IgmOiZWx09xfw+zeur/DZRy84awidWyjlsPs6nZqHGRaycFp7RSaeuTQ4U8kB6USRrf0tNFX2FrpRHt47JFOMkJRBkphyi4iu43dFQNRK9JkM2thGhOPzz+U9FIim5yhXmkwlkIliTImFUiwuCGN9Z/8+tbpHr1e/RNiMmiYn+x0WF5ZZKAidF45NODt7hT0V6znozginIyiYRELCTrj7zi3Ozxu4sxDvvCP604ejAafjc1yZ1Rz2e2jhCJ6mEUgi9dP6Nj978Dl33nwbX5f9qWqKSqHA8ycimBtrLmEkI8yllzltiCDXt39/h6srRba2W8wuEDZVm5tv3GZhQfQ7vnx5j3ufP6FQrjCbivN/Ynsk7AVGzhGGLRFqy1msYMhwLNZvaWGB2mmNVGQd3Re2Yb1+RqFcpNedXJLMHx7vcOfuDfYOngmZnrqsrsU5b+1Tb4gAaLmyQKm8TPP4CWe2kNmJm2NmDOh0JUkyGTzf4fDwmDfeEETGhpbG9Ye0m2KfAthjk2G7znQoZLhYrtAfDogns1gz8Z7eqI1uplBM8f3pbJH7jz/ixuYijfaBuLYyTyqukIkucO2KkP2njz7BV2ecSFLhN994g3uffEyokGDcFefvm7evsP3sgJWld7l+Xczdp48/QYsW6E+E7T0dJEmGKnT7D4gnhP3f7U4Ja1ViaYX6mTj7er0Buw+2uXNHgHN0Rw1QOkQmEWayhGAyHJDUIvRq5yQrQmYnAw9dGbAsz7T+eZeFhSm3bl7ne9/9EQBrVxY43q2RSIbptYVtVikIh+3PGq97rl6P1+P1eD1ej9fj9Xg9Xo/X4/V4Pf4/GH8pMle+b+P5Iu27s2WTiEOuGOL09CKKGmNhaYFarXZJpFoogO2MKJZExO3s7JBKpYJKgsN9UWoSSazSHwyYTXdZvyrKZlrNLiFTp1oRXme/fUQiWmB/22PqHwBguwbpdAjfGZKXZLypZIrT4w6GLB3sj7pUqsv4qoEjyc4m7oCtl2MUTGaW7M3yPIqlTSxPRM4836HdEchg7aaIqqTSaVyrTygxZeSIKJE1cnjv3Q8uiX7z6eu0B88xDI2J5ChIxOdZ24hTO9/Ck2VIx2fbmBGDVFSUfoQTIWqdA65dewvFEBGhn376bQxtQjEqMhghPYVrhTlvNQls4c27LqSjUUbnLrYr7qFQLBIky/zO//bfAhCLqrz19jrNl0dcuSqJInsbPHv5iNubv8xIMiBG477IMElellSijBWa4jgTUgmRtn/45B7xxHPUoEz9THxuaLUJJUwiE4mcpWbo9DroSpa7b31N3OfUYzKZ0OoM+fyekKG+W6fujfhjWZ97o7JCpZqhPbTQEOsymwW0x/tUSmmODyXaVBxa4wG9nlir6TRENJak3XQImyIOcXy8TyIZxho7fPiRKPkrlhPUjjvMFUXkrNvtclTf4fiwRXFV3HvQ36NrpNCMOGGZ/RyNcnTHAeG45NlxdVzbpdseYsvad6sfJhlzyS8UePbqSK5DGEUbMB5IfgfDYDp2KWZ1VJntqVarEBrhTNO0akI2fIbozgIRSVD8wZdvs737FMVyaQ5FiUMkEWFz9RonJ0fcvnnRZxKQDefZPhLzm4qpLK2V+eTjY65dEVlMVR3R7kzJZ1O02uJanucQGBaOJKGOhZNYQ5j6e7iumJf1hXU6nQ7ToViraFTF9gbkchmGEzGfrW4HAp2ZbdPpiLXyfB+UDupFf5XjcPvmTZJJjVJVyL4js0e+P8KX9XaJSJj5+Sq5pIimndRPyRdSpJIunY5cd3tGrGSIMhPlIgMEispln2LAhOkghpK3SMTFfZ630/iKj215KJ6QM0/1sC0LTUZQVX3C0tJ7dAZDxlPJLdL2UdQZiVScaEzCNbs6vcEIPS4iqB9/eo/rtxf517+v/QKJ0LUJhSLMpmNqx2JNK5U56mevSCbEPlYwmE6nJBJJzg6lvskpRMM2gT9mc0Nwezx9+hjDSHF0JmS6Mh+hkniLs9MDfJk5NhQDU82xvFyi3hSlH71xgwCb9EXGXfUwIjMCzSGTFPfQatUIGVX6oyd4EjWx3Qi4tjmHK6Oj50OHjWt5eucnHJyJyO7iyjr79eekI1lOzkRGz/Fdhk6SQBNz1x/X0JV5up0hltS5hXSMm5vv0Ovvk8mK+dQpo6oT4pLbaK5yhePjfZ4+e8L77/wNAAbjYz75+B7z87LsKhzhbOeA937pFrVzER1dvp2gkFxjrbzJ7t6BeJbWiCefn4MkoR04Ezr1CcfHQ5pN2cfrCMJg13UxzItiQYjH4wQyI2Wapujpcb3LDI2ugCBX83EsIYuqBoEKiuSPU1HwgxkBKo4sW8cLoRs+geKhy5I4Qw8TqN4versUFcUx8f0JnabYV/XTZ7iugD6/eB+ohONF4rLyIJFOkSvkiccSlJZk2VXIQNdhEviCmBmYThyGgxkT2Yt6Xu9zuOeh+DuEZIm47/uE4yrhWIjJROipTDKMZvgo/0YGyglcTHQciYYYC8cYjEdMRyMMWYI36o4JRWO4srfXCzxUTQXNx5TlhIl8nPHIRlF9xrJ0/vjMJjjyiUr4+0QiSjYXJ5qNkZTooM++eMXzpx/y6tUTzusi6u54M1xbIZ8Jy98FabNuGLieRJzUDTRNQ1MU3IlYr3iQwPVOufvOlwE4239BZzxjGK9dysWT3ftMvWX+87//HwHwu//r7xAK3aCSvo4hSZNvXnuTbmOL5188xIiIDIZpJJhYCo1tkX11A5dYcsKbm19mMq3JdWiSz5bIZH06TVH5E1XmSIeznJyL9oz58gq6UcL3fWp74
tx560urHO8+Yf7aFW5fWQbg23/4LwjFVF4+FWd2ubJKtzljeW6NWvNAXKtyE899TjobIh0VKHxf+fK7bO99yNPHooRreW7Io2ePcewkffMC6TTF6vI6itFlb0+cPaXKHIlSlyf3Rabcnk1JJhIUchVevRKv5fIlmucjXr58xsKikM/F+bcYGBOiaaGTMpkcrurS607QdYmiWonyxY5NOK4zlxS6pDsYMjHiRPtCVlauLHFw1GTSd3AVIQcKYRqN54TVBO4F1YMW4uTkiLk50a8aNVIsVrNAl91tMVe3bv0SiYxBqzGgLfdfvhTns3sfXlYnVSsbbG0/IZYuceuaeM3qdmFmYk8zfPWu6HU72dqnOldiMpWluxSIx2xQx2hSL5XnFP70Tx6wsXHtMsseDtKgzRhL2ey1B3Ttc4Yjn/mSqPx5uXvC3HzksuS43VDJl3ROTs7J5wX0vOcENCZHDC2LQlpkoHaOD7n7pa/iaEJ37jUO0JIG6USckWxDePxowPr8GqhNzg7EvS/k1jlrtCjmRTXLZHzKzG4RNZexJN9hthQjHppxfDhGlSX+haUM06bKZCYRxZUZk6GJrtmcD8V+fP/2Bqf3ntIfWhgZ2TvlqMRCYRpnAgUwZKTwzAg7tRHvfl3woh7svSAcTuG5Ll/9kkBNnsiyxT9r/KVwrlR02jVhyGVzIaqVZRTPp9sQm+XKepFu20FXFSIRSTTWi9BudVlYEiseiyap188o5qpYkstAsxRMM0oxd5XJQLyv3jgkOk5z1hD9TvGCxmLpXQ6PH+HLFLxjzZi14mxs3MbXhBExGYxZW7/K9o4oWZmORqTsHqPJgJOauM87d66iuwvMZjMUXSxUszljfUPh/FiSkaW6GGoO3YySyUqI2vGU+lkfr2jgyb6WQnYdrdqj2xMHY6N5gOVM0XQP1xGfyxdLHOx3SWfK9DrimedKC7S7E/JFUbJyeFJjp/acg+k2MgNPpawS1Qp4siNgPBvS77UZDRwMTRidi3PLoCYYu3skZC3syWmdarHAnbvCoHbtKZlcmLOTcxJhcTDNL2fYvPNNrKl1Ccmruhqr84s4klRw7/A+k7FHOpPAmYmNv7SwSDSSpH4yxAiJU1V1TAb92SWUr6OoZDIGrcaYJ8/FIdA+GRGEG0x1MCV4RDSSxDir8ZWvCUUe9n0ev3hCKpsjkAZ3KqnSHUA4doYqv282y6CHTHRTlIx6kx79YZ1sIc1kJOZze/8Z8cQSs4nPzJIku2dTBqGR6DFAQLNf3Siwdi3LD34g0vtmqUAs5ZKPhziQzZWF1ArdyRGxqJjfRDRBu1UnOkxgSr8mmA0x1RCDXgvPF4aaF6RIJ+ZQFGm4+Q6pdBozHKcnmywtd0A+OU+j1SZmiDmeq9xg/criJWz2ZDZldf4adjBBn0i28mgYR21TWSxxdCbKJYaNCN14n6l0suPhCNZ0wle/WeZ0Syiy2cQjkojSbp1e9gTlsgVs30fxZOmgbqMpCulUGWQfRioRJh5dYizLElKJKkbY48Wr7UtoZEUJiMWStHtdNNlErKEwHveJmLL2PrB48eIpqWyGaFiUMw1HwkiPx1KMxkN5rR69Xp1YVMzJcnWNbu+AYW/KWDbw6sY5Rsgmlkhd9se5ngG+gm5e1OxP2dupc+XqEssLohz1k0+3CacV7JFNSGL3+tjMrDYLVQmbHXiousr+0T6bN8WeGU5G1I5dPvjqNQZ9MVfdYZ1sqcDRqdA3Zwd7vHnjqyRTcXw5n4YREg36psmj+6KEwrJmGIZ2yScT+BqGEWLYt4gmpd7IzdPvKKQLfXb3BSBIaS7F0ck2qZxwevf3m0TWRlxZuUm7KV7LxotEYmEGkwGOvywE1FB4tfWYqOR4y2UXqZYd6ic2UckxNbMOmUxcCLIogXgtlhjz7OkLoiFJbZF0eLXVoFBaY2qLdd959QwzMiW/toLriQN7beUKE+ucowPZ56bYONM6ucw8iipea3Qe8UsbX2cwPEYPhNOn6R6T2QkhWV7Y6fRRlRAL82Ua51tSPtNUl2I0zy/6ZZuUyivUT3XqbWlcrV/hvJZg2q7x5KUoD1tb3eCtdzbIyYCdE+6hKRYoKXZ3hZGbz+R59XIPLYixuyXkstexOT1pMulJ+PShoKdA1QlFftEsbYR0QiGTsAzsKZqGFwT4vnBMVRRRfqrrOBfQyIqPr7t4noMmAxCKouC7PqrkSfN9GwINP/DRo2JdzHicIAhEqZ4tdKWuqLh2jV5bfK7dgp0tBd83UGWJYygUEv/CaVIZWZKWSRJNxChXRAA0tBoRjqGvoEhgKM/z6PfH9EdTfNn3dVKv4XkaquxlRjVEL6EsDQQRTDFUg/29HSqLwrEYTqdY/KJfLR6PY+g6rmVjWZKnMPAwTY2QGUG9ABZBwdeCS7lz+jOOTk8Yt475/HNhJ/zat36Vv/cf3sWy59h5Is6x6bTH3vYhH34i9LlhhLBsG0VTL3vRbNtGU3Q0RcWR+nrcmLBWXiKdE0q+PLrOQaPF8lvLl2v+wQfv8/jRC374Q8G98+9+68t89tkJ0XSG86aQ1xf7UeayCVauvEGzKwzFYiKH5veplCW3WTTO9qtjJprH5i1RlmhNfw5OFWvc4ET2vrz75gL1o21u3hZcmEOnw1Jujs+e/ylzC6KlIR2rYK5bPHr4A6zW++I+v/wej7c+vOzr6TWz3Lx5i+db93EmQsc2z3uEQiGazSanx/8LAJ3OIz798JB/+A+F8ziYfk7vXCOZVXE9ERybODUOtwdcvVnFMEdyPlUU1SZwhWwmTHj55AXJ2DzZvNC5vfqAmytrzPQZB7vi+ULGPEFIRYmIdelPanieR8iPEJdO9XFvh/rI4mYhjduTpZflHIES4mRf9BaFImFu3azy/Nk519/4AIAH9z9mqTxHJgu9jgyw9PqkElFSKbE/oqE4kbTB7l6fZEbYgqPJkPPukLlqgX5byGwquUBvOBDAR8DB8WNcR2Ujm0bi59CeQr5S5Jt3luj1hZ1Zt6bcTr5BStoNJ2eHaHqCdCp7yRdbVnxyuQzhUJQwrryHI8ZDl5vXBSDSzsEJ0ZBJPOOydyqeORYLYU8EBxfA1qt9FNUgEoN+V9idCwtLTOwm9cND8oaQjZge5vFnz8iVxZ493Tvj+vqbRHSLaEjyotk9huMSy1dyjJrCnhqet2i3Dhj6sp8zZDIeg6KpaJJ77/S4Q7mYwnMSKIqkslHjLM0ncByxPz9/+IiNqzd4tPUQ3xA6aNV6A0JxcnEIopIv1kgTMRMECTHB7b7HQiTJ+aDP6UCClClVCtkEh+dbHB6LBMh4+Isg2f/b+EvhXNmOxdK8MDCVIIwz8kmlI5dIcrFYgvZ5j8AOSJVlbXH9BHQfR9aPVxcKNJoO24evKC0KJVKvHzBfiPPws11aspcgVHLIz0q4thCuVt2iV39OsRLC08XGqJ11+OpX7tDrDghFxIJrTphKdpmnM2FwOvaEYSfM8ckB84tiwWctlal3QCqV4d13RN3nsydPsAcj0kJf4PgK7W6HUsG8RPTyPQWdBMV0GscR
u6Pd7JAphAkCmbla89g7GjCXWidhis15dtogng6hdkukE5LQLKyh6mcc7QgDwYhPWS4uMRkPsWVvmH+a56AZkJMIQ+N+F1PVqCRLFPLLAFhujYHjk04vETJlJDI2YDxtcAE7Fqg6M2tEfi6EJftqdnZPWA4qOLZPXIIN7B48YK60TCIj+sLu3n0TewpHx9s028IgWlgs8/T5KYEGkYxQkulUFPotGh3RxGiaI/q9DIam8cVnPwfgyx/cxQjP8eTpHjl5iI/6USJhk9FAaqN0iCtrm0SMBUZTca39g4ekE3GwVshKFJ7dg8/JhjYIZuIgjidMdneGKP6ITlvy3AQa26/apCLRS5JaTdGxQhqtpjBwNcMlHb3Ks/0D8svCaPHHsLS4TDKxwdOn3wbAtc+JZdJYMtJSOztnOB2QLMY4PxeGRa5YoVopcn7SYaUssq82I5zBjLh05rBCGN6QpfkqbpESAAAgAElEQVQSkYFwIg6Oe3h+j2I5R3sgDqKVKxucdmvEk+Lavhbm+KSJqU8Y9iWZ82qYsTKjdnh8aQC985UVdCPg8WdCiZWuJzg4HBEiQ1Hux3pvhO+PuLZ4nURJyGLt9IiJZ1KT0VHDjhBPJHGmU/KSLLsx6uBNZmwsCblrNOrk88tMRjCbXcIBMBjUUBSFeELoCc/LYJhh+jOxP0aew3TqcHCwgzMUDslCRcjfZ09eEZENw7F4hOmwSaYkrnPasmm128QSM3SZ7XEDm7ARJmIO6UhyYwMFP/BxZLTSwuP21Wu8dWeF+x+L59PMKIGjoODhyQhboHhMxzMyBZmdDPuc7T6g3XfxXREZzCR1VNPmwaODywx7Kh5m3G1ytiMMspCRZ/foI8JhncFIrKcZ9lCIEomo7O+IIJBuGphmmEASQAaAqkZw3TaJlHCAbt/6Ct/5zj8jElsgGhE64dnTB5hmmojUP1HTYDg+5crqJvcfiT6l4exz5ivLuM6QRk1GPpMz1lY2iEnktb2Dx0xnQ8b2iK1dSQIfFuiq5XIZ1xMH3/Fhh+pCCN8T+3Fr+5BMukgmVWVlTkQsf/ijP2R18RbH2qeUJOdZo7mPFtW4IfnNPvvsp2xszFOv75GICbmLhG1+8tG/JJvNstcQQZi1lSs4U598SsjZYDThpD7i/ff+Aw7qAtTDG/pE9CIxSZaZyxbpj5okM0UchAK3RzMWFudQlCTXDWGI+t6EVDbC0QXXl2kxmB0QVsvk8kIGXW/M22/PU04s4n9TGnO1Dp5yg4REJ/zuj35AMp4l7C9z/6mI6Fsjj3ZtRHcwRZHIlcNZD1NJEJK9THpohEEcR7EveX08ZmhqBNcLgezp8vwpaqCi+nJf+SIrFPgeKEKmCOK4juiP05QLhL0wqh4iJqtG0F0sTcVDw5AABXrg49g2g8EB3Y7Qn74bxvUsTFUGGhSIhLJEEybpvJiDVDJDLJagkkyipMQ8hELzPPEcdnaFExHRgUBFUzU8WSWiKCbh6Jizsxc8fyzIXCMxU/LSySxVPEk4msQwo2jS0Q9HNAIzQiqt4LvivgI3YDrr0ZZ6qtOpMe5PmQ5bpJPCYX73lxXm5iOkY19noSpsgN2tOn/tN3+d53//vwOg3mgTiyZxPBfHEXNgGDpe4GOYURKSI3OambK985I5T/TCVKsxKlmbT3/yCEQimd39M4rpPE8PLjiYCtx8a5NGt82NdQH4cm//Ex7tJXnjnRUiabHGJ8cGE0+hsiozikpAeS5DNOzy9MmBXIciZnjMrDfml94RtlJ1pcTAn9KVmQ9GCl1vgD/xyOeF7B8fPyJuZvngrS/xf/yz7wOwfnsNtIB0WHwfSptXDz9neeNtBtOGlE+Fs70ahfnbXLspvq91ssfKnRn3XgrnsdOfEInrhHMpakdSf5fTxGMzuqMOhYrQ1+NRG21mUKyK9Sz5i1xNZ2laI2KysbYfdHAw0CyTUl7I1NbLP+Hrv/xbdE2xR0ejEZ3xPpOxSUqSnccmaZaKNoa+RnpR8hnOjklGqkwjEkyqsYtrXeXu21UC7aJPqUQQ8bG9LOGw1B2ZNK12l9hFEMo9o9ONkM9HcSSXqe/bzOVLRAOFouSUCsU8+v0U5yfi2pX5dTIRjWg0yvHxRbAwzXh8imuVMGRQe3PjCtXsHIOucLYajSZTxyefWqAsudoUT8dLNTneP2AoAS3S6TLt7kP+6E9EEGFtsYg2jNGbqkTTQl4126M8H7rkxlIDg+tLy9x/9jnJ4pxcvzrWJOCNd27z4rEIOt25/R62FRBWxbO8feMuZCwOdmokE8KZmy8FTMYKnY7JWFYeVDZLPPz+z4jFhUy3W3Eqtzdx3BHH0uHTqZDKlAnSDooj+uH6zVNatkNxRchG0VhkaXGN6aBGpyP6Tj/62bd5d/M2lucwkQHdXAo6XQunJc6wTCpGSGniouHLZEcuEWMyHKJoUXYbQq7VwZ/fVfWXwrnSVANTepbxRJThsMaz5x2islzq5c45s4nCwlyWSEJMeOfFOeV5jYaM7I56HoUFk3FvjS9dExGF33t6n1oQpnS1QmZJCOHtu9do7VrsHwmFPLUHuF6Xpw/PmK8Kw2Zp7ioKJqreYjqRUT/b5E9//h3uvCUO1NrZIaqicnPzFuORhOmcTVHDFmO3z4ttsRnThXna9XOKVUkAq+ZJReZo95vUz8Ui5Ysx1jbm6Q16hGXEK52LoxkqcZnG7nRbLCysEVWS1PfEgsdTGt1uHyWeIiTLXXYOv6BUTqPHJNKS6qEYPplChscvxIGthtposSyTQMxlaeUavuWTSxc5PRUgBsNZG9WbMI4cUpcQ55s3rqAbGqOhuO9k2kfRfYr567R7+/K+51CCMPF4mE5HIvMt3WF1dZXmqYj+1vZHePSZWmMcRUT9PvmkRzgV4fSshf1KGJRzS3lM1SQvPVPT1DnYPaRUSLNxRaRrC5lrnNWaKJzRaV6AYzQwwwovnku2eTNONV9kOuti+2KTRaIqhhFiNuui9YTsFfNLjMcBUYl21R/NWF4oo2ohHFesZ3kxRe9cAWwsWzyPgcG4P6KtijWo5tb44fefkF/xKUUvIq1RPD/O0cmImSINEnoM+mlCObHmFVSy4Rj98YR5qfCXliusLM3z0vZJSoQmLZbj1cETEjlxmFTzixw/e8rzl8ckS2LDz89HOdqbEDFcKvPicz/5/KcMvS5flZnHpBLwN//jVW5fCxPui/c8f7nPZ7sBUyVPSjYNp4w4Q98mLhv4J06CdM5g1O0zlgTImSxEgipT38KaiD153uoydSYEshTTwqHWbGE7IdoyK6UrNqlonNq5mMt2b4ISb5DKhuhsCTnDM5k4LslkklhEIJKMrQ6HxwdU54RiLWc2OTzdYuoYRCRICoqMjmPjGeLnwcgilQkT2GJ+Z859br9xlZODcyxVOCimfp3d7ROsmYEq4bXRLDQ9dEmDMB11ma9UuXn7Bj/63ocAxKNJRk6fSFjFnQrDVNNUQprO6qrIoDQ6J2BEsalTl+SctZrL5u0Ke4fPCYfFfQWBRTafYOKIg2JpfZlm7xlacoxuSQjuUMB
C0RqRahNiMMxe9ZVpzJeIZh6Oc6QfRpRqTSFvmSkP3dvTqTQsRInquUtsZs+oi+BJcBiF9Q6I2aKDkh06UFhcmwRj4f41hG2C8vXWU06PGjn/97Ll4W0fRqLmQ9f439lOAo+vFPPiSd0dGsPsenso8vNofvNzg6eoYhy11Ny0eJJuSLkrPLrRDqLmY8xO5IXkRFJ2GUCdUBgWyrmK+UafWeoqmS8DlMcnRyQqGc5ekzkSFdmJsnkUqSKyRx5F0wmZrUj7rMlYSuPjup8Sf/1b/CjernZcG7L/a4/8UT/vzP/5yd56JvR4s7zGYzzLjIXOWLGQb1Ia0aVCVH1/PtMzL5DLsvTgglL+qN27eYjEwGbdH3XSiYLM1d4P4nn5OWJbiVks/mxjJnhydUpO2wc9Rjb/8ZaUPoyc2L80zGfVpHUyJV7J9mOPR6Iw5rB9hjIUMXLrxGONNo94ZSDizW1ud4+vQeuuRA3R2e4XigTMfE0+Jsj5yIQm6RmSPs3OagxspilZxm0RuLvSpX8hDGaDUcsikh1//0t7+kVx/y3le/BsDG5hpaFKfT2MOdCvnc3b/H6voKw2Hz/DzEKlO8M4OVC6I/ttU6hGkbNWYw6orfS2ojdk97fOUDoTc+/fgL+t0TGo+2ufmGAK9oJdpYtsHWBZ2EIeTl40/qbNzaxEAA4dy8UqGZntLsG7SfyvsqEZHOJJgMjnA9sael3CU+/eKHlMuiPPT6la+gLrZxgz6ZjLjDvrj7Kaa1wpndRjXEZ8uL3+Td917jh3/7N0LGBg5vvX2JXjdk64p4Vrt5xmiYJIwrZFNCXirVOI3aKV4g9bfV5/Spy6VLF7i4LrLbMVao956QK0/xVWHz7LwQd9LvG38QzlUYRedGjOXFCVyXdKIIBeG0jJ0JgWsynikYljj8hjXDcwOSKSGoZ0cB2bSOog9wJHhEZa5Mr2vjzCZkshL1Kw6O75FJi/9LJ5OEoYtpmiA5UQaDAa5nszCfI3hZrqibhI5NRabR/cBlGoKuQL8vHKJ67ZRIU0llQxT1JVJWi2o1TSYljZZUxGQquJyOakLZDQcjCoUC9dP6eZmTG/bILKSYjcWlPnXaKKpPKh2jLw9sLIgIA8GpcG1DHI7TRpNxr0NMNq+PJwqzwOf2rWs8eSIUVNZSCY0CkS4OsGvHCP2ASjJDMhCHru+H9IcNKqUM2aIwNkZHE/aO2xgSDe7i9TViXsj8Ypn9M+Ek9Qc68cwIxYjI5cRaHR0o1GpdvFA4V6l0nP16ByvuEkqjLJf3MTyFzfQmq1fFIX5w/yFpU2fQF8aWEiloukvg+cymEjRh2mIyjkipCaoFcYmHTsRUmRFGsuyxkGWusMBPf/pTUrr4jpKN8NwxajqGIksVdvcPCOM+mrw8L9y4Ruusw+bmCrt74kLT9BBvlufx8SEba6LMYv+TNBNHIVEWl2OxpBDkS8SUKvcfi7Kgi5sLWFGMk6N9gkjI2dhOMM3YNHvisBbRWZhfJR7PM5EZ5+FgQi4fw41sfFlqoiU1Wp0m6xuivKjfH9LqtojHY0SSPTyZtzg+a1JeqDKTzc7tbpNKtUoknde5Yophf5dssoQak6zjcYW4HqfTb5PLluSe2mi6SVqVnGStGkEUks4pZOfFOYq6CfpdB9McEVqyjNMfohoK4cuywMwylYLFYHREViJVnp41SWeSKHJOb752lRfPjgmckEgR7zt2RliGjuuEDHtinqVCATWKgUSDc2ddYgYM3YhqXijJmOw1M1SDaVusQTFlsX5xjv26JBUc9hlOOjiOjT4ReqPZjLCsi0SEhBKoRVFCFIXzIA2EqKpKqEYgS3eCyGNra4V43KBdF2em29knnVNAgnyMhjHc2YTRZEKvI+ZUyOqoqkqpYp2XHbqugWEaBFKXaYZGZTFLs9mkPxBrVarmsZ1TFCVCVYTRqaoqUaTyu6IEjVDTCAMFNCFnSpgjnTI4ODpg44owyuKmhqak8ENRQjq1SwyGHRJmiqxEsjLNGdlkiYXqAkNJInxSO8Gtz9i6LEpwM5GJ6hjMwiJl6ZAcGW0y2QGjrguSBPbwic902uGNq4I7qZhdpla7TzB1KEjOntlwkd/8ap/FjSp7+5KM9/YmtaMDnKFYz3K6QqpsMLe0wK9+8kPxe7Fdktl5jvcf8pU7Aoyjc/AUUg51R+hcZX/MxuVbvDh9hjMT71xdXqZxNCBUhGzcee0DHrofoSdUGhIBzujUSRRy5HWFQBF6f31ljS0cto8ksblpEHgT/vEHf8PGRWFY1IdDJj/7R8ahxt0dsQ9WqGKkVmh9fk/Iz1wJVQ2xdI24JdZp+/CYnTDBQpjDjoRsXL+0SWsyRPoZKI5PY3jIauoN6p44D8W5Kv1f/IpIzbJ6R+xN6CVodUOqVWnwDcfsH9UpLa6TiMTDzDmTv/nkY/o7Gq+/L87/t/9smfZZj+v74l264yUKlTKTr1xmJvtvw0OPe/37zA17PP5c3DOXrt3g1lev0qwJJ+nChatUKhUKsSSdttiHb35wmy8/fgozh0WJ5JhOGpyM7rG3K8p2fvqL3/Ktf/k10sUi3YFwUEwrie1NiMdieL5E69Q0guj/y/GmQhihqjrqy7VSVAjjWGaOK9eEg7f9/Am3bl/n089FmWelkqHdGrG8sEp2STjH/+5/+RkZLYlHRPSSU1LxUYgIw5dceHF0XUVRI6Lw5XciwijATMR5KsvIC+UFjISNXRf3hTedkE7pWJLbDKC6GafTnVAIhPMxbNj0sUkmklxYFN/buDKHeayD3uJ4X9wzq0uv8enHv8CVTkWxnEfRbIYjDyUSxqSV0NFjKXL5a4zk3ToeB8wtFnipN0I/QdzI0W5MuX1bIOU9efZbDMPg8uWv8+KJAC3Z3QnY2LjJ8ankiqsk6PcUkpbJ4pzQLfWzMV/9YIPjox6doQDj0FWTudXL1NsSqCJMUSnn+PJuh5uvXROfOdvceesWZtLnZffAfOU9Pv38M772NSErO9tHZFJVVH0qQEMQHIimYbC2fB03Eme51jhgcWEFFOFI9ZoKW6sFSt//I56/OBDrkp7QaXtcvXKLdl8gVSpemrm8wdgW67t70KHfdUiYZbIpoYO+uPsUTc0wN7/E3oFYzy/v/Zqrl94hWxD78Hj7l4RBDNPUSWeFE1HOLbF/eI/6WYtIRi5LxU0WS1sct0QwnliD5ZU1jk6O6U9EwGVvt8mt1+9QqSikMsJutp04Fy9fZtyXveGlKXt7x5QXF8ktis/6rRPyGQ3f90lJTlBnHHHtylWspNAt/WddfE+j33VpD0W5naUnyefWefilOI8ZQyeyLBLrWWZj4awGfYvi2gaLFZ170uZxoxi7z56ztSrxBgyHUd3nnbfuMJTBuYcPThnNOmxuXGc6FGtXqeT4/ve/z9/9w0/EnGY/IR1fp5wvMB2KwNR3vv6f0+k3mF+ZZ3VZnLWtzQt89NF/YHFNOFtRFDAc9lGVItWy+Gx/9zGGuslX373Do8eiP3U6mufqlYucngk56HZdAZzSb7DzTKz5d779BuhtZpMRjx8KOUtm
/uOFf38QzpWmaRRWRKSgO3AxDIPRsElcEZesFypMwwkRyrkii8ez+I5HsyY2JJ2NoetTVDWBnhGC6gQTipUCaqSix2RzpeOg+ZDNS8dtFhCFFpE/wJCGmqrkmc16pDMm445wUnqdBrlCnEgiNs1mA6LIIfAV4qrMUuUsupMmKStJtyO+t7ZWImUJ4wlgOJpgmD6TyYTRSDiKm+uX6XZaLC7MMZU9CAEee0fPqUpm8FTKoNX0ODl7QUJmxRaW1+g1LLrDXQ73RBR8NPBZWVmj0RIHIfJmoCocHh6KjAOQHAY48SYThLJPWApX194kbkzpNsR6biylOemfoIQeLsIYiP2/7L1Xr2xbep73zFhzVs5VK4cd1s7h5GYHNtnsZpOyKYKiaMOUCUGGb/wb/BcEGDDsGxuwJEOQYdmkuk2J7Gaz+/TpcPI5O4e198qxalUOc9bMvhhjr+aF6CvB6Is9bhZWoWqGMb7xjS++r+5QKqeYSETBJ092uXa5Qr+/yJWrIkPTGwVMnCPMlE5ZRgaOT48wDDB9SaSq5ohKLokWkfKFcW4peayCSkqPefJINESHoUIcq+QlHbunKVSKNQ6HJ0zlPFVLc5SKNsPhEeW0MNSslRH9sU9WRojn39B58XyT9752mZ2Xoueiq8zIDLMkah9Dro3qJ2h6laIhNuLp0RGFis3B/ikpQxhgSZhQr1XIVer0O+Kz/jAmMg3620LGVucWuHHlBkcnhxQsMVcZu8qL7WesX/kKO7tCcebsKbXyMjmJ7NbtfEE64xGoLrOumPNyuYYWa8REeJJQUklslMTh8WNxoK1eWEXVNcYTT4CpANvbLS7VF1CDFIH01Ox0mliJQSItFkt1Tg8PmK/NEUigliBxSAKdIIioLAqlOJy8JFvIEkdCpvXQQolVWicjMnkxx81GhV53RDAzKVVkr4TTZdwNyOWFIk8ZRbZ3NomJKFVkD+I0wjANFIlc2Vxu8GTzJSoiqwSiCb2Uy5IxE5RYyFRKc+i0R+fOQDbXIPYiIneK05ekzHJex+Mx+awwAt3AZe/khOlM6JFKrYpCjKEaTOX98vklnIlGrETo0uhMYlC1mCD4FfxqHMcoSUIke66K+TyeM+P09JhKVcLIz1yOdjS6Eh0tW4BOK2QWTklLWoDO2UMMzSdllFGkSlb1Ge7UxJAZ4VK+iB/MePPdN5g64p0fP/+Ur371qyRJgq6rUjYE2IYpewaiKCYMQzQ1hYZYl0SZgergTDLoiOdUlCP63T6m7OecTfYoF8okro+RFTJcsHJ4zoSB38UJhXzeuHyHWj3Li+ciSr3bOyOTytNYSdNcEMGV8jADs4RSM6Y3ktUIqRmL61mGPeGspsxjbKvJFy+fs74hDCe9aLOxfI2HTz7kunTeTjbbdI9GZNaEUXb/SZeMXcabOizJHo+d7SPmOKV9esxf/+1PALh15zL99oiS3NtJlLD7YouUpTF1xDkzl1EZj56Rrwh5nSkOsV1irnEBNxT3+2TrQ965+A06kzFpKS+r8yUwfSZTSR6rTjgZtXn70jukDbH39vCwC2s0dZOBZNnUAo25Ro3MXRH9xUg4OTnhiwcPsWZiPaPKCqXdXeKLTeqrcj61PI9/8Dm2zFJVl3MsrF7k6aPnlIU/y7PuMXalzJ3FOvacCFZNett0T/dZMkRPmzlRWdhwmZ7scOaL8zdtpGgGU373ny2zsSyMltbuv6GIz8qqgO3udD5jZ3PA298pcf9zcYb8o2/dYji7irHYoFYSWcULF5YIxw76TOy9Yn4RO+mw86TD5QviWh+//wWlcprmYoajQ/E+6vIqyjakLDFPpXmNd27dRY2+jy+Zm13fw9B1wihC02SWKFGlU/MrAu0oilF09RxMU9M03GmP69eXGfbFGXL39lWePH3K19/5UwB++uG/ZDrx+NqV32L3kXCgUxaEYYKqmMQych0nHpqqngdAgtBHURPiOEKTqVxBPqzQPjtlFgqj7PrGN5hvNBhIgKJOq8N8sYkfvQrcQNpo8LT/EaYtMoNGXCRE53DrGReXRDTdmURYqTSKZjOWFTWzsceXn9zn7a+tCpluzNEbtfnks2NyJbFWlUKTK1dW+fTz9wlcYRO88dZdFDXkxTMh57mChuc5+OGIoyMRoX/v3d/k5dZTPvn0Z1xaEc+VTTdZWVlBkcBinUEbO63w9PEjVha/CsDXvrrO9u5n+JF77ghn8lWOO5tosj8+X8kz6M349nffZlNmqW5e/yrdszbHraNzgI61FZ2337nDaCyeM4oCjJTP8vIaCUI3W1aabtfh5ctdkpS4XzAbUy6WOO6IapNOu0WpoDGLXNY3RMD1o4+ecO3mIjt7Dzg62AWgUC5Ry1+hawmd+2jvKaXCEnEy5dmuIDafb75NtbGI6zo06iIbem1DZThwcF2JamgG+H6amTfB0IUertdymKRQDRVNAixtv3hCqhhQbL6qyKrwYveQS/OLnPaE/Ny8cwfXH5LO53j5Unw2GAnKEd2QPW12llqlgDs94eFjQU2Qty7S6x5hF2NGrTP57JfRlALlvDgH1tc9Qjp89sUnlEsio05Ko1hyGcqqirSl0RkElOaLhBLh0zSzmNmY7eM+obStr68t4R/4rCwJfb53YrGYafH0/j1W1oWu/qM/+kN+/KO/xhvNUGUw9Oykz8SF73ztD4VMu20efNxm/eoSA084bg8eHpDNLqImCjMJZKJE+8SJh+uJudy4VmN3d0Do2Yy64rP//Hf/W3aPP2Bn+wXOVKyNoY6YDCPGI2EzZ+wmYZRwcLRJpf4Kq+Eeo4FHvV7H0GXS4Eg4ln/feN1z9Xq8Hq/H6/F6vB6vx+vxerwer8fr8Z9g/FpkrpIoYjgUkYjuNIUZRcSBQ0bW/+OGKGpMJpsmkaSlvgOplEHaFF6xYYJKTBjN8D0RrbAti1gZoKdSTAav+iIMDCMklGlYJ/BIgiFpw2YyFNORshzm55fwvARFFdGQtUs5TL3A8bGIfGiGR7mSJ/DUczQtRQ2xI4t+18OUqEaFnMVk5DB4BTVtZsinTGp5i4wlokaT8Zg4jFBRkOXipJQCy7Umnb6436CV4LsRzaUUJwcikrztdwj8KTNPx5aQ7YHSY2frCYnMclTKFaIkYmG+iibDd10KZDJnDPYlqbCVot065LSrUZckbRetBaxkDNqIiUQwbFaW6E8U8ETkbuVqDRMTkhQf/ESgquiWjq0VyDfTnOyLyOOli02uXl/hZE+swScffUS1miWlNEhkT5k/cdFzEZlSjjfeETwsn3z2kCiYsLEhota9yZjpJODO7ZskquzLsk3GE59KeYmu7JUolEPmsgV2d0SZztPtx2xceZNyvUa1IEr53n/yCQuFHMX8RSaOiOKUC2lOpg6azHhU6hlmQUignJ1D4t+4+gY7218wGU0x02KxsqbGeOKTjkTkdXA8YVTvoJMllxe/G7ljMrkcw8kZjbqIlL18/oCcNaReEZHe9tEJZ6camGlcT8xLSgsJginpXJbZTMx7dzRkfqlJqyOec3vnJeVKDW8WosktPR5PaWstbDt
NWpbpqarOyUmX5XUR+ey0+4zGAb2zfeYWJbHx1IHEJJurMJTw4UbaQkkp6BKyOvbBNlU03SOKJBrTQZ9EVUCbMRkJ2c9ly6QtiOUecv0tzOyMdK50TrxrpROmk4BCXczT862XlBoFup0jmk0RIYoSh8Adk7HK+LK3aNRNsE2TIBD36rZPMNNplMBgJOvVDQlv5M4iEkXMnW6k8bBwxyIi3euMKRfyZNJlbJklmoxNVFUn8D10Q8SfwkABJcIPZJRMUQjDAA2NREbcZu4Qd2piG0WOd8X6qWZEpVKi1xWRz3L+MoXiAHU6IfJFtNnWq2iahqGnzmGdNRXG/pBA1vCPZg5JFBOpAUEgrp3O2UynLvlshkFPZk00BVM3zq8TxzGmqeN7Ea9iaaoWYOgmruvy9JHQu6oCU+eQalXMWdq2CMYObnpILOkSjrfP0AwVxY7xJay7zTH7hwqdU7GHMs0Eq+yxP+xwKiHAM0mGYUdnOO7RGYuo9NzcEqGToWCLiOmjL56xvLLO1668yXFbRGOdcYet3oR6NUOxKLNugxqNCzCUUf61y3Xmy9d58uwhA9mPG44Thi2PbD7PhTVx/QePX3LxwlWSREQrDw6PWVqoMxl08STB5OPJLiEuuuxDC0ca1+Y3OHYHVCuiPOxW3sSLZ7QnDhslIXs7ez2KVYtGQ5QcjVqnNLK3OBhN+M0bAq3wu7VlDl5uErkeI7mPb129THrYYlPSCRyOO1y6foHstEFVkt43Ls/RtHIc7bdRZclPsKjyzu/8AeFUZBQOXmxz+72vk+gFxi9Fj1kUqlQvXMTyT2EqItf1QpXxcw/NEfux73a4tH6Vnz17n4xE01xdTXjj2pDuyc/Zel/ol+W3rlBaXcKLhAynmzZ3NnIUGirXTVFaF2ZOiAcdWkczirZYq6xSwDeH+Ip4zpRqMe75WDUVLyPh7guL+PEZ7d0B1y+IaPaLx5sEsxSFJTGfcW9C/+QY29CRQITosUJChKJo5xxyhqaec7oB5391RUWqdHRFZTY2uHwjJp0XpUmun+A5Lpoh5uXK1TW+/OQpO7svSOeEPl1fq7J75JE2bZAZYF01UBKNRGaySBQMQ8fzpufExmEYAiqtVoubb4n9d/HCMsNOFzUU2YmMZTBzA3zE+QWw1LxFtVbm333/LwC4vH6LUmmFK79xgeebIkv8w/f/A5eXbzI3b+C54rfuWOGbv3Wb4VSUM129tsGff+8Bq0sbfOvb4pxxvWN++pMfc/fGbzOSSMDDcZtycYnmnHi3wWSb3YNjMpn8eebDTKU4Om5TrKQ5ORH65pu/NU93dEAs+4+uX/0GL1++BHWXUk3M/2SacOvKn9AafMnWrqiwyZeyjEdjiEXZYxhF7Bx+hJW/zmgs5OzF1uesLd2gWKxSLAr9cnj6lCj2SSTpfbO+wHjc46cfvODCBbFHPc+DWOPCpSaf35MlqRfXuPflDmuXxZkyHfV4/KTN5ZtlzmQJoJGKsK0cWBHzicja9AcdXL9FGMp+TruJqWVp91pcuSzslFt3bvPxL7+gOV9AlZm4YDZm9+Aeg76Y3+vXbqOmEqIgTbkkdNJx64SYiHjWZOmS+MwPHtJcbbArSaFzpk7aVPn4ix/ix+JaXhRxYe0GI2cPVdqZdqrKbDpiqgqd2x7M0FN5ymqG5aqYl/rCBiP3lMl0G1PWWRaKJs8f73D5osjWF4tFnGmOpcUJM0/M8XjSx91yWJY9poNBxNxqA10PCWU1km7YzAITJewwG4vSz+Pn22ysL/OjXwp9bmVDtABK1QqPnoi+8zAwyGZNDo5GLC6LOZ+6x+yc7rD5UFz7j//Rt6jlOlTqKwx2xDpcvRlzdPSYs+4Rb0iI+OFgTD5boNMWe2EyNCmXGnTbY1LSl3jw4CMsO0W3M+HqhpiXo6NjpuMsF1dFprU3esFw6nPWHnDxLaG/t3Y/pFFbZTye4vkie1YqyT7Dv2f8WjhXiq7gSvJKzYnQw4BwBu2h2FDpjImlZwljgygSCjBj6xhGhlgelv12iK7rjCcD5mpCCJRkCHHCdByeK+BYiVFVg5G8n+v7WFpC4M+oSC6qTq/LqT8hl0+hyvpq1wsx9Br5gphQ0/Kw0gqaFlDMCeO122tjpSPSioWiiufq9ScU0w0SWT41v2KjGAHxIMXpqaz/N1VQLeJIw3fFAZo1LSZjn3xaOANngy5Xr2bpjwzW1yWARmbIsNNEm/XY3hICbWghjWrhvN+p05/hRlOOuy2mfWEImyULo+OzKEEaQjOHR598Hrpt4TD88OwXFK0UuWpErIqNd3ji4CYhhaxwaLsdh2xK42R6CJrYiGEISViFyETTxRxMhy5PvmzhIwyG+fUq82sG3YMB1bQw5rS4wWHviGMGw3sAACAASURBVMAbsidr7a9srEHkMHPFtXvtAWlbZ3frEXNLoizh2e4hmumwvnaH7c7PAUgNmmixjm5KA2xlAS8YcrajYMh+jhtzV9HCFmpa42xfGLBGymcyOOJ0IiHOtQz57AQzmZyXzZ25j7DNIWrNoCkbww+eh4SzgHpVXHthaY7euIs3jRnp4l2C44QLi7eYjicMZTp6aWGRYjXk5EQoGt0O8MIAgwqTofjdkBMsVSeTq+J7olciX0px1uviuaac3xSzyZTbN69z/0vRv1EuV/HNKRnbxovFwbe7f8r83ApqJA69cadD3k7jotNui8PYDR0KeYsk0ZjMJImoL7jDVE3IZqw4TJwUvqtSKYt3tgs2Z90+uhFwuC0CAvmcjmXrBJEsJ8jmMTNFOmcTBi2x7tdvLfH4wQndI3EolC9mGA36zJeLeJJbLI4TTCVPHAVkJEy/M/GIA4gDSUtQrNHtd2guzZNoQs7HE3Ff2yzgjGWfi60w9aaoiSxHVRUK2Qz7ezvkZM+HxhyaqpOoNqHsEQKVJEnOoeBVVScKQzTNOAe5ydXSKImJjs2yNI73944w8SlmhKrdebaJmlVIZyw8eXjlyxbubEiiBPj+KzJgk74zOT8UYn+GMx0y8ybkpPGaBC4PJ/ew7TlcCXEeJYkoA9SM8+dWDQPf985LlRTFhEQlnQ7ZfC5ABDRFIZfV0TXJO9c2OAh2KOSqVL5yA4DJuEV3NMBMV5hGwjm1qGEbNrfeekvMeW+Xzt4ZmpUHRTz7MJXgKFtcunmV+JGQDTMMSMYRGdm3sDI3R9qOqTYsDmVpcnO+gTcL2Ts8ouOIPZm1ywymIYWC0G9aSuWLR79kb3uP63dvi2dXO7TaXSJT44v7uwBkdJ1Jb5ecLfRpISyx8/wh+70jvvtb/xSAw8GUo9OAiwtiDnqjIamFZVZr6/Ta4kBdXLjCTz76CSlFoRuJEsN63SEKxsSeuHZufpW5udv84G/+Z/58Kt7lD/7Bn5E3mriTAzZGQs68KKawUKVeF3KqjRTqpsH1P/6HnPSFAz3afkx3IU1p+RLRq97JrT1WNy5x1BNz8s7bX6UzjCnkfVa//bsAtA5buL1TjgceyADBlVqaK29d4URCfs98j+kXKpnyIk2xHQiMgNbwCoP+GSt3REn684MR47
95xuKCqDn87KjPy/1TSu6QvbHQSXtuiffefJs5e8baRXEeVudn6H6BWBX8XOmSxVncJvA0NrdEeXbkeNy983UOtj/j2T0JnJAJmCvqzF0WzsDR9jFHnQN0DfSppKiwUngKpFSNlPaKxDeWpL6/cq40TRO9kbJ0N0kSsrmA29feRZU2Qed0ipEKebErSkg3N7e4fPkmU9cnCMQzLS3O83J7Cy0d/Z0ywCm6apAkvyLw1jSNOI6lUyVIhONohm3lKJeEvHz4yQ9IGKAnYp4MTefdO+/ymYSUBjju/QQtfcS735SBvkKPzccHoM4TSd5Hkzq3br/F/tGH2FnJi7aQ0GppWLZs/L//ESET0qrGk/tyz5o2B1tjTHYo1YRxPpye0OtMeftNAe4yeV5mfq7IzFGYn5floe1j4tk8RmpIqiBkcf9oi9axx9IFcf8PfvEB3/zmN8lUx3z5mQDV2LhyjY8/P2Q6nZIrS50wHDOddgklOffa6iopK2Z/7xRfgh11+4dk7AIk+jkPY7m4RBCfMegLA1rVLEhmLC6XmTliXhYW69w/2SRlWSzPC5lNpXzWr5TotoRz53sxF68t0z3bJp0TOqhaC/nZB5/wD/7gK7iStPhK8w0ePXrE9euilPaKYfBvv/e/cGXjK8zPi57nP/+33+PiRpOj3Q6LS0IvjQY+tVqNek0ELTN2g0JZoX06ZtgTaxoEeUw9wqhMeLgjyGrL1jw/+uGX/Pa3hT598ukvKDSXyOSzlDTBg5bKurzY/TlxbDEdiDOkMWdRaywyk6WuB7stbt1eQSFA7b1ifJ9SzpUZtzqsXRWOTG+4S2e4z88/nkqZKgnsgcSgWRf2DUmAqYKRFmvXd0+49VYGdegyPBO/C7WQrD4jTiW8c+s74p2thONOm0xVyM/uXpfB1ONrb3+HsQzwPHr6CQtrDYprXfYlQbAZ5alk0/Tlufrxs/tcur7Cv/jeX/DGLeFIVYwUaaOEGqi0D8X7NRtL6MR4khrp9KSLN4tIZyPGAwmzvrjGzlabuWr9nEdrZbHMyckJh4fibAqjInbK4OatufOEy3ztNtu7DykU6yyuC7u5K8Gm/r6hJEny//mF/z9GKqclb/6eEBzf0Rn3RgSuwyudVagWUQwTb+RiWuJ57bSKploMZW+K67pUq2V6XYeUJUEvrADPhcBLEymyHtcGzwloSiK3/mTApDfDtjzykqHZMEoct0/I5nzKefG97qRLEqo0miLSoiYwcboYenL+nbP2gEQfMXNUzJRQwMVChkmfcxSuUBnjBwlNO8toJiKttYUm/WGI5zv4Miqtxyl0Lcs0FN/Jp8poxpTJzEC3hUDn8yYHu0PMVIPVefHsnbZLr3PGoiT+7Qy6mJkU+8cnFHLCSdJMheloiC0RfxLVY+y6ZAydOJJ9IMGMoq2QKejnyDxaaDAJTtDku/SGPoY2wTCKeL4E2dDToESYmsVoIJ4ziTXyeQUkIEOpWsFPNMJgxt0rYt3VOMX+/ggr56BHQpGuXFrj6GiLpw9FZMkuFJg5feqlJqokafZokctmcIYZJqp0xo00OT1LpSbWZXQ2JuIIU69QbQql0jnYI9ZCVCtPT6JSRu6QWEvOo7jZwjyTmUvUS7hwWbzz2WDEtB+QrumcTcX9wiDNzcu3ePJARPgajUt0zyZ4kyHVBSHEnbMZjcY6g/GQfEYY3rmcyvHuPsFMgjIoKpELtdo6Lw9FP1WuoJFSsxhaiZHk44lil7Rdwg/EHDhTn4ydopDLnpNle05ApCmE7piFRbHuo6mDodtE8uDvdruUy2X6vSEpGaENIwVND8mlaxiaUIpeoOCGDnmpXD0/xkrlQA9wHSGv3kjBG6eolStEupCFYrFEJgNxKBXpsyOskoGiD2kWRfaskNI5PG6TktnfhfkKzjhgb3uXVEbMXalSwfMsRuMzFIRi0/QJtpklkeADva6GplvoFYdOS9w/ly2y/Ys2c3cyIiIJ5HIZND2hIwmKV5cXIAw4bbWo1sUzKawRRhBHAY7/Kw4bYo+j/VcN2Tqh72NqOgOJPHrj5hyN+RytoxZmRhjMzeo1vKCHmZIonKkUI/cEBRNvJt65N+wQoZBJFxnLLIphzoj1tMgkAoVcEVs3SeKQJJCGYhiStlMMexnaLeGcqrouCY4R3yEgn7U5PjhFkaAMSaKIBv/IxJIAQc2FGCUyyEmgn+kgxNBMfC9CM4SOnWsUWVwrMxoZLKwLo2V1cQlLBT2RDfyezmgUksop6LbklIumnO59yDe+/l/gJ6+Msl3cWUKtKhurXZ9sRufhsy0KEiEtChQKuSLVapGTlshCB7MJKT9DW0bqJ1rA9UsX2H3yjD3poM8vzXG4+5TpQGFOotLdffMd9o+/xLTEgWqm8oyGO8yXqtiakKkgibBKDbr7ouczqJaxKxeojEYEMljWISI6dojUhK+8IUCEXu7uE8cR8xfFvZyByX5nj6WFN9l9KXTC5eoaxZVFBv0jVvLCELWaDUb+Gc6ecHB3T16ieTY3btygI8mkLdWkPzglZecpZMVa5YpzBGiMZLWHZTXxoj5qMiVtioi0XrC5/9GnrG+ss3MsvmcoZTaurKOEQqa6uyMuXZ3H9fd4ui3PHatMxc4wIeF4RwaGQoe7164xGYk1nsYtHH2Rvacfk5aEy/nSV1ls5ojDLmZJPEPgjJmcHaHlhJNm1HJ87//+l2xs3MWUADr0PUxzVWTGi0LOrly5Rtjv020LI/vqG6vs7j/jf/jn7/Ozh8Ipy2eyhIZKEkTnaH3CqYrQdSF3up4hSUDVfmXjhEFCJhXxv/+Lf07nTBj/o9EZxydnFErivBiM9yHxmXgJb1wR8vlv/vUH/Kt/9UvqjTJRKNYhUUKIE0KJyqRpCqmUzWjUO89cmaZF98zhz/6b3+HOb4gzWY0TdAUUieiZJGOKuSJLcwt8/Rv/PQD/0/92lc5sxsuXwiEKI5+VtessVSp0NsX9Ni5/FaOmYeo2PUmAbFkWTx/fY+OKMEIVY0S/N2PY95E0fhRTt9jde8mT57/grbfE9wrVFN7MpFYT9k0xv8Cnn/2ScrmKaozku6QopK9w3H5I71QGrnI6k4lDXSLs9roDWqcD3njrMkeHIgO1uDhPOqfy5OHxuZ7/jXf+gIBnTPpyr4cKmjnluP2c6USse7Ewh6FHDEdnOOd2bJpyTT/vvdUNFc2Y4roBcw0xv7s7J4TRjMC3eeuOqHpx4g4PHj+gnhdB2fHEQTMTwmnAb3ztPwPg5OQLWu0JesoECWRULSxhmQU0U+jOs6MOa5cXefzsS+JI2nSc4czO2Ns+5NJF0XvuezG1WoVPPhfAH4XMOm++vUGne0K3LfvHVi+jGwbH3U8J5bUuNpY4POlQkbKYDcccDyOqS2V8eV64/gmeP8J1YvxIOASe75AxrvPuewJ05mTvFM0wePTiI5RIzIupJty4/jaVRpFnWyIQrWomfuDy8qVwbK6ufRfPHUBUpNoQa5MyVf7q+/dYuSp0/satSxxuP2ayM+Op5ECrLBt85Teuc9o/odpckM9ukDJmBJrQ+
YHfRKnUUGIPZyL09/Ote5x1fS68OaFqfA2ATBgw2UuoXRV76MFWi5nSYG6uweV1oYNePnxJJrVAdbF8zo8VhmdsPntOTdroe0cnHB61WJpfo1oU8vLs+QH1WpPDw0OuXRXBuFnQYXd3m5kkVo7jNHdv32Jr+yndjrDxLl64ShjNsG2TdlfoRcus8t/9s//x8yRJ3uI/Mn4tMleWmUICqOE4E8bjKYQBi/MSZSRWCXyHUiF/HtWIA5cgOSVjrwKQy0IQjsgWonMjIg4LJIqDYYfYSMfJnGEZcHIoJq3eLFBfKjDXbPLpp+IgzNW61OfKKIqLlROGrxmD504ZjuWDejaQIlJnvGiJxra5uTkKuQWen+2Ts4SyqVYs1NghJUkMNzfHhLHKxq0FEol4tbPTwUxlcGZDVENClQYJuUyWdFoIStrKkrZLRMqYckP8btRVWF1cZOPaBvc/fl+8n1rh4uUNxjORVi4UNTrdDtVSBtUQghlOVNRUmoNjEcWpp6vUC1V8X8dBQgCrGrZt47kRk564VjBLkclaKBVh3CmMILIYDmfki0I5pDQF1/XQ7DSKLBlLgpgk0GlaIqrTqBU57Z+SrWV4eSgMC2c65NLSW3QnI8anQpn7ig5JjKIJpRIEAbl8iW5vQFY2TWq5Aqpik8tpzMbi+rYesr54nYeb4l0KOZPF6iVmiYLTF4ZGfa5Ie6gQTz0mA1HioIQqcdpm0hFGWnfSoTR/gdJ8j0FbKJr7D0IqKz6nz0eE8v0W5wu8/PwJqUAYbsOTFyhawjR0SXWFQsrmIk6PDkin07hjiUDpaYynI1ISoMQZprl6aR3HcVBCca1itoI7BNcJGAzF5q/VavhewumJ2OSXrl4mSRK2trbJ5yTkaGeE76poccI0K5WbMsP1XWJfInOmc7T6XdaWGr8qDxkMsQyL47NjFuaFYREGU3K5FKpkPreNiOHolJmToL1am5mLYQa02icYsjl30GmzuJRGlxmvK1cbhCkNxYC6zPYqQ4Ni3sGVDsNxq0spW6LZbGCnhbyqcRrskHSUZiphpSv5i/SHL8lLWoJcyWc2i4mmU3JyPpmJPTdfq8kDE8I44uD4mBVJQhl6MyajGWkrQyKRMlXDQE0CoshHVcU1DFVn4vbOsz+vyox+hR4I9VqB+bkMK8tVdnYEbLXjTmgs1OhKWHslsslpC3hRn7yM4vqJAapNHEVEkbiwERoQqCSO0GUTx6U17lMupslmXhkkBkftGYapo8hMlaoqhGGILtFC/dkMO2VSKheYjP9OpC0xiVWfmXTsL168haGkODoQez1OXPp9MG2DUMqLHxUJI5293TYvd4ST2b2xzVzxEt96QyCKfe8v/z1X7q7j+xEHp+LgLZlpKuoNPv/JI+yyjJQrIyZ+xHFHrKetTUkFJRYWKxyeiCxRJl1le+uAw9M+PVkeGYU+dzc26B6LMmS1qHB6NmHixiwvC8NpFuvUF1aobNR5FTz8+aMfY/sJkjsTq9GGsM+4G9BThBF4ce0t9k8esbHxe2LtvCkpz0HJ2rimmIP+zhYXKiV2uyP+9qc/A6BYbTDyunQfij10Z/0umbRKPd0mXhaO1NPtZyyqXcLBiOvvCWej7/b48pPPuLsizop88yrPnz/He7JD47KEBdY1rl37Dc4ePqPVEXrxtK9Rracwa0L2m0UDZyti5MPZQMzT7GDA7Xd+H3t8yJzw+TicjBgc7TC/sArAxh0DpR/THwTUisIhMktNwuiE7DDmveuypPJ4ytE4ZuwIg2i9WWepUmSqVljPCgfTyuvsbz2iNr9ETpZwlooWD9wu9pIEMppAI5kjXYzPy1hHozG3bixwYa3B0Z4k1T4b0e10mMpg3OMdWC2VCf2ISOoWRdchDEgAw5B0CXFCFIUkya+crSSJJeCL+M5oOOKtm29Rb6psb4k9GcUO/X6fpTUxL9sHR5h6iVQuxe6uOAtu3VzDTn8CiU4YiXWOFLB0FUN7BR4TCOROTUGVwBthKNALPc9nNhWOkusMcEYhcxKCP4o9nj17xuncNRBI2vQOL5BpTPmHfySM5UfPH9I+7jPO+LzcEQGlZu0iumkwm81YXxVByu997y+oz9kMxiIYMRx4ZLM2lhWzuCC+c9p6xPxqlWu3/in9kTh/67Uavd6QSlms+VnngMZckWotzV/+5d8CMB75fOfbBqrukpc0AJcv3uHhs09ZXRVzF8SfgpLH0EpUJay7peaZ9IcsLedJy5JbRR3z859+zrtvfRuAFy8/48KFixQLDWauOKODwMMwLCyrTqkidLMTnNI7G/DGbQEc4QcT7j18RMZuoBhCppyZS6GYplIT1C0AL18ckHhFSgWRDc1kzuiPzshV5/n0C+FotI5ecOnyHVTdZmlJ7NH97SP2epssXRBZKsXqMx5nuH37Jh9++DEA+cwiuUoO3zHOEXZNU+fkuMMFCdxy88ZdNjc3sdIJEeI5O50W6azNsKOzvCJkYdbzcLoqC3WxZ4adNqnsRayUxnAsgLC6HQffDdAMl8lUnJHFwiK1apajA6HLYt/BMvO8d/crtFvifqoesX/6iIc7U95+U5DF//IXj7n95hUODmV7RjjDTqeIvJgvPhcVNbcv/j5/9Kd3+PRjce1RK6TZnONp7wXv/IHwK6ZOj1asEaYjZpFY49q8yqef3aPbFvv4+soq68spTk+3CFyxZ0rpPCl0Ri8iDjvizPzmu29QKJ9ieiLo9ealCv/Hv/8LVptf42c/ELZ2vXiDTKXB/kGPxw+FA2vqAWkzD7p4363tXZpzFxhPJ1SrYl3K1TJT12dptURXttwMRw4XLm9w0hZy9+jpPZ6/mOBP6ixJ4JbB5ICEiJc7XYp5YWfqyiu6k//4eA1o8Xq8Hq/H6/F6vB6vx+vxerwer8fr8Z9g/FpkrrwgoHUooqrNssnChRWGU4gU8VkQOpiqijuFSKbgvYlLqVRDMUU0X1VSzFyffjekUhee83jSppAtk8vlGPRFJGcySrCMPAtNEXnRNZvxaERqIcXqmkhn9sIt/DDA90MKeeHxztyIOIyx8xLEoGTTbY+pFAtoqvDCE2WKQYlC3iaTldDvY4WslcJOi0j9d77zNc7OxvS7A7ISXMGaTRmNRuiGwVT2+8xilTjlY8gSjsnAp1bJkMlqxBIaPVLGWJkJJAvYaQnskQ0oVl2itox4d3XW5lfw4ykDWefankxJ2SEZmV0LjIRuv4Oi2WCK+2d0BS1WCYKIdCKimmrOQlVCRpJnx4w1tCSgns8zlJFI1dIxlAy5gka2It4v8D1KpRKRJ57p+WafVBpSRkC/L6Jwi41FWkeH9J1TvLFYv8Fsh2zeQJcEfuPpmLSts7Q8h5kWz3TqTAhCnYyaQldEZDeYeAw6bcYzkdkplHM833OoljLMPEkqeOfrRPsvOdp9xjsXJHCCHfOLn74glReZgVmYZtI6wlJjzKKIvtQuKaiBT7k+x8meaIhOwhQrG8scSgCNlG4wHE1ZXrhKLMslPTfBSLIknkKxKkEYBi2y9iKaLsv0+g4PHz7m8voV7lwTKfKz3haT
UZeZN6ZQEDKUJAnTyYDF5VcNlSGdXg/FVOlNZKlQAYpzWczQIPBFdmLsTlFQyepi3cPQpVnTOT5oYaREZHC+WcUZh1TyGRJJ4tuo1Dk6PaHbF5nAYtFCTSwKtgWaiJRfvLPM0WEH3TAo58TeOm1NyGbTXFoX++pw/BmTIIdipPAlYefIMyjNzTOTDbxaYtA964E2QUmJeRpPpuhqhpiESl2sTbdzTL5QYCAJZzN5k0j1yJmXOBiKzMdcM8M24LnxOfl4oiaUchW6Z0Jel+cqlIo58mWdTkfs4xiVRLFQ1QRN9m+RxHje7BzgIpJzkyTReVR85k6o1+d48OgLdLmPKpUacZxhJNflwobNeP9UZOvDV1k/D8U0OG2NyZsygjkZMu0PeAUrHSQBcegz6kGvJRubA58kiWjMZ8+pHjRN5e+WegeBRzqdxjAsHAmAgDrFc3RMK2E4kETLQ51BfxfLENHCKBxQbeZpzJeQSTBymTztznNaxzMqC+Iek4FFqmrw/pc/BGBhqYo36jHxQi5K4uRW12My3WW5dhEZQOTwMEfDzmPnxBwMO3toasCj918ytyRKYqYDj9DR+OLzL9jrCNlrzl3l6f0fMDcn7m85ZU727qFrHuZI6MXbt+9ycHCGlTVIZkIPf/PN9/jD3/0zfv6zvwLg+3/zv1JvLJOqVNnfFRUL482Y+coCraEA3SiFFb7c/JKlr27g9iQ5aH2JtFKl5gzY3hd7+827G9gZh0fboof2x4++T7p4GfdFD0d71c+pYqcquM0U3/+l4G9pFhoYpsn9F7JH8fJFalmDei6Fuy/6oj4/9VleO2CpvEBBlhOmlJiXLzZpFoV+Pc2OKdRtikmeoir29nBwxrS/x6PNx3zltqCoqGXnyOXTfPij/yBkc+MW0bSHHYMiAZ6y1YhPHh9glUv0I3H2LGUbbHk+Rixk+FQ1uOoZ3KndYXNPzJ3h1bn/4gnjL3/Ob/7WfyU+m55RzF8gj9j/W4e/oNJoEh76LNdFM3m0NGO4t8/9vSNWrwh+o6W1t0gVHvLgs4+ETPdT/GTrCaplY0Zyj0Ywi0PSqgmBBIExDQLZgwkIvqkoQVWU8z0aBAGLi3kO9vqUsqKX0Fe/5Prti0xkf1y5eIlnu3/O7bXvMGeIDIa5OsG2VIhMdO1VqbBJQnRODxMnEYqSCL4t2c9lGjpRFDLXaLIoqwqsqoEfzvjoc5H5yBUXOD0bUSz2zp997kaRez854q//H9FDu3ZxnvdufpNf3vsB73z3vwTg+OiEaNtBN0NC2Ytea5aw0yrOWMxTLq8QeArr6+s8un8mr7VEEDp0R1s05sSZcu+LJ5SKTZ5uir6vZrNGMG5xfNLlT/7xfw3AD/76Rzx69IS19XlKMtOZyQfcvnGTjCSKbp/+mMuXrqGbs/M5SFkOUaRycnZEbklk4sKkg+9FPHoqskaHh8dcuHCB09Yhhqw8iKKA4chhbfUyWzsiY2Fkp+SydcJY6POHD58QuEVc9YztLWnOJiqakkNNFFqyUuXKtRUeP3jBbkvMZ3MuR8bW8aPkPBO5sLSKYWj0BmPOuiIr7fRn2JbK/p44U6y0hufvM3K1c4Cpk5NPyedqvPv279IfCh3gui7z8/NUa6JMcGv7EcHMJpM2MWSZ9bUbKxyfdKkU56gV5dnuxQTOkO6ZsCVwDdbfrnC6v8l0KjLHnutw7crbPHn6ERdXxHwWyzqP7+3x7hsSOnx0xKA1xTRsyhJOvDsc4NNhdWGVzWevYMfLfPDBJ2w9k/LzLYdarYEz3eHqFZG16Qy+ZOdzqFdEVvPmBZuffrBDqVbirCVsoEvLeb58/Ih6cR67KvbHSTck0pYozAk9sjP4JVbnHeYW13n6UNw/l1ln44rK/fsPMRbE+r0461Kqa5yeiv1w+8Y7/M57v81cPcvDe0Kf/l//57/mT//JPyGTd/j8vgB4uXF1mYODExoSin3maXihQRJFfH5PlAAvzddJFIcXm2NU2fYwmfqga5iWyBaurF6hWaxjL2bJStLiqb/PTz/4MXaqxKUNoRMO9oXe/vvGr4VzpSsG6axE9inqoJmoTpdE9juk0x6drkG54GDqYqGsRCNRB4zH8lAomGipGCsXEPmyTykXMDoLcIcDmvNi4s4mAZHi4HpCKOKowCTyebD9KZWsKMEbd8cYqkWlliVQxMarlXOEbglLwvn1On0MI8XRaYv5qjAGKoU8B+0R5XqNxUXJKWVn2Xp5yMmeWIjxLCGIp3i6gufsApAooOoJqhbQaIjfOQ54kYsmmbRL1QKR6tLvRRjSAbMyFu4szaOHB4RTcf1COsvxUQ9flvJU5opcuFjn4YPHOK/Quwo5vFkeVRXGQRyEKEmEGrl4riyxyMeY45lQVGVNzsuIUIlISXJXXc2QtTIoWowtUfHUyGcWzHAcg7wlykM8XSNSFFRTliVOJ7gTn+Gwd94A6vRnhImPbqyQpMT6nfWOSRkLOFOhjEIvYm+7TfqmhSv2GOFwTFuZkFLymIowDB3Fw535JBKcw1pP0/VzzCYz5i8IOWh3nkgnaJ/HmkiJV8sab3wlz862kKmUqmFYOrsnQ0qy1KSYi5klBaxSlpuLwjDce95h5hiY4l8m/TGlQhlvNiCdFnId+3n0tMOg77E7FevgjE3yEyJpDAAAIABJREFUFUhJFD4tilBMg/aojym5GzYuXWcy+RjX75/3DvQ6A4bDiJpEJzs6PGM2cySAhzCys9kcnj/gZDAhnxGlifl0AcNU6XVFeUqhWKHbm5HS8xQkshOqy7XFm0zH0TlS5V77GWpSwtCEYxOHEa43wyiOUKSCCsxTrr5Z5MEnhzw+FDI0N1/GV8/48oWY33K5QTgeQ2IzSgnFqVopjo4TPOn05tMBmpZCiRSmHelETAy87IRCKU93KJ69Mp8mmDnU5sS7DQcOYaCg2C75slSIoVgz2wwwdCHDYRwzdXyqNYmgqKdB0wgiUVYEkEp0FCPATRIUacwpSULohphyDnwlJtYgjgWIBICilvnlJw8ZjvoUSsKQ2rr3Swy1RiRBRcJHCgoDzCRHviQOtIJVoDcMiEfgS3Z71x2SstJMR2Kv+a5HpVxn7EwZTcXcmUYGVVVwJz66DPAoiiBVtWxdXsfj3Xff5W9++Nc4juSrM8G0fBRMokh81u/3ubB+kcFA6EW72KBaLDEdTclmRIlarzVheektrv9xzP6+ONAuNdcYdKd4kgR+5e0VTg9OiFE4PBbv0u5E1KsW/dEZjWVhbORWahw/fYDbFms8UBTa+1uUmhsk0ni1ijrtwRlzzXWWLwrUJiWc8nzzE45OJbGx1kJVQbfN856SD9//EN3uYqayxIpE1Dx8zg9+9CWVhjAQCnOXMPJ5zKrNrczviHfO5igXLHYPX5XWuNz6yh0C10WRHma+XOLwxQ6envB7fywat59vbdN+2GVlVeif026djZVFnj3dIY6FQTS/dA0nmBDGY168FI5T9o082aLBU2kALp4UwdA5nZzS74jfrTWbdE5e8Piz51iSxCpthlxZWuPBE9mLas0oFJsUKzV
UU5R5+ZMIs5Ilv3ydrZ7s28sPOOnB/IYo5XPafcJimiCl0W8Jo3B49IiR3yVsxyiR2CNWweXu9bf4d38pypLmvAOelnSKmRTrTWGAPd4+5Ftv3+C4M8GXpWYHLw6ork04u/c3AOi5MrXrl9i79wxDcid1gVpljdVaiXZHGLDPvnfGlZsFqtKY29naIrNYJeToHDwi0UMMF0IdEgkwkVIUdCONKgMihmIQkRAmCpo0oI1Q540bqxwdfMBgIHTlN77xXR7ce5+9x6Is6drdS5SXfgdlVABFyPVBJ0ZJYhKtTyz1teJPSYxfAWjoaoYoDNE1A1cGtBJdQ1USpkMVyxLO8Pf/7UcsbJzSGghdVq9UuXbtDeaaa4Bw/nvHHhdv3mRFyv3+4XOeb4U40wzrDRGAGZ0MUW2DqbtFIAOXi80VPvrk5yw0hP5ZKaRoHY65eavEpQ1xRrfa+8wvpZm5Op9+KOY8VzDQlRS25GXc3wlpXMizf/gZO9vCLvrHf/gnfPHwZ7R7J6i6kPXNTQ8lXUCXwAbv3f1tTrpP2bkfkpIgFHbGplFPMxgmjFtC32R0i3feusSDZ8LZiYn4+POPWV5exveFvD5/tsPXvv4Ox61d7KzQCaNumXq9xuam7CPMzBg6Q9YWbjIaCH2jKgoLCyWOh/coGaIM8PRgRrVaZTwTvWlqsoAfHxO4KouSB242HrO3uc216xf5UqKoxqpJMDMoW9KZzMb4fp/dlwkTGbBLFzw0JcGyPXQZrBr3O9y+8ZuE0n4cj6f0hx3ylSVqTaG77t1/TBIpvPOV9/BDsX77w5BuZoieiOtkizkG7TYls8nMEfZaNlfjYPc5d659gwVZLvlXP/qIS3ffxrQl35mTIdEj0pbFWPJxDttjUrk0RydtZhKsyhnH3Fy5xfUVybnWHbAzc4j8gFsbItixcSPm/R8+IJB9ricdj9X1Gu3RC7K20EmV+VXqgxNs0yJByNBkHBBFGl4o/i9V6+xsn7LamEcxxBqbxRy9yQBNNVlcqclnGtHIXWWWFuv5y08ecuPNBvc3N3n7vd8H4OqtNgU7x+HuCbWyLGnWE7LZGVZmFYBrl7Ok9CxP90+xZrJM3kswjTRWKqZUl0jHXwzQzCkP7ou+s3KhRpB3UEOd3Xui/P3W3WV+/9t/yPHpMSRi/VJZMfd/3/i1cK6iICIcikMwt5hl8+GASlUlJ52did9jZd0m8UxGsl/FMEy0JEVWkmXGEx8lnFHOpFFffRZlac6nsGyTSDob6UKHTDZHSzjOpLIKmUzMqD9GkUhdtlpFTxmgxDhyAzXK6xglFW8mBKw37NNuazTnqhy3hKB60QAUnYOjFjlZi356coQfRRRL4qAajo8JohgSFT8QGyqdTdDMmLm5Gm0JcaooM2ytQColkfL6bdLpNIoC47G4n27kmLgupqFiGcIAOjsYodk+g4kwkgzLY/dwSKvdJ2uLTEDWztA9myHtMdLZBsOZgx8PSLxXfVkplDBEU/K0ZAShWsoymHjkMkKY87kKZ/tTZrFDZVX8zumVyWcUXGdGhJg7L/ZhqjOLxO/K+RpeMsSwCpzKyEelNk+SRGiTEZ4lNt6FW8tEiU5WEVZT+3SPslXgrNthSRKUVqpLvHy5j5mxJIEkOI7PXmufxkWJ3OMHrFy00QOFYVcYx0E6wTYsli+s4ERiHdyJQ73RYEk6TdX6Es+375MuDRnKXp8oTJOyoNubsPNSkoEqMBoNCDxx7WJ1nbO2x0F7h4WGmHMfl/7Ao5yrUKsKhdQ2D/DVCWl9FQBVnxArCc5sfI5K9+TJE6qVeQ72W7xiwgzDmEo9x9GJhOkfDCiV04RReO4gjEYj8rZJya4yGsherWoeIs5pAkIPMmmYjobsPRdzYGkWau2Uo06LVFFSGmh1CumEQlY2qhOTMhy0tEJVZol7gxGf/eIMf5ClMSc+E43J/y97b/JkWXbf933ufN88DzlnVmZlDV1d1YVu9ACgSUAgAUqWZcqSZYVsRzhCtv4ee+EIh+2FvVCEQiGKkmmSIEESI4Geh5qzsirnzJdvnu+78/XinEp4Ye60wKLOrjLq3XffOb/zm3/fr0kSiPMbepAtpJhMx2iBuI+ZjEm1GFDaFsb66OCERLWZOw67u2JG4PT0gk5/SLFiosjhWC9M0HSLkUQrcwOXUjWDEiYUJel0LPvt6/UmCwkMESUxa+vNq2BHVUwW/gRn4YEq5AVVwfVj/DBEU18FlAGqBqmUhNqVDoCmaVgSrKI3OyYIAjKFCn4onBvVXqDbEEylqjVN4nmaUr1OviTOZTIOMOwC3VaKal3ovCTOc3jUZzQR+mBtdYnpdEYmlyEnAR9UdOrVEq3eBFeSVZpqGsMwkFeBXKHCz3/2MednPe5/Q2Tqx+MJx0enGEaMaYpnKej8s3/+z/nxX//fAMxmMQQJ6UwaS854WWHIzO8SRjaKKhzF/uSQ8tIWOytiJuH05IgbK+tErsNsIfSGn3IIZy43377Pf/xjka1/4527mKUcoxPx+x4+OeT3fvg9hk6XWKJgvtx/RPsixczxWL8hs/quh22kCVQhL/msxmwwYO64JK8QYROL7tkAlQkZOf9zePwV+dwW44XY80KhgRpphE6aVlcEDW+WPuDJ4330rDirvlrgreXbPD35ioGcDWtUi2ipLvPxkE8/FaAz2aKBmTY4PhC2IfAMvvzVl3hqwNwRuno+TbB0i6WlFSxJH+IGCz7/259hpuVwftShVrSp11bRJLz30ekx2WKRQm1OvyOM1kI1+WrRJ5K0JIvBgOx8RqpzzO41Ebx2O2c09DKWbqNLAu2nj/bx3C65rNjflK3zf//RL8kaWZab4p0q28scdcb84YffwrsQ+vvF+TMWdYuVFZGxtY0YN5hweupeVUm7gcfhr48IFiOWKqKKQcoj0i3euCMgjn/01/+eZqnBW9/cukICvV+6w3h2TKVeoP1Y3MlqecHTB5dU1sQZp1IJ60sr1Cs9okgEgYapEvkQxWDor2auAvzAR9eFnowiD1XTEDzgsfwc2LmA47Nz4kR87sc/+TPeuX+LkQx2BoMLpm4W0+/hysrj6tYmtWKNVn+KJgGzYlUjSXRUTTw7CRJJ0RBeVcpeBV6TyZi1nU0ANm+/oFQrUFwV1bt0kmPozinUZHYAiIIUrtLi+o6A+97cvMaTZ5/ye29+yJcPBKrhYDjn29/5AZ2exlyCKx2fHnD//n06wz357zJLq3UmwwX7eyIYL1XSdC/n3L13HVUmH7xwznh8SmxIP6ykkzg28/Mmb9wTvsXB88eYyhorG2laEuxkZbmKqeepVERCazqaoUdL6KmnNFaE3HUPjynbt2kuVdl7KgEYjDHFYpFyRgTQ939nnT/68x9z48Y2VZmsJs5wedGn3qhwfCgSdKlMhLOYsLktvq/d9qiXb1HI5gh9oUtmkzanJy3uv/37nJ2IIEmJI6JkgR6L7xv2hqws36IbnRNK9NzBeEC+UGM0ccgWxd168eICyyiRxOL3KkaeXDbL9rVlIkRi4Vef/gkZM8WDrx+xXBPP37q+zl
n7BUMZ2CwmHhtL90hZAc/3xG8xjAJ3bi7z1//Px9SbQn8ub+TRFyHMxOeUwpjENxmFYyo3hM71nBLlVEgYzvn4oQhOi6UMydhBzQn70Wq1qDSWcbwpOUvco/XmLvPomOtrJQZz8XuefH2On0QMxqIz4LLXY319hWy+ysmFSDYMZikqtRKGIe7swg1ZTG10LYMze5XoU1EiDS3JkDFE8F9cUyiVW3z8iXj2zlaDlVsNLvqXuFPh1642c8wHU/LZNK0LofP+4MNvc3FwwUT6CIVyyM7uMnsHP+Fc5G1474O7zJ0Fi0AhL33rpWaR7mVA7Eg/STVoXLNx9oZ8+N3vCxl++YyzM4d3P7jB158LXZLP1ag10qhPZRfHbMTYNZh3uixJ4LvpzKVUSREFJr/42ZfirDblbPffsX4rgiuUGNnhQKfTIVNIUa0VODsVpdFiNcN00sVxLDSZmU9IsE2VJBbO1nzqks0YGKqO48rBxvmc9bXrKHoLdyE+t7ldpHUaUihLfH4rYbm+yiCzoNURAA+FgkUYJLhzH9MSDuV0OGRza52LiXBillaalEslYn1GGMlshTfGMmzSqQKnFxJQIlMmjhIWoXgnd2Ewm01YXm1SKMnfomiouken16UuB+2toY2qJaQlv1LvMsH1xxiajhKLvw16Y1RNJwpMBhKxrFxKEXk2vi+E/uS0z+m5QdrUyaSEExg4IWk7hx8KIRyN28R6iDOdEYqfRz4XMXM99ERDtWVlRYGUZWNqMuuvxKxtVBgqCZpEfzLKC3ZWt4iDmMu+EN4SJabzDqWM2HMncIjcOWZYJJ0RBz/ujcmnIVVMM+7I95o6JMYCS/I0LaKA1QYU0xmub4oLfP5yQjFjYds6Qwn4kE4pNGolrFdD6MGI84sem8vbpCSKY6B0ScIzdCtDVVbY0vYS0VQhXxNB0/HxZ1QLDcrrCoOOUIi5sIqVtnh+fMbmdQExPBxM6A6OKWbEcwrZGkQOOxv3iRKhRDzXRS23SIIOIzkY3p+MKebqjC5Fy0Mqv8RkMiFJYjTJUzZxFkynY5ZX1+h2xXulshkqjSJfSRTF5nIDRVmgJjqvnAjTNNDJoNoLLCnDgZcwnbnIn4uie/RHc8rpKnZOVolTOYajC1aXKyxCyd+Ch4eBL0EnNENF8V1KZQNLtvc9+SxC02rUNkTCAsCdxTgDlWpFDucujpj3A4jTGBLgRSdhMumSyMzre3c/YNxzuDgec3kgHIZCySeVqRImIfmCMB5zb4xhBBi2+FyuWML1JhhJFssSf/OkMI8nfWxD7IHvhvi+z2XrNy04uYpGFKcJoldtlzFRHBKFCba8M3NnTpIkV87SK8jnJAFDtgBm7QJmOcXl6CWKBNNINAVFM5jOXoFJjAhHIZenz1hqCKSB09NDCqUcS0srzGRVczrrE+shzXXpiIczCqUMqgYZORSuKzb1eoPhzGcun58QoWrxFSKkacGzva+ZTDzGY+Es39i9TRQqDIYd7FgOfL885ONPvmRlVTgM4/GU89MBiZqiLbPSQRTz4vAZpXwBUyKybTQ3qOSrHDwSlawf/MGHfP3VF1zbXOXONYFEdvzinOHEJ2sH/Mv/4bvic7e2+dN/2+dsLOTu9r1b/OWPfsGbb31A7AjdefHCo1BN88MffMDL58IZMDMTZuMBP/5IWNn1tRRKvKDZLCMTpPQHLvn8ChenL6EjAhdF3WRj632kKmPR7zLvj0hb5aukyP5Zm4U7w5XVu3Jxl8uDhzw7+ojdFQHYcXl+xjwIcLwZI5kYMuYK46mHoQv5sTJZ/NgiicGZCcdmPjvFc0Mu+13K0vZ0uw6FYh0vEgmC8ayN75QZjkdXnFm9eQ9Xn9EfzqilhZM7dccct0fcvCuG5RfnVfrzMX5vgmoKnZvN5vnlF18SRY/Z3vgAgGoti+4uePBcBIW7177N9//eP6bvw2wkbG2GKvW0y+eH+6wUhC1K51PEkcr73xeB26xzTLW8xsLPMJqIysd7lRid72CXrlGS6IufPf0xf/wf/oR7OyKov/f+tzk46TJqXWL5Mvu7axEaM/6n/+tPuHtbtJGulyu8s7rKFx+LipddLDIbtZg7HdRXU+KJTkKIqkIsK1eGocmgRt69RCFRIpIoYS5b4leXm+xurTHqHTObi/Or5pt0LwakTXFnJhMXz33B5vZ9xm2J0JgrUGtqnHd0VCQaoRHghVwF9SoaqqKjKBr+Kw67lE6chJQrRT7+tQBhse2YWmmDlmwhK2bq3Ly9xdPnH/NqvfPeNocnUw4OxP4WcnmqdZP+YM7qmjj38fxXPH+2z/vvf8jLQ9HOl80o9MZ7VIri/6iqQaW8Qrt7wNY1YTMNK6CQXePg5Qnr66Ky0x8/p93dp/dCGIe11QqKZ3Dv1j1mY9lun0yZui3SVplaTVZRcwX+7E//LTduiWSyv4Df/dY3iBhe6anrHyyzmOc4a9XJZIUd3by2gaomTCVMuGWlWFupkrKqHLwQ1aVMTkXBot1uo0paB0spUmzqtC+Fr2Yqa3zjzvv8+uO/YuFK4KZwRrmyQ7s1oNsWjv21nSUOX5S4/84mAA8f7nPR2sP3EnoDIfs3rt9EifKE/hQf4Ze8/fYKC89lsZAjALGJqkXEsX2FEvvOW9/l6fNfoxk1Zp4I0BM/A5F9hdBczDZZWs3w4uCQJBR6or6W5pMHj1hbewe7KH1I5yX61KW5IuzT737/Ll//fELkBrw8EXtXWV2i3W0xGsVMZNIwbeT54FsbHB8KnjtTr9CobVGsZvj8q58BMOx8xvWbN/FCBV3+vlpNZXN7mfZnQp9Wqk2y2SUaxW1qdSH7Xz96yFLlBl4sqjjdQchsvOAb37xHrS6C12eP91ipL+G6Dl988jkA737wFu5MZ31d6LvZ9JQDZ0ySLEhXxDmk1TJJvstnX/T5zocC7v7nnz7izfouRUUE4r7i8uMffUqpvMFQIt8NZx3OLloUmyaWDJhLWZXT4Jh6UY49+AteHF0yuGxxcS72zplHJAl0OmMSGf4Uy6LN/41bAi14OrR4ebyHkaxRLArd4o8u8BODi/YBekaczWwmvufvWq8BLV6v1+v1er1er9fr9Xq9Xq/X6/V6vf4TrN+KypWZMrEqIhvkuQnposfMcXjjLQm4EHhcHNeprFsMuiKSnE4cLL0Aisj0FotlkYnSTNKayPbWVlSceYvZ1GXnlhyybw2ZOT6ZtIimLVPj5HmLdDVNVUK/wwWqm8WZJ6RlxltV5pwcdcmWJYy2Ap6v8PzxkOs3xbtP+gVCdNLpNO5CvOdkHqOq0O2JrHG5kiNbsDk/HZMqi8yHlbLY3tlCURKyOZERUmK4vDwkjF+VYjVUNaBYWKLryZmyvE6vO2TkdKnVRJZoFo4YD0GTIBBp20bBxjZ8Li9FdqRhJxRKZV5IIkfNVkhnFRr5VY6mItOzfnsdrx/iOg6hKrN3eRM7gXFLVDTqy01Cd4zXDVBk9lAtLmi3u/R6PdIy6s9mykzjIrrk1PFdhzlT2v0Jm5JkL2VZqFaaiROgaUIsnekM3dTJGaJacW1Np
1YJqBUbnF9IqNmFxxtvbjJf9Kk0Rbb39KyDhsZCVgHeur/Jp589wfEHFCWZ8+XonHdu3uDBkykbVfF9nfYhWdIE8lrUqzZh3KXnzklL/p/teomDwzYbtRpjyfG0dbOAN1vn2Rciy2gZQ2bdS5TxIYkcetX0LLV6TMoOqauBPNM0h90+thwWMWYzsrkc5+0z1pbFvoShh6YrRJFDpS5kMZXO0++fUJZzPbOpgx8MKefr2LLVMwoDFsoCO6XhzkTlQbds9DgRbZpASk2Ry+bQ81lyKZFn0YlJ6SVmc584EVm38fCYYDJkY03At5r5EUq2SEKKx18LGX7jXp3epMciCAgScc6qbaKnYnoTKWcoOGGCZQQEkSTxNepsXtti1BOZwacPj7m9ex2vFmHKeTUfk3RKozeYEoaiolcuL+P5c5Dtb8N+j3KlyFK5wKXMVuqKqPCWiyuUS7J6NumgGnNu3JRgIIpFf9Kj1XJJm+IcdAOCRUwmlSdBnFUQuliWiS5bDe2USRzHOIvZ1TxXykrjBC20RCdRxF3TEoPDR31yBaE3Yi9h0LFY28gQSBLhxE9zsueQaM+uznjn+hb5rRUM2YvfOzpm2h1jGia5nKjGOIFCz1nQ784YjUTWNhVoWJZFNivuf69zwbvvfYebN67z6ScCfOAnP/kFhqFg2Ab1pshgGrrNz372KTUJtrJ1bY3mUh3TUuj35azfwKdcXsfUsnQ7Int3VAlobGR49wMxWF2uVijWtth7OeH0QABDZNUs9z78LsPOE5CtHs8+eoHTGnBtVVTvVm7f4GD/gO37W3z1a/G5tTeu8/zFlP/tX/8Ff/+HAno58SJ+/tkF19aEnjQKId3ugOzMR5sIXXn8bEJtNcf6eonOWLxno7jBbLzgR78UZJ3ffusNcukC09GYxJUto45NrbpKpyOEqrm6gVmI+PSvn9BcEwAzE2/K+fFTdM2kINE5AqYs3DFDV2Rxq3EDdxRQaiyhauLcNX2FRXCCG45pdeQ8RSZEUWPstJBTjRKhEjP1FowPX82UFOhPJ6hmir1zMZRtpaGQ3UAW2BhPpxDHLBKHXz0QcwONWoVJuCCMPEZ7vwbgVnKfpbXrFNaEDpyrLqWta8zPplfgKofdFkPPoe5rXMo7ee3WFg9+/TVDybPnDg84Pfw3NHfeZllSYkwvpniLI3LZc9odkXH+2ac/Y2trm2pB6BHXVnnn3SKffj1kTXYeTFUo5Xf4R/8o5PSF2Kt8eYtE01lMxb74eoivHDAeH2JInrIktlG0EEVLUJJXJMICoCqQEO6mqZHEIaquMxsLnffO79+lP+nQ7be59w1RiZt6F5wPB/hST+ZyRbSwybOjC3KWkLP+fISvaEQ46DIlncQJKsoVmAyxaDs2DAPfF7IYBAGaprG/v8c3PxAEvb3umI9+vcftd0SlLl/0OD17REO2BAN88dlzIjxu3hR66xc//ZL1zRq5gsrRoah41apbqNqE0ShAUcR9v2i94N6bf5+9fVHBePP2PR58/RXN5hKhJ3RL6/KQ8fgIOztn76XgdNvY2MJxNN66LSq02eqULz7d4/b1t6mtyJbt0zbNRoH9F6esr4l3vege8J3vfIfTtqhqNJurtDpPGTszmmVRzerMF4x7L7m+s82oJzsyzns4gU26JgzWzJry+9/7ISSFK/Lax3sfkVAgk7FwfdHmGKs6k4GOkkgi9VjnwVdfstSs8/kXoo3NSCW4ns/xyWeoiN88GtZ5895tHj4Ud2E89tB02Niu8vipqDYNxyMG3TMKJZNsVoKNWQUePv5bsnnxnvUlm+Gwx6D/OcWS8Ck73RmZVJ3JKEbXJTR6WSNyK8TxSP6fS/Z/3eXspM8//if/pXiH6QWhd0oSXdDrC9nvti741vvrXJyJu/dXfwHvvnObZy/2acrZsIWv8PL0JYa6xOZ1UaE8fPGYk9MaY9kevrQdcdj6khX1JpWquGtv3i3Raj/n2V6XZlk8a3W1ztn54RV9yvHJc0rFLJ9/+bfs7Ih7a5oG/dkZ05nwO9WkzsathB/9+C/4Bz8UtBW3bjWxUy6//JsTrl0T3Q/D+dd8/mmHO/dERej04jE7GysUS8vMfbEvR8e/Zvn2FttvrXKwdwRAqlLF3IppfSz8N0+1iDMR733/h/zxv/ljIS9jFyOImI0mDOUssTdKEyoBz14KWdm+dosoULix1aAuRxoGgwDHnTEdaaALfTacnBK82IBEtr9nAuIoQFcNVteEn/DJR19Qfus273/nHT7/VLSRj6bC//u71m9FcJUkKmoolFjKmOIHPXQMuq1XyIAaa+sara5Lvy2MZb5go6gDiCR/jFLg7pu79PozpgNJMDlWWF0rMtRCMtKxd0Z9kjihWhbKSE1PsRIVzfZ5diAOaWW1SHOpQH8yIvKFkBcLKmEcoCoi4Ft4CZ12n3ff3+LlC2H0tjaucdFZMFmM8OTskjt2yWXzWNIoREmAYStsXa8xnIt3Wl6rkGDTG7ykLhH2dGuO6/kocoA+kw/otgIif0hvIEq4q/YqcaRTqdmoqlA2gRJR3jAY9oQBH/c83NmURqOGLwf8D3pz4t4LEtnKl7GqOIMRueyUD94VTr2RgkK5SHk3x2lbGHrPmeEFsL4l9mDmjcjbSwx7R3z4vrhQFwMPlylrW8uoilAYl6dnDJkxn0kCaKuBgoVpGESxbCFbzFFsA9cPrnhLZvOAVEojkgFKNAsZajb9bg8jJYxXZaPA+UCAPZTKkvF+MCJbNAgkUljvLKFWWqVa1Tk/F0FZtpSl3/HJ5BWQ/E1ZcxNLnzIbi2fXK2v4DDg8v2R3Uzh3zlhjZbWONsnx4Fj03h48b1HMLrG2Kfqmx+MRrhITeD6jiZCzpaVlAmfBUr6BL/vjb6+nyWYUTiVyXbVeICIhOytcMdDHUUQuY5IrZK7Ql9qAjjMEAAAgAElEQVTtDn7ikiCJlBc+mUyKWrl5hb6oJyaYEePJAlMOYDvODN3USMvA23cSsnqBiTukJ8E/1CCglKsTxA7tvtirzaVN/sW//AP2Horf++RFRMEK6XZ0VqtydslNo3kGmZSLlRIyOx4MWTg2qytCpuazDkGcI2NUyOaFQzKcjIl7MZokhT69PCZpG6RSKeaSQypJUvizMbqlE8s2y+GoT6PShEDIT2W5hOsO8BZjLPksyxC/0zCMq3Yi07SYzDxkly5ePMH1EyytchWooQRYtkroj7FMIVNx6FLIZ4kD4aTpQYSmaeiaeTVzMZx38cMIRVVwpeypeKRthaxRkrI4wTRMuu0hi5mcI9BybG6Y5HIZGjLBs/+sTet0yIUkgM3mbdww4tr2OqotZ/tsC52E9c0a6Z7QJYZuMp8vCDxJ4J0kODOPTLpERToy6VwTP/RJEsjnc3JfTDw3IJSD1Pt7Z/T6bQwb6nWhk9J2iWtbK2SyOqtbwlhl7SYfP3jCtU1hrH/14CH18hpmNkO1LuTu/PSMX33+11y/scvhqWjvm152WNmtk0iDpgcx/+K//2f89G9+TlbOEq1uZPn2uzf487/4CdO2SFw0SzX+0Q+/
wf5zEbA7C4VKdpNpb8p0Kpyk1bVtCrUsz/b2iSQA0e+8vYPjj7h7RzjUkR6SryxjaxlGUr9lrQUnox6BnMf79JOfcnB+jmpXaR+L1t1SsU7a1llMslfy2Z+ekssWMWTLsaGrLNSQi8u9KztgG03K5Q16w0N8KRvlSgPNiDmTqKNv3NqlVM7w0cc/ZWVF7GekxCiqSbO5SSKTXO32IZniBeOhCO4uz6aYqRlzR2NjQ3zu6PkLNm/cJJ210SUQzqOnn3F0csD166IVbDDsM/rVY5Z33sAqCPvbqK9y3rtgFkzpdoTz8aM//Q+kChnWNv5rKYsbrO6YjKcXPGwJJ8mfdFBDH+vGu1eO9y//1wO++Z0/JM5LXigMem2NzcJtFhMRgLXjHl+9fMx6eZmRBPbo9Xr0/RF6SXK+pTNc2yiRzR4SytZkVdExTRPHczAUIUMkCYoS/wb0QoE4CrE0Hc8TZ7p1rUitGPPBNzcI5NzX2cEFiTrHkI08o+EMEgXLVMhlxd8y6Rxvv/U2n33yR2QrssV4GqJo+m/QOROFOI7RNO2qfVhREgxDogfKNvVCU6O5tU5Fkp8eH5yxeeMeF93/T1vg/b/HVw9/ydmZuC/VWonFzGA8PSGdFrNv6YzKdL7g8Owpu9eX5Bl/wXg85+Z1EdhMRi3Sqk06XaHTPQJgZ3eF8TTNbHFC76VwYMfDDL/3e/+UxVjYnf29Q7ZvVPnswZ/w1q2/B0Aqk+XoqMPKynV6coQisXUq5SbdoWgdbDZ2mYw/xrLSxJp4lu5quPOQR/sHLBA6z86rjIYqG3khr3sfP2I/PuXOm29ycine8/Ybb/LwyRNWlzfIFmTi5OWC3d01LDkrenl5wXA8I51dYX1TBAOt9gmjXky9uo0biu8LQpWT86MrEvpMxiFWIp7uvcSRLaOjWZebb15jPOnQ7wgb7TkOb9//3hXa7Hw+J5dZIQj2GPSEEcnYGfKlZby4x2gqOdawOTj8Ke++L1rdBgeXLK+s8Dvf/Q77T0Xbc66gksul2D94TCLRen//e/8AbX7CpeQWzWgJT8/anE/OWc2L9trJ+YEAk1ne4OaumNt799oWauQRI9s1S2XC5Jjjs1+x0hSfcwY6tdwuWnDGYiRn64oO83HM6pKQxc2Vm4xHLe69cZPZQujT/nBGuZbm7FK2dG4U6U9G5Erw+Ik4q2a9wPHxmO/87ns8eynaEOejDHff3sRdiLtXym/Qbk0wkghLFXa1uFmmfXDOtGNyKoOV/+q793l+csaLY5FEqG9bjIc6z54eU5TckGfnl9Ryy/jRPrs3RfBo2VMuHi54575IhPlJh9bzGU3rFqYMHov5ArNJRLd/RGNZ+KyzmUMcKgS+sI922ubG1hsUck1mI3HuhZzJ00ct7JTCYCj80c0dMRP+d63fiuAq8kOiiQiaKuUmczdLLh8RBBK8Ip1m/8kFupri7q5wYIeTAVEUkU0Jx01NTJRQJWUraDIBNO57jMZTMvkc4UIcyvp6niRq0FgWTpJdzzA25ww6Q25uS8LXokEYTNDVGKMgNnzQ1hk6x9SWxcM1Q6XWyGFYfWxNOB+TUZt8tcp4FNCsie9rnQbEMYRSuQeBTq2e57JziKrLqlhSZDgYU62VOZMTe763AH3KfC6FsGyxcDSCoMX6pjCqndYcRQ+IIpVEzn0dX1zQWFthIY163kqRUlUmkwmKzMzn8lUyaY1IDuXnMzmCuIyRdSk15fzB3pQP31OYDUzeeksEFu58QHfio0ohHE7mbO7colRfp5gR+/nls09oXNPxwyGhHOKfjiZUVhvIeUg6vQWlQhosDcV6RRSrMB0M0K0iui0MU7W6zHh8iY4ED1BsJvOQRTigIjNLC0fn4OU+7717l+f7wgF6+86HPHvwgHe+KQju+pc9BsMuw3ZCRqLbHB8dUry+jOdOiAoScrxRw5sZbG+IZ19b3+DoeI/VssIHb/8uAP/6//jfWdvcoFKqsbolABCe7weMB3PiWBiq4aiPosSkjCXsspzxsnv0Oylm8QVd6ZCMRy4rG+tXA7WnJ4eouoEfJmTlnNlkPsUyNCYXI2LJXKvrBrlckzCUqJGGiaYFdC+72LYkJF4IosFiusrluXBkGs0y+UKKg5dCaTXKK1haihiPdEUYx9CxSEKPYA7/438nIJW3tvOgf0l4Lj63cbtGqZylsdSBQBj107M+U8fBUAtU6uLehos8zjhgIgfF7ZTGsDdmMh3SUIUhzOciuoMBpswMpqspXp5dkDPzKLGE908iKvU85xeXjCWz+/JKg2CxwHeE3K/tbtMfeAR+QrUkzngmwW/SGY12VyjuyXiBgkloCqcpVOcsFiFRyBUaY+JboFqomvGb4fjYJ5vK4cgAV0liDFUjNAx8yeyuGnkstUB3vI8ukzC5XIq8bdO/EAa8216QyTjk8mlWl5ry/GB5qYamRiTSsU+lHfKNVSqyklTMpslmc0RJhCbnzArFGuPZhIYRsbkpnqXpJpZlMZ2KfXJmDu12mz/983/DjZs7cj8qeEGItwDXkTDSUYhtp8hmJNLidEC9UcC0bRZyjxczn4XjMR7raNJYBc6ExVzj449E8BMToqgT7EyKKBI6XcOmmq7zcP8zCiWhJ3aXVlkYeUYjcS7byzbn7SPu3N5m3BXBhmov8fLwhHffu0cwFXL9+UdfoGYz3H9LZEPPzs4JAzifTzGrIst4/923OGmdYBgWWTkfevisz9d7n3Hv25vi/CyDjx/9irJ5m49+LPTGP/lvb3F8vE+UiD0paCYXz3z+1b/6V6gL4Uz+9M9+hJ8xUeIAuyR05XAckJQTfEWc8XTYo1zdIBVGuClx10ejS0ZTjYU74v7d35UylOfw6GtyWeEcdHovCJMs5UqR0Vgm9XLrJFGL8fQCVd7/cmET3wnxQ2H4q40GS8u3OD3okJVgGRs7Pmu1Jpe9UxZyXiVfMXEGB5ydyMSN1sAfdzg571EtShCafJWJG6GoNm9s3wDgg3fe4umLl3z8qYBGL1ayrDTWmQ5imjIIdDMT3GRB0SyRXwib/IffeotGeo4iE6eD4YR5t83GzRukVBk8TuaYEZh2zFgReuL56R7fun2bWlXI+YOXF9hGFUUtkMhOFVULCYMENdZAe0VDAFES/uYe4xMnPn4QYUmghjA+5atnz0lr28w8EaArKYf5LGQ+lQTakUWx0iDunZOSlAs9Q+GtO1sogS5IzQHT8AgSiGQwp6iCtJg4uYJnV1WbIIio1hqYaXFeg5cvKYQb5PPiDs1ml+y/HBNpovoDcHZ+yI1bqxwdHYnv74y4trWBbmu4C/Hs0WBMKlskldLQVXF+W1tbVKtV8jmhp05HKi9efomWh3RaBBbLjdscHPwN+wfHZCUJ+8pamXb7gljOq5l2isizuHPjPQqvKuXhjPFsjpo6pdMVQV++nGcRtgjljOfp0ZiQiFplGV/CpR+dnpArVNFUDUuiH5N4rG2l+fozAchQa+gUdZ0oWjAcCtn//POXLK1VOTx+giFByppLVS7Oj1lZFrrMC4aMpi28Q4fpvCPPIUVrcIk
faaiaJs/KZ2OjgNIWsn984JLK6TizhM0VEYjWl0s8ePSIt+7eYtKTCZeMxaDXp9YUezBZnBB7abZW3uVYEqnv7jY52O+RJAWKeUkM77kYhnUF195YylIqlfjpT/6WVVmtt8w6kRqxdXOd3kCi/AULBi2XVEXostvv5/j8y59SLm1eUeAkypx0dhnSKh89+iMhQ70NNrc3ybiBPAeXtbV7WOUMSFn0GKLEZcxMCV3OYTcaGT778icsFmI//+E/fJ+//sshfrbH0bGoAKlWDdQ8mbxMerttFD+isdTk8ddivsoPbmNmsjx89pjd2yLo+PhXz/D67hWB782bK1ycXuB7l+iyoljU7tKe9tBUi29/X8ywf/3oEblUgVt3ReA4np5TN+vsffbndAdCpkqla+RXyyjVFS5Ohbzkchbl1Cp56T8OJinWmmV21lYYy6p7ytCpVtMMF894+JX42+1btxhPekxlsvN24z8jVzzlT//jR2zJ6p2iaASeyni+R16CJGXyr7Kx///r9czV6/V6vV6v1+v1er1er9fr9Xq9Xq/Xf4L1W1G5CgMPdyJbuLwhmh4RGQLiE2AetdnZuIdhK/SnolVJMyPSlkVKE//H1A2GIwfX95nI7HYhn+HD732D3pnGiwOBkHT7bg3XKaBoolVBiWzyOZuNxiatqchExNaEjNrAfdKn3RXZyI0Vm2Jwl/ZMtEZl8xUG3QHV8jLFipynyt7loH/JaLhAi2VLkmrhzCdkbDkTNR+iaw0MqgxHRwDsPfFQNJeMnebOrXsA/OqXX5Mt2lTrIuuXJAp2KmBptXbVclBvpOn0jnAcyMm5ndXmEoZuUVwS2ZG8XSR0oDs6QTFFBjqdLtO9bHEp55a++c3r3H97i1b3hK6sTmTVPPP5HM3K4ckKl6mX0JIxQSwy0td3t4lClaVygY++EGhFy6sFRsMRpmnjSTTCzTvvcXF2iSmhvSPfIYirxJpBMBPn7gSg5yJSVsBs+gqCMyFHgalEAVxdLaPqGrPujMgTGcWLVp9atsS467K+LNpdOq0L7ty5S+dSyMHZ8TGJDhEmfvQKtdHh+YsnZFIZXkhelmtrPlkzz4WsHqpAkmjc2dlh76GYV8mXyuhWzOngGXnJ7ZXJzbC1JToXImvUKBQIUVCNFKsVkTl/fPSU7EoDz+1SlRwkijbl+HhKUaKHlYsV/DAkni+uMnWZTIbhcES+UCJRJSKjBsEiYjoVGRvX8SmVimRsDVXOq6HE5LMZyrkaCzmPk02liaKIWlXIhmkaxP4INJO8KSpQvhNQWgq5896HPD0Vsy9/9ckDbq2n2XskfsvW9RkXJz7FUp7JTJzN2nqd+WTK4dMOoStkfftGjihYEHjinVQ1xfrmEpoR4jji/vXbIZph4kpI11wui2WnSOsaSiD5ZKIMOj6FTJm0hJVtliq4zhhdwr9FEWTSVc4vL8hmXyEmivMZDT36A1ldIiabNfDkTEQ6p6PpRULfBEXIyyKcoxsWdlrBnclyaxyhKgnGq0yopYssuSeqWgCRF5BOBxhhnrSsPB49PyVrZrFS4g7du3edZlW0CcWIykehaKH5FaqNFMsrooqxeu2EbLHEqC853nyXbKZImMTMZcuf67rMxhPcecygJ7Jumq7geQviWHL/WBalUol8Pn/VGlUul1kqltFU86rC1R90URSF6URUWjRNA1XBCwIyBTkTqGqoKEymDpok+rxwuiR+TFZmQlE8NNtksZiBbLsyNZ3WsM3SSo6nh+L3vHzmkEoFBJLT6rAyRYkXnHcuKGaEfDrqHuNRn3xaVNoA8qs11IxKyhKZ3Vu3mxjGjA++9U3GE9mimmhsrFznH/zgPi+fiP54zw+4kcnScsSMiTIzSGKV0XyIVhI64bjVx7QyHB6JisZhb4SayrB9e4edLZHd/vknP8P0S/TGZ0S6qHCvV3eJ5i6xpPKwE4vzJ+eU6iVGc7Gf+fwyoT+nlM9xcS6qL4Y+YPf6zavKtbuYcnTwjJy9i6WJ/xO4Y8qFCufn51Qaspo9GVHIXqOSe9UxMeXlY43Va2mubYrOirNLm+FogJWOODz7TLxn8w6WleLFnqgQXX+rzixyCJ0ps6GQz1Elj2Fn8GYJecnt4ylluqdzfNkS6ycep+fPqFZXSeRc4/pGjZ7vMmgNee9NMZs5freLZ9VIJGy+n5qytrGOntP5ck/MBKUii6JWQ4nGfHPn9wH49Kv/yCcPO9y7I2DIx4t9Ot0x47GLbUmo0yQhiiJM076yh0kUg5qQSNjIIJQV18WCkqzMLfxzuu0YxZ9x/963ANh/8R9QLZfNZVFNe/HiCD2JyZfX6PVFRTa0pnz1eY9ETfDlc4kTElVDf6VzY40k8VFUFU2T86K+j6brPH/+gu6FOJs7u+/T7w5otUTV5htvbfNg/xmO5K8E2D/4lFT2Pik585UrnKAa5zz+usXW9qY4FzdmZQ3qlQpqIpEq1QaGDv1L8U6rq03uvfkug9mI0BV/Oz7aR8Pk937nv+Gdd0QLVa2W4d/9+3/HoSTCXlmLaF8eUsldw2jI8xu2WVtd5c073+KkKs6v1e6w/6JNsykq55aiMRjnyeQWHB4fib/ZS9h2ES+cUjBF1fZi8CmRF5HLiGcX7RJKlOPp02Nh4ADTDri87FCplJiOJVJuPmYxUxlI5Nx8voDrD8ln8tQb4h3Ozg+xywmWpbMqu1AeP33MIrR4+lBUqddWVjg67JLPrzDoiWfV61WK2QbnJwMU2XJ/++YWqB6HJ8InSJs1FH3MeDxhQ5Kdnxweoaow7yuUC6JqM2Of1fUCnZa4axurO9hp8GYmayvCT/HCGbev3WY8fkk1L2zB04eP0KhRXBatnz/7+C9Q9Cxpx0SR9Ay2nWK2GOB5GpWSaPk7fbHP4IsDLvfE74v8Ah+8989J1zLYaXGPC6UGz4+/JFEnVFOi8r9/1GPn+nsM+6KK89lnz9jYqfDwwT6pvPBTDDtPt99nPJEdBfoqmjajUtvl9ptCplzHITEi3Mjh6WOh8+yUxvLyNQYSiXTQ7WHaU8rFbbLSrD776BfEzV3WduocfCb0lOfH1O40+OLBvpTpMfe3b3Hv9pv88leC3NlQfLzhjELexNEltL2V4dvvvs9oKP7teWlSpoETTMgXReXx/Hify/4UM5fm1htCXlRUtlbvkk6LDp9O95ynzx9z9/4O8/mrkY0UinVJNsxiW+Ief/xTcVf+rvVbEVwZZgrVFJcsShSU2Gc8WVCURt2bpjjuPkYrZBhJEIF0ukjVzjKU/Fjz+TmnrQ4rq6vkJImYu5iQMa8xNNr02kLhtrtdnOmcW7dEy8PBwQEZO+F0NCGQDoJqW2hhnd0VlTd2xEUYTh3UlMpaRuDlr+80+cmfveTzT75ieUU4eAeDp7QdD3cWoobCWdRJsDQD3xdtV6aZYtif4MynWJY43MCJcJ2Q7IrG0aGA176+W6PX8a6ciiSC+lKawAdFla10hkMmXSGMFhRki8FoZHB5ckImI5StlwkYDAZMJy7bN4QTX9CnhIWIel30A5tpnVb3iPl4gGUJp8
mwFRaOwWIW4SPhTDea5EvQOhGO26gfU8iPePD8BXFRiFLrZMDaUgVTtehIOPiz0z63N9N4kttg7b13mHsBn37y9RV3SjD1MK2IYTwjK1ux2qchS8vbTBXh5PvhGH9qES4MxudC+RmGQxxl6XQGGN4rosEps2DO+oYoK1uTPMHilEymjOfK9tDGLfb2L6lVJwSIfTk+OGR9bYtaVQQaC3dCoZjm+cMnDGU/8NbuBq4XY0xGuJeS1TxVw0xlMWIhP+3LZxTrKnM35OhEBN7Ly2WGg5e4vSxJSrb3VRLSiokqFU0unef04pQw9JEcwriuh6olIiiXfdluIAhgDTlZnS1X0NAw9YRQ9v+rKlyc9ancLmLnxHk9f9nCtE3SBSE/URhhGzbEM6JYGKHAGvHyNOKr/b9CK4jft7yq8PBoiquLO7R/dslyeY3ZxCPwxMvPJxqmVaFS1cERsucPoJzNYxTFnqdTOeIYFl6fhpwt7EQdvDlXgZThx+hKgK3bxJYwOM3VBq2jNgYK+YLYA8/xsI08KVt81+XFmBCPaukanUuhJDNZoT8Ojk4oSwLWSjnNZNolJwPa3qiD4ucw9QoRImhx5j6KlgIMXMlrZ1o6URRdDctrKKgoaJpGKS/hvj2fs7MBYeQwaQuDXUjnUPFYqYn2AkPPonoqqpmQr4k7m8/WWMws8uUKgWxJa65cY+/JIdKfxXFd9l4+x0yZV8zyWmRgmRp2Cgzz1aB/RC5TuZoDWXg+YQC6nsJzhVPY787odsdsbDWZSy4qy7IIvBAZ0+PMfbzYx7QNPMm1JWggFKxUmt5A6ISlQg6lYLIIxLPjwMedBqTMNIkE+sBMYfo+550BunQCU3iMpiM0mXRyZhP0XJaQLCNX/Og4peE4WRq1KsWMbOOcB8y8HueSWHx1qUyplqF12qFkijP97Cc/4+47N0msLHdviqBo/+SCy7hK70gY/kLW4OxozHx6TjAR+7m920ENPBrrsm1ne43JIGQRTfmjP/obAM4eOWzfXObWzipnQyELB/svWK3X8VxxBqFiYJdSPPjqAGcmnr15fcp8MefazhLVJcm99fyC5foqw9kRALaR4Y3bb7LefIfn+8KJsO0M86BFo7oieBSBfC7H3HuKN6hLOdextnvo6hKH58IhcUYx6+vLjBdDlorC4Vuv3eTxxVd0+zIQf/oczQq47AzJp8X9UAs2y5Uai+CC9uQr+awxmlajKOXOypbZWt9AH0dgyVYhp82otSBXVnm2L9oHzwbH3NLTHJ+J1iF9rcLKygc87j3CdKVo5F38zBDdzGMEYs4lG0eoapGxnIXZ2r1LM6MRRj6xpCGJQwNDjwjDkFC25emKhW3DTML7R3GComgsHJ9qXQTjceyQJCu44RGPn38KQL22zGX7mLMz4QibVsKvP/5LVtfvkpbw+r7Tpn5NQ7dVkMFUGMzRlRyxTK4YmkYUJOi6djUfq+smum5iGCZ3v3EfgM8/+YJ8wcRZCB1xcHZGhEKpsgMIuPbltTqj4YxYQk+vrKwwmbVZXa/g+sJZ/e7v/IBHT36M4qe4vitlIR8wHYdsbIn3fvbliHr5Pb55I3elE1bWrlHMr6KoPu5cyOJkfIaSqPTlbOOdW98iv2vyt7/8Eiyh3zQtwXc0Npq1qznslFFmZXOHi2PRUvnG29t8/vUUghQ7W78DQKIOOTvuUSlXIRG/ealeoNMe8of/+T8FYH//F+zvH1CoV4mTNflbdDKZAu3LMwoy4VIvrII/vUr45nI5Wm2DhRuipYWdazRyBNGE0WDG/ovu1bMuWpfc3hWgIqYZMpnM2d1dYu+JCJwePXxGqVxgY2MNXdpykogw9Oldimfrhosb9onDc8Zy9nXQFnN2mzfLnF4IQBlFz1Gvx8RzIZuzrsNCU/nuh+9i2kI2+v0Jz/Yes7GyTb0qzsH1HhFFY8JAKOKKtcZoOuFi1EaT86P5QoIXjlG9kL4kFi6vNFlv3CJ5TwJodF6QSnVxFxpTOY87dw9xJmPcxZTlexITwI8ZLZ6SlvbwcvAV7UHM3W98g9PLffmsLkqS4Xvf+y8ACOIhH/3tL2hfdqlWhP5+cvmYRE1hmjHzkTjjaqXIaHKJocoZRQ92tu7SbrvostV06917DJwBF8d7ZGryHnkJ7dZLdnaEDM9aGmlDo/0iYLkmQGBIOVxePGZb+QbX1yUAmmLQ7b3Aj8S/PcVHj1McHJ0SSzCXP/jBD/jLn/0FqWKVwJc2ZZFhOr8EiRGwsqFy95u/y+MH54wmrwK3LER5TE1lIQOutLRdf9f6rQiuNFWlcy45QwyDWPWwbIWFL/mUNIPy2hamruPJDRkOzkmlUvSHQpjCcEa5WqI/nqIiDO94ZPA//y//J6WmhVUSTsvf/Nwjk0pxKTPZvhuiKx63b10nWQiBW1FWOG+1+M53b3F6JqpZtbTC0mqDz78SFYzzl1M0q4VllEkCOd+Ui0iRpU9AK5FIgLZCep5C0SV5beJgDC3sVIFQTtUbtoNmmXTGDllJHhsu0mT1LJYkIzzpH7OxsszM8Zgmr6ovHqVCkblj0VwTGe/Tiy9Y3lrGSgtnx+8uSIIhlfIyeZnlX9kw0bvXGcbCmaxmbHrDhIUbEDnCeDXWLQpVm6XVVY6OJb9BP8T1pwwl8lK1YeFqc87bfRaheO98ZoXT9hmxFvAqwbdaXaY3H1CRAWBxdcj5xz2+/947XBrCoKnpEc38Dk/OLoki4Yy/vWuT+CMUSwRJR4dHTMc6luHTbIp9KVfT9C5GrG80ePjsSMiTrRIdm+CJ9y6XixTq7zMa7VEuic+NZnBze4V2r4WzEAFQTilydjJle0dcmpcHT1lbXwYzYHVVBFze3OHy8oiNjQ2e74nnh5GOPjbIl8X+hoZKqz+l3syTkwHRYj6hkKmSs1JX1YKgb6NHCbEqEcXsCllzGdVYXJHHelEP3aiQtlM4M+HQapZF2ipBLBSwTpZu5wTdDMhLUsZyxkQtwGI8JSsz0G+/VaDTvyBfFnM1jjfHTBVxu8c4i5fyTAts31lm7kNKVonWGivokU8qJX5fMZ9GS1QSRcGRDvv+wTOWUxFxnGPuCOcmm95AMQIWoTAA6VKKtJ2j0w5QNGEcN1Y3cB2PnJyhm0xGZNIFZpMQR3KGnB89ZzidkTLyaJ6QPVWLuehNaNaF8QKHvFkgcvtsb4n+cdcT+3z/jZvMXCjafqYAACAASURBVKHwFwsNxciRzok9SI+3GasGtpEQzmWlTA1RkoDQN3HGPfmbM/i+i5mSittLMJOY0I/wHWH0FraDqnsUMyVUCSLSafVI57IcHInv9/0Wqq6RSqXYQNzZp89eUiqVGDkzZnOxV7quo+vqb1DH/Ig4jqmXG2xuiZmWai1HrVqkUiz9pnoWLwjDEM8VOmk6cbm47BHHXKGDLhyP+cLn8y+eXVXYLDsgjuMrB6yQs0ljEoYhcxnsOP4CBZVkMkWX+swJEkJ3Ti4nnqPbKdpOh8gLSUt5Wcz6aJqOGqtkZJCkaCk2c3UaTZEF9EMPTVNZWskzlH31CToEHhedcyYSp
KhULrK5scJkJgLo/nCKruVoVptMBmKP7/+/7L1JjGVZet/3u/O9b57ivYgXc0TOWVmZNfbMJrvFZpOUZAFeSDJAG17YgCV4YQG2YC9sw4AXtgwakARYECDLtlaUJZuSIZFtUmx2N7t6qMrKrJwzIzNjjngR8ebhzoMX5+RrGBC9EmAu8qyqIt+779xzvvOdb/h//+/bXyKITMahSSLJW/RcykoyxV4T7FrReIk7H1ZoLWl0ToVhWKoscLR3QlU2r883i3izI45/+pLzPRkxtcao+RKaUuTSgjije6MBR6+OKC/IHmFRiKZopFGOSlXIp+t3+fBL73LRcTk7EHszPpvSLwzJNLFOpgXTYMSjwWe0lsQeN60CJycuzkYNdyrmdTYaMx77tNrifB4cdSgXSnQvXlBrC6Ol1mrgpwqh16bdFGc0dqdcWr9KoyrWZDLUyNSApStLzKSRNhtMyVZOCZUJE8nC115apWSvgiqeMxsdo7k5Ymw02a/Oj1UW1+qcHu0QycjQ2kKJk94LSiuy15d7zA/u/w71xQKxJWSqUrjJ4Ow5Ln2sssxKt6+SKU0MQ+iRScfC/kod1XJQ5N/QYpREJ8kUDOn0JRmkoYqmyLMQWuhmROxNWF8Vd8jm2jVeHx6ihAWyqTgPvqmgpAbBRNyHR/6A9aVv0tl/THtbRO9LrNPtn2JoJrp05lItQ41T0QASQR4jemsxRx6kSYSmQu/CZSZ7ZPbP7rL72OQ3/tJ3AHj1/AW5YIXhTMg0wPZSjR998im33/1lsXfk2Xn5lGZTxzbe3E/PSKMigeLyxz/6fQBu3fwa5bbH4UA4tJu3vsy1W7eZTIy5zo0ti6fHD1GSCp1TUVezurKEG4BdE+d/d/+YL73/Td6/k3F+LhwuVXUoOJc47p7y9J4s6t+2mXQTCpbQp+edM0pWiUJhk+VVIYu7r1+Cesbj5z9ne0OwipbKGqWizctdkVkuVW5TrWtEoUapKOROV8fsPumBqtJsiDX+9P4Tqgs5DEMyNLshhp7DsRbIy+89fvKCjBiiKmubwinrd1U+fO82P78rgiSV3Ao3rt4h8cbEktGv0qijqRH7u8fzmvXr165wsNsjLxu31ysateqXGPQnKDIIfO/BH/Lrv/5rmI6P/0rs4eraApPhCZasy1Tyi0TZGD9NmQ2Fw/fo88csNIo8ffSYG1e/LPdhi97wkCgWNpBmluj2T7l1+QaLbaE7P/nJc1TdIxg9xsqJddcji0kxpKKJ73XPQrr6AYv1NYqyzvXFziNMs05kevwf//RfAPDrv/HLTIc9TiZij5ea2xzvD8iye6iqsOlquQaKajKSxFvdwRHvXPsKnaMeSiKe3Wxt8fOf/BRVhfaK0FN+6HFw+JosFfuy2LzEaBbQbC3ja6KGdf9whjvN8OMBm5eEbj45fkqKxf6BcO5it8DwweeUFlZZkqy/h8d7uGOV1/ZzrlfEHl8c7TGdBQTSHl9bukacdZmNXa7fEA77T7/4lIlrsLjQoBdI30E5pdYocffnP5HvN6CQr7DcrPPxd8W+3H1wn6mrcu2ddYYdcc/osp/unzb+TDhXSZpSXhJCmGURhlEgTmDsC2Wk6QmTsz7lnEkmO4qbZo0kNlDk/5PmMdQqgbfPRDokmgW5vM559wzNFIdRtU0yI+LgVBbnL2yj6Q6ffHKftoxuaXpGoin87LNHgjkIWN3S2d0PODmW7ITnE7A8bn9pkcm5UMCjYZ+Zd8H7t7d59EoYwuOpT+J6ODIKH0YT1q9c4uGDXRrLsoi/bOH7GqqqgyJ+bzi9oGjVSHQR0bDtPJOZihdopFJJko057/XYXN/gaFcIYk6xKKBzcSYuy3xlmbyho2U1jmRU6tTLKFV9Xh8JI2bppk57c4UnD7qsrAhIR6a7fPLzDvnCCFuyYHnuGYvVTfyhLIIPBuw86rDYLuJPxTynScD5KKRaDKnXxHqens+4tt4iUIUD/eDeLmtb7xD6MRs1qYAVl/F4yjfWMj7+WKxL+1KE7uh8+kNJqX6s85WvNskt1Pmjn4n9m5wtYzYHDEYhuZyM0EwDcI7YfSr2vP2VDxilPQ4He1imJMLwQlaXVxhOc0wCcUjM8gZJcMJxRzwnVRMuBh2q5WVGIyGLqjGVzGopSSjmni9YlEo1zgdifTVNYTCIscwRbQkLzBSTQc8ljCbUq20p9xNsq8japogaf/KDXRzLIJj6tJbE3w6Om2TpjFpllaHMlBUsjUrJmkdxZxMXyzJQVQvPF/vuxy66XsB1Q1ZlE78g9DAMDVVmoBI3YTw+YXm1jS9EAcWFmmmzVCox6Uso60GIkk15eLAHgFMwKFdtjjo7jMb+/P1aS3UUR2VD0shXKjn6/T6Drlg7b3zEu+9cw1QM8uaqXAOXzNDwXaHo1lcXOTzaZzjuoegyw4dNu1XDtoqEoZh7tVoly45ZbIt3U8nhuxFbG+8ymkqO6jcEINYYTRZclyoG/WHM2ZFQ7mpssthYYjaD2BefsVWDlJg08YgCcR4NrYyum0RSVtI0Rs0Ey94bKF3YMTBMAy+J54xpqysNMlUhljbhYDDC0E3K1QqBzLSqGuTyNuPJkEJByMtoMiSn5bAdcVmaFngzn153gC+pw6tnJdrtJvncBY0FsQ7VWgknn8eSRb12LiVTDCazKYuSdn1n5xWOY4OqE0sGUd/3yefzTCZCEC7CEFVVMQwDRcIXDV2wrllmbg47HE6mQMpMzklVVQpOHkVR0GXmynYKZFlElqnEyUw+y2EyHTHZkeQKCEa2wWAwb8qcZAoXFyP8IMCx32Qx6+gOLEpD3Mmr+NMcQabQ3hQZmtnUxcyGjKYzLo6EHs7lbCqVS7Tb4sxM3QsKjs1oNKNclQ6m5XL92hq7e8Kp331yzGIJmjmDdEOc2Z2Oz6vXn/Pee18jXxUGuz0y8e1zeq74rUZ9kShUufXuBqYhs1SvT9l9lDIajoilTOVVnfPDLrdWZbPe4grL2y32d17Qeyz02zhfpdRokkxD9l4LQzhIVcJpyngszvrJ0QnaUpU4Ujg9FhmF0ZnCwH1Ju91CVcXcbSNhFoyoSBbHQqHCQquO63VwcmIOR8czglkXy9CpWGI9956eceO2zuCkL9/vMl+8fMilrTXcI+Fw2ZZKkjPIck3OZFNWxSwSqEV+//e/D0BzuUi1WGC8m7HcFHPKogGOVmU6vuDhoXi/5tIaXrrHcF+sXeqaPH16ROhHkIm7QdVt4thDVTVBIgFomkoUBXNShpgppApZ4rDcFoGM8azDemuFKIjxjyRbJz1+dvqCQlG8b3u5QK1xmc1fbrLzcwGzmo0HjHo++aJOlr5hAnVIdU3ASoA0TVFVXQYpZLNxVCAhyzJWF0XQp9na5MaaxYvPBKuamyySKxzwa+/9Vf4rfheA508SvvalX+bJU5HJQstx+53fZOoOMUxxHypxgILN+WkfXTKWHh494mI6IEmFXm5s9vjP/+Z/xPrWB5SLAilTqlhoWDg5HUcXtsvBwQGTUcI7N0Wm9/ToOc937oFiUCyLd6lW65TzFj/9kwdcu3FT
rF0+wY1ecbAvgs5nz1dYX9vkp5/9M1ZORJah0VzAm7lUCsu0WiKDPwu7zEIXW5YFdDr3CbOA7cuXGU+EPq3VFtjZfcBy6xJPX4jsUhIVWDEX2dkRsnLzxhU222tgHLN/IFibt9Y3qFVWGU98GouS4Ml/xM9/us/SonDusiSkVFhnPH3NVKKKNmo54mAIcUqQCN388MEzNpZXaayJ/7/30xcYtoaXnnPxWmZDvvsd/CAhy+pUK0J3xcmExkKVY6l/dK3D1pUif/j7P6VcFvfqnfe3Odg/YaGxQhgLPbhcLXJ4aGLKwFSUHVEox+Rsi1TeFx/eeZ8kHNLvDrGL4vz1D4c0ShnIO3OlvYGmm+hGQhQKnXvj2nU6g/s4ziLtupD1x49ecvXGEjOZYXfsMnfevc7Me407FXqRZMZoGKHakrRInRGlLteub/F6RwYE9As+/Og2/d4My3lD3hZgO0VcT8IECwmu22U8dRnKMqDPf7bLex/eor1ynZN9YVuXymt4foeFvJCVfDNPkuWoLtTYk5DtleYdqtcd7t1/yE96MlC7sUIwfU61LlBpuVKNs/Mx9bbDUUe2ySjXqORjzo461Jcl8dV4gpU1Wa/LrFissthwKOVMHtwVcq0rDu/fbvDs2S4lS+iunCWzB3/KeEto8Xa8HW/H2/F2vB1vx9vxdrwdb8fb8W9g/JnIXKVpNKczDqOMqlXAnfYpVmSzw5mHO47QdRMlExHaKIxxvTHHMlJHbLOwqFHI5ef9lEw7pODoRHZ+HuX3A5eJlxD5MstR6BNHKaadpz8THv7w6ResrWwzjQJS2Vxx1KvQWqxw45pIwx6cHHNycUoQdHGnwoM1jQRfhR998hjZqok4SGm3i8QyuhUMK0w5wqkOSWU/l+koI41VTFvHsMT7oevMfJ+eLCJcWlhBURSm4xGGISGGvsqlrS2MpMB5JiIIaSHHQf+AiswIedE5rWaNx5/sUiuKCFT7eo2Tk4iS7Ed0fHTK6/0Rqm5yPhRRhtSImAUzDMcglbUEWaJy3j8gVUWk4PXukNOzC8IsjyVrvJI0pWCmmI7H+ZmIMrSKl+j2z9mWuPDYrzLoDghzKVEgIhhM4XrpjGvLGq96Iqvw+Y5H6qtc2hJZgL/wV1ZZWnf4+//bc372XKzde2sZnd1z8nmHWxIu9emfHJJbimnfEhj316+mnIwfYRRmrK+I9Y1jlZdHJ3izKfWC2NMsvgDF4MWOiNCYVsbK6iKPnzyjImEBi+0SnYMpo1GMYYp5Fcsqk/GAztEbfHcRUy2QRCn9noiK2Y6GouXQ0hwvXwmZLZUdXu28wPdlbwrHpN1c58XOU1RZ+5K3l3GT5/h+h3eui/q//aN9RqNzTF3ssZPTccwFJuOQpWXxfoP+OYqeUasucHYuIvGVahMnZ5KTWdzO9BWeP6OrhViZiMZM+y77u+folkmYCpkNo4TmkoUWi2zB7vkpraUWvquSMyWleqmA714wHk+IJXTvon9MLmez0BDrSwSxlxD7EdGbRlO6iZJmkIlI6+HBAePxgGariCoppNPMQVN9LrojKmWx5uORR7u9OC90bi0sYuopj5/cJ8qEDDcXRXasP3TJEiHDrjdk6ouagDcy3VqsMAoGJLJBcZqCnsuhJBGxhDSGYZ3AVTBM2XtHU4kSiMOIxUWxBgutHJPRmHq9TiJrs6YTn+6gj+OIKGetViPwQ3q93hyCV6lURH8cXSeWvZPy+TyW6cxJKDTNwMmLSP5QUvJOZwEHBx0ajSbtJZElzuV7875UAN2LPq7r0jk/I5HQXd0wcJw81Xqe0UhkqpIkw/ddQWQBxHFKmqYoikIUvZmDhqZppFk812dJmKFp6pzyOI5SZkSYmo4uKbJNw6HWqJOmMQpvSG0iPM/DljVzCjrn56ckZFycScrqwQjDsElSFUURCvXx41foukqxIM5Mo6UzGc3IFxLOZB3YdJyQqGNGfQVkuwI/PMAwy7QlaYEfXKDrOu3mJYLoTaalwEKzxDvvi+z9//473+OXvvEB3/+T+4SyxUFr7bvcuaVzcNAlkFm44TRDUYpc2RawQEdVGfYHZCrYltirL9/ZRjFMVMvh+Eic/+NXY668c5N7rwXq4NpSmwXbJtRy9IdC32zV1hmcnOAYKYasVzva36M3heFQrK+Vtbk47WPaEbGMQGvFIavN2+TzJg8fi0Lx7/zqn6c/fkwg77Ri2UYzcnT2Q3KObDkycGkvL5PGPWZjIYvVUoPZ2QmRJHcZm8ecdQcstj6ivSRpumf7nB33SbWM08+F/l7/mkOztMFf+4t/HYAf3fsRga+zsF7FC4QsnhyfkgYKllKgXNoAIM0iqsUSSSbeT7VVapUC04mPKqn10zRD002SJJk3C0+UAN0M5vUqaeqSJg6KolBtvYH3FrFyq4STgKkla0NK67yzkePJayEHzfICu0//iOmPYzbXxJxiL+bK9UX++P9+TRLJeWkaoZJiIGQsSxUyRUEm0oRMaSrIWs1Hj0T2ZXHpJt60R4DQUybnBMNVXh89m38vTkTz4VjWj+i6zeHpE6rVOoOxtHncErdu3WZnf5csE5nOvb3PaC2v8cmPxZ4rX/11Pr79LX73X/wBf+0//isA/Gf/yd+mUW/yl3/rSzx5LGqEis42K61tbNkepj/ocuO6yRf3d7Ak2VGSnHN0uEujucY02AOgc27w/gdfZdQXMMRcq0ShFMGxjyYj+z5HaIbO8mKD8wtxF50Mdink61wMBKX7SuUKiRGiEfNawu1fR0t87Ze+yr17D1lviyzxk8fPmXptrt8WmZcwCIk4Ye/VPvWmyMxhJbw8eooWVYglXLpWreP7LmNp32xsbJArmnx6/wmaRDo933lKzd4kZ+vosr9owalRrhQ4OxRZ24/e/7fZPf4xx3sqly6LOZycPcHzZjQbm2xdFhnSBw9/REqRRamXXf+Me5/1aS05nHWEXbT/Ej748h2++PzVnK48iRRWVlYIZfsZ77RJMT/G1XtzIoznr15i6xql+gapIW2OhkJ3PMaQGUxdLeI4EKdDzk9kLdGqTeYvsbj03hxBMOknKBi0V8QdfffzT1lurZHFRa5eFe9yundMpabN640rxRUOjp5zfnzG+rLIYOqWwuPHrylXciAh8XEYY9saN24I4pjnz15TLKvcv/+Ab3ztuwB85asN9g+fMhr1WF8TLX8qtYQnj48p18QexNEYNItoOqRelRTu+SKWo7Kw1OboRMC6u70+a6sfUq0IG2EwPqRWr1IpbXB6LHRSrV5mbavOZBiQSZjz2cWY48OnXNsQPRA1LcFyYkbdIfXyopQzm1HH49rmJXrSri023pQk/OvHnwnnyjRV3pddnE9PuiRZShqZ+EOxmYpisrJUF/CWyRuoYEoQejQa4pLVVIskG6GqNlUJxcoVEsIAasUGp5KhyTJTKtXG3GjJlJQkyzC0Ip7EittOnr29c2rVHBMJC+xedOj16pRKwsAslwoMxzXIEpyCmKc780lSn4WVBpEseru6USY1fQ774tmVZgPXH9JoLpBKJW2oBidnx5g5+xf
fpP3uujqDnFSYmtLRGESSKN8eITbOMWAN3GPc4vn7G/c0Bqib1drMQkM5Vb621G1zKjr2jkSoDkjsHRi9SbEdfXTymXxLnfqHdYLn12tt/i3XcFXPHh/a8Bf8Xf114RWrxqr9qr9qq9aq/aq/aqvWqv2qv2qv0DtF+Imqtqzcq/+xvC+1TQGfR99m83V5or5AZpYgioiYz2fvzpExRNXSnEz2YzTNvBtAwSX0R2S1WHha/gRy6pjEAXpMpavyeiXTVV4cGb6+RZhfNLkfbUHY8ck3rHJsklXGnD4fx5iCGFDVuVLvVGmVbtAbokePj48ac011TyqEKrLjzsKIjo93s8eSKgZtOJz/b2JrZT48MPZEHdRpMgCEUNhCG8/GUY0F4vMJ2Ifs/6PhWnRHezxY2LHXkTKs0qz876VGsisrLwEjrN6qqWIY4y2u028yAhkPSwvr6k5DlsSBzxyeyCVBO8/tW68NSn45DvffchBa2LlkmxLSvF9XsomYhIO+U2F72MPAxX3P9Pzubc2uwSpw6fH4mo0U7DJFYbuEsR1VkuMnZ2NhiPe3gSimFbdcIoIifGkmldNXYY9fvcvy2iOs+eXFGoKRQLDQ6PXsixa2PYCgtvSSbx+AXLIlpmGFJjajq7prrRwtI7DPoCvqDmAZ16nfnUY397S45dgaeHP2X9jpjj6aJKuox58LBJOBTzcv/eNv/mz09pr1e4d1dESI6OLzg9eUEm0+iKHaIGGanlYEja9SxQKVWrhElEkklRXVPBd1066yJaaKsJ81lIda1OIGlsbc2mXNwhcF00U8KChmNU4yXZga7ZQhdHCUGRRf0oxHlMt1ZlNhN/a3bXsFWdUNZglGoGjqUSxSaG3GuWXmQe9Nndew3LFN/rX1+yTHOqplwvR8fs7W9xcPs+07mIeH/2+AjNivD9gGefHQPw8PZrnLseI08KXOsR8bzM63cOiBBj8NHxR6x3b3F1JvZZngc0Kh0MRWfpi6jmemcPS1EpVgwuLyey7w6eP0ZNavIzu1z1zrFtm9MLsdfeeecd/sf//v/kP/4vfpPZXDy/01nj+PgET9ZOmaZOuVxmsVishHGLToEwjCk55VXt0jIMKZerhJH4f06MomYEXoa3EGujVLXQjRzNMJjOxPvqtTago8po3nS2oFqoUCmt4cs+BNEIRdEoF1urPli2Rhrrq0yLoZl43pJKpbLStJrNZpimRbVe+wIN+Xg4WlHWdtttut0u9XoV46YMM88xTYNSqUSxJPZIGIakabx6zmCwZD5bMF24KwKGHJXJZIJmGi9JFHKRVbgh0FCUnCxPMDQdkIKvWUapVMH3/VXGq1QqoKoqtiShWXiuIKXIlJfinIisUBiGq7qim/qqm3ZDaZ/nOarMsMVJCIqCYZmQvayVAlbCu77vr7Subp7teUupQyWFVAOfaqmIbRk4jr16p2NZqKrKaDKVnwvwvOUKrjV3FyiKwnz+snYqiCOcQuELgtKWZZEkyarWKE4EkYVuGqvs3Ww2I4liyuXyF7S0wjBcCaIXHAfIKFg2S++m/iegVq+iadqKxj6JU4IgWRH9+H4AuUa5UqBccWSfNNqdGpVqEV3eda43x3cVfE+M4eB6wmzmslwuV9mJZrNBpVKi0XJWdWbNZh1FzVdrYzFf0utd0+9dMx6KMVj6LrbjUGuVVzCdjfVNBrMZnx8tZJ9ipgtXQgFfzr2maWT+jKPHInqvmpak2BfIh69+9avkqiIykpJFIEkS0viGtELW2mUxURSs9pqu6yiKQrHkrNaLoijEkagFvdF4m06nLBYLVFuKQo9iQi+GPKZck9nQtMk/+Sf/HpU6zEYCRvT483ep1zcYynORbEQyzykVa/zRH34IwHe+v0eWJVQkFXylrrJ0DcgtLs7FXWvZ0G62qLeqnJyIszJHiDmP5+I5aaiSLddorpnsbO8B8JP3f4yiKGiKzv6euMNUVchMrMm7aDZ18dwY3Q7odEQGYzIZMZ+7xIFJEAukTODHFCpVInlfmYUqkT+j2mgyGQg7rNyoQWLjL5YEofjN6xs1Bj0TpyIlHawho0lGtdzBsG8ouUciG5lGSBUJnGKGbTdo10Sf/PEQz8vo7lexJIX70WmfWR5wfHiFVhBzut0uYCY2YSbWhhf06Bba7Gw7IOngt9feIEmXPH12zDIRe2u5dHj41gbPDsWd0m1XGA77KHkZiYwkCw2RYYoVElmLtrPX5ZMPhnzna78NwCL+BKuik4Y1dveEbdiq13j8/BOeP5vxm/9IoIMOn/WYDC6xJIT7V371+yyjKc8Pn/HR478Ta6H0kM6WSRJPqBSEPdw7H0BuYMpatN5lj0qpSKn4Epxm6AUarSZnZxfUO+Jz/SMXNbK4eyDG0yhqaHqJdmeDwVBkEE/Pz6hWy6SJ6FOeQbVWEJqUUituMhmgWybV1hquJCRplC1mswVxJgZqMrukWnRI/IAokfXblX1a7SrHJy/oSA6AwF+S52BI+N3VxTntTp1hL4RM7Md7D+vMRha39nZIpRbVwhvw+PEntBoCWZWTkbHAUQ+491Bk9H764SfYZoEwmqNrol9JPqBcaXJxIp7z3e/8Y157o8zRi3Msee7/xZ//jIn3EfNJxp3bYqzOT3z+u3/+L36xa640XafXF5NUb5gYhZjDowGGFE2r1lVubX2J7e1d3vvJ3wBg2BGqlmJLIoc8c/BcHS9JMWUKcDAdoqtdvHlIXUIhlEwhDCeUJBa2UE7p95aYxksNG0uPGY8X+LHNdCwG/Oxoyd0727z1SEDBPvnwOVGY8eHHH6yM5VsHOzhOE7ue8dF7AqccRVPKJZWvvC3G/+OPjrk661Nrx3z31wTOdTINeP78mGKxyFyyGNabHebDiPlcHJpba21srQyJxnAoYGy3d3Y4PLtANXTyTBre8ZwwnaHbsuA8rnD0ogeVhJasmbFMg2yZMJqITVArlTD0IkapArq4iL/1/df5ux8dooUzXntT1ggoI9KsjD+5YfMqMp88x8tc1gpiPI1M5bOrC251Dlh3BMtQzDmmYXB+LAzOjfZtxrOIJNFfwqfsAkoeEKc+S19c2FYO9XabmS+MGNVQMCwdRU0xbLE2tNRmOujR7LSFMQXMZy6zuc9rr4v3L8IlnhsSzK+IpQit3bIJPJfFMsCpiwOqWqtxeqEjUR5UqkWM9SGBPebR2+JZ7/7JhxQjk3/67/z7/O7/JnRK7j2sUWzc55PHEtqGQ15TmS9nVIvi2UkOZ0+Puf/2HoORrIuw69haREGucxYFyvUZiR+gSzhqo9REXSaYjsloKC+0JKDo2CwkOYDn5Rh6AUVh5UA3GnXUvMh04tNuycPgtEelVKJQuNHecHH7IY9e/w4VCaEM6RFGGVfTPjVZ41EorHF9fMrcEBfMxt4WcRry45/8hM0NcVHsbu7QHz6hXHS4/Y7UgViG3K5ts3gs1qu3TLFVOLocM3Y/F2PVLHJ+NkSXcDvLzhmPr6kWG5RKAiaQ5zlBEuENhxSKch9jEHoZ9++K2rssSWl3qvhhj298Q+hXLOY3DKEaSUGc
CcPrc3a3tgklHE1RFDTdwLErpBJnPxr3yFIVTdNXTpih20RJiOVIjaJUZzxyadfb1MoSWudPQTMYDFyaTTF/KRGmoTKdS6ZMwyLNXZbhlCyXekPBnHZrgywPV4X3p6dD6rXOSvujWq5RtC2iZUC7faMtVBNCt7aJrt5A0hRs8yXTX38wIMsyHj9+TBpLVjxdFwLFhr4yRFEyHNNaERssfJf5fIHnLZEyUDQaDTRT1EDdGPppHIk6l5XulU4QpDjl0sqRiqII1/cwdWNFAhFFCaVSBU3WpqpKxDIISNNwZeTe1ECVyy+dXCGYrK5qaLJc1AklSfKSiCJISbNM1DgpYkBLpdIXdK8Ei6JFHMerflarVTRN+YI2VrUqao1uRJlLpRKmIRykYlH03Q8jojBZORXz+YIsy1A1C88Ta/B6NCSKY6IoWr1P13VM08STjHSZkmEWDFGfJWuLSvUypmELx0LCcmaLObZdoNNZk+OUMhpey98jjJbpLGM66WMYBhsbYv932mVKJRvDlItMSej3JkzGU84uhNGrqSa9vsd0OqYgiS+q1TKtRomtbbGvO13xO01LZzIWBvRgMOLi6gWnFzqmrEG6EdNuNmV9TK1IsVTkl772JUxbro0g5qOPnqCYOr6sWfvwow/wkhgMEczJSSUBRb7SJMsVBU018MMlSJ0iQ9NZBO5qLS4WC5ZLD9u2SYvSEgYc28QwX8IzgyDD0EortkDPTdB1kyiJkL4xmqZSKttYBYfWWlP2a4Orqys0KTCvHaSCyU11VmNsGg7vf/A32IUym8090U+9SbXVYDATTlIeqxi2QpS/ZLZEG1Nw2miKOIMvT64JXJtut8XehhjPWlNnmZ1xPZygmWKdaXmJN97axpYw/R/+5U/xlIST4z7VivhenqmcnV/y8OF9NOlAL+YujXqH589F0HJzs8vR8YD1jSZX5+JcdMMFppWRKB5JJn5zvVFjEQWEEpJbbXZJvQxFcUhySWDVyQjnKoqeUkzEnNpWDbvYJ5aCz05xA89/iqoPKKhSk7RQJopDgiDgzoEIsH788Sfsbh9glMXc2arNPBkw9WyODoWTbepV1EpMp5WgmJJd0lWoVKtMZQ2bbVTpbDb58LNDwplYw7/8X/4zNvYSHrze4tNPhG30s48vuDwfUS7LGlRDJQhUyCOsghRuXzo8uP8lprNDTiWjrlUu8/3feg1vKhlvG1Wuei8Ilj5lCYk/fPKC3dtt7jzIObsQgfZmt4qmtCk7Ys+6yyt+/O4TOps2t/YEW+B0McQ0ypw8n7H1FQE7NHeKRNGU04EUuHaqbK6voeUxw5FYZ3HqcXoeUW6YuJ7U2mzUMbSA95+Juu/N/S621capmUwla3KwTIgjd8V4e3R8yKOHX6JQsIklb0Cj3WI4vGbhXaNIwqXPPv0QjCtUqa8W+RU6hSrVlsn1ldxrC1+w+YU6mawpD8MpjUaT1rr4nmFrBDOF11436MsKjlnfplCA3mWfSBF33dbWFg8f/BJOSTz7hz94l7ffeofrwRk//IEIAm/tl5lNR+iayWQk1uf+wQHT6Zxv7Q7/TAAAIABJREFUfEPwHVj2KX/2r8959vSKclX06bLfo1DOObj1GlEkHGjzpSzl/2f7hXCukiyh2BSX5XDqkRPRaNnEgdgYM3dOSMZwck21LrzSfn+Arof0rkUmolCqUG0VeX50tIo27e4dMB0GFO0aaSCNAaBWa+HJQm7fsxkO+5TsOqaMPJbsBrWdMsPhADUVC7NZ2MLR6vzsfVHrQ1bikw8vUfQZrz0Q+OrBRchk9iM6nRqedIqyVMdQdKZSVf3WnT0WnsfO7jrHL8RFWG1H7OyXGPZEUS4ASkaWxytMu2lpjMY9tFKRxBJ/+/zoHKtQp9asUiyIqbxaXDHzFTYbMrKTh8SpwTLQOXoq6zC6m2yt1bg8F4vEiRc0zCa+n9JeEwbtxYsxt/a73LtX5EiqgLtXRRQ1RpcW4LA/pGiWabTbPHn/UwDsZoCpbzAe9UETB27vEpR4TFkXN9Xw7Ji7d+4xWZgsJQ16yTFxlwpxrlK2ZUTBC3BqDvOZLBx3ArzAQFNz1nfFIR0MPSqlqrjQY+mkxBm6njKdit9nFwxiI0ezbLbkBVft1LkeXXGweYAp0bGfHH1AYbeAJo3QvXUTs6SSx00mF2IjKnmX1l2bT48es7kjLtmnzx9z++7rNKVzlykZ42COGkMus3W2YXH//i2m/XAlnBi51zh2i+Mj4Wg4xgY6OfWygyHnuDe8xMocUkVFkQw7rhcQRilzKQaqqTYJAUE0Qy4XXNclCX0sK8K/EhGoOALTX2I7UuR3PsK0m2h6yt3b9wD44XsfMRovCeIJqjRXIy9ibdfhUArxGvpdTs8OaXa2Ob2URfx5QJyqXA6HGNIgKLVLnF8do0jHv90qUi4mnJ1csNsVB+cyNhktrqjI/Z+z5O7th7x4dkmnLdbibDqE3KBYLGFKpjZV8Vlf22EpxbJRPUxL43oY8vG1GM8NKfKcJDmlopgrd3GJZevkvjgZj84+p9Gs0Lsargr9nYJGu90mij1USfm7XPoMJ2P2JCtWuVghCjPcYM58ccOwZWNpDpb1MlugGyqzSW8l4Luz0+Hq/IzZ9IzdXUE0kGWiDtH1vFVR/8KdUamW2NsVRsVkMGcwEkb1jQCyXbS5vLwkigK6svC+2Wwync5pyMy5bTv0B2PhpEghddf3MAumMOzl+iwUCoxmUw5PjgHBoBbHMVkqAgMA49lUOGZRyDIU3ysVHVSFFQlFEkGpVBbU9TILp2kG7jRErzokkmwoSRJct0dJGoFRLLNPqrrKXNm2TRRFgrTj56jli8XiisY+DGSmRtZn3TTHcchIV0afqih4rot+I15bLq/etxItllmYwUCMr6IozMYTNjY2CAO59g2DJF5KMWDJhlap4Xke5+dXq7EEMSaGLZ55kx0zDENkmeW/NVWlYIv90ajZuPM5i6FHJjNumqEzjzx03cQxxH7cXN9g4bk/xxaY0u12MVRlRUSyuyUyRp63YC4ZUp8fXTGbLjE1yYqX6xSrNo5jceeOWGedtTb9fp80XSdMxN4aj6acXoy5HnmruUvSCMexVtmfarXK7q06ea58walOopi+DApd9sWayDIoSrr7aqVAo7lBpVWg3ZLMnEHMn/zVX5NoN2y6mQi0Zgo3RPpZCkqusljMvpDRI8txZJbKtm3JyJiwWFyv5lRTRTb1hi2wUqmQ5wqVshQRNk1UVUdz0hV6Jo4iJpMls/GMK/mqG1mITlcGFtISwTzBbA6ZjCVaoHlAs9nGDy95fCTu39H8jOf9n5JJKY1mo0OxZHPdG61+Rrd5h+lsTu9K/M0smHR3W4SBz4kkbirPVGw7w9AKeDOZuUpH/Nm/+jGvPRQEXiVnh7V2gfa0zscfieDfvQe7bGw1GPYjlkvx+xQ1wfXmK1KWfl9nMh3Q6qwRBZkcAw27qOCUUwyZEfFmCw4O7vP0MzEzphGilxTGkz4VGWwYDWOScE6eRsRLsbdr5SKqVkZDnJOZktJsOizmKgtXBqL
MlDSNiQONk0Nxxhq6xuMX77JU9wBYjlS21zo8O3xBuyPO5mfPn1DKCzTrdTxZj2eVbIo1FUfW+iQLBW82o+BYzCfij7//f/8Bv/69b5BkA7Y3xFr8/m98i9//g7/iXDLCRqFOq/oGB/cLvPt3whaslyqsbzRw/SPu3BYG+mI0ZN7L+dbXZFD9sw8omrt84yu3OPpcBN7VJMXOG3z9nQMOz4TXcHUxxaTEw18SgYwfvfdn3Lr9kE8/+ylRJNZLZ71CuAzZ6LTpX4jvFYsp/fMlt3ZEwF4hwtJhOkxoFsU9c3j2FKNgsshzPE/WXN2xKRTqXFyKcSpWbC5Pjzh+9oStrhhPTTO4fWufy57o9+2DHU5Ojtje3iZIxPzleU4YHnH1QuXLX/qGWBuX93GcDSae6Pd+5wGOeYXrVml3xZibeovLy1PyPGIyFns0TQ36/UsuxmK/bLUf0Gi5fPzhC7797e+ItWHk/OH/8UekkcI7v/yOmPdnh9x7sMngWvRpd+sRtWqbci3lvfeEfRqHdbS8yBuv3+MHPxTkJp9/PKC7VmEqM8skAdeDE6pN+PQTwYFwcOcRuiHYew1NnBNh9Lf8/7VXNVev2qv2qr1qr9qr9qq9aq/aq/aqvWr/AO0XouaqUjfzd74rI8LlKpcXfap1jcVUZDUKjoWumdQaVSqVPQA+ffwhd+500aRw3NHpCaqjopkVxpJ16GBvn6uLUxRFwZcidJZdIwjdVS0MihBn1DMb3RJRUG9qUm+UabbWSFWR9iRQefrsY3Z2RUZoa3OPJJvwyceHmJbUmDLb7B449C9Dhn3Rh+6mgTspr3R9VK1AsaqjZDWGQ/E+pzJD1VLmI4UsF/0aT/skqYYq2XUujo5ob1aJnYSZFBrebqyBpjHyYwyZhDRNmzAdUoyE36xnDlpVI49zPFdE00xNsGYtZdTItKo0Kyahl2Ha4v3tahunVGM2O2VjRzDnDIdD/OSaYkVGTIOMLNggCl3cUPTpOk7R3JxqocbSEmPu9QzCZYAqKYgVdcnW2ibPPr9AM0VGqFJaw3V9Fu6YzrqIDGixRZAvSQMZ3coyllgUiiF+KrIaNUun4FSpV22uzkX0rlBQmbkjClJjIsgW2DakHjiqiKZ3b+8zOjlke+8ehyci03E2+X/Ze4+nybIzve93vUnv87Plq6sduoFuAI0BOIYczgxBMSQuNCFSEoN/gbbc6n/QghsppAgpJDEkjhSaUdBoIIyBGQBt0aa6qst83qQ311stzsksIEKz4wKLurv6KvPmvee855zXPO/znPLKg7eI1rKXoXHNzs13ufhkSrwQ2ey6U+fSSzistbnzHYFX/8kHT+m5Dkkgxvdo/Jxho46SN5FoN6LyjFxRuRovsCWtUb/RwgsLApmlDsOYdq1B/9Aikxn5s+Mpd2/dIhh7LGV20qkOmM0mVGsC5rFcBBiGRqVq4st+gLLQaLouUVbQ6oss2NXlU7L1glfuy+xaGrKeR3Q7A27viEznoF/l6dkzMjWgWrkp5kEvuVp8wp1dAUFYjD3SfEpv/y6fPRJjF3hzdGPAOveoIDLj6yQjji9pV8Tv59mSVrvC6CqkKWF6/U6brFBJpDbdxeVzUBxajca2OuHNa9y5t8vKu2Yl+0UalRY1t8X1SFTlOt06mlJl7XvsH4is49qb8j/8y7/kP/7j79JqibFarC4xrZwiF8+o63A5GlOttFiuxZr1/AV7uwfEkcrUE9kzTdGxTRdHVrcbzSqnp89ptndIQgljNQssy2AVXuPIasSGIr1VF5WzMJpTlDEoOYXcu+azFRXXolTAMqvShk1MEyRqhl5riOu0mIxnWzhTRobnrcjznGZTCoSHCb3egGIDJ2x0oFQJw4hc0sjHcUySFcRxSlNql1Qcl8Vise0nabaqeOuAJEnodEQfRpwmMquvEkq6J0uvoqpCdBFENUbX9V/rYcnzHE0XcLsokBlLSxdU5HI8FU2V1QRly06oKMoLpjh5TmmahvYrwsob7S3bdlHkOsqyDNVQycuCDdIqyzIsy9pWOcIwxLJtmXGV/biOs2Uo3Px+WSgEa4+KhJWZpkWSFZim+UL7KgxRlBdCznEsermSNCBMNvTCgtlRCCqLOa5Vq5SFgm3kcg5s7t69S55mzGUV9fJqRJJknJ+fk0jttKzISYucSkVUf9rtNhSJrMiIZw+8Na1Wi1t3b20p21VVxfeirW7Z8yfPubi+EH2vcl+suDXQVNqdJvP5dDvvVbe21ctSJePhpqoIou+s0axtYcniczq6bmKoL+xgU+1berJ6p2poqkmulmRS8LXMCwrTwEvFmGssiZOSIitRNrIShY5pV3n66G8oJMNmTo6qsn3fW7du4QU+eV6iy2cQumkZjmO9ENUuhA1teuFM06ZaqWOY7hYFY9kGRZFRFBm1uhh3ypzlcg6ywndxccZbbz/g9OJzvvhUwMpeff0BfrCg0lTRVXFmrf0AVYXJVLK41nQUvYKa2vzi34v99O//ozcxrJjFXPJMawX9QQPPiwgkY3GS+ihmgmXUyUJZratMsa0aJ0cSOaPEWE6N1+7fI5UK6N3uLienz3jl1RvMJAvus6en1JsZ8tghiSycWsZ8tULZYJVLk/2DPvPlFYps4PRDn36jhx9LGYQyJEgVIm9OVfaBG6YNpYVWFttKc+AnKJjUWuLeQaCjmwmG7iJl7kjya1zX5cnDGQd7QvNouZjRbBbYfTGWZF0O95t89vhjtEL4kEo+p0xKmt0eC0mNXmubDPc7PP9KVHq0LAOtgtYKOH4kKmVFltGuDnn1QZeq7J9utHs0exankn31pz/7kju3vsY3v/UGP/rxD8W9jJSdztsMdyCU/f69dh8vj4lnYv5qjoE73KPXi7n4UvzeOlToVVuYdYvz8YYNsYXjZOSyIhQFde6+1uLo+MkWCj3YafPkyyf0W0NMTdi6F1xQ5hVWS9lbWimx7V00K2YktS8tvUerVWF0PWUl2R5tO6NZOeTmoYDXh8lTsjznyVdTvvs73wZgeuEzmR+B7HcschVF1VktQyp1Ka4eTSlzl8MHQ55/Jd7voHGXk7OPuJbaaXs7uzw4POTZ8Ygc4ffV6k2CaIS3zrhz6zUAfvH+39Dp2VwtxF725oPbpGFMv9tDt8X7Pfp8xje/+TqnJ4/wfNk/Nrqi2WhtWU2Pnk342tcfsFheE4cbYXOVYGawf1DnieQEsO1d6s0Ky6lYx3/497/Fk5Ov+H/+3Yc0e+Jvql6h0+vgrTwoxR63mkf81//iT36ze67IVfRMLuA8o9WospheU7cFtKUsQ6Fsv5wxHAgjME2TIJzSqIqDP8sKGnTRFWjsCoiMt5zgeT6mo5FpYuOOghFFGeNUBF49CtcUiopmqGiJMJRo4XFVjCkrJaWEmgXemu7tPTzZ1PfB518ybOsM2nVcSyz8lb9mfKHSqLrsS5Xo5ydjVmGEfykOyzJdEwXX3HlwG1XqQMwmCkWu0eglXF9uDicX24R9KWJmW3ukqoJdltRa4jln8ZrS1DFVC1Nurmhz9EBjuRYb6WBYQ0k1cm
I0UaVnMQtBKbBlr4+3Pqai3CLyI2wJMr+ezlg+PuMb33p1q0kyW86p1mrEC2G8lqayGCXUGwrTa7FY46VBfdBmfu1jSWrNmr6mVm8yiySEzHK5XqzItZBcUmLP0zWtfpMsj5jJoNN2cqquw0TqXg36XcKrBWajw3QuNrtqf49cC5lPfJBkA2q9haInTCZiU6nWatTadcbelFpNzNVnH/4S042Jz74kk/DF4bBKqk3Ra8IWE8/m3/3pD2i2Kvzdb4mFv5wp/ME3f58///Mf8PHPBE65bmW8cv82v/xU4NUPhkNee/0eZWHy8ANxWFbsJvPilJ19h1ZN2J5alCiLlN9+W5S1P3n/J0z8EdFpE1f21fTaYKMztwO0WNheFIFpOeTSGVG1EtWsUOo5c9lnVHcaLPKIne4eoeyLsBWLuFLharWBJVkYVY2sWPHRs5+Jsbq2ycMVzVqf0D8C4MGrd5lfDbg4lxS5SYpZsXh6fIqXSbr0WpWzJxfcv7WHKgviXjyhrjTx1wJSUW13UdM2d28ZTKYCXJOELl4ZM5GkCUrZomHWqNeqjGbiICyNOacXY3SlsoWHZaXNzC9RZU9bWLpUXR2zcEkkLOnpUwFjMGwLPxabOVrOdBIQhsL5uHlwm9x3KTSFdnUo53OAmme0qzaqIptjFQ/TUvClgOfVRc6tG29Qb7iMRsIWUXLisETNdbJYvE/NrYNisPAE5CGOY6o1B0tt40WS9KJq468UVE0hlxp2syCmLPNtDxaFg6WHaDqMZxLH36wT+CNGsyuOT8VafnD/bZ49e/KCaKDfochLTHQmc/G9drvN0yfPhfhwIX5vPJuzXi+3cMbJ7JRer0ejVd3CHh27QZ4plGq5he6pqiogzBtYIDG2XZLEJakUNjbMnDIpIM2xZY9lxamQlS+o0Q3DIk1z4ixlI0Zl6Dp5lmEa2hZKpygKpqajSueuVMBybNL0xb3iLKVMBc36hrK9NEtUjV8hk3hBc79x+NMkQdO0bdC00aGq1X/F7rIM2zBQtXL7TLqlC7pyGfQ5phBfVnQNRzZNb/S6Ws3m1mGvOjau6+LY4jnq1QpxHHL3zg1u3xXnx6D321TqDuPxmEupBZcmOfOZx4Xsk5rOVkynMasw3tJ0Z7lJlAScXXxMJgPf4U4fTVO2lMf93QH9wwErSU4BMJ7OUBVdwKkUYzsPSZqim5sARUVVFXQ0dBkc64YIjlvt/naMi6IQvW3VF6QQy+USTdOoyQDIDyN0s8TMNOJSrJnUdLDNAQ7y354BRQZFjilFYYPUI42nKEFBJvW3TNvC930OZA9trdFEVcVzbQLmLMtQ1Jwk9bc6ep1ue0umsnlulAJVywgisU+EsYprV7g8u2TYF8kbRSsoFJXjp4J6/stHj4mihFanSXcgzsirySlOxeX48pp+W4zVZLaiVqvRkCKpumKiGSGjxQZwCJbpkFHQaIvzcTZfslxFaJpCVQYkreYdykSQyLR7Ikny/vtzbKvKb/+OgIc9evQITVWJ0wRPiggb2oLVvOTjDx+SZMKGNdVBL3e3ML1azaE3rLEOPqfREMmxKJkwGa8IwgLd2eiNFZycT7Y2pVjCLtqtFq7cg7x4TRwl3Li5z9MjsZfEZU6r7WK5Yj7nqyNWUU67qW6TD2napd1tsj5UGcn+mAcPunjDgNWJmLt9rU7VOSTMPmHgyoBk6VBticBtKSGN3YMuz+enTKVNfe9rN3n+2QhVqTHoi/O/Xq/y7PFTHj0MkZrvuC2f3VWPPBDf69fanJ5O6e9d89YbgoTipz/6IWp2SamaqJH44jtvv8l48RkfHQtY2el5l+/1bmB4A0zZr/bqQZPBTp/5KKC1I9bI2999E0UrKeS5micuqX7Csy9OcKR8wY9+8BlhkNH6ust8JoJ404k4vzxHlf1/T5+G/NZ7FpeXJV97W/Qgnx+PcVs2u1qV2liK1esG9Z7Bs8cCNtfYs/jlo6f0Ol0uzoTfkKymUBosJcFUHETsDW/xztdf58unglbeMfvs7e0TBx6TYwlbvXnJ9377d/jlF0Ir6snjT1heBwz3uvhyv1Fjm8X6nDKpM5e0/BYN9gcPqDry9xZNGl1xRmnSj3cq8PGHl9y//y5uQ5zz01nAZDbZinzfv38fShWlaFBKwrXTZyrffOdrqOWM7kDEF1dXZ7jN13jtDdGq8Nc/+gVurcp/8o//IX/zgeirvxid4jR0Zv4Fy5mwqQ0t/N92/WZUrppO+b2/J3Chrt3i6Owr7n2thb8SCzb1Yb28oNGxMSQRhaqlFGXG9Zlwdjo9i/kswFsb7ElDTfKMZbREMR3OrsTBVK91qds2sTSUrMixjDbVSkIgI9ciNlA0G6epEkvV5tLcIYtmqLlUR/dzdgZDajWHSDaPT8cet18fksQl9faGhSvl9PQYRRHPregKfhjQbgy3h7vp+IzHY1qtfeJIZu/KHD9YY+pi0xwetglildRTSGPx7JGyIEwTdjp9/JF4hpiMVqOJhOdzevYMvVTJM41cVtjSOCHPVExDGEeUXGIoBbbhbgVYVcWhJELVUhr1rrzXEXv7vW11bX+vy9/89BPe/MYbjGVV5fo8pNG9SRbMkLqfxF6BbrTIZZ/berGkUakSBAGJ7DHTXZtwtWLXabCUopBR7GPZCkUqPqOqKrt7t5l7U9ZS7DBRI9LYom03CZZiDCb+iv5uBUMT90lShTxWaXdqJJv5zGqkxRxNbZNssqHkRMWKpnQGdF3HaQwoVh41Tbyz5lbot3ZRKip5Ij7XqHWwzISzC3HIGmqF66sxhze7XE3Fc7baXU7PjtgZ9jk5EpvBTu8Qp9Lixj0xUEdPRky9CeE8oZDNU+v1GlvVKayCpiNsYXydYRoq11cy+9Nw0QyHs8tHdBtSryrTUJUUQ69gb3q59YQ8sbaORpquadUHuHULA3GgLSfHDIdD4lRh7knyD71Cs1ohMaSNRSmW45BmOn4kNuA8T1HyCo4NaiEO/1G4Qk+TbaP43o1bGKXNdDLBscVn9na6vP/ph0xkZXCn26XpdhnNR1tigZ3+bcJkTq1SR83Fmrk4HzHc3aEndYSCKGQ8WVIxoVapbu3n3/zvH/P9P35nG7T43pJ2c2/L4hauEyxLYeUtt70y/d4Ojx6eUK9VaUsGo9OzZ6y9OXtSU8P3YzTVRNMUfBlIqaWKoVfJ8xjLEjZr6C5hlGC5myb/FMe0mc9mWJrYI+azNet1QK/XoyWzbjkRk9GcXk9UvJ49ecxrr72CF0zJZSCTZyaH+/cYT09IcmHrluWiYG2rAMOdA+bzJbajUsqqTRAkxFFGrzfAD4V9zhcjVC1nLat3pjbk/r1XSeOMSqUmPzPGcV1M00aX/ZNJFKOoGYoU5zZ0h5KUohCN8wC6oYiA3DS3fSqarhCG/ov+pDTFtl0yyi17npK/6KVJN+q/eYFlWUTS+dA0jaIoME17GxRtiDAsy9hWabIsQ1GUrYCvYWjkxYu/g+jb2vQLAdu+ql/ty9pUrDaEGJvfU1V1WwEzTRNN04S4uNzjwyiiUqlQ/oqQsmOKnqWqK
7XpVBWVgjAMqbqO/J5PrVZlOBzSlw67qqoMh8OtDQterJzjk+ecXwkH/f2fPRZMi1FKGEdyjEVf26a3ablckmYhtWZj+96qruE6sg9uo6NXirnajIHjONuARf8Vhr0kSVBkMAMvKo+b8Y2iaEtMsRm7OE2IghBbN1AkCcQ61TCNHkkibHM5maFYJnmSQCHZAo2CZLXg5KvH26BaM1SWyyVvvSV6oO/fv890Ot0GsyBYIlVVx3VtViuxv3meR5mztQ1d1zEtHccxt9WtVquFWhb4nrfVZiuKnJU/5+JKOG66mTKb+rh6l25HzFWhBig6+IlHEohxqDVgtQq2tt9q9gijJeQGP/mByKb/3e8/wHEqXF2LZ6zUTfJyTb1ex1Qb0j5zDnYb9IcDHj4UmkCT8Zyy0La20WjWiOOQ4XB3208VrHPidEGUrLdJEl21iDyN4a5IMI2nR3R7u1yNr2jKqr+mqxSJQ7trsQwnW1u4urwmTTfBlkLVcYnDZEsCc3T8hE5LCDlfT48AqFf7WBWdOJKJFMPEW2fkZcb+vhg724Dx8pTKMKQhK/9PPoxZKCHdmrh3w21QMRwePn+C7gq7Uxc2zZ7OehWgVMUYa5qG743wfbEem1aFXrNLZsXknqw4WxlaDWbXz3j+ifjb2+8e0upbXF+J82m32eHGzpvk6mJL6vHs2REVs81rD+7x6KkIUr7z7ns8PfoArRBz9U/++L+g06iSxxG3DkRBIPDn4FaoufeESBqQJac8/nTKxZnoc7++eMrDh3/NXDO5cVvYtdNK+PyTR9w6uEmtKp7z9OqcwgjQJaInLS1u7Qz49Ph9OqpAWxVGysHvPWD0l5cUmtg/Vc3ka+98h/c/EAHQ9GjJ/fsP8PMFmirWzflsTLfV5uRIJKtv3q4QrXT2Bq9xX2ph/eIXTzHdhCKP6PdF0qBMdd795h3+7M/+LSDOVT9Y4TgG04Wobn34ycfcuXmHy9MZrbrwXZptHW+p0uyI8e0NVfz5Abl2zXIufInv/NZ7PD96H13pY9csaftrFG1NTaIMJuMlChrv/dZb/PTHnwGwv79LSUq10+DJF4JRc+UHmKXF7p4gLfP9kNPTT7C1A77zO+JvT54+AhTGo/k2qba3d8C/+K/+x9/sypWqKpiGGBDfD8gilczL0GQGbB2oZPjMJw5vfkMsvDRfcXlSomRi8UyuJ5hWnSRd40sIYJF0MPWC1cynbYqJM42IpJhiSOFGS9HJkojFvEQ3ZeNmHhN7KyrNAUEgDCzPjmg2dhgMBKRKNVT80OPhyTF1V0xu43aT08sxtuuyuBSLcb0IMOw1ilSfVvIBvX6TMtW5HgsnO/JSqnWNOF4SBGKzi+OMer1CKrNpx1dHhN6MjrtLHIiFWKk57O8MuL5cUKlIAoTjKa5dMpZCykVscfuBSaddoMis8WQeUG2HHD0RAdHZMxfDWVGkzrZBWdWXlHkVRdeZzmWlw3KZLTwGPTEHK8/n8OYejz4+xanLTcuuomYJ8/mMfCmzxFoVXJswEM8U+z71XhetLLmSB1yU5fTrTWIvoJQMRqbmYFcg9CQcRSmYLkYkRUIi6ehDPxOZ3ekJui4Wo9s0iJKcUv67UAJKdDx/wXQinMfe0KTe2GW1nmCbMrud6mBWQKqx11ULNYZH0zVvvibm3dBSLtZLOpUWy/hIvF+9wgcfnGJXJCV3MyJyUj48+iW3b4oM4sLTsOw2y+WcblM4N1eXExR9ztm1sFeNCqZjoajlC3IFu0ac5mRBgRRjxzAdlssJrqRd9fw1tqPSqHRoyiZif73GUFWiNNzalBd4dDv21vFXVYckLQku16i6yCi7WgbpAAAgAElEQVTWanUuJmeopbF1mDUnIVFbXByLhmjbrZJmGkWeo5TC7haLGbka4mo2A10cHkWhs45WDHqi+rP2U5JoSp5BJklKLrUUy6xw76b4ThrnrEMVp9bBcaWTXWZYVgO30qBmiaz0YjVjujxDMcVaaLc73Dg4xF+NMWRD+yZweH72nJ098T3DsfGDEKcqKya2g6Ik2E5JRR5UQRDRqHdoNm0CCZ/IopJOo89CQqV6/SFhkBGlL8gWqvU6CiahF28Z9sL1mkajgSdhdEEQEGo+pm5jSmHa23cOGe7e4vLynCSXh55aodP2iSRhx907D0DJqVV6JKkYu93dIeOrgLLQaEjR8FJVCfyEVMLD8iKiJOb6eo4iJSPmsxWO3UBbpEThZm3llEXK1bXIhL779htcXJ7SbvW2pDOWI8RDO21zW+mgKNHVAkcyZ8VhjqqpZFnAcineudV6QcmdSgreEuHguo6k97VNQQef5jiGpC9PBHW7ohm4zgsqbYUCXdkEbgagbgMeEMGN4wgmwI3z2JAw062jn8TkWcmvJhfLosAwjG1Qvwm8NE3bOsKbudZ1fft7gj3xRTVvQ7derb5gTNywGZa8YFaMs5ROtYW9OT9q9e1nEwmfMm2L5XLJs5/+nDLbwA5j4jjm4FBUFJIkodNrY1kGpik+895736TZbHJxcUkiK/reOhDQTykm3+40SRJBib+ScD7XEAK7QgxazJ9hCOr9TUC0XIpERK1a5eTkZDt+zWYTTdFQ1Y1bUfwarb6iKNi2jWVZrCX0Ok1jbNulLBKybDO2LqpWbOdG0TXKsvi1+zmWyTLwyfMcVSaikijGMsztmK9WK8qyJAzDX5k3FQUdUOl0xD7c6+1IAhZJepWnIhBJXUwZ8F2cXBLHazQ1ZyDZAqPYQ9cjmpIEygsuGe47OKrN/iZIGV9zNbrGqbt4kSRFalgMel3iRMzDYn6NYYLjvHDH8kxhNl1hS6Fq13VYrgL8dUmii3N1PDlHx2A+PyOXSYR2v7Z9Z4CMENvRuLi42Npwp90nTEqOT9aYMkniVgc4VZNSSmQ0WnVWqzkHhz0uzsWeNxgMqLVtyqwkWUu2zighi4FcPKfjlliWg2PuYMq2gP6gwWy8pNmwaFVFoKaqKetpilsX47uaFbz33re4Wj7HF0cfb7/yKo1Fyaz8ikKTCdDS4JW9Lg8fCt+pefd3UPSUVrfO5aUULV+M6N14QDUr+ey5cOLf+tqrmPmag46Yl1UUkVkx9UqVVSjOvqdfTbjx2i63br9BwxH3t80VWVluGf6MClQMg939u/zNY0FocD33+f733iPOnqFJCjm3YhItXf7ZP/unAOwd2KThivHlGYOW8HVrFYXPf/kpZ/M/4ZNfCoTL9CrFsHUymXS2bPjrjz7g4anP939PnKO/+w8q5MUxS7/Nvqxwr/whzabGszOBLllMz1m1m9wYvslcUvcbRhVl7HO+ntFpy8SwYfPhxz9mJCUcAlKenZ0QqQmxKmyokrXIiOnK+fzq08ccHL7Dcn3Kn/yfH4j3rdewEod2W+dUipbfu/OAH/34fSJp59fXPm+/c4eHn55QJMIOXrn3NpPRGQou3cFGqD0jyZdoiqz+nigYWkx332IhJRzS2GDYe43Ts68IUvEu3W6Xq+s17Vsb+G2Erpn88pdn3HtF+G/oIaPRlGLdppBMtU415PLpNd228NW++93X
yLw+Sj7g9PKh+L11wdnolEb9PrfviXYTP5GIlb/leklo8fJ6eb28Xl4vr5fXy+vl9fJ6eb28Xl7/Aa7fiMqVoZusRyIt/+YbNVaXOouxSmsgsk2rp3Peev0dnl+NWUvo19nplCKtk0iK9UKx2dvvU6gBhtTxSYIRu706rcjm6VNRScoMFTSLVbTRuVmg5Q5ZppOqstJimOzcbqMpLvNIZD46zQa62+RaahTNJucM2wavH95kuhKZj4V/TmF4NJoVVlciO9HptNCtW4Ty99bra/x5jSC7xJGZ8lrFot4YMBrPsRuyWufHWFUTJRZR//mpx06nT5ZMsKsi4h5decwupoxXa27siSzc3u6QhmXR2RW/f+9+g7y4oIhT/FSK/1o+Nw4djh9vBBnXFIWBrkWMR7ISMLCJvQTFVJjI96s2XSxD50xWf6q2RbD0ORlFtMuN8OaMgxslURLTqomsyt5ehdFkuaVhtmoO42hOmufbjKKtaKxCn3rLpS4rJldXV9SbdQJF0i5nMZW6wXK6RJFldFPLMdQUy1ZotmR5OJiyWhW0K6JkvQ5SbDvD1E36ezLTs0jxoicoGNtG7X6ni5FajKYiI7EyTHYbOt/u3CV5IioYM2NN6trMzia88qoUVzy5RIu8baZuejLDdbukWcpUijuuV+foGuRFTFMKdua5j5rXyTORBZzHK8LIw7RruKa4t1JqxKlPp9XYVhDjYEGalagbDZ9Ki7yIyPKA67Gw1zTKMe0cx6mwXkmdkmWKVYYMdkSG3DA0/DimWasxXmzob6FIDNHrY8r+nzAFP8WUOM+y0FkvfHRbNO2DEJhVFahbVVSpO2OaJppSJUjEc0+nUwa9DlEUYUiBWUWr06haTMcCZlKt1wjygGrVEbpuQLQI8ZMV7WaPtSROSc0Cw7RIpP2s5j5kHpql44eSCEPCS0tdZSpFtbMwpls3mU7EfHbaNfxlwmoFX39LYNM9z6PIT0njjLfe/IYYl7dN/pd/9T/T6UrIYRhimBamU2M+F5WB2XxJGuc0m80twYNt2yR5toUmrdcelUpVQJFUKRDujWn32qyDMYasPNhahUKrkMsstVo0uDqZYFWCLezi/HRBGC2o1WrYEvu58jx6/SbXY5Gt/Orpx6DqOHadUnKqd1pN2q0OhmHwaNuX2GA0GmNLZcwvv/olB4d7rPyYi3Nxr/v33kRVRE9Skoh5T9OSmu6QSQINTVdZrRZomkKjIe6VJAGO45AW8RZ27DgWtapDVVJyG6qGruus1/62YjGX1LhhFBNJKvuyKFB0HUt35L0TcjLK8gWhhKgcFei6viXoKMucUnnRX6WpBqpRkqUvqlm26xJF0TbD77ouZVkK4dl8A18ytiQYG5hcrVYRFRRZsdlUfTRN20LrNs+yWCzodETlw7IsZssFQSj25SBcbStEqkQQWJZFXa2jmdq2l6ksFZIkYTIVe4tSKKw9D1VVt59R1JJms0mj3dj2sDSaFWxH/7X+o9UyYraYb0knTNMkiiJBeS/HYbVaURTFFja3Wi5Jk4Q8e6Ht5TiOIDZJX8AqoyjBcV48t227xHFCWSpbCKWiKpQoZFmGKeGIml4hSqKtppXlmGIcNTDknldEEbEXYBgGiiRqMQwLz/O2VOxCpFrARSUARFY4M4I4oyJtaDUX2liOKc6P/qCNphdoZUqrLeH8ho6qNLi6OCeVtjieTNBNg/FCVHvzNEMvC+ymylSSanmriL3dA66mp+i2hMSqddbrcLs+dFUFNSAMNkTzoCgaw2GbxVrY79nJKc12DQVjSw/f6Ta5nl4xHPaZS9kR07KEXICscqYJBGmIbZsUkhAlLxICP6VR7W0riE8eX3H39g0URcy5roFtuSynPod7Agr99OkR1ZoQag6WYkD39w95cGfAs2PRb1yrNhmNL2k1S1S1uZ2XejPj6tLf6im6rk6ZGPT7mrSxSz7/6hMMw+DvvCfotvNQQVPb6NEu3bogXLr3D6qE4WhLVLM/SElNyB2dez2xVxd6RKB6VOp7eJmEQvvXVLU6nvQfdQe+fPicg70q7379e8LOqk+pVw5pV8ptX2vrfo3FwiZeS5h3L+fRo2N+97f/OT/57KdyHqo0Oi6T6zog9tPjo1P+0//s+/T3xJz74QhLNTGciH/53/03AITrlGU65/x6xrvvCvryrPYYtaJQyHmoDlt844++x42Lz0iKHwPw6Zfv8Pt/75+zmh8zv5BwljLHMO/RcsT7de64PHv019y5+bu8/c6r4t6rv+D4g59T1TqMJ8IPatwCZRUxldVJtQJ6q8bjnz5lLXXK7t6+TxF2qTtyDMwhQZxz516F0aWsz6ghiqZgWnsg0SyPHj8nisaoUvR+MT3j4qJDs22yXIn136z2mE5O2N3rcbD/OgCj0Yg3XnubG3uiuvXjn/yM7/9H3+R/+z/+ewYd8ZnZ8glnRwmdnkm82XKUAm9VshxJ4p9kTb/7HbqHHgvZb1xzuozHUy7f/wJL6r4+uHWDWXPF2hNtHR/+1Kfea9OsWOzdEPPy+lsmmrXm+VOP+UL4hwtZvf3brt+I4CrLo20Tc61i881vvsH10iNNhQP04JVXmFxf0tv3WXtiI3UrTUzd4FJif4eDKv2BhlPd4aOPhVH0h22OrwKW/gK9KTbOVqtHWaREsgy68kwajQbNrs3RqWRasVQmlwF5vqbblJjdWKXwJ8QSkkPi0Wu8ytnzM2ypH6PTotKrkRdLkkJulJpKFCZEUm/FdV2SKCJIY+yKMNbbt7/OaHpGooYsJmLjtm0TLwhQZY9Cu5rhmArTccaG0idJMtA0dCdnsRKL7L03d7CKEBvxe7PJp7hugaYmaIYkalgY/MX/HfPZJ+Le3UEd1+ijGyqKFCN0HIflfEzFbWyZshy7yvX1Nal0WuuVKq5p8MrtGyRSeTcML9F1lVyBQMI8uoc3+ODxn9N1BPQrVBIipSAu0i02fDGdUevWCdWMaCIJGDQTf+GxkgQXzVaNMs3QFZWlDPjqdZ3ZaE6zVsWWit779TvE9ZREwl/8UCEvU+LIRPZD09mxODstMVSFQkKxLseXFA5bgovpKqFZ6/Pcn2JJ+FJrp8X1bIw9tDieCod9MQkZDGpkivh3WiZcXl/RbNW2TrZm6HheSJFbW0hqc6/P2ckYSxe2GWUZbrWFohXMpehsrVbDqJbEZbgNxpczle6gjycha8v1kjzPCb0Cw5Dz55pkmc7CC9Glg97uuDx9dE4k50+vBNQaDYJUw7EkTDY3mC9W5K6CIYUTa3aV8bMj6hJGY5omq9U1rpazWEuB50LBzIUAoNaTjqGucXI2p9aSDq2Sspol2G6F1BfP8Pj5l7xx/wGFJJNZ+EvsqkMQBDgSQrKzs8M8MPCDBb7sw9A1l0q1QR4JG16tAso0otbs4Ui45HwhDoz+YIdU9n2hJ+iWQbclHFzHUlHdGjVniCGZl8ajYwaDHrri8uSZ0NF759232N3rE0ho63K5pCBjZ2+fdlsyAYYh7Xab0fWMnaGAnzx7/pjd3R2WSzFXZanQ39ln8fhD1pGYzxv79/j
0i4+wDBPbFGNccRxGZzm9vtTn8gI0K2Vvf4cw2PTHWJRqSeCVGLKnM4lDzs5HOK7U7LEUwmjN/btfR5H9KldXFxiqQxCsXzCkWRpx5m91mSwNyiKk0mhRr4vxfPLkl+wMD/GCnMVCOI+VSpUwKtGNF9AsTYckzrFtsXdunH7btre9U3qaAu5WhHbY72PbrtD5kqx/3bbQTZot5kxm4vfW6zV5Xm4FgxVFochzkiTZBk6VSoUgEMHG5m95npPmGXH066yGIihSt5/RNG37nSiKSJIESzqsIJkIZd+Q44ixStOUIAi2vWmWZZEkCXGUUpXB5KYva9OPtRmX9XpNkshjWLUZT6eUZYktCTuEWHGFKIq2zJOO45AkCaa16T+W+ltZhquLv8VhRpRkFLM1k7HUCDI1XNdG0168r+NatNUmzaaw/cl8huM44h3k2dPrOvjBi/6c3d1d8jz/Na2vQoo2Z1m6DY5rNSHuvIGaJ4lgmwxD4ewDlEpBloJTsdGk/tYySsnSmA07R54laGUBZUGeyvU+GxF5awxNId9gcOXYb/rs0jjjejzC8zxu37u9fU7DEMx/s9lsay95XuJHMtB4MqVSdfBmHp89FElZ3ShpN6tkpcJYnk/TmY8fhTSHws4V04QU4jDi9LnomWl3mizXAZqhYkmh74W3xnWrzOQa6vcOyIsCT+qRASRpwGQRYehiD7518x7j+SlZHjKRCd52u0u16hIl4VZTKgxDwjBmA0pabxlQZyiZuFf71Rqm4RLGM9yqeCbLLMm1Oc+eif3cNFxa7QqlonIiSSjeeuO3uH23w2r+Yh6uxo85enrM4Q0BWZutLjA0E9uuspIkRVcXJdVqjUYn4dbNHfmCJoOvOzx6JM7MwV7A6XXK1++/BrKn++TqiMHuIevjjNGR+Nxn089AjblxU8C8xuczMq1OmjtoEn7WulFwMRlRq/fo1WRbh6oTBxXeeONN8fuax6C9h7eY88P/V+gd/ef/5ff5/IPPOH+WsLsne7yeneKYDf7gt39XPJP3KfVDjSgfcXNXBJ17uzp7hw2ypAKa1N+MfVbhBV88lBqh65TJaM7Pf/oBjtRObDUOeO3de5z86Q8JA/F+dgUefnrOe98WzLxKsCCeh2j2W1RsCVFfFzz/wifgAsuScPDCI1+U7O4I7oIvPv0Ut1lnuTjGPpSi070D3r1j8efvn/DzM3E+nF9PiSYGpS9ZDjs3iGceN9sDnp+K8Xz++ITdV3usJrLVRNOxTJWPfj4lkf2chqZwNb6gyByqFbGX3LzZ4rOHx4TLDcOuw3K9IPY9lqGAL9bCm+zfOKTp3OL1N0R/0/Qvx+zsd5jIBOHBYZ+j44fs7LYoImHDf/hH3+Hnv/gxj74Ys78vEtF5rnD3bpcyF/vkTv8WKimTqymLqVgPSpnQrA+ov17hdCT6U8+uLygwGOyIpOWnnzzm9cZb/Oz9f0tF+ui9Tp1mdY9vfP1btCQL5molbORvu34jgitN0anIzeGDz0948OYtdpoxk5FsUFQDmlaDOHFQJQ7UsFWycs7OLbGR16s5k8kIUHntbbGgLk/WXJ/PaXYclE3qKlVQlIBqRXzPX9sMdi0uL6YQi0MvDRWqVo1+v0okw+IsT4hDqMlG4/4rDeZFSNqEhS8WkF4a+M+alEZ1S+s+ms54cHeItxS/d3YVYjcj9CynXhdGcXZ2xvNnY2I1or8r7m8qFhRLvJk86PM5uhqiGCZRJKWhVQ3D1akqdVJPbEjv/2TCrZ0q3e5Sjl2F6UQjTz0uZuLwmF6bLFYGe4eSeSn0iMoravouS18YnGIOsGsGRZrh6mI8O1YLpargy2ZIy3Gw3ZI8CZFtEjRaDdLYptJq4Ekh5X/9Zz+gWq1hS4HLpqVzfnlGs9XEW8oFW6RUHZuriyt2JcX4epZSczSRZkI4k0mRU6oaxebA9i0qjo6q6mShmL/ZaE7ghRjGxjYMDLfCZBSwWorDstHP0UqHKArpSBKIKMzhfEEpM59WGbOeXnPz1bvoEvs+Ob3kerTm/ivd7eGx8hdMv5zTaov3K4iJi5Bk7lEzxRi3qjcJp+fkWUG8EM917a3IM5WLiXimw5sDTNtlPpmCdJbXXkatoTJeXOJIh2S4O8T3S84k9bxl6jTqOut5iKbJCskqYjWP6O/U8ROxAVYbLsM7bSxZ2S3SksX1gkqNrXjtaLyi21OJlYA0Fu9ca7hYBzdIpdPiWjZlq02qxGjSCSSFxA+4Go15cCDJaXSdcqBwuRYBigaQaVBqdAci+FiOfKazJTt9gWMOL0pKUsLAJ9CEnXvBFN2p4XszUhlc1d1DGmaHSkc808X5GKWs41Yr296QLeFAXrKYirk6PBhi6xrvvSdoZv/1//pv2N3po6g5T59JWvk4IjrLMMwJlqS7/snP/5Kd3SYjuSft39hlNr+GMuP+3fsA/MVf/Zhuu0Gt4nB1LrJbd27e4vziDNOQvRNOlTD0WXsBt28IB2E+j7HsLv5yQk8yQhVpTq/f3Ioft7sVNKPKRx89FfTqgOlAVpQ0mhbz9bmcY50yTskKsdY1PcdxVZ6ffEyvIRubiwLPC7l15y7HR+J7jlljf/eQlSQxyZKY9dpnOQ/otTdEKpdUG/ssV+esJJV2mq+4e/tNViuR5JpMr7h5cJ+yMMjzjcRAQqNVJ8/KrcO+mnlMJgsaMoCPowJN06hWqzgySWIZJmEckZcq9YZ4BkU1MXWDWPaUTSYzam7t14gpRMAoAqAt859m4Jr2r9Osy//ffCaKIhRF2fZcGYaxrVRtrk0fl22b2/Xvui5FUXB1Jea81+thWRZxKYIueBFYKIrya8QQvV5P9gDBer0izwTLYSarMZpp4UcxGipOTdiQpik4jr3tiVKKgjxNKcnJZKZe1USFKvJCKo6w4STOqFUtDNmLmqUhYRhimCbVTS+MoeN5AZ7nocmMs67r23cUvy+qUc1mcxtcxXG87fP51f40Xde3VSrLMjEMnbI0SeWe5EcphqFhoG2rKHFhgpJtA+gyS1EVhTRL0NmQMkWURUaJshU7L/Nii47YXMN+j3LQI5P83nmeo+smrVYHXZcED5HPjYPDLRoiCUMsU+d58JxqIav1WkyYeuR5SkUGJNWWRrbMMSRqQ9Nh5U/ZObhNoyn22DRf4Sc+eVEhl2Q81YZOs9EgTcR91sECQy/4lfY0oqSgiH36PbE+6vU6WdHCMFQ6LfGcnueTZlAqKaHsmYsij2anjS4FvAfDA5bLJbbZYDKS9OyFiaZnrNdL7tx9U37vkslkwiuyN+X04kt006biDDANcT599umnnB63GU8uuH1b7CWGCd1eG8uUCTWtShbF2KbBcib+dnizx2x9hl5WSKREzKC1Q+Rl9CVpQW4FOK02N28aIMmHMm3Jxaig7tRwe8K2Ot03saoWY4nQ6OwM2Ou2+eSTpzR74hy/MbR4fn7Kl4+mfOMtwfL76Ksz/vD7/xilEO/yi59+TpYYvPXq36HfFOdTGmTMlwEHw7skiPHsRxmuXWFydQTA7TvvksVnHJ0ccf+mCIAuRk+xdB
PbtmmWYv9u3rB5/vhyu38fnxxRaSW4bZOKZEisNBr4aYV3v/1HPHv+GADdqfPaO/vbxNtqec6suMbI4epa2PbNmx5u7xr/3OVS9vbfv3+X8/Vfs9/6AwDu3XyFTy4vieKQv/q5YAJ+/WCfIJ/x9Ks6blOyO/sGTtulY4jz2Eptln7OMo042BcJiSyLGF+cEEom4t27eyxmx3hzePVr4qx/+OiM115/m+PjU64uJTNu1EbXKjQaItiKY4/lakrgxSxkz/XK+YR4XeXuHZsPPhZ7QpwmTGYeS4mQanQNgnhNFve4eVfY9Z/+X/+eslRwKgVPnwhW4Vs379PpOuSSdfT4aUCre874QqHdF3Zn6BZJVrJGo9OXfl96jbpu8OFn4j69/TpZcsprD2pcXIu/rbw1cRzzP/2rXzCfSsTJ7EVS5//v+o0IroqiRCZ/caxd1Nxl7I0w6uLhh72Y0WnAeJqD1ARJVQ9VKwhCCRdJm5w+tnn9jT10TTgIN/dsvvn623zx2VfU6qJq8sWj5+wfduj2ZJbT8ri6PEdT6yDZqeIk4rd+b4+PP3qIIUkujIGO6jvsSc2gJ4+/Igp0arUaqiqa5Zczn1o9YbEKUAyZZTRK1sscQxObdJQ+JfN13rz/DuFcbMBPTj/iD/7RTT767CHXFzLLn/nsDxxyCQG4s/c9rldnWPUATUIh60aXdeBTMVRU2aCflxnnV1N8TzxTXGRMr0P8ZYEp4YSpFlBaCfmGBtmygIRKe02+FgtBd3PCmYJaamRSm6aoZtw6vMHjp2IjWK1WjJdzYqe2bUJXDYdgtSAtAkxZKet2Wui6jiEhQKvJjF6jR0G5ZTDqDlqkUUi/3mEuA652b49cWyETmlxML+n3uxiOQ90Q81kuc3IlIw4h8sRzDg+bTNWYlqRdH83nTCcjWq0Wji2+d3V5RaWeYqoWwVosliyFpAh5fVdsyJfeAgMTfVnlSDLl9OoV7vZ7qKHN/EL8zXYMVDVjvpQVN8ul2epyPTqjKSmHg9xjmSypVBwSXWyIUQyO2eW110XWL1orzGcBWRCT5puMe0Ya+dR7PVZzkUHsuArj62tQJczSciiykrrbRxbByIsYf10IBrtSOE4UDrder7KSG9v4RGFvsIO3XDO+EodVtW0Tqzr7vS73D0TQ8MO//Jy1HqFKZ9mzbGynQhynIiAFVAz2DvaJPRVfZrMuHp7RHw7ICmHnpqHTsB1m4YoNgZdhd6g2elRlcmWoh8yXZ1SdNoaEBR5fPidexjQqNmopnA3Htrm4OMW0pbOVZDSbXZbemHopmKTeevNtPuaMNAxpNSU0ql7FW662Y9Du1/jyq0/odA/p9sX3nKoGpc7Dz5/RllnpTquBZhqU0gFrVBtkccZkPuGjD0Xm82BnwPOjrxj2b2xhgHmRMRx28aSNOY7BfDbhYO+Q62sx5oZe59aN23irCVEibLja7ECypC4bjy8uJ1g29IYdlFI6rxT0hgbBIsGPxEFk1dqUpcF8IWGW1RpZqoCdExUb5zWkYldYr1d0u2I9GKrFehpgSWhWraFCqbP2fSIJm71z5w7rxZyj4yNMSziGgb+kzNlWUSsVh8vrU9rtHoEkwnDcOsvlFEMzt9WJ7rDLeDze6iutPJ8kSViuva2zbJs6milUrTZVkyzLfy3gMU2dSrXKer1+4cRLEgoBSxOOb7pl+9tAxkSgZFnWr8EJN/ThIPY3VVWp1+vbIEnAylLB8ifhdhvInCXHxPd9qtWqpPp+EaAkSYKuq1gbwhXfx7QsCsn0qGpg6QaKVm6rMVmWoeu6pIjf4F9KAi+ilIGGbpikcUa93noBt9NyykLBsC0UWamyLYvZbPGCsj6OqVZdlss1q5WYq5xSEk+YWyKMohQBTrwh+igLyY4YbnW1BIuiYM+UMS6xhBe+YDV8QcbhOvK80GxQYvI8pyjEc1mmThAkSC4JTMsgTzMKQLPEXGmahqoKhIS2YXukxLI1LEmVmxebNedQrWw0FlXWQcx8PkWTpAFBuObTzz7c7tXNZpXTkxHL9YiaDP4b7TqnxydUKhWuLsU+bDsOSaazkqiRnBxT1zi7GFOTEhGK7qIbCoblUioSNlosuLi4QN3nVUcAACAASURBVJHw6arTIA4TlMLZjlMYKpimzmQiqcvjnNVqRa872Aa5AlZ6Rc2usZxJFlxFR1drW+KYo+fHpFlEt1XlwatiP8+ijPl8Rn+4z8m5IG6oNzWCKMEPN6zKTfJ8xXqdsrsrAqnZ+Ij5YkKz0cXzN1TaMYbuUhbi3RQK6g2X46fntNpi/16trtgZDJjOFxSSQdSpqOx2X+f9j/5KvIvt4qoqZ8dX3L4hIIDnxylJOqHfajB/KBIXf/gPf59Z9AXBUoxBvWJwcTkjSH2MWDj13myPZJGScMBakhqNF+f85Ec/5J/+k+8CcPJswOVFyNe/eQPrQ8k43azx4LUD+rXbjEdi3PNWyXDP5tVbguzgYnFJo3obTV/x5SMRtDiuyvGTL7CqNqolbDj0LBpNlxJhG81WBd1weOV+i5NTse9nyojVQufVu98mSMS4G2qHq8kpvc5NAExcVtOC4W6VQUOM+bDVZ3I5Yx2o3H5djNX1aEqWlPzgL/9bAG7vv82Nyj2+ml8zltXLpqXw4y8iitaAeCn+FiwNhrdzTi/E2EXeGYVeISbBlXvl7Zs38f1LSlckKK68BUqp02k2WUvYqmUWfPXVU27feoXrawGv++rROe2eTi7ZnhvtnKvLGfu7hxtpOOyKzziMQangyfk7vVhx95U3mFyLOV4uU4aHBtNxQmsg5mU+X2DpA+qtNjkiabDyjzGsIa4r9vyD/R6j0YRmy6bXF8/w7OgZo9mcxEx4/Z4Yu/GVzSo7ozsUflIcQVjXWSxmDGWl9ejJCsPVyClptsQ63dmRRY6/5XpJaPHyenm9vF5eL6+X18vr5fXyenm9vF5e/wGu34jKlaFDvSriPKtmcjU6QylSopXMgAdTnFqdt7/dJgpF+XK0fApqiSd7GYbdOslKo970mU82VL4ZSfKYwe0an30uqi39mxqGEfHkM5EFeONrLebz/4+992iWLE3v+37H+/Tu+lumq9o3ZjADYECAEikKGwkfgDtRC+20ET+FPodCCy20YoSkCCJAgCQG49A91aa6fN26Nm96d87J47V435sNKGJ2XIwi6t1V1s3Mk69/nudv1piOzv2PpJnsScDXL3/DXEtoeCI6VfOM9onPIhWZCLNtkWUKWgUjafybpjq1ZsUf/ugRb14LuM1qFlJhk6qiQlNvtOn2AkzF4OXFVwDs9xv8/G+e88kfd3YeXYuLiiopObgvpT31NX/06ef87a9/gSKhNWGyQVUMTFRh+ggoVoFiliwTmTnXwW3bDG9n6JJzUT/QKLfWzgDSsDyWq1tubxYYljRGU0xcx2I+W+FKEuqmCAlnEYYv+lfJMzq1DsPLNaaETwXtOqURsc51PF2UIwt7Q1lmjKYiOxNN5tRsl0IH07nzICmxbYdSUelK2NPNcMy9D1rcnAvYZVUlZElEFG/xJEQmLROKL
GfQ9bCkL8NykVCmyg5v7ro+mlrRqvdZLESGT9e2KMkBrU5Jlol+Wc4LTh99wXgh5pTlVRx4NSbTa9JCZG22mck8XmAu36LbIhuyCiN0zcaWhGhD0WlaTQo7YjGSXlFWjU6/Q1LGZBK64zkWH58eoUis+LPoivlyzXGzTd0Umc9Xb19xb3/AIk1xZTUkS7bEm5gHpyITeX1xjRvomJ6Dqolsz2KTUe/UqUowJJH5uDtgs54ylWumtd9nEU6wXZfP7gus+HgS0+yesl7ccBOKkvjD/TZfXl2RSAGGNIfLyyWGUdKR3hTbTYLneSRxwkpmoHqDgPn8gkIT24yue8zmQxTHYzgTf1Mb1Hl7/ZThSGRQKy3H8UqKPCeUcvtOw8UJLU6OH/L1E+HngnGL7SpE0jvG0EApM+oNl+VUrNF6TWSwfM/aZeHTOIJS4TdfCvND3cg5OrlHhc5yI95n5DAcvqLVG7CJxHOqiolj10Fmm79+8j297j4//vxn/Kf//FdivtgGjbqPpirUZRZ8E06oSFGlp9XZ2Rv29o6o1DVSQZqPHn/Ize0runstBvunAFyeXVFWK776SuwRg8EBr9+8pl738GQF+uzdFcenXdbhgsGB2Bfz1GAbldQkBLcoKuJYoVH3uLkV6yHPElzXJYoX+HLfVUg5Od7fmdBeDM9pBDV07QfeF5XG02ff0+8dIRGbBG6b5WqB44ofU6v7rFYLtul8V9kxLZ28jJmMh/R64jln8xCUguFEzDHX8Wk0WhiGQSSzoZt4i6+5TGYTdEvsObVandH4Gk2aA6u6RhiGO74UiIpCFIbY9g/QOcO0BXRYcr60SsAGDdOkSKWvXpbjuu6uctVsioq7kFqX1TTbxjT1neiD6OPin0izN5tN8kxIid9BDHVdR9fFd97xjVLJ1QpkPymKgqoJYY+pFKuwbRtNtUi2CSpiDqOpKIpCsyaqvWmaYhk26/V6JzpRViqGqaGpUJQ/wFfSdLuDvzmOQxRtJR9UmnjrmhQHUSnleTGdzmnWmrsKVDsISIucMAx3nL07iKSoeInXfN//JzLoaRJTFMJrK5Ec5KLy0M0SQ9OZymqIjorruqi5rJSVQsa/UkpyWbEMtyFpnqEYKnl5J6GeoGkaS5mV10MdN/BJknhXSQ7DEMt3SdMYRZfWK7mYixPJod1uHdJ0S6Nu7bjE61WMaVeSwygRCq5Bq+1TVneehEuafY/bmyFVdAdDhDiNcJycUhHfZxoGtu2jcIc80MnyNa79Qya802nh+z7n52J9qEpKs1nn/PyCQEI4bVdhMcto1HyaDTE2q3DFJlzs5uv+wR5RtMF1VCJpnD6ZDnGDGmVVsZRw4qrqYTkuy5kYg3unH3I1/A7LsDh7ey2fSidNMm7XMw6l31+auKzSJS++Fz5CpgWGqeI6TWLJEdzEWyDk8OQBuUQ1XF6c0Qya/Ok/E7DEpy9/i+EYLCYF33wn5K8NM+Hk9JR6rcvnXwi44mZzztNvN3wgLU7Gw3/AaR9xs15z1BGCRJZtsJplmCc3/Oefi/E7eeDS6xps5+Iscl2Hn/2LE168fcb9LwRyJIyWqKWOo9W5/1CM1fblnGa3wWgl6BJNv01Qy9huNVxLVPTu3zdJtvDk6XOCtuiXz+4d8vzFExxL7Hd7/QGj8TWe26DbFp9dqjdEo4h1d0ZHQrcuzt7R7/RJbTHuWQKf/cHPUIqc+VT052gyxbPrXN08IaiL+5pXN0kyn8ZSQPlWkxuGYcq9+1/w6FS89vNv/paklfNBz+HtjZTu39Mp1hG55BJnhg1KRd002BuIs305fctkscB1xXf1Wk2ySKMoTFYLiURoH3Ax/J5nTy84PhFn33w6ZDVzaDTEGVOrBZyfveZmONztG6rW5fhY5c3rK4KmmBtuveT5q18TSG56UW74+d9NqdVd3gg3GExLw3MzZtOIruRKvXt3RlkqDPbFfvr1l6+plJhSydFscVeKswknp0cMr2+E1gNw2DmhWw9IJI+/2z5kEV/jOwOG0i5pvAo5/GAf1W+zkfy0l89FTPG72u+FiXCv61f/07/5EwCsRghGxXgxZiGJo9uFy6Djsq17DCdi8Sf5mG7gYG3EpFyNtkT5gk9+8jm3UqnHtlRmq2v2Bw92qlHbLSRpTCrxzkkxpdZzyKxbClUcxMPLDFWp6LcfsJiI11JVpV5XKe+U3WZbFDPm/uE+3/9cLIRybeGdNAhcHVvCXW5vR3S6AZYjFlRVlPh2k/FiyvBaQnlsg02Ycfpgn73DO7+vGNt0iCURbx6HFEbC8EJjJaECg65OlQdU2or63WUqdFEUhY6ccOPlCqsOt8OrnYqbElTkiYpjS4PLdcZ6EdNuNhnLvqs1NB6fPOTt2xF2IH1SwgTf99E1cYmo2T2m07dkmYMleRK6qbGI17QP97iV41fmKVk+oVpJ5SVFwbLAdS2WMthRNDBcg7JS6UtiuIJFklZUuhiDq+GQsqyj6hWVPJiCmkOSqviuSx7fKSapFHgsxuJ9jmagmiVBrcHVjYAX9PcOMEyFZTRHl/2gGjr5Otpxm1aTiH63S5oWKEg+XhmjajrLcoIh4Zlue4/tOiOTh67fbqFlQkxAF/c2CtMlVVSi1Yw8lw7mDZ+G2+Htd2cAdE6aDJcbSkunGUhhg6oiCxOScoMmsf52UCdcbMgkbqZVr1Fd32IZGu8kSXu93OIZFnpNJQtFf94fDHBqOd9fiR3KsA067jGqseB4IErk56/esi22DA72SSRHz3PrDNMNNXn2Xy9vWV9rVOqC+4/Fxj0fzXj04WOmM1ivhHKUqTqUlcKgLwLF67M5kbpBcwx0CVtJVA/Hyxleimeq109JMgXLKLFLS/5ei9l0RaNjskrFOgp8nbKymF2LdX3c9LiNSjy3IFdFHzhZi//47wSP6n1739639+3/L+1f/eUHOL7HqxcCeu4FNYoiIY4zWnVxtjeCGtNFTLvrMF/KAKjSWK2nFBKKvbffJ6jZ3AzP6bclTwqL0egG2/PoS+GdyeSSNNS4L4UpposxUajR6uqUiTi38yxiPJ6R5zqffSb2/d/8+iW5klFJpeVPP/8xy8WQk5MeL1+KvdcyWji1GfV6HVMVn9Xrd3jxYsQf/kQESS9ePSNZbYkqG+WOv60phHpBkcyYSJGLvtrmX/7kv2Mq+dx/+/d/zenpMePrEXttwdvV9JJtUuC1N3z1pTiLfvyTx3RaRzvzc88MOH1gcvkmppIKlFWVMZ1s+OjzfTJV3EtefVvw4x8P+PLvRfD4Z//8J/zt3/0VteCEtTTk2t/fBw0uLi5EAg5wrAxTryOZxigYuM0pb16H6Jq8B+kJSaxTq5m8k35xQaPPanNDthWB/mqR0bvX5vZ2TEsmC7/8++ecHPd59OFjZhIOmmQVtlmn1hZndr7WyOIMy/X47ntxtt5/2OdieMtobmL5EtqaBTRrDtYdjy9SKfKUOBpjmWIupKspeWlQl55keQZhWZJQ33ECzayiKEQCzQxEH/s49IMe84W4Pzb7JefTGWapQyXvi84G3bbpHxwThSLYaXoD
LD1nG8p7ZzTFtwN6/SZfff1Ezv0Wn336kO++eUrvUARz8aaAyuDg+AEAhTnm9ctzTvcfUbNFYBhFMypDI1YWvHwhEkJ/8a9+yn/8D7/Y+TkOjhz2ap9j+lu+eSFMoetNj1wJmV/WqKRa6Ompzf/yb/7332ki/HsRXB0eeNW//bf/EoCouEZRXSajmLovOu2g/5jfPvklZ6NLKlVcaJ0amFWTdkM8/3dfnaMah/RPA06OxAR/8+qGVrOPAmS5mKyT8ZI08Tg8EZnPy5sZD39U8OzliIXk+jRqR+haScM7xLVF8LbOLtmsIhIpelEZMJpPSdIVex2RSVrdOOimwmo+4oMTQfRzLJO0iOh0xAVzG5YUW5v+aZu5NCQd3l7w6N6Aht+UZolwPXvL1WTOyb649Lb6Gt88fcLVmY8ixQ5q9YrX319y0NunXROff3HxkmhtYtWkQVpTodk+YbVcInUMSPKC8XiF9G0my1OCwMFUNcKpmNCWYbLfa5NmKoZ9Z2xcUFYR61Bwhpr+HtsoxlQTMkV8X24W6JZKuMzJ5eLIkw2+a7OQZGPdqHAcjzwtdgIFSVLgeU22WcSgJX5LmiiE2xBfOnCvNxssOyBOMqZzcZj02h0U3aAsFCp58a6UnKRKdlK3m8WCmumz2YY4DcnVUgKm0ylBQyGOxdxotXqk05xczhWlrEizmHqzTZrJLGe1olRKHN1BiSURvUgJaiaVzNAaQYPFeE5RZARyXIKaTUVCWZZMx2JD9L06JRVTKaOrGdDqtFnOYtqSBKqXFleXZ3R6fRKZSd7oGg3DwZf8g1IpmE2GKPoPcsIH7R7LzZS0VCllv7i6i+0GxJrYgIsqYzNJ6Ry5zGQgXHd0qFSG1yGV/M33HtxjnQFSBh1fZXazRNNLCom1d2xQTYN4U6HfqbMWBbrboURsYmYhbFTDKKMrBQq2SkoebZlnYrPt9z02k1syRawlAEfzUdwtT795Qa0uAsyGfkimh9T3ZYbxDPZafUZpQlaK/jxsNSiLLov1kPVazNlEh/1el5bMwr09P2cdr2g26zhSuGU1W2IZFoFfp5Sk/vH4FYE3oFETQe8qPCfLKjTdxbBFwmcbV4CLXyvJS6mC1fqIr379hHsP7jKY9/nNP/yCxx/ex9DFHvTi+7eg5PT6LXQpSBKFKYeHh1xfi3nuui7T2Rhd13dS7FlaUCklmyhhKc24/brKYrWmLuedrptk25L9/fZO8CVJltT9Oufvbji6J57raviObmuPLL9TYyxQyzaavsWQFQtdt5guNrR7bZ69ELj6TrfGzfV4lykvS2g2A66GF+zt7cm+G1OzB9RbBWksfrNp5xSFSHgA9Dr3ePPuS7qdQxrePfGc8VqIhlCjVpcm0+kGy/J21hYoKYapYhoempQvDuMZqpKjKT6uVK7KyxAqnVyC/Q2zxDRcilxlu73ji93ZSWzlnPZQK0iz7S7T2mq1hBqqNNoFiNOENM13fCfBj6qoqmL3PsdxZAXnByT+nXLdnShElmUkSYJt2zu57TRN0TQN0/zBHDdN053E+N3nZFm2E5K4a1VV7apGICpshmHs+GtxsqVKhfqhKoVpFosFlmURBMGu6ncn4nH3W3RFRdf1HWftrg/iOBZCIfkPFb27ZwNRhQsCmTjT7xQZIzRDZ7EMWciqSa3ZJMkSFEV8ThRm5NkWU4ckEevq8vwdlm6hVD88V1FkFFnG0bFYH6YuFCEdT93ZUTRaTW6Wb1HKCl9yM1SjJMsj4kicV7Wgy2wyJvAGeJIft9yMyZUE3aztxiEM13QbXXJV8PG2SUgUqthWAEhRFN0VF+Sux7aSSq6WS5Ytd1zfvcEBabomSysMySFNwwQFk8Gh+C3L5YzZ9K4yKcbPczqMJ7d0ugHLjdjf0qTC800UpCiT4zBfjGg3WqRSKXN4fU2ndYBuK+zti3U1Ha7xPFVUlAE0aHcGXF59S08a7+ZJSaPRYr1SdwmsPNfQrSaKIfrAdjSybUW7Y5JuxXM+Pr3PeP6M7crn088eA/D1d1/iBBr5VqxP09FZpHPyokSRwiizjc+f/PQhf/M3/w+l5NomWcG/+PO/oIpEJWk6XbIuNyRZStcQ+8bBns3F6Iq9wTHJVoxVvamziVJ+8mMRzN27f8xkNEXVE4qt2Cuvrl7TO1S5Hl5weynWg2np/OnPPmE6FKgb07OYzqesFhmGNLD/6slv+fjxHzM4NLmVKKY4yun1A4pc7J3L1YTR6Bo3cBmPxN8cHrWpMHn7+oalrBx/9OlDrkevQVqVZFmGYW24uUiQdC7uH/2UJFtgGz00W+z7FxcX2Hqf+p7k45VNVvMNQb1OcSfmpiYMp0MuRiv6DbnvthtcX14QS65vs+2yXoVUqUOtKfpFVYVgVLMl9pfRcIjZsLiZzVDlGn3Qe8xg/4Dx1Q2zO+RIw0Z3DOryfhzPViilyWKzRtXEWhvs11hHW1znmMVaBJhK0uDBvT6aLj57sxZ7kuWYPH0mqkWGbjPodbEMi0ZL9HGn2WJ0e4ViSCumskTRHbIk4v6pCLhIMsI85/XlVyil2It+9tO/oCyGvH4lxqV7uCUNEy5vMpoDMQ+22YSvf/kGrWzw+DOhu1DoIf/r//x//87g6j3n6n1739639+19e9/et/ftfXvf3rf37b9A+73gXKmVSkNiol9+t8Kub+h2+swWIkt1vXmJNnA5cfdptUUmZzIWmZyvv5N6+T2b/aMGe/sPef5bUY5uNvfJ0ph6rY1bigxJ/6Mel2cZmlRjuv+gzpvnX6IVHT7+QGSy3r1e0u2dUKU5SSFgZFploSYVtuRO6ZaD2RsQFx43UmlFUdfYmU3g1lCkEe5kOqLbrjEaiiqV59ZwaiXff/mGZSze9+DxIWVZ48tf3mBJlcHHnw4o44CrNyLCf/fyOb12n879A371jYjer66u2e82yaIVc5khaTQG7B2UpKWo3s3CKSVb3rx5Q0tWCzzLoWYZSLEkolTDiE1abYPKkqabhUZWqSR5RCBLqpPJENOCXBpFWrZKVfooekEayyxnXFBkOr5uIlVX8Vr7JNmWVEqsxikUeoZpqIQr8VnddouyLEnDFMsRY7XerGg0AmKpCOk4PnkRYWo6e13xTJYVMFsJKfiilJwZw6DmNkmlt1Ecx2SxjhM0USTRZZve4Dcq8qxCJl/YLOaQqDRbMtudZGznIUWaY8oy33q+Jt5WaH1tBxVcbzdkukG/JTJ8aZmT6yWa6bGSnmRRFLE/2GO4uKaoxBjXAp3FJkLzZHZ0OcdcrSmTjEpC4q7GY0oDNusIT8rB13WL+c2EWPZvXmwxbIsqq3bGm5tNhEqNKot3kthRGeM2aozO5Ry2Fe59MGCjjDHr4pnGswWWUqfdbpNI6N7l/Ib9wTGKI6q41/MJtY5FWW0hlry6SqNSFFxrSilxHYYJOUtWUzEOnzzYYz67ZbXMubgS69b0Ypxqj3ZfZCYnqyvatotmWgyXIluYqnNqah0zN7m3/6kchynT0ZxUKowNek0qxWAzu0aV37/dZqw353iBh+WKfWORLJitJ2wiURH
alib9wT7LxQLdFOO+13/AYnqNoUAozXLXqxiFOa2uyOaNz5c02gGVGjLYOwXgdjhjOp0wONjDkB5PZ2fPwC4wpJLc5fVrPnz8AcvVnHAj1r/rO2R5zM1wSJGJQQ1cjzRfcjsWsJZ6vUlRFrTr+9xcC7Wp8XDCvdMHhNF6x79p94Qa5Goh1oLnWqyWCfu9Pp6sQFtqnSLL6XbqbGOxv/iuw2h2hmuKdaVaJmVuUKQaqiLnxmiMZlXoSp+mrIwvZnPu3z/l7ExAo9vtGnmest8/RJHS/YPuMUf9B8yWL4hKkaG9f3Sfd2dDZguRgV6FE0bTV2iahivn+XR5Ra0WoKCy2Yh9OIo3VKVOUBdZx5rfJMsjknSFJxXoHLtGWcUoakYkq9KaprNaj/GkapxlNdB1jSTdoO4k/9fYto0qN8Y02+44Qo48n1ar1a6qFEn+iGnaO2l38W8huX6nEAiiuqSq6q56BD9UhO6k2TVNw3VdqRD4j4y4JZfsTtUwz3PSNKUhs89hGO4UBe/+RlGE0bCqqrtK1Xq9Frwu+W9TNzAtR3iAyepPp9kiyzKWy+XuNcMwsG17V9lL05Q03tJsNndVHE3TyLaJ6AP5vHdS7PVArKuqqphMJjiOs4NnlUrJejYnijMK2Q/haoVp6bv9xzYrNtsthukxWYiqRhpVKGZGWaW78QCdRruxq6KmeYJfCzCsgqAlZfrLjHqjw9XV9wQ1MaZ5YlCUFobkYFXZlrrTxrAsUslzq7dbrDdzpssZR0cic32w1+b123egid9yfNKnSAwuzxf4npRtVUJqdZX5cogEctCsFwR+jUwq2U2nc3q9DrafEkYSWu52SdKQeCN+715vj3S7pahKbqTh68Ghy/5Bl/VmiiF5rYUmKoaBdwfBr3BMG9MwGEtIvBs4xFmEa1m8eCG56K19PNdiIivgrt+h0jK8WgdHwtHNusr33z/l009/RlWI3/ybr37LwVFBqyHuFrfjc/Z6fRzdYu9Al8+psJjVCHoq46nYz5J4y2JhYzlifba9GutpTKuvUUqYXrFaUlkbykqn0ROfNR7O+Xf/1//GH34qaCS3w0s++PgRi+UYvy7e1x7UaHa6aEZKkYn37e13iJIbnn0rqiMff/QRijLh+jJmfyD2+FbHo+67PLl+y96egMQtN8958fQViiI9n75d8uM/vM9f//Vfc3okUEWDbhvPtZlPlrx8LXn0vR8xHI52xuKVskHBJYkyAl/s1YvFhul8Qqt3StAVc/jtu6eg1phuBOTw9OgBnpqxdq6pkN6s4YSyssiqaxxDnDNZbqJWFeu12Bcrd8piMyNOFfxArMc3Z9/g+B10rN2e8/LyLbbh4uli/Ew9Q1ML5lFFZ0/052qZYDn2D+qhqFRVQbNpoEg11Pliildvo5oGewNRDcUqWGUZo3OBcigig/uDPYKut7OtMOYdsnLNcnVOrSaVwNMJ3369Yv/Il2O84PjohFdvz0Aqga62YzbnId3OgFzanFzfzqh7LrOhOFOKKuBnf/4Zr14/51LKw1tGRVCr4TsDfAlzLPOKNNvu9k5T6/Gr5/+eVuuATl9UQ6ONQxy/5eSewUaiO+4Mvn9X+70IrtBV3t2Iy9ag1aHQC0bhFU5HTMJf/OLXBLaF7TQZXZ4BsFqsODpp8+if/RSAPPcxFIvhy7MdFGIbp+RbFyW32CZCFKHWhE8++RnvLgSGltxh0PwE0zGo1cQEV3olqGsMK2cxF6819wtsp2QzF5/t2fDBxx9g2y0m0lzNd32+/O0Z0XrIdHItf88xTb9DlopFPZvNsMwavu/u5Lbj1Q112+X4wQGuIw/CRYSe2KiIA85Sa2SrJpl2zp/8VOCdb67u43lL4mWMqYnL4/XwLVFa0h5IKN11hVeLufdgwEZecm2roio0Ht27v3umbZwzH205OBCvaVbJ0+9eEtR1rm7EhaikoMx1SunXsQxnpEnBJo8ppCiDrtgYvklcbZlJ3PDFdMHRwT69juSBTRa4rodpahjSWFFVKyxDoVZWO3GOWsPhZniOKiGHqlLR7vgUaUEYinG4DSdUekGWllh3ON7SpYxLFlNpJt09YpqtSSqV0Zk4mPZ6HuQpm9mGmpTpTrMVjl+jkpyyJM+wHY3BvoVEo1IL6synKX6rw8Xb1/I1l6IwuD2Xkqd9G4MC0opKBjaqohNuUsrC2PXf2fkVpqWjS35et94mWq7p9RuMpax7fa+OY9Qowi3RSASwvZqP268zngooiKZqaIWCqmrYUko7l3ccw6yoSSjkKr5gujnj8FCUyIfTd4yml0y3M2p3vmHbim0eoQUJK2nCfDVOyDMF/+5yFYFv++R5iGOIsZrMJ2TbnJ+cfsA7uR6idMLh/mPMRAQDHafPL559+/hVBQAAIABJREFUgxmYHB2LC8pkeInlhTvhj1k8we0NYFESSR8J3VIIHB1bqXAssSmH/pxa2gIZGJcUZJaEKhZi3sV5h1zfcjPb7ALhAsiSeOed5rkNVquQVhCgSbGDqgrxajaNfpNyLOb+fv+Y9Sbd+TT5NY9wDR9/9AWrpRSvWUa0WwPOXk8wJGQzKVVa3RaTmfQjOXnM5dU52yRDM43d3PfqLXSlRSShWFplsFqt6XbEId/ptHh3/oq3Zy/59ENB3O61O6xXMz589BHffvcP4rnqBZ9//BHffy8ONEPVCRwFqozZXHIgs4q9/iGuW+JIIZrZYku4GVHlcr7aNj/7549ZLjbSPxCCZkqv9RGj0ZIqN+T31ViFM2wpaBFt15S5xkH3SBi/AjW/zjYLKQuDuhSiiaOcTrvH/QcCJvTt859z7D7g6t1w52/Urvt4noNlm8zWgnuyLdbkqUEgL/DPnp9jOVCy5t6xgPyoNMjSClWrdkb0ZFCrudSl5P9stkBVVXHJVyR8OYNMrXbQQfEVCp7j7SB/eV6iqJXwh5JB0l0Q05Jw5jzPybKM+XzOXfN9X0B8DOOfSGmXZU4mk1V3h7vjOOS5CMoMw9hBA+98pO4ggsIsFhzHkwIb1U6uvSx/gCneiTncBX13QeCdX5UipetF34mgzLMdcgn5S/OMvKh2EuuW5aBWMJ1OdyIXRVHgeR5VVe34zUEQUGT5Dl7o+/6u/9drsUbDMCSOYyzX2wUkWZ6TJTmKXNxVoWBoOppS7rwTLVORAai98+hrNutUFDiuLvtOY71eYiuCAC8+OyQtMjz7gMVMnIf1wEUtDRaSG6IGBmUCRrYikfvi5CbEROeoe0K1lkm7KMKsFMZrkVi4vlQ4OTql5k9Zr8W863Q6FGWCbTlocu+IwiXRKmOvJ/bl9WbBzc0thRZTl3u1E/hML2/unAowtD62aaGgczC4M/7VGI9v6XQb/wiWaxAnCxz3zuMvJ6JiOZtxcCw4V64f8O7yHbrp4isSQpUv+erJC+yahO06LtdXKVm6IJCiU9PVAtu2mc3m7N0TSbXOQRO/q7KUY66bHoWi8PDRT7B8sW+8fXWJ63ooxpTNSq7/JMK0muyfiDvB2fkbFuMZo9uEvrzQ9vdrfP305wRth2ZDjGnDu09gDlin4h
z4oz/7M66vX9FwDrh/KqW0Q5fTe03GN5dcXpwBsN/dR6WiLu94v33yS27HN/R6e3z9pTifPv5Rh//z//gV/81/+xdsc7Ffz+cBw81oZ8nz6PERy0XIQf8Bigwwmx2H0ehLbOuYh/dOAfj+6bc8fPAR1h2lIs2pig2a4RLnImm4TZZsU43Z8i3RRjyX7TSI1dudbcZ4tOI2WmP5OZoq5vnTVy9pddusVhmW5IuvFnMa3paGTOpdni0YdHtEyZx8Jc3N9T6X5xd88MknZLEI1F5dzLCcLS6x/JsmFQaak7OUIhdpWWE5Bkkizg/NLFBVkzJV6TXFPAjzjBevv+bw9AFz6aeWFim6oWCpYh1P8opRsqGIJySZeM79ey2+/u1rPnz0KYcSovrlr37DajPFmghY+fHRCetwwmy6IJPnk+VqOC7M5u+4TSWvLa14dPgA3ZIcum3OZjUmjSPOJP/v0UePsLKYigJdFeP35vUlg34DvybuIJtwwf17jxnfXHL2rYgbDo/2+PwPHmC4GquFmAttV/Tr72q/F8FVnMRcLs4AcO0+s9UNvUGdaCI2sY8/ech8tcZxAwrJ3/jDHzU4e66wkgO+WsZ8/OgDXr2boRtiYxkNx9TqLts83GWc3UBH8W7AEgdPrdYhGWusJiEXb0QA5LsO82lOHK9QDfF3mWZRpKDfcYvinFe/vcb3CpZrsdBP7+1hAPP5FYN9cYhYasL5xWs6HZERLnINTXPRrBxrKwn7mkndNQgaDi9fiIX+/PklX/z4BEMqmqiaT65MCJxDAl96aBxdc3W+xfda1KRKlG6ccjMZMZ2IfuoPTMJlge/pHJ7IbGFkYns661wssNLWKBRwbJetfG27jPB8C9+3SGMxmRwXomhL3RcZ0zLbUOYpgRkQSs5MViTcrqbYToBZFwvI91WWsyWKJRZZv+MxX2/IdIc7D9o0V4i3cHL6gJtLkXlAK3Ecj6MjccGMw4gyTwia+o6voqURt7NrsjTFsOTlpqzodH/AmJuGj69kxFFMKatZeWgzn204Ojgir8QYV6XKfDFmLtYYNa9Nq9vCdhxWsoqaJMI7ZxtuMS1xoOVFxnZbYctsc5XmFJWOpau4jvSisW2m8ymB7aJJRca0iMnKdLcI0zBDqTQ00yJO5CUiMTEUm3W8xmmI3zzcDvEqBdeT6mj1gF6zx+uLd4SSTxWgkuYlmzAml7yhStNZzGM8S16g1YrxcIvmVVSRePa60uNyPcM2Supynq3DOZvtLaoulJCW6xlF4VAqW5byORttDc0wMKo6HV+M8822zosXL/jzz8UF+vk370TAfJugFWKtHR37fPN0xam8MARmjfm8Yq+l4khPlKbtsVFzjh8d4MvD47ffnFOrebh16SsSwc2LK3AVOj2xrqIyZBWneJ7HRnrRNM0BMRWLjYzKshme63B7O8ExxGd5nsPx0QOWm4hOU4yVaSmswnMSmUTQVOh2HZ49+/UuUPSdFudnb3E8G19mcpeTW3xPKGgBDIcj1us1zeYBk4UIytq9OlGUMOi3KQoxfpZmcX274uOPRKXO9WzOzy/xPQcpooht2mwUg+loyL0TQea+uT7Du99kry8uLZWyxenVWK+WzKTYycOTL1iuZ9hGjTi6U+YL6Tbv06qL6utqM+P757/ggwc/RnYdpnnEfDEh3IREG/FZDX+fyWS0u8yVZck2zdiEc67fid/X63YJWjWmk1tMS1wQZvNrDg4OduqPq3hCq3VIr+OArL5OxgsWswTDXBHLLP9kekW7tb/D56+jOXnl0Gq1GN6KhJahT9kmEbru4slkQxgtsO37O4+iTTgTZswVJDIAC7yO5FJlu3kQxRs2cYYiA0UR+Wm0281d0HDnpXWnRKooCqZh4zo+pvVDtemOt3TH6cqyhCAIdgqNuq5zdXWFZVkMpNDAarXAcZx/ws1KkmTn1wUiUMnzfKcUCMIXzHGcHe9JvHanfCgztiUYlkmZ5bv3VYigzzAMVOmjhapg6BbcKfWFIRQ5tm3vnklRFGE8rGk/eLzJgKsmlTPjZAtFjmVZP/C+4liYHc/m5Lt+MLBt6wfz5jLH0HTm0wmJRDHohkqJxmAwoNsVl7K82KJrNprxjwLTYoMb6HiO2MuK5RkX5+fcO/5ImLUDSboiSxX6MuPeada5ub6kstkZBtfaPvPbKbN8iiXVc9PNBq/tUYTiEjgdReTROXvdBlUmzlpLCViECY7rEsg7R7Gp2N/vsZxKb7GyoDXwyYsaYSLmYppsMBx3pzD44tUzHLNOlqdY8tzRqWjUfFRV5eJSzP2qNAjqKgvpc6cbmvBWjLMdguF6NCIrK7ZJjm2KuWsaGkUxQ63Ec4erMQY9nJrKUiYJbMNFMQ3moxTdH8u5UfHi24j9A9F37Y4JKDx78WuqUvy+6zcJHz5+QBHrvHj5VHyWfcjDD4/5u19Lr6hApXbsERgDsrnop1Uc82hwn7G+oG+IS/xYWbN3r0NnI5M56yn7B0foSsD0Ziqn65Rvny4hXlBKJMB8ecN0kvDhR58AcDX7Jd+/+JpG47/nD38iKlBhPsRrhQxnXzOV/dfd6/Hdt+/wXclNGy+5uVxj6Rr9vvQbLAwsVaVIUo5OxN1ofDNndjtHk8G55TRQlQzbcolLccdaLG+JM5s809B1cc7cLl+xSRe0AhFguu6C6WZKERmspVH73kGD9WaLbXu75MaD0w5lETO8FOOiFg7R2qTSSi6kAXLN2uN48ClFumI1FGOjpRW6VnEgRSGmt0N0zyHwdOYysVhr+cRhwt0WmGRbylhDKXWitVhrvh9wdj2kvB6y3xd33/nllq7bwEQKohTXbNKKpt/B8cVnP/nu5wwO9lgu1mQrsVe3m8eE629IpfDXcJjhODbROuHBBx8B0GjarBZTVG3DOBf79fFBF00zSJI7M3OfNy/PyNKSRx+I/tyGG8rcQC313Z2uzCra/T5PvhUqld99d8Zew+bwqEe4Fvv3fDIn2RbUmjXWmZhnD47FvPld7fciuFIUjeODHwOwDi950H1AXqg0uuKC8ub1OWme8WnvEVkgL/HKWwZtg/tH4uK2aWUkyRX7B33aTZGhGY9nzGdLOp02vX2xGNOs5Fe/+hW5hNFt1lNqtQGq4hMuxQJezKeY7BGXU1RZ9jz/XlQdmrKaFpkmYXyB443ZSGjb9eiW1Sal1ztiIOWLb6/OQanIM0km35Y4wYbVbY4TSGXAxOV8GOMtI5RSml4WBWfXt/i6VAFEZzIviL0RviX+5ubNmO12TbHVcWRAGQR13IbNmzciSPP8CrUqaDUatLpio3n29RtKOoxG4qDq9GwMJSHeApo8QOMJrUaXsoQ71MVsekNZ5aQynRb4Dr7jM5quqEnDwNt5RODUSNbbHdlRdRwqLceSG93o7RDNstFcbef63Wg0yJSMNy+e4npig1qtFwRue2fgqWsas8kKRfVYShNYQwFilc8//gnvXopKUqEkhNsQV1ak4mJNkPuslzc8OBWCBIpeMjg6YDKa7gwQW80B23WG7Yt/24ZHuE5YLafokiR5O5rQaqpUp
Y5lS3J2kVOqOa5UOcpKBbfRx9JzzDuFvdEUz/cwUXemoVmZEiZrNBmUlYlFrVnj7NUV9Yb4vngaM8sm7B/eYy3NnLdRgqlZbGSgWOoZ02zMNg7R7kxSK5XRaESmQik/P0sMXKvGthCbdLxd0gzaoNVQZbBhmipHBzXmmxWlJFy3agHLNARL9LldD/HcilWyQTFEf6LoxFHEWXSGc2cQasR49YK5vIgn1oLjwz6d9hxzKw171YKPPswZvxMXvM9/+gdk5ZTf/sMbSlc893/9o4f8zS+f0PnilGtZ3j84qFHkBhNZzfvij45wdJ3xcsxyIsncnRDbswnjmEKeDLalYRAQheIyolGhaTaWW+PmRlRH6mkNzw9RVZ3vX4jMlapVHB6c7GAeRVIRxyGn9/fQVXHIfnv2ina/juZm5BKiqpCjlSpTKT1/eNDAsl3iJGEjxzO+XFIWBnG0oiUNCh3fxg8LXr/9GoD9/UNa7R6e4xJLUn8ab7FMHy9QMCU8xHWaLCYruj2xhtbhFl1RqAcBUSIu/9v8GsOoMzhy+PqJUEM62Dvm+F6D759IY2zXJ1ptefPmKYFzKp99j998+Uv29gdMZiIB0vWbtOw2qkRB5VWOZuQ8f/E1f/AzUUmaT+YMh0P29ns8lcpV9x58xM31iBIpaJPHqMxRCx1Hwlicmse7dyMUzcHxJGm522cbb1itxVzsdvdJ05Lx7UgES0CarVksJ7Q7A66lOXarcUAULQg3d0m1AYahMZtfoUkLB0MNUXVtF0CPp1vqjQBd1XZS5ZZtSsGGcicgkSRbFEXZweayLCMvUhS0Hfzmrrmu+/+pHBW7/0uShDzP6Xa7u0DNsgwURWG1Wu2+TwhHbHemxf84kLqD8ilKJWTcNY1E7hOqqlKW5a5KBZCmOYqisJUXElPTURSFJM92kEbLsoQpbSkTRbqKZXvMZrNdJSrLMgGpVNVdP0ynUyaz6S6xqCgKeZ5juQ51V7zPMAwePnzIcrncPaemaaxWq10lbzq5RtVUTNPm4EAIRdUaAd1uG9s3SaWUvmH4UBnkEnqaJAn1ts+789f02mKMvZrH3t4eYTzEl1UaTTWZzSbYUuApTJak5QrLPWQrq4XFaoPvu0zKNUMpxdzxW2wXEZuwks9k0Aw8Xr+55XBfBMe34yu22y2N+mPWU5FsOD44pNEM0GSN9uJ6RZZWOL66g8RlWYGmKTvBkKvJBKPl4vs+FXeS/3Uury9J8mQncrMOM1QrYSMFPOq6RxhvcQ2H4UisBdU0qEqTUk2YzsQFVisNHjw+Ibkz3R2Puf+gxmq1YLWSUvrmkjhSsF2L+TiS87mi9AO2slI3DGPiUOHBfY35Qlz0G60DqspkONyAKueZ5XI7HqHI4GO8GGFoJv/iLz9nKS/+3z55id5sYVQONwtxoZ1xS/Qs5UcPxcX2xZNX3P/oRwR7GqmEdY7PIw73eyzSmGNpV/Ly5TmqWnE7OxNzamRwevhTzs7O6Pviwq5aoBkKr96ckW0DOYcu2CYpOeK3FOkKRctYrtJdtdCza1DVGU1eU6+Je+wHjwu+fvI9mkRMxOsVYRRhBQErKfmdpxb7R22uz64pSrHeA79DvozZrsT51DA+IIks0rLiY2kCfXH1nIPuJ4wmF6RyPysUjYs3F/hdEegXWcZo8o5Ov0ORi3XkdQvyasLwKqUnpd9TNef2dokvBZDiNCHNCq5vr2m3pYhQngvEhlSbtlyPq8tb0iji9PgUgCoHw1AoIoPhlbhXRhHsfXHA86dC4W9vUGN2G7Isp1L0BZoNl8V0yeR8zaefiXW0XRfU6nuYpjRuzwzm04wqVxnfivNpb+8hhWuQJTaVRDqVEVyOp7skwsMPbZ5884xuZx9Pjuf4ZoKiLbAdk71j8dqzN+eYtsXNSKxPXSvpdjqkVUF4l99RawxnG7p7AypD7FMLeX7/rvZe0OJ9e9/et/ftfXvf3rf37X1739639+2/QPu9qFzpusE8Elkyxe6imibqeks6Fq/1/QMeHH5Ku36PvCWi/rfvfHIt5KtvBIEQNcJ2dLLM5mYoIuVwDapSEEYLLq8kKVhzKBKXVJb2NqVGlphYlkOrKbI/FxcJUTKhKEo6dZEJmN0OidIxxURkR/yWSbRd0uzsEUtu0XiypB3s0W8f8Z/+5ucA1LwWzaBGkf3gc0Xmo+UlqjRzXMVLNlmKPs/wDFGtOz0+YDSdYgdS0tVR0VVBqL7IRVbDMWooZUFeKTuIgR2EOM4Bn312CsCTJ2+o1CWLRcTNUDxDp9lGoSCQBP50E1MVBVZVEUszyKbXpe7WmC0XO98nVe8IGIsI3Em3Ec1Gm/EsRNOkzHqZo24rGq67i9wX64igU2MlsyyZHVBaFZmjYEi5/Q1bKiPG8C3MUnzffFGQbWd0pIjJbDPHdlwsu04l5W/zNGOw12a9WrFaiP6s+yYV2x13CtWg26yzTlY7KE+95VCUW1StxHLE3NCshJOHPRZLkaWK4imabmJbdbbJHbTGx3ZqbCZTKlkR8l2XiiWZNMG0DYNks8DyNC5uZPVOU8kIWW9CDgcnst8VsNgZVWqWjqlrtIImWSJ+n4FBvTtgs1pTSj6Fh0VZmSh3BEzVw6xUgsKirkvyeJZTxg1ahx7jhfh8NVXJlAVZIknvuQeJhmFX6J6o8mVxxOR6ger71Lsiuxwu51iqzeR6Jt/n4vouWBBKbLgeQrbRcVo5S8m5Ovm4g6qtGF5KkvTAwa1VOIqJi3jOsIz50cnHPNUEJjpJJ5zeO+AnH37O3/3VbwB49t2QP370Kd+9ud1J23/08Qla6e2kyuu1OvNaSUOJWd/KzPn4Ate2qHKTbltkz9+9fk2320VTxO+NVhuyPKTXdxnsiUzdcr7g7dlzPC/ADkRf+V6TF6/e0O+J5240Pcq8QZm5zGIBUTPtjOVGR1dSTo5FZnA+SsgTk6b8/pvJkrrjoOoFzWZTzrMlvf0ecZSTySzjfDnDMFxMySdZhUums4iVrjGdiWpav9Mmy2PKVZ1u94fnunx3TSAlyA21yXg8odk2OJX+H6tpyuH9fTarhIb0zPniD37Cf/j3vyLJxVj90R/9Ea/PviItUmxP7EHj6Rn1psFsEqNIqMeDe6e8eXVLJaXnq7xkOLpkmyx3lY84jfhXf/Ff8fz5Sz75VEJwQpVGs04mIXjxbUwcLTCqAENWQ25uhmi6SlrGTOdiX+r2OmzCBd2mWEPLRUiebZgvr9lKn5KDw/uUhc5yEeHYcm2rKpPpBZL3T1n6FKWGqoeosuzmOBYXFxd89oWouL15+444SsjyZAd1S7a54EkVKbaEqG6iDb5f28FRNpsIy7IQy1PsEXeCE2EY7oQoDMNgOBzu5MkXiwWdTmdXhQIhApGmKYZh7DLlruviui5LycusKoWyLPE8b1flN6R4hqZpu0qVaZpUVbX77Dv59iRJsORvUdQKXRVS8nefVfcD4jTZVX+zLENXVJrN5u5vqqrCNE10Xd+9dgdtvHvONE0JGnVUVWUrjdtNxyYvChqNxq6iF0URjtPdmVCfnJyw3W7R
dXZVmzzPSYucslJw7rgPakwUbnYwz7zI0BGGyHfQ/VWYsVmnKCUkUurZ8yr6e0dkpRQ2Wa0pFJXJYrozbnZ8l5vZLe2TNl4g+moyXNL2NfZb0j4hSrmdXdE/7hBLeF+haKi6Q6VCuyeq/FG6JhxukGATBgdtFvMI3agwJQe5IqMstliWWAv7BwfYlo6igCNfm66mBDWHdZjsRCdyNHK2IO8WzW6P+XQORYkq5+J4NMP3WuRpTk2+r8oNptMJruQIGZZLXoQUpY4uzY0ffbjPfBGi2wqhFMLZrHMMx+XoUJqDr17S7OtcDm8JAilIMHuBaef0e4dszzP5my1GsylpJtbsh4++oKvanH1zw1Ba4vzln/8Fw+0VXreHJu1YXj1/hlJt6bcFvO9/+B8/41e/PBNCBqnYc3Un44P7ezz5fsJEyoJ/+MljLofPeHchUBu9zhF7hxXnZwt0T4zVu+sXhKFGo96ndyp+z/DmNQe9GqORmK9Hp0e8fPGW49PHqPodJWXJalLx8Rcfc3bxKwCsxR77xx9wIf2rbFtDiUUVOY9Evzw6/ZCr4TWu16XRkyJMkYoVfMDNlYCozaoR908/xDQzLs8FWiAOFc5nr8kKnW5HjE3gatRrAVNpdhz4EE0TLi+MnQ1IHM/IswLLq3jyXMCx7x+f0KxrrDZiXDJMFuGEPC9ZR+K895wWm82MloTIr5KMwG+xf+/Bznbh1bvX7B89ZDNPqFQxNz796IjJeE2lifVSpBH1Wg00gyQRFVOlOCQL4dFHOoYh+vPd2y2Nlo5SSV9PU6fdsfnwk4c7c/WT44e8u3jGfBriOeIZhtc3qJWNL++U8801H3xywu3lEBBn7eAwoNvZZ7WqiNbirFstMvYGfVZLIbbidX2m6zl22aB3INbseDGhs+cxW8cohthv5onon9/Vfi98rnp7TvWv//WfAvAHX/wxw9ECqxZzOxPuyA8OfsRies0yuqWUEK5SNbl6N6LXFhuNQsT52w1B02S1kqpD2yGtRpN4VVLKQ7xS10TLOv8ve28Sa1m25nf9dt/v05/b3xs3IyIjIzNfk6/qvSqqVAjbGAQyYs6YEWKEGCJREiAEE4RgjBAIT5AsSzZyGbAtd1V21WvzZUZmZGR0tz/39OfsvmWwVpysJ1GMLFSDWJNQXJ2zz9prfetbX/v/h11x4UTRhiK36e85BJ4Qwq+efYMTJMRrF1cSouXJlEYJkTQ0bLM1CqZAGCplM6LZx3YMyiankHWgbQ15Gu0aHXXdJ1rD3fqCruR42RsHXL69oGlVkkzM/WDvA7bxPQdDsbnrLGK2ntLrhazuxSVe5gVPPj5itdxSroXj4voDRvs90lRyoBAzW6yo1YZaAmiYtYrll4LAACiKnCKrqDOdI8nKXcQKUTlDM0vGstFQVV1urqesZVPS4f6I9WpFq6kkqWxUUkp0y6dsXeJEGFxVFpFHOZop2dF9h6Ko2d/f35XNzWb3BJ2Aoo7JtuJQZWmFYba8q6zp9ToolNzezHbcV5ZhgKmTZwVFJIzARw9OSauETJZYrNc5eRbhBw5lKmuErQHb9Zq9/ZBCNrEkGahm825ZKJKGpq1IkxwkV8Ro5IuGdbXBC4UyV5qc+XpBzxSyuL6/pw5ryjRDU8X+aSgkeULQ9+l3JIDGNkdtLG4mQtEMDi2KtCAMPCKp7FqjwrZ8At/m+kKkxDWnJasUgkD8fpVk+JbO6f4e06lwDJ0gJN2kZE7Jqyuh4IchuGpAmkmEoTpBjQ00N9khXg38Pmgqk2RGnEnEov0HPP9qQk+iflHrnB2ENF2bryXAzIGtYpRDwrFNvJVGWHXJ6fiMe4nMd5+o9Ls66TTnR/+aKMVQ9Aa30nkmyfrGo4c8ePIB6t3PCJfCSPuj2xxbeYjaVVDkZZw3Or39kI4sBbm+iYnKkqoqyJYS8dKseXR2wsXlNbHsXTTyDo1Wslm+K8MYQ9NQVVssCXKxWC/YO+yB2rKS5S7HB4ds1xv6HWkgbUpsR2W+WmCZ4hw7nQ1X05hWc+n4Qj4Puj0Wt1tWkTgLibJGy2zCrkHbfGf02m7Lelni2cJ59IKKNM13JQ51Bb7XZ7vd7njE8jRj2OtzeHjMWJaD/NN//n/TVDV7Y9EHdnh8xGRyS6nMeWfo2+qYtq7wO7BeiOd/+OEP+eM/+SPOz0Vt+na54NGjR1xfzrFkP9VsfkPQCdkmSwYSGfPg+Ih/9k9+QWcgzlVb27x+/ZLh2OHTjwR3YaskbKI1Xz17hQR2ItmqfP+zD/jiF+JCGwx9un0LsoDBQKzBv/izP6U37rKKp+iSbyhLWs5PHrFYCGO5yLeYpkka6ztOQtsTJVqz+ZrAlUhnlkqaxuQS4U9RFFB0ykLBkKVXh8MBbuCTS1Ss9XaLpmlUZcNGBp3CMKTjH9A0zQ411XE8BoMB93dT+WyNKIqo63pXrvUOTAK+A67QdZ26rnelX7e3t4xGI6qq2jkaWZbRNJUAxMjy3dz/vJPkeQFlWe7Q+UBwPlVtg2naO2dHUZTf6MtqaqibEsu2d98rqxzTNMmSFNv4DqwiLfJd75ap6aRpSpIku54r3/d37/fu33fffddjluQZ/X6ftm135Yuapu36whrpcAlEQ3337NVqgWmKEsc4S3efKaoUz3fJZBnnbHqNQF1oAAAgAElEQVRDt+dzcyvsBssy0HWT+8kVriwrpfEwDIvry9ccn4iznBdgOyGa+Y5HbENTGShJxSoV+253h5RRRlkuKV0JnOQHlMv5Dt3SMUfQpgShzb0Egep1QqoyRlcDahmIUlWTxXq2MwINQyNLYiyjSy1lajDqcXt3zWAodLXtGNzdzjEMC98Xf0uSjI5nsk1SPBl0mi42GFZOUwvdYmgWTQ0H/ogb2WOmGh53kwtC36MnHSDTaqkbm0ISpHYHJnWrczt5SegKw9T1RhhuRBgecvd2IuUl4+nTMyzznQHdspwnbNY53YGQl0V8DcWYMoWjEzHP5fae5Ubj7InQnY8+PuUHBw/5h3/vc/Y/kuWgtxM0s+XlqwWridCf5+fnhHsq6Ux85sHxQ7yRS9g18GV59ldfvqA1KwzVo1UkoqdXoqk+k4l0/Md9FGq223i3nl88u+Z7vz3ANY6Y3L4B4Gj/gC+ffbMDk2mViLJwOT4+ZbUVumt62xKEHnUz4+1rUf6N1eFwf0wrAcloK9o6xw8MdOM7PbC8s0ntV8SpCFaNgj6u2iCPLAcP+symS77+9prJjXA+HK9mPDjB9tIdGa/WWMzulthDoScDv+XNqylheMrwQDgD9zeX3N1sCfujHTDEcfcQhRW9/Xe8j5cYWoXStCQywFsbJv1AQ5HcW9s8JV57nO33yVIxJ9XRuJne4ukeV1MB8PKT337Cr362wB0IGfO9nMWmxVTqXeDEsAq0NsDVVWb3Yu36nVMae8L1S3n3Ho2oap3RqI8q2x401eH4ZMyzL77iPpZgde6YuorIM8nP+eghi/sVg569a6+x/Q5RDHF2gW74cq36aFpKKav8qmJFON6jyXSCQOjcr15ekKsF+8dnDKTdPru
z6LhTTGtVym2ynjkf/gE6q1Q5Jtiq6LMT9/odK0Fa8+XDGQVIiTk0fE0ZZku8V1pZiKZfBh9QG7I0VvZh0sa8TgMOHuUuwNXU3DNwfkrUpaiv9r7RLH1llMRT8N+49o9YThYYc8ErHY6bmE0Zq6qUik39Ddckq3M+HDTNIZBl3m6xVuZ8xye2/mPCFLLaK4ZnIizg5+R+P2UufgqbTkSHTCMKXbPWQkDcMbI2a9rzHNiqPHItb9nkscJ2x3UlgsU4Rf3EBjvhG/p2k+84VIOt97CU7nG1abC3RNHOCfP35KQ0S8T2klVFmrLe7uLiiKDMcW/V7bBXmR0JFQ5dXdmpPDAfNpSiXhmYXWUCYFmWFhuOLMkcYWnmFwOReiHstwjdI6DI7H5G+kmNNowE10S39ko9Zi7eo7h2ySFV1bvG935HFxccmg16FWxefi1iGLNYbGAaol1+F8T7FVUaUBumuYdLoxHc+jLsW7rDeXdDsjanlBmU33HB6OWCxvufeur9oKR7HZrlMaQ3y327FYxDPKhdj/x/2EJlMpmoqTJ+KCOekesF15WK7BaiaFWtQJobphk4ixyjIhuKa0PlYuxsWpbAZdm9U8ZnIi+k61R2TRnsATi0S0q/GDHqcHJVu5ry7jEAWfus7xu+L/8qalzjsMZNLCUFyW0ZKuZbGUUuyWMUIrG/blErcv+oX1fx7498/ictW2DXUuHljRbnnxxRmL1Zq7azFI509P+OwHPf64dycAAAAgAElEQVSvf/cHbi7EZDTtkt+s/5qOIzpkt69YbbfcLV9xMBC3/tG4S9k2NIpC0BUHmQ9XrzEdm9MzkfW7erviH37xC/qD4OH2vlmHTG//lqopMXSRRWlqhdU8pyOJ8evNDDfQub5U+dVO8KIOTw65eJeQbMMHn4sfff9H/Pu/+S1bQxxGBv1PmDcmStjw6lYY/zlWie8P2KclG4mh3S5nGBb0A3lo2rfs9hsMw2Mf3ldadiith6blPJKLyHJZoKuwlrjsKMwYDgekcctoIA6PkXrD2dmITF7uqlpnvZuyyxqsRiz4vaMOit0lrHKOn4rLgDY3qdqcP0gPL1qdxWbJNFEeHL9t26dreex2N2Ti67Ftm1pp8SyZealWjMdd5rPlg2t83ZQEXY8wDNFLMTlmyxW2rWCYYhLEcUWWKgRBwJMXIjMYpwaOW5BUBbH0uXry8hFON2C+SmRMVehKQR7VjKW/QpEV/OIXv+bJs2esViKmNruU55+0JA/qej3sYM/0ZssPfyhUIpeLPXmxwFADLq/FZa43gLZuHg5SQc9mH4WE0Qpkv3R7LlWZMuxYJNJA7+LDHariYOsyE+lmhFFC01r0J6LP1SamYM0qDElSaTppZHhqRCC9RaqyQdGhSkuKRPRBZioMD3p8++Y1I6nEF3RUws2ekSSczncX9MYT8iqm0cSilRUNplJSRSG1XKidfoDbUfnmnfSr0DV0VaVCfXBVbyuFXtNSFgXzhXjnl58csbyKmRxIRTFdZ7HZ4PVMdGMi36/F80ekljiY7nN4/e6SwWhEWUje4MDFq/qstN+Rl52HsTK7SzbygHky9vjFb/+ei+vFf1Rxsz12uw2m5eLcKw+qKmUeYVYiFp1uj7jZUexLDFkRNjoq/uQQ09XQZOb4ky8e892bOd++EhvxT/7IpygyZrMVL4/Eu9iVzSZXSJoGXxqbambNbgu2JCMX2Y66McnWFid9MQ5mAGMnYLFeUclnuLmeohcGyVCMsVqZKEpGrZY0iHj5yc9/wPXsG84fPcNW7z17XNB0okwcXh89OyUtdoS7kmdDEVPfvr+kaTVm+4QrX5rxPvLJ8h23H0R/LpsVSqXguj6WIU2vVyVlovAvfvwn/M0//gaA9e5vqHVPKL4BxwcTosTid7/7Ff2eWAOPApXbt0v6/RqrL+ba8tJgNzM4G4i+2y/fEG89jg97LJayiur2uVvO8NQTulKp8vzJMbd3V5jSO+3rP7xjeGKTFiFVda9u1yPKFoT7GMcTmc7ugYtabnDFmYza9+mYHkY+pmmkP86gi5JmDCT/aLmbE9bg1S77laiODDuHHJx1WaZrxlLasTd+xHV6jSa9hR4d+Ly7e4dv+QzkfrHb7FAo6HW7lNIvLstjsiyiK98ljTO01me13SLv3ZR5ShqVHJ8O2a0lp7TI6XR67FZi/fnsxRfM1yGr2MGTPIDdtkZpAwaTgvFEzKNXv7vhpH9IKi/eCiatphB4PS6/vUdaWLz86iXf/5dfYhqyclVWHBxMSGOx7/z229/TNhqdTo0rD4VN1tDWCh/ufo1tS7Ww5Yqu3WEg1VD36Yr9NkPPa2LpJXZ29hVJGbKfTzGlSqRpl+yjHf/7/yr+xp0E6MqK/TpGl/yxZLal37GgUgilyJQVK3gDg95ExF3TpMR5wcXmEltykI+9HoeTERezC1LJDdVMD9/skWT3lwGVipiqCbFdqXhXq+iaS29UEsl9czYLUdqIUiq7RguoixKvH1BKM6OkjtnlLY+Pnz6s+4vlO1znEROJ2pjO35JvFE5ffMFkLNEs6yWm3qPjivmxXt2x3UbkjU5ZiMm2ijacP35GlMxB8uN0BXq9HoOe+J7L23fQqlSZRS69qdbbirYy6Hg+uuRhV2WFZbgosVQn3M8wun3u3qV0ZaLW13MMFwzTZ74V86HIbfq9CUspDvD6zbd88vyctlKoJXJlsRciIcePOmzXkudWQ1NpvPiBSBS/ef2Bbt1ldJqiIvbounS4uLiiG3i0jdjL724qdGfCXCYyPT9Cay1uLm8IfLHG6o1NnlxiG0NcV2RTomjBsD+mkgiJwcglTVXu7mYE0n8sz2E0GbNeR1SlmNueMyDe5w/ian7gEMcVaZzhSh6WbtS0jUkSp5w9EpeGKJ1ja81DhUZtS5oy59Hp8YOfkqor2F2X2e2MIpfjZ5rQ2Fi+eG5NUWjKiOlNQleTh3pVw1JN8iQllSJXHeOIOmtIFTG381JlMnmMpoKqe/K7ck6OejTsSaSAhWm5BIND9jK5Ol1+wNa6NEmO3op4mYyPWS4zJtKk+frDElM36XY8rq6kyfVRj4PDEarqMlvdi7fFLGZTBnINbqoIxxrg6g19iSh4//oNxwc9PLdDLL235vMp/aMO0V70QZbX2HaH2+mC+Z2oTh4ceJS1RpSE/O1f/jkAhmaioGJ05GVu0uPuZktRRTgd8ex2rJOmMarm4EmvxH2UUZMTS7SQbSWkcYimxJiy7+oqQ7NsVBw0iViQGmD/ZPtnAQscjJz2T/4bsfF//v3nvHu9xgu6FDLLQWthGDmHB2f85V/+BwBefDbBsApuLkTQn5+fs4wv2ad7nj0Speebd9d8+vRzVtsSSyotzRcxZZrweCI69pv3V2i6gqqqaNJx27Yb8jznh1/+nCgWi0GYZtzObh/kGuOoxNYDfvC973N1I4Ipr1LyOMT1OiANbPOm4PX1irXcTKx+hybNcFUXUxGLSFPtePrkgCTdcHMpFg3dSDiYjFiHYrIYxpB9vCbLTFqZRTUN0Nselqk8wLpMqwt6xoW8
hHa6I3QzIwgCqloET7LJcD2fk8diYbubztnulvzg2UsqWS0wApjvt0RRREcqq7Vtw/sPb/GkQIHrujx9/JS/+dXX+APxvrtNxdHoCNdquLuSWbdhj7aBSGYP02JHb9jBdrpEshLhBS6b7Zy6Lun5YhGpqpbrqznjA+l235QoqonjeWz3YqNyHAfPU6hqAQ0FqLIVy+0GryM2L9dpUZuGzdTk6JGElcYKZa7iD0xup2ITUPSUtk4ZjsTJJkkKylThcHRAIc2Oj0/7vH1/iWtr7LbiuYK+ynq35fhA9Kfntfz21+8ZH3nY0sDT8zw8z2OxWOBKaeK2bcmK6kHK9+yRz+00otc5wpSxmBchWVvRqC1Vea/C12Cp0NxL3WKiqTpdt8NiIQU0DI2eH5AWKYtQLFqB3zJsHRZLqR42KlH0AfUmpS+JxsU+Y+C5JHVEpUoolBXg2BqrueinolK4ubrEt8c4jhRuCISZZ1nFKKWYI3UJtqk/VI3Sco1uBLSUHIzPAQizW3reY2aLDwBkiYbrmrRazGYpFZSGJvFmh91TmE/FuGuqzelkwvZOPGOiTDEtBdOV6XWAtqYsM1EhauUlRVWp6phxX1wcW93mdjXlpNdBk1WqVRahNDWPTkbMlhI+aBnkpU0tD2lZNedi+oF+f4wh2d1erjGtM7yeTpJIcmyd8OLpF+xllXF6eYnTmZDvU3xHPPvp8SlJFLMOp+hSNCRLNJ4fn9DKzfLV2ytOjk5IipijE9F3tmJyvXyPrujs1mJsbNdBtQwseeAL+gMa0+buak43Ff/3q6+/o3fQ4eiwj9aK2NOVCY8fj/nFL/8cALMfcPc65LNPn2L0xBxNkwqjcvj5z37Er18Jg/dvvrng6adnVIWE8mVzQCWPKlxpbNroGS9PXrBI1ixKcUF3rRHH3Qmzd1L5LM3x/BFnJyP+w69Essrpehh2nxqFJBJxpughQRfCWMTU509fojsGX3/3e2xFbISPD85Zr7eEmy2PJaG80mERx5SVWIc79oCbi5Sf/ouv+Ie/EjBSw2uxBxb5WmZC9y3rOKR2hZANwNHkMUk+ZeAd4khBhJvVikWY4agSdl2VBMGE/qDH1ZWAdQfdCUEQYBiakHwHdFcligoMCZcuti2jicsyvcCRQi35Noe6oWrFGiJiqoBWo6pEbAx7E0zN4OZ2T0fc15nfJuh+i2c6hDt5WfUMNNVkK/tyt1nSP5oQ6DVTCX/5Vz/5CdFqR1TbpJFY8+oKRoM+eSX241KrKLMcz1bIJVTRVn0W+z1WJxRuoUBZqJxORtxdi9gMvBF5uSHPN5xOvgAgzGrm8XvKNmEnEQOH3QFlmHB+8AMAFvmUrImYHJyykntt67iUUYTpBhiBFGVIa/S2ZNQT6JawithHCb5pM5ACCIoqbCQuV3doUlSjY1vk6y0/+uKPAFgtQ7598yu6I5flSoyx65pYdkyeOOStVL0NXLJkTyyTAdtdwve+fEpdK8zmYozPnk64uLhiMO6TSHL8fLVkt2j44Y8Fmkg3V7z63Ss+efpTDoZi/s83IdP5LQeH0jy7UgjjgiQvKCXs2rEMkrymVfe0chyaQmVy2H24oNwtbnDtPoGncHMnqiFlZeJ7fXStfBA3CoIemp0TWFKdhz2rzZbxwRP2Uo1x3BlhKgbbcElfmlznec5uF3J2KhNMjsbt9YZ4X2I40g6mzBj0TomS1YNxs+1Y5ElNIysDnjPhaNIhyVOmUtnRdW3aJqEsGuJUvPNg1GWfR5S12AfWc4XJyKLrW6i1tMlofXbhhuOTCbYvFURvrvCs3kOlrjdwmM/nrNYZphRJ8QIP2294/3aKJiGcR4dj0iinKKSdiN2gGxqKpj9UwTe7PedPjlG1mpUUc4ijiFZp0KQS6HAwQdcb5nerB0rKwdGEusjYzhMcUzzXdHeJN5g8XPjCVY6hxORtg1qLC19/OMC2NN5f3uJJ5U+HgNasKKWCqI5JmqaMRz0SWV7SNIOibLEsDb0VD59mJZbvPBQf8gxef3vBy6fPubkSF+i0LvE6Jaqs1LelQxpW9PonfPNaoMQOH1s8efqUu7uUohLjVxQFtqMjNYUoU9AUneHIp5VyD67uEPgO13dLej1pNp5vyNucTFagdbOGVkczCp6cC6SFbu15++YaKhdc8V1ltMOyLDRXvMvVxZLDXo+63nBzd38e9tiGe4HC6Mt1t2ygcbCk8Fddl3RMlbQouZNCW71gRJmVVHr7kKxe3k5ZfLP9J2GBHwUtPraP7WP72D62j+1j+9g+to/tY/vY/gu0fxawwKbmAc/96vUNbevgdxwaeVO/urjAdz0uLq84eyKwr7tohq+qfP+H4tL49t0HltsU1ehweyfujF73hKhUCTOT3Vbc6Oe7PXm0pKmkL0xj0HOG7DcR/b7E8RZ7To9eMp21cC8e4SgsljNaCfrWFIVVuCPMA9Ybkdm5ur3i+PiY+Ycpli0yD1XZkMQ7FJnZbW+39P0Oqb6lkvKldV3y3ZsrmqbCdSUUy/N49TZhOJbGZllFkiSYJqiqyAhHYY7vhYSxTicQv5elOdG24fz8HIDNfoFpGSTJlnAnzTE7XY6PT7m9EThpzQLTNIniAlN+92KxpW0rHMN+IPUt1is6ne6D/0lV5Lx/+46uM6JMBd/hZDzCNlWi7Y7nT0Q1cjp/y89++qe8fy9+L887BL2A9WZDKatp201DGtfYjsZaYm/TesZgOECX2Z80aUjzCM0S/QpQqxGK12d6e4XeSqy95WIbJprE7DelSVXq5PWeblfw6qJogeWCoZtsV6ISeHDkcXh0xGolZdBdGzcY4No520hkkvvdIaNexdXllE8+F32+Xie4tsFuJ/rp+jrmyfMuSuszkTK20/mcuoH+4OChnlzWIeOR/wBPubndMj4YYWg1l28kEffsEfv9gs1uyaAvzYcznYoQT2a76lplvV7Q9jMUmWHvDjz2mxl9t0uj3UPiCuJ1iyGzk4W6IwlzRqdHKIaUh81r6qYl3Rf0BuLZ26ImDmNUyZPar2I8y+XowKQ3kDKrS5U03WD6BZWUra5qk0rRiWRlJ/APUY2EaKNjSE+JaBWjV1OyveiDp08+Rdd1vnv1NZoizbnXe7BUvE6fwf7e9wUGXZ9sI+WFC4P+0CDZZw9VBs9xuL2JSJOQ02NRwdB0hbLQKWXFfjtfo9UBbaOSICoPUVViNw37TcZOViyVMOPxkwm//LWo2ExOR3iuC039UG3qd3wub9/j6S2mKuaxqnmstjW7VHx3XaokScKjQ/9BYCZOMqqyZjAYsthJjLdec7e+RrGl8eezM+ZXU7xBgCez8G5TopQ1kdHSKhIOtk45fzKilrKys7d3dDodTkyDjRyX558P6B8OsAKVN99Ivyiz4vImxPGlT4oWcPzzI4yOwetXAn7mNz49R+Uv/o//E1wR+/1+lyzJH/iVpyfnhLstWVHy2Q8E9HofL7Esi3iV4PcEzHkxi/DMFeNjgSDYLBoenY/YFQXn0lfr4uodbb3GNEeYrlh34yzD7kxQJTQjjgp2q6mwDzBF5vPdxRt
6XY+D8YQkEVUazIhO1yEuRcZ9MV9Su3tuVm94cS6QDm8v/8DFbPWQMf2kf87RqMPddvYg07/Zx6TJnh+f/oxfficqbNs4oVi1nH8lKiZXt++xzC5KC0Ff4vjNls0mxbdt9jvRn2P3FNcZsttLIv7pE25ubvDHxw8QMk1pGAyGvL+e0e+I7TrLY1RNwfIk/NVx8AwH146pWom+cFRqvUE3ffr3nNJBh7cfbpkcSA5Wx6CsKyzdoDOSfffmjnC5JzVUtPreWsIhzzS2Ei5tjRsc32K7zBhOxOeyrGQbh4wHPrGcM6ba42J6i2GIsaqUmlaHIjN4+0FU9BqzJCtbjibnRFMBUe27I9KmJpZWF6YDRwcvmS2nRKnkZQQWaQGDE48wE8/leAFZHktRayjTkoHfIUx3bIp7v6MMRfHxR52HCqJm9PEseHchoJEXFwv8rsZicUG/KzLlZdGw2+zwfe9BiGKxXuD5HRxpKzGZjCjKlCzJMRX5zmFNuBKxV0mvu9U04fz0hOVaQIyfnE94/uwR+3DGQfeJjM8bDAcyWQV4ef4pd7cbrCJntV3IOIhIax1DbbBtsUc+evqUm5srWrmvKk3N8eEhaXjD/FbExtGJTauG7LY1/a6EmrUFetUnltYa+32K43XZRzGtKq07FJ9t3FA0Lkh4ZpFvOBg8IZfS5UUR4TgOrZKRpqLPz05egtpS1nu8juiX9brAdg1qiSgo1JC7dQZKwcGpQISEG1guMxStQNMkRLWxUbkj3EiLhV6HloT1NuGg/6UYh2OH8FVKozSkiUSzlBpGoFPKvXa5KGlVB9uDOBFxYLUqb95c0Ouc4DuiX1aLOWcn5w/+XI5roSo68+UMV0LNPM9it02xbfNBxKM/1AUH/N4vPNzjehaG71BLARTLM/lwtUCtXToHgubQObCJigxXypInSozja/8ve++xa9uSnel90/u5vNn22Hvz5s2b9GSVgGKpIAEUIHb0COpJ0FNJzyCoKUBUAQRVxSSZ5trjzzbL++mtGhFnUQ2yV4CycaK3N5aZK2JEjBFj/OP/cRSLUvY81YVCVCRMLocCGgvsPp6oWxNPogWOh4hePyBO16S5WL/bm6dstgd2p4yppFC/GF0xX92zkJqgh11Bp+syW78lkDqaJDEoBbuNOA+GPQ/TPbLafeDnvxQtOOv9I9/9+B7PNzhJX47SMJ14LGbCzzl2h0HP5XjYgSLW3fZ18qQhSTI6n7THFYW6MUhLMedabaNj4ug1j5K0pCHmcADU3Zlef+T3KBqVNBK/JeyHZKcCTWl5+eIpAA/vZ/ScMZvD5gylT5OSJzd91ltR2a0SjbIJ0LDwHEkw5RpUlk7aVGeR+br6JEL8L4/fi8tVS8t6KRbA7/gML0yOccRmKYLcr795ztuf7rE8hcNBOJ3dNqMsA6paBAf3jwtMJ8R2FE4PUpxPg+TCpq4yXMkWVix2TKZTWksY+OzjPZquc/vsCdu1CP4Px4xet2a7n4MmYXm6TTd8hi6d+mq55/IqZLfJ2EhGmG73ktUi4fHjjOtbcbFIsoJWac5CakpWYlo2puoTy8ZG07CxbRdDtygr2Xjb7uj0WzTJjniM9qiqie+7LBbiOT1niu0VnA41qTzcTLvFamG+FM7Ltl0MpYOqpegSTrg7rFmsllieFD8NNeL4SDu8Yr4W85kWOY5m0Bv0efdWXJxefPGSxWpFLAkYOoFHkufUpYrpiGAyjXJaEwL3ioUU5zNdh/v5A5HE+jtmh48fHlA0RYCvgfVyhmP5dOwhHyRm17BtTMtDl032egrjwSV5dgB5cbJNm/uHBagKA1lKt8wQt+ez3onPKasWBZfB0OXjnfh941GPskqxdJ1L2Qhb1zHlscDTRbA8DMdMx0N++vE7rm/Eet4/3GN7LqanU8vS+npVcPu8iyG1IvyDy9OnfTarw1nzwTBUwq7F8XBAV2XTcFuxWe/RDBGAObbHarkjiXI6srTdkuP5JnnpUkhHsd/vePnlBbGE+9R5gefYHJIjvZFkcVNr9lHM0O2jyYtolLT03S4Ps/diXUyV63GXVGn4/h+ETtk47NG5GYGqnlms5ssFUbpDkaeFG4R0Ow5VrrNZSbHcLCPoh2h0SCVZjGIDqkUghTCrOKVVodPxSEqpEN84bNdrDFXMQb9n8+7je8IwJMkkiUFmY/oGSRmfNd6wTGJNxbkR9vNV54ImVTDYUVXifZZlEwRd4ijFkZCqTqfDu7cf2BwlVtwNicmplJCNZB0zDIvFxxUKUJZi/3VCn/iw5UrqTjWKyqQ/paqqM6QiiWqCzohkXdKxhJP1hzWnJEWV9tp1fL74gy8FvEmK+tZthK61NJVKlYpn932VYxGTJ590ryxavUTRFaJYit4etpxOB1Srw14yar58+YL9IQJF2EqkViTZga4Z8OyZ6Bv8zXd/j+O4fPftezwJP/N7NSYKjuxFc20Hw2p5/PhIIsUkB7c2H9Ml+/KEH8uehCBgs7jDtISTj6MrRoM+rhnxb/4rAev6/od/4h9//Zqvvv4Fd4+CPe/l4ApbUXhyKxja8uKOj6sVvWDEZiP2qK7YXF9d8erNOzLJYvb02XO0tmLUF9/39scZXs+l1/NAOtmyhtV6z7MLlzqWgfAhYn+/xnbFWTKdBszv4M3ffuTf/vLPAbCxubJ9VAkzGToO33/3msl0hBcI+4zKlJ4/5vX8NY3U8dPLLn/0JwNmO2FTHXuM5jqkZc1cCnhOrhqOsYZvdRgPLqRdt6z39/zsKxGgaHlLkac4bUgi+w08RTtDkmjEsytNS1HmBN5A2o/G4VgxvQlY7cRr/dBgeYzIlCNfPBHw6PtZguX2+PJLMec/vPodL6ZfML/7gOoIv9PmXRa7A0/++IJkKZMbyZEn4ZDTSvjeNK7QagsNE0/C7YttRW/icTpkhPK5HMdif1RRJOzqGO+pq5a8qLClRqKqOjRlTdlqjKfSZ5YRmq9TWcKG87RgNpth+wq6JT5Ld7s4vYasOVBW8uJ0EqQzqtQIS48Roevw8589Z/4oYontZs3AdCh3ET+fiHmn0rnfrtEtsX5JtcfILS4mf8BEnqfL1T3rDwd8//Is4pvnJZ3Qhq7Yo6uHA64VMrns8uHxvXjf7sAvfvFLkmSD2xGfdRiVXE5GHCVLZBrn1JVOra5ISxFkO0HN4+LI4SRZ69QfOe5PaEZIVwpFR9GSk5KBrmJL1s9/+s0/0u+OqKTN9LpdemHIh1dvuL2V0avWsl6ndIPe2bZUVPJkh+eK14TBgOHUZ7s/0pP6lE19IG7W9PsDMkko0+12SY4ZhbyUaWYMVNRtjOcIOw8ci7w40dQVkhQPVTGJkxxkfKOqgJVSlCp3P4ggt9fzeP6zG+Jjwet3wj+1usKT2xd4tvA7ZVOy3+0pYp9ZLs6Wi5s/4KtfjDnuT8SJvMg4Fo2aY0jykarQqNsar2tiB8JekzjDsbuYmgvStweuQXqMifciseiYFqWS8sWXz1htBQT/cEz4xS++4NWrV6RSkNxwcpqywg878gdrnKIU0xQxHAjffnvr0igqWS
nOvCZzqAvOArfXVxPW2x2apuBYwq6bIqXWazaHhI4zlZ9VUpQZroTpX0xDomTPYDBitxVxQhzHGC7kx4SVvBjs9iZ5AZa88PUHOdlBYzNX8F6K3xyGCvHRZzIVe6/IU3TDZXDRUKqySFErGLrLbrfEMjtyjWvSvMW2xO+t6pKkOpKkOZ4n1r1pMz48LLGCEW5HtkucSnbb/VlnLy8SOv2A4yGnlLpsutkSBB7bfUFXCjUHqslstUbrSlKfqmS32fLv/90fU8h7UIPCq9f3DC6uaSWt6GG/40695+Ja+MKtmtEPe8RlzvqNbGU51Fzf3tA2kBRi/19c9tm+kq1L/8L4/bhctdAfiw28O8TU84qqKs7NgH//n5doaIzcKx7loaVpOvezI4FswNRtCMI+3/72B64vxORePHvJbL2hKUw6kkHE7Fp0LrocUkmb6/RxbJvlbH5mPjPMDovtmjRNQTLOhY7LdDI4s93oBrz68SPv9Ef6Q5F91QyVpIbGts+OMM1zVKMiPoqNiKlySvc8v7jlKPu5hoNbDFPFgLP4aF15DHybg8w6xHlE4Hd58/oeVzKdZeWeJrJxfY9WisfFSUUY9M/Y4qw4sF69Q1U8urKxeLFN0VXtvPHXyQrXDllut6SpbO5WLJKioNkeuJiKbG+WljS1hiYpz7fHiGG/T1ScWEoKeb9jUFc1Za5wks2jbV6xPj4SyorX8TCnLlviQ4Fh/XPzYZ21LB929CQDVX8wYbU5ECWSkqZuKMsYRVPOTdNB2GWdpwy6F+itDDrzI7oT4HgSZ98AtYKKxl6SXCzm9wy7U5I04usvJd1umVBFCoPxpwPR4tWPr+mEQxokcYNWoBomrepgymzhL//EIY1VCsns5AYNHz7cEfg2jWT4apqc0+FIXUOeC9tzPQPTsFDka1aLGbrbpVQ4UxXPFw/so4Qw6J9FmUcDh8NxhyJRvYHvkWUKVVYShuJCdNxusLUemhJiaSJI2qUZbdDw7CtJqX7csZy9J4pMXFldqtUtc6wAACAASURBVJOIU+Ry++wL3t+9F+veFMTAqCeCA0NvMMyY7MHi5lZcLGxP4acfM1pVO4sG102G5SkYkiY4PR2pUo/+IODuo8RzVzFV1VLmks3ndGJ72nBz8wSvEHt79rAnSk/YhYsnKxhe4PHmhzVuRzx3t3XI4gOWWtPKincWNwx6PcbD0Vlk89vf/UianbgYiyqD3uS02o7f/JAwuRCHa0dtqCwfLJe+pLZXKwOtrZlOxfx+//aRTifAVGtiiWlvKpOr6QU/3P0WW2ZflSbH8xpIxBoPByOMxsBUA3ayB0PVSsqyxLBBbltMY0BHNzkcZH+O3qBgs3hYMX8U32fWFYqt0nVNbp6J6svd/AOTyyGlTLYEuo3SGNT7nJ0jGSGHt2TRFq2o8SQrZWiPqXPQEK85pjVxodMQcjGW+9FuuPuwoNO7xjIkquD9nMGgSycQyZXHxQ7bnLKYKfyv/9v/DsB4XHE1fcKPv36HIquDqqcwmLzkV/8oxNZvb58ytQM6+gBbVomyxuDjxw2OZ3NxISrOWZQJBlSZEu6EAY2is5mdCKTAc5mlXF48JU9SWinsa1sNlulRyD7F+x8y+v0Ow18qZDKD+Rf/3V/xuHrHJ+7Kw8OJYGQz/tk1v/u1CNw8J8fWXCq7YvtRBOPj7oiKgvggEin+YMBsMWM8njAaiwB+tXpHN5jQGwe8eftePHswZNjziaWIeL6z8DyLttmdM96GHWIYCi+ePWG3lmyozghN2XL3QVyonz8P2W7u6as+eSGbsp0O/Z5HU+woM+GLjtkMx++hllL8tCwYDvssZh8xPiWUpiZZHuCYNmpP7K3p9TWnasHtSyloHZ8IgiGm1vC4Ec9QJQq6bVJHa65kz3OUnIgPR5yRCACzvKasUiaTMdu58KO1VmHbCqrakCSy180tMHSdMhbna1NXZCWYToDflYQLRU1nGBAne+mhwTdcbEPnKJN/pa2wayNObzdcjMSFcntYQ1Dh6CG/lWv613/1P1AUCYujYAvsj0KCIEQzWv7hH4XoNa2FH1xwOOx4Ki+rs0XFqx+/x9aF7b98cc1um1AVJs+uBMvnD9+/IfBtDLXPTlY/nj254XRao0uimLZouLm54f1dyvogEzymSnc44XQU67naxvQ7DrPHGV9++UcA7DZ7fF9DUzQWj8IWVa0mT2KuLsWc93sBf/d3f8to3CMWW4/Zaovn+qRJTlcmhqoyZzgaoclqepSlPN4vcKwhdz/KBMH1BNfukJ9qnl6Lzy/LFrWbnPtlfvppTdPmKIpKnIrfMh167JYZrtFjsRbP6YUqaqMh84xomsn9Yo6m2mjnS3XDq1evuLkNuX0mzubZbMfHx4L+UPZObzOSY8CL5z1SKetw3CxYLpcMJmNUSTKj6zqG4RLLWOL66pb3H+9oVRPLEM8eH99gGDpVBT15WdWcDmql05EkSapVkWYJ88UdYUdcHv2nQ8oyRTdqOqbw21lW0wu6tBJNEx8TamoMzUSXz3RYbkiONcOrAa4UvvWNPkqV8vhB7PVEiZmMJsTJAUNKsaBZPB4f8QOLppHJf0/B0nrossqoqy2O47NdJgwHwt9/9+PvqJSS3vAGRZUxiN/iuzbxWjxTJ7jEMdbkVQaSzCE+leiWQtXIZEu2R1G66LUFlryca2AaKrQjPF/4lDg5cNxX1I1MhOseu92e0z47FxbGoydUtLRVwUIWNxzdQVN0AkmkksQ7HK/B98d0ZVJttd6TJgUqBqlMSMZqArpyTkgeNmv6vsvhsOP1O2GLjmvw/MmULG3JUzGfY9+l73cp9lJmpbbojgNOi4SLK2nnUYWCSbdrka/E2eV6/5/+7n9h/H5croCs/OQUVLIsw7IsCgmlU5WAY/qB7Q/JOdOZZnuiKMV3RMC3O34gTeY8vbml05H8+LOSy94UN1RJG6mVZHR59WYJ0oFfjafoWHx4eMNw+ImxEFarudStEAfp5cUt28OanYTWbbaPhP4VphHw+CAuTm73QGVq9DsWy1hewvBRKgVVFxcLJSmJaJgvN/zsawEBmM0TtscC129B0mQ/LhagDOj5n1L1DXmZM72cfEpK05ITn2qiZEWZiX9OJgOOUYxly8zE9kjo9nBtj0xmyq4vbqjzlKO8uIGCbfvsN7szBMA3HZIixw0DokhWnByHqijQNVkON31O+wjH8RnLTH1RJBhGi6bWaLqE7qFgWRquzJIvdnPyJCUpShzkoRI4ZEmC5/sMJI31/fIdZR0RugP5fX1BAtGmfOLkrbIcw1QoyhpVQmLiNCI5lMj+RNqywDEtLMfEktmfjm8y6IRoqs27N2LjXd9cYIcRP/0gKnW3Ly4IQhvdTNgfxbxEeYJua0yvQ/Y7qWHleSIIPjvLGtO02exP2JY4bE/7E47l0LY2qSR42EZrbNtEqyW9t9PD6GroocW7mazeNSZhYBIn27MumWcYLObrs9p9HEUUeYujeeSyQXm72WO0KopRY8ikQdhxyJoE5AXeqH3SNGXQ72J0hF13+gFJpvHtP/2AJrODp
qfTcQaUstr0+O5Im7T8L//zX1KpfwPA3cOMZ1/YRLGLmovf/OFxS9MG7E4iE2kqAd2gpcxCXFuSuRg1aVKQFp+c+oHBeMJqE7GU9L6TaZ8ystFbh+NJ/K+KTV5cjdBdYT+PdxH4K9KThuuIi03TgKpqtG3LYS8riDq0lk5Vi4uNrWrkacLzl78gzaXwBQ23Nzfs2wxDVlZrFHTLxHBlk62nczhumfS8Mz18xwlp8hMvvvToShrrqrmmaVNiydRldDRmHxfodksjK4q+36e0amy3JY2E8//4ccnz589JZbBjqhDnLU2m0ko6c68/YLPZ82V/ykY2gVvBDaWu0r+S0JpjxXK9oB8qLI+S4ngwQlNsRuEAVyYIDseYplbPDI03TyfcPczo+DoX/U+X8RU3T7+gqg1UqXn0B9/cUFYzdMkC+PxqRB1VfPHLPo8bAbMKwz+liLcU5QlPYj/2GQyalsVO2MbF5ZjN/YLX2QOehEZv1jvGY5fHxx6xhDQ2Vc5Ff4LVF2fJcrEnOUWEroUqKeqf3d7y6u13/Ps/+ktOrZir9T8tSe05sYRL/cHNH1PaMYPba04/ykA/cnn96gO/fPmNmJPohNv3ebN4w2AknilsUg5lQb5a8uRazEvRnnh42DK9EEm2WG0wUoO0Sslk0sBSQxxX4/3DT+iGpBN2HAzNJy4EzGuxXnN9ectut8I2xDnleCp+6HM47JmvxBxfXg9RTY2uhBz6roUy9tDVLqXM3ldoXAxfUCQKmSRXMnSL4cAlkpe0/Tbnd69/JMoq0kicXf2fjWku4bQr+fJnwj89LrcYpokl4X1O5dPpBmT5iTyWLLjdkDxX+ObrX9LIhv39omTQf3omYJhOR1RFRotxZlrzOxXrTcnV4ClVJBgoURuyWOdTYlPXLLTaQFVsChmU5UaEjsrD/YwXz8VFZmgMmJ827GrZUJ+d6AxtFFvnQZL4DAcX7PI90fqR3rW4LP79j3+HretM+k8BeQGzFBarBHmU4Fg+3VEfpc3ZS4bU/apidNnlyhGJmrcff4cTBMzXCXVeybXp8ObVI91BwCKSSI7WpTewiSQcPXS6bPc7msqi1cTv268jhlcTTEcS4xQxrutS1wseH6U8DBp6qaBg4usilGutFFP752rn4XCi2+2iaganVCROJpMbRpMQpT2gtsIWN4ucfm/AQbIM53mNYYU0RUrHE2e1puTsDkvGgy7f/ySZjo0eN7dDtlLnMos0bCfAMAwGV5Ky3nSoqg2qWvLzn4k5/vGnOZ4f8lSSav3jP3yHobtUdU4otjbl0cKze1R5TSMlMC6nLlULulbLZ1L54qsOah0wlCRFabpl0Lsm2m3xJJpF0zSKoqZtJAFSlOJYNuvVnquxsIOrySXb3Yqu1zn72tXykafXL86x02adEHQuGY5tNkexDioGm7VF4F6RZOJ13e6U5eqBblf8GMtrycsU27QxJMtolNV0exec9ifUSKxXeONSFPk5Pp12LzmeEtTa5JhIQqm0QrUaQjdgJeVn0irnl08uKArJBLxbM56OUBqTnUyAvHhyySlLUE2Lg2RIXq10dDfBk5XktNTwvD6dyZ6TLCSEbh8rKLi7EzZtuT1MQ+e4K9DlHr0cTBl0e9w9ztgsJHunX1GkBZZENbQUaIrPeBzSykqZqoKi2qCp53OiOtQEvs39w3sAdLXD7C5hPNHZ78Rnb+YVdQtPngx5kGROp0KlbAzK4lO8qhL4XTTdwpD28ub1gSdPJ9i2g90Rz/Vnf/xzvvvhO2YSafXFL/6UU5ox26wYyLYHfzDECQKibI0mE7WbpUz6/yvjM6HF5/F5fB6fx+fxeXwen8fn8Xl8Hp/Hf4Hxe1G5UtSWSDZ7Bn6Pujqxiw9IyQ6yPCOLXW5urjlKMbD5csZg6LOT+iOK4pLuC3o9g50kmAg6XRSz5nG5xZU6KVFcoVjWmQ77tF5Txy4dd0AQiIxCWiW0FNhWeCYp4LLl8fEHVEuqSPcusG2bsqzojET2zrL6pPGCNI25lHoVZROCkpFIStfR7ZQ426HoDh9nUgG7tLBdneS0JznJ2n2pk5YHWtmA2Rv0sG2XKDpQpCIrbtkGmhHh2j5qV+pq5FvKNKCS1ZFOp0OZGNyt7s46V2lUs5otMLqi1K05LtEupyzAkBkMWhNVLVHalkY+Q51rVHGG6QqzcW0XRbewPRVDUo5rWoiqx1SliiN1rcpWo6EgkRn+9FRi2Q7TXodUZitOcUGW5gwvujzO3os1ns8ZTjtksjlfUxI0raUBSgm7LFOR0cuzglYV2YlO0MetXdRSvG97WmM5HoFtUmoSUnnKmRdLhv3Buan3FD0wHj/FC8X7phcd5rOGKDkSJ5IeNuiy3RWUdUIrG4B6XQ3f96hkFlA1E+bLLbZhYXmSXKHS6bguUaLzIPtjhpddHMPGlfoVDScOeUrS5OiSpts3A9I4YzKZUNay6fSQczV6QiV7ksbjMbP5kSLOUUoxn6ZpMnRCNtstBynUXLYZL794SnoSe+3hcYNtj+g4Gkn0qV+lIolPDLohusSGn5IdjtriSjhRwYb/8X/6D5TFr/jt34tKkqINccYRReESSfKYtvLwjBGpVF53bPDsgI/zt3T6oho5sHs8pGvGEjqkaAXH/QZDGzDsi0rA4uFAr1+htAaRPLK6PY9TdcfIloLM9QNFUhE4fSIJC3LdgPrQ0gk8KkkQcDHtM58VuB1JVV77OOGQH+/n1JbI5vWDW3TFJJotqSWUtW5yer0LXn8UvYxPvrrALExe/eoNnsyYDoKGY9zQ8Z9jSLhdvkvYzLf4EvZRNyaqEtHp+xiKyNoejxFaaLPfVSQHCaVVcxw35GosKvVvlt8zGV8z6ekcMmEH3U6PIknxFYtXUnSye91FNUy2sqes152geVcsHx9QJK17dmzRa4Oe0/D+/XvEZNkEvfEZbrs5FOSlzjE/UcsG5cO+QFcsLL3lyy/F+fYf/+7X9MIeeGKNN9sag4RWK5gthJ11nSVe6NBXxxwiMcdO1+XbH35NK8vwrz88QKuS5KezDtTFRcjxGJEmW1yp+1Y3KpFSsHwU67CcH7i8eIKpG+SScOHJ86/5/jcfaKuaj+/FGZupLa4zxlXEb6nSCL3qcXqvEMv5/NtffSTPQ9YLMSdFu2Zxp6L7Lf2JOJePh4aHfcR1YJ6JGnarDK9ncCw+aSIVtLpOU4PnSarrYZ/1ek5aaNxeCxpyU8vIjhVJIn7vYHRJkq8JfWgaKXSlNahGSZG3hJ2J/CyDqpwS2FIcNF7guCHbzQlHnsNYPmVxIo8SKklMYbsdyrglykW2+2Iy4JhB2B1gV8KPPtw9QtpwMWlIUmEvvcGI3WFPV2Z6f3r1hqZMCDsTyvITTG9BN3jJenfkE6PEPn1AVbu0UvOpru+4GDyhapMzdK/jjmG5QG2zMxlAXCjkTY0i5zdoXXqdkDhaUUtB+bDvUJwifv7FSyopOv3D41umF0MuJBnBel9iKirr2eycbb6YXHAz9Pj2VFDJvpq74yuGXh8vEXu0TCu6wxGRf48rxXiH3Qmb04yq
aGgb+VmXffyxTi+U2e3ThKqpOWZbVEv2eCs+WaxitxWGPAPcJiBOK/xA/J0kEVrTpchTVOkvNM1gvz8ykRTnm9mCzSZGU8Kz/liWPXJaqlxfTRhfyX6u5Ld0OxNmj1Juo7EwDIOiLen25Tk1DFgs5liKi65JsgGvoWlLLq8lXNpNmT9GTC57XN2Kvf5h+SNN69LUAY4r9oxvWxS5xmwm/MCLLyesVnM0xUKXYtXHKKM7uCbLjxwjYXu6rmJaybmCkp8UrHGApTholTybOx4qGfEhZXopnmt7vMeoTBykRqD6SJqcMPWatpW9y36XokgIfJNP+FrLtpgt5tjmP5/nvucwe7xHkdBhpYbQHuK5IYuVkIy4uh2yP64IAlHJMq0jpiXQEKasQJd1zX6/wTC1s4zL2/cbXN9jLyVy4iyl0wuZreb0pQbqaHKNoln4ak0pbdjWDbK65udfi3P/44c3KI2BrltcXYkz8M3rj3TDkNX9DFXuEStwaIqGTMolxOmc5SrB1G2CQMynYSq0mskpPtGVgrklGnbYO8c36/WBNGoxnYZWxl37ake5S8iljmBoTiizGl1R6UuEhmep3H14Q5aBrkgET2lgmiaOjBeXj0eev7ilriJKiTK6u3sgSzUMo2ItY/k//8Vf0NQJgSPOuzTb06KR59YZQbBzHsjShv0m4tK9kHMcM3r+hNdvfpDr0mI7GmkZMZWtLY4ZoZsmNzc3vP7hOwDu36xxdZ9ACrBvNyfyZkYwaDElRHz9cIJ9yXDs4jji7MpkdfpfG78Xl6u6gko6pvuHt9hWSLfnM38UAYNn9+iFDnFcc3cnjP5i2kfHRJXlPsNSSZKGIs0oZDCQaCeMUZdDbeJI4bZytySpG4xWNv63Bk5HQzcUUhksH+KWMBiQZiesTxok1YEordAle2AQ2ChKS1kmOJ/EVt0u+2NMkrRsP5X8OxWq2iFtRbCzOp5QapUvvrgGRZSMP36YsV1vyNIaWuHALi/6FIUgwwAok5r5ZottdhgNpZDqcY2ueEDO8SS1L0yHOJ3jWrK2jsFye8d0NBYCdcBxdeL6yRTTFnOy2bRodonhXaCYsmk5jbE6Ng+7NRdTCXfZn3j+1VdUUnSzbjJaHFpV4RR/CubGVJVOqSaUMnBqm5YgUDll4vsm189Jy4jx0OHxQRy2re5SaxnHvKCVDGn9QYCp2PSl2OrmdCDP97i2RbqX5ejxJXXxiGHYFLlwOo4BWpuAhE9+/fUF0U4l8HwGstyv6wZF2pImES9fSkYvfcr7+ztGshRM47HYfgdaQSghI+8ff4PvDtDyiumNcDr7ZENZqnR7YjvNZ3N6bpdO6HLYCliSUuns4hJFT7meiENyu1vQvfbOIq27I6hNhaMVFJks01cHHM9gd4hICjEvtt5BdWsMOZ++EVBEe/7wT7/i2x9E429R5iihS3S8pzMWh8Zp3xI4PdaPwjEals70aoxvWdxJwe6UiqIqcF2L4044vq++9qgTnwfJ4vjX//2YD2/+D759D54m7PXlN0P2kc3f/197rl6I/ff0+RM2h3sqKQ44uPqGh7uPmA6YksxhfTqCatPtyQAsayizAMySw0nYWZImeBOX6n3KH8ggonZtPsxDStl3Enoq2xiuri+ZVxI206j87OsBb1+tCXzxvjg6oegamgxo1X6HMj8Q+Ba1vHirmUZBja6HZBJiEPoBdZUylEyL6buc+WGFZbsgNWYeH1MGI53H+St6A7FnhuMRi1VxtmGt0Ulbl+M6Adl47wQ6RXPiejrlJ6lrN+35FNmO8TPxvnFzQXoq6V4aFCcJt1Ui3JHHKt7gDkK57inNtkFyx6BFJ+bLGYqX4RriNVkS4QYuqWnjXYp5CZ0hp+OWwa34fVmckKUxKh1CSQbiew2apjIaX7CRgXdNys2XL5l/kMyj9ZZSc1hva2zprOa7lIla0iQaqj2Qtr9lvY6ZXMl+h8UWIwi4fzvD/0LsvzzWKKOEn39xxVb2rGZVTHK0qKTg49PrK3zb5eHuDU9uJFHD99/yl//11/z00+tzA3u/35DmGbmEmmdqy9XU5X7+iC0JGLRThB/UrJbytyk2X/7smqw68eFB+KKwDOkHIa1Wochm/LRIKbYarWRaxSookgOBPYZWOOfyqHJaVQRXY/JKnHlfXX7Drx9/e4YApeWKrGq5veixl/Mb2LcYpUpRPWCHkjhlu0LVPI61TNhNQvbHmG22ww/F98X1Fku1SfOG0JON2ts5o4HB5igC6v7NBdFhR05x1nhsywov8FmcHslk/63njAmCAcn+E/lBTemY3J/e8eJa9Fc1mUfWRJyOBf/uzwRMzy8cmmNDLPsG1+WWn373Fu9CJZDspBaXdEYJb+fvzr1T4UBlOSuwzmvXZb8+0WjQep9UoFXUUiPZqngj2dCeaQy6YxYyeeVqHpbaYmoOn5xBUabM9Iqs1jGkrx17VxR5TCwvGqmjYSc7tNagllDoSX/E/P6BU5ExnIo5VhWdrIj4zTvx+2zNxjNzCl3Fk+K/SpxyIoOyRy7nM+ioqI1KvysTBkVEgYE98DlKPR4jCDimaz7Mxffn24Q//uYP2d9/y2YjYOvTmytCP0XpFywUQYhA1eeH335gI5PQ3as+Zs9hGAzJUnHGb9cRlhYQuAa5ZF98fvszHMvm+9ciMO34Xf76r/4btrs9+72EYs1quuEIwzA4JtLXajXLaEEioW6HSKXrj1mut9S6eF/gNKTZBk1RWT/k0oYMNrOEdiTeN7q0KO2WJlVA9t6NrkzSMqVtC2bLV9IWLlH0kp20qSc3tywfYjynYrcTsGfTHKCpUOQqyEuL1pYYbYtuiu/f748UecXV5JqTbOGYjq9QG4v5/COeZE19+2qLGwacavEaWxnQFAq2lXOU/dsYLX5gYigq6V5eVh2LtjygSEh+3TbM11sux1fUuViHrFUwtITkkKEjnvO4rbC0BjTxmihpcAObTqiylyyjg1GfLNthmNo5iedaJu9mH/Ek+UjvYkxZZCzmS64uRBJvt6lQ3BQ9tFFP4mJQZi3jS53FvdxDlsG047GOI3JJRGNoNkqhMpDJgUap0A2dwOzQSqKhVRrjWi5JktCV+xEsVocDSiGS7N98+XMMN2GzVLiaimTqm7c/4YU6pzRiJJ+9UFe8+T45J0T9nkJruLhTl63sk9rHJZamczG5JpWQzflqQ1rnKKo47+xBxXobM2kglW0zpjvk4rbl/esVjieeYX1cEXQDel3x/Q+PH/F6Bk5jER1lccWwubkIqKoYxxD7X5Eabf/a+L24XEELisRgYmDoDaZp0+uIg7uuUk6nmO7QPGNmFU2nbjVyWVVptArH66JpCpNr4YQcz2OzPFIne6JSZpJboVj9iba71FsqrWS729HtiECmrFrKJubm9pL7D2ITf3xY4jg+iWwYXi4OvHzyjNMhwZNBxP60xzYrbr8ZUcdic757s0C3Ul58IShd7x8+cnVpst0/ML8XBj4ad9AUH3NS40na3PWiIk4jauVT43YfL/RYr1fMluLQMgwdOywpi4ZG9mYcDg26ZmB
KVqXjIebpk6847Y8Umdiwk4tL3nz8ji++FNmg+/sZ40uVyhTBLYgme1MzMVqNXIpJhoHPbrvEko3jWZ6yWj4yvb5GayVTn2VRNQWa0iXJRCaiqiMW8xZPGr3jKTzM5uyWNa4Uctys7ri8uURXIIvEfE5HFxwPG3ZbiVW3XE6nkn7Qo5KZwdliQa1qeGGNoojnPGxdLFNnOJRkEq1Fp+eRFitS2edS5hVh6BEOfApJgLDezalQUHWxEX98+5q6bqjrGl0X83s9vabXG1FXCejiuW57Vzi2x//9H0X/0ZdffI3jWayXGRt5AGsm1G2JrcDNWM6DmXI1HrLaiEDq8e6eJ1/0WC+9Mz2s3ZvihCp612P/VmThp5cQRQmZDBQfVt/SHVokRUwwEmszGP2c+9++wnEc8kKyn11OeXO3wJXZJlUzCDwF2ojLJ+LSWSlCfV4pYq6GYh5GoclR2fFn34jvW72zGYY+z7+y2bwT8/nT6wOHqGZ8O8BxJXNcNsMLG1RVUrpWFtPxCMdRePWDCJZ70w79qYKMtXh8WFC2NcdjTihV3EdTg/WuQAltjpItyFYyTrsHjLH8bN0mUF0U1cWUJe8sy0jyBCfQqaT0gmGq1Gl+poffP74l7Di4psZRqtbbHRe1aWmOa1LZr9LWRxS1PItXF3mLZVn4XoApG7BLtaUuHaajrwj6wqY+fHjD9dULpLYjVVVguxZxnlNKwpckU9Ask8fZguunwhHaqkoVp2yXItgKHJN+oLMrS0wp0mrnDW2tUpQbuoGYQN0N+e7VHVeX4uxMkz29gQuGRZzKytzohjxrUDA5HkWVv8wTrq4m7GRDPYXLzZNLwjA8s4PWuUKaZST3HwTTJxAMPMpGO7O4KaVCXWVYhsd4KsgcTtslUezz+HhHpYv5M1qPjq+jykuuqTlsZ0v+8A+/ZPZB2EagD/ny5XP+4dc/YEnikvnjPY7XoxOKeTIMi0O0Ight+iNh12/fbdivuxSJybEVAcmL51+z+fCe58+fArBZbfnd774n7PYp0k9C0T3QLU6l3LO2wv6YcIi2DIfCNxixhTH0SPb3pKUkCJmEZMfqTIe/T7eEnk28zEg1YQeBuuWXP/+SV7tHahlsLB8WbNMtVSEDm45BntSkWUmSi7XK8j4YU8AGQzKfNVMCp6KRqIp4kxGnCZapC5pjoIpalNsjnq6B7L/R8grf7FHp4rzRixatrThtU/pBX9pBTKqe0AwLC0l2QEOtVOiyP7bb81jsDmiqQpLIit62xOl5mL0dS2lnXz/7mt/+9lsU2lCFUQAAIABJREFU2TfkxyE3L5/xN9//Dd/8XFyE19sPaEaFrjW08oKn6Taa3qBpwqYqNaEJlzT1P1c1TrsVw8GAJN2zmklq++kVx2LDMRfJFdO0WW4bFuuCly9FAnS2eYdWO9iuRSEZ72JNQS9rHNkf4ykO6/UaI7TJDp/8ocI2PjEYDbElQsE2dFaJwvZe2OvzmwuqQkGvNC4HMmnQHDEVi6JKCUzxDPmh5fKyTy4vEU1dkmhb0qrilJXSXl2MWKWSl0Ij6PP4uCOcBNhSKN3QYPT0ll20482rN+IZnr3AHgQ8GctkUpFhOzrr9Zq99DPXV1cYSkmWRBwlqY5haxzjJYej+LsuLV7f/Ybvvv2JL74SkgqqWZJkLQVrTqWwPY8rVqvkLJFTJhvqVsG1dAxDIl5akziOodV48uyTFMMDfesJw7Ekyyr3rE4R4UQjlygK0/Y5pQlVbZ2lO9qsQbcsLHn5qJKIptyQrEc8kVW3x/lHFEwcP6CUCXPXs0iyDq1kks2TmH7vCstUaD6J3mYJXd+nKmrqStieY+nURYxhiL+jeMN0eMlht6YjWerKtqGmRW11Vkt5floOoeeTy6LBuGuxXO857hL64SeiiCNdrc/N1QUfP3wLwGb3kabQWK/FWTYaXqMYLVV7OiePPT/DJMeuLymdTyRec1AHFPIS0Q0NmmZL1/f56ZVkX+z3cVsbrVbObLJ+1+T0EKEfxLzs3Ii43mOrLUUi/FPdKNiWSSaFnE/pHpqC6+EEVElecaiofBPTd6mks1PrFkv3sKW/6roms9UcXVdZrkUsYzsah0ND2LMoJdvrauYw6HskB/GMnhuSZBnJasdGMoE7VoCiaWyimMd34n/DmyckpxivK2U7yogsV/j2dwlFI/zcX/ybP+T7X6/ZH9bYljjfxhcmSbo7V/jHE4uqVNBqC9+QXAmmQpxt0SydSpPESdonPoR/eXzuufo8Po/P4/P4PD6Pz+Pz+Dw+j8/j8/gvMJRWimn+/zmCrt1+9WcCRmOaOmkUo2sOhipuloraUtZ72rallNll02kIwz5ZJjJnTZtyOlZ0OgahxBZnx4okrYibDTQiy+B3fLK0hUJSs/tDOl2V0DeJE5GhXS23WKaDoau8/0lkSG6e9XiYPeJ7IusYeD1MXWO3vefqmcB8LrZLtNrk+mLAQbITnU4xg9EERWbhLNMmz2JmiyWS4Ic81fEDE8vWqeRluG4TDC3Ak31gddXQtAZRvD8/w48/vcEPVRRszE8lXNXBMnVmkmklDLvoagcVledPBYztfv6BptbIZOa1rQO6I52yLPlkD4qqs9tuqQqDvoQclXVKvz9iORdZgCSJGI56aJqC64jX3H34QKfXR1F16lbijaMTw841yOpa2hRsDwmj/gWxhEF5gY+umyznMyqZzW6UEsdVKRsJ/dQdNAMqtWArWWMG7hjTzSgKE0fqK8wX77i5vj6veVbtCDyfslApZQUj6GjUlUq8V7iQ+ip5diKpM0JZ/i4LIWpcVQ3DkYRwfvyIomj0hh67rciY3kxeUtUbQl+UlT13yg/v/xObVYsmM9du6BElJ8q8oO+LcrSuF0RRQSUFGVtV43B6JDna9EZiHUaDp+Tlnu0hY3IpmQePO46HlFJSuiulxfXFmM1mR38iS9YOZFsFy+nwsBRwCU11sK0G2/gkTL2lpaTOFOyueM7tKScwNJ70e/zR1yKr+PCwwgo0gq74vnhxoq4d9nrOu7dirvL4QKYpJOmBvsSna4pHmVeUlWS8s1Se3N6wWW/JskquMUTxnlJmjZ4+f8Z8ueZ4jLEl9fx4MMSxXL5fvKaQQilfjsZMe1PePoo1cH2PQA/Iyh11LaGtScPFxYT1JoJWfFbV1PT7fQo5d5vDnjZuGHZG5LKqopsmlAbf/u4NA1nNfvJsSF5HvH33Vq6dh+t0uLwYsdkISOxkOqLMdWwrYLb8HgDbU0milkzC2FzHoa5zWkwqqYliBQaqUWHpAb2h7NVMTzzs18yPws5fDKfo2pFVk3E9ElWb/bsZ+wQuRg0dT1TGX3+Yo3oKQSjWZTffMOhP2SUz2lYcLt1wSnTMaesURepv9fohdd2SSwbR8WDK6RRj6w6KzHImRUxexHR7IxJZ0dN1laxMcWU2b3uMuLq9RkPh+9c/AdAPA2wC0vaRZCeeq2p31IWKrblyngwMUyM7tWw34nz5t3/xZ9SlzmK1Q5Esn5rRsNudqLRPQo4lge1BbaHJatpht4KypeN7tF3x+d
vtlrpquZE04fcf7nFth+nlBTsJ/UiiNaOrL/AkfPrj/D2t0mA6onoN0GFCpifkp/i8bw1To0wSVF38tlat6Ngdom3GphLz9M2XY5pc4fG0xJNw8J7pcuB01rQq24QsL/FtA1fCl6vCwDdU7I7JQopjjlwVJSnYyfflioOi6NiWzsNM7PWr6yFxlTIMBuiyP3W5WqObLqqVyvVMsfSASf/i3LvckOKHPXaHHaXs3xwMb7AUlwspl/Kff/2fKIyKIAjIpX0OBl32SUrXUZh2JXQ2hkhRMFTxmmLTYAxC9qclSiay8GHXYLnakRcRnoT8GVZI4HeYSXpxd9RBV2NUQtYbUSUKPRNPGXB/N8PpyUqOknCIIxSZTff9MbvtGj9wmMo9g5pT5irH5MhQSpHMVxHpMeLiuag2eZZDUiQci4qeLnxtx9eYrbckacwkEFUhVSkoWgNdk70ork0cVfT7XSnjAtv1jusnXX71D/+A6z0FoOt16PY87u8FlM8PE6KqJKtskNDv8eUlRWaQJGIO4rTBN3yGU5XtUpx5l+PnrOKEMqrYbcXZNX4WkuTtmTGNysD0HCwM8kI8k22LfpkqM0BSck8nHd7+tESi+3B8h6Te4Opjbi/Envn22/8H2+lSGzWBXKvZfYSiKDSNOFtCq8PNuMvhMEOXlYEgHLHe7qnqlFDC7Vy7g28HZya5/WHFenvE77Rnodhub8zV1RWr9T3LlZirQfeCqrVpVVGhVRuD4z6i2+ljSwHdtDhguz2oT9B+0jw6UNfB2RcdT1uG/SsCz8EURxd1pZMkCWEYkkvIvWLktGRnxFSLS12m7NcVT54KP+76Dnfz1wR+n1BWgH9885pu6J4px1VPxXICVg8HJrJvaLVd0XH6XD91uf8oYZ2NRSfUmUuYfh57/If/9hnf/vA9ryU0unMxIL1PeToYsJBMeWXbotUt/Svhr3bzDUpZo6k2hi/htRcW23lFW5gUsrrcGA43/YAqEgv//eINmdry53/y7MyCnaU6hllyOgqbUk2HrtunzhpcqZU6Wx5wXBdNbzAcyab3+IjX75+h2WaWopgWnW7Aeid8g9L6dLoDltsZa7Hs9PsOoeNjSn4D19jQnI6Mezm2pId3fQ/bL9msDe4XUrJB1/nwbs7T58JeD7Jn7Mn1LX4g7hdJcs+P38+p2uosbTOeesznBwxJL51mWypUVM0illVU1zNQlBKl1kGxpG2U/Pb//PCrtm3/jH9h/F7AApumYS11L168fMo+PbKNVrx4KSZyu91h2S1xHGPKwPDq6pYoTcilbonjdHCchP1xxT6SAox6iONYmEbvTKU9W8wxdYOOFMuL8hXFWuenH/eMhqIE6LkBSuMQnzbcXEs1e7dLWy3xbPFMupKRnWImoymJ5Mcv44YwsEh2DfFJ/E9VWrbLE09figB+sXoPlUavO6ZBOJ2b2z7zhy1FrpwDhNurZ7x/d8dR9o9ZtoJhdIiOJaYUQH727AWz2QOGoZwvFq3UBru5FpCcx8dHHEfhYjpldxKbpdvvsNtGjDrC4eR5yX6zZDi+JC2EhbeVj2u5NLrCo2xWHU0HbHc7KhloeIGPZTqoWsVaCv/6YYhlORyPO0ypYfX85itmjxsUSb/ZHXTo9CfURUu/LxsG5zuaCnzHOEM9FcNlMOpzdycw5lmeoRg1uqcQygufa9WcogOWPiHLpYMZd4Ga/eET5MEiLSKi/5e9N1mSLMnO9L47z9dmc/MxPCIyMrMqK6sKQAPoFrDZLSQ3xIIbvhcfgSu+ANct7EVLk91oNIBCVuUUc/hobrNdu/PIhap7bYhdL0CRUJFcZIiZ+b2qR48ePec//3/IUSTpRTLXaGodVW3ZHKTy+OiUfJ9jS9jj4bDD8QKqqn4SZT7EB3zfJyvhN9/+SwDSaE/b9khiMff/z//97/j6V6/4tPv0NAdJdCAIXXTT4W4lApn+kUGlKbSyd8L0WlwlwPbhhbywX99+j2n79IdDVhuxR9KsI49ynkstE1szuLtfEY5tLENqly1u0f0+eVNgqMIZDAKTMOxzkL1MlWlRUTH1L9nFj5hyCAOdTmn5j//pv4p1n57w+npFKw/ihw8qv/mLkGrY8Oq3Yq6We4OHn3Ls8BRf0pUfooi6VnnxXPRgbPdzOsXk6maJLR2+Y4eYekApdZmyfUVdJDiWSlOJ+dSNHqv9nH7PZruVjXSmz3y1IpKCntPhgLYp2G8UbKmzEfYtmq7F8VoS2cNyfPqcu7v7p8PTdDraHGytI06l9IPVkacpX748pReKg3Cx+kTbmgyk4KSi2RxNLijyJZ4jna2icEjm5EWM4zz6riWHOEVTxW8buo7r9dhHByZHwob32YJh/5QiNvl0JYLjNLklvBii7cVzjnzRMzFxhyxvxWUg3nUEU48sr5kMZLO81VGWNcsb8b5V2fEhuUNxSjypwfL69XvGwwnnpyekiaQ4bxSSOGYvkx10DzRNQ0rKsQxMN9slXmBQFgl1IXsEoohWi/BDAQtq6oSuhg6LgdS+sXWfdLvAdUckrXj20DnFDApMW+z/LE/Y7HeUG5tvv70EIOgpvP7hAcNpgMcgSSM/JFihtOn+EePhgM0mYi8hjoPxgDY3oYmwZAO7qoJm6Wwknfmg12c07lFVKZ28AAXeDK1Vub8VgU7XdXh9j/nDLWOZsGuVlPiwI1pETwQTSVERei69odh77z/cMnYDZhch2k7qq3g+d+sVddRSmLInWIcirugPRGAaZSbT8Tnr1d3ThdbzFAJH54fXn2hNKaT8q69YRBErCdf2JuCYDUqZUue6tLs7Rscv2KUH2kj8/tFkzOT4iLu5CM4P0S0P+x0jv//U6F/XpiAcMl0qCf06RLdYwTPW0sfH0YHzL064vnngZCx80OGwx7RDBnrA3Z2woW9efMt89QFFlyLtZkyyzxmOAuxG7KNOTWhKBdfqU8ng37VtDE2jL+FT63iDVnaYeoehCTtQOpXp0IVqSutL+YK3G8pGZzQRgVQU53z5zRcobcaDTMKMxj3KqKIuanJLBNq/eHbGf/7xB76WfuphuWax3tDzAoaSBOqwX9FVFXVZEcrgODpsKfY5PQnBq1oouzWdklJIiJPX84jKDLc3e9JKcoyKm9uPPJdB4HrzEaVw0NQOVbZHbHb3qGqHJwlKsqoha5bU2pB+KOzu+uMStd/huj6+DOoX0Zy8Uhn0xXrullscwyOJE3oT4U+zLCEcnZArB6pS7ON3P2/JioTjY3l+JEsMt09atuTSzr66eMmbq4/47oxEUrb3fQ+/bz8lVylNUFNUW2c+l7DZ0Sl5emBy2uf6o7Ch6cDksNujyaSJZQbMxiMUU2V2JtZ4v07Yb2KUTkOVEFUr7FFv71Hknt1tKvpDB0NvqRrp86oOJd/QNjplvpFrtaMfmvT6jzpUOVm6Q+80NBkLTmdTlqtbsiLFkRpG0WGL59pkEq5ZVCWTqUnPOwaZEKzLivFgzGYXcXIsoIl/8ut/wU8//T22DNjjpERXKgahgWuL752fzlBJWN6njEdiTQ9JxCFW6I+EX2x7N
cv1nqPpKcZY2N3NYk4vHKP1OwJTPHuZWrhKTS3j4SAI8G0DlIrVRpJHvE2wbQ8vzHn/RvilF89/w0OUY6hir83GIzq9oSpVOtl20PNciqzkeCrmLskKiiyjzltMeXE6OXdJE4M0PeC6Yq3Ozk5Z7SJ2W7Euf/HNC24XG5IsxlBFPGwHHYcsIokVnj0Tc1c3a5L0gCUlALKdzVlPY3tzQ9WKPTs+gfKhxDYcDF3EebtDzZevRmiyxyCvG4LBiEatuL6TxFtVS1W2HD8LSBOxR1//vGB8ZDwVbrpOYRdl2F6DKm/eXddRlQ111aBLvbHw/xc6V233lAW4/vhAURQcTYZPDaCuD9t1waDXQ5Mve3//IARvZeDY1Ca2q6I1Bl0je21UhaLLUCsdR/ZFtG2N4+rkUotD02v225TZZIamiSA32qW0VcF4FLJ8EJcN3ax4fn5GJRlCbF3D7w/YbZMn0olfffUFP//hNf0e6Mpj0FBT1jkf3j9iTA1GQ5e7+ZJcBnM9t6PKWlyvQ+nEM7x/c0N/EPD6nTjo0zTm5ESjKpUnfPXFxSm6qpHnO85PhLFeX1/jeQ59KWzcH/SwDJ27u+unzI7nhYSBjSYbsufrB0aTkDovOWwfs1sdSq7j+ybhiQj0bTcgK1JeXYhD4dOnK/aLLZqtoGvCKQdBQFpEdDRPQX3T1vT6Dk4gHMj+sCMIbKpqy2otHLCleTRlg2MoGD1JLKA73M5vObmQTHJtw8ebD/juEFUGZckuoaoCVK1kOhWb+up9hmGWT71TaQZdo2GaOnkmf1uzKKodumqwl02oSfKGfm/KUuod6JpJXXbUdcOwL1XHcUmShDK22UXCQT3cL6hyD8cXjvT85YD0kPPy/ISyE5ds2o7tfocduIQD4TTyfEmR81TRq3cBVR1zeTnl+r2Yl/kq5cXzC27u1tTyIPQNh/NLl9VjoDgOUHoW9njI9b2wF6VzqLZrev7oSbSwayuy9IBhSmbJCnGpcTP8UNidFovDovIU1oWs/G1ydlHL3Y14zj//9oJmnNNubBaJeKbZdIRzUpIVFkvJjDmdzNjvclKpq1U3CvfLW3RXp5NZI2/gkMcFz/vCptI4pk5yDNNFl1WNojxwl+ywmxxNdeW8HDjs1hxL4oiu6yjrnBevRlSNJMtYbjD1AXmeMr8TgeJ2+w7DENVIANe3CPo9ZtMJi7fiM9Fuw9n0C8o0pZH6e6bho6rqk0bRYrHAsU64vYp4fikEbvf7HdFhSxggyGmAPNUwNf+pQbqtdBRFYXw0ZfMgkh3rLYx7PXa7ew6yidcye3jqkJfH4uDo6gfKeICah9x/+DvxGcujy1rsfoDxqKNXg22qOENxELaNwmZfktUNO0k6MxwO0dF5+9MVfiC/p1YoekcoA0DN8Oj3DaL9jofFnbQpFbW1ubu94eL8Uv6+St7qHD1W0+ItD/Nrnp38gqkMOlfLOeHQp8tMzFA2Fneit88JxN+7+35OOJ3xV//jn/H77/4TAFcfInp9H13XieKltAWT0cymKsX3tsuMtu7Y7hecPxN/7+rjA4PgHF2zySK5R/whu90OVZLlhKFHU9UcDgmzExGYrvcPFFn21B9nGhZZuuf0eMbQET7o7vYBrW158ewcQ16q319d0TQiqQTw8uVzbNUgawpsR2aEf/8exzfRLZ3BSPan6mC3FrUixcf9jLraoakNttQI7Ps+SZLw21//kvlS+JtdVGAGAdpeaj5VO/RapSv7HF+Iv1ekDofdBs2sGPVFn0u5y3mzek8pzxhfHRKcwtXNLVPJ3pnnOaZtEth9ShlQKk3OfpMS+mKeQs9F7wxCK+QbqYX145sfOT6fcPX2A8djkdibnIwo6z15JS7w+JDfNaSVQufLBEGy5hdffcHV1Q2aLkkuTI372ztmx+KZnNYXqIq2ZTOXCbO2h6apZMWWdC/nz+ihKypFLlkOQ4/F3T3DYIxniMBUbyqasiTw+sQHyXh34fLs2TmtJBZZ3G0Iex55VnF7JyrVjuPj98R/y5XYD4HbRwtFFQRgMrrg5va9YGzshC2k6Q4DnVZVaPVcrhd4vv5UGfTNM05eDtjGH7n/KBNKgYumL1jdysx5/xwviMmKluU7Wdnxeoy9AZmypZNC8dpBo01LVF/M3SAAT1Pweybeo0J5V7Pb7TBUD+vxjNQjToZnPPKFXH88EBz38UODJBdrVRU1o+kJm3hBIpl/nUAl4JLdStjiswuT3T6mbixmMxH4r7efmF3Y7PcHTs+EvZiqiuMMuboVyaSzkUOjxlSxydFIrBVFRVnvUa0OVV4yVa3i7OQrZJsk06OEKFmTpflTLFjVOk1nQF2hy56ZX//6gvndFssW+2MwDAg9H7X1aSTp29sPP9IbDGhrlQaZoA8DAqdPJRO34aBHGu85PwspJPKgaQqsbohppny4EsQbuuoxHgyoJOlB2XboKFi2RSL72qzAw1Bc0nSDoj4mRWtmJx6rlUgoV5noK03LglgSi7hajmW5HO5zNJkwc5SWlpL9SnxGMyMc95LDviONJQpmZpPkW/I8YDgSSIfpsc3VbkVryl4tU+fhU0qVFtiSsThNDuiErB7u5RwMCfsmeXyglHNQNA2N22L4DgeJJjsd23iVzbAn/HLPDnho1+yihKoQZ7SHimYaKIr6dNnpj3pstjd0G5mkXSeYvoV39AqjEM9ZU2B4Hp3Rp5Ji1b2BTdDvkUgxad9uOCxWXEcFf/5n3wIQN1t2ac2nD3NohR9WOofpZMzvf/97QCARTKXGMxzuZYIgCEY0XYXl6WhSg3AtWZj/qfHP4nJlmjrx4VGQq+b09JSmSikrSeXrjjj96htu79+SSIeYpyr7eIchM25VkTA+0VEUk14oDO5+vcTrNKpc5/RUXBDqrGYUjpmvhAGs55+YDGcMfZfvfxCOdDo5pVVLuqZ9YnuJo5RB4KGbsvztm8RxjG3pmLJa8On1Pa4VMhoOuLoWDjiK9vRHnlA0BbbrhuVih+OadJUwnjc/blC1iOhwh6aIoMF2TWgcnp8/sqrcYmgdrdlw7Au4VhIfaJuK4/EJlWRMKbOU8XjM/VI4LUs36DqVyWTGXlauWmoC/4x/+J1Qn59MJhR5R1HsaGrxTGVZU2UNitkSSFHmvKxwXZerG7Hx9/s9/d6QVqkJesJQV+s1dVdyPJuQJ+Kdl6sY3Wh4iMUm6CqNuqlolBTjUfww10VlyWoxZDaraUoO+zWOIw8cy+D0/IhtkpJLsdWBHbCJV/TCEbuDCB4t08dx1SdI3u39HaOzGZ/e72gkNGowbRiNQ+Kd9kStW9cxu92Onsz6GbrLITqg6hppLOzA9UIU1WK3L7i/EwGfZXiYvs56Jy42pmFwcTxl0h/zQVbdfnz7Pf7gGMt1aCTdvlFrKK2KLpUq+06f+/ktXRTwixdijU+PPdabiPOpQyrn0y4V9E5hci6+9+F6g+9ZbB+WDGTVLd6X2IZFmxVoUnC56iyaNuGwEQdjmWf0/R7bTYQu6ZLbpkJzW8pGR5VkI4WyZ3+oGfXF9776yzX/13/IeHH5JV++
Eoflz28+8Pbv7vnFn/0W1D8AkOVDZrNz3khq1MHE5+oqZjx5TlmL9Vsv72iygotXUhz80JFuNVq1RtHF3zMMAwUd13DxfWGLVaKg+yGNhAkWWU55yFg2W5r2EbJmYtkazb5gPBbBhusM2O8iUMUaKI1G2J/w05v3T5dO23HYrVZk+R+DY93w0fWMXihgF71ej7Le07U6thRE/jh/SxAENE1DIUWRR6MJ203MXgpOT4YjQOEPv/8JR0LpZscnOLZNvFsTHIkAtheO8XSTu3vxLm2Yg++w37yjPxX+pm0twlFHVzT8/d/KKuNRyOjUp5RQ2tVdjNmJ/mX3SMzB6dE50TZFVxoCWQE6ms2I05L9VhxMhqaSxgU9r89hL4JVRVW5ub4WEDnpF6sip2o65vcfAXBMiyJLaeo1eSEvpnpOodn0+gZVI3yzkuRUK43Vg/jti6MLvPGQbXpDIQkefC9kPLHZHxJaSbNs2hD0TG6vRUDbtBrLRc50dsJKZtNPzi7Z7Tc4gYqZCRtO0hTfsxhI2vymKlGMBtNSmc+FXzJ90JqMSPqRsDdmt94xcE/4/h8Fbe/lq1MU9YjtfEO+Ej73/OQSFbi9FXt9MD7j4/U1ThBwdScC4fPpF8T5B6yeTy3p4He3C2aTU1QZFG7TJZaSYDsmSicqAfeftkzOXDoleRKdzrOWeP+ec7meq5XGen7PV1+NebOQldzjEf2ezd39NddrkYybBB7j2RkrKeo9G05YJyvaqn2CZ+13axzbQ9c0Jn2RuLj6uMZ3MopSsn4OBxiKysl4ysd34pyzVJ/5xxWYGl8ciT3y45v/wM+vl4wlhfvZ8wnJeo/qBuSpmIPAnXD96Yqqrp7E6mejc/o+T35KVWA0HPHp4y39QJwzpmbw4ec7trsI05OJr7ZiNjHZ7MWcN7FJHteoZocuSYt8c4wyWBBXKsdDERP81z/8RL/f57ufhJ+qy5zT0Qn/+Xff03cU+c4zDF9nvVjzxZlIJFy9nWMGGrNH2ZUq5/x8TFdXBH2RhBm0fe4WCaBSSTbCgf2MKHpAFre4OD4mSdekaU5vJHxJ5ztURUgtoftKm2FbAYd0z9mlmM+e5XLYQ66XYIjnXN3lXJyFqDLeqGvo2NMf+Lx+I9ZqOJnhWDWmppPsxVnbNTqd/sC7N8JvnA8vmA09Sg3+y/dSMLgXMLMu6GoNT5LH1HXN3fye2UBWcQ8H+v6EMtnRk+iERnV59+EezQwopDDteBCSpCWKJAXYpRviKqJY6mQSOqzoLcG0z9XDHY6sLtm6y+aw5fxMkGwMjlri5JJGXfLTj8KuVbXjsCswqPF14TfSPAFV4SBtbLV+QNeeo3YHTOnjh75H1wkR5KUkXBgOh8wfElQJK9/tN0wnJxRNRJzKy7E3YLl8IK23hIFIcptWx2Gf0cpEhmkomLpOldW4koq9rGJa1cUPbNJMBOp1K2D0VVNKuzsjL2Js18LWJQ35aktRtnieQ50IH6toKodqDZ1MXhkB601EFEUojbCptpxhqDVpvsOSUkRlp9CmNY0mfOft7oBljyiTGEsm7FQU5g9YZonIAAAgAElEQVR3nD8Ta7DZJKzKDa9eHPHxo9hrVV5hdDZFajIcyUv1ZkGLiy9ZTt9fXZPkGZpukiK+97C0aJqK05MeiZQhWcwzHC8kCMX/b9Y1P71tGfop5UHMy3re8urrCZMjm9//XjK5ug6WG1C34vLaVjpB32A2u3xCNc1XFXWt4wcjBjLJ7YUVb35+w/hIJk7vU1qgoWE0Ee/SVBq+O6JqDnRyb0kE7T85PhNafB6fx+fxeXwen8fn8Xl8Hp/H5/F5/DcY/ywqV7pmoLUS5nVYcnx8TFl1DEeyOW+Rs3z4mbBnQSYeWW1N/vJP/jV1KzID+/hA0SW0rSky08DR0YTNao2lqUSy8dZzfLRCJ5Bimf7JlNnwOdv9PaO+yGTpio3uaLiuRxmKTIemGdTtgclIlDjruma7zbFtm+ggbuGW5bBYLBiNx9zKjGVd16i6Rikp3OuuxrIc6kNFLoWTQ8/FdX1Go3MqiXOdTEe8ff+aWmYwXr44JQhdoviA1MZFxePlqxNcN+TtGwEVchwPVdGxJWxO61rCoEdZltiOeOfTsylvfrxhdiyqI52SUqQdHQ2dFHfLEh0MFcWyaZRHjQCH5fbhqbnb9nokZUq0jcCQDaCWgqm6LLcRh41451CWVBVdZHp6Xo/1akF/MsCW2js3t/fMjk9ZRRvaRsInkojJeIQvtRzyqmWzjVANk9CVzavZkn5osdnOcT3xvaZxKNKKWEIXfGeKaXUEfQvDFvmEwdBmt4v54tUFqkxBLOYdcZYSSzr6om6wLRXbUbm6Etnt4diGTuPsRR9yid+OMhT1gRfnYj59+4Jo/8BquaeuxXO+ePkV+7hmu1sxlpXAm+sI23A5Honvffh+zt3tmjJynsSrV9GcQe8U04SjkXhQtfFZRgccV2QGLd2kyWOUtsW3n0m7yzF8FaXr0GRTdt1mlI3JeCKeKVprZHFGOJzg+sLuNtsI03ZIkpxYaqV0bUMYmvzJNxK6sNnjWibPj56xTUUF4cN3d5hjj6vr99SleC5Na7i+e4Ml5X8UxWAymjIMLD5di2y60dp8cfkb7qSO0HqzJRgMycuUYCCzo1XJcTgm8KCThAtN0ZHsIk7PhI/oOoU0yen1dHYr8ZmLizE0OtE+e+q9wa4IQwNNFdnnQ7InK2L8UcAukdXzQsc2HHIlQ5UCiIHrkMQNaSazVopOVnZ88ctLNomoWNq2hWPbbDd7KqmPkyY7Ar9HKXu86rJhvdtzenbMyVS8n2FYLO8WmFpIGAibWqw+YeU+DZK2V685mtm4jk8lfyuKWuI4YXWjcH4i9lHdLFk9VHRSX8m3Azy9QbddLE9qhFUpptFwdjrm+ERknD99ek+eNWSxeL/RcEQS66SHkkpWTGazEU7gACq2fM6xAjcP10+9jK7ucNSf4ZktH34WGW/XDXEHLT98/5rTl5Iieq9x+uyEQhILOYqK7jRkZcJXvxAwyyRZkWYH+qOQaiU1CLM1y7c7Ro/QoQZc12O/36HL6kvVlmz3dxyf/ApPajwtf/49o+noqULTaTqWrbNaP2BIUday1lldHzBMUbEZ9mY0tcNyueRE9qKYjs719T2UBqOxOC+yuOT05IirK4F8aNsVluFQHkpOj0VF1nBVqlwnPyS0kh56u9lxcnb6RFqQpTXT2TMMw+bhTviyfuBiWh3/+Lu3XJyJnqD76I7tXUYkoUNm3+L5lzPq8kBVPkKVHjjcTlAjhzPZ2+c4W1aLDxydicx5Gkd0hcaf/elvefuz6Cn1nCnnZ8esNzu0R+kMK2M8nHCzEBqTnt8nzw+oqsr9rahE9Ic20+mUIip4/V727do6zy9PeIjEZzY7C88ekDsFY1vYwc3tFXUn9m9PEgIlUU0v6PH6tfh755fPuPv4QB4pjGTV1vcsXK3A91sKCfVcLWCX7Z4IkMzWwTQ1kjyirqU0ynVN4Jacnz7nd9/9LQD2qEeebmllL4Xth8T7mq+/fMn
2Xvj9+eKekXYEKMzvxXo9uzzj/c177EBqWgYBSmYTdwWa7Lks9hFNHaG7LbnU2sQssS2fspY+QU05ZDmd5pN1wp9anUZbuEyOJB19kUN1xElvyHwu7Gy7LxifOCRLlSIRZ/SzZ8c8vxyzXopnbEyNosxYzDMUmWu/fHnBarVgM494VHweDG1224KLL8XeG/ohVbXF9aeMp8LOrUDB0BROjy652gr4W68/ojQiAk/s49CbsjnsifMtpjGUdjDgl7/5NafnJ3z68B0Ar7//B77++pLn8rc3SQ55Rn9kYTzCQ20Vw1apspqLSzGfXVVy9/4d82sBUbu7LxgdDRhMHfoSPbPZLrH0DkXTnoiSDklBXraonXhOvz9ksdliaGAWct2tHm1d0+v/kcI9zxrCcEDXPuqkxaA27NMYGVLS658QDlS6OGDxINZPt+HZ6XMKWXm8urnDGrhYhkFfktV8mC+olQrDVmg7sQ4vXpxyOOzYbkTcOR0aqK3GZrEmsMT3bCMEu8IIPCpV2H58SOnaEDopI4FDUaWEXp+ue2yzUHE0D6XTkNQFLG+3tGVHKfsrmy7E7Y3xhxGGJASLDyvGxxqJbNPxwx514fMwL5+EjUtatttbtusIR0IMNaWiM1reXgntzYnjsthueXl5TFaINU6rGM/tY1say0ycoy+e/4K6zVHk2eBqLaNjjYe7Ja2U23n2p6c0psbv7l6jh8JPpWWB4TTkUnsrS1TibEUU21xtRNynKDr9vkPgKqhSjinaJTjWFKUWnwnCOVd3W9yhTyN7ICcnU/JDQpnqTyR6Y9mD9k+NfxaXq6KoKVLhaCajC1Stodf3ULpHPZ6UUeDjehbJQUxuWyss7vY48v38wEAtfVQ1fGKgq+sWzxigkhJJRqjz41OatEHuQzzvhLc/zRlOFC6fiUN1szuQlxX3tzvOz0Xw9tOPHziahSSpOCgWDym73YHJBAJZ9kz2NcPBlI8fbjEl1lfVG8qmRrY3YSk6rmtwdbPl2YU4YIos5+6uQMXBcsSm/ru//x1loRDKpl6Vlra2OOxWT43b0/EFWVywuH+PJRsbVa2jbcF3xQE6v/2I5zk0lOQy4Nvs9nRaiWEKyGGUxPihgao4mFKbwtBCsiZGVzSSSMz5u/kb2q7g5FhAwSzLRdFUJqOLJ0KEQtmSpxG+KfSZABQ7ZrVOUCRcq1QiirJB6WxSqfUR9h0O5R3OyKNNLLlW54z7Ibd34uDvDQZkZcVydYMuoTUdY2otoinKJxhg3sWUaYUr4SKOf2CxynH9PoVsxH/zds7xyYi7+yWtVB7XjQIzUGjlgVO2HWVe4YQ+YfgI16yJDxn3yxazFItabGK+ejVFacVvX324YrtbMugH2JIAoWsL9BpcU2EdiYOvN5yQHUp+9zeCOacoE4aTKbpdEqXCQem2zf1yha1Z9KQQ7jyLCUKDUpIdkKb0R8/Jy5KNZKXLtT3NVqWrG5ACiIPejNEkoJF9YA/ZnMAfCBICqUQe5yVeaVIcVIaSGKKsMgxnz0QyA/28anh1rNPzWn74g4CadtuO6S9CFrc32BLvv1xsCPreE7QuTyJcyyON1/Q98feG9hi1ySlbcQidvRyxPaSMvB5tJi5pN1e3TPo+ZdpQFZJ50J9w1htQl0u55hnWJCSKNvQCAdM5xBGGXnN6PENRxG/pukpRlSSySVvXHHQ7oNNVRrJnLjA8yrSh7RQqGZStDyv01sJxxd/frneY5pjb+0+UlWQiLBuy+ICqOBiSQUzXLMqiIpYEN64dcno2w+vbRAvx7KYZslvtmR55ZKUkmChdqjqjVMQa9HUXq+3z0+sPOBJnrxgGiyxlenr81Dw+64+hVSgqcRA6PZ8oyTE6k7UkUumUhMl0gO/32G8fWfcMfN9jMhF+6/5mjWEYXFycPTF1tSiin0Ft2a8E7NhBod/vP/VAtGw5GZ7y7vuPhK7wnYHdJ1sX/Pabr6kr8ey1dUN/ZPLwIPzyvqmxCxfVrrmei0vZw/WWFy/PqaqKpP4o3ln3cIIelif2Y9dmxNGey8vnrDZi7m6v3+J4KnQW3/0ggrnp0TFt17DZijk3VI39Hg5RzlASGWXNgaAf0BsK/7PZ3nJx+if8z//LX/N//O//GwCL1Ro/GHD51QV/+F4E50E4YrfZosm10jUVUyvI45JQHvy1FZM0Dj1No5GJtunFOa2qcH0tnik8m/Aw36HpJru9eJfp8BlxXHF2/hVRKkgZ5vdrTo+mnFyK+a3dhnyVUu9UTqfyXeIYp7AZ2i1qKXxJVoHvDCkl6UV0eCAIjnn/85rAEb7lF99c8LvvfsCy+igSOjs9NWgz0Z8GULUFYTCCDsYTSVpi9jA0DcUx+LQWiZLji+eEPehWYo01Q2H27IRos6CVukV+YFGUFm1X0g/FeTQcjlnvPqDyaJs1zkCnMXJ2G/FbeQKXF2f89NOBvi3OvkG/Qmk9Mtn03nNtmkrn7u6eb3/75+Lv9VXW61toUgJJTKEoDlWdEcpESt7VpHGE7ShMJyJ4dHyD9WZPU9XcS9+h2ApBf0SNsJflumCx3lAZf/TfM3dAMCyIohxLktrE8YLVasPFi2+Ena+3pHlLZdTsZR/2kWNT5AeeyTXe3a+ID0u02iOTzG5fPptxs/uJNnMYyoC9N7NIMwdTl7bRpURFTnrIsHxxXswfrvh0tWI68kklA5ymnHFx+oxPa3HWvr+75jd/8udcP9yjaeJ7egOwwfT6aE0ov2dBY5LKvJTS7nH7DtdzhX/xxV8B8PwX39KqLb4f0pNJGUevsc2KqztxGUlrHbsKwC24l9A9Iy2YliPOB89QJcnV7f0CTfMIQhGrbeMFWpizOZR0iHeeHI0JAoN3bz8wOxPr53kOW23HIRXn1X6dM530MXSVUgopV4WB49gk6Y6jmZz3TUS026NrIg4Lejp3i2uKwsBzJInI+orBCOq1h+08auYdsVxE1Ir0w+6ALG3ohx1xIln/+h5FGpMmOkhB6zQp0bWAXvDY47nA9xv2uwPNSOqi1SG9qkNdNoSm8IP3ywXBxHzqW6SyCMya7b7hWArR27bOp7crHAJaW7IYFgJC2soeZEtRiOJ7DmjUsvdctQwMXWPzIL7Tm2xwTIsshpu5eBd0D8c2cUObQuoEnk5PuFvFNLLvTG0tjkcTijyllq3opm4xGJpkccKwLzVP9z9Q5iGeJDEan05Q7Iy8svnNl18D8PPP36EGA1R/QiqZD/thj6QskeaD73bkhU56qBlK/Vhv6KN0JX/7N/f8q78QPkHvOu7ufhA6h8DzL75AU2/IywqlE3FDEu1Jki1loWC7j/qbF4CAj/5/jX8WlysAry8beIdjqjrDc6a8fy8yNIpaMvA8Xr/5yPMLkQlsB5VQwVZko3jX4dkV8/ktvZ6YyIePMWHY0e/3n5rqb25u+OXXL0kP4hDScSi7BxxvTHYQzzB/eODs5DlJsaOQrDTHJ30mwy/4/sd/AERWRVdsulZ/arI3jANYOflmy4nMCEf7nLKKsGzZJ6GovH97jRcqHCS15nB2jOM0xKxQW4mFrT
Usu33KaHi+zT7ast9vceXi3ny6Zuub/Nt/81e8fyMCINfqMGwFTdLRHo2n7FYRbjj8Y19WnqGbFXcPIvA4mb5kMLC4+nRDIjG8Qa9hcbujPwlQpThmeDxm0PdpZFCv29AbjjjsYhpZQdRwmIYBVRcTR8JhHKINF9M+Mv7D7hzcgUHdNtx9Es/d73mcX7ygyFNS2Wuj2wo5FbVkUGzLlLbUKBOFRorAWl6Jp7nQlFhtKP/NoXFK2kcF7drB1nSaqmZ0LJ5p3BvSKBq6qVJLPL5Wt2RNQyszfNm+4Xho8tPfXTM5lrj6jcJ8GzOKHTRP/H4wDDHdKW/fiAyNgsFwFNCiE8iqzdXGYG8kGEbAYiEyzt5BJdNyMkSGZjQZYzs6QRA8BeNZoXB+HLDf77FkJmGomNx+vEFzxByge8TxAdMzqBDfWy9zjiY9gmHwRIQRhh67zRZHNuLrmoZjOsT5mlxSAA/8kK4Lqdslv/qN2GtvP8yZ2n2qQnitYt0x/GbGcvXArbS72csJTVxj2kPKWMojFCqruzn/5t/+tZiDq/eslw+UVc7RSKrGZxsobVxbXMiqg4pWu9y+mzMMxKFwfnHMfrdlR8JU9jxl+YqeOyaRPXSeomMrJlmmYcl5yfIMzSrFxVFmLA3DxdYcKjnnw+GQJN1x/3CPZckegSHcfpozHMyoUnGJUOwQzVIpI1kdrRwO1YoqrcjSR8ZSg/7giLZtkUlpbMsg2kZMJ+L9NE2hyw44YchB0gSn2Zaj5x5xvKeWlaPQtUiNllcTSSubZHz/3d/jBhbrRBx6I2dA9KnA3uf86Z/+CoBDvML2KgJPrN317S22OSDZJ/xPf/1rAP72b34m2nbMjjT2km1R6VyqrqKQB2pdpLjuEbt9xsNGJAP6vQlFkaGbFrbsZXBMnbpqcMJH2tweu/2Csy8GTxf21XpNqxnc3S/Z7cQecd2Qjx9umUyHcl00lss1SZzgeaKS+8XXMzSr4IefF0xklWi+v+Hl5QxXinreLkpMs6PK1rSV8KfeQMfopjgdJLGw63qYUdKi5pJJioR1uqUfzKhkYJHtar799hW/fy38YlUXDCY6//7f/Z+MJGtcYyiQHtjOP1JLe9H7Okqh0RuLd3m4fSDNK2ZTj1pmkve3e/RDxOmr50h2ZqoCtquMQpbOd/cpeZ7z6tVLHF38W+80YHMV0dYRuWxM/+br57ROTiMzu8uHK6aDEcq5TZWI4M42DYJeR1u5VJ3MStc2vX4LuuxhtU06pcXzGupanA0395CmET1fwZNJEs32eHt3y3AgqbWDgMNOIxxb4Irfmk577LYZF+MJ/7gW53YeH2hiE1cy2X34+I7+Kx/XDZ980smRTxZnlEWCLfup5osPrNdrhmeSQS1dYzkupjtBQ6znKOixW+eEfQNF2oKuVESbim+/Ec3rXZaxqXccqROyRFxM29pCU0M+3S2IZaV6OOqBUrPZiIUZjTzW8Z6HtcGrL8UzVJkglVJNg5PjSwCU8sD0fMz2RlyE0y7F0UHDwJN9GVmWsI0KXFV9Ip1YJh3WYESuiLnzgoI4iinSDFVWxu2+Quq7VLIfeBieYroN+zSnPxCxxMPmI8PwlP6pTSyrYm9/nvPiYkDZivXcpTv2m5wvnr+kkcK7y/mCy5Mx+6RkNBKXj/1+TtIVKDJTPxz6GHWBVh6eKkKa0tIWe9IUJkPph7M1mlKzTsWF2ghekW4r/uqv/lf+1b/+HwD4x59+h69YTKczIinC/qtf/3f87X/899SPZ20e0ZtM2e+3OBIpU2QaWwoMu2UvL6vDizPur27IJbrlq1+dsz6k6FqB/njeVw1vfljjDU5A9tr88OkTuq0/oYVcJ2S/3jCePCPayZuhesPEOqFta6IHMVf93hTTSjk/FUx295t71CLAckqmQxGrRfEBrRmjWTfohfDpD3fvefn8nH0iPmMFDavlFk+ZcNgLP6XbKkrV0VQl/Z7Ya0ka0QKnkgk4TUrSNOX0xRGtzGBVVctkcsLDaokv+6Isp6UqGqhlhc0sGI0maHrB7mEr13RMuk9wxz4Hkc/h+EyhqDyeX4iq+L7c8O7tJ1hUJLIPq+eO2a0WeJJpsShLAtfj22+f8/at2OtprmK4Hqa2x5JkVe/efcSxPX71reiPW727obUV0qTBtWVBojwQFTmaplHIvrOu6VC6kvNvxBm2O3zih99vUNQKArlnJlM+vF3TGx6YHouL9j5OiNM9vpRwGh6dQ1OjGjpVKs65u/d3nF7MmE09UinZ0u/1USyLkUT0JHGJ7wyh3tKXJD5XH++YXRxxf73k9Ej6qQcRG/xT45/J5arjkAijPz7xyfcx9/e32BK61yHgL69evUKVN0k6TVJKi8+cnJ3wsL4l8HzuZfB6/uyCQc8lihL68sJVNDb7XUIrmXrWdYZlG+x2GyQCjzAM8X2X+fU1RScM+vhoxPX1Na5k3PEDg7ao8TwHRRGH3jbaMpz6nJycMJuJ4HFlRyxXJbmEM/V6PWazKYbdYOiPuiw7qrSlN7EYSfazn3/+gYuvXjKTQb3l1lx9fM3R0RGtpFunVvFck7u7uyedK7SGrmufmvrbuuFwOGDaAQOZ3SrLCl0xefZMVKCidYGq5liWg++JTf7x6h3T2QzTM/AdSWihpJhmh63KoLAp+OEPP+JaLiNZ3t9FezRFxbJ89muZ6fADyrxCVR41uxps02az2xHITJala7x5/ROns2OQa5ylBcvlEldCAOu6pW4qhqPwiexkNO6zWi14dnnGw63IYMyORux2G9JMkhaoHbbtYdkuloQYHg47doeE8WhELQ/CvCgolQ7blvS3RYyq9Xl2+ZJWMkIqbctwOCKPc1wZABVlCaqOI22jN5iwXD1QVhWv38qGTxOMQEdTO3xfBGrL97c0TsP5c7HGjuHgeiZZkZPIZ59Op6TJnsFgQF2LdX+YLzEM7Wl/tG1Dlsd0qoMimWyGwz5JEmPZKplUWu8N+iRZTC0PpouLCz59mLN42JPVYj7/5Nd/SuANyfYVn96JQ8A2BijNjvmD8MinpxNcP+Bv/+YdgS8utLqq8enuipe//Jp7SSc+Ho4oW50HSYNa1zWmrjPoTVjKqs1g4NDr9VhLJrQwGJHVsYDbStzFKiopmpzBUYCuiXceDQPU1iC0xFwWUUGSp4R+D00SRfTCoZiXsqOVB3a0jTAMh1LCp7I85+P796ApnJ+JA223i5jMJgROSCnJVd7e3HBxfs6gJw+FfMPRZEK8TZhOhb2EYYjrDri9vSWQGjqWaRCGIQfJEKVocHRyzH5/AMkIN+4N2Wy3VHX3ZLNHJz36dh95TvHuu9e8fHZBnEdM5EWtZ3r8y9mfoTQ5UmKJYtWyWqZohpjPxXLB6bnO178+45NsPvbcgCTbcXV1Q7J7ZLMysMKOoSUOjt/+9kv+y9//A64/fIIYd11H13WCIVAysgX2lDhL0GSSq8tV5ldbdFVjMJTMg+oRry5F4/bbdwIOqhkOaVo8QVbevr9iNjsm3qaoEp7VD/t8uvkZXevwpA8wEgtN6Z5gs5qmUVUFr
uOxWIv3y4uM08sXzO8XPH8pmOuSQ4TmGhgyEL+5XuBPXIajkIcHEXh7nsfbt2+f7C7s2dzc3LDd7nFE2p6Uir7roqsGYSD84vWnG0JtwF5mpHVDZeIGQMdK6h32RyGO4WGZDne3V/KZWk5PT7k8F4Hbcr5mdHxCXddP8NDtdsu7958wfIPzE/Eug+OAH97/Dt8Tn5lNZtRZQRRvcB6r93WB64UMRyMSCXEqo5ykyJHbn6oDoy3p9UPUTtKe77eMpxOyIsI0JdSz53N8MmUfyYt3U3J8do6i5SwWW7meGl2rUlU6o4EISBzbZLfcMzkTZ9r8vmIb7TkZjViuxZwHQx8v9BgaPhtJcqFoYFoWOwknPKwPqJZBvzfmeCoJbYqKNM5Iq1jcfADV1FB1jflC+Jt0s6d33hckRtJXr3Zbmkahasonf5qmGUcnwydik0Oc0u/32e0bdjsRlJmqjqIIe3v8Xt9x+fjxiqEn/LdSNzhux2q9oDeQmoTpgaPjKZ3a8f6d0L5TWxfbMClksGzp0DUKtumgSORBXde0bftki2O9h+/7XN0/0EgYm2spgjynKNhK3cemrWnb+uldLp5fED3cEMcJuivWMwx7tK2w9+lQrE2WrIVegRxJFpNnYn86Uq9qv10T2C7z+Q7TFc8wOxqhobHfCdufTs5pWoPBsMd6I+ylrTK80OPtm+/57nei2lsUe8aeiWlIqHvbsNlsME2N0Be+8+OHezzfZ7/fo8uKTNfURFH8RIN9/eEjhhsSBi661Oi8ublGVfv0g5DdWuy1pmlwTZedlDMJgyGm7XN3d4chyUB6/T5lWeI4FqX0QV3XEQQBHz/KhEFXYKgOpqs/aRdqhkGWZXieRyKT4ZeXlziOyT4RcV+WJXRdh+M4tJ2IzeI8YuD5RPvNU8IzrzU0eIKVb7ZL2rZlvV0x64uz1jY91qsFZVZQyBB+2Ouz2u2eZGQ2mwVtWzKZjtAMWeXvGsKBx3Q64pmkuzeMincf5xyk71psDvh+SJQtmUqiFteH3bqmkhVw0wrYb3PeRB8IemIdGq2gaRqOj4+JJbOrokhofJo+zeVisURXzCffWRQVSqnQGQqlhPNpLYynfT5+ENDTpquZTAesFrdcfRB27TkuL56f44U6iYxd6qql1xvQl8yLy8WaX371kvvlgtMTEY8bmy2HeIumq2xk0nDSG+A4Fk37iIZSyYuScBKy24j9/+zZM3bpHN1Q6ck4epuI9/qnxmdCi8/j8/g8Po/P4/P4PD6Pz+Pz+Dw+j/8G459H5UqBr7/+JQDr1YGqbhgOB2xlNsQwTZIkoe+3T1Ub33OwbZNOYlW//+H3zI6HzE5OGMns1na9oqNBQcOSdN49p4drj1hIIcU4Lrm7W/HFyyMcR5RYe0GPw+FAr9ejVnP5uZiiFBhxgKJIsGyPtmt4kFl33dBQFZ0oTvn0UVTP5vMVw5ELsmw/HA6J4oKjWe9JwDNKt/SDAXGyYb8VN+VvfvktPd9CkZndD+8/YVkW6+UDvi8yyePxlK4tSZIEXdL5aorQJXisVnieQ+D7WLrxlFltG5Wm7Z6mP00bosOGwP9jdUTXXFpgf9g/isaTtTG20eE1IhOCqeM7LsdHJ2wlHv/i9IyuKdnHe8Yyg2kaFeluxXgsoAT5pqQoMjRNQ2kkBKAoeHZ2jmOb1DspNNO2bJYrdrJf5vj4mC+/uOCnn35AQzx/tNtimQYP97eMR6LyUJYpWX7A88WcDEdH/Pj6E37YPRYLOOxSHrZ7+sMRRdPK9TPpOR47SYhiOzqv391g2iFHpyKze3e7RAtCTEMhk1TTugLf/eF7PCkS0u4jajo6RaFr87IAAAs0SURBVKGS2imaYtNUkCc5RyOR+bj8ywGNC4Yrsj/ZvqCV8LXH/rG7uzlnp0eMx2PuJKmG67q0VfLHDGp/RB+HooYHqcHiqBaapqHqCiPZF5EXMZatPdGSa2qH67pMj1QOslk1TVMqCg6Hhycoa9O1lNqao6HInJleyGq1YbPe44Xit+k0hsM+dVE+VYUUCs4uT8nTR6x4jSn38qNI6mA45vr+ji8uLwGY3y85JCmWZeNaEj+ut5iuRqv9ERue5zkUFYrsnbQMk+1uzeDsjFRCW3uhTZaWBGH4RGSw2Syg1Zgdi0qrooieodnxhExmkhWlo2ka8irn/voRH9/j+uoB/XQsbUWh6xrxHpLqXlUVFss5t3fX/PKbV0+/pev6k+ZaOOgR54moxEk72Kz3rDcRs9MZhiv9S5ujNS11JjbfX/33f0ESHTgJJhRSdDrfZygGDAb9pzlvuxJdczg5FZk6x3PR7Y6qrvnDd6Kf4vnlGZatYdkmlqTN3WUFDTGGrFzd3tzTthW6bvAwF7YRhDWaZhBtYgzrUZR9S1eBK8XVHc/i21+/4uXzU3wJFczLHMqaJN3yzS8E1OP+fkuaFmzWIpPcNC1JWuL3wieI8Zs3b1CNlunRiE5S7lMq1FXxlOndryP6gxBV/SNpUWcKwfndLkKRz1kWDU1+wJU4/iDsc/78hPyQg6RZzrIMXdef9AB1XadsaqIkpjaE3Q1PpmR5zuLTNbqsHHmmR5nVeBLW1nMcbN1gsTxgyCqq0irYtouq6uTZY3b7C8bT/7e9u4uNrKzjOP79z7Qz085M29m2W9jdLqywvGwILBtjIBKCEA0qES+IwWgkhIQbLjDRGPTGaMKFN6JGQ2IAReMbWUWJF0YCm+iNCAgILhqg7Lp9b3c67XTeO/17cZ7tNhv1hqGzO+f3SZqe85xJ9tn5nZ7O0+dtnIXFEwBcecXlrFdqzM5NUwpDHI/cfJjmfmewkGMoF/177558B99oMxAWaSnXK1TXK1xzzbVMheXEPeEk+5Ns2ialMN+43ox6VxphY5+hsV1sVhtseoti6Pkora1y0USOTGaQWrj3NottllcWcD/7szc8vEGrVSERejvS6TTV6jpzc0uUQ89Kmk3GR3excno5vJ8JyuUyc+0NimHeaXo4Tbm8QF8SlsLcXqeJJaEv9DLumdxHX6qf5kY0OgSgvd5gcs8Bqq0UUzPR+5fKDZKwDKWwXcvYyAhTJ/5NNpklmx8M9+ssu4bGaKzVtt7PWqPB4uIylTB6Zmh4gKXlIhdffGCr9yWJkRpM0mw2mQ9zF/dcfRmVRpXX3oje8yuOXEm5VGd09zizM9Hni4OXXcbplSIzc9McDD2Uq8UKCTao1aIcUok+ivOr5EcHyQ1E9Wy1GlRrdfKZwtb76wknnR6gEDaTrawtkcsNsVpa2OodmJgYpVorb432WFlZYffu3SRga97wzKlZ9l9yEYvFFQbCvV4YGqaRTFELC2oVCgXmFuZJ2CYeehTTmWE2WmXGxyZp2NnPRROjeymE+eOFsf1kBrIcP36cdhgR0q7VefWtFzl+8i3S4eeoMNzPRqPCYpgDOZQfZjPRz/LyPCWP7kUjTaNeZyQ/QitsjrswO4/RT73aCO9Tk/xQH5sbTVbD/LF6tU4qHb1nm+H/19+EVqu1tRiXe/RcHhhIELbQol6vUyiMUa3WOB323ywMjZFM
wlAYEleaP00mP0itXqVvIwwjN9hMtBkaz2ztmZUbzFIqLlIJw3T7cm3cnXK5srXlQKVZxhstLJmkEeaU5fNZarUaM9NR7+vuiyaiDdAZYOlUNPRywzdo02J0fIxk+Gw02D9EX2qA9TD5bXR8F/lcjtJaiWyYt1/fqJDNpqnVy+TD/OJ3p07Sn85wajpaPGZ+2dm7d4KJvWOcCPMbl+bXGMqPsbYa/V9y+Wivrma7zVo5LHpRcdKZGpP79wBhxFIj6o06FbajGWga/akk6WSKvrAFj5HA3VhZXiMbRl9l0kkWZucYGQm/s09vcvWhSXJZoxqm7qyvr1NaaZDoH90aIppOZ8jnspRmo+xabSgWi4yPj7O6Gj1vEkloNZpkUtmthdoymRRYm1x4fkM/K9Ua7XZ72+ewWXbvG6LddObDXqK18DnjfzF3/78v2AlmtgRUgOVu10W6YgxlH1fKPt6Uf3wp+/hS9vHWK/lf4u7j/+3CedG4AjCzl9z9g92uh+w8ZR9fyj7elH98Kfv4UvbxFof8NedKRERERESkA9S4EhERERER6YDzqXH1w25XQLpG2ceXso835R9fyj6+lH289Xz+582cKxERERERkQvZ+dRzJSIiIiIicsHqeuPKzG43s3+Z2dtm9lC36yOdZ2ZPmNmimb2xrWyXmT1rZm+F74VQbmb2vXA//N3MjnSv5vJemdmkmR0zs+Nm9g8zezCUK/8eZ2YZM/urmb0Wsv9GKD9gZi+EjH9lZqlQng7nb4frl3az/vLemVnSzF4xs9+Hc2UfE2Z2wsxeN7NXzeylUKbnfgyY2YiZHTWzf5rZm2Z2Y9yy72rjysySwA+AjwOHgM+a2aFu1kneFz8Gbj+n7CHgOXc/CDwXziG6Fw6Gr/uBR3eojvL+2AC+5O6HgBuAB8LPuPLvfQ3gVne/DjgM3G5mNwDfAh5x98uBFeC+8Pr7gJVQ/kh4nVzYHgTe3Hau7OPlI+5+eNuy23rux8N3gT+4+1XAdUTPgFhl3+2eqw8Bb7v7lLs3gV8Cd3a5TtJh7v4noHhO8Z3Ak+H4SeDT28p/4pG/ACNmdvHO1FQ6zd3n3P1v4bhM9JDdi/LveSHD9XDaH74cuBU4GsrPzf7MPXEUuM3MbIeqKx1mZvuATwKPhXND2cednvs9zsyGgZuBxwHcvenuJWKWfbcbV3uBU9vOp0OZ9L4Jd58Lx/PARDjWPdGjwlCf64EXUP6xEIaFvQosAs8C7wAld98IL9me71b24foqMLqzNZYO+g7wFWAznI+i7OPEgT+a2ctmdn8o03O/9x0AloAfhSHBj5lZlphl3+3GlQgeLVmpZSt7mJnlgF8DX3T3te3XlH/vcve2ux8G9hGNVLiqy1WSHWBmdwCL7v5yt+siXXOTux8hGvb1gJndvP2invs9qw84Ajzq7tcDFc4OAQTikX23G1czwOS2832hTHrfwpmu3/B9MZTrnugxZtZP1LD6mbv/JhQr/xgJw0KOATcSDfvoC5e257uVfbg+DJze4apKZ3wY+JSZnSAa7n8r0TwMZR8T7j4Tvi8CTxP9cUXP/d43DUy7+wvh/ChRYytW2Xe7cfUicDCsIJQC7gae6XKdZGc8A9wTju8Bfret/AthBZkbgNVtXclygQnzJh4H3nT3b2+7pPx7nJmNm9lIOB4APko05+4YcFd42bnZn7kn7gKed23EeEFy96+6+z53v5To9/rz7v45lH0smFnWzPJnjoGPAW+g537Pc/d54JSZXRmKbgOOE7Psu76JsJl9gmhsdhJ4wt0f7mqFpOPM7BfALcAYsAB8Hfgt8BSwHzgJfMbdi+HD+PeJVhesAve6+0vdqLe8d2Z2E/Bn4HXOzr34GtG8K+Xfw8zsWqKJy0miP+Q95e7fNLMPEPVm7AJeAT7v7g0zywA/JZqXVwTudvep7tReOsXMbgG+7O53KPt4CDk/HU77gJ+7+8NmNoqe+z3PzA4TLWSTAqaAewm/A4hJ9l1vXImIiIiIiPSCbg8LFBERERER6QlqXImIiIiIiHSAGlciIiIiIiIdoMaViIiIiIhIB6hxJSIiIiIi0gFqXImIiIiIiHSAGlciIiIiIiIdoMaViIiIiIhIB/wHsD9gY4ceWooAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# show the results\n", + "show_result_pyplot(model, img, result)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + }, + "pycharm": { + "stem_cell": { + "cell_type": "raw", + "metadata": { + "collapsed": false + }, + "source": [] + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/thirdparty/mmdetection/demo/webcam_demo.py b/thirdparty/mmdetection/demo/webcam_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..5bded14ff6c3ca633ba6af1843d5a32a433f2e06 --- /dev/null +++ b/thirdparty/mmdetection/demo/webcam_demo.py @@ -0,0 +1,46 @@ +import argparse + +import cv2 +import torch + +from mmdet.apis import inference_detector, init_detector + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDetection webcam demo') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument( + '--device', type=str, default='cuda:0', help='CPU/CUDA device option') + parser.add_argument( + '--camera-id', type=int, default=0, help='camera device id') + parser.add_argument( + '--score-thr', type=float, default=0.5, help='bbox score threshold') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + device = torch.device(args.device) + + model = init_detector(args.config, args.checkpoint, device=device) + + camera = cv2.VideoCapture(args.camera_id) + + print('Press "Esc", "q" or "Q" to exit.') + while True: + ret_val, img = camera.read() + result = inference_detector(model, img) + + ch = cv2.waitKey(1) + if ch == 27 or ch == ord('q') or ch == ord('Q'): + break + + model.show_result( + img, result, score_thr=args.score_thr, wait_time=1, show=True) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/docker/Dockerfile b/thirdparty/mmdetection/docker/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..81e458fc1c9b1a50a457c196de1e6da619ac0695 --- /dev/null +++ b/thirdparty/mmdetection/docker/Dockerfile @@ -0,0 +1,24 @@ +ARG PYTORCH="1.6.0" +ARG CUDA="10.1" +ARG CUDNN="7" + +FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel + +ENV TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0+PTX" +ENV TORCH_NVCC_FLAGS="-Xfatbin -compress-all" +ENV CMAKE_PREFIX_PATH="$(dirname $(which conda))/../" + +RUN apt-get update && apt-get install -y ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libsm6 libxrender-dev libxext6 \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install MMCV +RUN pip install mmcv-full==latest+torch1.6.0+cu101 -f https://openmmlab.oss-accelerate.aliyuncs.com/mmcv/dist/index.html + +# Install MMDetection +RUN conda clean --all +RUN git clone https://github.com/open-mmlab/mmdetection.git /mmdetection +WORKDIR /mmdetection +ENV FORCE_CUDA="1" +RUN pip install -r requirements/build.txt +RUN pip install --no-cache-dir -e . 
diff --git a/thirdparty/mmdetection/docs/1_exist_data_model.md b/thirdparty/mmdetection/docs/1_exist_data_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..8d7598688865cedfa20b18c08238fb9b9fabd49c
--- /dev/null
+++ b/thirdparty/mmdetection/docs/1_exist_data_model.md
@@ -0,0 +1,442 @@
+# 1: Inference and train with existing models and standard datasets
+
+MMDetection provides hundreds of pre-trained detection models in [Model Zoo](https://mmdetection.readthedocs.io/en/latest/model_zoo.html), and supports multiple standard datasets, including Pascal VOC, COCO, CityScapes, LVIS, etc. This note will show how to perform common tasks on these existing models and standard datasets, including:
+
+- Use existing models to run inference on given images.
+- Test existing models on standard datasets.
+- Train predefined models on standard datasets.
+
+## Inference with existing models
+
+By inference, we mean using trained models to detect objects on images. In MMDetection, a model is defined by a configuration file, and existing model parameters are saved in a checkpoint file.
+
+To start with, we recommend [Faster RCNN](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn) with this [configuration file](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) and this [checkpoint file](http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth). It is recommended to download the checkpoint file to the `checkpoints` directory.
+
+### High-level APIs for inference
+
+MMDetection provides high-level Python APIs for inference on images. Here is an example of building the model and running inference on given images or videos.
+
+```python
+from mmdet.apis import init_detector, inference_detector
+import mmcv
+
+# Specify the path to model config and checkpoint file
+config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
+
+# build the model from a config file and a checkpoint file
+model = init_detector(config_file, checkpoint_file, device='cuda:0')
+
+# test a single image and show the results
+img = 'test.jpg'  # or img = mmcv.imread(img), which will only load it once
+result = inference_detector(model, img)
+# visualize the results in a new window
+model.show_result(img, result)
+# or save the visualization results to image files
+model.show_result(img, result, out_file='result.jpg')
+
+# test a video and show the results
+video = mmcv.VideoReader('video.mp4')
+for frame in video:
+    result = inference_detector(model, frame)
+    model.show_result(frame, result, wait_time=1)
+```
+
+A notebook demo can be found in [demo/inference_demo.ipynb](https://github.com/open-mmlab/mmdetection/blob/master/demo/inference_demo.ipynb).
+
+### Asynchronous interface - supported for Python 3.7+
+
+For Python 3.7+, MMDetection also supports async interfaces.
+By utilizing CUDA streams, it avoids blocking the CPU on GPU-bound inference code and enables better CPU/GPU utilization for single-threaded applications. Inference can be done concurrently either between different input data samples or between different models of an inference pipeline.
+
+See `tests/async_benchmark.py` to compare the speed of synchronous and asynchronous interfaces.
+
+### Asynchronous interface - supported for Python 3.7+
+
+For Python 3.7+, MMDetection also supports async interfaces.
+By utilizing CUDA streams, it avoids blocking the CPU on GPU-bound inference code and enables better CPU/GPU utilization for single-threaded applications. Inference can be done concurrently either between different input data samples or between different models of some inference pipeline.
+
+See `tests/async_benchmark.py` to compare the speed of synchronous and asynchronous interfaces.
+
+```python
+import asyncio
+import torch
+from mmdet.apis import init_detector, async_inference_detector
+from mmdet.utils.contextmanagers import concurrent
+
+async def main():
+    config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+    checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
+    device = 'cuda:0'
+    model = init_detector(config_file, checkpoint=checkpoint_file, device=device)
+
+    # queue is used for concurrent inference of multiple images
+    streamqueue = asyncio.Queue()
+    # queue size defines concurrency level
+    streamqueue_size = 3
+
+    for _ in range(streamqueue_size):
+        streamqueue.put_nowait(torch.cuda.Stream(device=device))
+
+    # test a single image and show the results
+    img = 'test.jpg'  # or img = mmcv.imread(img), which will only load it once
+
+    async with concurrent(streamqueue):
+        result = await async_inference_detector(model, img)
+
+    # visualize the results in a new window
+    model.show_result(img, result)
+    # or save the visualization results to image files
+    model.show_result(img, result, out_file='result.jpg')
+
+
+asyncio.run(main())
+```
+
+### Demos
+
+We also provide two demo scripts, implemented with high-level APIs and supporting functionality code.
+Source code is available [here](https://github.com/open-mmlab/mmdetection/tree/master/demo).
+
+#### Image demo
+
+This script performs inference on a single image.
+
+```shell
+python demo/image_demo.py \
+    ${IMAGE_FILE} \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--device ${DEVICE}] \
+    [--score-thr ${SCORE_THR}]
+```
+
+Examples:
+
+```shell
+python demo/image_demo.py demo/demo.jpg \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+    --device cpu
+```
+
+#### Webcam demo
+
+This is a live demo from a webcam.
+
+```shell
+python demo/webcam_demo.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--device ${DEVICE}] \
+    [--camera-id ${CAMERA_ID}] \
+    [--score-thr ${SCORE_THR}]
+```
+
+Examples:
+
+```shell
+python demo/webcam_demo.py \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth
+```
+
+## Test existing models on standard datasets
+
+To evaluate a model's accuracy, one usually tests the model on some standard datasets.
+MMDetection supports multiple public datasets including COCO, Pascal VOC, CityScapes, and [more](https://github.com/open-mmlab/mmdetection/tree/master/configs/_base_/datasets).
+This section will show how to test existing models on supported datasets.
+
+### Prepare datasets
+
+Public datasets like Pascal VOC and COCO are available from official websites or mirrors.
+It is recommended to download and extract the dataset somewhere outside the project directory and symlink the dataset root to `$MMDETECTION/data` as below.
+If your folder structure is different, you may need to change the corresponding paths in config files.
+
+```plain
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   ├── cityscapes
+│   │   ├── annotations
+│   │   ├── leftImg8bit
+│   │   │   ├── train
+│   │   │   ├── val
+│   │   ├── gtFine
+│   │   │   ├── train
+│   │   │   ├── val
+│   ├── VOCdevkit
+│   │   ├── VOC2007
+│   │   ├── VOC2012
+
+```
+
+The cityscapes annotations need to be converted into the COCO format using `tools/convert_datasets/cityscapes.py`:
+
+```shell
+pip install cityscapesscripts
+
+python tools/convert_datasets/cityscapes.py \
+    ./data/cityscapes \
+    --nproc 8 \
+    --out-dir ./data/cityscapes/annotations
+```
+
+### Test existing models
+
+We provide testing scripts for evaluating an existing model on the whole dataset (COCO, PASCAL VOC, Cityscapes, etc.).
+The following testing environments are supported:
+
+- single GPU
+- single node multiple GPUs
+- multiple nodes
+
+Choose the proper script to perform testing depending on the testing environment.
+
+```shell
+# single-gpu testing
+python tools/test.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--out ${RESULT_FILE}] \
+    [--eval ${EVAL_METRICS}] \
+    [--show]
+
+# multi-gpu testing
+bash tools/dist_test.sh \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    ${GPU_NUM} \
+    [--out ${RESULT_FILE}] \
+    [--eval ${EVAL_METRICS}]
+```
+
+`tools/dist_test.sh` also supports multi-node testing, but relies on PyTorch's [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility).
+
+Optional arguments:
+
+- `RESULT_FILE`: Filename of the output results in pickle format. If not specified, the results will not be saved to a file.
+- `EVAL_METRICS`: Items to be evaluated on the results. Allowed values depend on the dataset, e.g., `proposal_fast`, `proposal`, `bbox`, `segm` are available for COCO and `mAP`, `recall` for PASCAL VOC. Cityscapes can be evaluated with `cityscapes` as well as all COCO metrics.
+- `--show`: If specified, detection results will be plotted on the images and shown in a new window. It is only applicable to single GPU testing and used for debugging and visualization. Please make sure that a GUI is available in your environment. Otherwise, you may encounter an error like `cannot connect to X server`.
+- `--show-dir`: If specified, detection results will be plotted on the images and saved to the specified directory. It is only applicable to single GPU testing and used for debugging and visualization. You do NOT need a GUI available in your environment for using this option.
+- `--show-score-thr`: If specified, detections with scores below this threshold will be removed.
+
+#### Examples
+
+Assume that you have already downloaded the checkpoints to the directory `checkpoints/`.
+
+1. Test Faster R-CNN and visualize the results. Press any key for the next image.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn).
+
+   ```shell
+   python tools/test.py \
+       configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+       --show
+   ```
+
+2. Test Faster R-CNN and save the painted images for future visualization.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn).
+
+   ```shell
+   python tools/test.py \
+       configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+       --show-dir faster_rcnn_r50_fpn_1x_results
+   ```
+
+3. Test Faster R-CNN on PASCAL VOC (without saving the test results) and evaluate the mAP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc).
+
+   ```shell
+   python tools/test.py \
+       configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_voc0712_20200624-c9895d40.pth \
+       --eval mAP
+   ```
+
+4. Test Mask R-CNN with 8 GPUs, and evaluate the bbox and mask AP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --out results.pkl \
+       --eval bbox segm
+   ```
+
+5. Test Mask R-CNN with 8 GPUs, and evaluate the **classwise** bbox and mask AP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --out results.pkl \
+       --eval bbox segm \
+       --options "classwise=True"
+   ```
+
+6. Test Mask R-CNN on COCO test-dev with 8 GPUs, and generate JSON files for submitting to the official evaluation server.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --format-only \
+       --options "jsonfile_prefix=./mask_rcnn_test-dev_results"
+   ```
+
+   This command generates two JSON files `mask_rcnn_test-dev_results.bbox.json` and `mask_rcnn_test-dev_results.segm.json`.
+
+7. Test Mask R-CNN on Cityscapes test with 8 GPUs, and generate txt and png files for submitting to the official evaluation server.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_cityscapes_20200227-afe51d5a.pth \
+       8 \
+       --format-only \
+       --options "txtfile_prefix=./mask_rcnn_cityscapes_test_results"
+   ```
+
+   The generated png and txt files will be under the `./mask_rcnn_cityscapes_test_results` directory.
+
+## Train predefined models on standard datasets
+
+MMDetection also provides out-of-the-box tools for training detection models.
+This section will show how to train _predefined_ models (under [configs](https://github.com/open-mmlab/mmdetection/tree/master/configs)) on standard datasets, i.e., COCO.
+
+**Important**: The default learning rate in config files is for 8 GPUs and 2 img/gpu (batch size = 8\*2 = 16).
+According to the [linear scaling rule](https://arxiv.org/abs/1706.02677), you need to set the learning rate proportional to the batch size if you use different GPUs or images per GPU, e.g., `lr=0.01` for 4 GPUs \* 2 imgs/gpu and `lr=0.08` for 16 GPUs \* 4 imgs/gpu; the snippet below illustrates the rule.
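+
+As a quick arithmetic check of this rule (a sketch; the base `lr=0.02` is the common default for the 8 GPUs \* 2 img/gpu baseline, so substitute your own config's value):
+
+```python
+def scaled_lr(num_gpus, imgs_per_gpu, base_lr=0.02, base_batch_size=16):
+    """Linear scaling rule: lr grows in proportion to the total batch size."""
+    return base_lr * (num_gpus * imgs_per_gpu) / base_batch_size
+
+
+print(scaled_lr(4, 2))   # 0.01, the 4 GPU x 2 img/gpu example above
+print(scaled_lr(16, 4))  # 0.08, the 16 GPU x 4 img/gpu example above
+```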
+
+### Prepare datasets
+
+Training requires preparing datasets too. See section [Prepare datasets](#prepare-datasets) above for details.
+
+**Note**:
+Currently, the config files under `configs/cityscapes` use COCO pretrained weights to initialize.
+You could download the existing models in advance if the network connection is unavailable or slow; otherwise, errors would occur at the beginning of training.
+
+### Training on a single GPU
+
+We provide `tools/train.py` to launch training jobs on a single GPU.
+The basic usage is as follows.
+
+```shell
+python tools/train.py \
+    ${CONFIG_FILE} \
+    [optional arguments]
+```
+
+During training, log files and checkpoints will be saved to the working directory, which is specified by `work_dir` in the config file or via the CLI argument `--work-dir`.
+
+By default, the model is evaluated on the validation set every epoch; the evaluation interval can be specified in the config file as shown below.
+
+```python
+# evaluate the model every 12 epochs.
+evaluation = dict(interval=12)
+```
+
+This tool accepts several optional arguments, including:
+
+- `--no-validate` (**not suggested**): Disable evaluation during training.
+- `--work-dir ${WORK_DIR}`: Override the working directory.
+- `--resume-from ${CHECKPOINT_FILE}`: Resume from a previous checkpoint file.
+- `--options 'Key=value'`: Overrides other settings in the used config.
+
+**Note**:
+
+Difference between `resume-from` and `load-from`:
+
+`resume-from` loads both the model weights and optimizer status, and the epoch is also inherited from the specified checkpoint. It is usually used for resuming a training process that was interrupted accidentally.
+`load-from` only loads the model weights and the training epoch starts from 0. It is usually used for finetuning.
+
+### Training on multiple GPUs
+
+We provide `tools/dist_train.sh` to launch training on multiple GPUs.
+The basic usage is as follows.
+
+```shell
+bash ./tools/dist_train.sh \
+    ${CONFIG_FILE} \
+    ${GPU_NUM} \
+    [optional arguments]
+```
+
+Optional arguments remain the same as stated [above](#training-on-a-single-gpu).
+
+#### Launch multiple jobs simultaneously
+
+If you would like to launch multiple jobs on a single machine, e.g., 2 jobs of 4-GPU training on a machine with 8 GPUs,
+you need to specify different ports (29500 by default) for each job to avoid communication conflicts.
+
+If you use `dist_train.sh` to launch training jobs, you can set the port in the commands.
+
+```shell
+CUDA_VISIBLE_DEVICES=0,1,2,3 PORT=29500 ./tools/dist_train.sh ${CONFIG_FILE} 4
+CUDA_VISIBLE_DEVICES=4,5,6,7 PORT=29501 ./tools/dist_train.sh ${CONFIG_FILE} 4
+```
+
+### Training on multiple nodes
+
+MMDetection relies on the `torch.distributed` package for distributed training.
+Thus, as a basic usage, one can launch distributed training via PyTorch's [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility).
+
+### Manage jobs with Slurm
+
+[Slurm](https://slurm.schedmd.com/) is a good job scheduling system for computing clusters.
+On a cluster managed by Slurm, you can use `slurm_train.sh` to spawn training jobs. It supports both single-node and multi-node training.
+
+The basic usage is as follows.
+
+```shell
+[GPUS=${GPUS}] ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} ${CONFIG_FILE} ${WORK_DIR}
+```
+
+Below is an example of using 16 GPUs to train Mask R-CNN on a Slurm partition named _dev_, setting the work-dir to a shared file system.
+
+```shell
+GPUS=16 ./tools/slurm_train.sh dev mask_r50_1x configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py /nfs/xxxx/mask_rcnn_r50_fpn_1x
+```
+
+You can check [the source code](https://github.com/open-mmlab/mmdetection/blob/master/tools/slurm_train.sh) to review full arguments and environment variables.
+
+When using Slurm, the port option needs to be set in one of the following ways:
+
+1. Set the port through `--options`. This is recommended since it does not change the original configs.
+
+   ```shell
+   CUDA_VISIBLE_DEVICES=0,1,2,3 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config1.py ${WORK_DIR} --options 'dist_params.port=29500'
+   CUDA_VISIBLE_DEVICES=4,5,6,7 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config2.py ${WORK_DIR} --options 'dist_params.port=29501'
+   ```
+
+2. Modify the config files to set different communication ports.
+
+   In `config1.py`, set
+
+   ```python
+   dist_params = dict(backend='nccl', port=29500)
+   ```
+
+   In `config2.py`, set
+
+   ```python
+   dist_params = dict(backend='nccl', port=29501)
+   ```
+
+   Then you can launch two jobs with `config1.py` and `config2.py`.
+
+   ```shell
+   CUDA_VISIBLE_DEVICES=0,1,2,3 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config1.py ${WORK_DIR}
+   CUDA_VISIBLE_DEVICES=4,5,6,7 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config2.py ${WORK_DIR}
+   ```
diff --git a/thirdparty/mmdetection/docs/2_new_data_model.md b/thirdparty/mmdetection/docs/2_new_data_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..608f6ac162f5af1153ba2af8b48ea25055c17e56
--- /dev/null
+++ b/thirdparty/mmdetection/docs/2_new_data_model.md
@@ -0,0 +1,263 @@
+# 2: Train with customized datasets
+
+In this note, you will learn how to inference, test, and train predefined models with customized datasets. We use the [balloon dataset](https://github.com/matterport/Mask_RCNN/tree/master/samples/balloon) as an example to describe the whole process.
+
+The basic steps are as below:
+
+1. Prepare the customized dataset
+2. Prepare a config
+3. Train, test, and inference models on the customized dataset.
+
+## Prepare the customized dataset
+
+There are three ways to support a new dataset in MMDetection:
+
+1. Reorganize the dataset into COCO format.
+2. Reorganize the dataset into a middle format.
+3. Implement a new dataset.
+
+We usually recommend the first two methods, which are generally easier than the third.
+
+In this note, we give an example of converting the data into COCO format.
+
+**Note**: MMDetection only supports evaluating mask AP of datasets in COCO format for now.
+So for instance segmentation tasks, users should convert the data into the COCO format.
+
+### COCO annotation format
+
+The necessary keys of the COCO format for instance segmentation are shown below; for complete details, please refer to [the official format description](https://cocodataset.org/#format-data).
+
+```json
+{
+    "images": [image],
+    "annotations": [annotation],
+    "categories": [category]
+}
+
+image = {
+    "id": int,
+    "width": int,
+    "height": int,
+    "file_name": str,
+}
+
+annotation = {
+    "id": int,
+    "image_id": int,
+    "category_id": int,
+    "segmentation": RLE or [polygon],
+    "area": float,
+    "bbox": [x,y,width,height],
+    "iscrowd": 0 or 1,
+}
+
+categories = [{
+    "id": int,
+    "name": str,
+    "supercategory": str,
+}]
+```
+
+Assume we use the balloon dataset.
+After downloading the data, we need to implement a function to convert the annotation format into the COCO format.
+Then we can use the implemented `CocoDataset` to load the data and perform training and evaluation.
+
+If you take a look at the dataset, you will find the dataset format is as below:
+
+```json
+{'base64_img_data': '',
+ 'file_attributes': {},
+ 'filename': '34020010494_e5cb88e1c4_k.jpg',
+ 'fileref': '',
+ 'regions': {'0': {'region_attributes': {},
+   'shape_attributes': {'all_points_x': [1020, 1000, 994, 1003, 1023, 1050,
+      1089, 1134, 1190, 1265, 1321, 1361, 1403, 1428, 1442, 1445, 1441, 1427,
+      1400, 1361, 1316, 1269, 1228, 1198, 1207, 1210, 1190, 1177, 1172, 1174,
+      1170, 1153, 1127, 1104, 1061, 1032, 1020],
+    'all_points_y': [963, 899, 841, 787, 738, 700, 663, 638, 621, 619, 643,
+      672, 720, 765, 800, 860, 896, 942, 990, 1035, 1079, 1112, 1129, 1134,
+      1144, 1153, 1166, 1166, 1150, 1136, 1129, 1122, 1112, 1084, 1037, 989,
+      963],
+    'name': 'polygon'}}},
+ 'size': 1115004}
+```
+
+The annotation is a JSON file where each key holds all the annotations of one image.
+The code to convert the balloon dataset into the COCO format is as below.
+
+```python
+import os.path as osp
+
+import mmcv
+
+
+def convert_balloon_to_coco(ann_file, out_file, image_prefix):
+    data_infos = mmcv.load(ann_file)
+
+    annotations = []
+    images = []
+    obj_count = 0
+    for idx, v in enumerate(mmcv.track_iter_progress(data_infos.values())):
+        filename = v['filename']
+        img_path = osp.join(image_prefix, filename)
+        height, width = mmcv.imread(img_path).shape[:2]
+
+        images.append(dict(
+            id=idx,
+            file_name=filename,
+            height=height,
+            width=width))
+
+        for _, obj in v['regions'].items():
+            assert not obj['region_attributes']
+            obj = obj['shape_attributes']
+            px = obj['all_points_x']
+            py = obj['all_points_y']
+            poly = [(x + 0.5, y + 0.5) for x, y in zip(px, py)]
+            # flatten the (x, y) pairs into [x1, y1, x2, y2, ...]
+            poly = [p for x in poly for p in x]
+
+            x_min, y_min, x_max, y_max = (
+                min(px), min(py), max(px), max(py))
+
+            data_anno = dict(
+                image_id=idx,
+                id=obj_count,
+                category_id=0,
+                bbox=[x_min, y_min, x_max - x_min, y_max - y_min],
+                area=(x_max - x_min) * (y_max - y_min),
+                segmentation=[poly],
+                iscrowd=0)
+            annotations.append(data_anno)
+            obj_count += 1
+
+    coco_format_json = dict(
+        images=images,
+        annotations=annotations,
+        categories=[{'id': 0, 'name': 'balloon'}])
+    mmcv.dump(coco_format_json, out_file)
+```
+
+Using the function above, users can successfully convert the annotation file into the COCO format; then `CocoDataset` can be used to train and evaluate the model, as sketched below.
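+
+A minimal usage sketch follows. The input filename `via_region_data.json` and the output paths are assumptions based on the balloon dataset layout, so adjust them to your setup; `pycocotools` (already a dependency) can be used to double-check the converted file.
+
+```python
+from pycocotools.coco import COCO
+
+# convert the downloaded annotations (paths are assumptions)
+convert_balloon_to_coco(
+    ann_file='balloon/train/via_region_data.json',
+    out_file='balloon/train/annotation_coco.json',
+    image_prefix='balloon/train')
+
+# sanity-check the result with pycocotools
+coco = COCO('balloon/train/annotation_coco.json')
+print(f'{len(coco.imgs)} images, {len(coco.anns)} annotations')
+```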
+
+## Prepare a config
+
+The second step is to prepare a config so that the dataset can be successfully loaded. Assume that we want to use Mask R-CNN with FPN to train the detector on the balloon dataset, and that the config is under the directory `configs/balloon/` and named `mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py`. The config is as below.
+
+```python
+# The new config inherits a base config to highlight the necessary modification
+# (the path is relative to this config file)
+_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py'
+
+# We also need to change the num_classes in head to match the dataset's annotation
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(num_classes=1),
+        mask_head=dict(num_classes=1)))
+
+# Modify dataset related settings
+dataset_type = 'COCODataset'
+classes = ('balloon',)
+data = dict(
+    train=dict(
+        img_prefix='balloon/train/',
+        classes=classes,
+        ann_file='balloon/train/annotation_coco.json'),
+    val=dict(
+        img_prefix='balloon/val/',
+        classes=classes,
+        ann_file='balloon/val/annotation_coco.json'),
+    test=dict(
+        img_prefix='balloon/val/',
+        classes=classes,
+        ann_file='balloon/val/annotation_coco.json'))
+
+# We can use the pre-trained Mask RCNN model to obtain higher performance
+load_from = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'
+```
+
+## Train a new model
+
+To train a model with the new config, you can simply run
+
+```shell
+python tools/train.py configs/balloon/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
+
+## Test and inference
+
+To test the trained model, you can simply run
+
+```shell
+python tools/test.py configs/balloon/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py work_dirs/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon/latest.pth --eval bbox segm
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
diff --git a/thirdparty/mmdetection/docs/3_exist_data_new_model.md b/thirdparty/mmdetection/docs/3_exist_data_new_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..4052d80d9df1a1f4445a82e233ce96de1f0c5671
--- /dev/null
+++ b/thirdparty/mmdetection/docs/3_exist_data_new_model.md
@@ -0,0 +1,164 @@
+# 3: Inference and training with existing models and standard datasets
+
+In this note, you will learn how to inference, test, and train predefined models with your own settings for standard datasets. We use the cityscapes dataset to train a COCO pretrained Cascade Mask R-CNN model as an example to describe the whole process.
+
+The basic steps are as below:
+
+1. Prepare the standard dataset
+2. Prepare a config
+3. Train, test, and inference models on the standard dataset.
+
+### Prepare the standard dataset
+
+In this note, we use the standard cityscapes dataset as an example.
+
+It is recommended to symlink the dataset root to `$MMDETECTION/data`.
+If your folder structure is different, you may need to change the corresponding paths in config files.
+
+```
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   ├── cityscapes
+│   │   ├── annotations
+│   │   ├── leftImg8bit
+│   │   │   ├── train
+│   │   │   ├── val
+│   │   ├── gtFine
+│   │   │   ├── train
+│   │   │   ├── val
+│   ├── VOCdevkit
+│   │   ├── VOC2007
+│   │   ├── VOC2012
+
+```
+
+The cityscapes annotations have to be converted into the COCO format using `tools/convert_datasets/cityscapes.py`:
+
+```shell
+pip install cityscapesscripts
+python tools/convert_datasets/cityscapes.py ./data/cityscapes --nproc 8 --out-dir ./data/cityscapes/annotations
+```
+
+Currently, the config files in `cityscapes` use COCO pre-trained weights to initialize.
+You could download the pre-trained models in advance if the network is unavailable or slow; otherwise, errors would occur at the beginning of training.
+
+### Prepare a config
+
+The second step is to prepare a config for your own training settings. Assume that we want to use Cascade Mask R-CNN with FPN to train on the cityscapes dataset, and that the config is under the directory `configs/cityscapes/` and named `cascade_mask_rcnn_r50_fpn_1x_cityscapes.py`. The config is as below.
+
+```python
+# The new config inherits the base configs to highlight the necessary modification
+_base_ = [
+    '../_base_/models/cascade_mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py'
+]
+
+# We also need to change the num_classes in head from 80 to 8, to match the cityscapes dataset's annotation.
+# This modification involves `bbox_head` and `mask_head`.
+model = dict(
+    pretrained=None,
+    roi_head=dict(
+        bbox_head=[
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.1, 0.1, 0.2, 0.2]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
+                               loss_weight=1.0)),
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.05, 0.05, 0.1, 0.1]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
+                               loss_weight=1.0)),
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.033, 0.033, 0.067, 0.067]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
+        ],
+        mask_head=dict(
+            type='FCNMaskHead',
+            num_convs=4,
+            in_channels=256,
+            conv_out_channels=256,
+            num_classes=8,
+            loss_mask=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
+
+# Set optimizer
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+optimizer_config = dict(grad_clip=None)
+# Set learning policy
+lr_config = dict(
+    policy='step',
+    warmup='linear',
+    warmup_iters=500,
+    warmup_ratio=0.001,
+    step=[7])
+total_epochs = 8
+
+# We can use the COCO pretrained Cascade Mask R-CNN model for a more stable initialization
+load_from = 'http://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth'
+```
+
+### Train a new model
+
+To train a model with the new config, you can simply run
+
+```shell
+python tools/train.py configs/cityscapes/cascade_mask_rcnn_r50_fpn_1x_cityscapes.py
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md). A quick sanity check of the new config is sketched below.
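+
+The check below is a minimal sketch using `mmcv.Config` to confirm that the overridden fields are picked up before launching a long training job; the printed expectations follow the config above.
+
+```python
+from mmcv import Config
+
+cfg = Config.fromfile(
+    'configs/cityscapes/cascade_mask_rcnn_r50_fpn_1x_cityscapes.py')
+print(cfg.model.roi_head.mask_head.num_classes)  # expect 8
+print(cfg.total_epochs)                          # expect 8
+print(cfg.load_from)                             # COCO pretrained checkpoint URL
+```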
+ +### Test and inference + +To test the trained model, you can simply run + +```shell +python tools/test.py configs/cityscapes/cascade_mask_rcnn_r50_fpn_1x_cityscapes.py work_dirs/cascade_mask_rcnn_r50_fpn_1x_cityscapes/latest.pth --eval bbox segm +``` + +For more detailed usages, please refer to the [Case 1](1_exist_data_model.md). diff --git a/thirdparty/mmdetection/docs/Makefile b/thirdparty/mmdetection/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d4bb2cbb9eddb1bb1b4f366623044af8e4830919 --- /dev/null +++ b/thirdparty/mmdetection/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/thirdparty/mmdetection/docs/api.rst b/thirdparty/mmdetection/docs/api.rst new file mode 100644 index 0000000000000000000000000000000000000000..04406303ebafd54920490647714c446ede53b833 --- /dev/null +++ b/thirdparty/mmdetection/docs/api.rst @@ -0,0 +1,101 @@ +API Reference +================= + +mmdet.apis +-------------- +.. automodule:: mmdet.apis + :members: + +mmdet.core +-------------- + +anchor +^^^^^^^^^^ +.. automodule:: mmdet.core.anchor + :members: + +bbox +^^^^^^^^^^ +.. automodule:: mmdet.core.bbox + :members: + +export +^^^^^^^^^^ +.. automodule:: mmdet.core.export + :members: + +mask +^^^^^^^^^^ +.. automodule:: mmdet.core.mask + :members: + +evaluation +^^^^^^^^^^ +.. automodule:: mmdet.core.evaluation + :members: + +post_processing +^^^^^^^^^^^^^^^ +.. automodule:: mmdet.core.post_processing + :members: + +optimizer +^^^^^^^^^^ +.. automodule:: mmdet.core.optimizer + :members: + +utils +^^^^^^^^^^ +.. automodule:: mmdet.core.utils + :members: + +mmdet.datasets +-------------- + +datasets +^^^^^^^^^^ +.. automodule:: mmdet.datasets + :members: + +pipelines +^^^^^^^^^^ +.. automodule:: mmdet.datasets.pipelines + :members: + +mmdet.models +-------------- + +detectors +^^^^^^^^^^ +.. automodule:: mmdet.models.detectors + :members: + +backbones +^^^^^^^^^^ +.. automodule:: mmdet.models.backbones + :members: + +necks +^^^^^^^^^^^^ +.. automodule:: mmdet.models.necks + :members: + +dense_heads +^^^^^^^^^^^^ +.. automodule:: mmdet.models.dense_heads + :members: + +roi_heads +^^^^^^^^^^ +.. automodule:: mmdet.models.roi_heads + :members: + +losses +^^^^^^^^^^ +.. automodule:: mmdet.models.losses + :members: + +utils +^^^^^^^^^^ +.. automodule:: mmdet.models.utils + :members: diff --git a/thirdparty/mmdetection/docs/changelog.md b/thirdparty/mmdetection/docs/changelog.md new file mode 100644 index 0000000000000000000000000000000000000000..637de72e7e7428636a6ffabfcee4c56655422ab7 --- /dev/null +++ b/thirdparty/mmdetection/docs/changelog.md @@ -0,0 +1,606 @@ +## Changelog + +### v2.7.0 (30/11/2020) + +- Support new method: [DETR](https://arxiv.org/abs/2005.12872), [ResNest](https://arxiv.org/abs/2004.08955), Faster R-CNN DC5. +- Support YOLO, Mask R-CNN, and Cascade R-CNN models exportable to ONNX. 
+ +#### New Features + +- Support [DETR](https://arxiv.org/abs/2005.12872) (#4201, #4206) +- Support to link the best checkpoint in training (#3773) +- Support to override config through options in inference.py (#4175) +- Support YOLO, Mask R-CNN, and Cascade R-CNN models exportable to ONNX (#4087, #4083) +- Support [ResNeSt](https://arxiv.org/abs/2004.08955) backbone (#2959) +- Support unclip border bbox regression (#4076) +- Add tpfp func in evaluating AP (#4069) +- Support mixed precision training of SSD detector with other backbones (#4081) +- Add Faster R-CNN DC5 models (#4043) + +#### Bug Fixes + +- Fix bug of `gpu_id` in distributed training mode (#4163) +- Support Albumentations with version higher than 0.5 (#4032) +- Fix num_classes bug in faster rcnn config (#4088) +- Update code in docs/2_new_data_model.md (#4041) + +#### Improvements + +- Ensure DCN offset to have similar type as features in VFNet (#4198) +- Add config links in README files of models (#4190) +- Add tutorials for loss conventions (#3818) +- Add solution to installation issues in 30-series GPUs (#4176) +- Update docker version in get_started.md (#4145) +- Add model statistics and polish some titles in configs README (#4140) +- Clamp neg probability in FreeAnchor (#4082) +- Speed up expanding large images (#4089) +- Fix Pytorch 1.7 incompatibility issues (#4103) +- Update trouble shooting page to resolve segmentation fault (#4055) +- Update aLRP-Loss in project page (#4078) +- Clean duplicated `reduce_mean` function (#4056) +- Refactor Q&A (#4045) + + +### v2.6.0 (1/11/2020) + +- Support new method: [VarifocalNet](https://arxiv.org/abs/2008.13367). +- Refactored documentation with more tutorials. + +#### New Features + +- Support GIoU calculation in `BboxOverlaps2D`, and re-implement `giou_loss` using `bbox_overlaps` (#3936) +- Support random sampling in CPU mode (#3948) +- Support VarifocalNet (#3666, #4024) + +#### Bug Fixes + +- Fix SABL validating bug in Cascade R-CNN (#3913) +- Avoid division by zero in PAA head when num_pos=0 (#3938) +- Fix temporary directory bug of multi-node testing error (#4034, #4017) +- Fix `--show-dir` option in test script (#4025) +- Fix GA-RetinaNet r50 model url (#3983) +- Update code in docs and fix broken urls (#3947) + +#### Improvements + +- Refactor pytorch2onnx API into `mmdet.core.export` and use `generate_inputs_and_wrap_model` for pytorch2onnx (#3857, #3912) +- Update RPN upgrade scripts for v2.5.0 compatibility (#3986) +- Use mmcv `tensor2imgs` (#4010) +- Update test robustness (#4000) +- Update trouble shooting page (#3994) +- Accelerate PAA training speed (#3985) +- Support batch_size > 1 in validation (#3966) +- Use RoIAlign implemented in MMCV for inference in CPU mode (#3930) +- Documentation refactoring (#4031) + +### v2.5.0 (5/10/2020) + +#### Highlights + +- Support new methods: [YOLACT](https://arxiv.org/abs/1904.02689), [CentripetalNet](https://arxiv.org/abs/2003.09119). +- Add more documentations for easier and more clear usage. + +#### Backwards Incompatible Changes + +**FP16 related methods are imported from mmcv instead of mmdet. (#3766, #3822)** +Mixed precision training utils in `mmdet.core.fp16` are moved to `mmcv.runner`, including `force_fp32`, `auto_fp16`, `wrap_fp16_model`, and `Fp16OptimizerHook`. A deprecation warning will be raised if users attempt to import those methods from `mmdet.core.fp16`, and will be finally removed in V2.8.0. + +**[0, N-1] represents foreground classes and N indicates background classes for all models. 
(#3221)**
+Before v2.5.0, the background label for RPN is 0, and N for other heads. Now the behavior is consistent for all models. Thus `self.background_labels` in `dense_heads` is removed and all heads use `self.num_classes` to indicate the class index of background labels.
+This change has no effect on the pre-trained models in the v2.x model zoo, but will affect the training of all models with RPN heads. Two-stage detectors whose RPN head uses softmax will be affected because the order of categories is changed.
+
+**Only call `get_subset_by_classes` when `test_mode=True` and `self.filter_empty_gt=True` (#3695)**
+Function `get_subset_by_classes` in dataset is refactored and only filters out images when `test_mode=True` and `self.filter_empty_gt=True`.
+In the original implementation, `get_subset_by_classes` is not related to the flag `self.filter_empty_gt` and will be called only when the classes are set during initialization, no matter whether `test_mode` is `True` or `False`. This brings ambiguous behavior and potential bugs in many cases. After v2.5.0, if `filter_empty_gt=False`, no matter whether the classes are specified in a dataset, the dataset will use all the images in the annotations. If `filter_empty_gt=True` and `test_mode=True`, no matter whether the classes are specified, the dataset will call `get_subset_by_classes` to check the images and filter out images containing no GT boxes. Therefore, the users should be responsible for the data filtering/cleaning process for the test dataset.
+
+#### New Features
+
+- Test time augmentation for single stage detectors (#3844, #3638)
+- Support to show the name of experiments during training (#3764)
+- Add `Shear`, `Rotate`, `Translate` Augmentation (#3656, #3619, #3687)
+- Add image-only transformations including `Contrast`, `Equalize`, `Color`, and `Brightness`. (#3643)
+- Support [YOLACT](https://arxiv.org/abs/1904.02689) (#3456)
+- Support [CentripetalNet](https://arxiv.org/abs/2003.09119) (#3390)
+- Support PyTorch 1.6 in docker (#3905)
+
+#### Bug Fixes
+
+- Fix the bug of training ATSS when there are no ground truth boxes (#3702)
+- Fix the bug of using Focal Loss when `num_pos` is 0 (#3702)
+- Fix the label index mapping in dataset browser (#3708)
+- Fix Mask R-CNN training getting stuck when there are no positive RoIs (#3713)
+- Fix the bug of `self.rpn_head.test_cfg` in `RPNTestMixin` by using `self.rpn_head` in rpn head (#3808)
+- Fix deprecated `Conv2d` from mmcv.ops (#3791)
+- Fix device bug in RepPoints (#3836)
+- Fix SABL validating bug (#3849)
+- Use `https://download.openmmlab.com/mmcv/dist/index.html` for installing MMCV (#3840)
+- Fix nonzero in NMS for PyTorch 1.6.0 (#3867)
+- Fix the API change bug of PAA (#3883)
+- Fix typo in bbox_flip (#3886)
+- Fix cv2 import error of libGL.so.1 in Dockerfile (#3891)
+
+#### Improvements
+
+- Change to use `mmcv.utils.collect_env` for collecting environment information to avoid duplicate codes (#3779)
+- Update checkpoint file names to v2.0 models in documentation (#3795)
+- Update tutorials for changing runtime settings (#3778) and modifying loss (#3777)
+- Improve the function of `simple_test_bboxes` in SABL (#3853)
+- Convert mask to bool before using it as img's index for robustness and speedup (#3870)
+- Improve documentation of modules and dataset customization (#3821)
+
+### v2.4.0 (5/9/2020)
+
+**Highlights**
+- Fix lots of issues/bugs and reorganize the trouble shooting page
+- Support new methods [SABL](https://arxiv.org/abs/1912.04260), [YOLOv3](https://arxiv.org/abs/1804.02767), and [PAA Assign](https://arxiv.org/abs/2007.08103)
+- Support Batch Inference
+- Start to publish `mmdet` package to PyPI since v2.3.0
+- Switch model zoo to download.openmmlab.com
+
+**Backwards Incompatible Changes**
+- Support Batch Inference (#3564, #3686, #3705): Since v2.4.0, MMDetection can run inference with multiple images on a single GPU.
+This change influences all the test APIs in MMDetection and downstream codebases. To help the users migrate their code, we use `replace_ImageToTensor` (#3686) to convert legacy test data pipelines during dataset initialization.
+- Support RandomFlip with horizontal/vertical/diagonal direction (#3608): Since v2.4.0, MMDetection supports horizontal/vertical/diagonal flip in the data augmentation. This influences bounding box, mask, and image transformations in the data augmentation process and the process that will map those data back to the original format.
+- Migrate to use `mmlvis` and `mmpycocotools` for COCO and LVIS dataset (#3727). The APIs are fully compatible with the original `lvis` and `pycocotools`. Users need to uninstall the existing pycocotools and lvis packages in their environment first and install `mmlvis` & `mmpycocotools`.
+
+**Bug Fixes**
+- Fix default mean/std for onnx (#3491)
+- Fix coco evaluation and add metric items (#3497)
+- Fix typo for install.md (#3516)
+- Fix ATSS when `samples_per_gpu` is 1 (#3528)
+- Fix import of fuse_conv_bn (#3529)
+- Fix bug of gaussian_target, update unittest of heatmap (#3543)
+- Fix VOC2012 evaluation (#3553)
+- Fix scale factor bug of rescale (#3566)
+- Fix with_xxx_attributes in base detector (#3567)
+- Fix boxes scaling when number is 0 (#3575)
+- Fix rfp check when neck config is a list (#3591)
+- Fix import of fuse conv bn in benchmark.py (#3606)
+- Fix webcam demo (#3634)
+- Fix typo and itemize issues in tutorial (#3658)
+- Fix error in distributed training when some levels of FPN are not assigned with bounding boxes (#3670)
+- Fix the width and height orders of stride in valid flag generation (#3685)
+- Fix weight initialization bug in Res2Net DCN (#3714)
+- Fix bug in OHEMSampler (#3677)
+
+**New Features**
+- Support Cutout augmentation (#3521)
+- Support evaluation on multiple datasets through ConcatDataset (#3522)
+- Support [PAA assign](https://arxiv.org/abs/2007.08103) (#3547)
+- Support eval metric with pickle results (#3607)
+- Support [YOLOv3](https://arxiv.org/abs/1804.02767) (#3083)
+- Support [SABL](https://arxiv.org/abs/1912.04260) (#3603)
+- Support to publish to PyPI in github-action (#3510)
+- Support custom imports (#3641)
+
+**Improvements**
+- Refactor common issues in documentation (#3530)
+- Add PyTorch 1.6 to CI config (#3532)
+- Add config to runner meta (#3534)
+- Add eval-option flag for testing (#3537)
+- Add init_eval to evaluation hook (#3550)
+- Add include_bkg in ClassBalancedDataset (#3577)
+- Use config loading in `inference_detector` (#3611)
+- Add ATSS ResNet-101 models in model zoo (#3639)
+- Update urls to download.openmmlab.com (#3665)
+- Support non-mask training for CocoDataset (#3711)
+
+### v2.3.0 (5/8/2020)
+
+**Highlights**
+- The CUDA/C++ operators have been moved to `mmcv.ops`. For backward compatibility, `mmdet.ops` is kept as wrappers of `mmcv.ops`.
+- Support new methods [CornerNet](https://arxiv.org/abs/1808.01244), [DIOU](https://arxiv.org/abs/1911.08287)/[CIOU](https://arxiv.org/abs/2005.03572) loss, and new dataset: [LVIS V1](https://arxiv.org/abs/1908.03195)
+- Provide more detailed colab training tutorials and more complete documentation.
+- Support to convert RetinaNet from PyTorch to ONNX.
+
+**Bug Fixes**
+- Fix the model initialization bug of DetectoRS (#3187)
+- Fix the bug of module names in NASFCOSHead (#3205)
+- Fix the filename bug in publish_model.py (#3237)
+- Fix the dimensionality bug when `inside_flags.any()` is `False` in dense heads (#3242)
+- Fix the bug of forgetting to pass flip directions in `MultiScaleFlipAug` (#3262)
+- Fix the bug caused by the default value of `stem_channels` (#3333)
+- Fix the bug of model checkpoint loading for CPU inference (#3318, #3316)
+- Fix topk bug when box number is smaller than the expected topk number in ATSSAssigner (#3361)
+- Fix the gt priority bug in center_region_assigner.py (#3208)
+- Fix NaN issue of iou calculation in iou_loss.py (#3394)
+- Fix the bug that `iou_thrs` is not actually used during evaluation in coco.py (#3407)
+- Fix test-time augmentation of RepPoints (#3435)
+- Fix RuntimeError caused by non-contiguous tensor in Res2Net+DCN (#3412)
+
+**New Features**
+- Support [CornerNet](https://arxiv.org/abs/1808.01244) (#3036)
+- Support [DIOU](https://arxiv.org/abs/1911.08287)/[CIOU](https://arxiv.org/abs/2005.03572) loss (#3151)
+- Support [LVIS V1](https://arxiv.org/abs/1908.03195) dataset (#)
+- Support customized hooks in training (#3395)
+- Support fp16 training of generalized focal loss (#3410)
+- Support to convert RetinaNet from PyTorch to ONNX (#3075)
+
+**Improvements**
+- Support to process ignore boxes in ATSS assigner (#3082)
+- Allow to crop images without ground truth in `RandomCrop` (#3153)
+- Enable the `Accuracy` module to set threshold (#3155)
+- Refactor unit tests (#3206)
+- Unify the training settings of `to_float32` and `norm_cfg` in RegNets configs (#3210)
+- Add colab training tutorials for beginners (#3213, #3273)
+- Move CUDA/C++ operators into `mmcv.ops` and keep `mmdet.ops` as wrappers for backward compatibility (#3232)(#3457)
+- Update installation scripts in documentation (#3290) and dockerfile (#3320)
+- Support to set image resize backend (#3392)
+- Remove git hash in version file (#3466)
+- Check mmcv version to force version compatibility (#3460)
+
+
+### v2.2.0 (1/7/2020)
+
+**Highlights**
+- Support new methods: [DetectoRS](https://arxiv.org/abs/2006.02334), [PointRend](https://arxiv.org/abs/1912.08193), [Generalized Focal Loss](https://arxiv.org/abs/2006.04388), [Dynamic R-CNN](https://arxiv.org/abs/2004.06002)
+
+**Bug Fixes**
+  - Fix FreeAnchor when no gt in image (#3176)
+  - Clean up deprecated usage of `register_module()` (#3092, #3161)
+  - Fix pretrain bug in NAS FCOS (#3145)
+  - Fix `num_classes` in SSD (#3142)
+  - Fix FCOS warmup (#3119)
+  - Fix `rstrip` in `tools/publish_model.py`
+  - Fix `flip_ratio` default value in RandomFlip pipeline (#3106)
+  - Fix cityscapes eval with ms_rcnn (#3112)
+  - Fix RPN softmax (#3056)
+  - Fix filename of LVIS@v0.5 (#2998)
+  - Fix nan loss by filtering out-of-frame gt_bboxes in COCO (#2999)
+  - Fix bug in FSAF (#3018)
+  - Add FocalLoss `num_classes` check (#2964)
+  - Fix PISA Loss when there are no gts (#2992)
+  - Avoid nan in `iou_calculator` (#2975)
+  - Prevent possible bugs in loading and transforms caused by shallow copy (#2967)
+
+**New Features**
+- Add DetectoRS (#3064)
+- Support Generalized Focal Loss (#3097)
+- Support PointRend (#2752)
+- Support Dynamic R-CNN (#3040)
+- Add DeepFashion dataset (#2968)
+- Implement FCOS training tricks (#2935)
+- Use BaseDenseHead as base class for anchor-base heads (#2963)
+- Add `with_cp` for BasicBlock (#2891)
+- Add `stem_channels` argument for ResNet (#2954)
+
+**Improvements**
+
+- 
Add anchor free base head (#2867) +- Migrate to github action (#3137) +- Add docstring for datasets, pipelines, core modules and methods (#3130, #3125, #3120) +- Add VOC benchmark (#3060) +- Add `concat` mode in GRoI (#3098) +- Remove cmd arg `autorescale-lr` (#3080) +- Use `len(data['img_metas'])` to indicate `num_samples` (#3073, #3053) +- Switch to EpochBasedRunner (#2976) + + +### v2.1.0 (8/6/2020) + +**Highlights** +- Support new backbones: [RegNetX](https://arxiv.org/abs/2003.13678), [Res2Net](https://arxiv.org/abs/1904.01169) +- Support new methods: [NASFCOS](https://arxiv.org/abs/1906.04423), [PISA](https://arxiv.org/abs/1904.04821), [GRoIE](https://arxiv.org/abs/2004.13665) +- Support new dataset: [LVIS](https://arxiv.org/abs/1908.03195) + +**Bug Fixes** +- Change the CLI argument `--validate` to `--no-validate` to enable validation after training epochs by default. (#2651) +- Add missing cython to docker file (#2713) +- Fix bug in nms cpu implementation (#2754) +- Fix bug when showing mask results (#2763) +- Fix gcc requirement (#2806) +- Fix bug in async test (#2820) +- Fix mask encoding-decoding bugs in test API (#2824) +- Fix bug in test time augmentation (#2858, #2921, #2944) +- Fix a typo in comment of apis/train (#2877) +- Fix the bug of returning None when no gt bboxes are in the original image in `RandomCrop`. Fix the bug that misses to handle `gt_bboxes_ignore`, `gt_label_ignore`, and `gt_masks_ignore` in `RandomCrop`, `MinIoURandomCrop` and `Expand` modules. (#2810) +- Fix bug of `base_channels` of regnet (#2917) +- Fix the bug of logger when loading pre-trained weights in base detector (#2936) + +**New Features** +- Add IoU models (#2666) +- Add colab demo for inference +- Support class agnostic nms (#2553) +- Add benchmark gathering scripts for development only (#2676) +- Add mmdet-based project links (#2736, #2767, #2895) +- Add config dump in training (#2779) +- Add ClassBalancedDataset (#2721) +- Add res2net backbone (#2237) +- Support RegNetX models (#2710) +- Use `mmcv.FileClient` to support different storage backends (#2712) +- Add ClassBalancedDataset (#2721) +- Code Release: Prime Sample Attention in Object Detection (CVPR 2020) (#2626) +- Implement NASFCOS (#2682) +- Add class weight in CrossEntropyLoss (#2797) +- Support LVIS dataset (#2088) +- Support GRoIE (#2584) + +**Improvements** +- Allow different x and y strides in anchor heads. (#2629) +- Make FSAF loss more robust to no gt (#2680) +- Compute pure inference time instead (#2657) and update inference speed (#2730) +- Avoided the possibility that a patch with 0 area is cropped. (#2704) +- Add warnings when deprecated `imgs_per_gpu` is used. (#2700) +- Add a mask rcnn example for config (#2645) +- Update model zoo (#2762, #2866, #2876, #2879, #2831) +- Add `ori_filename` to img_metas and use it in test show-dir (#2612) +- Use `img_fields` to handle multiple images during image transform (#2800) +- Add upsample_cfg support in FPN (#2787) +- Add `['img']` as default `img_fields` for back compatibility (#2809) +- Rename the pretrained model from `open-mmlab://resnet50_caffe` and `open-mmlab://resnet50_caffe_bgr` to `open-mmlab://detectron/resnet50_caffe` and `open-mmlab://detectron2/resnet50_caffe`. 
(#2832) +- Added sleep(2) in test.py to reduce hanging problem (#2847) +- Support `c10::half` in CARAFE (#2890) +- Improve documentations (#2918, #2714) +- Use optimizer constructor in mmcv and clean the original implementation in `mmdet.core.optimizer` (#2947) + + +### v2.0.0 (6/5/2020) +In this release, we made lots of major refactoring and modifications. + +1. **Faster speed**. We optimize the training and inference speed for common models, achieving up to 30% speedup for training and 25% for inference. Please refer to [model zoo](model_zoo.md#comparison-with-detectron2) for details. + +2. **Higher performance**. We change some default hyperparameters with no additional cost, which leads to a gain of performance for most models. Please refer to [compatibility](compatibility.md#training-hyperparameters) for details. + +3. **More documentation and tutorials**. We add a bunch of documentation and tutorials to help users get started more smoothly. Read it [here](https://mmdetection.readthedocs.io/en/latest/). + +4. **Support PyTorch 1.5**. The support for 1.1 and 1.2 is dropped, and we switch to some new APIs. + +5. **Better configuration system**. Inheritance is supported to reduce the redundancy of configs. + +6. **Better modular design**. Towards the goal of simplicity and flexibility, we simplify some encapsulation while add more other configurable modules like BBoxCoder, IoUCalculator, OptimizerConstructor, RoIHead. Target computation is also included in heads and the call hierarchy is simpler. + +7. Support new methods: [FSAF](https://arxiv.org/abs/1903.00621) and PAFPN (part of [PAFPN](https://arxiv.org/abs/1803.01534)). + +**Breaking Changes** +Models training with MMDetection 1.x are not fully compatible with 2.0, please refer to the [compatibility doc](compatibility.md) for the details and how to migrate to the new version. + +**Improvements** +- Unify cuda and cpp API for custom ops. (#2277) +- New config files with inheritance. (#2216) +- Encapsulate the second stage into RoI heads. (#1999) +- Refactor GCNet/EmpericalAttention into plugins. (#2345) +- Set low quality match as an option in IoU-based bbox assigners. (#2375) +- Change the codebase's coordinate system. (#2380) +- Refactor the category order in heads. 0 means the first positive class instead of background now. (#2374) +- Add bbox sampler and assigner registry. (#2419) +- Speed up the inference of RPN. (#2420) +- Add `train_cfg` and `test_cfg` as class members in all anchor heads. (#2422) +- Merge target computation methods into heads. (#2429) +- Add bbox coder to support different bbox encoding and losses. (#2480) +- Unify the API for regression loss. (#2156) +- Refactor Anchor Generator. (#2474) +- Make `lr` an optional argument for optimizers. (#2509) +- Migrate to modules and methods in MMCV. (#2502, #2511, #2569, #2572) +- Support PyTorch 1.5. (#2524) +- Drop the support for Python 3.5 and use F-string in the codebase. (#2531) + +**Bug Fixes** +- Fix the scale factors for resized images without keep the aspect ratio. (#2039) +- Check if max_num > 0 before slicing in NMS. (#2486) +- Fix Deformable RoIPool when there is no instance. (#2490) +- Fix the default value of assigned labels. (#2536) +- Fix the evaluation of Cityscapes. (#2578) + +**New Features** +- Add deep_stem and avg_down option to ResNet, i.e., support ResNetV1d. (#2252) +- Add L1 loss. (#2376) +- Support both polygon and bitmap for instance masks. (#2353, #2540) +- Support CPU mode for inference. 
(#2385) +- Add optimizer constructor for complicated configuration of optimizers. (#2397, #2488) +- Implement PAFPN. (#2392) +- Support empty tensor input for some modules. (#2280) +- Support for custom dataset classes without overriding it. (#2408, #2443) +- Support to train subsets of coco dataset. (#2340) +- Add iou_calculator to potentially support more IoU calculation methods. (2405) +- Support class wise mean AP (was removed in the last version). (#2459) +- Add option to save the testing result images. (#2414) +- Support MomentumUpdaterHook. (#2571) +- Add a demo to inference a single image. (#2605) + +### v1.1.0 (24/2/2020) + +**Highlights** +- Dataset evaluation is rewritten with a unified api, which is used by both evaluation hooks and test scripts. +- Support new methods: [CARAFE](https://arxiv.org/abs/1905.02188). + +**Breaking Changes** +- The new MMDDP inherits from the official DDP, thus the `__init__` api is changed to be the same as official DDP. +- The `mask_head` field in HTC config files is modified. +- The evaluation and testing script is updated. +- In all transforms, instance masks are stored as a numpy array shaped (n, h, w) instead of a list of (h, w) arrays, where n is the number of instances. + +**Bug Fixes** +- Fix IOU assigners when ignore_iof_thr > 0 and there is no pred boxes. (#2135) +- Fix mAP evaluation when there are no ignored boxes. (#2116) +- Fix the empty RoI input for Deformable RoI Pooling. (#2099) +- Fix the dataset settings for multiple workflows. (#2103) +- Fix the warning related to `torch.uint8` in PyTorch 1.4. (#2105) +- Fix the inference demo on devices other than gpu:0. (#2098) +- Fix Dockerfile. (#2097) +- Fix the bug that `pad_val` is unused in Pad transform. (#2093) +- Fix the albumentation transform when there is no ground truth bbox. (#2032) + +**Improvements** +- Use torch instead of numpy for random sampling. (#2094) +- Migrate to the new MMDDP implementation in MMCV v0.3. (#2090) +- Add meta information in logs. (#2086) +- Rewrite Soft NMS with pytorch extension and remove cython as a dependency. (#2056) +- Rewrite dataset evaluation. (#2042, #2087, #2114, #2128) +- Use numpy array for masks in transforms. (#2030) + +**New Features** +- Implement "CARAFE: Content-Aware ReAssembly of FEatures". (#1583) +- Add `worker_init_fn()` in data_loader when seed is set. (#2066, #2111) +- Add logging utils. (#2035) + +### v1.0.0 (30/1/2020) + +This release mainly improves the code quality and add more docstrings. + +**Highlights** +- Documentation is online now: https://mmdetection.readthedocs.io. +- Support new models: [ATSS](https://arxiv.org/abs/1912.02424). +- DCN is now available with the api `build_conv_layer` and `ConvModule` like the normal conv layer. +- A tool to collect environment information is available for trouble shooting. + +**Bug Fixes** +- Fix the incompatibility of the latest numpy and pycocotools. (#2024) +- Fix the case when distributed package is unavailable, e.g., on Windows. (#1985) +- Fix the dimension issue for `refine_bboxes()`. (#1962) +- Fix the typo when `seg_prefix` is a list. (#1906) +- Add segmentation map cropping to RandomCrop. (#1880) +- Fix the return value of `ga_shape_target_single()`. (#1853) +- Fix the loaded shape of empty proposals. (#1819) +- Fix the mask data type when using albumentation. (#1818) + +**Improvements** +- Enhance AssignResult and SamplingResult. (#1995) +- Add ability to overwrite existing module in Registry. 
(#1982) +- Reorganize requirements and make albumentations and imagecorruptions optional. (#1969) +- Check NaN in `SSDHead`. (#1935) +- Encapsulate the DCN in ResNe(X)t into a ConvModule & Conv_layers. (#1894) +- Refactoring for mAP evaluation and support multiprocessing and logging. (#1889) +- Init the root logger before constructing Runner to log more information. (#1865) +- Split `SegResizeFlipPadRescale` into different existing transforms. (#1852) +- Move `init_dist()` to MMCV. (#1851) +- Documentation and docstring improvements. (#1971, #1938, #1869, #1838) +- Fix the color of the same class for mask visualization. (#1834) +- Remove the option `keep_all_stages` in HTC and Cascade R-CNN. (#1806) + +**New Features** +- Add two test-time options `crop_mask` and `rle_mask_encode` for mask heads. (#2013) +- Support loading grayscale images as single channel. (#1975) +- Implement "Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection". (#1872) +- Add sphinx generated docs. (#1859, #1864) +- Add GN support for flops computation. (#1850) +- Collect env info for trouble shooting. (#1812) + + +### v1.0rc1 (13/12/2019) + +The RC1 release mainly focuses on improving the user experience, and fixing bugs. + +**Highlights** +- Support new models: [FoveaBox](https://arxiv.org/abs/1904.03797), [RepPoints](https://arxiv.org/abs/1904.11490) and [FreeAnchor](https://arxiv.org/abs/1909.02466). +- Add a Dockerfile. +- Add a jupyter notebook demo and a webcam demo. +- Setup the code style and CI. +- Add lots of docstrings and unit tests. +- Fix lots of bugs. + +**Breaking Changes** +- There was a bug for computing COCO-style mAP w.r.t different scales (AP_s, AP_m, AP_l), introduced by #621. (#1679) + +**Bug Fixes** +- Fix a sampling interval bug in Libra R-CNN. (#1800) +- Fix the learning rate in SSD300 WIDER FACE. (#1781) +- Fix the scaling issue when `keep_ratio=False`. (#1730) +- Fix typos. (#1721, #1492, #1242, #1108, #1107) +- Fix the shuffle argument in `build_dataloader`. (#1693) +- Clip the proposal when computing mask targets. (#1688) +- Fix the "index out of range" bug for samplers in some corner cases. (#1610, #1404) +- Fix the NMS issue on devices other than GPU:0. (#1603) +- Fix SSD Head and GHM Loss on CPU. (#1578) +- Fix the OOM error when there are too many gt bboxes. (#1575) +- Fix the wrong keyword argument `nms_cfg` in HTC. (#1573) +- Process masks and semantic segmentation in Expand and MinIoUCrop transforms. (#1550, #1361) +- Fix a scale bug in the Non Local op. (#1528) +- Fix a bug in transforms when `gt_bboxes_ignore` is None. (#1498) +- Fix a bug when `img_prefix` is None. (#1497) +- Pass the device argument to `grid_anchors` and `valid_flags`. (#1478) +- Fix the data pipeline for test_robustness. (#1476) +- Fix the argument type of deformable pooling. (#1390) +- Fix the coco_eval when there are only two classes. (#1376) +- Fix a bug in Modulated DeformableConv when deformable_group>1. (#1359) +- Fix the mask cropping in RandomCrop. (#1333) +- Fix zero outputs in DeformConv when not running on cuda:0. (#1326) +- Fix the type issue in Expand. (#1288) +- Fix the inference API. (#1255) +- Fix the inplace operation in Expand. (#1249) +- Fix the from-scratch training config. (#1196) +- Fix inplace add in RoIExtractor which cause an error in PyTorch 1.2. (#1160) +- Fix FCOS when input images has no positive sample. (#1136) +- Fix recursive imports. (#1099) + +**Improvements** +- Print the config file and mmdet version in the log. 
(#1721) +- Lint the code before compiling in Travis CI. (#1715) +- Add a probability argument for the `Expand` transform. (#1651) +- Update the PyTorch and CUDA versions in the Dockerfile. (#1615) +- Raise a warning when specifying `--validate` in non-distributed training. (#1624, #1651) +- Beautify the mAP printing. (#1614) +- Add a pre-commit hook. (#1536) +- Add the argument `in_channels` to backbones. (#1475) +- Add lots of docstrings and unit tests, thanks to [@Erotemic](https://github.com/Erotemic). (#1603, #1517, #1506, #1505, #1491, #1479, #1477, #1475, #1474) +- Add support for multi-node distributed testing when there is no shared storage. (#1399) +- Optimize Dockerfile to reduce the image size. (#1306) +- Update new results of HRNet. (#1284, #1182) +- Add an argument `no_norm_on_lateral` in FPN. (#1240) +- Test the compiling in CI. (#1235) +- Move docs to a separate folder. (#1233) +- Add a Jupyter notebook demo. (#1158) +- Support different types of datasets for training. (#1133) +- Use int64_t instead of long in CUDA kernels. (#1131) +- Support non-square RoIs for bbox and mask heads. (#1128) +- Manually add type promotion to make it compatible with PyTorch 1.2. (#1114) +- Allow a validation dataset for computing validation loss. (#1093) +- Use `.scalar_type()` instead of `.type()` to suppress some warnings. (#1070) + +**New Features** +- Add an option `--with_ap` to compute the AP for each class. (#1549) +- Implement "FreeAnchor: Learning to Match Anchors for Visual Object Detection". (#1391) +- Support [Albumentations](https://github.com/albumentations-team/albumentations) for augmentations in the data pipeline. (#1354) +- Implement "FoveaBox: Beyond Anchor-based Object Detector". (#1339) +- Support horizontal and vertical flipping. (#1273, #1115) +- Implement "RepPoints: Point Set Representation for Object Detection". (#1265) +- Add test-time augmentation to HTC and Cascade R-CNN. (#1251) +- Add a COCO result analysis tool. (#1228) +- Add Dockerfile. (#1168) +- Add a webcam demo. (#1155, #1150) +- Add a FLOPs counter. (#1127) +- Allow arbitrary layer order for ConvModule. (#1078) + + +### v1.0rc0 (27/07/2019) +- Implement lots of new methods and components (Mixed Precision Training, HTC, Libra R-CNN, Guided Anchoring, Empirical Attention, Mask Scoring R-CNN, Grid R-CNN (Plus), GHM, GCNet, FCOS, HRNet, Weight Standardization, etc.). Thanks to all collaborators! +- Support two additional datasets: WIDER FACE and Cityscapes. +- Refactor the loss APIs to make it more flexible to adopt different losses and related hyper-parameters. +- Speed up multi-GPU testing. +- Integrate all compiling and installing in a single script. + +### v0.6.0 (14/04/2019) +- Up to 30% speedup compared to the model zoo. +- Support both PyTorch stable and nightly versions. +- Replace NMS and SigmoidFocalLoss with PyTorch CUDA extensions. + +### v0.6rc0 (06/02/2019) +- Migrate to PyTorch 1.0. + +### v0.5.7 (06/02/2019) +- Add support for Deformable ConvNet v2. (Many thanks to the authors and [@chengdazhi](https://github.com/chengdazhi)) +- This is the last release based on PyTorch 0.4.1. + +### v0.5.6 (17/01/2019) +- Add support for Group Normalization. +- Unify RPNHead and single-stage heads (RetinaHead, SSDHead) with AnchorHead. + +### v0.5.5 (22/12/2018) +- Add SSD for COCO and PASCAL VOC. +- Add ResNeXt backbones and detection models. +- Refactor Samplers/Assigners and add OHEM. +- Add VOC dataset and evaluation scripts. + +### v0.5.4 (27/11/2018) +- Add SingleStageDetector and RetinaNet. 
+ +### v0.5.3 (26/11/2018) +- Add Cascade R-CNN and Cascade Mask R-CNN. +- Add support for Soft-NMS in config files. + +### v0.5.2 (21/10/2018) +- Add support for custom datasets. +- Add a script to convert PASCAL VOC annotations to the expected format. + +### v0.5.1 (20/10/2018) +- Add BBoxAssigner and BBoxSampler; the `train_cfg` field in config files is restructured. +- `ConvFCRoIHead` / `SharedFCRoIHead` are renamed to `ConvFCBBoxHead` / `SharedFCBBoxHead` for consistency. diff --git a/thirdparty/mmdetection/docs/compatibility.md b/thirdparty/mmdetection/docs/compatibility.md new file mode 100644 index 0000000000000000000000000000000000000000..b8520a859d97d892dad33c601258ba315619e4f7 --- /dev/null +++ b/thirdparty/mmdetection/docs/compatibility.md @@ -0,0 +1,79 @@ +# Compatibility with MMDetection 1.x + +MMDetection 2.0 went through a major refactoring and addresses many legacy issues. It is not compatible with the 1.x version, i.e., running inference with the same model weights in these two versions will produce different results. Thus, MMDetection 2.0 re-benchmarks all the models and provides their links and logs in the model zoo. + +The major differences are fourfold: coordinate system, codebase conventions, training hyperparameters, and modular design. + +## Coordinate System +The new coordinate system is consistent with [Detectron2](https://github.com/facebookresearch/detectron2/) and treats the center of the top-left pixel as (0, 0) rather than the top-left corner of that pixel. +Accordingly, the system interprets the coordinates in COCO bounding box and segmentation annotations as coordinates in the range `[0, width]` or `[0, height]`. +This modification affects all the computation related to bbox and pixel selection, +and is more natural and accurate. + +- The height and width of a box with corners (x1, y1) and (x2, y2) in the new coordinate system are computed as `width = x2 - x1` and `height = y2 - y1` (see the sketch at the end of this section). +In MMDetection 1.x and previous versions, a "+ 1" was added to both the height and width. +This modification affects three aspects: + + 1. Box transformation and encoding/decoding in regression. + 2. IoU calculation. This affects the matching process between ground truth and bounding boxes and the NMS process. The effect on compatibility is negligible, though. + 3. The corners of a bounding box are floats and no longer quantized. This should provide more accurate bounding box results. This also means bounding boxes and RoIs are no longer required to have a minimum size of 1; the effect of this is small, though. + +- The anchors are center-aligned to feature grid points and are floats. +In MMDetection 1.x and previous versions, the anchors are of `int` type and not center-aligned. +This affects the anchor generation in RPN and all the anchor-based methods. + +- ROIAlign is better aligned with the image coordinate system. The new implementation is adopted from [Detectron2](https://github.com/facebookresearch/detectron2/tree/master/detectron2/layers/csrc/ROIAlign). +The RoIs are shifted by half a pixel by default when they are used to crop RoI features, compared to MMDetection 1.x. +The old behavior is still available by setting `aligned=False` instead of `aligned=True`. + +- Mask cropping and pasting are more accurate. + + 1. We use the new RoIAlign to crop mask targets. In MMDetection 1.x, the bounding box is quantized before it is used to crop the mask target, and the crop process is implemented in numpy. In the new implementation, the bounding box for cropping is not quantized and is sent directly to RoIAlign. This accelerates training by a large margin (~0.1 s per iteration, ~2 hours when training Mask R-CNN R-50 for the 1x schedule) and should be more accurate. + + 2. In MMDetection 2.0, the "`paste_mask()`" function is different and should be more accurate than that in previous versions. This change follows the modification in [Detectron2](https://github.com/facebookresearch/detectron2/blob/master/detectron2/structures/masks.py) and can improve mask AP on COCO by ~0.5% absolute.
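+ +For intuition, the following is a minimal, self-contained sketch of the box size computation under the two conventions (illustrative code, not MMDetection source): + +```python +import numpy as np + +box = np.array([10.0, 20.0, 50.0, 80.0])  # (x1, y1, x2, y2) + +# MMDetection 2.0 / Detectron2 convention: no "+ 1" +w_new, h_new = box[2] - box[0], box[3] - box[1]          # 40.0, 60.0 + +# MMDetection 1.x convention: "+ 1" added to both +w_old, h_old = box[2] - box[0] + 1, box[3] - box[1] + 1  # 41.0, 61.0 +```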
+ +## Codebase Conventions + +- MMDetection 2.0 changes the order of class labels to reduce unused parameters in the regression and mask branches more naturally (without +1 and -1). +This causes all the classification layers of the model to have a different ordering of class labels. The final layers of the regression branch and mask head no longer keep K+1 channels for K categories, and their class orders are consistent with the classification branch. A small conversion sketch follows this list. + + - In MMDetection 2.0, label "K" means background, and labels [0, K-1] correspond to the K = num_categories object categories. + + - In MMDetection 1.x and previous versions, label "0" means background, and labels [1, K] correspond to the K categories. + + - **Note**: The class order of the softmax RPN is still the same as that in 1.x in versions <= 2.4.0, while the sigmoid RPN is not affected. The class orders in all heads are unified since MMDetection v2.5.0. + +- Low-quality matching in R-CNN is not used. In MMDetection 1.x and previous versions, the `max_iou_assigner` will match low-quality boxes for each ground truth box in both RPN and R-CNN training. We observed that this sometimes assigns a suboptimal GT box to some bounding boxes, +thus MMDetection 2.0 does not allow low-quality matching by default in R-CNN training in the new system. This sometimes may slightly improve the box AP (~0.1% absolute). + +- Separate scale factors for width and height. In MMDetection 1.x and previous versions, the scale factor is a single float in `keep_ratio=True` mode. This is slightly inaccurate because the scale factors for width and height have a slight difference. MMDetection 2.0 adopts separate scale factors for width and height; the improvement in AP is ~0.1% absolute. + +- Config name conventions are changed. MMDetection V2.0 adopts the new naming convention to maintain the gradually growing model zoo as the following: ``` [model]_(model setting)_[backbone]_[neck]_(norm setting)_(misc)_(gpu x batch)_[schedule]_[dataset].py, ``` where the (`misc`) includes DCN and GCBlock, etc. More details are illustrated in the [documentation for config](config.md). + +- MMDetection V2.0 uses new ResNet Caffe backbones to reduce warnings when loading pre-trained models. Most of the new backbones' weights are the same as the former ones, except that they do not have `conv.bias` and they use a different `img_norm_cfg`. Thus, the new backbones will not cause warnings about unexpected keys.
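+ +As a quick illustration of the label convention change, here is a hedged sketch (the helper below is hypothetical, not part of MMDetection): + +```python +def convert_label_1x_to_2x(label: int, num_classes: int) -> int: +    """Map a 1.x label (0 = background, 1..K = objects) to the +    2.0 convention (0..K-1 = objects, K = background).""" +    return num_classes if label == 0 else label - 1 + +# e.g., with K = 80 COCO categories: +assert convert_label_1x_to_2x(0, 80) == 80  # background moves to K +assert convert_label_1x_to_2x(1, 80) == 0   # first category becomes 0 +```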
+ +## Training Hyperparameters + +The change in training hyperparameters does not affect +model-level compatibility but slightly improves the performance. The major ones are: + +- The number of proposals after NMS is changed from 2000 to 1000 by setting `nms_post=1000` and `max_num=1000`. +This slightly improves both mask AP and bbox AP by ~0.2% absolute. + +- The default box regression losses for Mask R-CNN, Faster R-CNN and RetinaNet are changed from smooth L1 loss to L1 loss. This leads to an overall improvement in box AP (~0.6% absolute). However, using L1 loss for other methods such as Cascade R-CNN and HTC does not improve the performance, so we keep the original settings for these methods. + +- The sampling number of the RoIAlign layer is set to 0 for simplicity. This leads to a slight improvement in mask AP (~0.2% absolute). + +- The default setting does not use gradient clipping anymore during training, for faster training speed. This does not degrade the performance of most models. For some models such as RepPoints we keep using gradient clipping to stabilize the training process and to obtain better performance. + +- The default warmup ratio is changed from 1/3 to 0.001 for a smoother warm-up process, since gradient clipping is usually not used. The effect was found negligible during our re-benchmarking, though. + +## Upgrade Models from 1.x to 2.0 + +To convert models trained by MMDetection V1.x to MMDetection V2.0, users can use the script `tools/upgrade_model_version.py`. +The converted models can be run in MMDetection V2.0 with slightly dropped performance (less than 1% AP absolute). +Details can be found in `configs/legacy`. diff --git a/thirdparty/mmdetection/docs/conf.py b/thirdparty/mmdetection/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..1c60d9c5b332ad630a53cadd413b4860aca12713 --- /dev/null +++ b/thirdparty/mmdetection/docs/conf.py @@ -0,0 +1,90 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import subprocess +import sys + +sys.path.insert(0, os.path.abspath('..')) + +# -- Project information ----------------------------------------------------- + +project = 'MMDetection' +copyright = '2018-2020, OpenMMLab' +author = 'MMDetection Authors' +version_file = '../mmdet/version.py' + + +def get_version(): +    with open(version_file, 'r') as f: +        exec(compile(f.read(), version_file, 'exec')) +    return locals()['__version__'] + + +# The full version, including alpha/beta/rc tags +release = get_version() + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ +    'sphinx.ext.autodoc', +    'sphinx.ext.napoleon', +    'sphinx.ext.viewcode', +    'recommonmark', +    'sphinx_markdown_tables', +] + +autodoc_mock_imports = [ +    'matplotlib', 'pycocotools', 'terminaltables', 'mmdet.version', 'mmcv.ops' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffixes as a list of strings: +# +source_suffix = { +    '.rst': 'restructuredtext', +    '.md': 'markdown', +} + +# The master toctree document. +master_doc = 'index' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +def builder_inited_handler(app): +    subprocess.run(['./stat.py']) + + +def setup(app): +    app.connect('builder-inited', builder_inited_handler) diff --git a/thirdparty/mmdetection/docs/conventions.md b/thirdparty/mmdetection/docs/conventions.md new file mode 100644 index 0000000000000000000000000000000000000000..4b65229303bb9879ead4dbde2e45b02d6f66e13a --- /dev/null +++ b/thirdparty/mmdetection/docs/conventions.md @@ -0,0 +1,28 @@ +# Conventions + +Please check the following conventions if you would like to modify MMDetection for your own project. + +## Loss +In MMDetection, a `dict` containing losses and metrics will be returned by `model(**data)`. + +For example, in bbox head, +```python +class BBoxHead(nn.Module): +    ... +    def loss(self, ...): +        losses = dict() +        # classification loss +        losses['loss_cls'] = self.loss_cls(...) +        # classification accuracy +        losses['acc'] = accuracy(...) +        # bbox regression loss +        losses['loss_bbox'] = self.loss_bbox(...) +        return losses +``` +`bbox_head.loss()` will be called during the model's forward pass. +The returned dict contains `'loss_bbox'`, `'loss_cls'`, and `'acc'`. +Only `'loss_bbox'` and `'loss_cls'` are used during back propagation; +`'acc'` is only used as a metric to monitor the training process. + +By default, only values whose keys contain `'loss'` will be back propagated. +This behavior could be changed by modifying `BaseDetector.train_step()`.
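+ +To make the convention concrete, here is a minimal sketch that mimics this reduction (an illustrative assumption, not the actual `train_step()` implementation): + +```python +def parse_losses(losses: dict): +    """Sum values whose keys contain 'loss'; everything else is logged only.""" +    total_loss = sum(v for k, v in losses.items() if 'loss' in k) +    log_vars = {k: float(v) for k, v in losses.items()}  # e.g., 'acc' is only logged +    return total_loss, log_vars +```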
diff --git a/thirdparty/mmdetection/docs/faq.md b/thirdparty/mmdetection/docs/faq.md new file mode 100644 index 0000000000000000000000000000000000000000..b0aedd3fdcbfcd7321d5767dbfd4738832fcb726 --- /dev/null +++ b/thirdparty/mmdetection/docs/faq.md @@ -0,0 +1,80 @@ +We list some common troubles faced by many users and their corresponding solutions here. Feel free to enrich the list if you find any frequent issues and have ways to help others solve them. If the contents here do not cover your issue, please create an issue using the [provided templates](https://github.com/open-mmlab/mmdetection/blob/master/.github/ISSUE_TEMPLATE/error-report.md) and make sure you fill in all required information in the template. + +## MMCV Installation + +- Compatibility issue between MMCV and MMDetection; "ConvWS is already registered in conv layer"; "AssertionError: MMCV==xxx is used but incompatible. Please install mmcv>=xxx, <=xxx." + +  Please install the correct version of MMCV for your version of MMDetection following the [installation instruction](https://mmdetection.readthedocs.io/en/latest/get_started.html#installation). + +- "No module named 'mmcv.ops'"; "No module named 'mmcv._ext'". + +  1. Uninstall existing mmcv in the environment using `pip uninstall mmcv`. +  2. Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation). + +## PyTorch/CUDA Environment + +- "RTX 30 series card fails when building MMCV or MMDet" + +  1. Temporary work-around: do `MMCV_WITH_OPS=1 MMCV_CUDA_ARGS='-gencode=arch=compute_80,code=sm_80' pip install -e .`. +  The common issue is `nvcc fatal : Unsupported gpu architecture 'compute_86'`. This means that the compiler should optimize for sm_86, i.e., NVIDIA 30 series cards, but such optimizations are not supported by CUDA toolkit 11.0. +  This work-around modifies the compile flag by adding `MMCV_CUDA_ARGS='-gencode=arch=compute_80,code=sm_80'`, which tells `nvcc` to optimize for **sm_80**, i.e., the NVIDIA A100. Although the A100 is different from the 30 series cards, they use a similar Ampere architecture. This may hurt performance, but it works. +  2. PyTorch developers have indicated that the default compiler flags should be fixed by [pytorch/pytorch#47585](https://github.com/pytorch/pytorch/pull/47585), so using PyTorch-nightly may also be able to solve the problem, though we have not tested it yet. + +- "invalid device function" or "no kernel image is available for execution". + +  1. Check if your CUDA runtime version (under `/usr/local/`), `nvcc --version`, and `conda list cudatoolkit` versions match. +  2. Run `python mmdet/utils/collect_env.py` to check whether PyTorch, torchvision, and MMCV are built for the correct GPU architecture. +  You may need to set `TORCH_CUDA_ARCH_LIST` to reinstall MMCV. +  The GPU arch table could be found [here](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list), +  i.e., run `TORCH_CUDA_ARCH_LIST=7.0 pip install mmcv-full` to build MMCV for Volta GPUs. +  The compatibility issue could happen when using old GPUs, e.g., Tesla K80 (3.7) on Colab. +  3. Check whether the running environment is the same as that in which mmcv/mmdet was compiled. +  For example, you may compile mmcv using CUDA 10.0 but run it in a CUDA 9.0 environment. + +- "undefined symbol" or "cannot open xxx.so". + +  1. If those symbols are CUDA/C++ symbols (e.g., libcudart.so or GLIBCXX), check whether the CUDA/GCC runtimes are the same as those used for compiling mmcv, +  i.e., run `python mmdet/utils/collect_env.py` to see if `"MMCV Compiler"`/`"MMCV CUDA Compiler"` is the same as `"GCC"`/`"CUDA_HOME"`. +  2. If those symbols are PyTorch symbols (e.g., symbols containing caffe, aten, and TH), check whether the PyTorch version is the same as that used for compiling mmcv. +  3. Run `python mmdet/utils/collect_env.py` to check whether PyTorch, torchvision, and MMCV are built in and running in the same environment. + +- setuptools.sandbox.UnpickleableException: DistutilsSetupError("each element of 'ext_modules' option must be an Extension instance or 2-tuple") + +  1. If you are using miniconda rather than anaconda, check whether Cython is installed as indicated in [#3379](https://github.com/open-mmlab/mmdetection/issues/3379). +  You need to manually install Cython first and then run `pip install -r requirements.txt`. +  2. You may also need to check the compatibility between the `setuptools`, `Cython`, and `PyTorch` in your environment. + +- "Segmentation fault". +  1. Check your GCC version and use GCC 5.4. This is usually caused by an incompatibility between PyTorch and the environment (e.g., GCC < 4.9 for PyTorch). We also recommend users avoid GCC 5.5, because many reports indicate that GCC 5.5 causes "segmentation fault", and simply changing it to GCC 5.4 could solve the problem. + +  2. Check whether PyTorch is correctly installed and can use CUDA ops, e.g., type the following command in your terminal. 
+ +  ```shell +  python -c 'import torch; print(torch.cuda.is_available())' +  ``` + +  And see whether it outputs results correctly. + +  3. If PyTorch is correctly installed, check whether MMCV is correctly installed. + +  ```shell +  python -c 'import mmcv; import mmcv.ops' +  ``` + +  If MMCV is correctly installed, then the above two commands will raise no errors. + +  4. If MMCV and PyTorch are correctly installed, you may use `ipdb` or `pdb` to set breakpoints, or directly add `print` statements in the MMDetection code, to see which part leads to the segmentation fault. + +## Training + +- "Loss goes NaN" +  1. Check if the dataset annotations are valid: zero-size bounding boxes will cause the regression loss to be NaN due to the commonly used transformation for box regression. Small boxes (width or height smaller than 1) will also cause this problem after data augmentation (e.g., instaboost). So check the data, try to filter out those zero-size boxes, and skip risky augmentations on the small boxes when you face the problem. +  2. Reduce the learning rate: the learning rate might be too large, e.g., due to a change of batch size. You can rescale it to a value that trains the model stably. +  3. Extend the warmup iterations: some models are sensitive to the learning rate at the start of the training. You can extend the warmup iterations, e.g., change the `warmup_iters` from 500 to 1000 or 2000. +  4. Add gradient clipping: some models require gradient clipping to stabilize the training process. You can add gradient clipping to avoid gradients that are too large. + +- "GPU out of memory" +  1. There are some scenarios where there is a large number of ground truth boxes, which may cause OOM during target assignment. +  You can set `gpu_assign_thr=N` in the config of the assigner so that the assigner will calculate box overlaps on CPU when there are more than N GT boxes (see the config sketch below). +  2. Set `with_cp=True` in the backbone. This uses the sublinear strategy in PyTorch to reduce GPU memory cost in the backbone. +  3. Try mixed precision training following the examples in `config/fp16`. The `loss_scale` might need further tuning for different models.
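+ +As a reference for the OOM tip above, a hedged config sketch follows (the assigner fields mirror common MMDetection configs; the exact values are illustrative): + +```python +# Fragment of a train_cfg: compute IoU on CPU when an image has > 100 GT boxes. +# Thresholds below are illustrative defaults, not recommendations. +train_cfg = dict( +    rpn=dict( +        assigner=dict( +            type='MaxIoUAssigner', +            pos_iou_thr=0.7, +            neg_iou_thr=0.3, +            min_pos_iou=0.3, +            gpu_assign_thr=100))) +```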
diff --git a/thirdparty/mmdetection/docs/get_started.md b/thirdparty/mmdetection/docs/get_started.md new file mode 100644 index 0000000000000000000000000000000000000000..a2bfb45b633ec18c5ab0517a6c36c6907f1d0108 --- /dev/null +++ b/thirdparty/mmdetection/docs/get_started.md @@ -0,0 +1,188 @@ +## Prerequisites + +- Linux or macOS (Windows is in experimental support) +- Python 3.6+ +- PyTorch 1.3+ +- CUDA 9.2+ (If you build PyTorch from source, CUDA 9.0 is also compatible) +- GCC 5+ +- [MMCV](https://mmcv.readthedocs.io/en/latest/#installation) + +The compatible MMDetection and MMCV versions are as below. Please install the correct version of MMCV to avoid installation issues. + +| MMDetection version | MMCV version | +|:-------------------:|:-------------------:| +| master | mmcv-full>=1.1.5, <1.3| +| 2.7.0 | mmcv-full>=1.1.5, <1.3| +| 2.6.0 | mmcv-full>=1.1.5, <1.3| +| 2.5.0 | mmcv-full>=1.1.5, <1.3| +| 2.4.0 | mmcv-full>=1.1.1, <1.3| +| 2.3.0 | mmcv-full==1.0.5| +| 2.3.0rc0 | mmcv-full>=1.0.2 | +| 2.2.1 | mmcv==0.6.2 | +| 2.2.0 | mmcv==0.6.2 | +| 2.1.0 | mmcv>=0.5.9, <=0.6.1| +| 2.0.0 | mmcv>=0.5.1, <=0.5.8| + +Note: You need to run `pip uninstall mmcv` first if you have mmcv installed. +If mmcv and mmcv-full are both installed, there will be a `ModuleNotFoundError`. + +## Installation + +1. Create a conda virtual environment and activate it. + +    ```shell +    conda create -n open-mmlab python=3.7 -y +    conda activate open-mmlab +    ``` + +2. Install PyTorch and torchvision following the [official instructions](https://pytorch.org/), e.g., + +    ```shell +    conda install pytorch torchvision -c pytorch +    ``` + +    Note: Make sure that your compilation CUDA version and runtime CUDA version match. +    You can check the supported CUDA version for precompiled packages on the [PyTorch website](https://pytorch.org/). + +    `E.g.1` If you have CUDA 10.1 installed under `/usr/local/cuda` and would like to install +    PyTorch 1.5, you need to install the prebuilt PyTorch with CUDA 10.1. + +    ```shell +    conda install pytorch cudatoolkit=10.1 torchvision -c pytorch +    ``` + +    `E.g.2` If you have CUDA 9.2 installed under `/usr/local/cuda` and would like to install +    PyTorch 1.3.1, you need to install the prebuilt PyTorch with CUDA 9.2. + +    ```shell +    conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch +    ``` + +    If you build PyTorch from source instead of installing the prebuilt package, +    you can use more CUDA versions such as 9.0. + +3. Install mmcv-full. We recommend installing the pre-built package as below. + +    ```shell +    pip install mmcv-full==latest+torch1.6.0+cu101 -f https://download.openmmlab.com/mmcv/dist/index.html +    ``` + +    See [here](https://github.com/open-mmlab/mmcv#install-with-pip) for different versions of MMCV compatible with different PyTorch and CUDA versions. +    Optionally, you can choose to compile mmcv from source with the following commands + +    ```shell +    git clone https://github.com/open-mmlab/mmcv.git +    cd mmcv +    MMCV_WITH_OPS=1 pip install -e .  # package mmcv-full will be installed after this step +    cd .. +    ``` + +    Or directly run + +    ```shell +    pip install mmcv-full +    ``` + +4. Clone the MMDetection repository. + +    ```shell +    git clone https://github.com/open-mmlab/mmdetection.git +    cd mmdetection +    ``` + +5. Install build requirements and then install MMDetection. + +    ```shell +    pip install -r requirements/build.txt +    pip install -v -e .  # or "python setup.py develop" +    ``` + +Note: + +a. Following the above instructions, MMDetection is installed in `dev` mode; any local modifications made to the code will take effect without the need to reinstall it. + +b. If you would like to use `opencv-python-headless` instead of `opencv-python`, +you can install it before installing MMCV. + +c. Some dependencies are optional. Simply running `pip install -v -e .` will +only install the minimum runtime requirements. To use optional dependencies like `albumentations` and `imagecorruptions` either install them manually with `pip install -r requirements/optional.txt` or specify desired extras when calling `pip` (e.g. `pip install -v -e .[optional]`). Valid keys for the extras field are: `all`, `tests`, `build`, and `optional`. + +### Install with CPU only + +The code can be built for a CPU-only environment (where CUDA isn't available). + +In CPU mode you can run demo/webcam_demo.py, for example. +However, some functionality is missing in this mode: + +- Deformable Convolution +- Deformable ROI pooling +- CARAFE: Content-Aware ReAssembly of FEatures +- nms_cuda +- sigmoid_focal_loss_cuda + +So if you try to run inference with a model containing deformable convolution, you will get an error. + +### Another option: Docker Image + +We provide a [Dockerfile](https://github.com/open-mmlab/mmdetection/blob/master/docker/Dockerfile) to build an image. 
Ensure that you are using [Docker version](https://docs.docker.com/engine/install/) >= 19.03. + +```shell +# build an image with PyTorch 1.6, CUDA 10.1 +docker build -t mmdetection docker/ +``` + +Run it with + +```shell +docker run --gpus all --shm-size=8g -it -v {DATA_DIR}:/mmdetection/data mmdetection +``` + +### A from-scratch setup script + +Assuming that you already have CUDA 10.1 installed, here is a full script for setting up MMDetection with conda. + +```shell +conda create -n open-mmlab python=3.7 -y +conda activate open-mmlab + +conda install pytorch==1.6.0 torchvision==0.7.0 cudatoolkit=10.1 -c pytorch -y + +# install the latest mmcv +pip install mmcv-full==latest+torch1.6.0+cu101 -f https://download.openmmlab.com/mmcv/dist/index.html + +# install mmdetection +git clone https://github.com/open-mmlab/mmdetection.git +cd mmdetection +pip install -r requirements/build.txt +pip install -v -e . +``` + +### Developing with multiple MMDetection versions + +The train and test scripts already modify the `PYTHONPATH` to ensure the scripts use the MMDetection in the current directory. + +To use the default MMDetection installed in the environment rather than the one you are working with, you can remove the following line from those scripts: + +```shell +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH +``` + +## Verification + +To verify whether MMDetection and the required environment are installed correctly, we can run sample Python code to initialize a detector and run inference on a demo image: + +```python +from mmdet.apis import init_detector, inference_detector + +config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +device = 'cuda:0' +# init a detector +model = init_detector(config_file, device=device) +# inference the demo image +inference_detector(model, 'demo/demo.jpg') +``` + +The above code is supposed to run successfully once you finish the installation. diff --git a/thirdparty/mmdetection/docs/index.rst b/thirdparty/mmdetection/docs/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..5b30e24133956f4e46a00deb07c74e57ebff0154 --- /dev/null +++ b/thirdparty/mmdetection/docs/index.rst @@ -0,0 +1,50 @@ +Welcome to MMDetection's documentation! +======================================= + +.. toctree:: +   :maxdepth: 2 +   :caption: Get Started + +   get_started.md +   modelzoo_statistics.md +   model_zoo.md + +.. toctree:: +   :maxdepth: 2 +   :caption: Quick Run + +   1_exist_data_model.md +   2_new_data_model.md + +.. toctree:: +   :maxdepth: 2 +   :caption: Tutorials + +   tutorials/index.rst + +.. toctree:: +   :maxdepth: 2 +   :caption: Useful Tools and Scripts + +   useful_tools.md + +.. toctree:: +   :maxdepth: 2 +   :caption: Notes + +   conventions.md +   compatibility.md +   projects.md +   changelog.md +   faq.md + +.. toctree:: +   :caption: API Reference + +   api.rst + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`search` diff --git a/thirdparty/mmdetection/docs/make.bat b/thirdparty/mmdetection/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..922152e96a04a242e6fc40f124261d74890617d8 --- /dev/null +++ b/thirdparty/mmdetection/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/thirdparty/mmdetection/docs/model_zoo.md b/thirdparty/mmdetection/docs/model_zoo.md new file mode 100644 index 0000000000000000000000000000000000000000..95bba2a7454aae7ba5eadc4de2d423779ba4a390 --- /dev/null +++ b/thirdparty/mmdetection/docs/model_zoo.md @@ -0,0 +1,250 @@ +# Benchmark and Model Zoo + +## Mirror sites + +We use AWS as the main site to host our model zoo, and maintain a mirror on Aliyun. +You can replace `https://s3.ap-northeast-2.amazonaws.com/open-mmlab` with `https://open-mmlab.oss-cn-beijing.aliyuncs.com` in model URLs. + +## Common settings + +- All models were trained on `coco_2017_train`, and tested on `coco_2017_val`. +- We use distributed training. +- All pytorch-style pretrained backbones on ImageNet are from the PyTorch model zoo; caffe-style pretrained backbones are converted from the newly released models from Detectron2. +- For fair comparison with other codebases, we report the GPU memory as the maximum value of `torch.cuda.max_memory_allocated()` for all 8 GPUs. Note that this value is usually less than what `nvidia-smi` shows. +- We report the inference time as the total time of network forwarding and post-processing, excluding the data loading time. Results are obtained with the script [benchmark.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/benchmark.py), which computes the average time on 2000 images. + + +## Baselines + +### RPN + +Please refer to [RPN](https://github.com/open-mmlab/mmdetection/blob/master/configs/rpn) for details. + +### Faster R-CNN + +Please refer to [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn) for details. + +### Mask R-CNN + +Please refer to [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn) for details. + +### Fast R-CNN (with pre-computed proposals) + +Please refer to [Fast R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/fast_rcnn) for details. + +### RetinaNet + +Please refer to [RetinaNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/retinanet) for details. + +### Cascade R-CNN and Cascade Mask R-CNN + +Please refer to [Cascade R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/cascade_rcnn) for details. + +### Hybrid Task Cascade (HTC) + +Please refer to [HTC](https://github.com/open-mmlab/mmdetection/blob/master/configs/htc) for details. + +### SSD + +Please refer to [SSD](https://github.com/open-mmlab/mmdetection/blob/master/configs/ssd) for details. + +### Group Normalization (GN) + +Please refer to [Group Normalization](https://github.com/open-mmlab/mmdetection/blob/master/configs/gn) for details. + +### Weight Standardization + +Please refer to [Weight Standardization](https://github.com/open-mmlab/mmdetection/blob/master/configs/gn+ws) for details. + +### Deformable Convolution v2 + +Please refer to [Deformable Convolutional Networks](https://github.com/open-mmlab/mmdetection/blob/master/configs/dcn) for details. 
+ +### CARAFE: Content-Aware ReAssembly of FEatures +Please refer to [CARAFE](https://github.com/open-mmlab/mmdetection/blob/master/configs/carafe) for details. + +### Instaboost + +Please refer to [Instaboost](https://github.com/open-mmlab/mmdetection/blob/master/configs/instaboost) for details. + +### Libra R-CNN + +Please refer to [Libra R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/libra_rcnn) for details. + +### Guided Anchoring + +Please refer to [Guided Anchoring](https://github.com/open-mmlab/mmdetection/blob/master/configs/guided_anchoring) for details. + +### FCOS + +Please refer to [FCOS](https://github.com/open-mmlab/mmdetection/blob/master/configs/fcos) for details. + +### FoveaBox + +Please refer to [FoveaBox](https://github.com/open-mmlab/mmdetection/blob/master/configs/foveabox) for details. + +### RepPoints + +Please refer to [RepPoints](https://github.com/open-mmlab/mmdetection/blob/master/configs/reppoints) for details. + +### FreeAnchor + +Please refer to [FreeAnchor](https://github.com/open-mmlab/mmdetection/blob/master/configs/free_anchor) for details. + +### Grid R-CNN (plus) + +Please refer to [Grid R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/grid_rcnn) for details. + +### GHM + +Please refer to [GHM](https://github.com/open-mmlab/mmdetection/blob/master/configs/ghm) for details. + +### GCNet + +Please refer to [GCNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/gcnet) for details. + +### HRNet +Please refer to [HRNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/hrnet) for details. + +### Mask Scoring R-CNN + +Please refer to [Mask Scoring R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/ms_rcnn) for details. + +### Train from Scratch + +Please refer to [Rethinking ImageNet Pre-training](https://github.com/open-mmlab/mmdetection/blob/master/configs/scratch) for details. + +### NAS-FPN +Please refer to [NAS-FPN](https://github.com/open-mmlab/mmdetection/blob/master/configs/nas_fpn) for details. + +### ATSS +Please refer to [ATSS](https://github.com/open-mmlab/mmdetection/blob/master/configs/atss) for details. + +### FSAF +Please refer to [FSAF](https://github.com/open-mmlab/mmdetection/blob/master/configs/fsaf) for details. + +### RegNetX +Please refer to [RegNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/regnet) for details. + +### Res2Net +Please refer to [Res2Net](https://github.com/open-mmlab/mmdetection/blob/master/configs/res2net) for details. + +### GRoIE +Please refer to [GRoIE](https://github.com/open-mmlab/mmdetection/blob/master/configs/groie) for details. + +### Dynamic R-CNN +Please refer to [Dynamic R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/dynamic_rcnn) for details. + +### PointRend +Please refer to [PointRend](https://github.com/open-mmlab/mmdetection/blob/master/configs/point_rend) for details. + +### DetectoRS +Please refer to [DetectoRS](https://github.com/open-mmlab/mmdetection/blob/master/configs/detectors) for details. + +### Generalized Focal Loss +Please refer to [Generalized Focal Loss](https://github.com/open-mmlab/mmdetection/blob/master/configs/gfl) for details. + +### CornerNet +Please refer to [CornerNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/cornernet) for details. + +### YOLOv3 +Please refer to [YOLOv3](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolo) for details. 
+ +### PAA +Please refer to [PAA](https://github.com/open-mmlab/mmdetection/blob/master/configs/paa) for details. + +### SABL +Please refer to [SABL](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl) for details. + +### CentripetalNet +Please refer to [CentripetalNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/centripetalnet) for details. + +### ResNeSt +Please refer to [ResNeSt](https://github.com/open-mmlab/mmdetection/blob/master/configs/resnest) for details. + +### DETR +Please refer to [DETR](https://github.com/open-mmlab/mmdetection/blob/master/configs/detr) for details. + +### Other datasets + +We also benchmark some methods on [PASCAL VOC](https://github.com/open-mmlab/mmdetection/blob/master/configs/pascal_voc), [Cityscapes](https://github.com/open-mmlab/mmdetection/blob/master/configs/cityscapes) and [WIDER FACE](https://github.com/open-mmlab/mmdetection/blob/master/configs/wider_face). + +### Pre-trained Models + +We also train [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn) and [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn) using ResNet-50 and [RegNetX-3.2G](https://github.com/open-mmlab/mmdetection/blob/master/configs/regnet) with multi-scale training and longer schedules. These models serve as strong pre-trained models for downstream tasks for convenience. + +## Speed benchmark +We compare the training speed of Mask R-CNN with some other popular frameworks (the data is copied from [detectron2](https://github.com/facebookresearch/detectron2/blob/master/docs/notes/benchmarks.md)). +For mmdetection, we benchmark with [mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py), which should have the same setting as [mask_rcnn_R_50_FPN_noaug_1x.yaml](https://github.com/facebookresearch/detectron2/blob/master/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml) of detectron2. +We also provide the [checkpoint](http://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug_compare_20200518-10127928.pth) and [training log](http://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug_20200518_105755.log.json) for reference. The throughput is computed as the average throughput in iterations 100-500 to skip GPU warmup time. + +| Implementation | Throughput (img/s) | +|----------------------|--------------------| +| [Detectron2](https://github.com/facebookresearch/detectron2) | 62 | +| [MMDetection](https://github.com/open-mmlab/mmdetection) | 61 | +| [maskrcnn-benchmark](https://github.com/facebookresearch/maskrcnn-benchmark/) | 53 | +| [tensorpack](https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN) | 50 | +| [simpledet](https://github.com/TuSimple/simpledet/) | 39 | +| [Detectron](https://github.com/facebookresearch/Detectron) | 19 | +| [matterport/Mask_RCNN](https://github.com/matterport/Mask_RCNN/) | 14 | + +## Comparison with Detectron2 + +We compare mmdetection with [Detectron2](https://github.com/facebookresearch/detectron2.git) in terms of speed and performance. +We use the commit id [185c27e](https://github.com/facebookresearch/detectron2/tree/185c27e4b4d2d4c68b5627b3765420c6d7f5a659) (30/4/2020) of detectron2. 
+For fair comparison, we install and run both frameworks on the same machine. + +### Hardware + +- 8 NVIDIA Tesla V100 (32G) GPUs +- Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz + +### Software environment + +- Python 3.7 +- PyTorch 1.4 +- CUDA 10.1 +- CUDNN 7.6.03 +- NCCL 2.4.08 + +### Performance + +| Type | Lr schd | Detectron2 | mmdetection | Download | +|--------------|---------|-------------|-------------|-------------| +| [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py) | 1x | [37.9](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml) | 38.0 | [model](http://download.openmmlab.com/mmdetection/v2.0/benchmark/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-5324cff8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/benchmark/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco_20200429_234554.log.json) | +| [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py) | 1x | [38.6 & 35.2](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml) | 38.8 & 35.4 | [model](http://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco-dbecf295.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco_20200430_054239.log.json) | +| [Retinanet](https://github.com/open-mmlab/mmdetection/blob/master/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py) | 1x | [36.5](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml) | 37.0 | [model](http://download.openmmlab.com/mmdetection/v2.0/benchmark/retinanet_r50_caffe_fpn_mstrain_1x_coco/retinanet_r50_caffe_fpn_mstrain_1x_coco-586977a0.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/benchmark/retinanet_r50_caffe_fpn_mstrain_1x_coco/retinanet_r50_caffe_fpn_mstrain_1x_coco_20200430_014748.log.json) | + + +### Training Speed + +The training speed is measured in s/iter; the lower, the better. + +| Type | Detectron2 | mmdetection | +|--------------|------------|-------------| +| Faster R-CNN | 0.210 | 0.216 | +| Mask R-CNN | 0.261 | 0.265 | +| Retinanet | 0.200 | 0.205 | + + +### Inference Speed + +The inference speed is measured in fps (img/s) on a single GPU; the higher, the better. +To be consistent with Detectron2, we report the pure inference speed (without the time of data loading). +For Mask R-CNN, we exclude the time of RLE encoding in post-processing. +We also include the officially reported speed in the parentheses, which is slightly higher +than the results tested on our server due to hardware differences. 
+ +| Type | Detectron2 | mmdetection | +|--------------|-------------|-------------| +| Faster R-CNN | 25.6 (26.3) | 22.2 | +| Mask R-CNN | 22.5 (23.3) | 19.6 | +| Retinanet | 17.8 (18.2) | 20.6 | + +### Training memory + +| Type | Detectron2 | mmdetection | +|--------------|------------|-------------| +| Faster R-CNN | 3.0 | 3.8 | +| Mask R-CNN | 3.4 | 3.9 | +| Retinanet | 3.9 | 3.4 | diff --git a/thirdparty/mmdetection/docs/projects.md b/thirdparty/mmdetection/docs/projects.md new file mode 100644 index 0000000000000000000000000000000000000000..110e1df86fcd18f970b37352799420d0b6754033 --- /dev/null +++ b/thirdparty/mmdetection/docs/projects.md @@ -0,0 +1,46 @@ +# Projects based on MMDetection + +There are many projects built upon MMDetection. +We list some of them as examples of how to extend MMDetection for your own projects. +Pull requests are also welcome. + +## Projects as an extension + +Some projects extend the boundary of MMDetection for deployment or other research fields. +They reveal the potential of what MMDetection can do. We list several of them below. + +- [OTEDetection](https://github.com/opencv/mmdetection): OpenVINO training extensions for object detection. +- [MMDetection3d](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. + +## Projects of papers + +There are also projects released with papers. +Some of the papers are published in top-tier conferences (CVPR, ICCV, and ECCV); the others are also highly influential. +To make this list also a reference for the community to develop and compare new object detection algorithms, we list them following the time order of top-tier conferences. +Methods already supported and maintained by MMDetection are not listed. + +- Overcoming Classifier Imbalance for Long-tail Object Detection with Balanced Group Softmax, CVPR2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/papers/Li_Overcoming_Classifier_Imbalance_for_Long-Tail_Object_Detection_With_Balanced_Group_CVPR_2020_paper.pdf)[[github]](https://github.com/FishYuLi/BalancedGroupSoftmax) +- Coherent Reconstruction of Multiple Humans from a Single Image, CVPR2020. [[paper]](https://jiangwenpl.github.io/multiperson/)[[github]](https://github.com/JiangWenPL/multiperson) +- Look-into-Object: Self-supervised Structure Modeling for Object Recognition, CVPR 2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/papers/Zhou_Look-Into-Object_Self-Supervised_Structure_Modeling_for_Object_Recognition_CVPR_2020_paper.pdf)[[github]](https://github.com/JDAI-CV/LIO) +- Video Panoptic Segmentation, CVPR2020. [[paper]](https://arxiv.org/abs/2006.11339)[[github]](https://github.com/mcahny/vps) +- D2Det: Towards High Quality Object Detection and Instance Segmentation, CVPR2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/html/Cao_D2Det_Towards_High_Quality_Object_Detection_and_Instance_Segmentation_CVPR_2020_paper.html)[[github]](https://github.com/JialeCao001/D2Det) +- CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2003.09119)[[github]](https://github.com/KiveeDong/CentripetalNet) +- Learning a Unified Sample Weighting Network for Object Detection, CVPR 2020. 
[[paper]](http://openaccess.thecvf.com/content_CVPR_2020/html/Cai_Learning_a_Unified_Sample_Weighting_Network_for_Object_Detection_CVPR_2020_paper.html)[[github]](https://github.com/caiqi/sample-weighting-network) +- Scale-equalizing Pyramid Convolution for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2005.03101) [[github]](https://github.com/jshilong/SEPC) +- Revisiting the Sibling Head in Object Detector, CVPR2020. [[paper]](https://arxiv.org/abs/2003.07540)[[github]](https://github.com/Sense-X/TSD) +- PolarMask: Single Shot Instance Segmentation with Polar Representation, CVPR2020. [[paper]](https://arxiv.org/abs/1909.13226)[[github]](https://github.com/xieenze/PolarMask) +- Hit-Detector: Hierarchical Trinity Architecture Search for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2003.11818)[[github]](https://github.com/ggjy/HitDet.pytorch) +- ZeroQ: A Novel Zero Shot Quantization Framework, CVPR2020. [[paper]](https://arxiv.org/abs/2001.00281)[[github]](https://github.com/amirgholami/ZeroQ) +- CBNet: A Novel Composite Backbone Network Architecture for Object Detection, AAAI2020. [[paper]](https://aaai.org/Papers/AAAI/2020GB/AAAI-LiuY.1833.pdf)[[github]](https://github.com/VDIGPKU/CBNet) +- RDSNet: A New Deep Architecture for Reciprocal Object Detection and Instance Segmentation, AAAI2020. [[paper]](https://arxiv.org/abs/1912.05070)[[github]](https://github.com/wangsr126/RDSNet) +- Training-Time-Friendly Network for Real-Time Object Detection, AAAI2020. [[paper]](https://arxiv.org/abs/1909.00700)[[github]](https://github.com/ZJULearning/ttfnet) +- Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution, NeurIPS 2019. [[paper]](https://arxiv.org/abs/1909.06720)[[github]](https://github.com/thangvubk/Cascade-RPN) +- Reasoning R-CNN: Unifying Adaptive Global Reasoning into Large-scale Object Detection, CVPR2019. [[paper]](http://openaccess.thecvf.com/content_CVPR_2019/papers/Xu_Reasoning-RCNN_Unifying_Adaptive_Global_Reasoning_Into_Large-Scale_Object_Detection_CVPR_2019_paper.pdf)[[github]](https://github.com/chanyn/Reasoning-RCNN) +- Learning RoI Transformer for Oriented Object Detection in Aerial Images, CVPR2019. [[paper]](https://arxiv.org/abs/1812.00155)[[github]](https://github.com/dingjiansw101/AerialDetection) +- SOLO: Segmenting Objects by Locations. [[paper]](https://arxiv.org/abs/1912.04488)[[github]](https://github.com/WXinlong/SOLO) +- SOLOv2: Dynamic, Faster and Stronger. [[paper]](https://arxiv.org/abs/2003.10152)[[github]](https://github.com/WXinlong/SOLO) +- Dense RepPoints: Representing Visual Objects with Dense Point Sets. [[paper]](https://arxiv.org/abs/1912.11473)[[github]](https://github.com/justimyhxu/Dense-RepPoints) +- IterDet: Iterative Scheme for Object Detection in Crowded Environments. [[paper]](https://arxiv.org/abs/2005.05708)[[github]](https://github.com/saic-vul/iterdet) +- Cross-Iteration Batch Normalization. [[paper]](https://arxiv.org/abs/2002.05712)[[github]](https://github.com/Howal/Cross-iterationBatchNorm) +- Pedestrian Detection: The Elephant In The Room. 
[[paper]](https://arxiv.org/abs/2003.08799)[[github]](https://github.com/hasanirtiza/Pedestron) +- A Ranking-based, Balanced Loss Function Unifying Classification and Localisation in Object Detection, NeurIPS2020 [[paper]](https://arxiv.org/abs/2009.13592)[[github]](https://github.com/kemaloksuz/aLRPLoss) diff --git a/thirdparty/mmdetection/docs/robustness_benchmarking.md b/thirdparty/mmdetection/docs/robustness_benchmarking.md new file mode 100644 index 0000000000000000000000000000000000000000..4a6938c3f1aa06b74774d8e529445328194a84b2 --- /dev/null +++ b/thirdparty/mmdetection/docs/robustness_benchmarking.md @@ -0,0 +1,109 @@ +# Corruption Benchmarking + +## Introduction + +We provide tools to test object detection and instance segmentation models on the image corruption benchmark defined in [Benchmarking Robustness in Object Detection: Autonomous Driving when Winter is Coming](https://arxiv.org/abs/1907.07484). +This page provides basic tutorials on how to use the benchmark. + +``` +@article{michaelis2019winter, +  title={Benchmarking Robustness in Object Detection: +  Autonomous Driving when Winter is Coming}, +  author={Michaelis, Claudio and Mitzkus, Benjamin and +  Geirhos, Robert and Rusak, Evgenia and +  Bringmann, Oliver and Ecker, Alexander S. and +  Bethge, Matthias and Brendel, Wieland}, +  journal={arXiv:1907.07484}, +  year={2019} +} +``` + +![image corruption example](../resources/corruptions_sev_3.png) + +## About the benchmark + +To submit results to the benchmark, please visit the [benchmark homepage](https://github.com/bethgelab/robust-detection-benchmark). + +The benchmark is modelled after the [imagenet-c benchmark](https://github.com/hendrycks/robustness), which was originally +published in [Benchmarking Neural Network Robustness to Common Corruptions and Perturbations](https://arxiv.org/abs/1903.12261) (ICLR 2019) by Dan Hendrycks and Thomas Dietterich. + +The image corruption functions are included in this library but can be installed separately using: + +```shell +pip install imagecorruptions +``` + +Compared to imagenet-c, a few changes had to be made to handle images of arbitrary size and greyscale images. +We also modified the 'motion blur' and 'snow' corruptions to remove a dependency on a Linux-specific library, +which would otherwise have to be installed separately. For details please refer to the [imagecorruptions repository](https://github.com/bethgelab/imagecorruptions). + +## Inference with pretrained models + +We provide a testing script to evaluate a model's performance on any combination of the corruptions provided in the benchmark. + +### Test a dataset + +- [x] single GPU testing +- [ ] multiple GPU testing +- [ ] visualize detection results + +You can use the following commands to test a model's performance under the 15 corruptions used in the benchmark. + +```shell +# single-gpu testing +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] +``` + +Alternatively, different groups of corruptions can be selected. 
+ +```shell +# noise +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions noise + +# blur +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions blur + +# weather +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions weather + +# digital +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions digital +``` + +Or a custom set of corruptions, e.g.: +```shell +# gaussian noise, zoom blur and snow +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions gaussian_noise zoom_blur snow +``` + +Finally, the corruption severities to evaluate can be chosen. +Severity 0 corresponds to clean data and the effect increases from 1 to 5. + +```shell +# severity 1 +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --severities 1 + +# severities 0,2,4 +python tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --severities 0 2 4 +``` + +## Results for modelzoo models + +The results on COCO 2017val are shown in the below table. + +Model | Backbone | Style | Lr schd | box AP clean | box AP corr. | box % | mask AP clean | mask AP corr. | mask % | +:-----:|:---------:|:-------:|:-------:|:------------:|:------------:|:-----:|:-------------:|:-------------:|:------:| +Faster R-CNN | R-50-FPN | pytorch | 1x | 36.3 | 18.2 | 50.2 | - | - | - | +Faster R-CNN | R-101-FPN | pytorch | 1x | 38.5 | 20.9 | 54.2 | - | - | - | +Faster R-CNN | X-101-32x4d-FPN | pytorch | 1x | 40.1 | 22.3 | 55.5 | - | - | - | +Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 41.3 | 23.4 | 56.6 | - | - | - | +Faster R-CNN | R-50-FPN-DCN | pytorch | 1x | 40.0 | 22.4 | 56.1 | - | - | - | +Faster R-CNN | X-101-32x4d-FPN-DCN | pytorch | 1x | 43.4 | 26.7 | 61.6 | - | - | - | +Mask R-CNN | R-50-FPN | pytorch | 1x | 37.3 | 18.7 | 50.1 | 34.2 | 16.8 | 49.1 | +Mask R-CNN | R-50-FPN-DCN | pytorch | 1x | 41.1 | 23.3 | 56.7 | 37.2 | 20.7 | 55.7 | +Cascade R-CNN | R-50-FPN | pytorch | 1x | 40.4 | 20.1 | 49.7 | - | - | - | +Cascade Mask R-CNN | R-50-FPN | pytorch | 1x | 41.2 | 20.7 | 50.2 | 35.7 | 17.6 | 49.3 | +RetinaNet | R-50-FPN | pytorch | 1x | 35.6 | 17.8 | 50.1 | - | - | - | +Hybrid Task Cascade | X-101-64x4d-FPN-DCN | pytorch | 1x | 50.6 | 32.7 | 64.7 | 43.8 | 28.1 | 64.0 | + +Results may vary slightly due to the stochastic application of the corruptions. 
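+ +For reference, applying a single corruption directly with the standalone `imagecorruptions` package looks roughly like the sketch below (the call follows that package's documented interface; treat the exact signature as an assumption): + +```python +import numpy as np +from imagecorruptions import corrupt + +image = np.zeros((224, 224, 3), dtype=np.uint8)  # any HxWxC uint8 image +corrupted = corrupt(image, corruption_name='gaussian_noise', severity=3) +```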
diff --git a/thirdparty/mmdetection/docs/stat.py b/thirdparty/mmdetection/docs/stat.py
new file mode 100644
index 0000000000000000000000000000000000000000..230b9c166889da83ee6477c19e4963f192836904
--- /dev/null
+++ b/thirdparty/mmdetection/docs/stat.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+import glob
+import os.path as osp
+import re
+
+url_prefix = 'https://github.com/open-mmlab/mmdetection/blob/master/'
+
+files = sorted(glob.glob('../configs/*/README.md'))
+
+stats = []
+titles = []
+num_ckpts = 0
+
+for f in files:
+    url = osp.dirname(f.replace('../', url_prefix))
+
+    with open(f, 'r') as content_file:
+        content = content_file.read()
+
+    title = content.split('\n')[0].replace('# ', '')
+
+    titles.append(title)
+    ckpts = set(x.lower().strip()
+                for x in re.findall(r'https?://download.*\.pth', content)
+                if 'mmdetection' in x)
+    num_ckpts += len(ckpts)
+    statsmsg = f"""
+\t* [{title}]({url}) ({len(ckpts)} ckpts)
+"""
+    stats.append((title, ckpts, statsmsg))
+
+msglist = '\n'.join(x for _, _, x in stats)
+
+modelzoo = f"""
+# Model Zoo Statistics
+
+* Number of papers: {len(titles)}
+* Number of checkpoints: {num_ckpts}
+{msglist}
+"""
+
+with open('modelzoo_statistics.md', 'w') as f:
+    f.write(modelzoo)
diff --git a/thirdparty/mmdetection/docs/tutorials/config.md b/thirdparty/mmdetection/docs/tutorials/config.md
new file mode 100644
index 0000000000000000000000000000000000000000..3a81e7e61c4398f887b55fbc859668bc37e84615
--- /dev/null
+++ b/thirdparty/mmdetection/docs/tutorials/config.md
@@ -0,0 +1,479 @@
+# Tutorial 1: Learn about Configs
+
+We incorporate modular and inheritance design into our config system, which makes it convenient to conduct various experiments.
+If you wish to inspect the config file, you may run `python tools/print_config.py /PATH/TO/CONFIG` to see the complete config.
+You may also pass `--cfg-options xxx.yyy=zzz` to see the updated config.
+
+## Config File Structure
+
+There are 4 basic component types under `config/_base_`: dataset, model, schedule, and default_runtime.
+Many methods, such as Faster R-CNN, Mask R-CNN, Cascade R-CNN, RPN, and SSD, can easily be constructed with one of each.
+The configs that are composed by components from `_base_` are called _primitive_.
+
+For all configs under the same folder, it is recommended to have only **one** _primitive_ config. All other configs should inherit from the _primitive_ config. In this way, the maximum inheritance level is 3.
+
+For easy understanding, we recommend contributors to inherit from existing methods.
+For example, if some modification is made based on Faster R-CNN, users may first inherit the basic Faster R-CNN structure by specifying `_base_ = ../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py`, then modify the necessary fields in the config files.
+
+If you are building an entirely new method that does not share the structure with any of the existing methods, you may create a folder `xxx_rcnn` under `configs`.
+
+Please refer to [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#config) for detailed documentation.
+
+## Config Name Style
+
+We follow the style below to name config files. Contributors are advised to follow the same style.
+
+```
+{model}_[model setting]_{backbone}_{neck}_[norm setting]_[misc]_[gpu x batch_per_gpu]_{schedule}_{dataset}
+```
+
+`{xxx}` is a required field and `[yyy]` is optional.
+
+- `{model}`: model type like `faster_rcnn`, `mask_rcnn`, etc.
+- `[model setting]`: specific setting for some model, like `without_semantic` for `htc`, `moment` for `reppoints`, etc.
+- `{backbone}`: backbone type like `r50` (ResNet-50), `x101` (ResNeXt-101).
+- `{neck}`: neck type like `fpn`, `pafpn`, `nasfpn`, `c4`.
+- `[norm_setting]`: `bn` (Batch Normalization) is used unless specified, other norm layer types could be `gn` (Group Normalization), `syncbn` (Synchronized Batch Normalization).
+`gn-head`/`gn-neck` indicates GN is applied in head/neck only, while `gn-all` means GN is applied in the entire model, e.g. backbone, neck, head.
+- `[misc]`: miscellaneous settings/plugins of the model, e.g. `dconv`, `gcb`, `attention`, `albu`, `mstrain`.
+- `[gpu x batch_per_gpu]`: GPUs and samples per GPU, `8x2` is used by default.
+- `{schedule}`: training schedule, options are `1x`, `2x`, `20e`, etc.
+`1x` and `2x` mean 12 epochs and 24 epochs respectively.
+`20e` is adopted in cascade models, which denotes 20 epochs.
+For `1x`/`2x`, the initial learning rate decays by a factor of 10 at the 8th/16th and 11th/22nd epochs.
+For `20e`, the initial learning rate decays by a factor of 10 at the 16th and 19th epochs.
+- `{dataset}`: dataset like `coco`, `cityscapes`, `voc_0712`, `wider_face`.
+
+## An Example of Mask R-CNN
+
+To help the users have a basic idea of a complete config and the modules in a modern detection system,
+we make brief comments on the config of Mask R-CNN using ResNet50 and FPN as the following.
+For more detailed usage and the corresponding alternatives for each module, please refer to the API documentation.
+
+```python
+model = dict(
+    type='MaskRCNN',  # The name of detector
+    pretrained=
+    'torchvision://resnet50',  # The ImageNet pretrained backbone to be loaded
+    backbone=dict(  # The config of backbone
+        type='ResNet',  # The type of the backbone, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/backbones/resnet.py#L288 for more details.
+        depth=50,  # The depth of backbone, usually it is 50 or 101 for ResNet and ResNeXt backbones.
+        num_stages=4,  # Number of stages of the backbone.
+        out_indices=(0, 1, 2, 3),  # The indices of output feature maps produced in each stage
+        frozen_stages=1,  # The weights of the first stage are frozen
+        norm_cfg=dict(  # The config of normalization layers.
+            type='BN',  # Type of norm layer, usually it is BN or GN
+            requires_grad=True),  # Whether to train the gamma and beta in BN
+        norm_eval=True,  # Whether to freeze the statistics in BN
+        style='pytorch'),  # The style of backbone, 'pytorch' means that stride 2 layers are in 3x3 convs, 'caffe' means stride 2 layers are in 1x1 convs.
+    neck=dict(
+        type='FPN',  # The neck of detector is FPN. We also support 'NASFPN', 'PAFPN', etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/necks/fpn.py#L10 for more details.
+        in_channels=[256, 512, 1024, 2048],  # The input channels, this is consistent with the output channels of backbone
+        out_channels=256,  # The output channels of each level of the pyramid feature map
+        num_outs=5),  # The number of output scales
+    rpn_head=dict(
+        type='RPNHead',  # The type of RPN head is 'RPNHead', we also support 'GARPNHead', etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/rpn_head.py#L12 for more details.
+        in_channels=256,  # The input channels of each input feature map, this is consistent with the output channels of neck
+        feat_channels=256,  # Feature channels of convolutional layers in the head.
+        anchor_generator=dict(  # The config of anchor generator
+            type='AnchorGenerator',  # Most methods use AnchorGenerator, while SSD detectors use `SSDAnchorGenerator`.
Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/anchor/anchor_generator.py#L10 for more details + scales=[8], # Basic scale of the anchor, the area of the anchor in one position of a feature map will be scale * base_sizes + ratios=[0.5, 1.0, 2.0], # The ratio between height and width. + strides=[4, 8, 16, 32, 64]), # The strides of the anchor generator. This is consistent with the FPN feature strides. The strides will be taken as base_sizes if base_sizes is not set. + bbox_coder=dict( # Config of box coder to encode and decode the boxes during training and testing + type='DeltaXYWHBBoxCoder', # Type of box coder. 'DeltaXYWHBBoxCoder' is applied for most of methods. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py#L9 for more details. + target_means=[0.0, 0.0, 0.0, 0.0], # The target means used to encode and decode boxes + target_stds=[1.0, 1.0, 1.0, 1.0]), # The standard variance used to encode and decode boxes + loss_cls=dict( # Config of loss function for the classification branch + type='CrossEntropyLoss', # Type of loss for classification branch, we also support FocalLoss etc. + use_sigmoid=True, # RPN usually perform two-class classification, so it usually uses sigmoid function. + loss_weight=1.0), # Loss weight of the classification branch. + loss_bbox=dict( # Config of loss function for the regression branch. + type='L1Loss', # Type of loss, we also support many IoU Losses and smooth L1-loss, etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/losses/smooth_l1_loss.py#L56 for implementation. + loss_weight=1.0)), # Loss weight of the regression branch. + roi_head=dict( # RoIHead encapsulates the second stage of two-stage/cascade detectors. + type='StandardRoIHead', # Type of the RoI head. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/standard_roi_head.py#L10 for implementation. + bbox_roi_extractor=dict( # RoI feature extractor for bbox regression. + type='SingleRoIExtractor', # Type of the RoI feature extractor, most of methods uses SingleRoIExtractor. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/roi_extractors/single_level.py#L10 for details. + roi_layer=dict( # Config of RoI Layer + type='RoIAlign', # Type of RoI Layer, DeformRoIPoolingPack and ModulatedDeformRoIPoolingPack are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/roi_align/roi_align.py#L79 for details. + output_size=7, # The output size of feature maps. + sampling_ratio=0), # Sampling ratio when extracting the RoI features. 0 means adaptive ratio. + out_channels=256, # output channels of the extracted feature. + featmap_strides=[4, 8, 16, 32]), # Strides of multi-scale feature maps. It should be consistent to the architecture of the backbone. + bbox_head=dict( # Config of box head in the RoIHead. + type='Shared2FCBBoxHead', # Type of the bbox head, Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py#L177 for implementation details. + in_channels=256, # Input channels for bbox head. This is consistent with the out_channels in roi_extractor + fc_out_channels=1024, # Output feature channels of FC layers. + roi_feat_size=7, # Size of RoI features + num_classes=80, # Number of classes for classification + bbox_coder=dict( # Box coder used in the second stage. + type='DeltaXYWHBBoxCoder', # Type of box coder. 
'DeltaXYWHBBoxCoder' is applied for most of methods. + target_means=[0.0, 0.0, 0.0, 0.0], # Means used to encode and decode box + target_stds=[0.1, 0.1, 0.2, 0.2]), # Standard variance for encoding and decoding. It is smaller since the boxes are more accurate. [0.1, 0.1, 0.2, 0.2] is a conventional setting. + reg_class_agnostic=False, # Whether the regression is class agnostic. + loss_cls=dict( # Config of loss function for the classification branch + type='CrossEntropyLoss', # Type of loss for classification branch, we also support FocalLoss etc. + use_sigmoid=False, # Whether to use sigmoid. + loss_weight=1.0), # Loss weight of the classification branch. + loss_bbox=dict( # Config of loss function for the regression branch. + type='L1Loss', # Type of loss, we also support many IoU Losses and smooth L1-loss, etc. + loss_weight=1.0)), # Loss weight of the regression branch. + mask_roi_extractor=dict( # RoI feature extractor for bbox regression. + type='SingleRoIExtractor', # Type of the RoI feature extractor, most of methods uses SingleRoIExtractor. + roi_layer=dict( # Config of RoI Layer that extracts features for instance segmentation + type='RoIAlign', # Type of RoI Layer, DeformRoIPoolingPack and ModulatedDeformRoIPoolingPack are also supported + output_size=14, # The output size of feature maps. + sampling_ratio=0), # Sampling ratio when extracting the RoI features. + out_channels=256, # Output channels of the extracted feature. + featmap_strides=[4, 8, 16, 32]), # Strides of multi-scale feature maps. + mask_head=dict( # Mask prediction head + type='FCNMaskHead', # Type of mask head, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py#L21 for implementation details. + num_convs=4, # Number of convolutional layers in mask head. + in_channels=256, # Input channels, should be consistent with the output channels of mask roi extractor. + conv_out_channels=256, # Output channels of the convolutional layer. + num_classes=80, # Number of class to be segmented. + loss_mask=dict( # Config of loss function for the mask branch. + type='CrossEntropyLoss', # Type of loss used for segmentation + use_mask=True, # Whether to only train the mask in the correct class. + loss_weight=1.0)))) # Loss weight of mask branch. +train_cfg = dict( # Config of training hyperparameters for rpn and rcnn + rpn=dict( # Training config of rpn + assigner=dict( # Config of assigner + type='MaxIoUAssigner', # Type of assigner, MaxIoUAssigner is used for many common detectors. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/assigners/max_iou_assigner.py#L10 for more details. + pos_iou_thr=0.7, # IoU >= threshold 0.7 will be taken as positive samples + neg_iou_thr=0.3, # IoU < threshold 0.3 will be taken as negative samples + min_pos_iou=0.3, # The minimal IoU threshold to take boxes as positive samples + match_low_quality=True, # Whether to match the boxes under low quality (see API doc for more details). + ignore_iof_thr=-1), # IoF threshold for ignoring bboxes + sampler=dict( # Config of positive/negative sampler + type='RandomSampler', # Type of sampler, PseudoSampler and other samplers are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/samplers/random_sampler.py#L8 for implementation details. + num=256, # Number of samples + pos_fraction=0.5, # The ratio of positive samples in the total samples. 
+            neg_pos_ub=-1,  # The upper bound of negative samples based on the number of positive samples.
+            add_gt_as_proposals=False),  # Whether to add GT as proposals after sampling.
+        allowed_border=-1,  # The border allowed after padding for valid anchors.
+        pos_weight=-1,  # The weight of positive samples during training.
+        debug=False),  # Whether to set the debug mode
+    rpn_proposal=dict(  # The config to generate proposals during training
+        nms_across_levels=False,  # Whether to do NMS for boxes across levels
+        nms_pre=2000,  # The number of boxes before NMS
+        nms_post=1000,  # The number of boxes to be kept by NMS
+        max_num=1000,  # The number of boxes to be used after NMS
+        nms_thr=0.7,  # The threshold to be used during NMS
+        min_bbox_size=0),  # The allowed minimal box size
+    rcnn=dict(  # The config for the roi heads.
+        assigner=dict(  # Config of assigner for the second stage, this is different from that in rpn
+            type='MaxIoUAssigner',  # Type of assigner, MaxIoUAssigner is used for all roi_heads for now. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/assigners/max_iou_assigner.py#L10 for more details.
+            pos_iou_thr=0.5,  # IoU >= threshold 0.5 will be taken as positive samples
+            neg_iou_thr=0.5,  # IoU < threshold 0.5 will be taken as negative samples
+            min_pos_iou=0.5,  # The minimal IoU threshold to take boxes as positive samples
+            match_low_quality=False,  # Whether to match the boxes under low quality (see API doc for more details).
+            ignore_iof_thr=-1),  # IoF threshold for ignoring bboxes
+        sampler=dict(
+            type='RandomSampler',  # Type of sampler, PseudoSampler and other samplers are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/samplers/random_sampler.py#L8 for implementation details.
+            num=512,  # Number of samples
+            pos_fraction=0.25,  # The ratio of positive samples in the total samples.
+            neg_pos_ub=-1,  # The upper bound of negative samples based on the number of positive samples.
+            add_gt_as_proposals=True),  # Whether to add GT as proposals after sampling.
+        mask_size=28,  # Size of mask
+        pos_weight=-1,  # The weight of positive samples during training.
+        debug=False))  # Whether to set the debug mode
+test_cfg = dict(  # Config for testing hyperparameters for rpn and rcnn
+    rpn=dict(  # The config to generate proposals during testing
+        nms_across_levels=False,  # Whether to do NMS for boxes across levels
+        nms_pre=1000,  # The number of boxes before NMS
+        nms_post=1000,  # The number of boxes to be kept by NMS
+        max_num=1000,  # The number of boxes to be used after NMS
+        nms_thr=0.7,  # The threshold to be used during NMS
+        min_bbox_size=0),  # The allowed minimal box size
+    rcnn=dict(  # The config for the roi heads.
+ score_thr=0.05, # Threshold to filter out boxes + nms=dict( # Config of nms in the second stage + type='nms', # Type of nms + iou_thr=0.5), # NMS threshold + max_per_img=100, # Max number of detections of each image + mask_thr_binary=0.5)) # Threshold of mask prediction +dataset_type = 'CocoDataset' # Dataset type, this will be used to define the dataset +data_root = 'data/coco/' # Root path of data +img_norm_cfg = dict( # Image normalization config to normalize the input images + mean=[123.675, 116.28, 103.53], # Mean values used to pre-training the pre-trained backbone models + std=[58.395, 57.12, 57.375], # Standard variance used to pre-training the pre-trained backbone models + to_rgb=True +) # The channel orders of image used to pre-training the pre-trained backbone models +train_pipeline = [ # Training pipeline + dict(type='LoadImageFromFile'), # First pipeline to load images from file path + dict( + type='LoadAnnotations', # Second pipeline to load annotations for current image + with_bbox=True, # Whether to use bounding box, True for detection + with_mask=True, # Whether to use instance mask, True for instance segmentation + poly2mask=False), # Whether to convert the polygon mask to instance mask, set False for acceleration and to save memory + dict( + type='Resize', # Augmentation pipeline that resize the images and their annotations + img_scale=(1333, 800), # The largest scale of image + keep_ratio=True + ), # whether to keep the ratio between height and width. + dict( + type='RandomFlip', # Augmentation pipeline that flip the images and their annotations + flip_ratio=0.5), # The ratio or probability to flip + dict( + type='Normalize', # Augmentation pipeline that normalize the input images + mean=[123.675, 116.28, 103.53], # These keys are the same of img_norm_cfg since the + std=[58.395, 57.12, 57.375], # keys of img_norm_cfg are used here as arguments + to_rgb=True), + dict( + type='Pad', # Padding config + size_divisor=32), # The number the padded images should be divisible + dict(type='DefaultFormatBundle'), # Default format bundle to gather data in the pipeline + dict( + type='Collect', # Pipeline that decides which keys in the data should be passed to the detector + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), # First pipeline to load images from file path + dict( + type='MultiScaleFlipAug', # An encapsulation that encapsulates the testing augmentations + img_scale=(1333, 800), # Decides the largest scale for testing, used for the Resize pipeline + flip=False, # Whether to flip images during testing + transforms=[ + dict(type='Resize', # Use resize augmentation + keep_ratio=True), # Whether to keep the ratio between height and width, the img_scale set here will be supressed by the img_scale set above. + dict(type='RandomFlip'), # Thought RandomFlip is added in pipeline, it is not used because flip=False + dict( + type='Normalize', # Normalization config, the values are from img_norm_cfg + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict( + type='Pad', # Padding config to pad images divisable by 32. + size_divisor=32), + dict( + type='ImageToTensor', # convert image to tensor + keys=['img']), + dict( + type='Collect', # Collect pipeline that collect necessary keys for testing. 
+ keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, # Batch size of a single GPU + workers_per_gpu=2, # Worker to pre-fetch data for each single GPU + train=dict( # Train dataset config + type='CocoDataset', # Type of dataset, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/coco.py#L19 for details. + ann_file='data/coco/annotations/instances_train2017.json', # Path of annotation file + img_prefix='data/coco/train2017/', # Prefix of image path + pipeline=[ # pipeline, this is passed by the train_pipeline created before. + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) + ]), + val=dict( # Validation dataset config + type='CocoDataset', + ann_file='data/coco/annotations/instances_val2017.json', + img_prefix='data/coco/val2017/', + pipeline=[ # Pipeline is passed by test_pipeline created before + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ]), + test=dict( # Test dataset config, modify the ann_file for test-dev/test submission + type='CocoDataset', + ann_file='data/coco/annotations/instances_val2017.json', + img_prefix='data/coco/val2017/', + pipeline=[ # Pipeline is passed by test_pipeline created before + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ], + samples_per_gpu=2 # Batch size of a single GPU used in testing + )) +evaluation = dict( # The config to build the evaluation hook, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/evaluation/eval_hooks.py#L7 for more details. + interval=1, # Evaluation interval + metric=['bbox', 'segm']) # Metrics used during evaluation +optimizer = dict( # Config used to build optimizer, support all the optimizers in PyTorch whose arguments are also the same as those in PyTorch + type='SGD', # Type of optimizers, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/optimizer/default_constructor.py#L13 for more details + lr=0.02, # Learning rate of optimizers, see detail usages of the parameters in the documentaion of PyTorch + momentum=0.9, # Momentum + weight_decay=0.0001) # Weight decay of SGD +optimizer_config = dict( # Config used to build the optimizer hook, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/optimizer.py#L8 for implementation details. 
+ grad_clip=None) # Most of the methods do not use gradient clip +lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook + policy='step', # The policy of scheduler, also support CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9. + warmup='linear', # The warmup policy, also support `exp` and `constant`. + warmup_iters=500, # The number of iterations for warmup + warmup_ratio= + 0.001, # The ratio of the starting learning rate used for warmup + step=[8, 11]) # Steps to decay the learning rate +total_epochs = 12 # Total epochs to train the model +checkpoint_config = dict( # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation. + interval=1) # The save interval is 1 +log_config = dict( # config to register logger hook + interval=50, # Interval to print the log + hooks=[ + # dict(type='TensorboardLoggerHook') # The Tensorboard logger is also supported + dict(type='TextLoggerHook') + ]) # The logger used to record the training process. +dist_params = dict(backend='nccl') # Parameters to setup distributed training, the port can also be set. +log_level = 'INFO' # The level of logging. +load_from = None # load models as a pre-trained model from a given path. This will not resume training. +resume_from = None # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved. +workflow = [('train', 1)] # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once. The workflow trains the model by 12 epochs according to the total_epochs. +work_dir = 'work_dir' # Directory to save the model checkpoints and logs for the current experiments. + +``` + +## FAQ + +### Ignore some fields in the base configs + +Sometimes, you may set `_delete_=True` to ignore some of fields in base configs. +You may refer to [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#inherit-from-base-config-with-ignored-fields) for simple inllustration. + +In MMDetection, for example, to change the backbone of Mask R-CNN with the following config. + +```python +model = dict( + type='MaskRCNN', + pretrained='torchvision://resnet50', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict(...), + rpn_head=dict(...), + roi_head=dict(...)) +``` + +`ResNet` and `HRNet` use different keywords to construct. + +```python +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + pretrained='open-mmlab://msra/hrnetv2_w32', + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256)))), + neck=dict(...)) +``` + +The `_delete_=True` would replace all old keys in `backbone` field with new keys. 
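+
+A quick way to see the effect of `_delete_` is to build two toy configs on disk and load the child with `mmcv.Config`. The snippet below is only an illustrative sketch with made-up file names and keys:
+
+```python
+import mmcv
+
+# Hypothetical base and child configs written next to each other.
+mmcv.mkdir_or_exist('tmp_cfg')
+with open('tmp_cfg/base.py', 'w') as f:
+    f.write("model = dict(backbone=dict(type='ResNet', depth=50, num_stages=4))\n")
+with open('tmp_cfg/child.py', 'w') as f:
+    f.write("_base_ = './base.py'\n"
+            "model = dict(backbone=dict(_delete_=True, type='HRNet', extra=dict()))\n")
+
+cfg = mmcv.Config.fromfile('tmp_cfg/child.py')
+# With _delete_=True the ResNet keys (depth, num_stages) are gone entirely;
+# without it they would be merged into the HRNet dict and conflict.
+print(cfg.model.backbone)
+```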
+ +### Use intermediate variables in configs + +Some intermediate variables are used in the configs files, like `train_pipeline`/`test_pipeline` in datasets. +It's worth noting that when modifying intermediate variables in the children configs, user need to pass the intermediate variables into corresponding fields again. +For example, we would like to use multi scale strategy to train a Mask R-CNN. `train_pipeline`/`test_pipeline` are intermediate variable we would like modify. +```python +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode="value", + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +``` +We first define the new `train_pipeline`/`test_pipeline` and pass them into `data`. diff --git a/thirdparty/mmdetection/docs/tutorials/customize_dataset.md b/thirdparty/mmdetection/docs/tutorials/customize_dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..146840d7911bb840d010f61275ef03a4994f06ca --- /dev/null +++ b/thirdparty/mmdetection/docs/tutorials/customize_dataset.md @@ -0,0 +1,415 @@ +# Tutorial 2: Customize Datasets + +## Support new data format + +To support a new data format, you can either convert them to existing formats (COCO format or PASCAL format) or directly convert them to the middle format. You could also choose to convert them offline (before training by a script) or online (implement a new dataset and do the conversion at training). In MMDetection, we recommand to convert the data into COCO formats and do the conversion offline, thus you only need to modify the config's data annotation pathes and classes after the conversion to your data. + +### Reorganize new data formats to existing format + +The simplest way is to convert your dataset to existing dataset formats (COCO or PASCAL VOC). + +The annotation json files in COCO format has the following necessary keys: + +```python +'images': [ + { + 'file_name': 'COCO_val2014_000000001268.jpg', + 'height': 427, + 'width': 640, + 'id': 1268 + }, + ... +], + +'annotations': [ + { + 'segmentation': [[192.81, + 247.09, + ... + 219.03, + 249.06]], # if you have mask labels + 'area': 1035.749, + 'iscrowd': 0, + 'image_id': 1268, + 'bbox': [192.81, 224.8, 74.73, 33.43], + 'category_id': 16, + 'id': 42986 + }, + ... +], + +'categories': [ + {'id': 0, 'name': 'car'}, + ] +``` + +There are three necessary keys in the json file: + +- `images`: contains a list of images with their informations like `file_name`, `height`, `width`, and `id`. 
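+
+To confirm that the override took effect, the merged config can also be loaded and inspected programmatically. A minimal sketch, assuming the child config above was saved as `configs/my_multiscale_config.py` (a hypothetical path):
+
+```python
+from mmcv import Config
+
+cfg = Config.fromfile('configs/my_multiscale_config.py')
+# The Resize step should now list the six multi-scale img_scale values.
+for step in cfg.data.train.pipeline:
+    print(step['type'], step.get('img_scale', ''))
+```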
+- `annotations`: contains the list of instance annotations. +- `categories`: contains the list of categories names and their ID. + +After the data pre-processing, the users need to further modify the config files to use the dataset. +Here we show an example of using a custom dataset of 5 classes, assuming it is also in COCO format. + +In `configs/my_custom_config.py`: + +```python +... +# dataset settings +dataset_type = 'CocoDataset' +classes = ('a', 'b', 'c', 'd', 'e') +... +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + classes=classes, + ann_file='path/to/your/train/data', + ...), + val=dict( + type=dataset_type, + classes=classes, + ann_file='path/to/your/val/data', + ...), + test=dict( + type=dataset_type, + classes=classes, + ann_file='path/to/your/test/data', + ...)) +... +``` + +We use this way to support CityScapes dataset. The script is in [cityscapes.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/convert_datasets/cityscapes.py) and we also provide the finetuning [configs](https://github.com/open-mmlab/mmdetection/blob/master/configs/cityscapes). + +**Note** + +1. For instance segmentation datasets, **MMDetection only supports evaluating mask AP of dataset in COCO format for now**. +2. It is recommanded to convert the data offline before training, thus you can still use `CocoDataset` and only need to modify the path of annotations and the training classes. + +### Reorganize new data format to middle format + +It is also fine if you do not want to convert the annotation format to COCO or PASCAL format. +Actually, we define a simple annotation format and all existing datasets are +processed to be compatible with it, either online or offline. + +The annotation of a dataset is a list of dict, each dict corresponds to an image. +There are 3 field `filename` (relative path), `width`, `height` for testing, +and an additional field `ann` for training. `ann` is also a dict containing at least 2 fields: +`bboxes` and `labels`, both of which are numpy arrays. Some datasets may provide +annotations like crowd/difficult/ignored bboxes, we use `bboxes_ignore` and `labels_ignore` +to cover them. + +Here is an example. + +```python + +[ + { + 'filename': 'a.jpg', + 'width': 1280, + 'height': 720, + 'ann': { + 'bboxes': (n, 4), + 'labels': (n, ), + 'bboxes_ignore': (k, 4), + 'labels_ignore': (k, ) (optional field) + } + }, + ... +] +``` + +There are two ways to work with custom datasets. + +- online conversion + + You can write a new Dataset class inherited from `CustomDataset`, and overwrite two methods + `load_annotations(self, ann_file)` and `get_ann_info(self, idx)`, + like [CocoDataset](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/coco.py) and [VOCDataset](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/voc.py). + +- offline conversion + + You can convert the annotation format to the expected format above and save it to + a pickle or json file, like [pascal_voc.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/convert_datasets/pascal_voc.py). + Then you can simply use `CustomDataset`. + +### An example of customized dataset + +Assume the annotation is in a new format in text files. 
+The bounding box annotations are stored in the text file `annotation.txt` as follows
+
+```
+#
+000001.jpg
+1280 720
+2
+10 20 40 60 1
+20 40 50 60 2
+#
+000002.jpg
+1280 720
+3
+50 20 40 60 2
+20 40 30 45 2
+30 40 50 60 3
+```
+
+We can create a new dataset in `mmdet/datasets/my_dataset.py` to load the data.
+
+```python
+import mmcv
+import numpy as np
+
+from .builder import DATASETS
+from .custom import CustomDataset
+
+
+@DATASETS.register_module()
+class MyDataset(CustomDataset):
+
+    CLASSES = ('person', 'bicycle', 'car', 'motorcycle')
+
+    def load_annotations(self, ann_file):
+        ann_list = mmcv.list_from_file(ann_file)
+
+        data_infos = []
+        for i, ann_line in enumerate(ann_list):
+            if ann_line != '#':
+                continue
+
+            img_shape = ann_list[i + 2].split(' ')
+            width = int(img_shape[0])
+            height = int(img_shape[1])
+            bbox_number = int(ann_list[i + 3])
+
+            bboxes = []
+            labels = []
+            for anns in ann_list[i + 4:i + 4 + bbox_number]:
+                anns = anns.split(' ')  # one 'x1 y1 x2 y2 label' line per box
+                bboxes.append([float(ann) for ann in anns[:4]])
+                labels.append(int(anns[4]))
+
+            data_infos.append(
+                dict(
+                    filename=ann_list[i + 1],
+                    width=width,
+                    height=height,
+                    ann=dict(
+                        bboxes=np.array(bboxes).astype(np.float32),
+                        labels=np.array(labels).astype(np.int64))
+                ))
+
+        return data_infos
+
+    def get_ann_info(self, idx):
+        return self.data_infos[idx]['ann']
+
+```
+
+Then in the config, to use `MyDataset` you can modify the config as follows
+
+```python
+dataset_A_train = dict(
+    type='MyDataset',
+    ann_file = 'image_list.txt',
+    pipeline=train_pipeline
+)
+```
+
+## Customize datasets by dataset wrappers
+
+MMDetection also supports many dataset wrappers to mix the dataset or modify the dataset distribution for training.
+Currently it supports three dataset wrappers, as below:
+
+- `RepeatDataset`: simply repeat the whole dataset.
+- `ClassBalancedDataset`: repeat dataset in a class balanced manner.
+- `ConcatDataset`: concat datasets.
+
+### Repeat dataset
+
+We use `RepeatDataset` as a wrapper to repeat the dataset. For example, suppose the original dataset is `Dataset_A`, to repeat it, the config looks like the following
+
+```python
+dataset_A_train = dict(
+    type='RepeatDataset',
+    times=N,
+    dataset=dict(  # This is the original config of Dataset_A
+        type='Dataset_A',
+        ...
+        pipeline=train_pipeline
+    )
+)
+```
+
+### Class balanced dataset
+
+We use `ClassBalancedDataset` as a wrapper to repeat the dataset based on category
+frequency. The dataset to repeat needs to implement the function `self.get_cat_ids(idx)`
+to support `ClassBalancedDataset`.
+For example, to repeat `Dataset_A` with `oversample_thr=1e-3`, the config looks like the following
+
+```python
+dataset_A_train = dict(
+    type='ClassBalancedDataset',
+    oversample_thr=1e-3,
+    dataset=dict(  # This is the original config of Dataset_A
+        type='Dataset_A',
+        ...
+        pipeline=train_pipeline
+    )
+)
+```
+
+You may refer to [source code](../../mmdet/datasets/dataset_wrappers.py) for details.
+
+### Concatenate dataset
+
+There are three ways to concatenate the dataset.
+
+1. If the datasets you want to concatenate are of the same type with different annotation files, you can concatenate the dataset configs like the following.
+
+    ```python
+    dataset_A_train = dict(
+        type='Dataset_A',
+        ann_file = ['anno_file_1', 'anno_file_2'],
+        pipeline=train_pipeline
+    )
+    ```
+
+    If the concatenated dataset is used for test or evaluation, this manner supports evaluating each dataset separately.
To test the concatenated datasets as a whole, you can set `separate_eval=False` as below. + + ```python + dataset_A_train = dict( + type='Dataset_A', + ann_file = ['anno_file_1', 'anno_file_2'], + separate_eval=False, + pipeline=train_pipeline + ) + ``` + +2. In case the dataset you want to concatenate is different, you can concatenate the dataset configs like the following. + + ```python + dataset_A_train = dict() + dataset_B_train = dict() + + data = dict( + imgs_per_gpu=2, + workers_per_gpu=2, + train = [ + dataset_A_train, + dataset_B_train + ], + val = dataset_A_val, + test = dataset_A_test + ) + ``` + + If the concatenated dataset is used for test or evaluation, this manner also supports to evaluate each dataset separately. + +3. We also support to define `ConcatDataset` explicitly as the following. + + ```python + dataset_A_val = dict() + dataset_B_val = dict() + + data = dict( + imgs_per_gpu=2, + workers_per_gpu=2, + train=dataset_A_train, + val=dict( + type='ConcatDataset', + datasets=[dataset_A_val, dataset_B_val], + separate_eval=False)) + ``` + + This manner allows users to evaluate all the datasets as a single one by setting `separate_eval=False`. + +**Note:** + +1. The option `separate_eval=False` assumes the datasets use `self.data_infos` during evaluation. Therefore, COCO datasets do not support this behavior since COCO datasets do not fully rely on `self.data_infos` for evaluation. Combining different types of datasets and evaluating them as a whole is not tested thus is not suggested. +2. Evaluating `ClassBalancedDataset` and `RepeatDataset` is not supported thus evaluating concatenated datasets of these types is also not supported. + +A more complex example that repeats `Dataset_A` and `Dataset_B` by N and M times, respectively, and then concatenates the repeated datasets is as the following. + +```python +dataset_A_train = dict( + type='RepeatDataset', + times=N, + dataset=dict( + type='Dataset_A', + ... + pipeline=train_pipeline + ) +) +dataset_A_val = dict( + ... + pipeline=test_pipeline +) +dataset_A_test = dict( + ... + pipeline=test_pipeline +) +dataset_B_train = dict( + type='RepeatDataset', + times=M, + dataset=dict( + type='Dataset_B', + ... + pipeline=train_pipeline + ) +) +data = dict( + imgs_per_gpu=2, + workers_per_gpu=2, + train = [ + dataset_A_train, + dataset_B_train + ], + val = dataset_A_val, + test = dataset_A_test +) + +``` + +## Modify Dataset Classes + +With existing dataset types, we can modify the class names of them to train subset of the annotations. +For example, if you want to train only three classes of the current dataset, +you can modify the classes of dataset. +The dataset will filter out the ground truth boxes of other classes automatically. + +```python +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) +``` + +MMDetection V2.0 also supports to read the classes from a file, which is common in real applications. +For example, assume the `classes.txt` contains the name of classes as the following. + +``` +person +bicycle +car +``` + +Users can set the classes as a file path, the dataset will load it and convert it to a list automatically. 
+
+```python
+classes = 'path/to/classes.txt'
+data = dict(
+    train=dict(classes=classes),
+    val=dict(classes=classes),
+    test=dict(classes=classes))
+```
+
+**Note**:
+
+- Before MMDetection v2.5.0, the dataset will filter out the empty GT images automatically if the classes are set and there is no way to disable that through config. This is an undesirable behavior and introduces confusion because if the classes are not set, the dataset only filters the empty GT images when `filter_empty_gt=True` and `test_mode=False`. After MMDetection v2.5.0, we decouple the image filtering process and the classes modification, i.e., the dataset will only filter empty GT images when `filter_empty_gt=True` and `test_mode=False`, no matter whether the classes are set. Thus, setting the classes only influences the annotations of classes used for training and users could decide whether to filter empty GT images by themselves.
+- Since the middle format only has box labels and does not contain the class names, when using `CustomDataset`, users cannot filter out the empty GT images through configs but only do this offline.
+- The features for setting dataset classes and dataset filtering will be refactored to be more user-friendly in v2.8.0 or v2.9.0 (depends on the progress).
diff --git a/thirdparty/mmdetection/docs/tutorials/customize_losses.md b/thirdparty/mmdetection/docs/tutorials/customize_losses.md
new file mode 100644
index 0000000000000000000000000000000000000000..c3e1ddd8900a2a295a1f78df37408f2ea14d7214
--- /dev/null
+++ b/thirdparty/mmdetection/docs/tutorials/customize_losses.md
@@ -0,0 +1,105 @@
+# Tutorial 6: Customize Losses
+
+MMDetection provides users with different loss functions. But the default configuration may not be applicable for different datasets or models, so users may want to modify a specific loss to adapt to the new situation.
+
+This tutorial first elaborates the computation pipeline of losses, then gives some instructions about how to modify each step. The modifications can be categorized as tweaking and weighting.
+
+## Computation pipeline of a loss
+
+Given the input prediction and target, as well as the weights, a loss function maps the input tensor to the final loss scalar. The mapping can be divided into four steps:
+
+1. Get **element-wise** or sample-wise loss by the loss kernel function.
+
+2. Weight the loss with a weight tensor **element-wise**.
+
+3. Reduce the loss tensor to a **scalar**.
+
+4. Weight the loss with a **scalar**.
+
+## Tweaking loss
+
+Tweaking a loss is more related to steps 1, 3 and 4, and most modifications can be specified in the config.
+Here we take [Focal Loss (FL)](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/losses/focal_loss.py) as an example.
+The following code snippets are the constructor and the config of FL respectively; they are in one-to-one correspondence.
+
+```python
+@LOSSES.register_module()
+class FocalLoss(nn.Module):
+
+    def __init__(self,
+                 use_sigmoid=True,
+                 gamma=2.0,
+                 alpha=0.25,
+                 reduction='mean',
+                 loss_weight=1.0):
+```
+
+```python
+loss_cls=dict(
+    type='FocalLoss',
+    use_sigmoid=True,
+    gamma=2.0,
+    alpha=0.25,
+    loss_weight=1.0)
+```
+
+### Tweaking hyper-parameters (step 1)
+
+`gamma` and `alpha` are two hyper-parameters in the Focal Loss.
Say if we want to change the value of `gamma` to be 1.5 and `alpha` to be 0.5, then we can specify them in the config as follows: + +```python +loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=1.5, + alpha=0.5, + loss_weight=1.0) +``` + +### Tweaking the way of reduction (step 3) + +The default way of reduction is `mean` for FL. Say if we want to change the reduction from `mean` to `sum`, we can specify it in the config as follows: + +```python +loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='sum') +``` + +### Tweaking loss weight (step 4) + +The loss weight here is a scalar which controls the weight of different losses in multi-task learning, e.g. classification loss and regression loss. Say if we want to change to loss weight of classification loss to be 0.5, we can specify it in the config as follows: + +```python +loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=0.5) +``` + +## Weighting loss (step 2) + +Weighting loss means we re-weight the loss element-wisely. To be more specific, we multiply the loss tensor with a weight tensor which has the same shape. As a result, different entries of the loss can be scaled differently, and so called element-wisely. +The loss weight varies across different models and highly context related, but overall there are two kinds of loss weights, `label_weights` for classification loss and `bbox_weights` for bbox regression loss. You can find them in the `get_target` method of the corresponding head. Here we take [ATSSHead](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/atss_head.py#L530) as an example, which inherit [AnchorHead](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/anchor_head.py) but overwrite its `get_targets` method which yields different `label_weights` and `bbox_weights`. + +``` +class ATSSHead(AnchorHead): + + ... + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): +``` diff --git a/thirdparty/mmdetection/docs/tutorials/customize_models.md b/thirdparty/mmdetection/docs/tutorials/customize_models.md new file mode 100644 index 0000000000000000000000000000000000000000..8f76980bd89d86c0669c8a9e0ac9d78cb523acb2 --- /dev/null +++ b/thirdparty/mmdetection/docs/tutorials/customize_models.md @@ -0,0 +1,369 @@ +# Tutorial 4: Customize Models + +We basically categorize model components into 5 types. + +- backbone: usually an FCN network to extract feature maps, e.g., ResNet, MobileNet. +- neck: the component between backbones and heads, e.g., FPN, PAFPN. +- head: the component for specific tasks, e.g., bbox prediction and mask prediction. +- roi extractor: the part for extracting RoI features from feature maps, e.g., RoI Align. +- loss: the component in head for calculating losses, e.g., FocalLoss, L1Loss, and GHMLoss. + +## Develop new components + +### Add a new backbone + +Here we show how to develop new components with an example of MobileNet. + +#### 1. Define a new backbone (e.g. MobileNet) + +Create a new file `mmdet/models/backbones/mobilenet.py`. 
+
+```python
+import torch.nn as nn
+
+from ..builder import BACKBONES
+
+
+@BACKBONES.register_module()
+class MobileNet(nn.Module):
+
+    def __init__(self, arg1, arg2):
+        pass
+
+    def forward(self, x):  # should return a tuple
+        pass
+
+    def init_weights(self, pretrained=None):
+        pass
+```
+
+#### 2. Import the module
+
+You can either add the following line to `mmdet/models/backbones/__init__.py`
+
+```python
+from .mobilenet import MobileNet
+```
+
+or alternatively add
+
+```python
+custom_imports = dict(
+    imports=['mmdet.models.backbones.mobilenet'],
+    allow_failed_imports=False)
+```
+
+to the config file to avoid modifying the original code.
+
+#### 3. Use the backbone in your config file
+
+```python
+model = dict(
+    ...
+    backbone=dict(
+        type='MobileNet',
+        arg1=xxx,
+        arg2=xxx),
+    ...
+```
+
+### Add new necks
+
+#### 1. Define a neck (e.g. PAFPN)
+
+Create a new file `mmdet/models/necks/pafpn.py`.
+
+```python
+import torch.nn as nn
+
+from ..builder import NECKS
+
+
+@NECKS.register_module()
+class PAFPN(nn.Module):
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 num_outs,
+                 start_level=0,
+                 end_level=-1,
+                 add_extra_convs=False):
+        pass
+
+    def forward(self, inputs):
+        # implementation is ignored
+        pass
+```
+
+#### 2. Import the module
+
+You can either add the following line to `mmdet/models/necks/__init__.py`,
+
+```python
+from .pafpn import PAFPN
+```
+
+or alternatively add
+
+```python
+custom_imports = dict(
+    imports=['mmdet.models.necks.pafpn'],
+    allow_failed_imports=False)
+```
+
+to the config file and avoid modifying the original code.
+
+#### 3. Modify the config file
+
+```python
+neck=dict(
+    type='PAFPN',
+    in_channels=[256, 512, 1024, 2048],
+    out_channels=256,
+    num_outs=5)
+```
+
+### Add new heads
+
+Here we show how to develop a new head with the example of [Double Head R-CNN](https://arxiv.org/abs/1904.06493) as the following.
+
+First, add a new bbox head in `mmdet/models/roi_heads/bbox_heads/double_bbox_head.py`.
+Double Head R-CNN implements a new bbox head for object detection.
+To implement a bbox head, basically we need to implement three functions of the new module as the following.
+
+```python
+from mmdet.models.builder import HEADS
+from .bbox_head import BBoxHead
+
+@HEADS.register_module()
+class DoubleConvFCBBoxHead(BBoxHead):
+    r"""Bbox head used in Double-Head R-CNN
+
+                                      /-> cls
+                  /-> shared convs ->
+                                      \-> reg
+    roi features
+                                      /-> cls
+                  \-> shared fc    ->
+                                      \-> reg
+    """  # noqa: W605
+
+    def __init__(self,
+                 num_convs=0,
+                 num_fcs=0,
+                 conv_out_channels=1024,
+                 fc_out_channels=1024,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN'),
+                 **kwargs):
+        kwargs.setdefault('with_avg_pool', True)
+        super(DoubleConvFCBBoxHead, self).__init__(**kwargs)
+
+    def init_weights(self):
+        # conv layers are already initialized by ConvModule
+
+    def forward(self, x_cls, x_reg):
+
+```
+
+Second, implement a new RoI Head if it is necessary. We plan to inherit the new `DoubleHeadRoIHead` from `StandardRoIHead`. We can find that a `StandardRoIHead` already implements the following functions.
+
+```python
+import torch
+
+from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler
+from ..builder import HEADS, build_head, build_roi_extractor
+from .base_roi_head import BaseRoIHead
+from .test_mixins import BBoxTestMixin, MaskTestMixin
+
+
+@HEADS.register_module()
+class StandardRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin):
+    """Simplest base roi head including one bbox head and one mask head.
+ """ + + def init_assigner_sampler(self): + + def init_bbox_head(self, bbox_roi_extractor, bbox_head): + + def init_mask_head(self, mask_roi_extractor, mask_head): + + def init_weights(self, pretrained): + + def forward_dummy(self, x, proposals): + + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + + def _bbox_forward(self, x, rois): + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + + def _mask_forward(self, x, rois=None, pos_inds=None, bbox_feats=None): + + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation.""" + +``` + +Double Head's modification is mainly in the bbox_forward logic, and it inherits other logics from the `StandardRoIHead`. +In the `mmdet/models/roi_heads/double_roi_head.py`, we implement the new RoI Head as the following: + +```python +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class DoubleHeadRoIHead(StandardRoIHead): + """RoI head for Double Head RCNN + + https://arxiv.org/abs/1904.06493 + """ + + def __init__(self, reg_roi_scale_factor, **kwargs): + super(DoubleHeadRoIHead, self).__init__(**kwargs) + self.reg_roi_scale_factor = reg_roi_scale_factor + + def _bbox_forward(self, x, rois): + bbox_cls_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + bbox_reg_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], + rois, + roi_scale_factor=self.reg_roi_scale_factor) + if self.with_shared_head: + bbox_cls_feats = self.shared_head(bbox_cls_feats) + bbox_reg_feats = self.shared_head(bbox_reg_feats) + cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats) + + bbox_results = dict( + cls_score=cls_score, + bbox_pred=bbox_pred, + bbox_feats=bbox_cls_feats) + return bbox_results +``` + +Last, the users need to add the module in +`mmdet/models/bbox_heads/__init__.py` and `mmdet/models/roi_heads/__init__.py` thus the corresponding registry could find and load them. + +Alternatively, the users can add + +```python +custom_imports=dict( + imports=['mmdet.models.roi_heads.double_roi_head', 'mmdet.models.bbox_heads.double_bbox_head']) +``` + +to the config file and achieve the same goal. + +The config file of Double Head R-CNN is as the following + +```python +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DoubleHeadRoIHead', + reg_roi_scale_factor=1.3, + bbox_head=dict( + _delete_=True, + type='DoubleConvFCBBoxHead', + num_convs=4, + num_fcs=2, + in_channels=256, + conv_out_channels=1024, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0)))) + +``` + +Since MMDetection 2.0, the config system supports to inherit configs such that the users can focus on the modification. +The Double Head R-CNN mainly uses a new DoubleHeadRoIHead and a new +`DoubleConvFCBBoxHead`, the arguments are set according to the `__init__` function of each module. 
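+
+Once the new modules are registered and the config is in place, a quick smoke test is to build the detector from the config. A sketch, assuming the config above was saved as `configs/double_heads/my_double_head_config.py` (a hypothetical path):
+
+```python
+from mmcv import Config
+from mmdet.models import build_detector
+
+cfg = Config.fromfile('configs/double_heads/my_double_head_config.py')
+# If DoubleHeadRoIHead/DoubleConvFCBBoxHead were not imported and registered,
+# this raises a registry KeyError instead of returning a model.
+model = build_detector(
+    cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))
+print(type(model.roi_head).__name__)
+```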
+ +### Add new loss + +Assume you want to add a new loss as `MyLoss`, for bounding box regression. +To add a new loss function, the users need implement it in `mmdet/models/losses/my_loss.py`. +The decorator `weighted_loss` enable the loss to be weighted for each element. + +```python +import torch +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + +@weighted_loss +def my_loss(pred, target): + assert pred.size() == target.size() and target.numel() > 0 + loss = torch.abs(pred - target) + return loss + +@LOSSES.register_module() +class MyLoss(nn.Module): + + def __init__(self, reduction='mean', loss_weight=1.0): + super(MyLoss, self).__init__() + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_bbox = self.loss_weight * my_loss( + pred, target, weight, reduction=reduction, avg_factor=avg_factor) + return loss_bbox +``` + +Then the users need to add it in the `mmdet/models/losses/__init__.py`. + +```python +from .my_loss import MyLoss, my_loss + +``` + +Alternatively, you can add + +```python +custom_imports=dict( + imports=['mmdet.models.losses.my_loss']) +``` + +to the config file and achieve the same goal. + +To use it, modify the `loss_xxx` field. +Since MyLoss is for regression, you need to modify the `loss_bbox` field in the head. + +```python +loss_bbox=dict(type='MyLoss', loss_weight=1.0)) +``` diff --git a/thirdparty/mmdetection/docs/tutorials/customize_runtime.md b/thirdparty/mmdetection/docs/tutorials/customize_runtime.md new file mode 100644 index 0000000000000000000000000000000000000000..87653e18e03ec7e276135cfe4f96c6c43b4370a6 --- /dev/null +++ b/thirdparty/mmdetection/docs/tutorials/customize_runtime.md @@ -0,0 +1,319 @@ +# Tutorial 5: Customize Runtime Settings + +## Customize optimization settings + +### Customize optimizer supported by Pytorch + +We already support to use all the optimizers implemented by PyTorch, and the only modification is to change the `optimizer` field of config files. +For example, if you want to use `ADAM` (note that the performance could drop a lot), the modification could be as the following. + +```python +optimizer = dict(type='Adam', lr=0.0003, weight_decay=0.0001) +``` + +To modify the learning rate of the model, the users only need to modify the `lr` in the config of optimizer. The users can directly set arguments following the [API doc](https://pytorch.org/docs/stable/optim.html?highlight=optim#module-torch.optim) of PyTorch. + +### Customize self-implemented optimizer + +#### 1. Define a new optimizer + +A customized optimizer could be defined as following. + +Assume you want to add a optimizer named `MyOptimizer`, which has arguments `a`, `b`, and `c`. +You need to create a new directory named `mmdet/core/optimizer`. +And then implement the new optimizer in a file, e.g., in `mmdet/core/optimizer/my_optimizer.py`: + +```python +from .registry import OPTIMIZERS +from torch.optim import Optimizer + + +@OPTIMIZERS.register_module() +class MyOptimizer(Optimizer): + + def __init__(self, a, b, c) + +``` + +#### 2. Add the optimizer to registry + +To find the above module defined above, this module should be imported into the main namespace at first. There are two options to achieve it. + +- Modify `mmdet/core/optimizer/__init__.py` to import it. 
+
+  The newly defined module should be imported in `mmdet/core/optimizer/__init__.py` so that the registry will
+  find the new module and add it:
+
+```python
+from .my_optimizer import MyOptimizer
+```
+
+- Use `custom_imports` in the config to manually import it
+
+```python
+custom_imports = dict(imports=['mmdet.core.optimizer.my_optimizer'], allow_failed_imports=False)
+```
+
+The module `mmdet.core.optimizer.my_optimizer` will be imported at the beginning of the program and the class `MyOptimizer` is then automatically registered.
+Note that only the package containing the class `MyOptimizer` should be imported.
+`mmdet.core.optimizer.my_optimizer.MyOptimizer` **cannot** be imported directly.
+
+In fact, users can use a completely different file directory structure with this importing method, as long as the module root can be located in `PYTHONPATH`.
+
+#### 3. Specify the optimizer in the config file
+
+Then you can use `MyOptimizer` in the `optimizer` field of config files.
+In the configs, the optimizers are defined by the field `optimizer` like the following:
+
+```python
+optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
+```
+
+To use your own optimizer, the field can be changed to
+
+```python
+optimizer = dict(type='MyOptimizer', a=a_value, b=b_value, c=c_value)
+```
+
+### Customize optimizer constructor
+
+Some models may have parameter-specific settings for optimization, e.g., weight decay for BatchNorm layers.
+The users can apply such fine-grained parameter tuning through a customized optimizer constructor.
+
+```python
+from mmcv.utils import build_from_cfg
+
+from mmcv.runner.optimizer import OPTIMIZER_BUILDERS, OPTIMIZERS
+from mmdet.utils import get_root_logger
+from .my_optimizer import MyOptimizer
+
+
+@OPTIMIZER_BUILDERS.register_module()
+class MyOptimizerConstructor(object):
+
+    def __init__(self, optimizer_cfg, paramwise_cfg=None):
+        ...
+
+    def __call__(self, model):
+        ...
+        return my_optimizer
+```
+
+The default optimizer constructor is implemented [here](https://github.com/open-mmlab/mmcv/blob/9ecd6b0d5ff9d2172c49a182eaa669e9f27bb8e7/mmcv/runner/optimizer/default_constructor.py#L11), which could also serve as a template for new optimizer constructors.
+
+### Additional settings
+
+Tricks not implemented by the optimizer should be implemented through the optimizer constructor (e.g., setting parameter-wise learning rates) or hooks. We list some common settings that could stabilize or accelerate training. Feel free to create a PR or an issue for more settings.
+
+- __Use gradient clip to stabilize training__:
+  Some models need gradient clipping to stabilize the training process. An example is as below:
+
+  ```python
+  optimizer_config = dict(
+      _delete_=True, grad_clip=dict(max_norm=35, norm_type=2))
+  ```
+
+  If your config inherits a base config which already sets `optimizer_config`, you might need `_delete_=True` to override the unnecessary settings. See the [config documentation](https://mmdetection.readthedocs.io/en/latest/config.html) for more details.
+
+- __Use momentum schedule to accelerate model convergence__:
+  We support a momentum scheduler that modifies the model's momentum according to the learning rate, which can make the model converge faster.
+  The momentum scheduler is usually used together with the LR scheduler; for example, the following config is used in 3D detection to accelerate convergence.
+  For more details, please refer to the implementation of [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) and [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130).
+
+  ```python
+  lr_config = dict(
+      policy='cyclic',
+      target_ratio=(10, 1e-4),
+      cyclic_times=1,
+      step_ratio_up=0.4,
+  )
+  momentum_config = dict(
+      policy='cyclic',
+      target_ratio=(0.85 / 0.95, 1),
+      cyclic_times=1,
+      step_ratio_up=0.4,
+  )
+  ```
+
+## Customize training schedules
+
+By default we use a step learning rate with the 1x schedule; this calls [`StepLRHook`](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L153) in MMCV.
+We support many other learning rate schedules [here](https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py), such as the `CosineAnnealing` and `Poly` schedules. Here are some examples:
+
+- Poly schedule:
+
+  ```python
+  lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False)
+  ```
+
+- CosineAnnealing schedule:
+
+  ```python
+  lr_config = dict(
+      policy='CosineAnnealing',
+      warmup='linear',
+      warmup_iters=1000,
+      warmup_ratio=1.0 / 10,
+      min_lr_ratio=1e-5)
+  ```
+
+## Customize workflow
+
+Workflow is a list of (phase, epochs) pairs that specifies the running order and epochs.
+By default it is set to
+
+```python
+workflow = [('train', 1)]
+```
+
+which means running 1 epoch for training.
+Sometimes the user may want to check some metrics (e.g., loss, accuracy) of the model on the validation set.
+In such a case, we can set the workflow as
+
+```python
+[('train', 1), ('val', 1)]
+```
+
+so that 1 epoch of training and 1 epoch of validation will be run iteratively.
+
+**Note**:
+
+1. The parameters of the model will not be updated during the val epoch.
+2. The keyword `total_epochs` in the config only controls the number of training epochs and will not affect the validation workflow.
+3. Workflows `[('train', 1), ('val', 1)]` and `[('train', 1)]` will not change the behavior of `EvalHook`, because `EvalHook` is called by `after_train_epoch` and the validation workflow only affects hooks that are called through `after_val_epoch`. Therefore, the only difference between `[('train', 1), ('val', 1)]` and `[('train', 1)]` is that the runner will calculate losses on the validation set after each training epoch.
+
+## Customize hooks
+
+### Customize self-implemented hooks
+
+#### 1. Implement a new hook
+
+There are some occasions when the users might need to implement a new hook. MMDetection supports customized hooks in training (#3395) since v2.3.0. Thus the users could implement a hook directly in mmdet or their mmdet-based codebases and use the hook by only modifying the config in training.
+Before v2.3.0, the users needed to modify the code to get the hook registered before training starts.
+Here we give an example of creating a new hook in mmdet and using it in training; a concrete (hypothetical) hook is sketched right below, followed by the general skeleton.
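+The sketch below is ours, not part of the original tutorial: a hypothetical hook that aborts training once the loss becomes non-finite. Only `after_train_iter` is overridden; `every_n_iters` and `runner.outputs` are standard `mmcv.runner` machinery:
+
+```python
+import torch
+from mmcv.runner import HOOKS, Hook
+
+
+@HOOKS.register_module()
+class CheckInvalidLossHook(Hook):  # hypothetical name, for illustration only
+    """Abort training when the loss becomes NaN/Inf."""
+
+    def __init__(self, interval=50):
+        self.interval = interval  # check every `interval` iterations
+
+    def after_train_iter(self, runner):
+        if self.every_n_iters(runner, self.interval):
+            # `runner.outputs` holds the dict returned by `train_step`
+            if not torch.isfinite(runner.outputs['loss']):
+                runner.logger.error('Loss became non-finite, stopping.')
+                raise RuntimeError('invalid (NaN or Inf) loss')
+```
+
+The general skeleton with all overridable stages looks like this: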
+
+```python
+from mmcv.runner import HOOKS, Hook
+
+
+@HOOKS.register_module()
+class MyHook(Hook):
+
+    def __init__(self, a, b):
+        pass
+
+    def before_run(self, runner):
+        pass
+
+    def after_run(self, runner):
+        pass
+
+    def before_epoch(self, runner):
+        pass
+
+    def after_epoch(self, runner):
+        pass
+
+    def before_iter(self, runner):
+        pass
+
+    def after_iter(self, runner):
+        pass
+```
+
+Depending on the functionality of the hook, the users need to specify what the hook will do at each stage of the training in `before_run`, `after_run`, `before_epoch`, `after_epoch`, `before_iter`, and `after_iter`.
+
+#### 2. Register the new hook
+
+Then we need to make sure `MyHook` is imported. Assuming the file is in `mmdet/core/utils/my_hook.py`, there are two ways to do that:
+
+- Modify `mmdet/core/utils/__init__.py` to import it.
+
+  The newly defined module should be imported in `mmdet/core/utils/__init__.py` so that the registry will
+  find the new module and add it:
+
+```python
+from .my_hook import MyHook
+```
+
+- Use `custom_imports` in the config to manually import it
+
+```python
+custom_imports = dict(imports=['mmdet.core.utils.my_hook'], allow_failed_imports=False)
+```
+
+#### 3. Modify the config
+
+```python
+custom_hooks = [
+    dict(type='MyHook', a=a_value, b=b_value)
+]
+```
+
+You can also set the priority of the hook by setting the key `priority` to `'NORMAL'` or `'HIGHEST'` as below
+
+```python
+custom_hooks = [
+    dict(type='MyHook', a=a_value, b=b_value, priority='NORMAL')
+]
+```
+
+By default the hook's priority is set to `NORMAL` during registration.
+
+### Use hooks implemented in MMCV
+
+If the hook is already implemented in MMCV, you can directly modify the config to use the hook as below
+
+```python
+custom_hooks = [
+    dict(type='MyHook', a=a_value, b=b_value, priority='NORMAL')
+]
+```
+
+### Modify default runtime hooks
+
+There are some common hooks that are not registered through `custom_hooks`; they are
+
+- log_config
+- checkpoint_config
+- evaluation
+- lr_config
+- optimizer_config
+- momentum_config
+
+Among those hooks, only the logger hook has the `VERY_LOW` priority; the others' priority is `NORMAL`.
+The above-mentioned tutorials already cover how to modify `optimizer_config`, `momentum_config`, and `lr_config`.
+Here we show what can be done with `log_config`, `checkpoint_config`, and `evaluation`.
+
+#### Checkpoint config
+
+The MMCV runner will use `checkpoint_config` to initialize [`CheckpointHook`](https://github.com/open-mmlab/mmcv/blob/9ecd6b0d5ff9d2172c49a182eaa669e9f27bb8e7/mmcv/runner/hooks/checkpoint.py#L9).
+
+```python
+checkpoint_config = dict(interval=1)
+```
+
+The users could set `max_keep_ckpts` to save only a small number of checkpoints, or decide whether to store the state dict of the optimizer by `save_optimizer`. More details of the arguments are [here](https://mmcv.readthedocs.io/en/latest/api.html#mmcv.runner.CheckpointHook).
+
+#### Log config
+
+The `log_config` wraps multiple logger hooks and enables setting intervals. Currently MMCV supports `WandbLoggerHook`, `MlflowLoggerHook`, and `TensorboardLoggerHook`.
+The detailed usage can be found in the [doc](https://mmcv.readthedocs.io/en/latest/api.html#mmcv.runner.LoggerHook). For example, the following config logs to text and TensorBoard every 50 iterations:
+
+```python
+log_config = dict(
+    interval=50,
+    hooks=[
+        dict(type='TextLoggerHook'),
+        dict(type='TensorboardLoggerHook')
+    ])
+```
+
+#### Evaluation config
+
+The config of `evaluation` will be used to initialize the [`EvalHook`](https://github.com/open-mmlab/mmdetection/blob/7a404a2c000620d52156774a5025070d9e00d918/mmdet/core/evaluation/eval_hooks.py#L8).
+Except for the key `interval`, other arguments such as `metric` will be passed to `dataset.evaluate()`.
+
+```python
+evaluation = dict(interval=1, metric='bbox')
+```
diff --git a/thirdparty/mmdetection/docs/tutorials/data_pipeline.md b/thirdparty/mmdetection/docs/tutorials/data_pipeline.md
new file mode 100644
index 0000000000000000000000000000000000000000..02a9b5c8304156d7f21d8e2d988776067b0f2364
--- /dev/null
+++ b/thirdparty/mmdetection/docs/tutorials/data_pipeline.md
@@ -0,0 +1,164 @@
+# Tutorial 3: Customize Data Pipelines
+
+## Design of Data pipelines
+
+Following typical conventions, we use `Dataset` and `DataLoader` for data loading
+with multiple workers. `Dataset` returns a dict of data items corresponding to
+the arguments of the models' forward method.
+Since the data in object detection may not be the same size (image size, gt bbox size, etc.),
+we introduce a new `DataContainer` type in MMCV to help collect and distribute
+data of different sizes.
+See [here](https://github.com/open-mmlab/mmcv/blob/master/mmcv/parallel/data_container.py) for more details.
+
+The data preparation pipeline and the dataset are decoupled. Usually a dataset
+defines how to process the annotations, and a data pipeline defines all the steps to prepare a data dict.
+A pipeline consists of a sequence of operations. Each operation takes a dict as input and also outputs a dict for the next transform.
+
+We present a classical pipeline in the following figure. The blue blocks are pipeline operations. As the pipeline proceeds, each operator can add new keys (marked as green) to the result dict or update the existing keys (marked as orange).
+![pipeline figure](../../resources/data_pipeline.png)
+
+The operations are categorized into data loading, pre-processing, formatting and test-time augmentation.
+
+Here is a pipeline example for Faster R-CNN.
+
+```python
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
+]
+test_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', **img_norm_cfg),
+            dict(type='Pad', size_divisor=32),
+            dict(type='ImageToTensor', keys=['img']),
+            dict(type='Collect', keys=['img']),
+        ])
+]
+```
+
+A small sketch of this dict-in/dict-out contract follows; after it, for each operation we list the related dict fields that are added/updated/removed.
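+This sketch is ours, not part of the original tutorial; it assumes an image exists at `data/demo.jpg` and shows how keys accumulate as a result dict passes through two steps:
+
+```python
+# Hypothetical illustration of the dict-in/dict-out contract.
+from mmdet.datasets.pipelines import Compose
+
+pipeline = Compose([
+    dict(type='LoadImageFromFile'),  # adds 'img', 'img_shape', 'ori_shape', ...
+    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+])
+results = dict(img_info=dict(filename='demo.jpg'), img_prefix='data/')
+results = pipeline(results)
+print(sorted(results.keys()))  # now also contains e.g. 'scale_factor'
+```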
+
+### Data loading
+
+`LoadImageFromFile`
+- add: img, img_shape, ori_shape
+
+`LoadAnnotations`
+- add: gt_bboxes, gt_bboxes_ignore, gt_labels, gt_masks, gt_semantic_seg, bbox_fields, mask_fields
+
+`LoadProposals`
+- add: proposals
+
+### Pre-processing
+
+`Resize`
+- add: scale, scale_idx, pad_shape, scale_factor, keep_ratio
+- update: img, img_shape, *bbox_fields, *mask_fields, *seg_fields
+
+`RandomFlip`
+- add: flip
+- update: img, *bbox_fields, *mask_fields, *seg_fields
+
+`Pad`
+- add: pad_fixed_size, pad_size_divisor
+- update: img, pad_shape, *mask_fields, *seg_fields
+
+`RandomCrop`
+- update: img, pad_shape, gt_bboxes, gt_labels, gt_masks, *bbox_fields
+
+`Normalize`
+- add: img_norm_cfg
+- update: img
+
+`SegRescale`
+- update: gt_semantic_seg
+
+`PhotoMetricDistortion`
+- update: img
+
+`Expand`
+- update: img, gt_bboxes
+
+`MinIoURandomCrop`
+- update: img, gt_bboxes, gt_labels
+
+`Corrupt`
+- update: img
+
+### Formatting
+
+`ToTensor`
+- update: specified by `keys`.
+
+`ImageToTensor`
+- update: specified by `keys`.
+
+`Transpose`
+- update: specified by `keys`.
+
+`ToDataContainer`
+- update: specified by `fields`.
+
+`DefaultFormatBundle`
+- update: img, proposals, gt_bboxes, gt_bboxes_ignore, gt_labels, gt_masks, gt_semantic_seg
+
+`Collect`
+- add: img_meta (the keys of img_meta are specified by `meta_keys`)
+- remove: all other keys except those specified by `keys`
+
+### Test time augmentation
+
+`MultiScaleFlipAug`
+
+## Extend and use custom pipelines
+
+1. Write a new pipeline in any file, e.g., `my_pipeline.py`. It takes a dict as input and returns a dict.
+
+   ```python
+   from mmdet.datasets import PIPELINES
+
+
+   @PIPELINES.register_module()
+   class MyTransform:
+
+       def __call__(self, results):
+           results['dummy'] = True
+           return results
+   ```
+
+2. Import the new class.
+
+   ```python
+   from .my_pipeline import MyTransform
+   ```
+
+3. Use it in config files.
+
+   ```python
+   img_norm_cfg = dict(
+       mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+   train_pipeline = [
+       dict(type='LoadImageFromFile'),
+       dict(type='LoadAnnotations', with_bbox=True),
+       dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+       dict(type='RandomFlip', flip_ratio=0.5),
+       dict(type='Normalize', **img_norm_cfg),
+       dict(type='Pad', size_divisor=32),
+       dict(type='MyTransform'),
+       dict(type='DefaultFormatBundle'),
+       dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
+   ]
+   ```
diff --git a/thirdparty/mmdetection/docs/tutorials/finetune.md b/thirdparty/mmdetection/docs/tutorials/finetune.md
new file mode 100644
index 0000000000000000000000000000000000000000..1c1a97bd0c49bc40d25a4396bfeac7730a735602
--- /dev/null
+++ b/thirdparty/mmdetection/docs/tutorials/finetune.md
@@ -0,0 +1,84 @@
+# Tutorial 7: Finetuning Models
+
+Detectors pre-trained on the COCO dataset can serve as good pre-trained models for other datasets, e.g., the CityScapes and KITTI datasets.
+This tutorial provides instructions on using the models provided in the [Model Zoo](../model_zoo.md) for other datasets to obtain better performance.
+
+There are two steps to finetune a model on a new dataset.
+
+- Add support for the new dataset following [Tutorial 2: Customize Datasets](customize_dataset.md).
+- Modify the configs as will be discussed in this tutorial.
+
+Taking the finetuning process on the Cityscapes dataset as an example, users need to modify five parts of the config.
+
+## Inherit base configs
+
+To ease the burden and reduce bugs in writing whole configs, MMDetection V2.0 supports inheriting from multiple existing configs. To finetune a Mask R-CNN model, the new config needs to inherit
+`_base_/models/mask_rcnn_r50_fpn.py` to build the basic structure of the model. To use the Cityscapes dataset, the new config can also simply inherit `_base_/datasets/cityscapes_instance.py`. For runtime settings such as training schedules, the new config needs to inherit `_base_/default_runtime.py`. These configs are in the `configs` directory, and the users can also choose to write out the whole contents rather than use inheritance.
+
+```python
+_base_ = [
+    '../_base_/models/mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py'
+]
+```
+
+## Modify head
+
+Then the new config needs to modify the head according to the number of classes in the new dataset. By only changing `num_classes` in the roi_head, the weights of the pre-trained model are mostly reused, except for the final prediction head.
+
+```python
+model = dict(
+    pretrained=None,
+    roi_head=dict(
+        bbox_head=dict(
+            type='Shared2FCBBoxHead',
+            in_channels=256,
+            fc_out_channels=1024,
+            roi_feat_size=7,
+            num_classes=8,
+            bbox_coder=dict(
+                type='DeltaXYWHBBoxCoder',
+                target_means=[0., 0., 0., 0.],
+                target_stds=[0.1, 0.1, 0.2, 0.2]),
+            reg_class_agnostic=False,
+            loss_cls=dict(
+                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
+            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
+        mask_head=dict(
+            type='FCNMaskHead',
+            num_convs=4,
+            in_channels=256,
+            conv_out_channels=256,
+            num_classes=8,
+            loss_mask=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
+```
+
+## Modify dataset
+
+The users may also need to prepare the dataset and write the configs about the dataset. MMDetection V2.0 already supports the VOC, WIDER FACE, COCO and Cityscapes datasets.
+
+## Modify training schedule
+
+The finetuning hyperparameters differ from the default schedule; finetuning usually requires a smaller learning rate and fewer training epochs.
+
+```python
+# optimizer
+# lr is set for a batch size of 8
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+optimizer_config = dict(grad_clip=None)
+# learning policy
+lr_config = dict(
+    policy='step',
+    warmup='linear',
+    warmup_iters=500,
+    warmup_ratio=0.001,
+    # [7] yields higher performance than [6]
+    step=[7])
+total_epochs = 8  # actual epoch = 8 * 8 = 64
+log_config = dict(interval=100)
+```
+
+## Use pre-trained model
+
+To use the pre-trained model, the new config adds the link to the pre-trained model in `load_from`. The users might need to download the model weights before training to avoid download time during training.
+
+```python
+load_from = 'https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/mask_rcnn_r50_fpn_2x_20181010-41d35c05.pth'  # noqa
+```
diff --git a/thirdparty/mmdetection/docs/tutorials/index.rst b/thirdparty/mmdetection/docs/tutorials/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..554dc81a40ae9f4bc1c2556cf86f68d2b34f88e2
--- /dev/null
+++ b/thirdparty/mmdetection/docs/tutorials/index.rst
@@ -0,0 +1,10 @@
+.. toctree::
+   :maxdepth: 2
+
+   config.md
+   customize_dataset.md
+   data_pipeline.md
+   customize_models.md
+   customize_runtime.md
+   customize_losses.md
+   finetune.md
diff --git a/thirdparty/mmdetection/docs/useful_tools.md b/thirdparty/mmdetection/docs/useful_tools.md
new file mode 100644
index 0000000000000000000000000000000000000000..9cd1d5fabadb0ffb1160729f99ad375eccde2616
--- /dev/null
+++ b/thirdparty/mmdetection/docs/useful_tools.md
@@ -0,0 +1,212 @@
+Apart from training/testing scripts, we provide lots of useful tools under the
+`tools/` directory.
+
+## Log Analysis
+
+`tools/analyze_logs.py` plots loss/mAP curves given a training
+log file. Run `pip install seaborn` first to install the dependency.
+
+```shell
+python tools/analyze_logs.py plot_curve [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}]
+```
+
+![loss curve image](../resources/loss_curve.png)
+
+Examples:
+
+- Plot the classification loss of some run.
+
+  ```shell
+  python tools/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls
+  ```
+
+- Plot the classification and regression loss of some run, and save the figure to a pdf.
+
+  ```shell
+  python tools/analyze_logs.py plot_curve log.json --keys loss_cls loss_bbox --out losses.pdf
+  ```
+
+- Compare the bbox mAP of two runs in the same figure.
+
+  ```shell
+  python tools/analyze_logs.py plot_curve log1.json log2.json --keys bbox_mAP --legend run1 run2
+  ```
+
+- Compute the average training speed.
+
+  ```shell
+  python tools/analyze_logs.py cal_train_time log.json [--include-outliers]
+  ```
+
+  The output is expected to be like the following.
+
+  ```text
+  -----Analyze train time of work_dirs/some_exp/20190611_192040.log.json-----
+  slowest epoch 11, average time is 1.2024
+  fastest epoch 1, average time is 1.1909
+  time std over epochs is 0.0028
+  average iter time: 1.1959 s/iter
+  ```
+
+## Visualization
+
+### Visualize Datasets
+
+`tools/browse_dataset.py` helps the user to browse a detection dataset (both
+images and bounding box annotations) visually, or save the image to a
+designated directory.
+
+```shell
+python tools/browse_dataset.py ${CONFIG} [-h] [--skip-type ${SKIP_TYPE[SKIP_TYPE...]}] [--output-dir ${OUTPUT_DIR}] [--not-show] [--show-interval ${SHOW_INTERVAL}]
+```
+
+### Visualize Models
+
+First, convert the model to ONNX as described
+[here](#convert-mmdetection-model-to-onnx-experimental).
+Note that currently only RetinaNet is supported; support for other models
+will come in later versions.
+The converted model can be visualized by tools like [Netron](https://github.com/lutzroeder/netron).
+
+### Visualize Predictions
+
+If you need a lightweight GUI for visualizing the detection results, you can refer to the [DetVisGUI project](https://github.com/Chien-Hung/DetVisGUI/tree/mmdetection).
+
+## Error Analysis
+
+`tools/coco_error_analysis.py` analyzes COCO results per category and by
+different criteria. It can also make a plot to provide useful
+information.
+
+```shell
+python tools/coco_error_analysis.py ${RESULT} ${OUT_DIR} [-h] [--ann ${ANN}] [--types ${TYPES[TYPES...]}]
+```
+
+## Model Complexity
+
+`tools/get_flops.py` is a script adapted from [flops-counter.pytorch](https://github.com/sovrasov/flops-counter.pytorch) to compute the FLOPs and params of a given model.
+
+```shell
+python tools/get_flops.py ${CONFIG_FILE} [--shape ${INPUT_SHAPE}]
+```
+
+You will get a result like this.
+
+```text
+==============================
+Input shape: (3, 1280, 800)
+Flops: 239.32 GFLOPs
+Params: 37.74 M
+==============================
+```
+
+**Note**: This tool is still experimental and we do not guarantee that the
+number is absolutely correct. You may well use the result for simple
+comparisons, but double check it before you adopt it in technical reports or papers.
+
+1. FLOPs are related to the input shape while parameters are not. The default
+   input shape is (1, 3, 1280, 800).
+2. Some operators, such as GN and custom operators, are not counted in FLOPs.
+   Refer to [`mmcv.cnn.get_model_complexity_info()`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/cnn/utils/flops_counter.py) for details.
+3. The FLOPs of two-stage detectors depend on the number of proposals.
+
+## Model conversion
+
+### MMDetection model to ONNX (experimental)
+
+We provide a script to convert a model to [ONNX](https://github.com/onnx/onnx) format. We also support comparing the output results between the PyTorch and
+ONNX models for verification.
+
+```shell
+python tools/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} --output_file ${ONNX_FILE} [--shape ${INPUT_SHAPE} --verify]
+```
+
+**Note**: This tool is still experimental. Some customized operators are not supported for now. We only support exporting the RetinaNet model at this moment.
+
+### MMDetection 1.x model to MMDetection 2.x
+
+`tools/upgrade_model_version.py` upgrades a previous MMDetection checkpoint
+to the new version. Note that this script is not guaranteed to work as some
+breaking changes are introduced in the new version. It is recommended to
+directly use the new checkpoints.
+
+```shell
+python tools/upgrade_model_version.py ${IN_FILE} ${OUT_FILE} [-h] [--num-classes NUM_CLASSES]
+```
+
+### RegNet model to MMDetection
+
+`tools/regnet2mmdet.py` converts keys in pycls pretrained RegNet models to
+MMDetection style.
+
+```shell
+python tools/regnet2mmdet.py ${SRC} ${DST} [-h]
+```
+
+### Detectron ResNet to Pytorch
+
+`tools/detectron2pytorch.py` converts keys in the original detectron pretrained
+ResNet models to PyTorch style.
+
+```shell
+python tools/detectron2pytorch.py ${SRC} ${DST} ${DEPTH} [-h]
+```
+
+### Prepare a model for publishing
+
+`tools/publish_model.py` helps users to prepare their model for publishing.
+
+Before you upload a model to AWS, you may want to
+
+1. convert model weights to CPU tensors,
+2. delete the optimizer states, and
+3. compute the hash of the checkpoint file and append the hash id to the
+   filename.
+
+```shell
+python tools/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME}
+```
+
+E.g.,
+
+```shell
+python tools/publish_model.py work_dirs/faster_rcnn/latest.pth faster_rcnn_r50_fpn_1x_20190801.pth
+```
+
+The final output filename will be `faster_rcnn_r50_fpn_1x_20190801-{hash id}.pth`.
+
+## Dataset Conversion
+
+`tools/convert_datasets/` contains tools to convert the Cityscapes dataset
+and Pascal VOC dataset to the COCO format.
+
+```shell
+python tools/convert_datasets/cityscapes.py ${CITYSCAPES_PATH} [-h] [--img-dir ${IMG_DIR}] [--gt-dir ${GT_DIR}] [-o ${OUT_DIR}] [--nproc ${NPROC}]
+python tools/convert_datasets/pascal_voc.py ${DEVKIT_PATH} [-h] [-o ${OUT_DIR}]
+```
+
+## Miscellaneous
+
+### Evaluating a metric
+
+`tools/eval_metric.py` evaluates certain metrics of a pkl result file
+according to a config file.
+ +```shell +python tools/eval_metric.py ${CONFIG} ${PKL_RESULTS} [-h] [--format-only] [--eval ${EVAL[EVAL ...]}] + [--cfg-options ${CFG_OPTIONS [CFG_OPTIONS ...]}] + [--eval-options ${EVAL_OPTIONS [EVAL_OPTIONS ...]}] +``` + +### Print the entire config + +`tools/print_config.py` prints the whole config verbatim, expanding all its + imports. + +```shell +python tools/print_config.py ${CONFIG} [-h] [--options ${OPTIONS [OPTIONS...]}] +``` + +### Test the robustness of detectors + +Please refer to [robustness_benchmarking.md](robustness_benchmarking.md). diff --git a/thirdparty/mmdetection/mmdet/__init__.py b/thirdparty/mmdetection/mmdet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..39faed8b0d2fcf749a90c30cc071e4d406a38257 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/__init__.py @@ -0,0 +1,28 @@ +import mmcv + +from .version import __version__, short_version + + +def digit_version(version_str): + digit_version = [] + for x in version_str.split('.'): + if x.isdigit(): + digit_version.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + digit_version.append(int(patch_version[0]) - 1) + digit_version.append(int(patch_version[1])) + return digit_version + + +mmcv_minimum_version = '1.1.5' +mmcv_maximum_version = '1.3' +mmcv_version = digit_version(mmcv.__version__) + + +assert (mmcv_version >= digit_version(mmcv_minimum_version) + and mmcv_version <= digit_version(mmcv_maximum_version)), \ + f'MMCV=={mmcv.__version__} is used but incompatible. ' \ + f'Please install mmcv>={mmcv_minimum_version}, <={mmcv_maximum_version}.' + +__all__ = ['__version__', 'short_version'] diff --git a/thirdparty/mmdetection/mmdet/apis/__init__.py b/thirdparty/mmdetection/mmdet/apis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1d8035b74877fdeccaa41cbc10a9f1f9924eac85 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/apis/__init__.py @@ -0,0 +1,10 @@ +from .inference import (async_inference_detector, inference_detector, + init_detector, show_result_pyplot) +from .test import multi_gpu_test, single_gpu_test +from .train import get_root_logger, set_random_seed, train_detector + +__all__ = [ + 'get_root_logger', 'set_random_seed', 'train_detector', 'init_detector', + 'async_inference_detector', 'inference_detector', 'show_result_pyplot', + 'multi_gpu_test', 'single_gpu_test' +] diff --git a/thirdparty/mmdetection/mmdet/apis/inference.py b/thirdparty/mmdetection/mmdet/apis/inference.py new file mode 100644 index 0000000000000000000000000000000000000000..6fa19cd585a509b5647bd4b2bb81383288dc49cb --- /dev/null +++ b/thirdparty/mmdetection/mmdet/apis/inference.py @@ -0,0 +1,187 @@ +import warnings + +import matplotlib.pyplot as plt +import mmcv +import numpy as np +import torch +from mmcv.ops import RoIPool +from mmcv.parallel import collate, scatter +from mmcv.runner import load_checkpoint + +from mmdet.core import get_classes +from mmdet.datasets.pipelines import Compose +from mmdet.models import build_detector + + +def init_detector(config, checkpoint=None, device='cuda:0', cfg_options=None): + """Initialize a detector from config file. + + Args: + config (str or :obj:`mmcv.Config`): Config file path or the config + object. + checkpoint (str, optional): Checkpoint path. If left as None, the model + will not load any weights. + cfg_options (dict): Options to override some settings in the used + config. + + Returns: + nn.Module: The constructed detector. 
+ """ + if isinstance(config, str): + config = mmcv.Config.fromfile(config) + elif not isinstance(config, mmcv.Config): + raise TypeError('config must be a filename or Config object, ' + f'but got {type(config)}') + if cfg_options is not None: + config.merge_from_dict(cfg_options) + config.model.pretrained = None + model = build_detector(config.model, test_cfg=config.test_cfg) + if checkpoint is not None: + map_loc = 'cpu' if device == 'cpu' else None + checkpoint = load_checkpoint(model, checkpoint, map_location=map_loc) + if 'CLASSES' in checkpoint['meta']: + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + warnings.simplefilter('once') + warnings.warn('Class names are not saved in the checkpoint\'s ' + 'meta data, use COCO classes by default.') + model.CLASSES = get_classes('coco') + model.cfg = config # save the config in the model for convenience + model.to(device) + model.eval() + return model + + +class LoadImage(object): + """A simple pipeline to load image.""" + + def __call__(self, results): + """Call function to load images into results. + + Args: + results (dict): A result dict contains the file name + of the image to be read. + + Returns: + dict: ``results`` will be returned containing loaded image. + """ + if isinstance(results['img'], str): + results['filename'] = results['img'] + results['ori_filename'] = results['img'] + else: + results['filename'] = None + results['ori_filename'] = None + img = mmcv.imread(results['img']) + results['img'] = img + results['img_fields'] = ['img'] + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + return results + + +def inference_detector(model, img): + """Inference image(s) with the detector. + + Args: + model (nn.Module): The loaded detector. + imgs (str/ndarray or list[str/ndarray]): Either image files or loaded + images. + + Returns: + If imgs is a str, a generator will be returned, otherwise return the + detection results directly. + """ + cfg = model.cfg + device = next(model.parameters()).device # model device + # prepare data + if isinstance(img, np.ndarray): + # directly add img + data = dict(img=img) + cfg = cfg.copy() + # set loading pipeline type + cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam' + else: + # add information into dict + data = dict(img_info=dict(filename=img), img_prefix=None) + # build the data pipeline + test_pipeline = Compose(cfg.data.test.pipeline) + data = test_pipeline(data) + data = collate([data], samples_per_gpu=1) + if next(model.parameters()).is_cuda: + # scatter to specified GPU + data = scatter(data, [device])[0] + else: + for m in model.modules(): + assert not isinstance( + m, RoIPool + ), 'CPU inference with RoIPool is not supported currently.' + # just get the actual data from DataContainer + data['img_metas'] = data['img_metas'][0].data + + # forward the model + with torch.no_grad(): + result = model(return_loss=False, rescale=True, **data)[0] + return result + + +async def async_inference_detector(model, img): + """Async inference image(s) with the detector. + + Args: + model (nn.Module): The loaded detector. + img (str | ndarray): Either image files or loaded images. + + Returns: + Awaitable detection results. 
+ """ + cfg = model.cfg + device = next(model.parameters()).device # model device + # prepare data + if isinstance(img, np.ndarray): + # directly add img + data = dict(img=img) + cfg = cfg.copy() + # set loading pipeline type + cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam' + else: + # add information into dict + data = dict(img_info=dict(filename=img), img_prefix=None) + # build the data pipeline + test_pipeline = Compose(cfg.data.test.pipeline) + data = test_pipeline(data) + data = scatter(collate([data], samples_per_gpu=1), [device])[0] + + # We don't restore `torch.is_grad_enabled()` value during concurrent + # inference since execution can overlap + torch.set_grad_enabled(False) + result = await model.aforward_test(rescale=True, **data) + return result + + +def show_result_pyplot(model, + img, + result, + score_thr=0.3, + fig_size=(15, 10), + title='result', + block=True): + """Visualize the detection results on the image. + + Args: + model (nn.Module): The loaded detector. + img (str or np.ndarray): Image filename or loaded image. + result (tuple[list] or list): The detection result, can be either + (bbox, segm) or just bbox. + score_thr (float): The threshold to visualize the bboxes and masks. + fig_size (tuple): Figure size of the pyplot figure. + title (str): Title of the pyplot figure. + block (bool): Whether to block GUI. + """ + if hasattr(model, 'module'): + model = model.module + img = model.show_result(img, result, score_thr=score_thr, show=False) + plt.figure(figsize=fig_size) + plt.imshow(mmcv.bgr2rgb(img)) + plt.title(title) + plt.tight_layout() + plt.show(block=block) diff --git a/thirdparty/mmdetection/mmdet/apis/test.py b/thirdparty/mmdetection/mmdet/apis/test.py new file mode 100644 index 0000000000000000000000000000000000000000..e54b1b8c24efc448972c31ee5da63041d7f97a47 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/apis/test.py @@ -0,0 +1,190 @@ +import os.path as osp +import pickle +import shutil +import tempfile +import time + +import mmcv +import torch +import torch.distributed as dist +from mmcv.image import tensor2imgs +from mmcv.runner import get_dist_info + +from mmdet.core import encode_mask_results + + +def single_gpu_test(model, + data_loader, + show=False, + out_dir=None, + show_score_thr=0.3): + model.eval() + results = [] + dataset = data_loader.dataset + prog_bar = mmcv.ProgressBar(len(dataset)) + for i, data in enumerate(data_loader): + with torch.no_grad(): + result = model(return_loss=False, rescale=True, **data) + + batch_size = len(result) + if show or out_dir: + if batch_size == 1 and isinstance(data['img'][0], torch.Tensor): + img_tensor = data['img'][0] + else: + img_tensor = data['img'][0].data[0] + img_metas = data['img_metas'][0].data[0] + imgs = tensor2imgs(img_tensor, **img_metas[0]['img_norm_cfg']) + assert len(imgs) == len(img_metas) + + for i, (img, img_meta) in enumerate(zip(imgs, img_metas)): + h, w, _ = img_meta['img_shape'] + img_show = img[:h, :w, :] + + ori_h, ori_w = img_meta['ori_shape'][:-1] + img_show = mmcv.imresize(img_show, (ori_w, ori_h)) + + if out_dir: + out_file = osp.join(out_dir, img_meta['ori_filename']) + else: + out_file = None + + model.module.show_result( + img_show, + result[i], + show=show, + out_file=out_file, + score_thr=show_score_thr) + + # encode mask results + if isinstance(result[0], tuple): + result = [(bbox_results, encode_mask_results(mask_results)) + for bbox_results, mask_results in result] + results.extend(result) + + for _ in range(batch_size): + prog_bar.update() + return results + + 
+def multi_gpu_test(model, data_loader, tmpdir=None, gpu_collect=False): + """Test model with multiple gpus. + + This method tests model with multiple gpus and collects the results + under two different modes: gpu and cpu modes. By setting 'gpu_collect=True' + it encodes results to gpu tensors and use gpu communication for results + collection. On cpu mode it saves the results on different gpus to 'tmpdir' + and collects them by the rank 0 worker. + + Args: + model (nn.Module): Model to be tested. + data_loader (nn.Dataloader): Pytorch data loader. + tmpdir (str): Path of directory to save the temporary results from + different gpus under cpu mode. + gpu_collect (bool): Option to use either gpu or cpu to collect results. + + Returns: + list: The prediction results. + """ + model.eval() + results = [] + dataset = data_loader.dataset + rank, world_size = get_dist_info() + if rank == 0: + prog_bar = mmcv.ProgressBar(len(dataset)) + time.sleep(2) # This line can prevent deadlock problem in some cases. + for i, data in enumerate(data_loader): + with torch.no_grad(): + result = model(return_loss=False, rescale=True, **data) + # encode mask results + if isinstance(result[0], tuple): + result = [(bbox_results, encode_mask_results(mask_results)) + for bbox_results, mask_results in result] + results.extend(result) + + if rank == 0: + batch_size = len(result) + for _ in range(batch_size * world_size): + prog_bar.update() + + # collect results from all ranks + if gpu_collect: + results = collect_results_gpu(results, len(dataset)) + else: + results = collect_results_cpu(results, len(dataset), tmpdir) + return results + + +def collect_results_cpu(result_part, size, tmpdir=None): + rank, world_size = get_dist_info() + # create a tmp dir if it is not specified + if tmpdir is None: + MAX_LEN = 512 + # 32 is whitespace + dir_tensor = torch.full((MAX_LEN, ), + 32, + dtype=torch.uint8, + device='cuda') + if rank == 0: + mmcv.mkdir_or_exist('.dist_test') + tmpdir = tempfile.mkdtemp(dir='.dist_test') + tmpdir = torch.tensor( + bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda') + dir_tensor[:len(tmpdir)] = tmpdir + dist.broadcast(dir_tensor, 0) + tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip() + else: + mmcv.mkdir_or_exist(tmpdir) + # dump the part result to the dir + mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl')) + dist.barrier() + # collect all parts + if rank != 0: + return None + else: + # load results of all parts from tmp dir + part_list = [] + for i in range(world_size): + part_file = osp.join(tmpdir, f'part_{i}.pkl') + part_list.append(mmcv.load(part_file)) + # sort the results + ordered_results = [] + for res in zip(*part_list): + ordered_results.extend(list(res)) + # the dataloader may pad some samples + ordered_results = ordered_results[:size] + # remove tmp dir + shutil.rmtree(tmpdir) + return ordered_results + + +def collect_results_gpu(result_part, size): + rank, world_size = get_dist_info() + # dump result part to tensor with pickle + part_tensor = torch.tensor( + bytearray(pickle.dumps(result_part)), dtype=torch.uint8, device='cuda') + # gather all result part tensor shape + shape_tensor = torch.tensor(part_tensor.shape, device='cuda') + shape_list = [shape_tensor.clone() for _ in range(world_size)] + dist.all_gather(shape_list, shape_tensor) + # padding result part tensor to max length + shape_max = torch.tensor(shape_list).max() + part_send = torch.zeros(shape_max, dtype=torch.uint8, device='cuda') + part_send[:shape_tensor[0]] = part_tensor + 
part_recv_list = [ + part_tensor.new_zeros(shape_max) for _ in range(world_size) + ] + # gather all result part + dist.all_gather(part_recv_list, part_send) + + if rank == 0: + part_list = [] + for recv, shape in zip(part_recv_list, shape_list): + part_list.append( + pickle.loads(recv[:shape[0]].cpu().numpy().tobytes())) + # sort the results + ordered_results = [] + for res in zip(*part_list): + ordered_results.extend(list(res)) + # the dataloader may pad some samples + ordered_results = ordered_results[:size] + return ordered_results diff --git a/thirdparty/mmdetection/mmdet/apis/train.py b/thirdparty/mmdetection/mmdet/apis/train.py new file mode 100644 index 0000000000000000000000000000000000000000..ad17a5379888028c793aed8837d90e3644e4e13f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/apis/train.py @@ -0,0 +1,150 @@ +import random + +import numpy as np +import torch +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner, + Fp16OptimizerHook, OptimizerHook, build_optimizer) +from mmcv.utils import build_from_cfg + +from mmdet.core import DistEvalHook, EvalHook +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.utils import get_root_logger + + +def set_random_seed(seed, deterministic=False): + """Set random seed. + + Args: + seed (int): Seed to be used. + deterministic (bool): Whether to set the deterministic option for + CUDNN backend, i.e., set `torch.backends.cudnn.deterministic` + to True and `torch.backends.cudnn.benchmark` to False. + Default: False. + """ + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + if deterministic: + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + +def train_detector(model, + dataset, + cfg, + distributed=False, + validate=False, + timestamp=None, + meta=None): + logger = get_root_logger(cfg.log_level) + + # prepare data loaders + dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset] + if 'imgs_per_gpu' in cfg.data: + logger.warning('"imgs_per_gpu" is deprecated in MMDet V2.0. 
' + 'Please use "samples_per_gpu" instead') + if 'samples_per_gpu' in cfg.data: + logger.warning( + f'Got "imgs_per_gpu"={cfg.data.imgs_per_gpu} and ' + f'"samples_per_gpu"={cfg.data.samples_per_gpu}, "imgs_per_gpu"' + f'={cfg.data.imgs_per_gpu} is used in this experiments') + else: + logger.warning( + 'Automatically set "samples_per_gpu"="imgs_per_gpu"=' + f'{cfg.data.imgs_per_gpu} in this experiments') + cfg.data.samples_per_gpu = cfg.data.imgs_per_gpu + + data_loaders = [ + build_dataloader( + ds, + cfg.data.samples_per_gpu, + cfg.data.workers_per_gpu, + # cfg.gpus will be ignored if distributed + len(cfg.gpu_ids), + dist=distributed, + seed=cfg.seed) for ds in dataset + ] + + # put model on gpus + if distributed: + find_unused_parameters = cfg.get('find_unused_parameters', False) + # Sets the `find_unused_parameters` parameter in + # torch.nn.parallel.DistributedDataParallel + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False, + find_unused_parameters=find_unused_parameters) + else: + model = MMDataParallel( + model.cuda(cfg.gpu_ids[0]), device_ids=cfg.gpu_ids) + + # build runner + optimizer = build_optimizer(model, cfg.optimizer) + runner = EpochBasedRunner( + model, + optimizer=optimizer, + work_dir=cfg.work_dir, + logger=logger, + meta=meta) + # an ugly workaround to make .log and .log.json filenames the same + runner.timestamp = timestamp + + # fp16 setting + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + optimizer_config = Fp16OptimizerHook( + **cfg.optimizer_config, **fp16_cfg, distributed=distributed) + elif distributed and 'type' not in cfg.optimizer_config: + optimizer_config = OptimizerHook(**cfg.optimizer_config) + else: + optimizer_config = cfg.optimizer_config + + # register hooks + runner.register_training_hooks(cfg.lr_config, optimizer_config, + cfg.checkpoint_config, cfg.log_config, + cfg.get('momentum_config', None)) + if distributed: + runner.register_hook(DistSamplerSeedHook()) + + # register eval hooks + if validate: + # Support batch_size > 1 in validation + val_samples_per_gpu = cfg.data.val.pop('samples_per_gpu', 1) + if val_samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.val.pipeline = replace_ImageToTensor( + cfg.data.val.pipeline) + val_dataset = build_dataset(cfg.data.val, dict(test_mode=True)) + val_dataloader = build_dataloader( + val_dataset, + samples_per_gpu=val_samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + eval_cfg = cfg.get('evaluation', {}) + eval_hook = DistEvalHook if distributed else EvalHook + runner.register_hook(eval_hook(val_dataloader, **eval_cfg)) + + # user-defined hooks + if cfg.get('custom_hooks', None): + custom_hooks = cfg.custom_hooks + assert isinstance(custom_hooks, list), \ + f'custom_hooks expect list type, but got {type(custom_hooks)}' + for hook_cfg in cfg.custom_hooks: + assert isinstance(hook_cfg, dict), \ + 'Each item in custom_hooks expects dict type, but got ' \ + f'{type(hook_cfg)}' + hook_cfg = hook_cfg.copy() + priority = hook_cfg.pop('priority', 'NORMAL') + hook = build_from_cfg(hook_cfg, HOOKS) + runner.register_hook(hook, priority=priority) + + if cfg.resume_from: + runner.resume(cfg.resume_from) + elif cfg.load_from: + runner.load_checkpoint(cfg.load_from) + runner.run(data_loaders, cfg.workflow, cfg.total_epochs) diff --git a/thirdparty/mmdetection/mmdet/core/__init__.py b/thirdparty/mmdetection/mmdet/core/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..b075369096b284f0112cb37e19d6e2d50878b60f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/__init__.py @@ -0,0 +1,8 @@ +from .anchor import * # noqa: F401, F403 +from .bbox import * # noqa: F401, F403 +from .evaluation import * # noqa: F401, F403 +from .export import * # noqa: F401, F403 +from .fp16 import * # noqa: F401, F403 +from .mask import * # noqa: F401, F403 +from .post_processing import * # noqa: F401, F403 +from .utils import * # noqa: F401, F403 diff --git a/thirdparty/mmdetection/mmdet/core/anchor/__init__.py b/thirdparty/mmdetection/mmdet/core/anchor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5838ff3eefb03bc83928fa13848cea9ff8647827 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/anchor/__init__.py @@ -0,0 +1,11 @@ +from .anchor_generator import (AnchorGenerator, LegacyAnchorGenerator, + YOLOAnchorGenerator) +from .builder import ANCHOR_GENERATORS, build_anchor_generator +from .point_generator import PointGenerator +from .utils import anchor_inside_flags, calc_region, images_to_levels + +__all__ = [ + 'AnchorGenerator', 'LegacyAnchorGenerator', 'anchor_inside_flags', + 'PointGenerator', 'images_to_levels', 'calc_region', + 'build_anchor_generator', 'ANCHOR_GENERATORS', 'YOLOAnchorGenerator' +] diff --git a/thirdparty/mmdetection/mmdet/core/anchor/anchor_generator.py b/thirdparty/mmdetection/mmdet/core/anchor/anchor_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..29b5ed04b95081b4145d2bd2272dbcda30be00d8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/anchor/anchor_generator.py @@ -0,0 +1,728 @@ +import mmcv +import numpy as np +import torch +from torch.nn.modules.utils import _pair + +from .builder import ANCHOR_GENERATORS + + +@ANCHOR_GENERATORS.register_module() +class AnchorGenerator(object): + """Standard anchor generator for 2D anchor-based detectors. + + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels in order (w, h). + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + scales (list[int] | None): Anchor scales for anchors in a single level. + It cannot be set at the same time if `octave_base_scale` and + `scales_per_octave` are set. + base_sizes (list[int] | None): The basic sizes + of anchors in multiple levels. + If None is given, strides will be used as base_sizes. + (If strides are non square, the shortest stride is taken.) + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. By default it is True in V2.0 + octave_base_scale (int): The base scale of octave. + scales_per_octave (int): Number of scales for each octave. + `octave_base_scale` and `scales_per_octave` are usually used in + retinanet and the `scales` should be None when they are set. + centers (list[tuple[float, float]] | None): The centers of the anchor + relative to the feature grid center in multiple feature levels. + By default it is set to be None and not used. If a list of tuple of + float is given, they will be used to shift the centers of anchors. + center_offset (float): The offset of center in proportion to anchors' + width and height. By default it is 0 in V2.0. 
+ + Examples: + >>> from mmdet.core import AnchorGenerator + >>> self = AnchorGenerator([16], [1.], [1.], [9]) + >>> all_anchors = self.grid_anchors([(2, 2)], device='cpu') + >>> print(all_anchors) + [tensor([[-4.5000, -4.5000, 4.5000, 4.5000], + [11.5000, -4.5000, 20.5000, 4.5000], + [-4.5000, 11.5000, 4.5000, 20.5000], + [11.5000, 11.5000, 20.5000, 20.5000]])] + >>> self = AnchorGenerator([16, 32], [1.], [1.], [9, 18]) + >>> all_anchors = self.grid_anchors([(2, 2), (1, 1)], device='cpu') + >>> print(all_anchors) + [tensor([[-4.5000, -4.5000, 4.5000, 4.5000], + [11.5000, -4.5000, 20.5000, 4.5000], + [-4.5000, 11.5000, 4.5000, 20.5000], + [11.5000, 11.5000, 20.5000, 20.5000]]), \ + tensor([[-9., -9., 9., 9.]])] + """ + + def __init__(self, + strides, + ratios, + scales=None, + base_sizes=None, + scale_major=True, + octave_base_scale=None, + scales_per_octave=None, + centers=None, + center_offset=0.): + # check center and center_offset + if center_offset != 0: + assert centers is None, 'center cannot be set when center_offset' \ + f'!=0, {centers} is given.' + if not (0 <= center_offset <= 1): + raise ValueError('center_offset should be in range [0, 1], ' + f'{center_offset} is given.') + if centers is not None: + assert len(centers) == len(strides), \ + 'The number of strides should be the same as centers, got ' \ + f'{strides} and {centers}' + + # calculate base sizes of anchors + self.strides = [_pair(stride) for stride in strides] + self.base_sizes = [min(stride) for stride in self.strides + ] if base_sizes is None else base_sizes + assert len(self.base_sizes) == len(self.strides), \ + 'The number of strides should be the same as base sizes, got ' \ + f'{self.strides} and {self.base_sizes}' + + # calculate scales of anchors + assert ((octave_base_scale is not None + and scales_per_octave is not None) ^ (scales is not None)), \ + 'scales and octave_base_scale with scales_per_octave cannot' \ + ' be set at the same time' + if scales is not None: + self.scales = torch.Tensor(scales) + elif octave_base_scale is not None and scales_per_octave is not None: + octave_scales = np.array( + [2**(i / scales_per_octave) for i in range(scales_per_octave)]) + scales = octave_scales * octave_base_scale + self.scales = torch.Tensor(scales) + else: + raise ValueError('Either scales or octave_base_scale with ' + 'scales_per_octave should be set') + + self.octave_base_scale = octave_base_scale + self.scales_per_octave = scales_per_octave + self.ratios = torch.Tensor(ratios) + self.scale_major = scale_major + self.centers = centers + self.center_offset = center_offset + self.base_anchors = self.gen_base_anchors() + + @property + def num_base_anchors(self): + """list[int]: total number of base anchors in a feature grid""" + return [base_anchors.size(0) for base_anchors in self.base_anchors] + + @property + def num_levels(self): + """int: number of feature levels that the generator will be applied""" + return len(self.strides) + + def gen_base_anchors(self): + """Generate base anchors. + + Returns: + list(torch.Tensor): Base anchors of a feature grid in multiple \ + feature levels. 
+ """ + multi_level_base_anchors = [] + for i, base_size in enumerate(self.base_sizes): + center = None + if self.centers is not None: + center = self.centers[i] + multi_level_base_anchors.append( + self.gen_single_level_base_anchors( + base_size, + scales=self.scales, + ratios=self.ratios, + center=center)) + return multi_level_base_anchors + + def gen_single_level_base_anchors(self, + base_size, + scales, + ratios, + center=None): + """Generate base anchors of a single level. + + Args: + base_size (int | float): Basic size of an anchor. + scales (torch.Tensor): Scales of the anchor. + ratios (torch.Tensor): The ratio between between the height + and width of anchors in a single level. + center (tuple[float], optional): The center of the base anchor + related to a single feature grid. Defaults to None. + + Returns: + torch.Tensor: Anchors in a single-level feature maps. + """ + w = base_size + h = base_size + if center is None: + x_center = self.center_offset * w + y_center = self.center_offset * h + else: + x_center, y_center = center + + h_ratios = torch.sqrt(ratios) + w_ratios = 1 / h_ratios + if self.scale_major: + ws = (w * w_ratios[:, None] * scales[None, :]).view(-1) + hs = (h * h_ratios[:, None] * scales[None, :]).view(-1) + else: + ws = (w * scales[:, None] * w_ratios[None, :]).view(-1) + hs = (h * scales[:, None] * h_ratios[None, :]).view(-1) + + # use float anchor and the anchor's center is aligned with the + # pixel center + base_anchors = [ + x_center - 0.5 * ws, y_center - 0.5 * hs, x_center + 0.5 * ws, + y_center + 0.5 * hs + ] + base_anchors = torch.stack(base_anchors, dim=-1) + + return base_anchors + + def _meshgrid(self, x, y, row_major=True): + """Generate mesh grid of x and y. + + Args: + x (torch.Tensor): Grids of x dimension. + y (torch.Tensor): Grids of y dimension. + row_major (bool, optional): Whether to return y grids first. + Defaults to True. + + Returns: + tuple[torch.Tensor]: The mesh grids of x and y. + """ + xx = x.repeat(len(y)) + yy = y.view(-1, 1).repeat(1, len(x)).view(-1) + if row_major: + return xx, yy + else: + return yy, xx + + def grid_anchors(self, featmap_sizes, device='cuda'): + """Generate grid anchors in multiple feature levels. + + Args: + featmap_sizes (list[tuple]): List of feature map sizes in + multiple feature levels. + device (str): Device where the anchors will be put on. + + Return: + list[torch.Tensor]: Anchors in multiple feature levels. \ + The sizes of each tensor should be [N, 4], where \ + N = width * height * num_base_anchors, width and height \ + are the sizes of the corresponding feature level, \ + num_base_anchors is the number of anchors for that level. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_anchors = [] + for i in range(self.num_levels): + anchors = self.single_level_grid_anchors( + self.base_anchors[i].to(device), + featmap_sizes[i], + self.strides[i], + device=device) + multi_level_anchors.append(anchors) + return multi_level_anchors + + def single_level_grid_anchors(self, + base_anchors, + featmap_size, + stride=(16, 16), + device='cuda'): + """Generate grid anchors of a single level. + + Note: + This function is usually called by method ``self.grid_anchors``. + + Args: + base_anchors (torch.Tensor): The base anchors of a feature grid. + featmap_size (tuple[int]): Size of the feature maps. + stride (tuple[int], optional): Stride of the feature map in order + (w, h). Defaults to (16, 16). + device (str, optional): Device the tensor will be put on. + Defaults to 'cuda'. 
+ + Returns: + torch.Tensor: Anchors in the overall feature maps. + """ + feat_h, feat_w = featmap_size + # convert Tensor to int, so that we can covert to ONNX correctlly + feat_h = int(feat_h) + feat_w = int(feat_w) + shift_x = torch.arange(0, feat_w, device=device) * stride[0] + shift_y = torch.arange(0, feat_h, device=device) * stride[1] + + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + shifts = torch.stack([shift_xx, shift_yy, shift_xx, shift_yy], dim=-1) + shifts = shifts.type_as(base_anchors) + # first feat_w elements correspond to the first row of shifts + # add A anchors (1, A, 4) to K shifts (K, 1, 4) to get + # shifted anchors (K, A, 4), reshape to (K*A, 4) + + all_anchors = base_anchors[None, :, :] + shifts[:, None, :] + all_anchors = all_anchors.view(-1, 4) + # first A rows correspond to A anchors of (0, 0) in feature map, + # then (0, 1), (0, 2), ... + return all_anchors + + def valid_flags(self, featmap_sizes, pad_shape, device='cuda'): + """Generate valid flags of anchors in multiple feature levels. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in + multiple feature levels. + pad_shape (tuple): The padded shape of the image. + device (str): Device where the anchors will be put on. + + Return: + list(torch.Tensor): Valid flags of anchors in multiple levels. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_flags = [] + for i in range(self.num_levels): + anchor_stride = self.strides[i] + feat_h, feat_w = featmap_sizes[i] + h, w = pad_shape[:2] + valid_feat_h = min(int(np.ceil(h / anchor_stride[1])), feat_h) + valid_feat_w = min(int(np.ceil(w / anchor_stride[0])), feat_w) + flags = self.single_level_valid_flags((feat_h, feat_w), + (valid_feat_h, valid_feat_w), + self.num_base_anchors[i], + device=device) + multi_level_flags.append(flags) + return multi_level_flags + + def single_level_valid_flags(self, + featmap_size, + valid_size, + num_base_anchors, + device='cuda'): + """Generate the valid flags of anchor in a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps. + valid_size (tuple[int]): The valid size of the feature maps. + num_base_anchors (int): The number of base anchors. + device (str, optional): Device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each anchor in a single level \ + feature map. 
+ """ + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + valid = valid[:, None].expand(valid.size(0), + num_base_anchors).contiguous().view(-1) + return valid + + def __repr__(self): + """str: a string that describes the module""" + indent_str = ' ' + repr_str = self.__class__.__name__ + '(\n' + repr_str += f'{indent_str}strides={self.strides},\n' + repr_str += f'{indent_str}ratios={self.ratios},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}base_sizes={self.base_sizes},\n' + repr_str += f'{indent_str}scale_major={self.scale_major},\n' + repr_str += f'{indent_str}octave_base_scale=' + repr_str += f'{self.octave_base_scale},\n' + repr_str += f'{indent_str}scales_per_octave=' + repr_str += f'{self.scales_per_octave},\n' + repr_str += f'{indent_str}num_levels={self.num_levels}\n' + repr_str += f'{indent_str}centers={self.centers},\n' + repr_str += f'{indent_str}center_offset={self.center_offset})' + return repr_str + + +@ANCHOR_GENERATORS.register_module() +class SSDAnchorGenerator(AnchorGenerator): + """Anchor generator for SSD. + + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels. + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + basesize_ratio_range (tuple(float)): Ratio range of anchors. + input_size (int): Size of feature map, 300 for SSD300, + 512 for SSD512. + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. It is always set to be False in SSD. + """ + + def __init__(self, + strides, + ratios, + basesize_ratio_range, + input_size=300, + scale_major=True): + assert len(strides) == len(ratios) + assert mmcv.is_tuple_of(basesize_ratio_range, float) + + self.strides = [_pair(stride) for stride in strides] + self.input_size = input_size + self.centers = [(stride[0] / 2., stride[1] / 2.) 
+                        for stride in self.strides]
+        self.basesize_ratio_range = basesize_ratio_range
+
+        # calculate anchor ratios and sizes
+        min_ratio, max_ratio = basesize_ratio_range
+        min_ratio = int(min_ratio * 100)
+        max_ratio = int(max_ratio * 100)
+        step = int(np.floor(max_ratio - min_ratio) / (self.num_levels - 2))
+        min_sizes = []
+        max_sizes = []
+        for ratio in range(int(min_ratio), int(max_ratio) + 1, step):
+            min_sizes.append(int(self.input_size * ratio / 100))
+            max_sizes.append(int(self.input_size * (ratio + step) / 100))
+        if self.input_size == 300:
+            if basesize_ratio_range[0] == 0.15:  # SSD300 COCO
+                min_sizes.insert(0, int(self.input_size * 7 / 100))
+                max_sizes.insert(0, int(self.input_size * 15 / 100))
+            elif basesize_ratio_range[0] == 0.2:  # SSD300 VOC
+                min_sizes.insert(0, int(self.input_size * 10 / 100))
+                max_sizes.insert(0, int(self.input_size * 20 / 100))
+            else:
+                raise ValueError(
+                    'basesize_ratio_range[0] should be either 0.15 '
+                    'or 0.2 when input_size is 300, got '
+                    f'{basesize_ratio_range[0]}.')
+        elif self.input_size == 512:
+            if basesize_ratio_range[0] == 0.1:  # SSD512 COCO
+                min_sizes.insert(0, int(self.input_size * 4 / 100))
+                max_sizes.insert(0, int(self.input_size * 10 / 100))
+            elif basesize_ratio_range[0] == 0.15:  # SSD512 VOC
+                min_sizes.insert(0, int(self.input_size * 7 / 100))
+                max_sizes.insert(0, int(self.input_size * 15 / 100))
+            else:
+                raise ValueError('basesize_ratio_range[0] should be either '
+                                 '0.1 or 0.15 when input_size is 512, got'
+                                 f' {basesize_ratio_range[0]}.')
+        else:
+            raise ValueError('Only support 300 or 512 in SSDAnchorGenerator'
+                             f', got {self.input_size}.')
+
+        anchor_ratios = []
+        anchor_scales = []
+        for k in range(len(self.strides)):
+            scales = [1., np.sqrt(max_sizes[k] / min_sizes[k])]
+            anchor_ratio = [1.]
+            for r in ratios[k]:
+                anchor_ratio += [1 / r, r]  # 4 or 6 ratio
+            anchor_ratios.append(torch.Tensor(anchor_ratio))
+            anchor_scales.append(torch.Tensor(scales))
+
+        self.base_sizes = min_sizes
+        self.scales = anchor_scales
+        self.ratios = anchor_ratios
+        self.scale_major = scale_major
+        self.center_offset = 0
+        self.base_anchors = self.gen_base_anchors()
+
+    def gen_base_anchors(self):
+        """Generate base anchors.
+
+        Returns:
+            list(torch.Tensor): Base anchors of a feature grid in multiple \
+                feature levels.
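+
+        Example:
+            >>> # An illustrative sketch using the typical SSD300 (VOC)
+            >>> # configuration; only the per-level anchor counts are
+            >>> # checked here.
+            >>> self = SSDAnchorGenerator(
+            ...     strides=[8, 16, 32, 64, 100, 300],
+            ...     ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]],
+            ...     basesize_ratio_range=(0.2, 0.9),
+            ...     input_size=300,
+            ...     scale_major=False)
+            >>> [a.size(0) for a in self.gen_base_anchors()]
+            [4, 6, 6, 6, 4, 4]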
+ """ + multi_level_base_anchors = [] + for i, base_size in enumerate(self.base_sizes): + base_anchors = self.gen_single_level_base_anchors( + base_size, + scales=self.scales[i], + ratios=self.ratios[i], + center=self.centers[i]) + indices = list(range(len(self.ratios[i]))) + indices.insert(1, len(indices)) + base_anchors = torch.index_select(base_anchors, 0, + torch.LongTensor(indices)) + multi_level_base_anchors.append(base_anchors) + return multi_level_base_anchors + + def __repr__(self): + """str: a string that describes the module""" + indent_str = ' ' + repr_str = self.__class__.__name__ + '(\n' + repr_str += f'{indent_str}strides={self.strides},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}scale_major={self.scale_major},\n' + repr_str += f'{indent_str}input_size={self.input_size},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}ratios={self.ratios},\n' + repr_str += f'{indent_str}num_levels={self.num_levels},\n' + repr_str += f'{indent_str}base_sizes={self.base_sizes},\n' + repr_str += f'{indent_str}basesize_ratio_range=' + repr_str += f'{self.basesize_ratio_range})' + return repr_str + + +@ANCHOR_GENERATORS.register_module() +class LegacyAnchorGenerator(AnchorGenerator): + """Legacy anchor generator used in MMDetection V1.x. + + Note: + Difference to the V2.0 anchor generator: + + 1. The center offset of V1.x anchors are set to be 0.5 rather than 0. + 2. The width/height are minused by 1 when calculating the anchors' \ + centers and corners to meet the V1.x coordinate system. + 3. The anchors' corners are quantized. + + Args: + strides (list[int] | list[tuple[int]]): Strides of anchors + in multiple feature levels. + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + scales (list[int] | None): Anchor scales for anchors in a single level. + It cannot be set at the same time if `octave_base_scale` and + `scales_per_octave` are set. + base_sizes (list[int]): The basic sizes of anchors in multiple levels. + If None is given, strides will be used to generate base_sizes. + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. By default it is True in V2.0 + octave_base_scale (int): The base scale of octave. + scales_per_octave (int): Number of scales for each octave. + `octave_base_scale` and `scales_per_octave` are usually used in + retinanet and the `scales` should be None when they are set. + centers (list[tuple[float, float]] | None): The centers of the anchor + relative to the feature grid center in multiple feature levels. + By default it is set to be None and not used. It a list of float + is given, this list will be used to shift the centers of anchors. + center_offset (float): The offset of center in propotion to anchors' + width and height. By default it is 0.5 in V2.0 but it should be 0.5 + in v1.x models. + + Examples: + >>> from mmdet.core import LegacyAnchorGenerator + >>> self = LegacyAnchorGenerator( + >>> [16], [1.], [1.], [9], center_offset=0.5) + >>> all_anchors = self.grid_anchors(((2, 2),), device='cpu') + >>> print(all_anchors) + [tensor([[ 0., 0., 8., 8.], + [16., 0., 24., 8.], + [ 0., 16., 8., 24.], + [16., 16., 24., 24.]])] + """ + + def gen_single_level_base_anchors(self, + base_size, + scales, + ratios, + center=None): + """Generate base anchors of a single level. 
+
+        Note:
+            The width/height of anchors are reduced by 1 when calculating \
+            the centers and corners to meet the V1.x coordinate system.
+
+        Args:
+            base_size (int | float): Basic size of an anchor.
+            scales (torch.Tensor): Scales of the anchor.
+            ratios (torch.Tensor): The ratio between the height
+                and width of anchors in a single level.
+            center (tuple[float], optional): The center of the base anchor
+                related to a single feature grid. Defaults to None.
+
+        Returns:
+            torch.Tensor: Anchors in a single-level feature map.
+        """
+        w = base_size
+        h = base_size
+        if center is None:
+            x_center = self.center_offset * (w - 1)
+            y_center = self.center_offset * (h - 1)
+        else:
+            x_center, y_center = center
+
+        h_ratios = torch.sqrt(ratios)
+        w_ratios = 1 / h_ratios
+        if self.scale_major:
+            ws = (w * w_ratios[:, None] * scales[None, :]).view(-1)
+            hs = (h * h_ratios[:, None] * scales[None, :]).view(-1)
+        else:
+            ws = (w * scales[:, None] * w_ratios[None, :]).view(-1)
+            hs = (h * scales[:, None] * h_ratios[None, :]).view(-1)
+
+        # use float anchor and the anchor's center is aligned with the
+        # pixel center
+        base_anchors = [
+            x_center - 0.5 * (ws - 1), y_center - 0.5 * (hs - 1),
+            x_center + 0.5 * (ws - 1), y_center + 0.5 * (hs - 1)
+        ]
+        base_anchors = torch.stack(base_anchors, dim=-1).round()
+
+        return base_anchors
+
+
+@ANCHOR_GENERATORS.register_module()
+class LegacySSDAnchorGenerator(SSDAnchorGenerator, LegacyAnchorGenerator):
+    """Legacy anchor generator used in MMDetection V1.x.
+
+    The difference between `LegacySSDAnchorGenerator` and `SSDAnchorGenerator`
+    can be found in `LegacyAnchorGenerator`.
+    """
+
+    def __init__(self,
+                 strides,
+                 ratios,
+                 basesize_ratio_range,
+                 input_size=300,
+                 scale_major=True):
+        super(LegacySSDAnchorGenerator,
+              self).__init__(strides, ratios, basesize_ratio_range, input_size,
+                             scale_major)
+        self.centers = [((stride - 1) / 2., (stride - 1) / 2.)
+                        for stride in strides]
+        self.base_anchors = self.gen_base_anchors()
+
+
+@ANCHOR_GENERATORS.register_module()
+class YOLOAnchorGenerator(AnchorGenerator):
+    """Anchor generator for YOLO.
+
+    Args:
+        strides (list[int] | list[tuple[int, int]]): Strides of anchors
+            in multiple feature levels.
+        base_sizes (list[list[tuple[int, int]]]): The basic sizes
+            of anchors in multiple levels.
+    """
+
+    def __init__(self, strides, base_sizes):
+        self.strides = [_pair(stride) for stride in strides]
+        self.centers = [(stride[0] / 2., stride[1] / 2.)
+                        for stride in self.strides]
+        self.base_sizes = []
+        num_anchor_per_level = len(base_sizes[0])
+        for base_sizes_per_level in base_sizes:
+            assert num_anchor_per_level == len(base_sizes_per_level)
+            self.base_sizes.append(
+                [_pair(base_size) for base_size in base_sizes_per_level])
+        self.base_anchors = self.gen_base_anchors()
+
+    @property
+    def num_levels(self):
+        """int: number of feature levels that the generator will be applied"""
+        return len(self.base_sizes)
+
+    def gen_base_anchors(self):
+        """Generate base anchors.
+
+        Returns:
+            list(torch.Tensor): Base anchors of a feature grid in multiple \
+                feature levels.
+        """
+        multi_level_base_anchors = []
+        for i, base_sizes_per_level in enumerate(self.base_sizes):
+            center = None
+            if self.centers is not None:
+                center = self.centers[i]
+            multi_level_base_anchors.append(
+                self.gen_single_level_base_anchors(base_sizes_per_level,
+                                                   center))
+        return multi_level_base_anchors
+
+    def gen_single_level_base_anchors(self, base_sizes_per_level,
+                                      center=None):
+        """Generate base anchors of a single level.
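+
+        Each base size is taken directly as an anchor's (w, h); no
+        ratio/scale enumeration is involved, since each pair is simply
+        centered on the given feature-grid cell center.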
+ + Args: + base_sizes_per_level (list[tuple[int, int]]): Basic sizes of + anchors. + center (tuple[float], optional): The center of the base anchor + related to a single feature grid. Defaults to None. + + Returns: + torch.Tensor: Anchors in a single-level feature maps. + """ + x_center, y_center = center + base_anchors = [] + for base_size in base_sizes_per_level: + w, h = base_size + + # use float anchor and the anchor's center is aligned with the + # pixel center + base_anchor = torch.Tensor([ + x_center - 0.5 * w, y_center - 0.5 * h, x_center + 0.5 * w, + y_center + 0.5 * h + ]) + base_anchors.append(base_anchor) + base_anchors = torch.stack(base_anchors, dim=0) + + return base_anchors + + def responsible_flags(self, featmap_sizes, gt_bboxes, device='cuda'): + """Generate responsible anchor flags of grid cells in multiple scales. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in multiple + feature levels. + gt_bboxes (Tensor): Ground truth boxes, shape (n, 4). + device (str): Device where the anchors will be put on. + + Return: + list(torch.Tensor): responsible flags of anchors in multiple level + """ + assert self.num_levels == len(featmap_sizes) + multi_level_responsible_flags = [] + for i in range(self.num_levels): + anchor_stride = self.strides[i] + flags = self.single_level_responsible_flags( + featmap_sizes[i], + gt_bboxes, + anchor_stride, + self.num_base_anchors[i], + device=device) + multi_level_responsible_flags.append(flags) + return multi_level_responsible_flags + + def single_level_responsible_flags(self, + featmap_size, + gt_bboxes, + stride, + num_base_anchors, + device='cuda'): + """Generate the responsible flags of anchor in a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps. + gt_bboxes (Tensor): Ground truth boxes, shape (n, 4). + stride (tuple(int)): stride of current level + num_base_anchors (int): The number of base anchors. + device (str, optional): Device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each anchor in a single level \ + feature map. 
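+
+        Example:
+            >>> # An illustrative sketch: one gt whose center falls into
+            >>> # cell (1, 1) of a 2x2 grid with stride 16 and 3 base
+            >>> # anchors per cell; only that cell's anchors are flagged.
+            >>> self = YOLOAnchorGenerator(
+            ...     [16], [[(10, 13), (16, 30), (33, 23)]])
+            >>> gt_bboxes = torch.Tensor([[14., 14., 24., 24.]])
+            >>> flags = self.single_level_responsible_flags(
+            ...     (2, 2), gt_bboxes, (16, 16), 3, device='cpu')
+            >>> flags.view(4, 3)[3]
+            tensor([1, 1, 1], dtype=torch.uint8)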
+ """ + feat_h, feat_w = featmap_size + gt_bboxes_cx = ((gt_bboxes[:, 0] + gt_bboxes[:, 2]) * 0.5).to(device) + gt_bboxes_cy = ((gt_bboxes[:, 1] + gt_bboxes[:, 3]) * 0.5).to(device) + gt_bboxes_grid_x = torch.floor(gt_bboxes_cx / stride[0]).long() + gt_bboxes_grid_y = torch.floor(gt_bboxes_cy / stride[1]).long() + + # row major indexing + gt_bboxes_grid_idx = gt_bboxes_grid_y * feat_w + gt_bboxes_grid_x + + responsible_grid = torch.zeros( + feat_h * feat_w, dtype=torch.uint8, device=device) + responsible_grid[gt_bboxes_grid_idx] = 1 + + responsible_grid = responsible_grid[:, None].expand( + responsible_grid.size(0), num_base_anchors).contiguous().view(-1) + return responsible_grid diff --git a/thirdparty/mmdetection/mmdet/core/anchor/builder.py b/thirdparty/mmdetection/mmdet/core/anchor/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..d79b448ebca9f2b21d455046623172c48c5c3ef0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/anchor/builder.py @@ -0,0 +1,7 @@ +from mmcv.utils import Registry, build_from_cfg + +ANCHOR_GENERATORS = Registry('Anchor generator') + + +def build_anchor_generator(cfg, default_args=None): + return build_from_cfg(cfg, ANCHOR_GENERATORS, default_args) diff --git a/thirdparty/mmdetection/mmdet/core/anchor/point_generator.py b/thirdparty/mmdetection/mmdet/core/anchor/point_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..e6fbd988c317992c092c68c827dc4c53223b4a4a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/anchor/point_generator.py @@ -0,0 +1,37 @@ +import torch + +from .builder import ANCHOR_GENERATORS + + +@ANCHOR_GENERATORS.register_module() +class PointGenerator(object): + + def _meshgrid(self, x, y, row_major=True): + xx = x.repeat(len(y)) + yy = y.view(-1, 1).repeat(1, len(x)).view(-1) + if row_major: + return xx, yy + else: + return yy, xx + + def grid_points(self, featmap_size, stride=16, device='cuda'): + feat_h, feat_w = featmap_size + shift_x = torch.arange(0., feat_w, device=device) * stride + shift_y = torch.arange(0., feat_h, device=device) * stride + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + stride = shift_x.new_full((shift_xx.shape[0], ), stride) + shifts = torch.stack([shift_xx, shift_yy, stride], dim=-1) + all_points = shifts.to(device) + return all_points + + def valid_flags(self, featmap_size, valid_size, device='cuda'): + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + return valid diff --git a/thirdparty/mmdetection/mmdet/core/anchor/utils.py b/thirdparty/mmdetection/mmdet/core/anchor/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ab9b53f37f7be1f52fe63c5e53df64ac1303b9e0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/anchor/utils.py @@ -0,0 +1,71 @@ +import torch + + +def images_to_levels(target, num_levels): + """Convert targets by image to targets by feature level. + + [target_img0, target_img1] -> [target_level0, target_level1, ...] 
+ """ + target = torch.stack(target, 0) + level_targets = [] + start = 0 + for n in num_levels: + end = start + n + # level_targets.append(target[:, start:end].squeeze(0)) + level_targets.append(target[:, start:end]) + start = end + return level_targets + + +def anchor_inside_flags(flat_anchors, + valid_flags, + img_shape, + allowed_border=0): + """Check whether the anchors are inside the border. + + Args: + flat_anchors (torch.Tensor): Flatten anchors, shape (n, 4). + valid_flags (torch.Tensor): An existing valid flags of anchors. + img_shape (tuple(int)): Shape of current image. + allowed_border (int, optional): The border to allow the valid anchor. + Defaults to 0. + + Returns: + torch.Tensor: Flags indicating whether the anchors are inside a \ + valid range. + """ + img_h, img_w = img_shape[:2] + if allowed_border >= 0: + inside_flags = valid_flags & \ + (flat_anchors[:, 0] >= -allowed_border) & \ + (flat_anchors[:, 1] >= -allowed_border) & \ + (flat_anchors[:, 2] < img_w + allowed_border) & \ + (flat_anchors[:, 3] < img_h + allowed_border) + else: + inside_flags = valid_flags + return inside_flags + + +def calc_region(bbox, ratio, featmap_size=None): + """Calculate a proportional bbox region. + + The bbox center are fixed and the new h' and w' is h * ratio and w * ratio. + + Args: + bbox (Tensor): Bboxes to calculate regions, shape (n, 4). + ratio (float): Ratio of the output region. + featmap_size (tuple): Feature map size used for clipping the boundary. + + Returns: + tuple: x1, y1, x2, y2 + """ + x1 = torch.round((1 - ratio) * bbox[0] + ratio * bbox[2]).long() + y1 = torch.round((1 - ratio) * bbox[1] + ratio * bbox[3]).long() + x2 = torch.round(ratio * bbox[0] + (1 - ratio) * bbox[2]).long() + y2 = torch.round(ratio * bbox[1] + (1 - ratio) * bbox[3]).long() + if featmap_size is not None: + x1 = x1.clamp(min=0, max=featmap_size[1]) + y1 = y1.clamp(min=0, max=featmap_size[0]) + x2 = x2.clamp(min=0, max=featmap_size[1]) + y2 = y2.clamp(min=0, max=featmap_size[0]) + return (x1, y1, x2, y2) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/__init__.py b/thirdparty/mmdetection/mmdet/core/bbox/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6fc1f83130737b4bc4b07084b90a29113e3532ad --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/__init__.py @@ -0,0 +1,26 @@ +from .assigners import (AssignResult, BaseAssigner, CenterRegionAssigner, + MaxIoUAssigner) +from .builder import build_assigner, build_bbox_coder, build_sampler +from .coder import (BaseBBoxCoder, DeltaXYWHBBoxCoder, PseudoBBoxCoder, + TBLRBBoxCoder) +from .iou_calculators import BboxOverlaps2D, bbox_overlaps +from .samplers import (BaseSampler, CombinedSampler, + InstanceBalancedPosSampler, IoUBalancedNegSampler, + OHEMSampler, PseudoSampler, RandomSampler, + SamplingResult, ScoreHLRSampler) +from .transforms import (bbox2distance, bbox2result, bbox2roi, + bbox_cxcywh_to_xyxy, bbox_flip, bbox_mapping, + bbox_mapping_back, bbox_rescale, bbox_xyxy_to_cxcywh, + distance2bbox, roi2bbox) + +__all__ = [ + 'bbox_overlaps', 'BboxOverlaps2D', 'BaseAssigner', 'MaxIoUAssigner', + 'AssignResult', 'BaseSampler', 'PseudoSampler', 'RandomSampler', + 'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler', + 'OHEMSampler', 'SamplingResult', 'ScoreHLRSampler', 'build_assigner', + 'build_sampler', 'bbox_flip', 'bbox_mapping', 'bbox_mapping_back', + 'bbox2roi', 'roi2bbox', 'bbox2result', 'distance2bbox', 'bbox2distance', + 'build_bbox_coder', 'BaseBBoxCoder', 'PseudoBBoxCoder', + 
'DeltaXYWHBBoxCoder', 'TBLRBBoxCoder', 'CenterRegionAssigner',
+    'bbox_rescale', 'bbox_cxcywh_to_xyxy', 'bbox_xyxy_to_cxcywh'
+]
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/__init__.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8f0f48d8cfab09ae68ab2797f8ce0a5b8de0f12
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/__init__.py
@@ -0,0 +1,15 @@
+from .approx_max_iou_assigner import ApproxMaxIoUAssigner
+from .assign_result import AssignResult
+from .atss_assigner import ATSSAssigner
+from .base_assigner import BaseAssigner
+from .center_region_assigner import CenterRegionAssigner
+from .grid_assigner import GridAssigner
+from .hungarian_assigner import HungarianAssigner
+from .max_iou_assigner import MaxIoUAssigner
+from .point_assigner import PointAssigner
+
+__all__ = [
+    'BaseAssigner', 'MaxIoUAssigner', 'ApproxMaxIoUAssigner', 'AssignResult',
+    'PointAssigner', 'ATSSAssigner', 'CenterRegionAssigner', 'GridAssigner',
+    'HungarianAssigner'
+]
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/approx_max_iou_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/approx_max_iou_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d07656d173744426795c81c14c6bcdb4e63a406
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/approx_max_iou_assigner.py
@@ -0,0 +1,145 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .max_iou_assigner import MaxIoUAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class ApproxMaxIoUAssigner(MaxIoUAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with an integer indicating the ground
+    truth index. (semi-positive index: gt label (0-based), -1: background)
+
+    - -1: negative sample, no assigned gt
+    - semi-positive integer: positive sample, index (0-based) of assigned gt
+
+    Args:
+        pos_iou_thr (float): IoU threshold for positive bboxes.
+        neg_iou_thr (float or tuple): IoU threshold for negative bboxes.
+        min_pos_iou (float): Minimum iou for a bbox to be considered as a
+            positive bbox. Positive samples can have smaller IoU than
+            pos_iou_thr due to the 4th step (assign max IoU sample to each gt).
+        gt_max_assign_all (bool): Whether to assign all bboxes with the same
+            highest overlap with some gt to that gt.
+        ignore_iof_thr (float): IoF threshold for ignoring bboxes (if
+            `gt_bboxes_ignore` is specified). Negative values mean not
+            ignoring any bboxes.
+        ignore_wrt_candidates (bool): Whether to compute the iof between
+            `bboxes` and `gt_bboxes_ignore`, or the contrary.
+        match_low_quality (bool): Whether to allow low quality matches. This
+            is usually allowed for RPN and single stage detectors, but not
+            allowed in the second stage.
+        gpu_assign_thr (int): The upper bound of the number of GT for GPU
+            assign. When the number of gt is above this threshold, will assign
+            on CPU device. Negative values mean not assign on CPU.
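+        iou_calculator (dict): Config of the overlap calculator. Defaults to
+            ``dict(type='BboxOverlaps2D')``.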
+ """ + + def __init__(self, + pos_iou_thr, + neg_iou_thr, + min_pos_iou=.0, + gt_max_assign_all=True, + ignore_iof_thr=-1, + ignore_wrt_candidates=True, + match_low_quality=True, + gpu_assign_thr=-1, + iou_calculator=dict(type='BboxOverlaps2D')): + self.pos_iou_thr = pos_iou_thr + self.neg_iou_thr = neg_iou_thr + self.min_pos_iou = min_pos_iou + self.gt_max_assign_all = gt_max_assign_all + self.ignore_iof_thr = ignore_iof_thr + self.ignore_wrt_candidates = ignore_wrt_candidates + self.gpu_assign_thr = gpu_assign_thr + self.match_low_quality = match_low_quality + self.iou_calculator = build_iou_calculator(iou_calculator) + + def assign(self, + approxs, + squares, + approxs_per_octave, + gt_bboxes, + gt_bboxes_ignore=None, + gt_labels=None): + """Assign gt to approxs. + + This method assign a gt bbox to each group of approxs (bboxes), + each group of approxs is represent by a base approx (bbox) and + will be assigned with -1, or a semi-positive number. + background_label (-1) means negative sample, + semi-positive number is the index (0-based) of assigned gt. + The assignment is done in following steps, the order matters. + + 1. assign every bbox to background_label (-1) + 2. use the max IoU of each group of approxs to assign + 2. assign proposals whose iou with all gts < neg_iou_thr to background + 3. for each bbox, if the iou with its nearest gt >= pos_iou_thr, + assign it to that bbox + 4. for each gt bbox, assign its nearest proposals (may be more than + one) to itself + + Args: + approxs (Tensor): Bounding boxes to be assigned, + shape(approxs_per_octave*n, 4). + squares (Tensor): Base Bounding boxes to be assigned, + shape(n, 4). + approxs_per_octave (int): number of approxs per octave + gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4). + gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are + labelled as `ignored`, e.g., crowd boxes in COCO. + gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ). + + Returns: + :obj:`AssignResult`: The assign result. 
+ """ + num_squares = squares.size(0) + num_gts = gt_bboxes.size(0) + + if num_squares == 0 or num_gts == 0: + # No predictions and/or truth, return empty assignment + overlaps = approxs.new(num_gts, num_squares) + assign_result = self.assign_wrt_overlaps(overlaps, gt_labels) + return assign_result + + # re-organize anchors by approxs_per_octave x num_squares + approxs = torch.transpose( + approxs.view(num_squares, approxs_per_octave, 4), 0, + 1).contiguous().view(-1, 4) + assign_on_cpu = True if (self.gpu_assign_thr > 0) and ( + num_gts > self.gpu_assign_thr) else False + # compute overlap and assign gt on CPU when number of GT is large + if assign_on_cpu: + device = approxs.device + approxs = approxs.cpu() + gt_bboxes = gt_bboxes.cpu() + if gt_bboxes_ignore is not None: + gt_bboxes_ignore = gt_bboxes_ignore.cpu() + if gt_labels is not None: + gt_labels = gt_labels.cpu() + all_overlaps = self.iou_calculator(approxs, gt_bboxes) + + overlaps, _ = all_overlaps.view(approxs_per_octave, num_squares, + num_gts).max(dim=0) + overlaps = torch.transpose(overlaps, 0, 1) + + if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None + and gt_bboxes_ignore.numel() > 0 and squares.numel() > 0): + if self.ignore_wrt_candidates: + ignore_overlaps = self.iou_calculator( + squares, gt_bboxes_ignore, mode='iof') + ignore_max_overlaps, _ = ignore_overlaps.max(dim=1) + else: + ignore_overlaps = self.iou_calculator( + gt_bboxes_ignore, squares, mode='iof') + ignore_max_overlaps, _ = ignore_overlaps.max(dim=0) + overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1 + + assign_result = self.assign_wrt_overlaps(overlaps, gt_labels) + if assign_on_cpu: + assign_result.gt_inds = assign_result.gt_inds.to(device) + assign_result.max_overlaps = assign_result.max_overlaps.to(device) + if assign_result.labels is not None: + assign_result.labels = assign_result.labels.to(device) + return assign_result diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/assign_result.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/assign_result.py new file mode 100644 index 0000000000000000000000000000000000000000..4639fbdba0a5b92778e1ab87d61182e54bfb9b6f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/assign_result.py @@ -0,0 +1,204 @@ +import torch + +from mmdet.utils import util_mixins + + +class AssignResult(util_mixins.NiceRepr): + """Stores assignments between predicted and truth boxes. + + Attributes: + num_gts (int): the number of truth boxes considered when computing this + assignment + + gt_inds (LongTensor): for each predicted box indicates the 1-based + index of the assigned truth box. 0 means unassigned and -1 means + ignore. + + max_overlaps (FloatTensor): the iou between the predicted box and its + assigned truth box. + + labels (None | LongTensor): If specified, for each predicted box + indicates the category label of the assigned truth box. + + Example: + >>> # An assign result between 4 predicted boxes and 9 true boxes + >>> # where only two boxes were assigned. 
+ >>> num_gts = 9 + >>> max_overlaps = torch.LongTensor([0, .5, .9, 0]) + >>> gt_inds = torch.LongTensor([-1, 1, 2, 0]) + >>> labels = torch.LongTensor([0, 3, 4, 0]) + >>> self = AssignResult(num_gts, gt_inds, max_overlaps, labels) + >>> print(str(self)) # xdoctest: +IGNORE_WANT + + >>> # Force addition of gt labels (when adding gt as proposals) + >>> new_labels = torch.LongTensor([3, 4, 5]) + >>> self.add_gt_(new_labels) + >>> print(str(self)) # xdoctest: +IGNORE_WANT + + """ + + def __init__(self, num_gts, gt_inds, max_overlaps, labels=None): + self.num_gts = num_gts + self.gt_inds = gt_inds + self.max_overlaps = max_overlaps + self.labels = labels + # Interface for possible user-defined properties + self._extra_properties = {} + + @property + def num_preds(self): + """int: the number of predictions in this assignment""" + return len(self.gt_inds) + + def set_extra_property(self, key, value): + """Set user-defined new property.""" + assert key not in self.info + self._extra_properties[key] = value + + def get_extra_property(self, key): + """Get user-defined property.""" + return self._extra_properties.get(key, None) + + @property + def info(self): + """dict: a dictionary of info about the object""" + basic_info = { + 'num_gts': self.num_gts, + 'num_preds': self.num_preds, + 'gt_inds': self.gt_inds, + 'max_overlaps': self.max_overlaps, + 'labels': self.labels, + } + basic_info.update(self._extra_properties) + return basic_info + + def __nice__(self): + """str: a "nice" summary string describing this assign result""" + parts = [] + parts.append(f'num_gts={self.num_gts!r}') + if self.gt_inds is None: + parts.append(f'gt_inds={self.gt_inds!r}') + else: + parts.append(f'gt_inds.shape={tuple(self.gt_inds.shape)!r}') + if self.max_overlaps is None: + parts.append(f'max_overlaps={self.max_overlaps!r}') + else: + parts.append('max_overlaps.shape=' + f'{tuple(self.max_overlaps.shape)!r}') + if self.labels is None: + parts.append(f'labels={self.labels!r}') + else: + parts.append(f'labels.shape={tuple(self.labels.shape)!r}') + return ', '.join(parts) + + @classmethod + def random(cls, **kwargs): + """Create random AssignResult for tests or debugging. + + Args: + num_preds: number of predicted boxes + num_gts: number of true boxes + p_ignore (float): probability of a predicted box assinged to an + ignored truth + p_assigned (float): probability of a predicted box not being + assigned + p_use_label (float | bool): with labels or not + rng (None | int | numpy.random.RandomState): seed or state + + Returns: + :obj:`AssignResult`: Randomly generated assign results. 
+
+        Example:
+            >>> from mmdet.core.bbox.assigners.assign_result import *  # NOQA
+            >>> self = AssignResult.random()
+            >>> print(self.info)
+        """
+        from mmdet.core.bbox import demodata
+        rng = demodata.ensure_rng(kwargs.get('rng', None))
+
+        num_gts = kwargs.get('num_gts', None)
+        num_preds = kwargs.get('num_preds', None)
+        p_ignore = kwargs.get('p_ignore', 0.3)
+        p_assigned = kwargs.get('p_assigned', 0.7)
+        p_use_label = kwargs.get('p_use_label', 0.5)
+        num_classes = kwargs.get('num_classes', 3)
+
+        if num_gts is None:
+            num_gts = rng.randint(0, 8)
+        if num_preds is None:
+            num_preds = rng.randint(0, 16)
+
+        if num_gts == 0:
+            max_overlaps = torch.zeros(num_preds, dtype=torch.float32)
+            gt_inds = torch.zeros(num_preds, dtype=torch.int64)
+            if p_use_label is True or p_use_label < rng.rand():
+                labels = torch.zeros(num_preds, dtype=torch.int64)
+            else:
+                labels = None
+        else:
+            import numpy as np
+            # Create an overlap for each predicted box
+            max_overlaps = torch.from_numpy(rng.rand(num_preds))
+
+            # Construct gt_inds for each predicted box
+            is_assigned = torch.from_numpy(rng.rand(num_preds) < p_assigned)
+            # maximum number of assignments constraints
+            n_assigned = min(num_preds, min(num_gts, is_assigned.sum()))
+
+            assigned_idxs = np.where(is_assigned)[0]
+            rng.shuffle(assigned_idxs)
+            assigned_idxs = assigned_idxs[0:n_assigned]
+            assigned_idxs.sort()
+
+            is_assigned[:] = 0
+            is_assigned[assigned_idxs] = True
+
+            is_ignore = torch.from_numpy(
+                rng.rand(num_preds) < p_ignore) & is_assigned
+
+            gt_inds = torch.zeros(num_preds, dtype=torch.int64)
+
+            true_idxs = np.arange(num_gts)
+            rng.shuffle(true_idxs)
+            true_idxs = torch.from_numpy(true_idxs)
+            gt_inds[is_assigned] = true_idxs[:n_assigned]
+
+            gt_inds = torch.from_numpy(
+                rng.randint(1, num_gts + 1, size=num_preds))
+            gt_inds[is_ignore] = -1
+            gt_inds[~is_assigned] = 0
+            max_overlaps[~is_assigned] = 0
+
+            if p_use_label is True or p_use_label < rng.rand():
+                if num_classes == 0:
+                    labels = torch.zeros(num_preds, dtype=torch.int64)
+                else:
+                    labels = torch.from_numpy(
+                        # remind that we set FG labels to [0, num_class-1]
+                        # since mmdet v2.0
+                        # BG cat_id: num_class
+                        rng.randint(0, num_classes, size=num_preds))
+                    labels[~is_assigned] = 0
+            else:
+                labels = None
+
+        self = cls(num_gts, gt_inds, max_overlaps, labels)
+        return self
+
+    def add_gt_(self, gt_labels):
+        """Add ground truth as assigned results.
+
+        Args:
+            gt_labels (torch.Tensor): Labels of gt boxes
+        """
+        self_inds = torch.arange(
+            1, len(gt_labels) + 1, dtype=torch.long, device=gt_labels.device)
+        self.gt_inds = torch.cat([self_inds, self.gt_inds])
+
+        self.max_overlaps = torch.cat(
+            [self.max_overlaps.new_ones(len(gt_labels)), self.max_overlaps])
+
+        if self.labels is not None:
+            self.labels = torch.cat([gt_labels, self.labels])
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/atss_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/atss_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e21726b40c459ed6df020f59ff6dfc1ce73d8b3
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/atss_assigner.py
@@ -0,0 +1,178 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class ATSSAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `0` or a positive integer
+    indicating the ground truth index.
+
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        topk (int): number of bboxes selected in each level
+    """
+
+    def __init__(self,
+                 topk,
+                 iou_calculator=dict(type='BboxOverlaps2D'),
+                 ignore_iof_thr=-1):
+        self.topk = topk
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+        self.ignore_iof_thr = ignore_iof_thr
+
+    # https://github.com/sfzhang15/ATSS/blob/master/atss_core/modeling/rpn/atss/loss.py
+
+    def assign(self,
+               bboxes,
+               num_level_bboxes,
+               gt_bboxes,
+               gt_bboxes_ignore=None,
+               gt_labels=None):
+        """Assign gt to bboxes.
+
+        The assignment is done in following steps
+
+        1. compute iou between all bbox (bbox of all pyramid levels) and gt
+        2. compute center distance between all bbox and gt
+        3. on each pyramid level, for each gt, select k bboxes whose centers
+           are closest to the gt center, so we select k*l bboxes in total as
+           candidates for each gt
+        4. get corresponding iou for these candidates, and compute the
+           mean and std, set mean + std as the iou threshold
+        5. select those candidates whose iou are greater than or equal to
+           the threshold as positive
+        6. limit the positive sample's center in gt
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape(n, 4).
+            num_level_bboxes (List): num of bboxes in each level
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        INF = 100000000
+        bboxes = bboxes[:, :4]
+        num_gt, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
+
+        # compute iou between all bbox and gt
+        overlaps = self.iou_calculator(bboxes, gt_bboxes)
+
+        # assign 0 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             0,
+                                             dtype=torch.long)
+
+        if num_gt == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gt == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
+
+        # compute center distance between all bbox and gt
+        gt_cx = (gt_bboxes[:, 0] + gt_bboxes[:, 2]) / 2.0
+        gt_cy = (gt_bboxes[:, 1] + gt_bboxes[:, 3]) / 2.0
+        gt_points = torch.stack((gt_cx, gt_cy), dim=1)
+
+        bboxes_cx = (bboxes[:, 0] + bboxes[:, 2]) / 2.0
+        bboxes_cy = (bboxes[:, 1] + bboxes[:, 3]) / 2.0
+        bboxes_points = torch.stack((bboxes_cx, bboxes_cy), dim=1)
+
+        distances = (bboxes_points[:, None, :] -
+                     gt_points[None, :, :]).pow(2).sum(-1).sqrt()
+
+        if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
+                and gt_bboxes_ignore.numel() > 0 and bboxes.numel() > 0):
+            ignore_overlaps = self.iou_calculator(
+                bboxes, gt_bboxes_ignore, mode='iof')
+            ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
+            ignore_idxs = ignore_max_overlaps > self.ignore_iof_thr
+            distances[ignore_idxs, :] = INF
+            assigned_gt_inds[ignore_idxs] = -1
+
+        # Selecting candidates based on the center distance
+        candidate_idxs = []
+        start_idx = 0
+        for level, bboxes_per_level in enumerate(num_level_bboxes):
+            # on each pyramid level, for each gt,
+            # select k bboxes whose centers are closest to the gt center
+            end_idx = start_idx + bboxes_per_level
+            distances_per_level = distances[start_idx:end_idx, :]
+            selectable_k = min(self.topk, bboxes_per_level)
+            _, topk_idxs_per_level = distances_per_level.topk(
+                selectable_k, dim=0, largest=False)
+            candidate_idxs.append(topk_idxs_per_level + start_idx)
+            start_idx = end_idx
+        candidate_idxs = torch.cat(candidate_idxs, dim=0)
+
+        # get corresponding iou for these candidates, and compute the
+        # mean and std, set mean + std as the iou threshold
+        candidate_overlaps = overlaps[candidate_idxs, torch.arange(num_gt)]
+        overlaps_mean_per_gt = candidate_overlaps.mean(0)
+        overlaps_std_per_gt = candidate_overlaps.std(0)
+        overlaps_thr_per_gt = overlaps_mean_per_gt + overlaps_std_per_gt
+
+        is_pos = candidate_overlaps >= overlaps_thr_per_gt[None, :]
+
+        # limit the positive sample's center in gt
+        for gt_idx in range(num_gt):
+            candidate_idxs[:, gt_idx] += gt_idx * num_bboxes
+        ep_bboxes_cx = bboxes_cx.view(1, -1).expand(
+            num_gt, num_bboxes).contiguous().view(-1)
+        ep_bboxes_cy = bboxes_cy.view(1, -1).expand(
+            num_gt, num_bboxes).contiguous().view(-1)
+        candidate_idxs = candidate_idxs.view(-1)
+
+        # calculate the left, top, right, bottom distance between positive
+        # bbox center and gt side
+        l_ = ep_bboxes_cx[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 0]
+        t_ = ep_bboxes_cy[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 1]
+        r_ = gt_bboxes[:, 2] - ep_bboxes_cx[candidate_idxs].view(-1, num_gt)
+        b_ = gt_bboxes[:, 3] - ep_bboxes_cy[candidate_idxs].view(-1, num_gt)
+        is_in_gts = torch.stack([l_, t_, r_, b_], dim=1).min(dim=1)[0] > 0.01
+        is_pos = is_pos & is_in_gts
+
+        # if an anchor box is assigned to multiple gts,
+        # the one with the highest IoU will be selected.
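+        # (implemented below by flattening `overlaps` in gt-major order and
+        # scattering the candidates' IoUs into a buffer pre-filled with -INF,
+        # so a per-anchor max over gts picks the best match and leaves
+        # unmatched anchors at -INF)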
+        overlaps_inf = torch.full_like(overlaps,
+                                       -INF).t().contiguous().view(-1)
+        index = candidate_idxs.view(-1)[is_pos.view(-1)]
+        overlaps_inf[index] = overlaps.t().contiguous().view(-1)[index]
+        overlaps_inf = overlaps_inf.view(num_gt, -1).t()
+
+        max_overlaps, argmax_overlaps = overlaps_inf.max(dim=1)
+        assigned_gt_inds[
+            max_overlaps != -INF] = argmax_overlaps[max_overlaps != -INF] + 1
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+        return AssignResult(
+            num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/base_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/base_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..2da9e0f4aa55b46e0059a037c18cb58577d04871
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/base_assigner.py
@@ -0,0 +1,10 @@
+from abc import ABCMeta, abstractmethod
+
+
+class BaseAssigner(metaclass=ABCMeta):
+    """Base assigner that assigns boxes to ground truth boxes."""
+
+    @abstractmethod
+    def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign boxes to either a ground truth box or a negative sample."""
+        pass
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/center_region_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/center_region_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..488e3b615318787751cab3211e38dd9471c666be
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/center_region_assigner.py
@@ -0,0 +1,335 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+def scale_boxes(bboxes, scale):
+    """Expand an array of boxes by a given scale.
+
+    Args:
+        bboxes (Tensor): Shape (m, 4)
+        scale (float): The scale factor of bboxes
+
+    Returns:
+        (Tensor): Shape (m, 4). Scaled bboxes
+    """
+    assert bboxes.size(1) == 4
+    w_half = (bboxes[:, 2] - bboxes[:, 0]) * .5
+    h_half = (bboxes[:, 3] - bboxes[:, 1]) * .5
+    x_c = (bboxes[:, 2] + bboxes[:, 0]) * .5
+    y_c = (bboxes[:, 3] + bboxes[:, 1]) * .5
+
+    w_half *= scale
+    h_half *= scale
+
+    boxes_scaled = torch.zeros_like(bboxes)
+    boxes_scaled[:, 0] = x_c - w_half
+    boxes_scaled[:, 2] = x_c + w_half
+    boxes_scaled[:, 1] = y_c - h_half
+    boxes_scaled[:, 3] = y_c + h_half
+    return boxes_scaled
+
+
+def is_located_in(points, bboxes):
+    """Check whether points are located in bboxes.
+
+    Args:
+        points (Tensor): Points, shape: (m, 2).
+        bboxes (Tensor): Bounding boxes, shape: (n, 4).
+
+    Return:
+        Tensor: Flags indicating if points are located in bboxes,
+            shape: (m, n).
+    """
+    assert points.size(1) == 2
+    assert bboxes.size(1) == 4
+    return (points[:, 0].unsqueeze(1) > bboxes[:, 0].unsqueeze(0)) & \
+           (points[:, 0].unsqueeze(1) < bboxes[:, 2].unsqueeze(0)) & \
+           (points[:, 1].unsqueeze(1) > bboxes[:, 1].unsqueeze(0)) & \
+           (points[:, 1].unsqueeze(1) < bboxes[:, 3].unsqueeze(0))
+
+
+def bboxes_area(bboxes):
+    """Compute the area of an array of bboxes.
+
+    Args:
+        bboxes (Tensor): The coordinates of bboxes. Shape: (m, 4)
+
+    Returns:
+        Tensor: Area of the bboxes.
Shape: (m, ) + """ + assert bboxes.size(1) == 4 + w = (bboxes[:, 2] - bboxes[:, 0]) + h = (bboxes[:, 3] - bboxes[:, 1]) + areas = w * h + return areas + + +@BBOX_ASSIGNERS.register_module() +class CenterRegionAssigner(BaseAssigner): + """Assign pixels at the center region of a bbox as positive. + + Each proposals will be assigned with `-1`, `0`, or a positive integer + indicating the ground truth index. + - -1: negative samples + - semi-positive numbers: positive sample, index (0-based) of assigned gt + + Args: + pos_scale (float): Threshold within which pixels are + labelled as positive. + neg_scale (float): Threshold above which pixels are + labelled as positive. + min_pos_iof (float): Minimum iof of a pixel with a gt to be + labelled as positive. Default: 1e-2 + ignore_gt_scale (float): Threshold within which the pixels + are ignored when the gt is labelled as shadowed. Default: 0.5 + foreground_dominate (bool): If True, the bbox will be assigned as + positive when a gt's kernel region overlaps with another's shadowed + (ignored) region, otherwise it is set as ignored. Default to False. + """ + + def __init__(self, + pos_scale, + neg_scale, + min_pos_iof=1e-2, + ignore_gt_scale=0.5, + foreground_dominate=False, + iou_calculator=dict(type='BboxOverlaps2D')): + self.pos_scale = pos_scale + self.neg_scale = neg_scale + self.min_pos_iof = min_pos_iof + self.ignore_gt_scale = ignore_gt_scale + self.foreground_dominate = foreground_dominate + self.iou_calculator = build_iou_calculator(iou_calculator) + + def get_gt_priorities(self, gt_bboxes): + """Get gt priorities according to their areas. + + Smaller gt has higher priority. + + Args: + gt_bboxes (Tensor): Ground truth boxes, shape (k, 4). + + Returns: + Tensor: The priority of gts so that gts with larger priority is \ + more likely to be assigned. Shape (k, ) + """ + gt_areas = bboxes_area(gt_bboxes) + # Rank all gt bbox areas. Smaller objects has larger priority + _, sort_idx = gt_areas.sort(descending=True) + sort_idx = sort_idx.argsort() + return sort_idx + + def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None): + """Assign gt to bboxes. + + This method assigns gts to every bbox (proposal/anchor), each bbox \ + will be assigned with -1, or a semi-positive number. -1 means \ + negative sample, semi-positive number is the index (0-based) of \ + assigned gt. + + Args: + bboxes (Tensor): Bounding boxes to be assigned, shape(n, 4). + gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4). + gt_bboxes_ignore (tensor, optional): Ground truth bboxes that are + labelled as `ignored`, e.g., crowd boxes in COCO. + gt_labels (tensor, optional): Label of gt_bboxes, shape (num_gts,). + + Returns: + :obj:`AssignResult`: The assigned result. Note that \ + shadowed_labels of shape (N, 2) is also added as an \ + `assign_result` attribute. `shadowed_labels` is a tensor \ + composed of N pairs of anchor_ind, class_label], where N \ + is the number of anchors that lie in the outer region of a \ + gt, anchor_ind is the shadowed anchor index and class_label \ + is the shadowed class label. + + Example: + >>> self = CenterRegionAssigner(0.2, 0.2) + >>> bboxes = torch.Tensor([[0, 0, 10, 10], [10, 10, 20, 20]]) + >>> gt_bboxes = torch.Tensor([[0, 0, 10, 10]]) + >>> assign_result = self.assign(bboxes, gt_bboxes) + >>> expected_gt_inds = torch.LongTensor([1, 0]) + >>> assert torch.all(assign_result.gt_inds == expected_gt_inds) + """ + # There are in total 5 steps in the pixel assignment + # 1. 
Find core (the center region, say inner 0.2) + # and shadow (the relatively ourter part, say inner 0.2-0.5) + # regions of every gt. + # 2. Find all prior bboxes that lie in gt_core and gt_shadow regions + # 3. Assign prior bboxes in gt_core with a one-hot id of the gt in + # the image. + # 3.1. For overlapping objects, the prior bboxes in gt_core is + # assigned with the object with smallest area + # 4. Assign prior bboxes with class label according to its gt id. + # 4.1. Assign -1 to prior bboxes lying in shadowed gts + # 4.2. Assign positive prior boxes with the corresponding label + # 5. Find pixels lying in the shadow of an object and assign them with + # background label, but set the loss weight of its corresponding + # gt to zero. + assert bboxes.size(1) == 4, 'bboxes must have size of 4' + # 1. Find core positive and shadow region of every gt + gt_core = scale_boxes(gt_bboxes, self.pos_scale) + gt_shadow = scale_boxes(gt_bboxes, self.neg_scale) + + # 2. Find prior bboxes that lie in gt_core and gt_shadow regions + bbox_centers = (bboxes[:, 2:4] + bboxes[:, 0:2]) / 2 + # The center points lie within the gt boxes + is_bbox_in_gt = is_located_in(bbox_centers, gt_bboxes) + # Only calculate bbox and gt_core IoF. This enables small prior bboxes + # to match large gts + bbox_and_gt_core_overlaps = self.iou_calculator( + bboxes, gt_core, mode='iof') + # The center point of effective priors should be within the gt box + is_bbox_in_gt_core = is_bbox_in_gt & ( + bbox_and_gt_core_overlaps > self.min_pos_iof) # shape (n, k) + + is_bbox_in_gt_shadow = ( + self.iou_calculator(bboxes, gt_shadow, mode='iof') > + self.min_pos_iof) + # Rule out center effective positive pixels + is_bbox_in_gt_shadow &= (~is_bbox_in_gt_core) + + num_gts, num_bboxes = gt_bboxes.size(0), bboxes.size(0) + if num_gts == 0 or num_bboxes == 0: + # If no gts exist, assign all pixels to negative + assigned_gt_ids = \ + is_bbox_in_gt_core.new_zeros((num_bboxes,), + dtype=torch.long) + pixels_in_gt_shadow = assigned_gt_ids.new_empty((0, 2)) + else: + # Step 3: assign a one-hot gt id to each pixel, and smaller objects + # have high priority to assign the pixel. + sort_idx = self.get_gt_priorities(gt_bboxes) + assigned_gt_ids, pixels_in_gt_shadow = \ + self.assign_one_hot_gt_indices(is_bbox_in_gt_core, + is_bbox_in_gt_shadow, + gt_priority=sort_idx) + + if gt_bboxes_ignore is not None and gt_bboxes_ignore.numel() > 0: + # No ground truth or boxes, return empty assignment + gt_bboxes_ignore = scale_boxes( + gt_bboxes_ignore, scale=self.ignore_gt_scale) + is_bbox_in_ignored_gts = is_located_in(bbox_centers, + gt_bboxes_ignore) + is_bbox_in_ignored_gts = is_bbox_in_ignored_gts.any(dim=1) + assigned_gt_ids[is_bbox_in_ignored_gts] = -1 + + # 4. Assign prior bboxes with class label according to its gt id. + assigned_labels = None + shadowed_pixel_labels = None + if gt_labels is not None: + # Default assigned label is the background (-1) + assigned_labels = assigned_gt_ids.new_full((num_bboxes, ), -1) + pos_inds = torch.nonzero( + assigned_gt_ids > 0, as_tuple=False).squeeze() + if pos_inds.numel() > 0: + assigned_labels[pos_inds] = gt_labels[assigned_gt_ids[pos_inds] + - 1] + # 5. Find pixels lying in the shadow of an object + shadowed_pixel_labels = pixels_in_gt_shadow.clone() + if pixels_in_gt_shadow.numel() > 0: + pixel_idx, gt_idx =\ + pixels_in_gt_shadow[:, 0], pixels_in_gt_shadow[:, 1] + assert (assigned_gt_ids[pixel_idx] != gt_idx).all(), \ + 'Some pixels are dually assigned to ignore and gt!' 
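+                # translate shadowed gt indices (1-based) into class labels,
+                # then resolve pixels that are both positive and shadowed
+                # according to `foreground_dominate`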
+ shadowed_pixel_labels[:, 1] = gt_labels[gt_idx - 1] + override = ( + assigned_labels[pixel_idx] == shadowed_pixel_labels[:, 1]) + if self.foreground_dominate: + # When a pixel is both positive and shadowed, set it as pos + shadowed_pixel_labels = shadowed_pixel_labels[~override] + else: + # When a pixel is both pos and shadowed, set it as shadowed + assigned_labels[pixel_idx[override]] = -1 + assigned_gt_ids[pixel_idx[override]] = 0 + + assign_result = AssignResult( + num_gts, assigned_gt_ids, None, labels=assigned_labels) + # Add shadowed_labels as assign_result property. Shape: (num_shadow, 2) + assign_result.set_extra_property('shadowed_labels', + shadowed_pixel_labels) + return assign_result + + def assign_one_hot_gt_indices(self, + is_bbox_in_gt_core, + is_bbox_in_gt_shadow, + gt_priority=None): + """Assign only one gt index to each prior box. + + Gts with large gt_priority are more likely to be assigned. + + Args: + is_bbox_in_gt_core (Tensor): Bool tensor indicating the bbox center + is in the core area of a gt (e.g. 0-0.2). + Shape: (num_prior, num_gt). + is_bbox_in_gt_shadow (Tensor): Bool tensor indicating the bbox + center is in the shadowed area of a gt (e.g. 0.2-0.5). + Shape: (num_prior, num_gt). + gt_priority (Tensor): Priorities of gts. The gt with a higher + priority is more likely to be assigned to the bbox when the bbox + match with multiple gts. Shape: (num_gt, ). + + Returns: + tuple: Returns (assigned_gt_inds, shadowed_gt_inds). + + - assigned_gt_inds: The assigned gt index of each prior bbox \ + (i.e. index from 1 to num_gts). Shape: (num_prior, ). + - shadowed_gt_inds: shadowed gt indices. It is a tensor of \ + shape (num_ignore, 2) with first column being the \ + shadowed prior bbox indices and the second column the \ + shadowed gt indices (1-based). + """ + num_bboxes, num_gts = is_bbox_in_gt_core.shape + + if gt_priority is None: + gt_priority = torch.arange( + num_gts, device=is_bbox_in_gt_core.device) + assert gt_priority.size(0) == num_gts + # The bigger gt_priority, the more preferable to be assigned + # The assigned inds are by default 0 (background) + assigned_gt_inds = is_bbox_in_gt_core.new_zeros((num_bboxes, ), + dtype=torch.long) + # Shadowed bboxes are assigned to be background. But the corresponding + # label is ignored during loss calculation, which is done through + # shadowed_gt_inds + shadowed_gt_inds = torch.nonzero(is_bbox_in_gt_shadow, as_tuple=False) + if is_bbox_in_gt_core.sum() == 0: # No gt match + shadowed_gt_inds[:, 1] += 1 # 1-based. For consistency issue + return assigned_gt_inds, shadowed_gt_inds + + # The priority of each prior box and gt pair. If one prior box is + # matched bo multiple gts. Only the pair with the highest priority + # is saved + pair_priority = is_bbox_in_gt_core.new_full((num_bboxes, num_gts), + -1, + dtype=torch.long) + + # Each bbox could match with multiple gts. + # The following codes deal with this situation + # Matched bboxes (to any gt). Shape: (num_pos_anchor, ) + inds_of_match = torch.any(is_bbox_in_gt_core, dim=1) + # The matched gt index of each positive bbox. Length >= num_pos_anchor + # , since one bbox could match multiple gts + matched_bbox_gt_inds = torch.nonzero( + is_bbox_in_gt_core, as_tuple=False)[:, 1] + # Assign priority to each bbox-gt pair. 
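+        # (matched pairs receive their gt's priority; unmatched pairs keep
+        # -1, so the per-bbox argmax below always prefers a real match)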
+ pair_priority[is_bbox_in_gt_core] = gt_priority[matched_bbox_gt_inds] + _, argmax_priority = pair_priority[inds_of_match].max(dim=1) + assigned_gt_inds[inds_of_match] = argmax_priority + 1 # 1-based + # Zero-out the assigned anchor box to filter the shadowed gt indices + is_bbox_in_gt_core[inds_of_match, argmax_priority] = 0 + # Concat the shadowed indices due to overlapping with that out side of + # effective scale. shape: (total_num_ignore, 2) + shadowed_gt_inds = torch.cat( + (shadowed_gt_inds, torch.nonzero( + is_bbox_in_gt_core, as_tuple=False)), + dim=0) + # `is_bbox_in_gt_core` should be changed back to keep arguments intact. + is_bbox_in_gt_core[inds_of_match, argmax_priority] = 1 + # 1-based shadowed gt indices, to be consistent with `assigned_gt_inds` + if shadowed_gt_inds.numel() > 0: + shadowed_gt_inds[:, 1] += 1 + return assigned_gt_inds, shadowed_gt_inds diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/grid_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/grid_assigner.py new file mode 100644 index 0000000000000000000000000000000000000000..7390ea6370639c939d578c6ebf0f9268499161bc --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/grid_assigner.py @@ -0,0 +1,155 @@ +import torch + +from ..builder import BBOX_ASSIGNERS +from ..iou_calculators import build_iou_calculator +from .assign_result import AssignResult +from .base_assigner import BaseAssigner + + +@BBOX_ASSIGNERS.register_module() +class GridAssigner(BaseAssigner): + """Assign a corresponding gt bbox or background to each bbox. + + Each proposals will be assigned with `-1`, `0`, or a positive integer + indicating the ground truth index. + + - -1: don't care + - 0: negative sample, no assigned gt + - positive integer: positive sample, index (1-based) of assigned gt + + Args: + pos_iou_thr (float): IoU threshold for positive bboxes. + neg_iou_thr (float or tuple): IoU threshold for negative bboxes. + min_pos_iou (float): Minimum iou for a bbox to be considered as a + positive bbox. Positive samples can have smaller IoU than + pos_iou_thr due to the 4th step (assign max IoU sample to each gt). + gt_max_assign_all (bool): Whether to assign all bboxes with the same + highest overlap with some gt to that gt. + """ + + def __init__(self, + pos_iou_thr, + neg_iou_thr, + min_pos_iou=.0, + gt_max_assign_all=True, + iou_calculator=dict(type='BboxOverlaps2D')): + self.pos_iou_thr = pos_iou_thr + self.neg_iou_thr = neg_iou_thr + self.min_pos_iou = min_pos_iou + self.gt_max_assign_all = gt_max_assign_all + self.iou_calculator = build_iou_calculator(iou_calculator) + + def assign(self, bboxes, box_responsible_flags, gt_bboxes, gt_labels=None): + """Assign gt to bboxes. The process is very much like the max iou + assigner, except that positive samples are constrained within the cell + that the gt boxes fell in. + + This method assign a gt bbox to every bbox (proposal/anchor), each bbox + will be assigned with -1, 0, or a positive number. -1 means don't care, + 0 means negative sample, positive number is the index (1-based) of + assigned gt. + The assignment is done in following steps, the order matters. + + 1. assign every bbox to -1 + 2. assign proposals whose iou with all gts <= neg_iou_thr to 0 + 3. for each bbox within a cell, if the iou with its nearest gt > + pos_iou_thr and the center of that gt falls inside the cell, + assign it to that bbox + 4. for each gt bbox, assign its nearest proposals within the cell the + gt bbox falls in to itself. 
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            box_responsible_flags (Tensor): flag to indicate whether box is
+                responsible for prediction, shape (n, )
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_gts, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
+
+        # compute iou between all gts and bboxes
+        overlaps = self.iou_calculator(gt_bboxes, bboxes)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gts == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gts,
+                assigned_gt_inds,
+                max_overlaps,
+                labels=assigned_labels)
+
+        # 2. assign negative: below
+        # for each anchor, which gt best overlaps with it
+        # for each anchor, the max iou of all gts
+        # shape of max_overlaps == argmax_overlaps == num_bboxes
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+
+        if isinstance(self.neg_iou_thr, float):
+            assigned_gt_inds[(max_overlaps >= 0)
+                             & (max_overlaps <= self.neg_iou_thr)] = 0
+        elif isinstance(self.neg_iou_thr, (tuple, list)):
+            assert len(self.neg_iou_thr) == 2
+            assigned_gt_inds[(max_overlaps > self.neg_iou_thr[0])
+                             & (max_overlaps <= self.neg_iou_thr[1])] = 0
+
+        # 3. assign positive: falls into responsible cell and above
+        # positive IoU threshold, the order matters.
+        # the prior condition of comparison is to filter out all
+        # unrelated anchors, i.e. not box_responsible_flags
+        overlaps[:, ~box_responsible_flags.type(torch.bool)] = -1.
+
+        # calculate max_overlaps again, but this time we only consider IoUs
+        # for anchors responsible for prediction
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+
+        # for each gt, which anchor best overlaps with it
+        # for each gt, the max iou of all proposals
+        # shape of gt_max_overlaps == gt_argmax_overlaps == num_gts
+        gt_max_overlaps, gt_argmax_overlaps = overlaps.max(dim=1)
+
+        pos_inds = (max_overlaps >
+                    self.pos_iou_thr) & box_responsible_flags.type(torch.bool)
+        assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1
+
+        # 4. assign positive to max overlapped anchors within responsible cell
+        for i in range(num_gts):
+            if gt_max_overlaps[i] > self.min_pos_iou:
+                if self.gt_max_assign_all:
+                    max_iou_inds = (overlaps[i, :] == gt_max_overlaps[i]) & \
+                        box_responsible_flags.type(torch.bool)
+                    assigned_gt_inds[max_iou_inds] = i + 1
+                elif box_responsible_flags[gt_argmax_overlaps[i]]:
+                    assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1
+
+        # assign labels of positive anchors
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels)
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/hungarian_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/hungarian_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..224609300f6e7aa3dd296f5ca5e33c1df6372cb4
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/hungarian_assigner.py
@@ -0,0 +1,158 @@
+import torch
+from scipy.optimize import linear_sum_assignment
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from ..transforms import bbox_cxcywh_to_xyxy, bbox_xyxy_to_cxcywh
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class HungarianAssigner(BaseAssigner):
+    """Computes one-to-one matching between predictions and ground truth.
+
+    This class computes an assignment between the targets and the predictions
+    based on the costs. The costs are a weighted sum of three components:
+    classification cost, regression L1 cost and regression iou cost. The
+    targets don't include the no_object, so generally there are more
+    predictions than targets. After the one-to-one matching, the un-matched
+    are treated as backgrounds. Thus each query prediction will be assigned
+    with `0` or a positive integer indicating the ground truth index:
+
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        cls_weight (int | float, optional): The scale factor for classification
+            cost. Default 1.0.
+        bbox_weight (int | float, optional): The scale factor for regression
+            L1 cost. Default 1.0.
+        iou_weight (int | float, optional): The scale factor for regression
+            iou cost. Default 1.0.
+        iou_calculator (dict | optional): The config for the iou calculation.
+            Default type `BboxOverlaps2D`.
+        iou_mode (str | optional): "iou" (intersection over union), "iof"
+            (intersection over foreground), or "giou" (generalized
+            intersection over union). Default "giou".
+    """
+
+    def __init__(self,
+                 cls_weight=1.,
+                 bbox_weight=1.,
+                 iou_weight=1.,
+                 iou_calculator=dict(type='BboxOverlaps2D'),
+                 iou_mode='giou'):
+        # giou cost is used by default in the official DETR repo.
+        self.iou_mode = iou_mode
+        self.cls_weight = cls_weight
+        self.bbox_weight = bbox_weight
+        self.iou_weight = iou_weight
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self,
+               bbox_pred,
+               cls_pred,
+               gt_bboxes,
+               gt_labels,
+               img_meta,
+               gt_bboxes_ignore=None,
+               eps=1e-7):
+        """Computes one-to-one matching based on the weighted costs.
+
+        This method assigns each query prediction to a ground truth or
+        background. The `assigned_gt_inds` with -1 means don't care, 0 means
+        negative sample, and a positive number is the index (1-based) of the
+        assigned gt.
+        The assignment is done in the following steps, and the order matters.
+
+        1. assign every prediction to -1
+        2. compute the weighted costs
+        3. do Hungarian matching on CPU based on the costs
+        4. assign all to 0 (background) first, then for each matched pair
+           between predictions and gts, treat this prediction as foreground
+           and assign the corresponding gt index (plus 1) to it.
+
+        Args:
+            bbox_pred (Tensor): Predicted boxes with normalized coordinates
+                (cx, cy, w, h), which are all in range [0, 1]. Shape
+                [num_query, 4].
+            cls_pred (Tensor): Predicted classification logits, shape
+                [num_query, num_class].
+            gt_bboxes (Tensor): Ground truth boxes with unnormalized
+                coordinates (x1, y1, x2, y2). Shape [num_gt, 4].
+            gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,).
+            img_meta (dict): Meta information for current image.
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`. Default None.
+            eps (int | float, optional): A value added to the denominator for
+                numerical stability. Default 1e-7.
+
+        Returns:
+            :obj:`AssignResult`: The assigned result.
+        """
+        assert gt_bboxes_ignore is None, \
+            'Only case when gt_bboxes_ignore is None is supported.'
+        num_gts, num_bboxes = gt_bboxes.size(0), bbox_pred.size(0)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = bbox_pred.new_full((num_bboxes, ),
+                                              -1,
+                                              dtype=torch.long)
+        assigned_labels = bbox_pred.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            if num_gts == 0:
+                # No ground truth, assign all to background
+                assigned_gt_inds[:] = 0
+            return AssignResult(
+                num_gts, assigned_gt_inds, None, labels=assigned_labels)
+
+        # 2. compute the weighted costs
+        # classification cost.
+        # Following the official DETR repo, in contrast to the loss, where
+        # NLL is used, we approximate it as 1 - cls_score[gt_label].
+        # The 1 is a constant that doesn't change the matching,
+        # so it can be omitted.
+        cls_score = cls_pred.softmax(-1)
+        cls_cost = -cls_score[:, gt_labels]  # [num_bboxes, num_gt]
+
+        # regression L1 cost
+        img_h, img_w, _ = img_meta['img_shape']
+        factor = torch.Tensor([img_w, img_h, img_w,
+                               img_h]).unsqueeze(0).to(gt_bboxes.device)
+        gt_bboxes_normalized = gt_bboxes / factor
+        bbox_cost = torch.cdist(
+            bbox_pred, bbox_xyxy_to_cxcywh(gt_bboxes_normalized),
+            p=1)  # [num_bboxes, num_gt]
+
+        # regression iou cost; giou is used by default in official DETR.
+        bboxes = bbox_cxcywh_to_xyxy(bbox_pred) * factor
+        # overlaps: [num_bboxes, num_gt]
+        overlaps = self.iou_calculator(
+            bboxes, gt_bboxes, mode=self.iou_mode, is_aligned=False)
+        # The 1 is a constant that doesn't change the matching, so omitted.
+        iou_cost = -overlaps
+
+        # weighted sum of above three costs
+        cost = self.cls_weight * cls_cost + self.bbox_weight * bbox_cost
+        cost = cost + self.iou_weight * iou_cost
+
+        # 3. do Hungarian matching on CPU using linear_sum_assignment
+        cost = cost.detach().cpu()
+        matched_row_inds, matched_col_inds = linear_sum_assignment(cost)
+        matched_row_inds = torch.from_numpy(matched_row_inds).to(
+            bbox_pred.device)
+        matched_col_inds = torch.from_numpy(matched_col_inds).to(
+            bbox_pred.device)
+
+        # 4. assign backgrounds and foregrounds
+        # assign all indices to backgrounds first
+        assigned_gt_inds[:] = 0
+        # assign foregrounds based on matching results
+        assigned_gt_inds[matched_row_inds] = matched_col_inds + 1
+        assigned_labels[matched_row_inds] = gt_labels[matched_col_inds]
+        return AssignResult(
+            num_gts, assigned_gt_inds, None, labels=assigned_labels)
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/max_iou_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/max_iou_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..a99f77e104bb6e0429f0ad1ef9cbe91750258e63
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/max_iou_assigner.py
@@ -0,0 +1,212 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class MaxIoUAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `-1`, or a semi-positive integer
+    indicating the ground truth index.
+
+    - -1: negative sample, no assigned gt
+    - semi-positive integer: positive sample, index (0-based) of assigned gt
+
+    Args:
+        pos_iou_thr (float): IoU threshold for positive bboxes.
+        neg_iou_thr (float or tuple): IoU threshold for negative bboxes.
+        min_pos_iou (float): Minimum IoU for a bbox to be considered as a
+            positive bbox. Positive samples can have smaller IoU than
+            pos_iou_thr due to the 4th step (assign max IoU sample to each gt).
+        gt_max_assign_all (bool): Whether to assign all bboxes with the same
+            highest overlap with some gt to that gt.
+        ignore_iof_thr (float): IoF threshold for ignoring bboxes (if
+            `gt_bboxes_ignore` is specified). Negative values mean not
+            ignoring any bboxes.
+        ignore_wrt_candidates (bool): Whether to compute the iof between
+            `bboxes` and `gt_bboxes_ignore`, or the contrary.
+        match_low_quality (bool): Whether to allow low quality matches. This is
+            usually allowed for RPN and single stage detectors, but not allowed
+            in the second stage. Details are demonstrated in Step 4.
+        gpu_assign_thr (int): The upper bound of the number of GT for GPU
+            assign. When the number of gt is above this threshold, will assign
+            on CPU device. Negative values mean not assign on CPU.
+    """
+
+    def __init__(self,
+                 pos_iou_thr,
+                 neg_iou_thr,
+                 min_pos_iou=.0,
+                 gt_max_assign_all=True,
+                 ignore_iof_thr=-1,
+                 ignore_wrt_candidates=True,
+                 match_low_quality=True,
+                 gpu_assign_thr=-1,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.pos_iou_thr = pos_iou_thr
+        self.neg_iou_thr = neg_iou_thr
+        self.min_pos_iou = min_pos_iou
+        self.gt_max_assign_all = gt_max_assign_all
+        self.ignore_iof_thr = ignore_iof_thr
+        self.ignore_wrt_candidates = ignore_wrt_candidates
+        self.gpu_assign_thr = gpu_assign_thr
+        self.match_low_quality = match_low_quality
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign gt to bboxes.
+
+        This method assigns a gt bbox to every bbox (proposal/anchor); each
+        bbox will be assigned with -1 or a semi-positive number. -1 means
+        negative sample, and a semi-positive number is the index (0-based) of
+        the assigned gt.
+        The assignment is done in the following steps, and the order matters.
+
+        1. assign every bbox to the background
+        2. assign proposals whose iou with all gts < neg_iou_thr to 0
+        3. for each bbox, if the iou with its nearest gt >= pos_iou_thr,
+           assign it to that gt
+        4. for each gt bbox, assign its nearest proposals (may be more than
+           one) to itself
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+
+        Example:
+            >>> self = MaxIoUAssigner(0.5, 0.5)
+            >>> bboxes = torch.Tensor([[0, 0, 10, 10], [10, 10, 20, 20]])
+            >>> gt_bboxes = torch.Tensor([[0, 0, 10, 9]])
+            >>> assign_result = self.assign(bboxes, gt_bboxes)
+            >>> expected_gt_inds = torch.LongTensor([1, 0])
+            >>> assert torch.all(assign_result.gt_inds == expected_gt_inds)
+        """
+        assign_on_cpu = True if (self.gpu_assign_thr > 0) and (
+            gt_bboxes.shape[0] > self.gpu_assign_thr) else False
+        # compute overlap and assign gt on CPU when number of GT is large
+        if assign_on_cpu:
+            device = bboxes.device
+            bboxes = bboxes.cpu()
+            gt_bboxes = gt_bboxes.cpu()
+            if gt_bboxes_ignore is not None:
+                gt_bboxes_ignore = gt_bboxes_ignore.cpu()
+            if gt_labels is not None:
+                gt_labels = gt_labels.cpu()
+
+        overlaps = self.iou_calculator(gt_bboxes, bboxes)
+
+        if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
+                and gt_bboxes_ignore.numel() > 0 and bboxes.numel() > 0):
+            if self.ignore_wrt_candidates:
+                ignore_overlaps = self.iou_calculator(
+                    bboxes, gt_bboxes_ignore, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
+            else:
+                ignore_overlaps = self.iou_calculator(
+                    gt_bboxes_ignore, bboxes, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=0)
+            overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1
+
+        assign_result = self.assign_wrt_overlaps(overlaps, gt_labels)
+        if assign_on_cpu:
+            assign_result.gt_inds = assign_result.gt_inds.to(device)
+            assign_result.max_overlaps = assign_result.max_overlaps.to(device)
+            if assign_result.labels is not None:
+                assign_result.labels = assign_result.labels.to(device)
+        return assign_result
+
+    def assign_wrt_overlaps(self, overlaps, gt_labels=None):
+        """Assign w.r.t. the overlaps of bboxes with gts.
+
+        Args:
+            overlaps (Tensor): Overlaps between k gt_bboxes and n bboxes,
+                shape (k, n).
+            gt_labels (Tensor, optional): Labels of k gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_gts, num_bboxes = overlaps.size(0), overlaps.size(1)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gts == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gts,
+                assigned_gt_inds,
+                max_overlaps,
+                labels=assigned_labels)
+
+        # for each anchor, which gt best overlaps with it
+        # for each anchor, the max iou of all gts
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+        # for each gt, which anchor best overlaps with it
+        # for each gt, the max iou of all proposals
+        gt_max_overlaps, gt_argmax_overlaps = overlaps.max(dim=1)
+
+        # 2. assign negative: below
+        # the negative inds are set to be 0
+        if isinstance(self.neg_iou_thr, float):
+            assigned_gt_inds[(max_overlaps >= 0)
+                             & (max_overlaps < self.neg_iou_thr)] = 0
+        elif isinstance(self.neg_iou_thr, tuple):
+            assert len(self.neg_iou_thr) == 2
+            assigned_gt_inds[(max_overlaps >= self.neg_iou_thr[0])
+                             & (max_overlaps < self.neg_iou_thr[1])] = 0
+
+        # 3. assign positive: above positive IoU threshold
+        pos_inds = max_overlaps >= self.pos_iou_thr
+        assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1
+
+        if self.match_low_quality:
+            # Low-quality matching will overwrite the assigned_gt_inds
+            # assigned in Step 3. Thus, the assigned gt might not be the best
+            # one for prediction.
+            # For example, if bbox A has 0.9 and 0.8 iou with GT bbox 1 & 2,
+            # bbox 1 will be assigned as the best target for bbox A in step 3.
+            # However, if GT bbox 2's gt_argmax_overlaps = A, bbox A's
+            # assigned_gt_inds will be overwritten to be gt bbox 2.
+            # This might be the reason that it is not used in ROI Heads.
+            for i in range(num_gts):
+                if gt_max_overlaps[i] >= self.min_pos_iou:
+                    if self.gt_max_assign_all:
+                        max_iou_inds = overlaps[i, :] == gt_max_overlaps[i]
+                        assigned_gt_inds[max_iou_inds] = i + 1
+                    else:
+                        assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels)
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/assigners/point_assigner.py b/thirdparty/mmdetection/mmdet/core/bbox/assigners/point_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb8f5e4edc63f4851e2067034c5e67a3558f31bc
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/assigners/point_assigner.py
@@ -0,0 +1,133 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class PointAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each point.
+
+    Each point will be assigned with `0`, or a positive integer
+    indicating the ground truth index.
+
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+    """
+
+    def __init__(self, scale=4, pos_num=3):
+        self.scale = scale
+        self.pos_num = pos_num
+
+    def assign(self, points, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign gt to points.
+
+        This method assigns a gt bbox to every point set; each point set
+        will be assigned with the background_label (-1), or a label number.
+        -1 is background, and a semi-positive number is the index (0-based)
+        of the assigned gt.
+        The assignment is done in the following steps, and the order matters.
+
+        1. assign every point to the background_label (-1)
+        2. A point is assigned to some gt bbox if
+           (i) the point is within the k closest points to the gt bbox
+           (ii) the distance between this point and the gt is smaller than
+           other gt bboxes
+
+        Args:
+            points (Tensor): points to be assigned, shape (n, 3), where the
+                last dimension stands for (x, y, stride).
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+                NOTE: currently unused.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_points = points.shape[0]
+        num_gts = gt_bboxes.shape[0]
+
+        if num_gts == 0 or num_points == 0:
+            # If no truth, assign everything to the background
+            assigned_gt_inds = points.new_full((num_points, ),
+                                               0,
+                                               dtype=torch.long)
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = points.new_full((num_points, ),
+                                                  -1,
+                                                  dtype=torch.long)
+            return AssignResult(
+                num_gts, assigned_gt_inds, None, labels=assigned_labels)
+
+        points_xy = points[:, :2]
+        points_stride = points[:, 2]
+        points_lvl = torch.log2(
+            points_stride).int()  # [3...,4...,5...,6...,7...]
+        lvl_min, lvl_max = points_lvl.min(), points_lvl.max()
+
+        # assign gt box
+        gt_bboxes_xy = (gt_bboxes[:, :2] + gt_bboxes[:, 2:]) / 2
+        gt_bboxes_wh = (gt_bboxes[:, 2:] - gt_bboxes[:, :2]).clamp(min=1e-6)
+        scale = self.scale
+        gt_bboxes_lvl = ((torch.log2(gt_bboxes_wh[:, 0] / scale) +
+                          torch.log2(gt_bboxes_wh[:, 1] / scale)) / 2).int()
+        gt_bboxes_lvl = torch.clamp(gt_bboxes_lvl, min=lvl_min, max=lvl_max)
+
+        # stores the assigned gt index of each point
+        assigned_gt_inds = points.new_zeros((num_points, ), dtype=torch.long)
+        # stores the assigned gt dist (to this point) of each point
+        assigned_gt_dist = points.new_full((num_points, ), float('inf'))
+        points_range = torch.arange(points.shape[0])
+
+        for idx in range(num_gts):
+            gt_lvl = gt_bboxes_lvl[idx]
+            # get the index of points in this level
+            lvl_idx = gt_lvl == points_lvl
+            points_index = points_range[lvl_idx]
+            # get the points in this level
+            lvl_points = points_xy[lvl_idx, :]
+            # get the center point of gt
+            gt_point = gt_bboxes_xy[[idx], :]
+            # get width and height of gt
+            gt_wh = gt_bboxes_wh[[idx], :]
+            # compute the distance between gt center and
+            # all points in this level
+            points_gt_dist = ((lvl_points - gt_point) / gt_wh).norm(dim=1)
+            # find the nearest k points to gt center in this level
+            min_dist, min_dist_index = torch.topk(
+                points_gt_dist, self.pos_num, largest=False)
+            # the index of the nearest k points to gt center in this level
+            min_dist_points_index = points_index[min_dist_index]
+            # The less_than_recorded_index stores the index
+            # of min_dist that is less than the assigned_gt_dist, where
+            # assigned_gt_dist stores the dist from the previously assigned gt
+            # (if any) to each point.
+            less_than_recorded_index = min_dist < assigned_gt_dist[
+                min_dist_points_index]
+            # The min_dist_points_index stores the index of points satisfying:
+            # (1) it is among the k nearest to the current gt center in this
+            #     level.
+            # (2) it is closer to the current gt center than other gt centers.
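+            # Note: a point already claimed by an earlier gt is re-assigned
+            # only if the current gt is strictly closer (in normalized
+            # distance), so each point keeps the closest gt seen so far.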
+ min_dist_points_index = min_dist_points_index[ + less_than_recorded_index] + # assign the result + assigned_gt_inds[min_dist_points_index] = idx + 1 + assigned_gt_dist[min_dist_points_index] = min_dist[ + less_than_recorded_index] + + if gt_labels is not None: + assigned_labels = assigned_gt_inds.new_full((num_points, ), -1) + pos_inds = torch.nonzero( + assigned_gt_inds > 0, as_tuple=False).squeeze() + if pos_inds.numel() > 0: + assigned_labels[pos_inds] = gt_labels[ + assigned_gt_inds[pos_inds] - 1] + else: + assigned_labels = None + + return AssignResult( + num_gts, assigned_gt_inds, None, labels=assigned_labels) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/builder.py b/thirdparty/mmdetection/mmdet/core/bbox/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..682683b62ae55396f24e9f9eea0f8193e2e88de6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/builder.py @@ -0,0 +1,20 @@ +from mmcv.utils import Registry, build_from_cfg + +BBOX_ASSIGNERS = Registry('bbox_assigner') +BBOX_SAMPLERS = Registry('bbox_sampler') +BBOX_CODERS = Registry('bbox_coder') + + +def build_assigner(cfg, **default_args): + """Builder of box assigner.""" + return build_from_cfg(cfg, BBOX_ASSIGNERS, default_args) + + +def build_sampler(cfg, **default_args): + """Builder of box sampler.""" + return build_from_cfg(cfg, BBOX_SAMPLERS, default_args) + + +def build_bbox_coder(cfg, **default_args): + """Builder of box coder.""" + return build_from_cfg(cfg, BBOX_CODERS, default_args) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/__init__.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ae455ba8fc0e0727e2d581cdc8f20fceededf99a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/__init__.py @@ -0,0 +1,13 @@ +from .base_bbox_coder import BaseBBoxCoder +from .bucketing_bbox_coder import BucketingBBoxCoder +from .delta_xywh_bbox_coder import DeltaXYWHBBoxCoder +from .legacy_delta_xywh_bbox_coder import LegacyDeltaXYWHBBoxCoder +from .pseudo_bbox_coder import PseudoBBoxCoder +from .tblr_bbox_coder import TBLRBBoxCoder +from .yolo_bbox_coder import YOLOBBoxCoder + +__all__ = [ + 'BaseBBoxCoder', 'PseudoBBoxCoder', 'DeltaXYWHBBoxCoder', + 'LegacyDeltaXYWHBBoxCoder', 'TBLRBBoxCoder', 'YOLOBBoxCoder', + 'BucketingBBoxCoder' +] diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/base_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/base_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..6e4272721534127c66ce3443df527d17ae6fa118 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/base_bbox_coder.py @@ -0,0 +1,19 @@ +from abc import ABCMeta, abstractmethod + + +class BaseBBoxCoder(metaclass=ABCMeta): + """Base bounding box coder.""" + + def __init__(self, **kwargs): + pass + + @abstractmethod + def encode(self, bboxes, gt_bboxes): + """Encode deltas between bboxes and ground truth boxes.""" + pass + + @abstractmethod + def decode(self, bboxes, bboxes_pred): + """Decode the predicted bboxes according to prediction and base + boxes.""" + pass diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/bucketing_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/bucketing_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..e8c450c5fbe90295aefb888cf1e4c24c26fbed5d --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/bucketing_bbox_coder.py @@ -0,0 +1,346 @@ +import numpy as np 
+import torch
+import torch.nn.functional as F
+
+from ..builder import BBOX_CODERS
+from ..transforms import bbox_rescale
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class BucketingBBoxCoder(BaseBBoxCoder):
+    """Bucketing BBox Coder for Side-Aware Boundary Localization (SABL).
+
+    Boundary Localization with Bucketing and Bucketing Guided Rescoring
+    are implemented here.
+
+    Please refer to https://arxiv.org/abs/1912.04260 for more details.
+
+    Args:
+        num_buckets (int): Number of buckets.
+        scale_factor (int): Scale factor of proposals to generate buckets.
+        offset_topk (int): Topk buckets are used to generate
+            bucket fine regression targets. Defaults to 2.
+        offset_upperbound (float): Offset upperbound to generate
+            bucket fine regression targets,
+            to avoid too large offset displacements. Defaults to 1.0.
+        cls_ignore_neighbor (bool): Whether to ignore the second nearest
+            bucket. Defaults to True.
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+    """
+
+    def __init__(self,
+                 num_buckets,
+                 scale_factor,
+                 offset_topk=2,
+                 offset_upperbound=1.0,
+                 cls_ignore_neighbor=True,
+                 clip_border=True):
+        super(BucketingBBoxCoder, self).__init__()
+        self.num_buckets = num_buckets
+        self.scale_factor = scale_factor
+        self.offset_topk = offset_topk
+        self.offset_upperbound = offset_upperbound
+        self.cls_ignore_neighbor = cls_ignore_neighbor
+        self.clip_border = clip_border
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get bucketing estimation and fine regression targets during
+        training.
+
+        Args:
+            bboxes (torch.Tensor): source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): target of the transformation, e.g.,
+                ground truth boxes.
+
+        Returns:
+            encoded_bboxes (tuple[Tensor]): bucketing estimation
+                and fine regression targets and weights
+        """
+
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = bbox2bucket(bboxes, gt_bboxes, self.num_buckets,
+                                     self.scale_factor, self.offset_topk,
+                                     self.offset_upperbound,
+                                     self.cls_ignore_neighbor)
+        return encoded_bboxes
+
+    def decode(self, bboxes, pred_bboxes, max_shape=None):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            boxes (torch.Tensor): Basic boxes.
+            pred_bboxes (torch.Tensor): Predictions for bucketing estimation
+                and fine regression
+            max_shape (tuple[int], optional): Maximum shape of boxes.
+                Defaults to None.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        assert len(pred_bboxes) == 2
+        cls_preds, offset_preds = pred_bboxes
+        assert cls_preds.size(0) == bboxes.size(0) and offset_preds.size(
+            0) == bboxes.size(0)
+        decoded_bboxes = bucket2bbox(bboxes, cls_preds, offset_preds,
+                                     self.num_buckets, self.scale_factor,
+                                     max_shape, self.clip_border)
+
+        return decoded_bboxes
+
+
+def generat_buckets(proposals, num_buckets, scale_factor=1.0):
+    """Generate buckets w.r.t. bucket number and scale factor of proposals.
+
+    Args:
+        proposals (Tensor): Shape (n, 4)
+        num_buckets (int): Number of buckets.
+        scale_factor (float): Scale factor to rescale proposals.
+
+    Returns:
+        tuple[Tensor]: (bucket_w, bucket_h, l_buckets, r_buckets,
+        t_buckets, d_buckets)
+
+            - bucket_w: Width of buckets on x-axis. Shape (n, ).
+            - bucket_h: Height of buckets on y-axis. Shape (n, ).
+            - l_buckets: Left buckets. Shape (n, ceil(side_num/2)).
+            - r_buckets: Right buckets. Shape (n, ceil(side_num/2)).
+            - t_buckets: Top buckets. Shape (n, ceil(side_num/2)).
+            - d_buckets: Down buckets. Shape (n, ceil(side_num/2)).
+    """
+    proposals = bbox_rescale(proposals, scale_factor)
+
+    # number of buckets on each side
+    side_num = int(np.ceil(num_buckets / 2.0))
+    pw = proposals[..., 2] - proposals[..., 0]
+    ph = proposals[..., 3] - proposals[..., 1]
+    px1 = proposals[..., 0]
+    py1 = proposals[..., 1]
+    px2 = proposals[..., 2]
+    py2 = proposals[..., 3]
+
+    bucket_w = pw / num_buckets
+    bucket_h = ph / num_buckets
+
+    # left buckets
+    l_buckets = px1[:, None] + (0.5 + torch.arange(
+        0, side_num).to(proposals).float())[None, :] * bucket_w[:, None]
+    # right buckets
+    r_buckets = px2[:, None] - (0.5 + torch.arange(
+        0, side_num).to(proposals).float())[None, :] * bucket_w[:, None]
+    # top buckets
+    t_buckets = py1[:, None] + (0.5 + torch.arange(
+        0, side_num).to(proposals).float())[None, :] * bucket_h[:, None]
+    # down buckets
+    d_buckets = py2[:, None] - (0.5 + torch.arange(
+        0, side_num).to(proposals).float())[None, :] * bucket_h[:, None]
+    return bucket_w, bucket_h, l_buckets, r_buckets, t_buckets, d_buckets
+
+
+def bbox2bucket(proposals,
+                gt,
+                num_buckets,
+                scale_factor,
+                offset_topk=2,
+                offset_upperbound=1.0,
+                cls_ignore_neighbor=True):
+    """Generate buckets estimation and fine regression targets.
+
+    Args:
+        proposals (Tensor): Shape (n, 4)
+        gt (Tensor): Shape (n, 4)
+        num_buckets (int): Number of buckets.
+        scale_factor (float): Scale factor to rescale proposals.
+        offset_topk (int): Topk buckets are used to generate
+            bucket fine regression targets. Defaults to 2.
+        offset_upperbound (float): Offset allowance to generate
+            bucket fine regression targets,
+            to avoid too large offset displacements. Defaults to 1.0.
+        cls_ignore_neighbor (bool): Whether to ignore the second nearest
+            bucket. Defaults to True.
+
+    Returns:
+        tuple[Tensor]: (offsets, offsets_weights, bucket_labels, cls_weights).
+
+            - offsets: Fine regression targets. \
+                Shape (n, num_buckets*2).
+            - offsets_weights: Fine regression weights. \
+                Shape (n, num_buckets*2).
+            - bucket_labels: Bucketing estimation labels. \
+                Shape (n, num_buckets*2).
+            - cls_weights: Bucketing estimation weights. \
+                Shape (n, num_buckets*2).
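+
+    Example:
+        >>> # Illustrative shape check with hand-picked values (not from any
+        >>> # dataset): each returned tensor has 2 * num_buckets columns.
+        >>> proposals = torch.Tensor([[0., 0., 100., 100.]])
+        >>> gt = torch.Tensor([[10., 10., 90., 90.]])
+        >>> offsets, offset_weights, labels, cls_weights = bbox2bucket(
+        ...     proposals, gt, num_buckets=14, scale_factor=3.0)
+        >>> assert offsets.shape == labels.shape == (1, 28)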
+ """ + assert proposals.size() == gt.size() + + # generate buckets + proposals = proposals.float() + gt = gt.float() + (bucket_w, bucket_h, l_buckets, r_buckets, t_buckets, + d_buckets) = generat_buckets(proposals, num_buckets, scale_factor) + + gx1 = gt[..., 0] + gy1 = gt[..., 1] + gx2 = gt[..., 2] + gy2 = gt[..., 3] + + # generate offset targets and weights + # offsets from buckets to gts + l_offsets = (l_buckets - gx1[:, None]) / bucket_w[:, None] + r_offsets = (r_buckets - gx2[:, None]) / bucket_w[:, None] + t_offsets = (t_buckets - gy1[:, None]) / bucket_h[:, None] + d_offsets = (d_buckets - gy2[:, None]) / bucket_h[:, None] + + # select top-k nearset buckets + l_topk, l_label = l_offsets.abs().topk( + offset_topk, dim=1, largest=False, sorted=True) + r_topk, r_label = r_offsets.abs().topk( + offset_topk, dim=1, largest=False, sorted=True) + t_topk, t_label = t_offsets.abs().topk( + offset_topk, dim=1, largest=False, sorted=True) + d_topk, d_label = d_offsets.abs().topk( + offset_topk, dim=1, largest=False, sorted=True) + + offset_l_weights = l_offsets.new_zeros(l_offsets.size()) + offset_r_weights = r_offsets.new_zeros(r_offsets.size()) + offset_t_weights = t_offsets.new_zeros(t_offsets.size()) + offset_d_weights = d_offsets.new_zeros(d_offsets.size()) + inds = torch.arange(0, proposals.size(0)).to(proposals).long() + + # generate offset weights of top-k nearset buckets + for k in range(offset_topk): + if k >= 1: + offset_l_weights[inds, l_label[:, + k]] = (l_topk[:, k] < + offset_upperbound).float() + offset_r_weights[inds, r_label[:, + k]] = (r_topk[:, k] < + offset_upperbound).float() + offset_t_weights[inds, t_label[:, + k]] = (t_topk[:, k] < + offset_upperbound).float() + offset_d_weights[inds, d_label[:, + k]] = (d_topk[:, k] < + offset_upperbound).float() + else: + offset_l_weights[inds, l_label[:, k]] = 1.0 + offset_r_weights[inds, r_label[:, k]] = 1.0 + offset_t_weights[inds, t_label[:, k]] = 1.0 + offset_d_weights[inds, d_label[:, k]] = 1.0 + + offsets = torch.cat([l_offsets, r_offsets, t_offsets, d_offsets], dim=-1) + offsets_weights = torch.cat([ + offset_l_weights, offset_r_weights, offset_t_weights, offset_d_weights + ], + dim=-1) + + # generate bucket labels and weight + side_num = int(np.ceil(num_buckets / 2.0)) + labels = torch.stack( + [l_label[:, 0], r_label[:, 0], t_label[:, 0], d_label[:, 0]], dim=-1) + + batch_size = labels.size(0) + bucket_labels = F.one_hot(labels.view(-1), side_num).view(batch_size, + -1).float() + bucket_cls_l_weights = (l_offsets.abs() < 1).float() + bucket_cls_r_weights = (r_offsets.abs() < 1).float() + bucket_cls_t_weights = (t_offsets.abs() < 1).float() + bucket_cls_d_weights = (d_offsets.abs() < 1).float() + bucket_cls_weights = torch.cat([ + bucket_cls_l_weights, bucket_cls_r_weights, bucket_cls_t_weights, + bucket_cls_d_weights + ], + dim=-1) + # ignore second nearest buckets for cls if necessay + if cls_ignore_neighbor: + bucket_cls_weights = (~((bucket_cls_weights == 1) & + (bucket_labels == 0))).float() + else: + bucket_cls_weights[:] = 1.0 + return offsets, offsets_weights, bucket_labels, bucket_cls_weights + + +def bucket2bbox(proposals, + cls_preds, + offset_preds, + num_buckets, + scale_factor=1.0, + max_shape=None, + clip_border=True): + """Apply bucketing estimation (cls preds) and fine regression (offset + preds) to generate det bboxes. + + Args: + proposals (Tensor): Boxes to be transformed. Shape (n, 4) + cls_preds (Tensor): bucketing estimation. Shape (n, num_buckets*2). + offset_preds (Tensor): fine regression. 
Shape (n, num_buckets*2). + num_buckets (int): Number of buckets. + scale_factor (float): Scale factor to rescale proposals. + max_shape (tuple[int, int]): Maximum bounds for boxes. specifies (H, W) + clip_border (bool, optional): Whether clip the objects outside the + border of the image. Defaults to True. + + Returns: + tuple[Tensor]: (bboxes, loc_confidence). + + - bboxes: predicted bboxes. Shape (n, 4) + - loc_confidence: localization confidence of predicted bboxes. + Shape (n,). + """ + + side_num = int(np.ceil(num_buckets / 2.0)) + cls_preds = cls_preds.view(-1, side_num) + offset_preds = offset_preds.view(-1, side_num) + + scores = F.softmax(cls_preds, dim=1) + score_topk, score_label = scores.topk(2, dim=1, largest=True, sorted=True) + + rescaled_proposals = bbox_rescale(proposals, scale_factor) + + pw = rescaled_proposals[..., 2] - rescaled_proposals[..., 0] + ph = rescaled_proposals[..., 3] - rescaled_proposals[..., 1] + px1 = rescaled_proposals[..., 0] + py1 = rescaled_proposals[..., 1] + px2 = rescaled_proposals[..., 2] + py2 = rescaled_proposals[..., 3] + + bucket_w = pw / num_buckets + bucket_h = ph / num_buckets + + score_inds_l = score_label[0::4, 0] + score_inds_r = score_label[1::4, 0] + score_inds_t = score_label[2::4, 0] + score_inds_d = score_label[3::4, 0] + l_buckets = px1 + (0.5 + score_inds_l.float()) * bucket_w + r_buckets = px2 - (0.5 + score_inds_r.float()) * bucket_w + t_buckets = py1 + (0.5 + score_inds_t.float()) * bucket_h + d_buckets = py2 - (0.5 + score_inds_d.float()) * bucket_h + + offsets = offset_preds.view(-1, 4, side_num) + inds = torch.arange(proposals.size(0)).to(proposals).long() + l_offsets = offsets[:, 0, :][inds, score_inds_l] + r_offsets = offsets[:, 1, :][inds, score_inds_r] + t_offsets = offsets[:, 2, :][inds, score_inds_t] + d_offsets = offsets[:, 3, :][inds, score_inds_d] + + x1 = l_buckets - l_offsets * bucket_w + x2 = r_buckets - r_offsets * bucket_w + y1 = t_buckets - t_offsets * bucket_h + y2 = d_buckets - d_offsets * bucket_h + + if clip_border and max_shape is not None: + x1 = x1.clamp(min=0, max=max_shape[1] - 1) + y1 = y1.clamp(min=0, max=max_shape[0] - 1) + x2 = x2.clamp(min=0, max=max_shape[1] - 1) + y2 = y2.clamp(min=0, max=max_shape[0] - 1) + bboxes = torch.cat([x1[:, None], y1[:, None], x2[:, None], y2[:, None]], + dim=-1) + + # bucketing guided rescoring + loc_confidence = score_topk[:, 0] + top2_neighbor_inds = (score_label[:, 0] - score_label[:, 1]).abs() == 1 + loc_confidence += score_topk[:, 1] * top2_neighbor_inds.float() + loc_confidence = loc_confidence.view(-1, 4).mean(dim=1) + + return bboxes, loc_confidence diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..e9eb35790512cdc2befde2b0e11d0950aa2a608e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py @@ -0,0 +1,204 @@ +import numpy as np +import torch + +from ..builder import BBOX_CODERS +from .base_bbox_coder import BaseBBoxCoder + + +@BBOX_CODERS.register_module() +class DeltaXYWHBBoxCoder(BaseBBoxCoder): + """Delta XYWH BBox coder. + + Following the practice in `R-CNN `_, + this coder encodes bbox (x1, y1, x2, y2) into delta (dx, dy, dw, dh) and + decodes delta (dx, dy, dw, dh) back to original bbox (x1, y1, x2, y2). 
+ + Args: + target_means (Sequence[float]): Denormalizing means of target for + delta coordinates + target_stds (Sequence[float]): Denormalizing standard deviation of + target for delta coordinates + clip_border (bool, optional): Whether clip the objects outside the + border of the image. Defaults to True. + """ + + def __init__(self, + target_means=(0., 0., 0., 0.), + target_stds=(1., 1., 1., 1.), + clip_border=True): + super(BaseBBoxCoder, self).__init__() + self.means = target_means + self.stds = target_stds + self.clip_border = clip_border + + def encode(self, bboxes, gt_bboxes): + """Get box regression transformation deltas that can be used to + transform the ``bboxes`` into the ``gt_bboxes``. + + Args: + bboxes (torch.Tensor): Source boxes, e.g., object proposals. + gt_bboxes (torch.Tensor): Target of the transformation, e.g., + ground-truth boxes. + + Returns: + torch.Tensor: Box transformation deltas + """ + + assert bboxes.size(0) == gt_bboxes.size(0) + assert bboxes.size(-1) == gt_bboxes.size(-1) == 4 + encoded_bboxes = bbox2delta(bboxes, gt_bboxes, self.means, self.stds) + return encoded_bboxes + + def decode(self, + bboxes, + pred_bboxes, + max_shape=None, + wh_ratio_clip=16 / 1000): + """Apply transformation `pred_bboxes` to `boxes`. + + Args: + boxes (torch.Tensor): Basic boxes. + pred_bboxes (torch.Tensor): Encoded boxes with shape + max_shape (tuple[int], optional): Maximum shape of boxes. + Defaults to None. + wh_ratio_clip (float, optional): The allowed ratio between + width and height. + + Returns: + torch.Tensor: Decoded boxes. + """ + + assert pred_bboxes.size(0) == bboxes.size(0) + decoded_bboxes = delta2bbox(bboxes, pred_bboxes, self.means, self.stds, + max_shape, wh_ratio_clip, self.clip_border) + + return decoded_bboxes + + +def bbox2delta(proposals, gt, means=(0., 0., 0., 0.), stds=(1., 1., 1., 1.)): + """Compute deltas of proposals w.r.t. gt. + + We usually compute the deltas of x, y, w, h of proposals w.r.t ground + truth bboxes to get regression target. + This is the inverse function of :func:`delta2bbox`. + + Args: + proposals (Tensor): Boxes to be transformed, shape (N, ..., 4) + gt (Tensor): Gt bboxes to be used as base, shape (N, ..., 4) + means (Sequence[float]): Denormalizing means for delta coordinates + stds (Sequence[float]): Denormalizing standard deviation for delta + coordinates + + Returns: + Tensor: deltas with shape (N, 4), where columns represent dx, dy, + dw, dh. + """ + assert proposals.size() == gt.size() + + proposals = proposals.float() + gt = gt.float() + px = (proposals[..., 0] + proposals[..., 2]) * 0.5 + py = (proposals[..., 1] + proposals[..., 3]) * 0.5 + pw = proposals[..., 2] - proposals[..., 0] + ph = proposals[..., 3] - proposals[..., 1] + + gx = (gt[..., 0] + gt[..., 2]) * 0.5 + gy = (gt[..., 1] + gt[..., 3]) * 0.5 + gw = gt[..., 2] - gt[..., 0] + gh = gt[..., 3] - gt[..., 1] + + dx = (gx - px) / pw + dy = (gy - py) / ph + dw = torch.log(gw / pw) + dh = torch.log(gh / ph) + deltas = torch.stack([dx, dy, dw, dh], dim=-1) + + means = deltas.new_tensor(means).unsqueeze(0) + stds = deltas.new_tensor(stds).unsqueeze(0) + deltas = deltas.sub_(means).div_(stds) + + return deltas + + +def delta2bbox(rois, + deltas, + means=(0., 0., 0., 0.), + stds=(1., 1., 1., 1.), + max_shape=None, + wh_ratio_clip=16 / 1000, + clip_border=True): + """Apply deltas to shift/scale base boxes. + + Typically the rois are anchor or proposed bounding boxes and the deltas are + network outputs used to shift/scale those boxes. 
+ This is the inverse function of :func:`bbox2delta`. + + Args: + rois (Tensor): Boxes to be transformed. Has shape (N, 4) + deltas (Tensor): Encoded offsets with respect to each roi. + Has shape (N, 4 * num_classes). Note N = num_anchors * W * H when + rois is a grid of anchors. Offset encoding follows [1]_. + means (Sequence[float]): Denormalizing means for delta coordinates + stds (Sequence[float]): Denormalizing standard deviation for delta + coordinates + max_shape (tuple[int, int]): Maximum bounds for boxes. specifies (H, W) + wh_ratio_clip (float): Maximum aspect ratio for boxes. + clip_border (bool, optional): Whether clip the objects outside the + border of the image. Defaults to True. + + Returns: + Tensor: Boxes with shape (N, 4), where columns represent + tl_x, tl_y, br_x, br_y. + + References: + .. [1] https://arxiv.org/abs/1311.2524 + + Example: + >>> rois = torch.Tensor([[ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 5., 5., 5., 5.]]) + >>> deltas = torch.Tensor([[ 0., 0., 0., 0.], + >>> [ 1., 1., 1., 1.], + >>> [ 0., 0., 2., -1.], + >>> [ 0.7, -1.9, -0.5, 0.3]]) + >>> delta2bbox(rois, deltas, max_shape=(32, 32)) + tensor([[0.0000, 0.0000, 1.0000, 1.0000], + [0.1409, 0.1409, 2.8591, 2.8591], + [0.0000, 0.3161, 4.1945, 0.6839], + [5.0000, 5.0000, 5.0000, 5.0000]]) + """ + means = deltas.new_tensor(means).view(1, -1).repeat(1, deltas.size(1) // 4) + stds = deltas.new_tensor(stds).view(1, -1).repeat(1, deltas.size(1) // 4) + denorm_deltas = deltas * stds + means + dx = denorm_deltas[:, 0::4] + dy = denorm_deltas[:, 1::4] + dw = denorm_deltas[:, 2::4] + dh = denorm_deltas[:, 3::4] + max_ratio = np.abs(np.log(wh_ratio_clip)) + dw = dw.clamp(min=-max_ratio, max=max_ratio) + dh = dh.clamp(min=-max_ratio, max=max_ratio) + # Compute center of each roi + px = ((rois[:, 0] + rois[:, 2]) * 0.5).unsqueeze(1).expand_as(dx) + py = ((rois[:, 1] + rois[:, 3]) * 0.5).unsqueeze(1).expand_as(dy) + # Compute width/height of each roi + pw = (rois[:, 2] - rois[:, 0]).unsqueeze(1).expand_as(dw) + ph = (rois[:, 3] - rois[:, 1]).unsqueeze(1).expand_as(dh) + # Use exp(network energy) to enlarge/shrink each roi + gw = pw * dw.exp() + gh = ph * dh.exp() + # Use network energy to shift the center of each roi + gx = px + pw * dx + gy = py + ph * dy + # Convert center-xy/width/height to top-left, bottom-right + x1 = gx - gw * 0.5 + y1 = gy - gh * 0.5 + x2 = gx + gw * 0.5 + y2 = gy + gh * 0.5 + if clip_border and max_shape is not None: + x1 = x1.clamp(min=0, max=max_shape[1]) + y1 = y1.clamp(min=0, max=max_shape[0]) + x2 = x2.clamp(min=0, max=max_shape[1]) + y2 = y2.clamp(min=0, max=max_shape[0]) + bboxes = torch.stack([x1, y1, x2, y2], dim=-1).view(deltas.size()) + return bboxes diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..74e801a154dce35ee0bf39187035430bbdb4b897 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py @@ -0,0 +1,212 @@ +import numpy as np +import torch + +from ..builder import BBOX_CODERS +from .base_bbox_coder import BaseBBoxCoder + + +@BBOX_CODERS.register_module() +class LegacyDeltaXYWHBBoxCoder(BaseBBoxCoder): + """Legacy Delta XYWH BBox coder used in MMDet V1.x. 
+
+    Following the practice in R-CNN [1]_, this coder encodes bbox (x1, y1, x2,
+    y2) into delta (dx, dy, dw, dh) and decodes delta (dx, dy, dw, dh)
+    back to original bbox (x1, y1, x2, y2).
+
+    Note:
+        The main difference between :class:`LegacyDeltaXYWHBBoxCoder` and
+        :class:`DeltaXYWHBBoxCoder` is whether ``+ 1`` is used during width and
+        height calculation. We suggest using this coder only when testing with
+        MMDet V1.x models.
+
+    References:
+        .. [1] https://arxiv.org/abs/1311.2524
+
+    Args:
+        target_means (Sequence[float]): denormalizing means of target for
+            delta coordinates
+        target_stds (Sequence[float]): denormalizing standard deviation of
+            target for delta coordinates
+    """
+
+    def __init__(self,
+                 target_means=(0., 0., 0., 0.),
+                 target_stds=(1., 1., 1., 1.)):
+        super(BaseBBoxCoder, self).__init__()
+        self.means = target_means
+        self.stds = target_stds
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes``.
+
+        Args:
+            bboxes (torch.Tensor): source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): target of the transformation, e.g.,
+                ground-truth boxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = legacy_bbox2delta(bboxes, gt_bboxes, self.means,
+                                           self.stds)
+        return encoded_bboxes
+
+    def decode(self,
+               bboxes,
+               pred_bboxes,
+               max_shape=None,
+               wh_ratio_clip=16 / 1000):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            boxes (torch.Tensor): Basic boxes.
+            pred_bboxes (torch.Tensor): Encoded boxes with shape (N, 4) or
+                (N, 4 * num_classes).
+            max_shape (tuple[int], optional): Maximum shape of boxes.
+                Defaults to None.
+            wh_ratio_clip (float, optional): The allowed ratio between
+                width and height.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        assert pred_bboxes.size(0) == bboxes.size(0)
+        decoded_bboxes = legacy_delta2bbox(bboxes, pred_bboxes, self.means,
+                                           self.stds, max_shape, wh_ratio_clip)
+
+        return decoded_bboxes
+
+
+def legacy_bbox2delta(proposals,
+                      gt,
+                      means=(0., 0., 0., 0.),
+                      stds=(1., 1., 1., 1.)):
+    """Compute deltas of proposals w.r.t. gt in the MMDet V1.x manner.
+
+    We usually compute the deltas of x, y, w, h of proposals w.r.t ground
+    truth bboxes to get regression targets.
+    This is the inverse function of `delta2bbox()`
+
+    Args:
+        proposals (Tensor): Boxes to be transformed, shape (N, ..., 4)
+        gt (Tensor): Gt bboxes to be used as base, shape (N, ..., 4)
+        means (Sequence[float]): Denormalizing means for delta coordinates
+        stds (Sequence[float]): Denormalizing standard deviation for delta
+            coordinates
+
+    Returns:
+        Tensor: deltas with shape (N, 4), where columns represent dx, dy,
+            dw, dh.
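+
+    Example:
+        >>> # Hand-computed illustration of the legacy "+ 1" convention:
+        >>> # a degenerate proposal with x1 == x2 still has width 1.0 here,
+        >>> # so dw = log(2 / 1) for a gt of width 2.
+        >>> proposals = torch.Tensor([[0., 0., 0., 0.]])
+        >>> gt = torch.Tensor([[0., 0., 1., 1.]])
+        >>> legacy_bbox2delta(proposals, gt)
+        tensor([[0.5000, 0.5000, 0.6931, 0.6931]])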
+ """ + assert proposals.size() == gt.size() + + proposals = proposals.float() + gt = gt.float() + px = (proposals[..., 0] + proposals[..., 2]) * 0.5 + py = (proposals[..., 1] + proposals[..., 3]) * 0.5 + pw = proposals[..., 2] - proposals[..., 0] + 1.0 + ph = proposals[..., 3] - proposals[..., 1] + 1.0 + + gx = (gt[..., 0] + gt[..., 2]) * 0.5 + gy = (gt[..., 1] + gt[..., 3]) * 0.5 + gw = gt[..., 2] - gt[..., 0] + 1.0 + gh = gt[..., 3] - gt[..., 1] + 1.0 + + dx = (gx - px) / pw + dy = (gy - py) / ph + dw = torch.log(gw / pw) + dh = torch.log(gh / ph) + deltas = torch.stack([dx, dy, dw, dh], dim=-1) + + means = deltas.new_tensor(means).unsqueeze(0) + stds = deltas.new_tensor(stds).unsqueeze(0) + deltas = deltas.sub_(means).div_(stds) + + return deltas + + +def legacy_delta2bbox(rois, + deltas, + means=(0., 0., 0., 0.), + stds=(1., 1., 1., 1.), + max_shape=None, + wh_ratio_clip=16 / 1000): + """Apply deltas to shift/scale base boxes in the MMDet V1.x manner. + + Typically the rois are anchor or proposed bounding boxes and the deltas are + network outputs used to shift/scale those boxes. + This is the inverse function of `bbox2delta()` + + Args: + rois (Tensor): Boxes to be transformed. Has shape (N, 4) + deltas (Tensor): Encoded offsets with respect to each roi. + Has shape (N, 4 * num_classes). Note N = num_anchors * W * H when + rois is a grid of anchors. Offset encoding follows [1]_. + means (Sequence[float]): Denormalizing means for delta coordinates + stds (Sequence[float]): Denormalizing standard deviation for delta + coordinates + max_shape (tuple[int, int]): Maximum bounds for boxes. specifies (H, W) + wh_ratio_clip (float): Maximum aspect ratio for boxes. + + Returns: + Tensor: Boxes with shape (N, 4), where columns represent + tl_x, tl_y, br_x, br_y. + + References: + .. [1] https://arxiv.org/abs/1311.2524 + + Example: + >>> rois = torch.Tensor([[ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 5., 5., 5., 5.]]) + >>> deltas = torch.Tensor([[ 0., 0., 0., 0.], + >>> [ 1., 1., 1., 1.], + >>> [ 0., 0., 2., -1.], + >>> [ 0.7, -1.9, -0.5, 0.3]]) + >>> legacy_delta2bbox(rois, deltas, max_shape=(32, 32)) + tensor([[0.0000, 0.0000, 1.5000, 1.5000], + [0.0000, 0.0000, 5.2183, 5.2183], + [0.0000, 0.1321, 7.8891, 0.8679], + [5.3967, 2.4251, 6.0033, 3.7749]]) + """ + means = deltas.new_tensor(means).repeat(1, deltas.size(1) // 4) + stds = deltas.new_tensor(stds).repeat(1, deltas.size(1) // 4) + denorm_deltas = deltas * stds + means + dx = denorm_deltas[:, 0::4] + dy = denorm_deltas[:, 1::4] + dw = denorm_deltas[:, 2::4] + dh = denorm_deltas[:, 3::4] + max_ratio = np.abs(np.log(wh_ratio_clip)) + dw = dw.clamp(min=-max_ratio, max=max_ratio) + dh = dh.clamp(min=-max_ratio, max=max_ratio) + # Compute center of each roi + px = ((rois[:, 0] + rois[:, 2]) * 0.5).unsqueeze(1).expand_as(dx) + py = ((rois[:, 1] + rois[:, 3]) * 0.5).unsqueeze(1).expand_as(dy) + # Compute width/height of each roi + pw = (rois[:, 2] - rois[:, 0] + 1.0).unsqueeze(1).expand_as(dw) + ph = (rois[:, 3] - rois[:, 1] + 1.0).unsqueeze(1).expand_as(dh) + # Use exp(network energy) to enlarge/shrink each roi + gw = pw * dw.exp() + gh = ph * dh.exp() + # Use network energy to shift the center of each roi + gx = px + pw * dx + gy = py + ph * dy + # Convert center-xy/width/height to top-left, bottom-right + + # The true legacy box coder should +- 0.5 here. 
+    # However, the current implementation improves the performance when
+    # testing the models trained in MMDetection 1.X (~0.5 bbox AP, 0.2 mask AP)
+    x1 = gx - gw * 0.5
+    y1 = gy - gh * 0.5
+    x2 = gx + gw * 0.5
+    y2 = gy + gh * 0.5
+    if max_shape is not None:
+        x1 = x1.clamp(min=0, max=max_shape[1] - 1)
+        y1 = y1.clamp(min=0, max=max_shape[0] - 1)
+        x2 = x2.clamp(min=0, max=max_shape[1] - 1)
+        y2 = y2.clamp(min=0, max=max_shape[0] - 1)
+    bboxes = torch.stack([x1, y1, x2, y2], dim=-1).view_as(deltas)
+    return bboxes
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/pseudo_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/pseudo_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c8346f4ae2c7db9719a70c7dc0244e088a9965b
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/pseudo_bbox_coder.py
@@ -0,0 +1,18 @@
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class PseudoBBoxCoder(BaseBBoxCoder):
+    """Pseudo bounding box coder."""
+
+    def __init__(self, **kwargs):
+        super(BaseBBoxCoder, self).__init__(**kwargs)
+
+    def encode(self, bboxes, gt_bboxes):
+        """torch.Tensor: return the given ``gt_bboxes``"""
+        return gt_bboxes
+
+    def decode(self, bboxes, pred_bboxes):
+        """torch.Tensor: return the given ``pred_bboxes``"""
+        return pred_bboxes
diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/tblr_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/tblr_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..436670b4c076eeed27ff137804d5006ed4fee343
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/tblr_bbox_coder.py
@@ -0,0 +1,172 @@
+import torch
+
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class TBLRBBoxCoder(BaseBBoxCoder):
+    """TBLR BBox coder.
+
+    Following the practice in `FSAF <https://arxiv.org/abs/1903.00621>`_,
+    this coder encodes gt bboxes (x1, y1, x2, y2) into (top, bottom, left,
+    right) and decodes them back to the original format.
+
+    Args:
+        normalizer (list | float): Normalization factor that the
+            coordinates are divided by when encoding. If it is a list, it
+            should have length 4, giving the normalization factor for each
+            tblr dim. Otherwise it is a unified float factor for all dims.
+            Default: 4.0
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+    """
+
+    def __init__(self, normalizer=4.0, clip_border=True):
+        super(BaseBBoxCoder, self).__init__()
+        self.normalizer = normalizer
+        self.clip_border = clip_border
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes`` in the (top, bottom,
+        left, right) order.
+
+        Args:
+            bboxes (torch.Tensor): source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): target of the transformation, e.g.,
+                ground truth boxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = bboxes2tblr(
+            bboxes, gt_bboxes, normalizer=self.normalizer)
+        return encoded_bboxes
+
+    def decode(self, bboxes, pred_bboxes, max_shape=None):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            boxes (torch.Tensor): Basic boxes.
+            pred_bboxes (torch.Tensor): Encoded boxes with shape (N, 4).
+            max_shape (tuple[int], optional): Maximum shape of boxes.
+                Defaults to None.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        assert pred_bboxes.size(0) == bboxes.size(0)
+        decoded_bboxes = tblr2bboxes(
+            bboxes,
+            pred_bboxes,
+            normalizer=self.normalizer,
+            max_shape=max_shape,
+            clip_border=self.clip_border)
+
+        return decoded_bboxes
+
+
+def bboxes2tblr(priors, gts, normalizer=4.0, normalize_by_wh=True):
+    """Encode ground truth boxes to tblr coordinates.
+
+    It first converts the gt coordinates to the tblr format,
+    (top, bottom, left, right), relative to prior box centers.
+    The tblr coordinates may be normalized by the side lengths of the prior
+    bboxes if `normalize_by_wh` is specified as True, and they are then
+    normalized by the `normalizer` factor.
+
+    Args:
+        priors (Tensor): Prior boxes in point form
+            Shape: (num_proposals, 4).
+        gts (Tensor): Coords of ground truth for each prior in point-form
+            Shape: (num_proposals, 4).
+        normalizer (Sequence[float] | float): normalization parameter of
+            encoded boxes. If it is a list, it has to have length = 4.
+            Default: 4.0
+        normalize_by_wh (bool): Whether to normalize tblr coordinates by the
+            side lengths (wh) of prior bboxes.
+
+    Return:
+        encoded boxes (Tensor), Shape: (num_proposals, 4)
+    """
+
+    # dist b/t match center and prior's center
+    if not isinstance(normalizer, float):
+        normalizer = torch.tensor(normalizer, device=priors.device)
+        assert len(normalizer) == 4, 'Normalizer must have length = 4'
+    assert priors.size(0) == gts.size(0)
+    prior_centers = (priors[:, 0:2] + priors[:, 2:4]) / 2
+    xmin, ymin, xmax, ymax = gts.split(1, dim=1)
+    top = prior_centers[:, 1].unsqueeze(1) - ymin
+    bottom = ymax - prior_centers[:, 1].unsqueeze(1)
+    left = prior_centers[:, 0].unsqueeze(1) - xmin
+    right = xmax - prior_centers[:, 0].unsqueeze(1)
+    loc = torch.cat((top, bottom, left, right), dim=1)
+    if normalize_by_wh:
+        # Normalize tblr by anchor width and height
+        wh = priors[:, 2:4] - priors[:, 0:2]
+        w, h = torch.split(wh, 1, dim=1)
+        loc[:, :2] /= h  # tb is normalized by h
+        loc[:, 2:] /= w  # lr is normalized by w
+    # Normalize tblr by the given normalization factor
+    return loc / normalizer
+
+
+def tblr2bboxes(priors,
+                tblr,
+                normalizer=4.0,
+                normalize_by_wh=True,
+                max_shape=None,
+                clip_border=True):
+    """Decode tblr outputs to prediction boxes.
+
+    The process includes 3 steps: 1) De-normalize tblr coordinates by
+    multiplying them by `normalizer`; 2) De-normalize tblr coordinates by the
+    prior bbox width and height if `normalize_by_wh` is `True`; 3) Convert
+    tblr (top, bottom, left, right) pairs relative to the center of priors
+    back to (xmin, ymin, xmax, ymax) coordinates.
+
+    Args:
+        priors (Tensor): Prior boxes in point form (x0, y0, x1, y1)
+            Shape: (n, 4).
+        tblr (Tensor): Coords of network output in tblr form
+            Shape: (n, 4).
+        normalizer (Sequence[float] | float): Normalization parameter of
+            encoded boxes. As a list, it gives the normalization factor for
+            each tblr dim. As a float, it is the unified normalization factor
+            for all dims. Default: 4.0
+        normalize_by_wh (bool): Whether the tblr coordinates have been
+            normalized by the side lengths (wh) of prior bboxes.
+        max_shape (tuple, optional): Shape of the image. Decoded bboxes
+            exceeding it will be clamped.
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+ + Return: + encoded boxes (Tensor), Shape: (n, 4) + """ + if not isinstance(normalizer, float): + normalizer = torch.tensor(normalizer, device=priors.device) + assert len(normalizer) == 4, 'Normalizer must have length = 4' + assert priors.size(0) == tblr.size(0) + loc_decode = tblr * normalizer + prior_centers = (priors[:, 0:2] + priors[:, 2:4]) / 2 + if normalize_by_wh: + wh = priors[:, 2:4] - priors[:, 0:2] + w, h = torch.split(wh, 1, dim=1) + loc_decode[:, :2] *= h # tb + loc_decode[:, 2:] *= w # lr + top, bottom, left, right = loc_decode.split((1, 1, 1, 1), dim=1) + xmin = prior_centers[:, 0].unsqueeze(1) - left + xmax = prior_centers[:, 0].unsqueeze(1) + right + ymin = prior_centers[:, 1].unsqueeze(1) - top + ymax = prior_centers[:, 1].unsqueeze(1) + bottom + boxes = torch.cat((xmin, ymin, xmax, ymax), dim=1) + if clip_border and max_shape is not None: + boxes[:, 0].clamp_(min=0, max=max_shape[1]) + boxes[:, 1].clamp_(min=0, max=max_shape[0]) + boxes[:, 2].clamp_(min=0, max=max_shape[1]) + boxes[:, 3].clamp_(min=0, max=max_shape[0]) + return boxes diff --git a/thirdparty/mmdetection/mmdet/core/bbox/coder/yolo_bbox_coder.py b/thirdparty/mmdetection/mmdet/core/bbox/coder/yolo_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..2a1dc34fcd3d3920f93ffc908322d440cb7e950a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/coder/yolo_bbox_coder.py @@ -0,0 +1,86 @@ +import torch + +from ..builder import BBOX_CODERS +from .base_bbox_coder import BaseBBoxCoder + + +@BBOX_CODERS.register_module() +class YOLOBBoxCoder(BaseBBoxCoder): + """YOLO BBox coder. + + Following `YOLO `_, this coder divide + image into grids, and encode bbox (x1, y1, x2, y2) into (cx, cy, dw, dh). + cx, cy in [0., 1.], denotes relative center position w.r.t the center of + bboxes. dw, dh are the same as :obj:`DeltaXYWHBBoxCoder`. + + Args: + eps (float): Min value of cx, cy when encoding. + """ + + def __init__(self, eps=1e-6): + super(BaseBBoxCoder, self).__init__() + self.eps = eps + + def encode(self, bboxes, gt_bboxes, stride): + """Get box regression transformation deltas that can be used to + transform the ``bboxes`` into the ``gt_bboxes``. + + Args: + bboxes (torch.Tensor): Source boxes, e.g., anchors. + gt_bboxes (torch.Tensor): Target of the transformation, e.g., + ground-truth boxes. + stride (torch.Tensor | int): Stride of bboxes. + + Returns: + torch.Tensor: Box transformation deltas + """ + + assert bboxes.size(0) == gt_bboxes.size(0) + assert bboxes.size(-1) == gt_bboxes.size(-1) == 4 + x_center_gt = (gt_bboxes[..., 0] + gt_bboxes[..., 2]) * 0.5 + y_center_gt = (gt_bboxes[..., 1] + gt_bboxes[..., 3]) * 0.5 + w_gt = gt_bboxes[..., 2] - gt_bboxes[..., 0] + h_gt = gt_bboxes[..., 3] - gt_bboxes[..., 1] + x_center = (bboxes[..., 0] + bboxes[..., 2]) * 0.5 + y_center = (bboxes[..., 1] + bboxes[..., 3]) * 0.5 + w = bboxes[..., 2] - bboxes[..., 0] + h = bboxes[..., 3] - bboxes[..., 1] + w_target = torch.log((w_gt / w).clamp(min=self.eps)) + h_target = torch.log((h_gt / h).clamp(min=self.eps)) + x_center_target = ((x_center_gt - x_center) / stride + 0.5).clamp( + self.eps, 1 - self.eps) + y_center_target = ((y_center_gt - y_center) / stride + 0.5).clamp( + self.eps, 1 - self.eps) + encoded_bboxes = torch.stack( + [x_center_target, y_center_target, w_target, h_target], dim=-1) + return encoded_bboxes + + def decode(self, bboxes, pred_bboxes, stride): + """Apply transformation `pred_bboxes` to `boxes`. + + Args: + boxes (torch.Tensor): Basic boxes, e.g. anchors. 
+ pred_bboxes (torch.Tensor): Encoded boxes with shape + stride (torch.Tensor | int): Strides of bboxes. + + Returns: + torch.Tensor: Decoded boxes. + """ + assert pred_bboxes.size(0) == bboxes.size(0) + assert pred_bboxes.size(-1) == bboxes.size(-1) == 4 + x_center = (bboxes[..., 0] + bboxes[..., 2]) * 0.5 + y_center = (bboxes[..., 1] + bboxes[..., 3]) * 0.5 + w = bboxes[..., 2] - bboxes[..., 0] + h = bboxes[..., 3] - bboxes[..., 1] + # Get outputs x, y + x_center_pred = (pred_bboxes[..., 0] - 0.5) * stride + x_center + y_center_pred = (pred_bboxes[..., 1] - 0.5) * stride + y_center + w_pred = torch.exp(pred_bboxes[..., 2]) * w + h_pred = torch.exp(pred_bboxes[..., 3]) * h + + decoded_bboxes = torch.stack( + (x_center_pred - w_pred / 2, y_center_pred - h_pred / 2, + x_center_pred + w_pred / 2, y_center_pred + h_pred / 2), + dim=-1) + + return decoded_bboxes diff --git a/thirdparty/mmdetection/mmdet/core/bbox/demodata.py b/thirdparty/mmdetection/mmdet/core/bbox/demodata.py new file mode 100644 index 0000000000000000000000000000000000000000..9430858597bcf322917a7a853099334f6e31624c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/demodata.py @@ -0,0 +1,63 @@ +import numpy as np +import torch + + +def ensure_rng(rng=None): + """Simple version of the ``kwarray.ensure_rng`` + + Args: + rng (int | numpy.random.RandomState | None): + if None, then defaults to the global rng. Otherwise this can be an + integer or a RandomState class + Returns: + (numpy.random.RandomState) : rng - + a numpy random number generator + + References: + https://gitlab.kitware.com/computer-vision/kwarray/blob/master/kwarray/util_random.py#L270 + """ + + if rng is None: + rng = np.random.mtrand._rand + elif isinstance(rng, int): + rng = np.random.RandomState(rng) + else: + rng = rng + return rng + + +def random_boxes(num=1, scale=1, rng=None): + """Simple version of ``kwimage.Boxes.random`` + + Returns: + Tensor: shape (n, 4) in x1, y1, x2, y2 format. 
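# Illustrative sketch for YOLOBBoxCoder above (made-up numbers): encode maps a
# GT box to a grid-relative (cx, cy) in [0, 1] plus log-space (dw, dh), and
# decode inverts it.
import torch

coder = YOLOBBoxCoder()
anchors = torch.tensor([[0., 0., 32., 32.]])
gt = torch.tensor([[4., 6., 28., 26.]])
deltas = coder.encode(anchors, gt, stride=32)  # [0.5, 0.5, log(0.75), log(0.625)]
decoded = coder.decode(anchors, deltas, stride=32)
assert torch.allclose(decoded, gt, atol=1e-4)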
+ + References: + https://gitlab.kitware.com/computer-vision/kwimage/blob/master/kwimage/structs/boxes.py#L1390 + + Example: + >>> num = 3 + >>> scale = 512 + >>> rng = 0 + >>> boxes = random_boxes(num, scale, rng) + >>> print(boxes) + tensor([[280.9925, 278.9802, 308.6148, 366.1769], + [216.9113, 330.6978, 224.0446, 456.5878], + [405.3632, 196.3221, 493.3953, 270.7942]]) + """ + rng = ensure_rng(rng) + + tlbr = rng.rand(num, 4).astype(np.float32) + + tl_x = np.minimum(tlbr[:, 0], tlbr[:, 2]) + tl_y = np.minimum(tlbr[:, 1], tlbr[:, 3]) + br_x = np.maximum(tlbr[:, 0], tlbr[:, 2]) + br_y = np.maximum(tlbr[:, 1], tlbr[:, 3]) + + tlbr[:, 0] = tl_x * scale + tlbr[:, 1] = tl_y * scale + tlbr[:, 2] = br_x * scale + tlbr[:, 3] = br_y * scale + + boxes = torch.from_numpy(tlbr) + return boxes diff --git a/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/__init__.py b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e71369a58a05fa25e6a754300875fdbb87cb26a5 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/__init__.py @@ -0,0 +1,4 @@ +from .builder import build_iou_calculator +from .iou2d_calculator import BboxOverlaps2D, bbox_overlaps + +__all__ = ['build_iou_calculator', 'BboxOverlaps2D', 'bbox_overlaps'] diff --git a/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/builder.py b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..09094d7ece46a9f18a28ed0960feac2afa9331bb --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/builder.py @@ -0,0 +1,8 @@ +from mmcv.utils import Registry, build_from_cfg + +IOU_CALCULATORS = Registry('IoU calculator') + + +def build_iou_calculator(cfg, default_args=None): + """Builder of IoU calculator.""" + return build_from_cfg(cfg, IOU_CALCULATORS, default_args) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/iou2d_calculator.py b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/iou2d_calculator.py new file mode 100644 index 0000000000000000000000000000000000000000..dd5cd56e7007826b351aa4c30746f9f2c6d55f39 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/iou_calculators/iou2d_calculator.py @@ -0,0 +1,158 @@ +import torch + +from .builder import IOU_CALCULATORS + + +@IOU_CALCULATORS.register_module() +class BboxOverlaps2D(object): + """2D Overlaps (e.g. IoUs, GIoUs) Calculator.""" + + def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False): + """Calculate IoU between 2D bboxes. + + Args: + bboxes1 (Tensor): bboxes have shape (m, 4) in + format, or shape (m, 5) in format. + bboxes2 (Tensor): bboxes have shape (m, 4) in + format, shape (m, 5) in format, or be + empty. If ``is_aligned `` is ``True``, then m and n must be + equal. + mode (str): "iou" (intersection over union), "iof" (intersection + over foreground), or "giou" (generalized intersection over + union). + is_aligned (bool, optional): If True, then m and n must be equal. + Default False. 
+ + Returns: + Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) + """ + assert bboxes1.size(-1) in [0, 4, 5] + assert bboxes2.size(-1) in [0, 4, 5] + if bboxes2.size(-1) == 5: + bboxes2 = bboxes2[..., :4] + if bboxes1.size(-1) == 5: + bboxes1 = bboxes1[..., :4] + return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned) + + def __repr__(self): + """str: a string describing the module""" + repr_str = self.__class__.__name__ + '()' + return repr_str + + +def bbox_overlaps(bboxes1, bboxes2, mode='iou', is_aligned=False, eps=1e-6): + """Calculate overlap between two set of bboxes. + + If ``is_aligned `` is ``False``, then calculate the overlaps between each + bbox of bboxes1 and bboxes2, otherwise the overlaps between each aligned + pair of bboxes1 and bboxes2. + + Args: + bboxes1 (Tensor): shape (B, m, 4) in format or empty. + bboxes2 (Tensor): shape (B, n, 4) in format or empty. + B indicates the batch dim, in shape (B1, B2, ..., Bn). + If ``is_aligned `` is ``True``, then m and n must be equal. + mode (str): "iou" (intersection over union) or "iof" (intersection over + foreground). + is_aligned (bool, optional): If True, then m and n must be equal. + Default False. + eps (float, optional): A value added to the denominator for numerical + stability. Default 1e-6. + + Returns: + Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) + + Example: + >>> bboxes1 = torch.FloatTensor([ + >>> [0, 0, 10, 10], + >>> [10, 10, 20, 20], + >>> [32, 32, 38, 42], + >>> ]) + >>> bboxes2 = torch.FloatTensor([ + >>> [0, 0, 10, 20], + >>> [0, 10, 10, 19], + >>> [10, 10, 20, 20], + >>> ]) + >>> overlaps = bbox_overlaps(bboxes1, bboxes2) + >>> assert overlaps.shape == (3, 3) + >>> overlaps = bbox_overlaps(bboxes1, bboxes2, is_aligned=True) + >>> assert overlaps.shape == (3, ) + + Example: + >>> empty = torch.empty(0, 4) + >>> nonempty = torch.FloatTensor([[0, 0, 10, 9]]) + >>> assert tuple(bbox_overlaps(empty, nonempty).shape) == (0, 1) + >>> assert tuple(bbox_overlaps(nonempty, empty).shape) == (1, 0) + >>> assert tuple(bbox_overlaps(empty, empty).shape) == (0, 0) + """ + + assert mode in ['iou', 'iof', 'giou'], f'Unsupported mode {mode}' + # Either the boxes are empty or the length of boxes's last dimenstion is 4 + assert (bboxes1.size(-1) == 4 or bboxes1.size(0) == 0) + assert (bboxes2.size(-1) == 4 or bboxes2.size(0) == 0) + + # Batch dim must be the same + # Batch dim: (B1, B2, ... 
Bn) + assert bboxes1.shape[:-2] == bboxes2.shape[:-2] + batch_shape = bboxes1.shape[:-2] + + rows = bboxes1.size(-2) + cols = bboxes2.size(-2) + if is_aligned: + assert rows == cols + + if rows * cols == 0: + if is_aligned: + return bboxes1.new(batch_shape + (rows, )) + else: + return bboxes1.new(batch_shape + (rows, cols)) + + area1 = (bboxes1[..., 2] - bboxes1[..., 0]) * ( + bboxes1[..., 3] - bboxes1[..., 1]) + area2 = (bboxes2[..., 2] - bboxes2[..., 0]) * ( + bboxes2[..., 3] - bboxes2[..., 1]) + + if is_aligned: + lt = torch.max(bboxes1[..., :2], bboxes2[..., :2]) # [B, rows, 2] + rb = torch.min(bboxes1[..., 2:], bboxes2[..., 2:]) # [B, rows, 2] + + wh = (rb - lt).clamp(min=0) # [B, rows, 2] + overlap = wh[..., 0] * wh[..., 1] + + if mode in ['iou', 'giou']: + union = area1 + area2 - overlap + else: + union = area1 + if mode == 'giou': + enclosed_lt = torch.min(bboxes1[..., :2], bboxes2[..., :2]) + enclosed_rb = torch.max(bboxes1[..., 2:], bboxes2[..., 2:]) + else: + lt = torch.max(bboxes1[..., :, None, :2], + bboxes2[..., None, :, :2]) # [B, rows, cols, 2] + rb = torch.min(bboxes1[..., :, None, 2:], + bboxes2[..., None, :, 2:]) # [B, rows, cols, 2] + + wh = (rb - lt).clamp(min=0) # [B, rows, cols, 2] + overlap = wh[..., 0] * wh[..., 1] + + if mode in ['iou', 'giou']: + union = area1[..., None] + area2[..., None, :] - overlap + else: + union = area1[..., None] + if mode == 'giou': + enclosed_lt = torch.min(bboxes1[..., :, None, :2], + bboxes2[..., None, :, :2]) + enclosed_rb = torch.max(bboxes1[..., :, None, 2:], + bboxes2[..., None, :, 2:]) + + eps = union.new_tensor([eps]) + union = torch.max(union, eps) + ious = overlap / union + if mode in ['iou', 'iof']: + return ious + # calculate gious + enclose_wh = (enclosed_rb - enclosed_lt).clamp(min=0) + enclose_area = enclose_wh[..., 0] * enclose_wh[..., 1] + enclose_area = torch.max(enclose_area, eps) + gious = ious - (enclose_area - union) / enclose_area + return gious diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/__init__.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0b06303fe1000e11c5486c40c70606a34a5208e3 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/__init__.py @@ -0,0 +1,15 @@ +from .base_sampler import BaseSampler +from .combined_sampler import CombinedSampler +from .instance_balanced_pos_sampler import InstanceBalancedPosSampler +from .iou_balanced_neg_sampler import IoUBalancedNegSampler +from .ohem_sampler import OHEMSampler +from .pseudo_sampler import PseudoSampler +from .random_sampler import RandomSampler +from .sampling_result import SamplingResult +from .score_hlr_sampler import ScoreHLRSampler + +__all__ = [ + 'BaseSampler', 'PseudoSampler', 'RandomSampler', + 'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler', + 'OHEMSampler', 'SamplingResult', 'ScoreHLRSampler' +] diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/base_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/base_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..9ea35def115b49dfdad8a1f7c040ef3cd983b0d1 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/base_sampler.py @@ -0,0 +1,101 @@ +from abc import ABCMeta, abstractmethod + +import torch + +from .sampling_result import SamplingResult + + +class BaseSampler(metaclass=ABCMeta): + """Base class of samplers.""" + + def __init__(self, + num, + pos_fraction, + neg_pos_ub=-1, + add_gt_as_proposals=True, 
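# Worked example for bbox_overlaps above: for the disjoint boxes
# [0, 0, 10, 10] and [20, 20, 30, 30], overlap = 0 and union = 200, so
# mode='iou' gives 0.0, while the smallest enclosing box has area 900 and
# mode='giou' gives 0 - (900 - 200) / 900 ≈ -0.78; disjoint pairs are thus
# still ranked by how far apart they are.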
+ **kwargs): + self.num = num + self.pos_fraction = pos_fraction + self.neg_pos_ub = neg_pos_ub + self.add_gt_as_proposals = add_gt_as_proposals + self.pos_sampler = self + self.neg_sampler = self + + @abstractmethod + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Sample positive samples.""" + pass + + @abstractmethod + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Sample negative samples.""" + pass + + def sample(self, + assign_result, + bboxes, + gt_bboxes, + gt_labels=None, + **kwargs): + """Sample positive and negative bboxes. + + This is a simple implementation of bbox sampling given candidates, + assigning results and ground truth bboxes. + + Args: + assign_result (:obj:`AssignResult`): Bbox assigning results. + bboxes (Tensor): Boxes to be sampled from. + gt_bboxes (Tensor): Ground truth bboxes. + gt_labels (Tensor, optional): Class labels of ground truth bboxes. + + Returns: + :obj:`SamplingResult`: Sampling result. + + Example: + >>> from mmdet.core.bbox import RandomSampler + >>> from mmdet.core.bbox import AssignResult + >>> from mmdet.core.bbox.demodata import ensure_rng, random_boxes + >>> rng = ensure_rng(None) + >>> assign_result = AssignResult.random(rng=rng) + >>> bboxes = random_boxes(assign_result.num_preds, rng=rng) + >>> gt_bboxes = random_boxes(assign_result.num_gts, rng=rng) + >>> gt_labels = None + >>> self = RandomSampler(num=32, pos_fraction=0.5, neg_pos_ub=-1, + >>> add_gt_as_proposals=False) + >>> self = self.sample(assign_result, bboxes, gt_bboxes, gt_labels) + """ + if len(bboxes.shape) < 2: + bboxes = bboxes[None, :] + + bboxes = bboxes[:, :4] + + gt_flags = bboxes.new_zeros((bboxes.shape[0], ), dtype=torch.uint8) + if self.add_gt_as_proposals and len(gt_bboxes) > 0: + if gt_labels is None: + raise ValueError( + 'gt_labels must be given when add_gt_as_proposals is True') + bboxes = torch.cat([gt_bboxes, bboxes], dim=0) + assign_result.add_gt_(gt_labels) + gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8) + gt_flags = torch.cat([gt_ones, gt_flags]) + + num_expected_pos = int(self.num * self.pos_fraction) + pos_inds = self.pos_sampler._sample_pos( + assign_result, num_expected_pos, bboxes=bboxes, **kwargs) + # We found that sampled indices have duplicated items occasionally. 
+ # (may be a bug of PyTorch) + pos_inds = pos_inds.unique() + num_sampled_pos = pos_inds.numel() + num_expected_neg = self.num - num_sampled_pos + if self.neg_pos_ub >= 0: + _pos = max(1, num_sampled_pos) + neg_upper_bound = int(self.neg_pos_ub * _pos) + if num_expected_neg > neg_upper_bound: + num_expected_neg = neg_upper_bound + neg_inds = self.neg_sampler._sample_neg( + assign_result, num_expected_neg, bboxes=bboxes, **kwargs) + neg_inds = neg_inds.unique() + + sampling_result = SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags) + return sampling_result diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/combined_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/combined_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..564729f0895b1863d94c479a67202438af45f996 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/combined_sampler.py @@ -0,0 +1,20 @@ +from ..builder import BBOX_SAMPLERS, build_sampler +from .base_sampler import BaseSampler + + +@BBOX_SAMPLERS.register_module() +class CombinedSampler(BaseSampler): + """A sampler that combines positive sampler and negative sampler.""" + + def __init__(self, pos_sampler, neg_sampler, **kwargs): + super(CombinedSampler, self).__init__(**kwargs) + self.pos_sampler = build_sampler(pos_sampler, **kwargs) + self.neg_sampler = build_sampler(neg_sampler, **kwargs) + + def _sample_pos(self, **kwargs): + """Sample positive samples.""" + raise NotImplementedError + + def _sample_neg(self, **kwargs): + """Sample negative samples.""" + raise NotImplementedError diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..c735298487e14e4a0ec42913f25673cccb98a8a0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py @@ -0,0 +1,55 @@ +import numpy as np +import torch + +from ..builder import BBOX_SAMPLERS +from .random_sampler import RandomSampler + + +@BBOX_SAMPLERS.register_module() +class InstanceBalancedPosSampler(RandomSampler): + """Instance balanced sampler that samples equal number of positive samples + for each instance.""" + + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Sample positive boxes. + + Args: + assign_result (:obj:`AssignResult`): The assigned results of boxes. + num_expected (int): The number of expected positive samples + + Returns: + Tensor or ndarray: sampled indices. 
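# Worked example of the sampling budget in BaseSampler.sample above
# (illustrative numbers): with num=256 and pos_fraction=0.25,
# num_expected_pos = 64. If only 10 positives survive deduplication and
# neg_pos_ub=3, the negative budget becomes min(256 - 10, 3 * 10) = 30.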
+ """ + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + unique_gt_inds = assign_result.gt_inds[pos_inds].unique() + num_gts = len(unique_gt_inds) + num_per_gt = int(round(num_expected / float(num_gts)) + 1) + sampled_inds = [] + for i in unique_gt_inds: + inds = torch.nonzero( + assign_result.gt_inds == i.item(), as_tuple=False) + if inds.numel() != 0: + inds = inds.squeeze(1) + else: + continue + if len(inds) > num_per_gt: + inds = self.random_choice(inds, num_per_gt) + sampled_inds.append(inds) + sampled_inds = torch.cat(sampled_inds) + if len(sampled_inds) < num_expected: + num_extra = num_expected - len(sampled_inds) + extra_inds = np.array( + list(set(pos_inds.cpu()) - set(sampled_inds.cpu()))) + if len(extra_inds) > num_extra: + extra_inds = self.random_choice(extra_inds, num_extra) + extra_inds = torch.from_numpy(extra_inds).to( + assign_result.gt_inds.device).long() + sampled_inds = torch.cat([sampled_inds, extra_inds]) + elif len(sampled_inds) > num_expected: + sampled_inds = self.random_choice(sampled_inds, num_expected) + return sampled_inds diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..f275e430d1b57c4d9df57387b8f3ae6f0ff68cf1 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py @@ -0,0 +1,157 @@ +import numpy as np +import torch + +from ..builder import BBOX_SAMPLERS +from .random_sampler import RandomSampler + + +@BBOX_SAMPLERS.register_module() +class IoUBalancedNegSampler(RandomSampler): + """IoU Balanced Sampling. + + arXiv: https://arxiv.org/pdf/1904.02701.pdf (CVPR 2019) + + Sampling proposals according to their IoU. `floor_fraction` of needed RoIs + are sampled from proposals whose IoU are lower than `floor_thr` randomly. + The others are sampled from proposals whose IoU are higher than + `floor_thr`. These proposals are sampled from some bins evenly, which are + split by `num_bins` via IoU evenly. + + Args: + num (int): number of proposals. + pos_fraction (float): fraction of positive proposals. + floor_thr (float): threshold (minimum) IoU for IoU balanced sampling, + set to -1 if all using IoU balanced sampling. + floor_fraction (float): sampling fraction of proposals under floor_thr. + num_bins (int): number of bins in IoU balanced sampling. + """ + + def __init__(self, + num, + pos_fraction, + floor_thr=-1, + floor_fraction=0, + num_bins=3, + **kwargs): + super(IoUBalancedNegSampler, self).__init__(num, pos_fraction, + **kwargs) + assert floor_thr >= 0 or floor_thr == -1 + assert 0 <= floor_fraction <= 1 + assert num_bins >= 1 + + self.floor_thr = floor_thr + self.floor_fraction = floor_fraction + self.num_bins = num_bins + + def sample_via_interval(self, max_overlaps, full_set, num_expected): + """Sample according to the iou interval. + + Args: + max_overlaps (torch.Tensor): IoU between bounding boxes and ground + truth boxes. 
+ full_set (set(int)): A full set of indices of boxes. + num_expected (int): Number of expected samples. + + Returns: + np.ndarray: Indices of samples + """ + max_iou = max_overlaps.max() + iou_interval = (max_iou - self.floor_thr) / self.num_bins + per_num_expected = int(num_expected / self.num_bins) + + sampled_inds = [] + for i in range(self.num_bins): + start_iou = self.floor_thr + i * iou_interval + end_iou = self.floor_thr + (i + 1) * iou_interval + tmp_set = set( + np.where( + np.logical_and(max_overlaps >= start_iou, + max_overlaps < end_iou))[0]) + tmp_inds = list(tmp_set & full_set) + if len(tmp_inds) > per_num_expected: + tmp_sampled_set = self.random_choice(tmp_inds, + per_num_expected) + else: + tmp_sampled_set = np.array(tmp_inds, dtype=np.int64) + sampled_inds.append(tmp_sampled_set) + + sampled_inds = np.concatenate(sampled_inds) + if len(sampled_inds) < num_expected: + num_extra = num_expected - len(sampled_inds) + extra_inds = np.array(list(full_set - set(sampled_inds))) + if len(extra_inds) > num_extra: + extra_inds = self.random_choice(extra_inds, num_extra) + sampled_inds = np.concatenate([sampled_inds, extra_inds]) + + return sampled_inds + + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Sample negative boxes. + + Args: + assign_result (:obj:`AssignResult`): The assigned results of boxes. + num_expected (int): The number of expected negative samples + + Returns: + Tensor or ndarray: sampled indices. + """ + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + if neg_inds.numel() != 0: + neg_inds = neg_inds.squeeze(1) + if len(neg_inds) <= num_expected: + return neg_inds + else: + max_overlaps = assign_result.max_overlaps.cpu().numpy() + # balance sampling for negative samples + neg_set = set(neg_inds.cpu().numpy()) + + if self.floor_thr > 0: + floor_set = set( + np.where( + np.logical_and(max_overlaps >= 0, + max_overlaps < self.floor_thr))[0]) + iou_sampling_set = set( + np.where(max_overlaps >= self.floor_thr)[0]) + elif self.floor_thr == 0: + floor_set = set(np.where(max_overlaps == 0)[0]) + iou_sampling_set = set( + np.where(max_overlaps > self.floor_thr)[0]) + else: + floor_set = set() + iou_sampling_set = set( + np.where(max_overlaps > self.floor_thr)[0]) + # for sampling interval calculation + self.floor_thr = 0 + + floor_neg_inds = list(floor_set & neg_set) + iou_sampling_neg_inds = list(iou_sampling_set & neg_set) + num_expected_iou_sampling = int(num_expected * + (1 - self.floor_fraction)) + if len(iou_sampling_neg_inds) > num_expected_iou_sampling: + if self.num_bins >= 2: + iou_sampled_inds = self.sample_via_interval( + max_overlaps, set(iou_sampling_neg_inds), + num_expected_iou_sampling) + else: + iou_sampled_inds = self.random_choice( + iou_sampling_neg_inds, num_expected_iou_sampling) + else: + iou_sampled_inds = np.array( + iou_sampling_neg_inds, dtype=np.int64) + num_expected_floor = num_expected - len(iou_sampled_inds) + if len(floor_neg_inds) > num_expected_floor: + sampled_floor_inds = self.random_choice( + floor_neg_inds, num_expected_floor) + else: + sampled_floor_inds = np.array(floor_neg_inds, dtype=np.int64) + sampled_inds = np.concatenate( + (sampled_floor_inds, iou_sampled_inds)) + if len(sampled_inds) < num_expected: + num_extra = num_expected - len(sampled_inds) + extra_inds = np.array(list(neg_set - set(sampled_inds))) + if len(extra_inds) > num_extra: + extra_inds = self.random_choice(extra_inds, num_extra) + sampled_inds = np.concatenate((sampled_inds, extra_inds)) + sampled_inds = 
torch.from_numpy(sampled_inds).long().to( + assign_result.gt_inds.device) + return sampled_inds diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/ohem_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/ohem_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..8b99f60ef0176f1b7a56665fb0f59272f65b84cd --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/ohem_sampler.py @@ -0,0 +1,107 @@ +import torch + +from ..builder import BBOX_SAMPLERS +from ..transforms import bbox2roi +from .base_sampler import BaseSampler + + +@BBOX_SAMPLERS.register_module() +class OHEMSampler(BaseSampler): + r"""Online Hard Example Mining Sampler described in `Training Region-based + Object Detectors with Online Hard Example Mining + `_. + """ + + def __init__(self, + num, + pos_fraction, + context, + neg_pos_ub=-1, + add_gt_as_proposals=True, + **kwargs): + super(OHEMSampler, self).__init__(num, pos_fraction, neg_pos_ub, + add_gt_as_proposals) + self.context = context + if not hasattr(self.context, 'num_stages'): + self.bbox_head = self.context.bbox_head + else: + self.bbox_head = self.context.bbox_head[self.context.current_stage] + + def hard_mining(self, inds, num_expected, bboxes, labels, feats): + with torch.no_grad(): + rois = bbox2roi([bboxes]) + if not hasattr(self.context, 'num_stages'): + bbox_results = self.context._bbox_forward(feats, rois) + else: + bbox_results = self.context._bbox_forward( + self.context.current_stage, feats, rois) + cls_score = bbox_results['cls_score'] + loss = self.bbox_head.loss( + cls_score=cls_score, + bbox_pred=None, + rois=rois, + labels=labels, + label_weights=cls_score.new_ones(cls_score.size(0)), + bbox_targets=None, + bbox_weights=None, + reduction_override='none')['loss_cls'] + _, topk_loss_inds = loss.topk(num_expected) + return inds[topk_loss_inds] + + def _sample_pos(self, + assign_result, + num_expected, + bboxes=None, + feats=None, + **kwargs): + """Sample positive boxes. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + num_expected (int): Number of expected positive samples + bboxes (torch.Tensor, optional): Boxes. Defaults to None. + feats (list[torch.Tensor], optional): Multi-level features. + Defaults to None. + + Returns: + torch.Tensor: Indices of positive samples + """ + # Sample some hard positive samples + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + return self.hard_mining(pos_inds, num_expected, bboxes[pos_inds], + assign_result.labels[pos_inds], feats) + + def _sample_neg(self, + assign_result, + num_expected, + bboxes=None, + feats=None, + **kwargs): + """Sample negative boxes. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + num_expected (int): Number of expected negative samples + bboxes (torch.Tensor, optional): Boxes. Defaults to None. + feats (list[torch.Tensor], optional): Multi-level features. + Defaults to None. 
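# Worked example of IoUBalancedNegSampler.sample_via_interval above: with
# floor_thr=0.1, num_bins=3 and a maximum negative IoU of 0.7,
# iou_interval = 0.2 and the bins are [0.1, 0.3), [0.3, 0.5) and [0.5, 0.7);
# roughly num_expected / 3 negatives come from each bin, so hard (high-IoU)
# negatives are not drowned out by the many easy ones.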
+ + Returns: + torch.Tensor: Indices of negative samples + """ + # Sample some hard negative samples + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + if neg_inds.numel() != 0: + neg_inds = neg_inds.squeeze(1) + if len(neg_inds) <= num_expected: + return neg_inds + else: + neg_labels = assign_result.labels.new_empty( + neg_inds.size(0)).fill_(self.bbox_head.num_classes) + return self.hard_mining(neg_inds, num_expected, bboxes[neg_inds], + neg_labels, feats) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/pseudo_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/pseudo_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..2bd81abcdc62debc14772659d7a171f20bf33364 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/pseudo_sampler.py @@ -0,0 +1,41 @@ +import torch + +from ..builder import BBOX_SAMPLERS +from .base_sampler import BaseSampler +from .sampling_result import SamplingResult + + +@BBOX_SAMPLERS.register_module() +class PseudoSampler(BaseSampler): + """A pseudo sampler that does not do sampling actually.""" + + def __init__(self, **kwargs): + pass + + def _sample_pos(self, **kwargs): + """Sample positive samples.""" + raise NotImplementedError + + def _sample_neg(self, **kwargs): + """Sample negative samples.""" + raise NotImplementedError + + def sample(self, assign_result, bboxes, gt_bboxes, **kwargs): + """Directly returns the positive and negative indices of samples. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + bboxes (torch.Tensor): Bounding boxes + gt_bboxes (torch.Tensor): Ground truth boxes + + Returns: + :obj:`SamplingResult`: sampler results + """ + pos_inds = torch.nonzero( + assign_result.gt_inds > 0, as_tuple=False).squeeze(-1).unique() + neg_inds = torch.nonzero( + assign_result.gt_inds == 0, as_tuple=False).squeeze(-1).unique() + gt_flags = bboxes.new_zeros(bboxes.shape[0], dtype=torch.uint8) + sampling_result = SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags) + return sampling_result diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/random_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/random_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..f34b006e8bb0b55c74aa1c3b792f3664ada93162 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/random_sampler.py @@ -0,0 +1,78 @@ +import torch + +from ..builder import BBOX_SAMPLERS +from .base_sampler import BaseSampler + + +@BBOX_SAMPLERS.register_module() +class RandomSampler(BaseSampler): + """Random sampler. + + Args: + num (int): Number of samples + pos_fraction (float): Fraction of positive samples + neg_pos_up (int, optional): Upper bound number of negative and + positive samples. Defaults to -1. + add_gt_as_proposals (bool, optional): Whether to add ground truth + boxes as proposals. Defaults to True. + """ + + def __init__(self, + num, + pos_fraction, + neg_pos_ub=-1, + add_gt_as_proposals=True, + **kwargs): + from mmdet.core.bbox import demodata + super(RandomSampler, self).__init__(num, pos_fraction, neg_pos_ub, + add_gt_as_proposals) + self.rng = demodata.ensure_rng(kwargs.get('rng', None)) + + def random_choice(self, gallery, num): + """Random select some elements from the gallery. + + If `gallery` is a Tensor, the returned indices will be a Tensor; + If `gallery` is a ndarray or list, the returned indices will be a + ndarray. + + Args: + gallery (Tensor | ndarray | list): indices pool. 
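# Illustrative sketch for PseudoSampler above (made-up values): it keeps every
# assigned box with no subsampling, the usual choice for dense one-stage heads.
import torch
from mmdet.core.bbox.assigners import AssignResult

gt_inds = torch.tensor([0, 2, 0, 1])  # 0 = negative, >0 = 1-based GT index
assign_result = AssignResult(2, gt_inds, torch.rand(4), labels=None)
result = PseudoSampler().sample(assign_result, torch.rand(4, 4), torch.rand(2, 4))
# result.pos_inds -> tensor([1, 3]); result.neg_inds -> tensor([0, 2])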
+ num (int): expected sample num. + + Returns: + Tensor or ndarray: sampled indices. + """ + assert len(gallery) >= num + + is_tensor = isinstance(gallery, torch.Tensor) + if not is_tensor: + if torch.cuda.is_available(): + device = torch.cuda.current_device() + else: + device = 'cpu' + gallery = torch.tensor(gallery, dtype=torch.long, device=device) + perm = torch.randperm(gallery.numel(), device=gallery.device)[:num] + rand_inds = gallery[perm] + if not is_tensor: + rand_inds = rand_inds.cpu().numpy() + return rand_inds + + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Randomly sample some positive samples.""" + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + return self.random_choice(pos_inds, num_expected) + + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Randomly sample some negative samples.""" + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + if neg_inds.numel() != 0: + neg_inds = neg_inds.squeeze(1) + if len(neg_inds) <= num_expected: + return neg_inds + else: + return self.random_choice(neg_inds, num_expected) diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/sampling_result.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/sampling_result.py new file mode 100644 index 0000000000000000000000000000000000000000..419a8e39a3c307a7cd9cfd0565a20037ded0d646 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/sampling_result.py @@ -0,0 +1,152 @@ +import torch + +from mmdet.utils import util_mixins + + +class SamplingResult(util_mixins.NiceRepr): + """Bbox sampling result. + + Example: + >>> # xdoctest: +IGNORE_WANT + >>> from mmdet.core.bbox.samplers.sampling_result import * # NOQA + >>> self = SamplingResult.random(rng=10) + >>> print(f'self = {self}') + self = + """ + + def __init__(self, pos_inds, neg_inds, bboxes, gt_bboxes, assign_result, + gt_flags): + self.pos_inds = pos_inds + self.neg_inds = neg_inds + self.pos_bboxes = bboxes[pos_inds] + self.neg_bboxes = bboxes[neg_inds] + self.pos_is_gt = gt_flags[pos_inds] + + self.num_gts = gt_bboxes.shape[0] + self.pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_bboxes.numel() == 0: + # hack for index error case + assert self.pos_assigned_gt_inds.numel() == 0 + self.pos_gt_bboxes = torch.empty_like(gt_bboxes).view(-1, 4) + else: + if len(gt_bboxes.shape) < 2: + gt_bboxes = gt_bboxes.view(-1, 4) + + self.pos_gt_bboxes = gt_bboxes[self.pos_assigned_gt_inds, :] + + if assign_result.labels is not None: + self.pos_gt_labels = assign_result.labels[pos_inds] + else: + self.pos_gt_labels = None + + @property + def bboxes(self): + """torch.Tensor: concatenated positive and negative boxes""" + return torch.cat([self.pos_bboxes, self.neg_bboxes]) + + def to(self, device): + """Change the device of the data inplace. 
+ + Example: + >>> self = SamplingResult.random() + >>> print(f'self = {self.to(None)}') + >>> # xdoctest: +REQUIRES(--gpu) + >>> print(f'self = {self.to(0)}') + """ + _dict = self.__dict__ + for key, value in _dict.items(): + if isinstance(value, torch.Tensor): + _dict[key] = value.to(device) + return self + + def __nice__(self): + data = self.info.copy() + data['pos_bboxes'] = data.pop('pos_bboxes').shape + data['neg_bboxes'] = data.pop('neg_bboxes').shape + parts = [f"'{k}': {v!r}" for k, v in sorted(data.items())] + body = ' ' + ',\n '.join(parts) + return '{\n' + body + '\n}' + + @property + def info(self): + """Returns a dictionary of info about the object.""" + return { + 'pos_inds': self.pos_inds, + 'neg_inds': self.neg_inds, + 'pos_bboxes': self.pos_bboxes, + 'neg_bboxes': self.neg_bboxes, + 'pos_is_gt': self.pos_is_gt, + 'num_gts': self.num_gts, + 'pos_assigned_gt_inds': self.pos_assigned_gt_inds, + } + + @classmethod + def random(cls, rng=None, **kwargs): + """ + Args: + rng (None | int | numpy.random.RandomState): seed or state. + kwargs (keyword arguments): + - num_preds: number of predicted boxes + - num_gts: number of true boxes + - p_ignore (float): probability of a predicted box assigned to \ + an ignored truth. + - p_assigned (float): probability of a predicted box not being \ + assigned. + - p_use_label (float | bool): with labels or not. + + Returns: + :obj:`SamplingResult`: Randomly generated sampling result. + + Example: + >>> from mmdet.core.bbox.samplers.sampling_result import * # NOQA + >>> self = SamplingResult.random() + >>> print(self.__dict__) + """ + from mmdet.core.bbox.samplers.random_sampler import RandomSampler + from mmdet.core.bbox.assigners.assign_result import AssignResult + from mmdet.core.bbox import demodata + rng = demodata.ensure_rng(rng) + + # make probabilistic? + num = 32 + pos_fraction = 0.5 + neg_pos_ub = -1 + + assign_result = AssignResult.random(rng=rng, **kwargs) + + # Note we could just compute an assignment + bboxes = demodata.random_boxes(assign_result.num_preds, rng=rng) + gt_bboxes = demodata.random_boxes(assign_result.num_gts, rng=rng) + + if rng.rand() > 0.2: + # sometimes algorithms squeeze their data, be robust to that + gt_bboxes = gt_bboxes.squeeze() + bboxes = bboxes.squeeze() + + if assign_result.labels is None: + gt_labels = None + else: + gt_labels = None # todo + + if gt_labels is None: + add_gt_as_proposals = False + else: + add_gt_as_proposals = True # make probabilistic? + + sampler = RandomSampler( + num, + pos_fraction, + neg_pos_ub=neg_pos_ub, + add_gt_as_proposals=add_gt_as_proposals, + rng=rng) + self = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + return self diff --git a/thirdparty/mmdetection/mmdet/core/bbox/samplers/score_hlr_sampler.py b/thirdparty/mmdetection/mmdet/core/bbox/samplers/score_hlr_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..11d46b97705db60fb6a4eb5fa7da10ac78acb8bc --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/samplers/score_hlr_sampler.py @@ -0,0 +1,264 @@ +import torch +from mmcv.ops import nms_match + +from ..builder import BBOX_SAMPLERS + +from ..transforms import bbox2roi +from .base_sampler import BaseSampler +from .sampling_result import SamplingResult + + +@BBOX_SAMPLERS.register_module() +class ScoreHLRSampler(BaseSampler): + r"""Importance-based Sample Reweighting (ISR_N), described in `Prime Sample + Attention in Object Detection <https://arxiv.org/abs/1904.04821>`_. + + Score hierarchical local rank (HLR) differs from RandomSampler in the + negative part. 
It firstly computes Score-HLR in a two-step way, + then linearly maps score hlr to the loss weights. + + Args: + num (int): Total number of sampled RoIs. + pos_fraction (float): Fraction of positive samples. + context (:class:`BaseRoIHead`): RoI head that the sampler belongs to. + neg_pos_ub (int): Upper bound of the ratio of num negative to num + positive, -1 means no upper bound. + add_gt_as_proposals (bool): Whether to add ground truth as proposals. + k (float): Power of the non-linear mapping. + bias (float): Shift of the non-linear mapping. + score_thr (float): Minimum score that a negative sample is to be + considered as valid bbox. + """ + + def __init__(self, + num, + pos_fraction, + context, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0, + score_thr=0.05, + iou_thr=0.5, + **kwargs): + super().__init__(num, pos_fraction, neg_pos_ub, add_gt_as_proposals) + self.k = k + self.bias = bias + self.score_thr = score_thr + self.iou_thr = iou_thr + self.context = context + # context of cascade detectors is a list, so distinguish them here. + if not hasattr(context, 'num_stages'): + self.bbox_roi_extractor = context.bbox_roi_extractor + self.bbox_head = context.bbox_head + self.with_shared_head = context.with_shared_head + if self.with_shared_head: + self.shared_head = context.shared_head + else: + self.bbox_roi_extractor = context.bbox_roi_extractor[ + context.current_stage] + self.bbox_head = context.bbox_head[context.current_stage] + + @staticmethod + def random_choice(gallery, num): + """Randomly select some elements from the gallery. + + If `gallery` is a Tensor, the returned indices will be a Tensor; + If `gallery` is a ndarray or list, the returned indices will be a + ndarray. + + Args: + gallery (Tensor | ndarray | list): indices pool. + num (int): expected sample num. + + Returns: + Tensor or ndarray: sampled indices. + """ + assert len(gallery) >= num + + is_tensor = isinstance(gallery, torch.Tensor) + if not is_tensor: + if torch.cuda.is_available(): + device = torch.cuda.current_device() + else: + device = 'cpu' + gallery = torch.tensor(gallery, dtype=torch.long, device=device) + perm = torch.randperm(gallery.numel(), device=gallery.device)[:num] + rand_inds = gallery[perm] + if not is_tensor: + rand_inds = rand_inds.cpu().numpy() + return rand_inds + + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Randomly sample some positive samples.""" + pos_inds = torch.nonzero(assign_result.gt_inds > 0).flatten() + if pos_inds.numel() <= num_expected: + return pos_inds + else: + return self.random_choice(pos_inds, num_expected) + + def _sample_neg(self, + assign_result, + num_expected, + bboxes, + feats=None, + img_meta=None, + **kwargs): + """Sample negative samples. + + Score-HLR sampler is done in the following steps: + 1. Take the maximum positive score prediction of each negative samples + as s_i. + 2. Filter out negative samples whose s_i <= score_thr, the left samples + are called valid samples. + 3. Use NMS-Match to divide valid samples into different groups, + samples in the same group will greatly overlap with each other + 4. Rank the matched samples in two-steps to get Score-HLR. + (1) In the same group, rank samples with their scores. + (2) In the same score rank across different groups, + rank samples with their scores again. + 5. Linearly map Score-HLR to the final label weights. + + Args: + assign_result (:obj:`AssignResult`): result of assigner. + num_expected (int): Expected number of samples. + bboxes (Tensor): bbox to be sampled. 
+ feats (Tensor): Features come from FPN. + img_meta (dict): Meta information dictionary. + """ + neg_inds = torch.nonzero(assign_result.gt_inds == 0).flatten() + num_neg = neg_inds.size(0) + if num_neg == 0: + return neg_inds, None + with torch.no_grad(): + neg_bboxes = bboxes[neg_inds] + neg_rois = bbox2roi([neg_bboxes]) + bbox_result = self.context._bbox_forward(feats, neg_rois) + cls_score, bbox_pred = bbox_result['cls_score'], bbox_result[ + 'bbox_pred'] + + ori_loss = self.bbox_head.loss( + cls_score=cls_score, + bbox_pred=None, + rois=None, + labels=neg_inds.new_full((num_neg, ), + self.bbox_head.num_classes), + label_weights=cls_score.new_ones(num_neg), + bbox_targets=None, + bbox_weights=None, + reduction_override='none')['loss_cls'] + + # filter out samples with the max score lower than score_thr + max_score, argmax_score = cls_score.softmax(-1)[:, :-1].max(-1) + valid_inds = (max_score > self.score_thr).nonzero().view(-1) + invalid_inds = (max_score <= self.score_thr).nonzero().view(-1) + num_valid = valid_inds.size(0) + num_invalid = invalid_inds.size(0) + + num_expected = min(num_neg, num_expected) + num_hlr = min(num_valid, num_expected) + num_rand = num_expected - num_hlr + if num_valid > 0: + valid_rois = neg_rois[valid_inds] + valid_max_score = max_score[valid_inds] + valid_argmax_score = argmax_score[valid_inds] + valid_bbox_pred = bbox_pred[valid_inds] + + # valid_bbox_pred shape: [num_valid, #num_classes, 4] + valid_bbox_pred = valid_bbox_pred.view( + valid_bbox_pred.size(0), -1, 4) + selected_bbox_pred = valid_bbox_pred[range(num_valid), + valid_argmax_score] + pred_bboxes = self.bbox_head.bbox_coder.decode( + valid_rois[:, 1:], selected_bbox_pred) + pred_bboxes_with_score = torch.cat( + [pred_bboxes, valid_max_score[:, None]], -1) + group = nms_match(pred_bboxes_with_score, self.iou_thr) + + # imp: importance + imp = cls_score.new_zeros(num_valid) + for g in group: + g_score = valid_max_score[g] + # g_score has already sorted + rank = g_score.new_tensor(range(g_score.size(0))) + imp[g] = num_valid - rank + g_score + _, imp_rank_inds = imp.sort(descending=True) + _, imp_rank = imp_rank_inds.sort() + hlr_inds = imp_rank_inds[:num_expected] + + if num_rand > 0: + rand_inds = torch.randperm(num_invalid)[:num_rand] + select_inds = torch.cat( + [valid_inds[hlr_inds], invalid_inds[rand_inds]]) + else: + select_inds = valid_inds[hlr_inds] + + neg_label_weights = cls_score.new_ones(num_expected) + + up_bound = max(num_expected, num_valid) + imp_weights = (up_bound - + imp_rank[hlr_inds].float()) / up_bound + neg_label_weights[:num_hlr] = imp_weights + neg_label_weights[num_hlr:] = imp_weights.min() + neg_label_weights = (self.bias + + (1 - self.bias) * neg_label_weights).pow( + self.k) + ori_selected_loss = ori_loss[select_inds] + new_loss = ori_selected_loss * neg_label_weights + norm_ratio = ori_selected_loss.sum() / new_loss.sum() + neg_label_weights *= norm_ratio + else: + neg_label_weights = cls_score.new_ones(num_expected) + select_inds = torch.randperm(num_neg)[:num_expected] + + return neg_inds[select_inds], neg_label_weights + + def sample(self, + assign_result, + bboxes, + gt_bboxes, + gt_labels=None, + img_meta=None, + **kwargs): + """Sample positive and negative bboxes. + + This is a simple implementation of bbox sampling given candidates, + assigning results and ground truth bboxes. + + Args: + assign_result (:obj:`AssignResult`): Bbox assigning results. + bboxes (Tensor): Boxes to be sampled from. + gt_bboxes (Tensor): Ground truth bboxes. 
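# Worked example of the label-weight mapping above: with bias=0 and k=0.5, a
# rank-derived weight of 0.25 becomes (0 + (1 - 0) * 0.25) ** 0.5 = 0.5, so
# low-ranked negatives are softened rather than discarded; norm_ratio then
# rescales the weights so the summed classification loss is unchanged.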
+ gt_labels (Tensor, optional): Class labels of ground truth bboxes. + + Returns: + tuple[:obj:`SamplingResult`, Tensor]: Sampling result and negetive + label weights. + """ + bboxes = bboxes[:, :4] + + gt_flags = bboxes.new_zeros((bboxes.shape[0], ), dtype=torch.uint8) + if self.add_gt_as_proposals: + bboxes = torch.cat([gt_bboxes, bboxes], dim=0) + assign_result.add_gt_(gt_labels) + gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8) + gt_flags = torch.cat([gt_ones, gt_flags]) + + num_expected_pos = int(self.num * self.pos_fraction) + pos_inds = self.pos_sampler._sample_pos( + assign_result, num_expected_pos, bboxes=bboxes, **kwargs) + num_sampled_pos = pos_inds.numel() + num_expected_neg = self.num - num_sampled_pos + if self.neg_pos_ub >= 0: + _pos = max(1, num_sampled_pos) + neg_upper_bound = int(self.neg_pos_ub * _pos) + if num_expected_neg > neg_upper_bound: + num_expected_neg = neg_upper_bound + neg_inds, neg_label_weights = self.neg_sampler._sample_neg( + assign_result, + num_expected_neg, + bboxes, + img_meta=img_meta, + **kwargs) + + return SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags), neg_label_weights diff --git a/thirdparty/mmdetection/mmdet/core/bbox/transforms.py b/thirdparty/mmdetection/mmdet/core/bbox/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..102db0d1f382fd818ed7dd33fb6885b514032bb3 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/bbox/transforms.py @@ -0,0 +1,224 @@ +import numpy as np +import torch + + +def bbox_flip(bboxes, img_shape, direction='horizontal'): + """Flip bboxes horizontally or vertically. + + Args: + bboxes (Tensor): Shape (..., 4*k) + img_shape (tuple): Image shape. + direction (str): Flip direction, options are "horizontal", "vertical", + "diagonal". Default: "horizontal" + + Returns: + Tensor: Flipped bboxes. + """ + assert bboxes.shape[-1] % 4 == 0 + assert direction in ['horizontal', 'vertical', 'diagonal'] + flipped = bboxes.clone() + if direction == 'horizontal': + flipped[..., 0::4] = img_shape[1] - bboxes[..., 2::4] + flipped[..., 2::4] = img_shape[1] - bboxes[..., 0::4] + elif direction == 'vertical': + flipped[..., 1::4] = img_shape[0] - bboxes[..., 3::4] + flipped[..., 3::4] = img_shape[0] - bboxes[..., 1::4] + else: + flipped[..., 0::4] = img_shape[1] - bboxes[..., 2::4] + flipped[..., 1::4] = img_shape[0] - bboxes[..., 3::4] + flipped[..., 2::4] = img_shape[1] - bboxes[..., 0::4] + flipped[..., 3::4] = img_shape[0] - bboxes[..., 1::4] + return flipped + + +def bbox_mapping(bboxes, + img_shape, + scale_factor, + flip, + flip_direction='horizontal'): + """Map bboxes from the original image scale to testing scale.""" + new_bboxes = bboxes * bboxes.new_tensor(scale_factor) + if flip: + new_bboxes = bbox_flip(new_bboxes, img_shape, flip_direction) + return new_bboxes + + +def bbox_mapping_back(bboxes, + img_shape, + scale_factor, + flip, + flip_direction='horizontal'): + """Map bboxes from testing scale to original image scale.""" + new_bboxes = bbox_flip(bboxes, img_shape, + flip_direction) if flip else bboxes + new_bboxes = new_bboxes.view(-1, 4) / new_bboxes.new_tensor(scale_factor) + return new_bboxes.view(bboxes.shape) + + +def bbox2roi(bbox_list): + """Convert a list of bboxes to roi format. + + Args: + bbox_list (list[Tensor]): a list of bboxes corresponding to a batch + of images. 
+ + Returns: + Tensor: shape (n, 5), [batch_ind, x1, y1, x2, y2] + """ + rois_list = [] + for img_id, bboxes in enumerate(bbox_list): + if bboxes.size(0) > 0: + img_inds = bboxes.new_full((bboxes.size(0), 1), img_id) + rois = torch.cat([img_inds, bboxes[:, :4]], dim=-1) + else: + rois = bboxes.new_zeros((0, 5)) + rois_list.append(rois) + rois = torch.cat(rois_list, 0) + return rois + + +def roi2bbox(rois): + """Convert rois to bounding box format. + + Args: + rois (torch.Tensor): RoIs with the shape (n, 5) where the first + column indicates batch id of each RoI. + + Returns: + list[torch.Tensor]: Converted boxes of corresponding rois. + """ + bbox_list = [] + img_ids = torch.unique(rois[:, 0].cpu(), sorted=True) + for img_id in img_ids: + inds = (rois[:, 0] == img_id.item()) + bbox = rois[inds, 1:] + bbox_list.append(bbox) + return bbox_list + + +def bbox2result(bboxes, labels, num_classes): + """Convert detection results to a list of numpy arrays. + + Args: + bboxes (torch.Tensor | np.ndarray): shape (n, 5) + labels (torch.Tensor | np.ndarray): shape (n, ) + num_classes (int): class number, including background class + + Returns: + list(ndarray): bbox results of each class + """ + if bboxes.shape[0] == 0: + return [np.zeros((0, 5), dtype=np.float32) for i in range(num_classes)] + else: + if isinstance(bboxes, torch.Tensor): + bboxes = bboxes.detach().cpu().numpy() + labels = labels.detach().cpu().numpy() + return [bboxes[labels == i, :] for i in range(num_classes)] + + +def distance2bbox(points, distance, max_shape=None): + """Decode distance prediction to bounding box. + + Args: + points (Tensor): Shape (n, 2), [x, y]. + distance (Tensor): Distance from the given point to 4 + boundaries (left, top, right, bottom). + max_shape (tuple): Shape of the image. + + Returns: + Tensor: Decoded bboxes. + """ + x1 = points[:, 0] - distance[:, 0] + y1 = points[:, 1] - distance[:, 1] + x2 = points[:, 0] + distance[:, 2] + y2 = points[:, 1] + distance[:, 3] + if max_shape is not None: + x1 = x1.clamp(min=0, max=max_shape[1]) + y1 = y1.clamp(min=0, max=max_shape[0]) + x2 = x2.clamp(min=0, max=max_shape[1]) + y2 = y2.clamp(min=0, max=max_shape[0]) + return torch.stack([x1, y1, x2, y2], -1) + + +def bbox2distance(points, bbox, max_dis=None, eps=0.1): + """Decode bounding box based on distances. + + Args: + points (Tensor): Shape (n, 2), [x, y]. + bbox (Tensor): Shape (n, 4), "xyxy" format + max_dis (float): Upper bound of the distance. + eps (float): a small value to ensure target < max_dis, instead <= + + Returns: + Tensor: Decoded distances. + """ + left = points[:, 0] - bbox[:, 0] + top = points[:, 1] - bbox[:, 1] + right = bbox[:, 2] - points[:, 0] + bottom = bbox[:, 3] - points[:, 1] + if max_dis is not None: + left = left.clamp(min=0, max=max_dis - eps) + top = top.clamp(min=0, max=max_dis - eps) + right = right.clamp(min=0, max=max_dis - eps) + bottom = bottom.clamp(min=0, max=max_dis - eps) + return torch.stack([left, top, right, bottom], -1) + + +def bbox_rescale(bboxes, scale_factor=1.0): + """Rescale bounding box w.r.t. scale_factor. + + Args: + bboxes (Tensor): Shape (n, 4) for bboxes or (n, 5) for rois + scale_factor (float): rescale factor + + Returns: + Tensor: Rescaled bboxes. 
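# Illustrative sketch for bbox2roi / roi2bbox above (made-up values): column 0
# of a RoI tensor is the batch image index, and the two functions round-trip.
import torch

boxes_img0 = torch.tensor([[0., 0., 5., 5.]])
boxes_img1 = torch.tensor([[1., 2., 3., 4.], [2., 2., 6., 8.]])
rois = bbox2roi([boxes_img0, boxes_img1])  # shape (3, 5): [batch_ind, x1, y1, x2, y2]
per_image = roi2bbox(rois)
assert torch.equal(per_image[0], boxes_img0)
assert torch.equal(per_image[1], boxes_img1)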
+ """ + if bboxes.size(1) == 5: + bboxes_ = bboxes[:, 1:] + inds_ = bboxes[:, 0] + else: + bboxes_ = bboxes + cx = (bboxes_[:, 0] + bboxes_[:, 2]) * 0.5 + cy = (bboxes_[:, 1] + bboxes_[:, 3]) * 0.5 + w = bboxes_[:, 2] - bboxes_[:, 0] + h = bboxes_[:, 3] - bboxes_[:, 1] + w = w * scale_factor + h = h * scale_factor + x1 = cx - 0.5 * w + x2 = cx + 0.5 * w + y1 = cy - 0.5 * h + y2 = cy + 0.5 * h + if bboxes.size(1) == 5: + rescaled_bboxes = torch.stack([inds_, x1, y1, x2, y2], dim=-1) + else: + rescaled_bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + return rescaled_bboxes + + +def bbox_cxcywh_to_xyxy(bbox): + """Convert bbox coordinates from (cx, cy, w, h) to (x1, y1, x2, y2). + + Args: + bbox (Tensor): Shape (n, 4) for bboxes. + + Returns: + Tensor: Converted bboxes. + """ + cx, cy, w, h = bbox.split((1, 1, 1, 1), dim=-1) + bbox_new = [(cx - 0.5 * w), (cy - 0.5 * h), (cx + 0.5 * w), (cy + 0.5 * h)] + return torch.cat(bbox_new, dim=-1) + + +def bbox_xyxy_to_cxcywh(bbox): + """Convert bbox coordinates from (x1, y1, x2, y2) to (cx, cy, w, h). + + Args: + bbox (Tensor): Shape (n, 4) for bboxes. + + Returns: + Tensor: Converted bboxes. + """ + x1, y1, x2, y2 = bbox.split((1, 1, 1, 1), dim=-1) + bbox_new = [(x1 + x2) / 2, (y1 + y2) / 2, (x2 - x1), (y2 - y1)] + return torch.cat(bbox_new, dim=-1) diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/__init__.py b/thirdparty/mmdetection/mmdet/core/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d11ef15b9db95166b4427ad4d08debbd0630a741 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/__init__.py @@ -0,0 +1,15 @@ +from .class_names import (cityscapes_classes, coco_classes, dataset_aliases, + get_classes, imagenet_det_classes, + imagenet_vid_classes, voc_classes) +from .eval_hooks import DistEvalHook, EvalHook +from .mean_ap import average_precision, eval_map, print_map_summary +from .recall import (eval_recalls, plot_iou_recall, plot_num_recall, + print_recall_summary) + +__all__ = [ + 'voc_classes', 'imagenet_det_classes', 'imagenet_vid_classes', + 'coco_classes', 'cityscapes_classes', 'dataset_aliases', 'get_classes', + 'DistEvalHook', 'EvalHook', 'average_precision', 'eval_map', + 'print_map_summary', 'eval_recalls', 'print_recall_summary', + 'plot_num_recall', 'plot_iou_recall' +] diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/bbox_overlaps.py b/thirdparty/mmdetection/mmdet/core/evaluation/bbox_overlaps.py new file mode 100644 index 0000000000000000000000000000000000000000..93559ea0f25369d552a5365312fa32b9ffec9226 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/bbox_overlaps.py @@ -0,0 +1,48 @@ +import numpy as np + + +def bbox_overlaps(bboxes1, bboxes2, mode='iou', eps=1e-6): + """Calculate the ious between each bbox of bboxes1 and bboxes2. 
+ + Args: + bboxes1(ndarray): shape (n, 4) + bboxes2(ndarray): shape (k, 4) + mode(str): iou (intersection over union) or iof (intersection + over foreground) + + Returns: + ious(ndarray): shape (n, k) + """ + + assert mode in ['iou', 'iof'] + + bboxes1 = bboxes1.astype(np.float32) + bboxes2 = bboxes2.astype(np.float32) + rows = bboxes1.shape[0] + cols = bboxes2.shape[0] + ious = np.zeros((rows, cols), dtype=np.float32) + if rows * cols == 0: + return ious + exchange = False + if bboxes1.shape[0] > bboxes2.shape[0]: + bboxes1, bboxes2 = bboxes2, bboxes1 + ious = np.zeros((cols, rows), dtype=np.float32) + exchange = True + area1 = (bboxes1[:, 2] - bboxes1[:, 0]) * (bboxes1[:, 3] - bboxes1[:, 1]) + area2 = (bboxes2[:, 2] - bboxes2[:, 0]) * (bboxes2[:, 3] - bboxes2[:, 1]) + for i in range(bboxes1.shape[0]): + x_start = np.maximum(bboxes1[i, 0], bboxes2[:, 0]) + y_start = np.maximum(bboxes1[i, 1], bboxes2[:, 1]) + x_end = np.minimum(bboxes1[i, 2], bboxes2[:, 2]) + y_end = np.minimum(bboxes1[i, 3], bboxes2[:, 3]) + overlap = np.maximum(x_end - x_start, 0) * np.maximum( + y_end - y_start, 0) + if mode == 'iou': + union = area1[i] + area2 - overlap + else: + union = area1[i] if not exchange else area2 + union = np.maximum(union, eps) + ious[i, :] = overlap / union + if exchange: + ious = ious.T + return ious diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/class_names.py b/thirdparty/mmdetection/mmdet/core/evaluation/class_names.py new file mode 100644 index 0000000000000000000000000000000000000000..4b8845f3fa1ee6b0c24c764b349d4dc7c6e8fe32 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/class_names.py @@ -0,0 +1,116 @@ +import mmcv + + +def wider_face_classes(): + return ['face'] + + +def voc_classes(): + return [ + 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', + 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', + 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor' + ] + + +def imagenet_det_classes(): + return [ + 'accordion', 'airplane', 'ant', 'antelope', 'apple', 'armadillo', + 'artichoke', 'axe', 'baby_bed', 'backpack', 'bagel', 'balance_beam', + 'banana', 'band_aid', 'banjo', 'baseball', 'basketball', 'bathing_cap', + 'beaker', 'bear', 'bee', 'bell_pepper', 'bench', 'bicycle', 'binder', + 'bird', 'bookshelf', 'bow_tie', 'bow', 'bowl', 'brassiere', 'burrito', + 'bus', 'butterfly', 'camel', 'can_opener', 'car', 'cart', 'cattle', + 'cello', 'centipede', 'chain_saw', 'chair', 'chime', 'cocktail_shaker', + 'coffee_maker', 'computer_keyboard', 'computer_mouse', 'corkscrew', + 'cream', 'croquet_ball', 'crutch', 'cucumber', 'cup_or_mug', 'diaper', + 'digital_clock', 'dishwasher', 'dog', 'domestic_cat', 'dragonfly', + 'drum', 'dumbbell', 'electric_fan', 'elephant', 'face_powder', 'fig', + 'filing_cabinet', 'flower_pot', 'flute', 'fox', 'french_horn', 'frog', + 'frying_pan', 'giant_panda', 'goldfish', 'golf_ball', 'golfcart', + 'guacamole', 'guitar', 'hair_dryer', 'hair_spray', 'hamburger', + 'hammer', 'hamster', 'harmonica', 'harp', 'hat_with_a_wide_brim', + 'head_cabbage', 'helmet', 'hippopotamus', 'horizontal_bar', 'horse', + 'hotdog', 'iPod', 'isopod', 'jellyfish', 'koala_bear', 'ladle', + 'ladybug', 'lamp', 'laptop', 'lemon', 'lion', 'lipstick', 'lizard', + 'lobster', 'maillot', 'maraca', 'microphone', 'microwave', 'milk_can', + 'miniskirt', 'monkey', 'motorcycle', 'mushroom', 'nail', 'neck_brace', + 'oboe', 'orange', 'otter', 'pencil_box', 'pencil_sharpener', 'perfume', + 'person', 'piano', 'pineapple', 'ping-pong_ball', 
'pitcher', 'pizza', + 'plastic_bag', 'plate_rack', 'pomegranate', 'popsicle', 'porcupine', + 'power_drill', 'pretzel', 'printer', 'puck', 'punching_bag', 'purse', + 'rabbit', 'racket', 'ray', 'red_panda', 'refrigerator', + 'remote_control', 'rubber_eraser', 'rugby_ball', 'ruler', + 'salt_or_pepper_shaker', 'saxophone', 'scorpion', 'screwdriver', + 'seal', 'sheep', 'ski', 'skunk', 'snail', 'snake', 'snowmobile', + 'snowplow', 'soap_dispenser', 'soccer_ball', 'sofa', 'spatula', + 'squirrel', 'starfish', 'stethoscope', 'stove', 'strainer', + 'strawberry', 'stretcher', 'sunglasses', 'swimming_trunks', 'swine', + 'syringe', 'table', 'tape_player', 'tennis_ball', 'tick', 'tie', + 'tiger', 'toaster', 'traffic_light', 'train', 'trombone', 'trumpet', + 'turtle', 'tv_or_monitor', 'unicycle', 'vacuum', 'violin', + 'volleyball', 'waffle_iron', 'washer', 'water_bottle', 'watercraft', + 'whale', 'wine_bottle', 'zebra' + ] + + +def imagenet_vid_classes(): + return [ + 'airplane', 'antelope', 'bear', 'bicycle', 'bird', 'bus', 'car', + 'cattle', 'dog', 'domestic_cat', 'elephant', 'fox', 'giant_panda', + 'hamster', 'horse', 'lion', 'lizard', 'monkey', 'motorcycle', 'rabbit', + 'red_panda', 'sheep', 'snake', 'squirrel', 'tiger', 'train', 'turtle', + 'watercraft', 'whale', 'zebra' + ] + + +def coco_classes(): + return [ + 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', + 'truck', 'boat', 'traffic_light', 'fire_hydrant', 'stop_sign', + 'parking_meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', + 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', + 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', + 'sports_ball', 'kite', 'baseball_bat', 'baseball_glove', 'skateboard', + 'surfboard', 'tennis_racket', 'bottle', 'wine_glass', 'cup', 'fork', + 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange', + 'broccoli', 'carrot', 'hot_dog', 'pizza', 'donut', 'cake', 'chair', + 'couch', 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', + 'laptop', 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', + 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush' + ] + + +def cityscapes_classes(): + return [ + 'person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', + 'bicycle' + ] + + +dataset_aliases = { + 'voc': ['voc', 'pascal_voc', 'voc07', 'voc12'], + 'imagenet_det': ['det', 'imagenet_det', 'ilsvrc_det'], + 'imagenet_vid': ['vid', 'imagenet_vid', 'ilsvrc_vid'], + 'coco': ['coco', 'mscoco', 'ms_coco'], + 'wider_face': ['WIDERFaceDataset', 'wider_face', 'WDIERFace'], + 'cityscapes': ['cityscapes'] +} + + +def get_classes(dataset): + """Get class names of a dataset.""" + alias2name = {} + for name, aliases in dataset_aliases.items(): + for alias in aliases: + alias2name[alias] = name + + if mmcv.is_str(dataset): + if dataset in alias2name: + labels = eval(alias2name[dataset] + '_classes()') + else: + raise ValueError(f'Unrecognized dataset: {dataset}') + else: + raise TypeError(f'dataset must a str, but got {type(dataset)}') + return labels diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/eval_hooks.py b/thirdparty/mmdetection/mmdet/core/evaluation/eval_hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..15d9abd9291e2e0a5395e5e51be3c9e27cb887e9 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/eval_hooks.py @@ -0,0 +1,255 @@ +import os.path as osp +import warnings +from math import inf + +import mmcv +from mmcv.runner 
import Hook
+from torch.utils.data import DataLoader
+
+from mmdet.utils import get_root_logger
+
+
+class EvalHook(Hook):
+    """Evaluation hook.
+
+    Notes:
+        If new arguments are added for EvalHook, tools/test.py and
+        tools/eval_metric.py may be affected.
+
+    Attributes:
+        dataloader (DataLoader): A PyTorch dataloader.
+        start (int, optional): Evaluation starting epoch. It enables evaluation
+            before the training starts if ``start`` <= the resuming epoch.
+            If None, whether to evaluate is merely decided by ``interval``.
+            Default: None.
+        interval (int): Evaluation interval (by epochs). Default: 1.
+        save_best (str, optional): If a metric is specified, it would measure
+            the best checkpoint during evaluation. The information about the
+            best checkpoint would be saved in best.json.
+            Options are the evaluation metrics to the test dataset. e.g.,
+            ``bbox_mAP``, ``segm_mAP`` for bbox detection and instance
+            segmentation. ``AR@100`` for proposal recall. If ``save_best`` is
+            ``auto``, the first key will be used. The interval of
+            ``CheckpointHook`` should divide that of ``EvalHook``, so that a
+            fresh checkpoint exists whenever evaluation runs. Default: None.
+        rule (str, optional): Comparison rule for best score. If set to None,
+            it will infer a reasonable rule. Keys such as 'mAP' or 'AR' will
+            be inferred by the 'greater' rule. Keys containing 'loss' will be
+            inferred by the 'less' rule. Options are 'greater', 'less'.
+            Default: None.
+        **eval_kwargs: Evaluation arguments fed into the evaluate function of
+            the dataset.
+    """
+
+    rule_map = {'greater': lambda x, y: x > y, 'less': lambda x, y: x < y}
+    init_value_map = {'greater': -inf, 'less': inf}
+    greater_keys = ['mAP', 'AR']
+    less_keys = ['loss']
+
+    def __init__(self,
+                 dataloader,
+                 start=None,
+                 interval=1,
+                 save_best=None,
+                 rule=None,
+                 **eval_kwargs):
+        if not isinstance(dataloader, DataLoader):
+            raise TypeError('dataloader must be a pytorch DataLoader, but got'
+                            f' {type(dataloader)}')
+        if not interval > 0:
+            raise ValueError(f'interval must be positive, but got {interval}')
+        if start is not None and start < 0:
+            warnings.warn(
+                f'The evaluation start epoch {start} is smaller than 0, '
+                f'use 0 instead', UserWarning)
+            start = 0
+        self.dataloader = dataloader
+        self.interval = interval
+        self.start = start
+        assert isinstance(save_best, str) or save_best is None
+        self.save_best = save_best
+        self.eval_kwargs = eval_kwargs
+        self.initial_epoch_flag = True
+
+        self.logger = get_root_logger()
+
+        if self.save_best is not None:
+            self._init_rule(rule, self.save_best)
+
+    def _init_rule(self, rule, key_indicator):
+        """Initialize rule, key_indicator, comparison_func, and best score.
+
+        Args:
+            rule (str | None): Comparison rule for best score.
+            key_indicator (str | None): Key indicator to determine the
+                comparison rule.
+        """
+        if rule not in self.rule_map and rule is not None:
+            raise KeyError(f'rule must be greater, less or None, '
+                           f'but got {rule}.')
+
+        if rule is None:
+            if key_indicator != 'auto':
+                if any(key in key_indicator for key in self.greater_keys):
+                    rule = 'greater'
+                elif any(key in key_indicator for key in self.less_keys):
+                    rule = 'less'
+                else:
+                    raise ValueError(f'Cannot infer the rule for key '
+                                     f'{key_indicator}, thus a specific rule '
+                                     f'must be specified.')
+        self.rule = rule
+        self.key_indicator = key_indicator
+        if self.rule is not None:
+            self.compare_func = self.rule_map[self.rule]
+
+    def before_run(self, runner):
+        if self.save_best is not None:
+            if runner.meta is None:
+                warnings.warn('runner.meta is None. Creating an empty one.')
+                runner.meta = dict()
+            runner.meta.setdefault('hook_msgs', dict())
+
+    def before_train_epoch(self, runner):
+        """Evaluate the model only at the start of training."""
+        if not self.initial_epoch_flag:
+            return
+        if self.start is not None and runner.epoch >= self.start:
+            self.after_train_epoch(runner)
+        self.initial_epoch_flag = False
+
+    def evaluation_flag(self, runner):
+        """Judge whether to perform evaluation after this epoch.
+
+        Returns:
+            bool: The flag indicating whether to perform evaluation.
+        """
+        if self.start is None:
+            if not self.every_n_epochs(runner, self.interval):
+                # No evaluation during the interval epochs.
+                return False
+        elif (runner.epoch + 1) < self.start:
+            # No evaluation if start is larger than the current epoch.
+            return False
+        else:
+            # Evaluation only at epochs 3, 5, 7... if start==3 and interval==2
+            if (runner.epoch + 1 - self.start) % self.interval:
+                return False
+        return True
+
+    def after_train_epoch(self, runner):
+        if not self.evaluation_flag(runner):
+            return
+        from mmdet.apis import single_gpu_test
+        results = single_gpu_test(runner.model, self.dataloader, show=False)
+        key_score = self.evaluate(runner, results)
+        if self.save_best:
+            best_score = runner.meta['hook_msgs'].get(
+                'best_score', self.init_value_map[self.rule])
+            if self.compare_func(key_score, best_score):
+                best_score = key_score
+                runner.meta['hook_msgs']['best_score'] = best_score
+                last_ckpt = runner.meta['hook_msgs']['last_ckpt']
+                runner.meta['hook_msgs']['best_ckpt'] = last_ckpt
+                mmcv.symlink(
+                    last_ckpt,
+                    osp.join(runner.work_dir,
+                             f'best_{self.key_indicator}.pth'))
+                self.logger.info(
+                    f'Now best checkpoint is epoch_{runner.epoch + 1}.pth. '
+                    f'Best {self.key_indicator} is {best_score:0.4f}')
+
+    def evaluate(self, runner, results):
+        eval_res = self.dataloader.dataset.evaluate(
+            results, logger=runner.logger, **self.eval_kwargs)
+        for name, val in eval_res.items():
+            runner.log_buffer.output[name] = val
+        runner.log_buffer.ready = True
+        if self.save_best is not None:
+            if self.key_indicator == 'auto':
+                # infer from eval_results
+                self._init_rule(self.rule, list(eval_res.keys())[0])
+            return eval_res[self.key_indicator]
+        else:
+            return None
+
+
+class DistEvalHook(EvalHook):
+    """Distributed evaluation hook.
+
+    Notes:
+        If new arguments are added, tools/test.py may be affected.
+
+    Attributes:
+        dataloader (DataLoader): A PyTorch dataloader.
+        start (int, optional): Evaluation starting epoch. It enables evaluation
+            before the training starts if ``start`` <= the resuming epoch.
+            If None, whether to evaluate is merely decided by ``interval``.
+            Default: None.
+        interval (int): Evaluation interval (by epochs). Default: 1.
+        tmpdir (str | None): Temporary directory to save the results of all
+            processes. Default: None.
+        gpu_collect (bool): Whether to use gpu or cpu to collect results.
+            Default: False.
+        save_best (str, optional): If a metric is specified, it would measure
+            the best checkpoint during evaluation. The information about the
+            best checkpoint would be saved in best.json.
+            Options are the evaluation metrics to the test dataset. e.g.,
+            ``bbox_mAP``, ``segm_mAP`` for bbox detection and instance
+            segmentation. ``AR@100`` for proposal recall. If ``save_best`` is
+            ``auto``, the first key will be used. The interval of
+            ``CheckpointHook`` should divide that of ``EvalHook``.
+            Default: None.
+        rule (str | None): Comparison rule for best score. If set to None,
+            it will infer a reasonable rule. Default: None.
+ **eval_kwargs: Evaluation arguments fed into the evaluate function of + the dataset. + """ + + def __init__(self, + dataloader, + start=None, + interval=1, + tmpdir=None, + gpu_collect=False, + save_best=None, + rule=None, + **eval_kwargs): + super().__init__( + dataloader, + start=start, + interval=interval, + save_best=save_best, + rule=rule, + **eval_kwargs) + self.tmpdir = tmpdir + self.gpu_collect = gpu_collect + + def after_train_epoch(self, runner): + if not self.evaluation_flag(runner): + return + + from mmdet.apis import multi_gpu_test + tmpdir = self.tmpdir + if tmpdir is None: + tmpdir = osp.join(runner.work_dir, '.eval_hook') + results = multi_gpu_test( + runner.model, + self.dataloader, + tmpdir=tmpdir, + gpu_collect=self.gpu_collect) + if runner.rank == 0: + print('\n') + key_score = self.evaluate(runner, results) + if self.save_best: + best_score = runner.meta['hook_msgs'].get( + 'best_score', self.init_value_map[self.rule]) + if self.compare_func(key_score, best_score): + best_score = key_score + runner.meta['hook_msgs']['best_score'] = best_score + last_ckpt = runner.meta['hook_msgs']['last_ckpt'] + runner.meta['hook_msgs']['best_ckpt'] = last_ckpt + mmcv.symlink( + last_ckpt, + osp.join(runner.work_dir, + f'best_{self.key_indicator}.pth')) + self.logger.info( + f'Now best checkpoint is {last_ckpt}.' + f'Best {self.key_indicator} is {best_score:0.4f}') diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/mean_ap.py b/thirdparty/mmdetection/mmdet/core/evaluation/mean_ap.py new file mode 100644 index 0000000000000000000000000000000000000000..f44314b423220e4673426b831979b141244c687d --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/mean_ap.py @@ -0,0 +1,469 @@ +from multiprocessing import Pool + +import mmcv +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .bbox_overlaps import bbox_overlaps +from .class_names import get_classes + + +def average_precision(recalls, precisions, mode='area'): + """Calculate average precision (for single or multiple scales). 
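+
+    A worked example (editor's note): for recalls [0.25, 0.5, 1.0] with
+    precisions [1.0, 0.8, 0.6], 'area' mode integrates the interpolated
+    precision-recall curve, giving 0.25*1.0 + 0.25*0.8 + 0.5*0.6 = 0.75.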
+
+    Args:
+        recalls (ndarray): shape (num_scales, num_dets) or (num_dets, )
+        precisions (ndarray): shape (num_scales, num_dets) or (num_dets, )
+        mode (str): 'area' or '11points', 'area' means calculating the area
+            under the precision-recall curve, '11points' means calculating
+            the average precision of recalls at [0, 0.1, ..., 1]
+
+    Returns:
+        float or ndarray: calculated average precision
+    """
+    no_scale = False
+    if recalls.ndim == 1:
+        no_scale = True
+        recalls = recalls[np.newaxis, :]
+        precisions = precisions[np.newaxis, :]
+    assert recalls.shape == precisions.shape and recalls.ndim == 2
+    num_scales = recalls.shape[0]
+    ap = np.zeros(num_scales, dtype=np.float32)
+    if mode == 'area':
+        zeros = np.zeros((num_scales, 1), dtype=recalls.dtype)
+        ones = np.ones((num_scales, 1), dtype=recalls.dtype)
+        mrec = np.hstack((zeros, recalls, ones))
+        mpre = np.hstack((zeros, precisions, zeros))
+        for i in range(mpre.shape[1] - 1, 0, -1):
+            mpre[:, i - 1] = np.maximum(mpre[:, i - 1], mpre[:, i])
+        for i in range(num_scales):
+            ind = np.where(mrec[i, 1:] != mrec[i, :-1])[0]
+            ap[i] = np.sum(
+                (mrec[i, ind + 1] - mrec[i, ind]) * mpre[i, ind + 1])
+    elif mode == '11points':
+        for i in range(num_scales):
+            for thr in np.arange(0, 1 + 1e-3, 0.1):
+                precs = precisions[i, recalls[i, :] >= thr]
+                prec = precs.max() if precs.size > 0 else 0
+                ap[i] += prec
+        ap /= 11
+    else:
+        raise ValueError(
+            'Unrecognized mode, only "area" and "11points" are supported')
+    if no_scale:
+        ap = ap[0]
+    return ap
+
+
+def tpfp_imagenet(det_bboxes,
+                  gt_bboxes,
+                  gt_bboxes_ignore=None,
+                  default_iou_thr=0.5,
+                  area_ranges=None):
+    """Check if detected bboxes are true positive or false positive.
+
+    Args:
+        det_bboxes (ndarray): Detected bboxes of this image, of shape (m, 5).
+        gt_bboxes (ndarray): GT bboxes of this image, of shape (n, 4).
+        gt_bboxes_ignore (ndarray): Ignored gt bboxes of this image,
+            of shape (k, 4). Default: None
+        default_iou_thr (float): IoU threshold to be considered as matched for
+            medium and large bboxes (small ones have special rules).
+            Default: 0.5.
+        area_ranges (list[tuple] | None): Range of bbox areas to be evaluated,
+            in the format [(min1, max1), (min2, max2), ...]. Default: None.
+
+    Returns:
+        tuple[np.ndarray]: (tp, fp) whose elements are 0 and 1. The shape of
+            each array is (num_scales, m).
+    """
+    # an indicator of ignored gts
+    gt_ignore_inds = np.concatenate(
+        (np.zeros(gt_bboxes.shape[0], dtype=bool),
+         np.ones(gt_bboxes_ignore.shape[0], dtype=bool)))
+    # stack gt_bboxes and gt_bboxes_ignore for convenience
+    gt_bboxes = np.vstack((gt_bboxes, gt_bboxes_ignore))
+
+    num_dets = det_bboxes.shape[0]
+    num_gts = gt_bboxes.shape[0]
+    if area_ranges is None:
+        area_ranges = [(None, None)]
+    num_scales = len(area_ranges)
+    # tp and fp are of shape (num_scales, num_dets), each row is tp or fp
+    # of a certain scale.
+    tp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    fp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    if gt_bboxes.shape[0] == 0:
+        if area_ranges == [(None, None)]:
+            fp[...] = 1
+        else:
+            det_areas = (det_bboxes[:, 2] - det_bboxes[:, 0]) * (
+                det_bboxes[:, 3] - det_bboxes[:, 1])
+            for i, (min_area, max_area) in enumerate(area_ranges):
+                fp[i, (det_areas >= min_area) & (det_areas < max_area)] = 1
+        return tp, fp
+    ious = bbox_overlaps(det_bboxes, gt_bboxes - 1)
+    gt_w = gt_bboxes[:, 2] - gt_bboxes[:, 0]
+    gt_h = gt_bboxes[:, 3] - gt_bboxes[:, 1]
+    iou_thrs = np.minimum((gt_w * gt_h) / ((gt_w + 10.0) * (gt_h + 10.0)),
+                          default_iou_thr)
+    # sort all detections by scores in descending order
+    sort_inds = np.argsort(-det_bboxes[:, -1])
+    for k, (min_area, max_area) in enumerate(area_ranges):
+        gt_covered = np.zeros(num_gts, dtype=bool)
+        # if no area range is specified, gt_area_ignore is all False
+        if min_area is None:
+            gt_area_ignore = np.zeros_like(gt_ignore_inds, dtype=bool)
+        else:
+            gt_areas = gt_w * gt_h
+            gt_area_ignore = (gt_areas < min_area) | (gt_areas >= max_area)
+        for i in sort_inds:
+            max_iou = -1
+            matched_gt = -1
+            # find best overlapped available gt
+            for j in range(num_gts):
+                # different from PASCAL VOC: allow finding other gts if the
+                # best overlapped ones are already matched by other det bboxes
+                if gt_covered[j]:
+                    continue
+                elif ious[i, j] >= iou_thrs[j] and ious[i, j] > max_iou:
+                    max_iou = ious[i, j]
+                    matched_gt = j
+            # there are 4 cases for a det bbox:
+            # 1. it matches a gt, tp = 1, fp = 0
+            # 2. it matches an ignored gt, tp = 0, fp = 0
+            # 3. it matches no gt and within area range, tp = 0, fp = 1
+            # 4. it matches no gt but is beyond area range, tp = 0, fp = 0
+            if matched_gt >= 0:
+                gt_covered[matched_gt] = 1
+                if not (gt_ignore_inds[matched_gt]
+                        or gt_area_ignore[matched_gt]):
+                    tp[k, i] = 1
+            elif min_area is None:
+                fp[k, i] = 1
+            else:
+                bbox = det_bboxes[i, :4]
+                area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1])
+                if area >= min_area and area < max_area:
+                    fp[k, i] = 1
+    return tp, fp
+
+
+def tpfp_default(det_bboxes,
+                 gt_bboxes,
+                 gt_bboxes_ignore=None,
+                 iou_thr=0.5,
+                 area_ranges=None):
+    """Check if detected bboxes are true positive or false positive.
+
+    Args:
+        det_bboxes (ndarray): Detected bboxes of this image, of shape (m, 5).
+        gt_bboxes (ndarray): GT bboxes of this image, of shape (n, 4).
+        gt_bboxes_ignore (ndarray): Ignored gt bboxes of this image,
+            of shape (k, 4). Default: None
+        iou_thr (float): IoU threshold to be considered as matched.
+            Default: 0.5.
+        area_ranges (list[tuple] | None): Range of bbox areas to be evaluated,
+            in the format [(min1, max1), (min2, max2), ...]. Default: None.
+
+    Returns:
+        tuple[np.ndarray]: (tp, fp) whose elements are 0 and 1. The shape of
+            each array is (num_scales, m).
+    """
+    # an indicator of ignored gts
+    gt_ignore_inds = np.concatenate(
+        (np.zeros(gt_bboxes.shape[0], dtype=bool),
+         np.ones(gt_bboxes_ignore.shape[0], dtype=bool)))
+    # stack gt_bboxes and gt_bboxes_ignore for convenience
+    gt_bboxes = np.vstack((gt_bboxes, gt_bboxes_ignore))
+
+    num_dets = det_bboxes.shape[0]
+    num_gts = gt_bboxes.shape[0]
+    if area_ranges is None:
+        area_ranges = [(None, None)]
+    num_scales = len(area_ranges)
+    # tp and fp are of shape (num_scales, num_dets), each row is tp or fp of
+    # a certain scale
+    tp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    fp = np.zeros((num_scales, num_dets), dtype=np.float32)
+
+    # if there is no gt bboxes in this image, then all det bboxes
+    # within area range are false positives
+    if gt_bboxes.shape[0] == 0:
+        if area_ranges == [(None, None)]:
+            fp[...]
= 1 + else: + det_areas = (det_bboxes[:, 2] - det_bboxes[:, 0]) * ( + det_bboxes[:, 3] - det_bboxes[:, 1]) + for i, (min_area, max_area) in enumerate(area_ranges): + fp[i, (det_areas >= min_area) & (det_areas < max_area)] = 1 + return tp, fp + + ious = bbox_overlaps(det_bboxes, gt_bboxes) + # for each det, the max iou with all gts + ious_max = ious.max(axis=1) + # for each det, which gt overlaps most with it + ious_argmax = ious.argmax(axis=1) + # sort all dets in descending order by scores + sort_inds = np.argsort(-det_bboxes[:, -1]) + for k, (min_area, max_area) in enumerate(area_ranges): + gt_covered = np.zeros(num_gts, dtype=bool) + # if no area range is specified, gt_area_ignore is all False + if min_area is None: + gt_area_ignore = np.zeros_like(gt_ignore_inds, dtype=bool) + else: + gt_areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * ( + gt_bboxes[:, 3] - gt_bboxes[:, 1]) + gt_area_ignore = (gt_areas < min_area) | (gt_areas >= max_area) + for i in sort_inds: + if ious_max[i] >= iou_thr: + matched_gt = ious_argmax[i] + if not (gt_ignore_inds[matched_gt] + or gt_area_ignore[matched_gt]): + if not gt_covered[matched_gt]: + gt_covered[matched_gt] = True + tp[k, i] = 1 + else: + fp[k, i] = 1 + # otherwise ignore this detected bbox, tp = 0, fp = 0 + elif min_area is None: + fp[k, i] = 1 + else: + bbox = det_bboxes[i, :4] + area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1]) + if area >= min_area and area < max_area: + fp[k, i] = 1 + return tp, fp + + +def get_cls_results(det_results, annotations, class_id): + """Get det results and gt information of a certain class. + + Args: + det_results (list[list]): Same as `eval_map()`. + annotations (list[dict]): Same as `eval_map()`. + class_id (int): ID of a specific class. + + Returns: + tuple[list[np.ndarray]]: detected bboxes, gt bboxes, ignored gt bboxes + """ + cls_dets = [img_res[class_id] for img_res in det_results] + cls_gts = [] + cls_gts_ignore = [] + for ann in annotations: + gt_inds = ann['labels'] == class_id + cls_gts.append(ann['bboxes'][gt_inds, :]) + + if ann.get('labels_ignore', None) is not None: + ignore_inds = ann['labels_ignore'] == class_id + cls_gts_ignore.append(ann['bboxes_ignore'][ignore_inds, :]) + else: + cls_gts_ignore.append(np.empty((0, 4), dtype=np.float32)) + + return cls_dets, cls_gts, cls_gts_ignore + + +def eval_map(det_results, + annotations, + scale_ranges=None, + iou_thr=0.5, + dataset=None, + logger=None, + tpfp_fn=None, + nproc=4): + """Evaluate mAP of a dataset. + + Args: + det_results (list[list]): [[cls1_det, cls2_det, ...], ...]. + The outer list indicates images, and the inner list indicates + per-class detected bboxes. + annotations (list[dict]): Ground truth annotations where each item of + the list indicates an image. Keys of annotations are: + + - `bboxes`: numpy array of shape (n, 4) + - `labels`: numpy array of shape (n, ) + - `bboxes_ignore` (optional): numpy array of shape (k, 4) + - `labels_ignore` (optional): numpy array of shape (k, ) + scale_ranges (list[tuple] | None): Range of scales to be evaluated, + in the format [(min1, max1), (min2, max2), ...]. A range of + (32, 64) means the area range between (32**2, 64**2). + Default: None. + iou_thr (float): IoU threshold to be considered as matched. + Default: 0.5. + dataset (list[str] | str | None): Dataset name or dataset classes, + there are minor differences in metrics for different datsets, e.g. + "voc07", "imagenet_det", etc. Default: None. + logger (logging.Logger | str | None): The way to print the mAP + summary. 
See `mmdet.utils.print_log()` for details. Default: None. + tpfp_fn (callable | None): The function used to determine true/ + false positives. If None, :func:`tpfp_default` is used as default + unless dataset is 'det' or 'vid' (:func:`tpfp_imagenet` in this + case). If it is given as a function, then this function is used + to evaluate tp & fp. Default None. + nproc (int): Processes used for computing TP and FP. + Default: 4. + + Returns: + tuple: (mAP, [dict, dict, ...]) + """ + assert len(det_results) == len(annotations) + + num_imgs = len(det_results) + num_scales = len(scale_ranges) if scale_ranges is not None else 1 + num_classes = len(det_results[0]) # positive class num + area_ranges = ([(rg[0]**2, rg[1]**2) for rg in scale_ranges] + if scale_ranges is not None else None) + + pool = Pool(nproc) + eval_results = [] + for i in range(num_classes): + # get gt and det bboxes of this class + cls_dets, cls_gts, cls_gts_ignore = get_cls_results( + det_results, annotations, i) + # choose proper function according to datasets to compute tp and fp + if tpfp_fn is None: + if dataset in ['det', 'vid']: + tpfp_fn = tpfp_imagenet + else: + tpfp_fn = tpfp_default + if not callable(tpfp_fn): + raise ValueError( + f'tpfp_fn has to be a function or None, but got {tpfp_fn}') + + # compute tp and fp for each image with multiple processes + tpfp = pool.starmap( + tpfp_fn, + zip(cls_dets, cls_gts, cls_gts_ignore, + [iou_thr for _ in range(num_imgs)], + [area_ranges for _ in range(num_imgs)])) + tp, fp = tuple(zip(*tpfp)) + # calculate gt number of each scale + # ignored gts or gts beyond the specific scale are not counted + num_gts = np.zeros(num_scales, dtype=int) + for j, bbox in enumerate(cls_gts): + if area_ranges is None: + num_gts[0] += bbox.shape[0] + else: + gt_areas = (bbox[:, 2] - bbox[:, 0]) * ( + bbox[:, 3] - bbox[:, 1]) + for k, (min_area, max_area) in enumerate(area_ranges): + num_gts[k] += np.sum((gt_areas >= min_area) + & (gt_areas < max_area)) + # sort all det bboxes by score, also sort tp and fp + cls_dets = np.vstack(cls_dets) + num_dets = cls_dets.shape[0] + sort_inds = np.argsort(-cls_dets[:, -1]) + tp = np.hstack(tp)[:, sort_inds] + fp = np.hstack(fp)[:, sort_inds] + # calculate recall and precision with tp and fp + tp = np.cumsum(tp, axis=1) + fp = np.cumsum(fp, axis=1) + eps = np.finfo(np.float32).eps + recalls = tp / np.maximum(num_gts[:, np.newaxis], eps) + precisions = tp / np.maximum((tp + fp), eps) + # calculate AP + if scale_ranges is None: + recalls = recalls[0, :] + precisions = precisions[0, :] + num_gts = num_gts.item() + mode = 'area' if dataset != 'voc07' else '11points' + ap = average_precision(recalls, precisions, mode) + eval_results.append({ + 'num_gts': num_gts, + 'num_dets': num_dets, + 'recall': recalls, + 'precision': precisions, + 'ap': ap + }) + pool.close() + if scale_ranges is not None: + # shape (num_classes, num_scales) + all_ap = np.vstack([cls_result['ap'] for cls_result in eval_results]) + all_num_gts = np.vstack( + [cls_result['num_gts'] for cls_result in eval_results]) + mean_ap = [] + for i in range(num_scales): + if np.any(all_num_gts[:, i] > 0): + mean_ap.append(all_ap[all_num_gts[:, i] > 0, i].mean()) + else: + mean_ap.append(0.0) + else: + aps = [] + for cls_result in eval_results: + if cls_result['num_gts'] > 0: + aps.append(cls_result['ap']) + mean_ap = np.array(aps).mean().item() if aps else 0.0 + + print_map_summary( + mean_ap, eval_results, dataset, area_ranges, logger=logger) + + return mean_ap, eval_results + + +def 
print_map_summary(mean_ap, + results, + dataset=None, + scale_ranges=None, + logger=None): + """Print mAP and results of each class. + + A table will be printed to show the gts/dets/recall/AP of each class and + the mAP. + + Args: + mean_ap (float): Calculated from `eval_map()`. + results (list[dict]): Calculated from `eval_map()`. + dataset (list[str] | str | None): Dataset name or dataset classes. + scale_ranges (list[tuple] | None): Range of scales to be evaluated. + logger (logging.Logger | str | None): The way to print the mAP + summary. See `mmdet.utils.print_log()` for details. Default: None. + """ + + if logger == 'silent': + return + + if isinstance(results[0]['ap'], np.ndarray): + num_scales = len(results[0]['ap']) + else: + num_scales = 1 + + if scale_ranges is not None: + assert len(scale_ranges) == num_scales + + num_classes = len(results) + + recalls = np.zeros((num_scales, num_classes), dtype=np.float32) + aps = np.zeros((num_scales, num_classes), dtype=np.float32) + num_gts = np.zeros((num_scales, num_classes), dtype=int) + for i, cls_result in enumerate(results): + if cls_result['recall'].size > 0: + recalls[:, i] = np.array(cls_result['recall'], ndmin=2)[:, -1] + aps[:, i] = cls_result['ap'] + num_gts[:, i] = cls_result['num_gts'] + + if dataset is None: + label_names = [str(i) for i in range(num_classes)] + elif mmcv.is_str(dataset): + label_names = get_classes(dataset) + else: + label_names = dataset + + if not isinstance(mean_ap, list): + mean_ap = [mean_ap] + + header = ['class', 'gts', 'dets', 'recall', 'ap'] + for i in range(num_scales): + if scale_ranges is not None: + print_log(f'Scale range {scale_ranges[i]}', logger=logger) + table_data = [header] + for j in range(num_classes): + row_data = [ + label_names[j], num_gts[i, j], results[j]['num_dets'], + f'{recalls[i, j]:.3f}', f'{aps[i, j]:.3f}' + ] + table_data.append(row_data) + table_data.append(['mAP', '', '', '', f'{mean_ap[i]:.3f}']) + table = AsciiTable(table_data) + table.inner_footing_row_border = True + print_log('\n' + table.table, logger=logger) diff --git a/thirdparty/mmdetection/mmdet/core/evaluation/recall.py b/thirdparty/mmdetection/mmdet/core/evaluation/recall.py new file mode 100644 index 0000000000000000000000000000000000000000..ea6277abc1d14024d5234fa38e3182c6a9c7ad3e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/evaluation/recall.py @@ -0,0 +1,189 @@ +from collections.abc import Sequence + +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .bbox_overlaps import bbox_overlaps + + +def _recalls(all_ious, proposal_nums, thrs): + + img_num = all_ious.shape[0] + total_gt_num = sum([ious.shape[0] for ious in all_ious]) + + _ious = np.zeros((proposal_nums.size, total_gt_num), dtype=np.float32) + for k, proposal_num in enumerate(proposal_nums): + tmp_ious = np.zeros(0) + for i in range(img_num): + ious = all_ious[i][:, :proposal_num].copy() + gt_ious = np.zeros((ious.shape[0])) + if ious.size == 0: + tmp_ious = np.hstack((tmp_ious, gt_ious)) + continue + for j in range(ious.shape[0]): + gt_max_overlaps = ious.argmax(axis=1) + max_ious = ious[np.arange(0, ious.shape[0]), gt_max_overlaps] + gt_idx = max_ious.argmax() + gt_ious[j] = max_ious[gt_idx] + box_idx = gt_max_overlaps[gt_idx] + ious[gt_idx, :] = -1 + ious[:, box_idx] = -1 + tmp_ious = np.hstack((tmp_ious, gt_ious)) + _ious[k, :] = tmp_ious + + _ious = np.fliplr(np.sort(_ious, axis=1)) + recalls = np.zeros((proposal_nums.size, thrs.size)) + for i, thr in enumerate(thrs): + recalls[:, i] = 
(_ious >= thr).sum(axis=1) / float(total_gt_num) + + return recalls + + +def set_recall_param(proposal_nums, iou_thrs): + """Check proposal_nums and iou_thrs and set correct format.""" + if isinstance(proposal_nums, Sequence): + _proposal_nums = np.array(proposal_nums) + elif isinstance(proposal_nums, int): + _proposal_nums = np.array([proposal_nums]) + else: + _proposal_nums = proposal_nums + + if iou_thrs is None: + _iou_thrs = np.array([0.5]) + elif isinstance(iou_thrs, Sequence): + _iou_thrs = np.array(iou_thrs) + elif isinstance(iou_thrs, float): + _iou_thrs = np.array([iou_thrs]) + else: + _iou_thrs = iou_thrs + + return _proposal_nums, _iou_thrs + + +def eval_recalls(gts, + proposals, + proposal_nums=None, + iou_thrs=0.5, + logger=None): + """Calculate recalls. + + Args: + gts (list[ndarray]): a list of arrays of shape (n, 4) + proposals (list[ndarray]): a list of arrays of shape (k, 4) or (k, 5) + proposal_nums (int | Sequence[int]): Top N proposals to be evaluated. + iou_thrs (float | Sequence[float]): IoU thresholds. Default: 0.5. + logger (logging.Logger | str | None): The way to print the recall + summary. See `mmdet.utils.print_log()` for details. Default: None. + + Returns: + ndarray: recalls of different ious and proposal nums + """ + + img_num = len(gts) + assert img_num == len(proposals) + + proposal_nums, iou_thrs = set_recall_param(proposal_nums, iou_thrs) + + all_ious = [] + for i in range(img_num): + if proposals[i].ndim == 2 and proposals[i].shape[1] == 5: + scores = proposals[i][:, 4] + sort_idx = np.argsort(scores)[::-1] + img_proposal = proposals[i][sort_idx, :] + else: + img_proposal = proposals[i] + prop_num = min(img_proposal.shape[0], proposal_nums[-1]) + if gts[i] is None or gts[i].shape[0] == 0: + ious = np.zeros((0, img_proposal.shape[0]), dtype=np.float32) + else: + ious = bbox_overlaps(gts[i], img_proposal[:prop_num, :4]) + all_ious.append(ious) + all_ious = np.array(all_ious) + recalls = _recalls(all_ious, proposal_nums, iou_thrs) + + print_recall_summary(recalls, proposal_nums, iou_thrs, logger=logger) + return recalls + + +def print_recall_summary(recalls, + proposal_nums, + iou_thrs, + row_idxs=None, + col_idxs=None, + logger=None): + """Print recalls in a table. + + Args: + recalls (ndarray): calculated from `bbox_recalls` + proposal_nums (ndarray or list): top N proposals + iou_thrs (ndarray or list): iou thresholds + row_idxs (ndarray): which rows(proposal nums) to print + col_idxs (ndarray): which cols(iou thresholds) to print + logger (logging.Logger | str | None): The way to print the recall + summary. See `mmdet.utils.print_log()` for details. Default: None. + """ + proposal_nums = np.array(proposal_nums, dtype=np.int32) + iou_thrs = np.array(iou_thrs) + if row_idxs is None: + row_idxs = np.arange(proposal_nums.size) + if col_idxs is None: + col_idxs = np.arange(iou_thrs.size) + row_header = [''] + iou_thrs[col_idxs].tolist() + table_data = [row_header] + for i, num in enumerate(proposal_nums[row_idxs]): + row = [f'{val:.3f}' for val in recalls[row_idxs[i], col_idxs].tolist()] + row.insert(0, num) + table_data.append(row) + table = AsciiTable(table_data) + print_log('\n' + table.table, logger=logger) + + +def plot_num_recall(recalls, proposal_nums): + """Plot Proposal_num-Recalls curve. 
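+
+    Usage sketch (editor's example; assumes matplotlib is installed and the
+    inputs come from `eval_recalls`):
+
+        >>> import numpy as np
+        >>> nums = np.array([10, 100, 1000])
+        >>> rec = np.array([0.3, 0.6, 0.9])  # e.g. one IoU column of recalls
+        >>> plot_num_recall(rec, nums)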
+ + Args: + recalls(ndarray or list): shape (k,) + proposal_nums(ndarray or list): same shape as `recalls` + """ + if isinstance(proposal_nums, np.ndarray): + _proposal_nums = proposal_nums.tolist() + else: + _proposal_nums = proposal_nums + if isinstance(recalls, np.ndarray): + _recalls = recalls.tolist() + else: + _recalls = recalls + + import matplotlib.pyplot as plt + f = plt.figure() + plt.plot([0] + _proposal_nums, [0] + _recalls) + plt.xlabel('Proposal num') + plt.ylabel('Recall') + plt.axis([0, proposal_nums.max(), 0, 1]) + f.show() + + +def plot_iou_recall(recalls, iou_thrs): + """Plot IoU-Recalls curve. + + Args: + recalls(ndarray or list): shape (k,) + iou_thrs(ndarray or list): same shape as `recalls` + """ + if isinstance(iou_thrs, np.ndarray): + _iou_thrs = iou_thrs.tolist() + else: + _iou_thrs = iou_thrs + if isinstance(recalls, np.ndarray): + _recalls = recalls.tolist() + else: + _recalls = recalls + + import matplotlib.pyplot as plt + f = plt.figure() + plt.plot(_iou_thrs + [1.0], _recalls + [0.]) + plt.xlabel('IoU') + plt.ylabel('Recall') + plt.axis([iou_thrs.min(), 1, 0, 1]) + f.show() diff --git a/thirdparty/mmdetection/mmdet/core/export/__init__.py b/thirdparty/mmdetection/mmdet/core/export/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..76589b1f279a71a59a5515d1b78cea0865f83131 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/export/__init__.py @@ -0,0 +1,8 @@ +from .pytorch2onnx import (build_model_from_cfg, + generate_inputs_and_wrap_model, + preprocess_example_input) + +__all__ = [ + 'build_model_from_cfg', 'generate_inputs_and_wrap_model', + 'preprocess_example_input' +] diff --git a/thirdparty/mmdetection/mmdet/core/export/pytorch2onnx.py b/thirdparty/mmdetection/mmdet/core/export/pytorch2onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..8f9309df8c763d68404d4654b92abb3db4abdf93 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/export/pytorch2onnx.py @@ -0,0 +1,143 @@ +from functools import partial + +import mmcv +import numpy as np +import torch +from mmcv.runner import load_checkpoint + + +def generate_inputs_and_wrap_model(config_path, checkpoint_path, input_config): + """Prepare sample input and wrap model for ONNX export. + + The ONNX export API only accept args, and all inputs should be + torch.Tensor or corresponding types (such as tuple of tensor). + So we should call this function before exporting. This function will: + + 1. generate corresponding inputs which are used to execute the model. + 2. Wrap the model's forward function. + + For example, the MMDet models' forward function has a parameter + ``return_loss:bool``. As we want to set it as False while export API + supports neither bool type or kwargs. So we have to replace the forward + like: ``model.forward = partial(model.forward, return_loss=False)`` + + Args: + config_path (str): the OpenMMLab config for the model we want to + export to ONNX + checkpoint_path (str): Path to the corresponding checkpoint + input_config (dict): the exactly data in this dict depends on the + framework. For MMSeg, we can just declare the input shape, + and generate the dummy data accordingly. However, for MMDet, + we may pass the real img path, or the NMS will return None + as there is no legal bbox. + + Returns: + tuple: (model, tensor_data) wrapped model which can be called by \ + model(*tensor_data) and a list of inputs which are used to execute \ + the model while exporting. 
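+
+    Example (editor's sketch; the config and checkpoint paths below are
+    placeholders, not files shipped with this repo):
+
+        >>> from mmdet.core.export import generate_inputs_and_wrap_model
+        >>> input_config = {
+        >>>     'input_shape': (1, 3, 300, 400),
+        >>>     'input_path': 'demo/demo.jpg',
+        >>> }
+        >>> model, tensor_data = generate_inputs_and_wrap_model(
+        >>>     'configs/my_cfg.py', 'ckpts/my_ckpt.pth', input_config)
+        >>> # model(*tensor_data) now runs the wrapped forward for export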
+ """ + + model = build_model_from_cfg(config_path, checkpoint_path) + one_img, one_meta = preprocess_example_input(input_config) + tensor_data = [one_img] + model.forward = partial( + model.forward, img_metas=[[one_meta]], return_loss=False) + + # pytorch has some bug in pytorch1.3, we have to fix it + # by replacing these existing op + opset_version = 11 + # put the import within the function thus it will not cause import error + # when not using this function + try: + from mmcv.onnx.symbolic import register_extra_symbolics + except ModuleNotFoundError: + raise NotImplementedError('please update mmcv to version>=v1.0.4') + register_extra_symbolics(opset_version) + + return model, tensor_data + + +def build_model_from_cfg(config_path, checkpoint_path): + """Build a model from config and load the given checkpoint. + + Args: + config_path (str): the OpenMMLab config for the model we want to + export to ONNX + checkpoint_path (str): Path to the corresponding checkpoint + + Returns: + torch.nn.Module: the built model + """ + from mmdet.models import build_detector + + cfg = mmcv.Config.fromfile(config_path) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + cfg.model.pretrained = None + cfg.data.test.test_mode = True + + # build the model + model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) + load_checkpoint(model, checkpoint_path, map_location='cpu') + model.cpu().eval() + return model + + +def preprocess_example_input(input_config): + """Prepare an example input image for ``generate_inputs_and_wrap_model``. + + Args: + input_config (dict): customized config describing the example input. + + Returns: + tuple: (one_img, one_meta), tensor of the example input image and \ + meta information for the example input image. 
+
+    Examples:
+        >>> from mmdet.core.export import preprocess_example_input
+        >>> input_config = {
+        >>>     'input_shape': (1,3,224,224),
+        >>>     'input_path': 'demo/demo.jpg',
+        >>>     'normalize_cfg': {
+        >>>         'mean': (123.675, 116.28, 103.53),
+        >>>         'std': (58.395, 57.12, 57.375)
+        >>>         }
+        >>>     }
+        >>> one_img, one_meta = preprocess_example_input(input_config)
+        >>> print(one_img.shape)
+        torch.Size([1, 3, 224, 224])
+        >>> print(one_meta)
+        {'img_shape': (224, 224, 3),
+        'ori_shape': (224, 224, 3),
+        'pad_shape': (224, 224, 3),
+        'filename': '.png',
+        'scale_factor': 1.0,
+        'flip': False}
+    """
+    input_path = input_config['input_path']
+    input_shape = input_config['input_shape']
+    one_img = mmcv.imread(input_path)
+    one_img = mmcv.imresize(one_img, input_shape[2:][::-1])
+    show_img = one_img.copy()
+    if 'normalize_cfg' in input_config.keys():
+        normalize_cfg = input_config['normalize_cfg']
+        mean = np.array(normalize_cfg['mean'], dtype=np.float32)
+        std = np.array(normalize_cfg['std'], dtype=np.float32)
+        one_img = mmcv.imnormalize(one_img, mean, std)
+    one_img = one_img.transpose(2, 0, 1)
+    one_img = torch.from_numpy(one_img).unsqueeze(0).float().requires_grad_(
+        True)
+    (_, C, H, W) = input_shape
+    one_meta = {
+        'img_shape': (H, W, C),
+        'ori_shape': (H, W, C),
+        'pad_shape': (H, W, C),
+        'filename': '.png',
+        'scale_factor': 1.0,
+        'flip': False,
+        'show_img': show_img,
+    }
+
+    return one_img, one_meta
diff --git a/thirdparty/mmdetection/mmdet/core/fp16/__init__.py b/thirdparty/mmdetection/mmdet/core/fp16/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a68c28b9014f3452c661eaa1daab08153da59f9
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/fp16/__init__.py
@@ -0,0 +1,8 @@
+from .deprecated_fp16_utils import \
+    DeprecatedFp16OptimizerHook as Fp16OptimizerHook
+from .deprecated_fp16_utils import deprecated_auto_fp16 as auto_fp16
+from .deprecated_fp16_utils import deprecated_force_fp32 as force_fp32
+from .deprecated_fp16_utils import \
+    deprecated_wrap_fp16_model as wrap_fp16_model
+
+__all__ = ['auto_fp16', 'force_fp32', 'Fp16OptimizerHook', 'wrap_fp16_model']
diff --git a/thirdparty/mmdetection/mmdet/core/fp16/deprecated_fp16_utils.py b/thirdparty/mmdetection/mmdet/core/fp16/deprecated_fp16_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b15b047e0b40ab494037449465bda1de2c8ecf7
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/fp16/deprecated_fp16_utils.py
@@ -0,0 +1,47 @@
+import warnings
+
+from mmcv.runner import (Fp16OptimizerHook, auto_fp16, force_fp32,
+                         wrap_fp16_model)
+
+
+class DeprecatedFp16OptimizerHook(Fp16OptimizerHook):
+    """A wrapper class for the FP16 optimizer hook. This class wraps
+    :class:`Fp16OptimizerHook` in `mmcv.runner` and shows a warning that the
+    :class:`Fp16OptimizerHook` from `mmdet.core` will be deprecated.
+
+    Refer to :class:`Fp16OptimizerHook` in `mmcv.runner` for more details.
+
+    Args:
+        loss_scale (float): Scale factor multiplied with loss.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        warnings.warn(
+            'Importing Fp16OptimizerHook from "mmdet.core" will be '
+            'deprecated in the future. Please import them from "mmcv.runner" '
+            'instead')
+
+
+def deprecated_auto_fp16(*args, **kwargs):
+    warnings.warn(
+        'Importing auto_fp16 from "mmdet.core" will be '
+        'deprecated in the future.
Please import them from "mmcv.runner" ' + 'instead') + return auto_fp16(*args, **kwargs) + + +def deprecated_force_fp32(*args, **kwargs): + warnings.warn( + 'Importing force_fp32 from "mmdet.core" will be ' + 'deprecated in the future. Please import them from "mmcv.runner" ' + 'instead') + return force_fp32(*args, **kwargs) + + +def deprecated_wrap_fp16_model(*args, **kwargs): + warnings.warn( + 'Importing wrap_fp16_model from "mmdet.core" will be ' + 'deprecated in the future. Please import them from "mmcv.runner" ' + 'instead') + wrap_fp16_model(*args, **kwargs) diff --git a/thirdparty/mmdetection/mmdet/core/mask/__init__.py b/thirdparty/mmdetection/mmdet/core/mask/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ab1e88bc686d5c2fe72b3114cb2b3e372e73a0f8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/mask/__init__.py @@ -0,0 +1,8 @@ +from .mask_target import mask_target +from .structures import BaseInstanceMasks, BitmapMasks, PolygonMasks +from .utils import encode_mask_results, split_combined_polys + +__all__ = [ + 'split_combined_polys', 'mask_target', 'BaseInstanceMasks', 'BitmapMasks', + 'PolygonMasks', 'encode_mask_results' +] diff --git a/thirdparty/mmdetection/mmdet/core/mask/mask_target.py b/thirdparty/mmdetection/mmdet/core/mask/mask_target.py new file mode 100644 index 0000000000000000000000000000000000000000..18e423507086e8bc0ba36ff01138a0808a0735b2 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/mask/mask_target.py @@ -0,0 +1,62 @@ +import numpy as np +import torch +from torch.nn.modules.utils import _pair + + +def mask_target(pos_proposals_list, pos_assigned_gt_inds_list, gt_masks_list, + cfg): + """Compute mask target for positive proposals in multiple images. + + Args: + pos_proposals_list (list[Tensor]): Positive proposals in multiple + images. + pos_assigned_gt_inds_list (list[Tensor]): Assigned GT indices for each + positive proposals. + gt_masks_list (list[:obj:`BaseInstanceMasks`]): Ground truth masks of + each image. + cfg (dict): Config dict that specifies the mask size. + + Returns: + list[Tensor]: Mask target of each image. + """ + cfg_list = [cfg for _ in range(len(pos_proposals_list))] + mask_targets = map(mask_target_single, pos_proposals_list, + pos_assigned_gt_inds_list, gt_masks_list, cfg_list) + mask_targets = list(mask_targets) + if len(mask_targets) > 0: + mask_targets = torch.cat(mask_targets) + return mask_targets + + +def mask_target_single(pos_proposals, pos_assigned_gt_inds, gt_masks, cfg): + """Compute mask target for each positive proposal in the image. + + Args: + pos_proposals (Tensor): Positive proposals. + pos_assigned_gt_inds (Tensor): Assigned GT inds of positive proposals. + gt_masks (:obj:`BaseInstanceMasks`): GT masks in the format of Bitmap + or Polygon. + cfg (dict): Config dict that indicate the mask size. + + Returns: + Tensor: Mask target of each positive proposals in the image. 
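+
+    Note (editor's summary of the implementation below): proposals are first
+    clipped to the image, then, for bitmap masks, each one is cropped out of
+    its assigned GT mask with `roi_align`, resized to `cfg.mask_size` and
+    binarized at 0.5.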
+ """ + device = pos_proposals.device + mask_size = _pair(cfg.mask_size) + num_pos = pos_proposals.size(0) + if num_pos > 0: + proposals_np = pos_proposals.cpu().numpy() + maxh, maxw = gt_masks.height, gt_masks.width + proposals_np[:, [0, 2]] = np.clip(proposals_np[:, [0, 2]], 0, maxw) + proposals_np[:, [1, 3]] = np.clip(proposals_np[:, [1, 3]], 0, maxh) + pos_assigned_gt_inds = pos_assigned_gt_inds.cpu().numpy() + + mask_targets = gt_masks.crop_and_resize( + proposals_np, mask_size, device=device, + inds=pos_assigned_gt_inds).to_ndarray() + + mask_targets = torch.from_numpy(mask_targets).float().to(device) + else: + mask_targets = pos_proposals.new_zeros((0, ) + mask_size) + + return mask_targets diff --git a/thirdparty/mmdetection/mmdet/core/mask/structures.py b/thirdparty/mmdetection/mmdet/core/mask/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..6f5da5547f27045da43894c7c08f8c75b71c82d8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/mask/structures.py @@ -0,0 +1,827 @@ +from abc import ABCMeta, abstractmethod + +import cv2 +import mmcv +import numpy as np +import pycocotools.mask as maskUtils +import torch +from mmcv.ops.roi_align import roi_align + + +class BaseInstanceMasks(metaclass=ABCMeta): + """Base class for instance masks.""" + + @abstractmethod + def rescale(self, scale, interpolation='nearest'): + """Rescale masks as large as possible while keeping the aspect ratio. + For details can refer to `mmcv.imrescale`. + + Args: + scale (tuple[int]): The maximum size (h, w) of rescaled mask. + interpolation (str): Same as :func:`mmcv.imrescale`. + + Returns: + BaseInstanceMasks: The rescaled masks. + """ + pass + + @abstractmethod + def resize(self, out_shape, interpolation='nearest'): + """Resize masks to the given out_shape. + + Args: + out_shape: Target (h, w) of resized mask. + interpolation (str): See :func:`mmcv.imresize`. + + Returns: + BaseInstanceMasks: The resized masks. + """ + pass + + @abstractmethod + def flip(self, flip_direction='horizontal'): + """Flip masks alone the given direction. + + Args: + flip_direction (str): Either 'horizontal' or 'vertical'. + + Returns: + BaseInstanceMasks: The flipped masks. + """ + pass + + @abstractmethod + def pad(self, out_shape, pad_val): + """Pad masks to the given size of (h, w). + + Args: + out_shape (tuple[int]): Target (h, w) of padded mask. + pad_val (int): The padded value. + + Returns: + BaseInstanceMasks: The padded masks. + """ + pass + + @abstractmethod + def crop(self, bbox): + """Crop each mask by the given bbox. + + Args: + bbox (ndarray): Bbox in format [x1, y1, x2, y2], shape (4, ). + + Return: + BaseInstanceMasks: The cropped masks. + """ + pass + + @abstractmethod + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device, + interpolation='bilinear'): + """Crop and resize masks by the given bboxes. + + This function is mainly used in mask targets computation. + It firstly align mask to bboxes by assigned_inds, then crop mask by the + assigned bbox and resize to the size of (mask_h, mask_w) + + Args: + bboxes (Tensor): Bboxes in format [x1, y1, x2, y2], shape (N, 4) + out_shape (tuple[int]): Target (h, w) of resized mask + inds (ndarray): Indexes to assign masks to each bbox + device (str): Device of bboxes + interpolation (str): See `mmcv.imresize` + + Return: + BaseInstanceMasks: the cropped and resized masks. 
+ """ + pass + + @abstractmethod + def expand(self, expanded_h, expanded_w, top, left): + """see :class:`Expand`.""" + pass + + @property + @abstractmethod + def areas(self): + """ndarray: areas of each instance.""" + pass + + @abstractmethod + def to_ndarray(self): + """Convert masks to the format of ndarray. + + Return: + ndarray: Converted masks in the format of ndarray. + """ + pass + + @abstractmethod + def to_tensor(self, dtype, device): + """Convert masks to the format of Tensor. + + Args: + dtype (str): Dtype of converted mask. + device (torch.device): Device of converted masks. + + Returns: + Tensor: Converted masks in the format of Tensor. + """ + pass + + @abstractmethod + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Translate the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + fill_val (int | float): Border value. Default 0. + interpolation (str): Same as :func:`mmcv.imtranslate`. + + Returns: + Translated masks. + """ + pass + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """Shear the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + magnitude (int | float): The magnitude used for shear. + direction (str): The shear direction, either "horizontal" + or "vertical". + border_value (int | tuple[int]): Value used in case of a + constant border. Default 0. + interpolation (str): Same as in :func:`mmcv.imshear`. + + Returns: + ndarray: Sheared masks. + """ + pass + + @abstractmethod + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """Rotate the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + angle (int | float): Rotation angle in degrees. Positive values + mean counter-clockwise rotation. + center (tuple[float], optional): Center point (w, h) of the + rotation in source image. If not specified, the center of + the image will be used. + scale (int | float): Isotropic scale factor. + fill_val (int | float): Border value. Default 0 for masks. + + Returns: + Rotated masks. + """ + pass + + +class BitmapMasks(BaseInstanceMasks): + """This class represents masks in the form of bitmaps. + + Args: + masks (ndarray): ndarray of masks in shape (N, H, W), where N is + the number of objects. + height (int): height of masks + width (int): width of masks + """ + + def __init__(self, masks, height, width): + self.height = height + self.width = width + if len(masks) == 0: + self.masks = np.empty((0, self.height, self.width), dtype=np.uint8) + else: + assert isinstance(masks, (list, np.ndarray)) + if isinstance(masks, list): + assert isinstance(masks[0], np.ndarray) + assert masks[0].ndim == 2 # (H, W) + else: + assert masks.ndim == 3 # (N, H, W) + + self.masks = np.stack(masks).reshape(-1, height, width) + assert self.masks.shape[1] == self.height + assert self.masks.shape[2] == self.width + + def __getitem__(self, index): + """Index the BitmapMask. + + Args: + index (int | ndarray): Indices in the format of integer or ndarray. + + Returns: + :obj:`BitmapMasks`: Indexed bitmap masks. 
+ """ + masks = self.masks[index].reshape(-1, self.height, self.width) + return BitmapMasks(masks, self.height, self.width) + + def __iter__(self): + return iter(self.masks) + + def __repr__(self): + s = self.__class__.__name__ + '(' + s += f'num_masks={len(self.masks)}, ' + s += f'height={self.height}, ' + s += f'width={self.width})' + return s + + def __len__(self): + """Number of masks.""" + return len(self.masks) + + def rescale(self, scale, interpolation='nearest'): + """See :func:`BaseInstanceMasks.rescale`.""" + if len(self.masks) == 0: + new_w, new_h = mmcv.rescale_size((self.width, self.height), scale) + rescaled_masks = np.empty((0, new_h, new_w), dtype=np.uint8) + else: + rescaled_masks = np.stack([ + mmcv.imrescale(mask, scale, interpolation=interpolation) + for mask in self.masks + ]) + height, width = rescaled_masks.shape[1:] + return BitmapMasks(rescaled_masks, height, width) + + def resize(self, out_shape, interpolation='nearest'): + """See :func:`BaseInstanceMasks.resize`.""" + if len(self.masks) == 0: + resized_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + resized_masks = np.stack([ + mmcv.imresize(mask, out_shape, interpolation=interpolation) + for mask in self.masks + ]) + return BitmapMasks(resized_masks, *out_shape) + + def flip(self, flip_direction='horizontal'): + """See :func:`BaseInstanceMasks.flip`.""" + assert flip_direction in ('horizontal', 'vertical', 'diagonal') + + if len(self.masks) == 0: + flipped_masks = self.masks + else: + flipped_masks = np.stack([ + mmcv.imflip(mask, direction=flip_direction) + for mask in self.masks + ]) + return BitmapMasks(flipped_masks, self.height, self.width) + + def pad(self, out_shape, pad_val=0): + """See :func:`BaseInstanceMasks.pad`.""" + if len(self.masks) == 0: + padded_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + padded_masks = np.stack([ + mmcv.impad(mask, shape=out_shape, pad_val=pad_val) + for mask in self.masks + ]) + return BitmapMasks(padded_masks, *out_shape) + + def crop(self, bbox): + """See :func:`BaseInstanceMasks.crop`.""" + assert isinstance(bbox, np.ndarray) + assert bbox.ndim == 1 + + # clip the boundary + bbox = bbox.copy() + bbox[0::2] = np.clip(bbox[0::2], 0, self.width) + bbox[1::2] = np.clip(bbox[1::2], 0, self.height) + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + + if len(self.masks) == 0: + cropped_masks = np.empty((0, h, w), dtype=np.uint8) + else: + cropped_masks = self.masks[:, y1:y1 + h, x1:x1 + w] + return BitmapMasks(cropped_masks, h, w) + + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device='cpu', + interpolation='bilinear'): + """See :func:`BaseInstanceMasks.crop_and_resize`.""" + if len(self.masks) == 0: + empty_masks = np.empty((0, *out_shape), dtype=np.uint8) + return BitmapMasks(empty_masks, *out_shape) + + # convert bboxes to tensor + if isinstance(bboxes, np.ndarray): + bboxes = torch.from_numpy(bboxes).to(device=device) + if isinstance(inds, np.ndarray): + inds = torch.from_numpy(inds).to(device=device) + + num_bbox = bboxes.shape[0] + fake_inds = torch.arange( + num_bbox, device=device).to(dtype=bboxes.dtype)[:, None] + rois = torch.cat([fake_inds, bboxes], dim=1) # Nx5 + rois = rois.to(device=device) + if num_bbox > 0: + gt_masks_th = torch.from_numpy(self.masks).to(device).index_select( + 0, inds).to(dtype=rois.dtype) + targets = roi_align(gt_masks_th[:, None, :, :], rois, out_shape, + 1.0, 0, 'avg', True).squeeze(1) + resized_masks = (targets >= 0.5).cpu().numpy() + else: + resized_masks = [] + 
return BitmapMasks(resized_masks, *out_shape) + + def expand(self, expanded_h, expanded_w, top, left): + """See :func:`BaseInstanceMasks.expand`.""" + if len(self.masks) == 0: + expanded_mask = np.empty((0, expanded_h, expanded_w), + dtype=np.uint8) + else: + expanded_mask = np.zeros((len(self), expanded_h, expanded_w), + dtype=np.uint8) + expanded_mask[:, top:top + self.height, + left:left + self.width] = self.masks + return BitmapMasks(expanded_mask, expanded_h, expanded_w) + + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Translate the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + fill_val (int | float): Border value. Default 0 for masks. + interpolation (str): Same as :func:`mmcv.imtranslate`. + + Returns: + BitmapMasks: Translated BitmapMasks. + """ + if len(self.masks) == 0: + translated_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + translated_masks = mmcv.imtranslate( + self.masks.transpose((1, 2, 0)), + offset, + direction, + border_value=fill_val, + interpolation=interpolation) + if translated_masks.ndim == 2: + translated_masks = translated_masks[:, :, None] + translated_masks = translated_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(translated_masks, *out_shape) + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """Shear the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + magnitude (int | float): The magnitude used for shear. + direction (str): The shear direction, either "horizontal" + or "vertical". + border_value (int | tuple[int]): Value used in case of a + constant border. + interpolation (str): Same as in :func:`mmcv.imshear`. + + Returns: + BitmapMasks: The sheared masks. + """ + if len(self.masks) == 0: + sheared_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + sheared_masks = mmcv.imshear( + self.masks.transpose((1, 2, 0)), + magnitude, + direction, + border_value=border_value, + interpolation=interpolation) + if sheared_masks.ndim == 2: + sheared_masks = sheared_masks[:, :, None] + sheared_masks = sheared_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(sheared_masks, *out_shape) + + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """Rotate the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + angle (int | float): Rotation angle in degrees. Positive values + mean counter-clockwise rotation. + center (tuple[float], optional): Center point (w, h) of the + rotation in source image. If not specified, the center of + the image will be used. + scale (int | float): Isotropic scale factor. + fill_val (int | float): Border value. Default 0 for masks. + + Returns: + BitmapMasks: Rotated BitmapMasks. 
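+
+        Example (illustrative; delegates to `mmcv.imrotate`):
+
+            >>> import numpy as np
+            >>> m = BitmapMasks(np.ones((1, 10, 20), dtype=np.uint8), 10, 20)
+            >>> rotated = m.rotate((10, 20), angle=45.)
+            >>> # still one mask on a (10, 20) canvas; pixels rotated out of
+            >>> # the canvas take fill_val (0 by default)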
+ """ + if len(self.masks) == 0: + rotated_masks = np.empty((0, *out_shape), dtype=self.masks.dtype) + else: + rotated_masks = mmcv.imrotate( + self.masks.transpose((1, 2, 0)), + angle, + center=center, + scale=scale, + border_value=fill_val) + if rotated_masks.ndim == 2: + # case when only one mask, (h, w) + rotated_masks = rotated_masks[:, :, None] # (h, w, 1) + rotated_masks = rotated_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(rotated_masks, *out_shape) + + @property + def areas(self): + """See :py:attr:`BaseInstanceMasks.areas`.""" + return self.masks.sum((1, 2)) + + def to_ndarray(self): + """See :func:`BaseInstanceMasks.to_ndarray`.""" + return self.masks + + def to_tensor(self, dtype, device): + """See :func:`BaseInstanceMasks.to_tensor`.""" + return torch.tensor(self.masks, dtype=dtype, device=device) + + +class PolygonMasks(BaseInstanceMasks): + """This class represents masks in the form of polygons. + + Polygons is a list of three levels. The first level of the list + corresponds to objects, the second level to the polys that compose the + object, the third level to the poly coordinates + + Args: + masks (list[list[ndarray]]): The first level of the list + corresponds to objects, the second level to the polys that + compose the object, the third level to the poly coordinates + height (int): height of masks + width (int): width of masks + """ + + def __init__(self, masks, height, width): + assert isinstance(masks, list) + if len(masks) > 0: + assert isinstance(masks[0], list) + assert isinstance(masks[0][0], np.ndarray) + + self.height = height + self.width = width + self.masks = masks + + def __getitem__(self, index): + """Index the polygon masks. + + Args: + index (ndarray | List): The indices. + + Returns: + :obj:`PolygonMasks`: The indexed polygon masks. 
+ """ + if isinstance(index, np.ndarray): + index = index.tolist() + if isinstance(index, list): + masks = [self.masks[i] for i in index] + else: + try: + masks = self.masks[index] + except Exception: + raise ValueError( + f'Unsupported input of type {type(index)} for indexing!') + if len(masks) and isinstance(masks[0], np.ndarray): + masks = [masks] # ensure a list of three levels + return PolygonMasks(masks, self.height, self.width) + + def __iter__(self): + return iter(self.masks) + + def __repr__(self): + s = self.__class__.__name__ + '(' + s += f'num_masks={len(self.masks)}, ' + s += f'height={self.height}, ' + s += f'width={self.width})' + return s + + def __len__(self): + """Number of masks.""" + return len(self.masks) + + def rescale(self, scale, interpolation=None): + """see :func:`BaseInstanceMasks.rescale`""" + new_w, new_h = mmcv.rescale_size((self.width, self.height), scale) + if len(self.masks) == 0: + rescaled_masks = PolygonMasks([], new_h, new_w) + else: + rescaled_masks = self.resize((new_h, new_w)) + return rescaled_masks + + def resize(self, out_shape, interpolation=None): + """see :func:`BaseInstanceMasks.resize`""" + if len(self.masks) == 0: + resized_masks = PolygonMasks([], *out_shape) + else: + h_scale = out_shape[0] / self.height + w_scale = out_shape[1] / self.width + resized_masks = [] + for poly_per_obj in self.masks: + resized_poly = [] + for p in poly_per_obj: + p = p.copy() + p[0::2] *= w_scale + p[1::2] *= h_scale + resized_poly.append(p) + resized_masks.append(resized_poly) + resized_masks = PolygonMasks(resized_masks, *out_shape) + return resized_masks + + def flip(self, flip_direction='horizontal'): + """see :func:`BaseInstanceMasks.flip`""" + assert flip_direction in ('horizontal', 'vertical', 'diagonal') + if len(self.masks) == 0: + flipped_masks = PolygonMasks([], self.height, self.width) + else: + flipped_masks = [] + for poly_per_obj in self.masks: + flipped_poly_per_obj = [] + for p in poly_per_obj: + p = p.copy() + if flip_direction == 'horizontal': + p[0::2] = self.width - p[0::2] + elif flip_direction == 'vertical': + p[1::2] = self.height - p[1::2] + else: + p[0::2] = self.width - p[0::2] + p[1::2] = self.height - p[1::2] + flipped_poly_per_obj.append(p) + flipped_masks.append(flipped_poly_per_obj) + flipped_masks = PolygonMasks(flipped_masks, self.height, + self.width) + return flipped_masks + + def crop(self, bbox): + """see :func:`BaseInstanceMasks.crop`""" + assert isinstance(bbox, np.ndarray) + assert bbox.ndim == 1 + + # clip the boundary + bbox = bbox.copy() + bbox[0::2] = np.clip(bbox[0::2], 0, self.width) + bbox[1::2] = np.clip(bbox[1::2], 0, self.height) + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + + if len(self.masks) == 0: + cropped_masks = PolygonMasks([], h, w) + else: + cropped_masks = [] + for poly_per_obj in self.masks: + cropped_poly_per_obj = [] + for p in poly_per_obj: + # pycocotools will clip the boundary + p = p.copy() + p[0::2] -= bbox[0] + p[1::2] -= bbox[1] + cropped_poly_per_obj.append(p) + cropped_masks.append(cropped_poly_per_obj) + cropped_masks = PolygonMasks(cropped_masks, h, w) + return cropped_masks + + def pad(self, out_shape, pad_val=0): + """padding has no effect on polygons`""" + return PolygonMasks(self.masks, *out_shape) + + def expand(self, *args, **kwargs): + """TODO: Add expand for polygon""" + raise NotImplementedError + + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device='cpu', + interpolation='bilinear'): + """see 
:func:`BaseInstanceMasks.crop_and_resize`""" + out_h, out_w = out_shape + if len(self.masks) == 0: + return PolygonMasks([], out_h, out_w) + + resized_masks = [] + for i in range(len(bboxes)): + mask = self.masks[inds[i]] + bbox = bboxes[i, :] + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + h_scale = out_h / max(h, 0.1) # avoid too large scale + w_scale = out_w / max(w, 0.1) + + resized_mask = [] + for p in mask: + p = p.copy() + # crop + # pycocotools will clip the boundary + p[0::2] -= bbox[0] + p[1::2] -= bbox[1] + + # resize + p[0::2] *= w_scale + p[1::2] *= h_scale + resized_mask.append(p) + resized_masks.append(resized_mask) + return PolygonMasks(resized_masks, *out_shape) + + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=None, + interpolation=None): + """Translate the PolygonMasks.""" + assert fill_val is None or fill_val == 0, 'Here fill_val is not '\ + f'used, and defaultly should be None or 0. got {fill_val}.' + if len(self.masks) == 0: + translated_masks = PolygonMasks([], *out_shape) + else: + translated_masks = [] + for poly_per_obj in self.masks: + translated_poly_per_obj = [] + for p in poly_per_obj: + p = p.copy() + if direction == 'horizontal': + p[0::2] = np.clip(p[0::2] + offset, 0, out_shape[1]) + elif direction == 'vertical': + p[1::2] = np.clip(p[1::2] + offset, 0, out_shape[0]) + translated_poly_per_obj.append(p) + translated_masks.append(translated_poly_per_obj) + translated_masks = PolygonMasks(translated_masks, *out_shape) + return translated_masks + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """See :func:`BaseInstanceMasks.shear`.""" + if len(self.masks) == 0: + sheared_masks = PolygonMasks([], *out_shape) + else: + sheared_masks = [] + if direction == 'horizontal': + shear_matrix = np.stack([[1, magnitude], + [0, 1]]).astype(np.float32) + elif direction == 'vertical': + shear_matrix = np.stack([[1, 0], [magnitude, + 1]]).astype(np.float32) + for poly_per_obj in self.masks: + sheared_poly = [] + for p in poly_per_obj: + p = np.stack([p[0::2], p[1::2]], axis=0) # [2, n] + new_coords = np.matmul(shear_matrix, p) # [2, n] + new_coords[0, :] = np.clip(new_coords[0, :], 0, + out_shape[1]) + new_coords[1, :] = np.clip(new_coords[1, :], 0, + out_shape[0]) + sheared_poly.append( + new_coords.transpose((1, 0)).reshape(-1)) + sheared_masks.append(sheared_poly) + sheared_masks = PolygonMasks(sheared_masks, *out_shape) + return sheared_masks + + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """See :func:`BaseInstanceMasks.rotate`.""" + if len(self.masks) == 0: + rotated_masks = PolygonMasks([], *out_shape) + else: + rotated_masks = [] + rotate_matrix = cv2.getRotationMatrix2D(center, -angle, scale) + for poly_per_obj in self.masks: + rotated_poly = [] + for p in poly_per_obj: + p = p.copy() + coords = np.stack([p[0::2], p[1::2]], axis=1) # [n, 2] + # pad 1 to convert from format [x, y] to homogeneous + # coordinates format [x, y, 1] + coords = np.concatenate( + (coords, np.ones((coords.shape[0], 1), coords.dtype)), + axis=1) # [n, 3] + rotated_coords = np.matmul( + rotate_matrix[None, :, :], + coords[:, :, None])[..., 0] # [n, 2, 1] -> [n, 2] + rotated_coords[:, 0] = np.clip(rotated_coords[:, 0], 0, + out_shape[1]) + rotated_coords[:, 1] = np.clip(rotated_coords[:, 1], 0, + out_shape[0]) + rotated_poly.append(rotated_coords.reshape(-1)) + rotated_masks.append(rotated_poly) + rotated_masks = 
PolygonMasks(rotated_masks, *out_shape)
+        return rotated_masks
+
+    def to_bitmap(self):
+        """Convert polygon masks to bitmap masks."""
+        bitmap_masks = self.to_ndarray()
+        return BitmapMasks(bitmap_masks, self.height, self.width)
+
+    @property
+    def areas(self):
+        """Compute areas of masks.
+
+        This func is modified from `detectron2
+        <https://github.com/facebookresearch/detectron2/>`_.
+        The function only works with Polygons using the shoelace formula.
+
+        Return:
+            ndarray: areas of each instance
+        """  # noqa: W501
+        area = []
+        for polygons_per_obj in self.masks:
+            area_per_obj = 0
+            for p in polygons_per_obj:
+                area_per_obj += self._polygon_area(p[0::2], p[1::2])
+            area.append(area_per_obj)
+        return np.asarray(area)
+
+    def _polygon_area(self, x, y):
+        """Compute the area of a component of a polygon.
+
+        Using the shoelace formula:
+        https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates
+
+        Args:
+            x (ndarray): x coordinates of the component
+            y (ndarray): y coordinates of the component
+
+        Return:
+            float: the area of the component
+        """  # noqa: 501
+        return 0.5 * np.abs(
+            np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))
+
+    def to_ndarray(self):
+        """Convert masks to the format of ndarray."""
+        if len(self.masks) == 0:
+            return np.empty((0, self.height, self.width), dtype=np.uint8)
+        bitmap_masks = []
+        for poly_per_obj in self.masks:
+            bitmap_masks.append(
+                polygon_to_bitmap(poly_per_obj, self.height, self.width))
+        return np.stack(bitmap_masks)
+
+    def to_tensor(self, dtype, device):
+        """See :func:`BaseInstanceMasks.to_tensor`."""
+        if len(self.masks) == 0:
+            return torch.empty((0, self.height, self.width),
+                               dtype=dtype,
+                               device=device)
+        ndarray_masks = self.to_ndarray()
+        return torch.tensor(ndarray_masks, dtype=dtype, device=device)
+
+
+def polygon_to_bitmap(polygons, height, width):
+    """Convert masks from the form of polygons to bitmaps.
+
+    Args:
+        polygons (list[ndarray]): masks in polygon representation
+        height (int): mask height
+        width (int): mask width
+
+    Return:
+        ndarray: the converted masks in bitmap representation
+    """
+    rles = maskUtils.frPyObjects(polygons, height, width)
+    rle = maskUtils.merge(rles)
+    # np.bool was removed in NumPy 1.24; the builtin bool is the correct dtype
+    bitmap_mask = maskUtils.decode(rle).astype(bool)
+    return bitmap_mask
diff --git a/thirdparty/mmdetection/mmdet/core/mask/utils.py b/thirdparty/mmdetection/mmdet/core/mask/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..c88208291ab2a605bee9fe6c1a28a443b74c6372
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/core/mask/utils.py
@@ -0,0 +1,63 @@
+import mmcv
+import numpy as np
+import pycocotools.mask as mask_util
+
+
+def split_combined_polys(polys, poly_lens, polys_per_mask):
+    """Split the combined 1-D polys into masks.
+
+    A mask is represented as a list of polys, and a poly is represented as
+    a 1-D array. In the dataset, all masks are concatenated into a single 1-D
+    tensor. Here we need to split the tensor into original representations.
+
+    Args:
+        polys (list): a list (length = image num) of 1-D tensors
+        poly_lens (list): a list (length = image num) of poly length
+        polys_per_mask (list): a list (length = image num) of poly number
+            of each mask
+
+    Returns:
+        list: a list (length = image num) of list (length = mask num) of \
+            list (length = poly num) of numpy array.
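A quick sanity check of the shoelace formula used by `_polygon_area` above, on a 10x10 axis-aligned square (expected area 100):

```python
import numpy as np

x = np.array([0., 10., 10., 0.])   # vertices in traversal order
y = np.array([0., 0., 10., 10.])
area = 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))
print(area)  # 100.0
```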
+ """ + mask_polys_list = [] + for img_id in range(len(polys)): + polys_single = polys[img_id] + polys_lens_single = poly_lens[img_id].tolist() + polys_per_mask_single = polys_per_mask[img_id].tolist() + + split_polys = mmcv.slice_list(polys_single, polys_lens_single) + mask_polys = mmcv.slice_list(split_polys, polys_per_mask_single) + mask_polys_list.append(mask_polys) + return mask_polys_list + + +# TODO: move this function to more proper place +def encode_mask_results(mask_results): + """Encode bitmap mask to RLE code. + + Args: + mask_results (list | tuple[list]): bitmap mask results. + In mask scoring rcnn, mask_results is a tuple of (segm_results, + segm_cls_score). + + Returns: + list | tuple: RLE encoded mask. + """ + if isinstance(mask_results, tuple): # mask scoring + cls_segms, cls_mask_scores = mask_results + else: + cls_segms = mask_results + num_classes = len(cls_segms) + encoded_mask_results = [[] for _ in range(num_classes)] + for i in range(len(cls_segms)): + for cls_segm in cls_segms[i]: + encoded_mask_results[i].append( + mask_util.encode( + np.array( + cls_segm[:, :, np.newaxis], order='F', + dtype='uint8'))[0]) # encoded with RLE + if isinstance(mask_results, tuple): + return encoded_mask_results, cls_mask_scores + else: + return encoded_mask_results diff --git a/thirdparty/mmdetection/mmdet/core/post_processing/__init__.py b/thirdparty/mmdetection/mmdet/core/post_processing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..880b3f06609b050aae163b2e38088c1ee4aa0998 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/post_processing/__init__.py @@ -0,0 +1,8 @@ +from .bbox_nms import fast_nms, multiclass_nms +from .merge_augs import (merge_aug_bboxes, merge_aug_masks, + merge_aug_proposals, merge_aug_scores) + +__all__ = [ + 'multiclass_nms', 'merge_aug_proposals', 'merge_aug_bboxes', + 'merge_aug_scores', 'merge_aug_masks', 'fast_nms' +] diff --git a/thirdparty/mmdetection/mmdet/core/post_processing/bbox_nms.py b/thirdparty/mmdetection/mmdet/core/post_processing/bbox_nms.py new file mode 100644 index 0000000000000000000000000000000000000000..8d1f181fd2306b8369d6721517f55d51c65e3867 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/post_processing/bbox_nms.py @@ -0,0 +1,142 @@ +import torch +from mmcv.ops.nms import batched_nms + +from mmdet.core.bbox.iou_calculators import bbox_overlaps + + +def multiclass_nms(multi_bboxes, + multi_scores, + score_thr, + nms_cfg, + max_num=-1, + score_factors=None): + """NMS for multi-class bboxes. + + Args: + multi_bboxes (Tensor): shape (n, #class*4) or (n, 4) + multi_scores (Tensor): shape (n, #class), where the last column + contains scores of the background class, but this will be ignored. + score_thr (float): bbox threshold, bboxes with scores lower than it + will not be considered. + nms_thr (float): NMS IoU threshold + max_num (int): if there are more than max_num bboxes after NMS, + only top max_num will be kept. + score_factors (Tensor): The factors multiplied to scores before + applying NMS + + Returns: + tuple: (bboxes, labels), tensors of shape (k, 5) and (k, 1). Labels \ + are 0-based. 
+ """ + num_classes = multi_scores.size(1) - 1 + # exclude background category + if multi_bboxes.shape[1] > 4: + bboxes = multi_bboxes.view(multi_scores.size(0), -1, 4) + else: + bboxes = multi_bboxes[:, None].expand( + multi_scores.size(0), num_classes, 4) + + scores = multi_scores[:, :-1] + if score_factors is not None: + scores = scores * score_factors[:, None] + + labels = torch.arange(num_classes, dtype=torch.long) + labels = labels.view(1, -1).expand_as(scores) + + bboxes = bboxes.reshape(-1, 4) + scores = scores.reshape(-1) + labels = labels.reshape(-1) + + # remove low scoring boxes + valid_mask = scores > score_thr + inds = valid_mask.nonzero(as_tuple=False).squeeze(1) + bboxes, scores, labels = bboxes[inds], scores[inds], labels[inds] + if inds.numel() == 0: + if torch.onnx.is_in_onnx_export(): + raise RuntimeError('[ONNX Error] Can not record NMS ' + 'as it has not been executed this time') + return bboxes, labels + + # TODO: add size check before feed into batched_nms + dets, keep = batched_nms(bboxes, scores, labels, nms_cfg) + + if max_num > 0: + dets = dets[:max_num] + keep = keep[:max_num] + + return dets, labels[keep] + + +def fast_nms(multi_bboxes, + multi_scores, + multi_coeffs, + score_thr, + iou_thr, + top_k, + max_num=-1): + """Fast NMS in `YOLACT `_. + + Fast NMS allows already-removed detections to suppress other detections so + that every instance can be decided to be kept or discarded in parallel, + which is not possible in traditional NMS. This relaxation allows us to + implement Fast NMS entirely in standard GPU-accelerated matrix operations. + + Args: + multi_bboxes (Tensor): shape (n, #class*4) or (n, 4) + multi_scores (Tensor): shape (n, #class+1), where the last column + contains scores of the background class, but this will be ignored. + multi_coeffs (Tensor): shape (n, #class*coeffs_dim). + score_thr (float): bbox threshold, bboxes with scores lower than it + will not be considered. + iou_thr (float): IoU threshold to be considered as conflicted. + top_k (int): if there are more than top_k bboxes before NMS, + only top top_k will be kept. + max_num (int): if there are more than max_num bboxes after NMS, + only top max_num will be kept. If -1, keep all the bboxes. + Default: -1. + + Returns: + tuple: (bboxes, labels, coefficients), tensors of shape (k, 5), (k, 1), + and (k, coeffs_dim). Labels are 0-based. 
+ """ + + scores = multi_scores[:, :-1].t() # [#class, n] + scores, idx = scores.sort(1, descending=True) + + idx = idx[:, :top_k].contiguous() + scores = scores[:, :top_k] # [#class, topk] + num_classes, num_dets = idx.size() + boxes = multi_bboxes[idx.view(-1), :].view(num_classes, num_dets, 4) + coeffs = multi_coeffs[idx.view(-1), :].view(num_classes, num_dets, -1) + + iou = bbox_overlaps(boxes, boxes) # [#class, topk, topk] + iou.triu_(diagonal=1) + iou_max, _ = iou.max(dim=1) + + # Now just filter out the ones higher than the threshold + keep = iou_max <= iou_thr + + # Second thresholding introduces 0.2 mAP gain at negligible time cost + keep *= scores > score_thr + + # Assign each kept detection to its corresponding class + classes = torch.arange( + num_classes, device=boxes.device)[:, None].expand_as(keep) + classes = classes[keep] + + boxes = boxes[keep] + coeffs = coeffs[keep] + scores = scores[keep] + + # Only keep the top max_num highest scores across all classes + scores, idx = scores.sort(0, descending=True) + if max_num > 0: + idx = idx[:max_num] + scores = scores[:max_num] + + classes = classes[idx] + boxes = boxes[idx] + coeffs = coeffs[idx] + + cls_dets = torch.cat([boxes, scores[:, None]], dim=1) + return cls_dets, classes, coeffs diff --git a/thirdparty/mmdetection/mmdet/core/post_processing/merge_augs.py b/thirdparty/mmdetection/mmdet/core/post_processing/merge_augs.py new file mode 100644 index 0000000000000000000000000000000000000000..167093ebf1d016806b2b997f28207887231b2e6b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/post_processing/merge_augs.py @@ -0,0 +1,117 @@ +import numpy as np +import torch +from mmcv.ops import nms + +from ..bbox import bbox_mapping_back + + +def merge_aug_proposals(aug_proposals, img_metas, rpn_test_cfg): + """Merge augmented proposals (multiscale, flip, etc.) + + Args: + aug_proposals (list[Tensor]): proposals from different testing + schemes, shape (n, 5). Note that they are not rescaled to the + original image size. + + img_metas (list[dict]): list of image info dict where each dict has: + 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + rpn_test_cfg (dict): rpn test config. + + Returns: + Tensor: shape (n, 4), proposals corresponding to original image scale. + """ + recovered_proposals = [] + for proposals, img_info in zip(aug_proposals, img_metas): + img_shape = img_info['img_shape'] + scale_factor = img_info['scale_factor'] + flip = img_info['flip'] + flip_direction = img_info['flip_direction'] + _proposals = proposals.clone() + _proposals[:, :4] = bbox_mapping_back(_proposals[:, :4], img_shape, + scale_factor, flip, + flip_direction) + recovered_proposals.append(_proposals) + aug_proposals = torch.cat(recovered_proposals, dim=0) + merged_proposals, _ = nms(aug_proposals[:, :4].contiguous(), + aug_proposals[:, -1].contiguous(), + rpn_test_cfg.nms_thr) + scores = merged_proposals[:, 4] + _, order = scores.sort(0, descending=True) + num = min(rpn_test_cfg.max_num, merged_proposals.shape[0]) + order = order[:num] + merged_proposals = merged_proposals[order, :] + return merged_proposals + + +def merge_aug_bboxes(aug_bboxes, aug_scores, img_metas, rcnn_test_cfg): + """Merge augmented detection bboxes and scores. 
+ + Args: + aug_bboxes (list[Tensor]): shape (n, 4*#class) + aug_scores (list[Tensor] or None): shape (n, #class) + img_shapes (list[Tensor]): shape (3, ). + rcnn_test_cfg (dict): rcnn test config. + + Returns: + tuple: (bboxes, scores) + """ + recovered_bboxes = [] + for bboxes, img_info in zip(aug_bboxes, img_metas): + img_shape = img_info[0]['img_shape'] + scale_factor = img_info[0]['scale_factor'] + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip, + flip_direction) + recovered_bboxes.append(bboxes) + bboxes = torch.stack(recovered_bboxes).mean(dim=0) + if aug_scores is None: + return bboxes + else: + scores = torch.stack(aug_scores).mean(dim=0) + return bboxes, scores + + +def merge_aug_scores(aug_scores): + """Merge augmented bbox scores.""" + if isinstance(aug_scores[0], torch.Tensor): + return torch.mean(torch.stack(aug_scores), dim=0) + else: + return np.mean(aug_scores, axis=0) + + +def merge_aug_masks(aug_masks, img_metas, rcnn_test_cfg, weights=None): + """Merge augmented mask prediction. + + Args: + aug_masks (list[ndarray]): shape (n, #class, h, w) + img_shapes (list[ndarray]): shape (3, ). + rcnn_test_cfg (dict): rcnn test config. + + Returns: + tuple: (bboxes, scores) + """ + recovered_masks = [] + for mask, img_info in zip(aug_masks, img_metas): + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + if flip: + if flip_direction == 'horizontal': + mask = mask[:, :, :, ::-1] + elif flip_direction == 'vertical': + mask = mask[:, :, ::-1, :] + else: + raise ValueError( + f"Invalid flipping direction '{flip_direction}'") + recovered_masks.append(mask) + + if weights is None: + merged_masks = np.mean(recovered_masks, axis=0) + else: + merged_masks = np.average( + np.array(recovered_masks), axis=0, weights=np.array(weights)) + return merged_masks diff --git a/thirdparty/mmdetection/mmdet/core/utils/__init__.py b/thirdparty/mmdetection/mmdet/core/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b49b3fc468f29121b8ea74ede09b6aea069d47fb --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/utils/__init__.py @@ -0,0 +1,7 @@ +from .dist_utils import DistOptimizerHook, allreduce_grads, reduce_mean +from .misc import multi_apply, unmap + +__all__ = [ + 'allreduce_grads', 'DistOptimizerHook', 'reduce_mean', 'multi_apply', + 'unmap' +] diff --git a/thirdparty/mmdetection/mmdet/core/utils/dist_utils.py b/thirdparty/mmdetection/mmdet/core/utils/dist_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5fe77753313783f95bd7111038ef8b58ee4e4bc5 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/utils/dist_utils.py @@ -0,0 +1,69 @@ +import warnings +from collections import OrderedDict + +import torch.distributed as dist +from mmcv.runner import OptimizerHook +from torch._utils import (_flatten_dense_tensors, _take_tensors, + _unflatten_dense_tensors) + + +def _allreduce_coalesced(tensors, world_size, bucket_size_mb=-1): + if bucket_size_mb > 0: + bucket_size_bytes = bucket_size_mb * 1024 * 1024 + buckets = _take_tensors(tensors, bucket_size_bytes) + else: + buckets = OrderedDict() + for tensor in tensors: + tp = tensor.type() + if tp not in buckets: + buckets[tp] = [] + buckets[tp].append(tensor) + buckets = buckets.values() + + for bucket in buckets: + flat_tensors = _flatten_dense_tensors(bucket) + dist.all_reduce(flat_tensors) + flat_tensors.div_(world_size) + for tensor, synced in zip( + bucket, 
_unflatten_dense_tensors(flat_tensors, bucket)): + tensor.copy_(synced) + + +def allreduce_grads(params, coalesce=True, bucket_size_mb=-1): + """Allreduce gradients. + + Args: + params (list[torch.Parameters]): List of parameters of a model + coalesce (bool, optional): Whether allreduce parameters as a whole. + Defaults to True. + bucket_size_mb (int, optional): Size of bucket, the unit is MB. + Defaults to -1. + """ + grads = [ + param.grad.data for param in params + if param.requires_grad and param.grad is not None + ] + world_size = dist.get_world_size() + if coalesce: + _allreduce_coalesced(grads, world_size, bucket_size_mb) + else: + for tensor in grads: + dist.all_reduce(tensor.div_(world_size)) + + +class DistOptimizerHook(OptimizerHook): + """Deprecated optimizer hook for distributed training.""" + + def __init__(self, *args, **kwargs): + warnings.warn('"DistOptimizerHook" is deprecated, please switch to' + '"mmcv.runner.OptimizerHook".') + super().__init__(*args, **kwargs) + + +def reduce_mean(tensor): + """"Obtain the mean of tensor on different GPUs.""" + if not (dist.is_available() and dist.is_initialized()): + return tensor + tensor = tensor.clone() + dist.all_reduce(tensor.div_(dist.get_world_size()), op=dist.ReduceOp.SUM) + return tensor diff --git a/thirdparty/mmdetection/mmdet/core/utils/misc.py b/thirdparty/mmdetection/mmdet/core/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..54ac8198f7ce5eb932f8b49b1e8fb05b684488e1 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/core/utils/misc.py @@ -0,0 +1,39 @@ +from functools import partial + +import torch +from six.moves import map, zip + + +def multi_apply(func, *args, **kwargs): + """Apply function to a list of arguments. + + Note: + This function applies the ``func`` to multiple inputs and + map the multiple outputs of the ``func`` into different + list. Each list contains the same type of outputs corresponding + to different inputs. 
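The note above is easiest to see on a toy function: `multi_apply` maps a multi-output function over the inputs, then `zip(*...)` regroups the per-input tuples into per-output lists:

```python
# Sketch of the map/zip regrouping performed by multi_apply.
def square_and_cube(x):
    return x * x, x ** 3

map_results = map(square_and_cube, [1, 2, 3])   # (1, 1), (4, 8), (9, 27)
squares, cubes = tuple(map(list, zip(*map_results)))
print(squares, cubes)                           # [1, 4, 9] [1, 8, 27]
```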
+ + Args: + func (Function): A function that will be applied to a list of + arguments + + Returns: + tuple(list): A tuple containing multiple list, each list contains \ + a kind of returned results by the function + """ + pfunc = partial(func, **kwargs) if kwargs else func + map_results = map(pfunc, *args) + return tuple(map(list, zip(*map_results))) + + +def unmap(data, count, inds, fill=0): + """Unmap a subset of item (data) back to the original set of items (of size + count)""" + if data.dim() == 1: + ret = data.new_full((count, ), fill) + ret[inds.type(torch.bool)] = data + else: + new_size = (count, ) + data.size()[1:] + ret = data.new_full(new_size, fill) + ret[inds.type(torch.bool), :] = data + return ret diff --git a/thirdparty/mmdetection/mmdet/datasets/__init__.py b/thirdparty/mmdetection/mmdet/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9b811d1caeabfe809ce20acb00d8e9f1c8f49211 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/__init__.py @@ -0,0 +1,22 @@ +from .builder import DATASETS, PIPELINES, build_dataloader, build_dataset +from .cityscapes import CityscapesDataset +from .coco import CocoDataset +from .custom import CustomDataset +from .dataset_wrappers import (ClassBalancedDataset, ConcatDataset, + RepeatDataset) +from .deepfashion import DeepFashionDataset +from .lvis import LVISDataset, LVISV1Dataset, LVISV05Dataset +from .samplers import DistributedGroupSampler, DistributedSampler, GroupSampler +from .utils import replace_ImageToTensor +from .voc import VOCDataset +from .wider_face import WIDERFaceDataset +from .xml_style import XMLDataset + +__all__ = [ + 'CustomDataset', 'XMLDataset', 'CocoDataset', 'DeepFashionDataset', + 'VOCDataset', 'CityscapesDataset', 'LVISDataset', 'LVISV05Dataset', + 'LVISV1Dataset', 'GroupSampler', 'DistributedGroupSampler', + 'DistributedSampler', 'build_dataloader', 'ConcatDataset', 'RepeatDataset', + 'ClassBalancedDataset', 'WIDERFaceDataset', 'DATASETS', 'PIPELINES', + 'build_dataset', 'replace_ImageToTensor' +] diff --git a/thirdparty/mmdetection/mmdet/datasets/builder.py b/thirdparty/mmdetection/mmdet/datasets/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..16d9ae34b8ec1852b095bdcd75e89a8f2be67efb --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/builder.py @@ -0,0 +1,143 @@ +import copy +import platform +import random +from functools import partial + +import numpy as np +from mmcv.parallel import collate +from mmcv.runner import get_dist_info +from mmcv.utils import Registry, build_from_cfg +from torch.utils.data import DataLoader + +from .samplers import DistributedGroupSampler, DistributedSampler, GroupSampler + +if platform.system() != 'Windows': + # https://github.com/pytorch/pytorch/issues/973 + import resource + rlimit = resource.getrlimit(resource.RLIMIT_NOFILE) + hard_limit = rlimit[1] + soft_limit = min(4096, hard_limit) + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + +DATASETS = Registry('dataset') +PIPELINES = Registry('pipeline') + + +def _concat_dataset(cfg, default_args=None): + from .dataset_wrappers import ConcatDataset + ann_files = cfg['ann_file'] + img_prefixes = cfg.get('img_prefix', None) + seg_prefixes = cfg.get('seg_prefix', None) + proposal_files = cfg.get('proposal_file', None) + separate_eval = cfg.get('separate_eval', True) + + datasets = [] + num_dset = len(ann_files) + for i in range(num_dset): + data_cfg = copy.deepcopy(cfg) + # pop 'separate_eval' since it is not a valid key for common 
datasets. + if 'separate_eval' in data_cfg: + data_cfg.pop('separate_eval') + data_cfg['ann_file'] = ann_files[i] + if isinstance(img_prefixes, (list, tuple)): + data_cfg['img_prefix'] = img_prefixes[i] + if isinstance(seg_prefixes, (list, tuple)): + data_cfg['seg_prefix'] = seg_prefixes[i] + if isinstance(proposal_files, (list, tuple)): + data_cfg['proposal_file'] = proposal_files[i] + datasets.append(build_dataset(data_cfg, default_args)) + + return ConcatDataset(datasets, separate_eval) + + +def build_dataset(cfg, default_args=None): + from .dataset_wrappers import (ConcatDataset, RepeatDataset, + ClassBalancedDataset) + if isinstance(cfg, (list, tuple)): + dataset = ConcatDataset([build_dataset(c, default_args) for c in cfg]) + elif cfg['type'] == 'ConcatDataset': + dataset = ConcatDataset( + [build_dataset(c, default_args) for c in cfg['datasets']], + cfg.get('separate_eval', True)) + elif cfg['type'] == 'RepeatDataset': + dataset = RepeatDataset( + build_dataset(cfg['dataset'], default_args), cfg['times']) + elif cfg['type'] == 'ClassBalancedDataset': + dataset = ClassBalancedDataset( + build_dataset(cfg['dataset'], default_args), cfg['oversample_thr']) + elif isinstance(cfg.get('ann_file'), (list, tuple)): + dataset = _concat_dataset(cfg, default_args) + else: + dataset = build_from_cfg(cfg, DATASETS, default_args) + + return dataset + + +def build_dataloader(dataset, + samples_per_gpu, + workers_per_gpu, + num_gpus=1, + dist=True, + shuffle=True, + seed=None, + **kwargs): + """Build PyTorch DataLoader. + + In distributed training, each GPU/process has a dataloader. + In non-distributed training, there is only one dataloader for all GPUs. + + Args: + dataset (Dataset): A PyTorch dataset. + samples_per_gpu (int): Number of training samples on each GPU, i.e., + batch size of each GPU. + workers_per_gpu (int): How many subprocesses to use for data loading + for each GPU. + num_gpus (int): Number of GPUs. Only used in non-distributed training. + dist (bool): Distributed training/test or not. Default: True. + shuffle (bool): Whether to shuffle the data at every epoch. + Default: True. + kwargs: any keyword argument to be used to initialize DataLoader + + Returns: + DataLoader: A PyTorch dataloader. 
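The seeding scheme implemented by `worker_init_fn` at the end of this file gives every `(rank, worker)` pair a distinct, reproducible seed, so data augmentation differs across workers while runs stay repeatable. A quick illustration of the arithmetic:

```python
# worker_seed = num_workers * rank + worker_id + user_seed
num_workers, user_seed = 4, 42
for rank in range(2):                      # e.g. 2 GPUs
    for worker_id in range(num_workers):
        print(rank, worker_id, num_workers * rank + worker_id + user_seed)
# rank 0 gets seeds 42..45, rank 1 gets 46..49 -- no two workers collide
```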
+ """ + rank, world_size = get_dist_info() + if dist: + # DistributedGroupSampler will definitely shuffle the data to satisfy + # that images on each GPU are in the same group + if shuffle: + sampler = DistributedGroupSampler(dataset, samples_per_gpu, + world_size, rank) + else: + sampler = DistributedSampler( + dataset, world_size, rank, shuffle=False) + batch_size = samples_per_gpu + num_workers = workers_per_gpu + else: + sampler = GroupSampler(dataset, samples_per_gpu) if shuffle else None + batch_size = num_gpus * samples_per_gpu + num_workers = num_gpus * workers_per_gpu + + init_fn = partial( + worker_init_fn, num_workers=num_workers, rank=rank, + seed=seed) if seed is not None else None + + data_loader = DataLoader( + dataset, + batch_size=batch_size, + sampler=sampler, + num_workers=num_workers, + collate_fn=partial(collate, samples_per_gpu=samples_per_gpu), + pin_memory=False, + worker_init_fn=init_fn, + **kwargs) + + return data_loader + + +def worker_init_fn(worker_id, num_workers, rank, seed): + # The seed of each worker equals to + # num_worker * rank + worker_id + user_seed + worker_seed = num_workers * rank + worker_id + seed + np.random.seed(worker_seed) + random.seed(worker_seed) diff --git a/thirdparty/mmdetection/mmdet/datasets/cityscapes.py b/thirdparty/mmdetection/mmdet/datasets/cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..71eead87e7f4e511c0cb59e69c3a599832ada0e4 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/cityscapes.py @@ -0,0 +1,334 @@ +# Modified from https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/cityscapes.py # noqa +# and https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa + +import glob +import os +import os.path as osp +import tempfile +from collections import OrderedDict + +import mmcv +import numpy as np +import pycocotools.mask as maskUtils +from mmcv.utils import print_log + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class CityscapesDataset(CocoDataset): + + CLASSES = ('person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', + 'bicycle') + + def _filter_imgs(self, min_size=32): + """Filter images too small or without ground truths.""" + valid_inds = [] + # obtain images that contain annotation + ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values()) + # obtain images that contain annotations of the required categories + ids_in_cat = set() + for i, class_id in enumerate(self.cat_ids): + ids_in_cat |= set(self.coco.cat_img_map[class_id]) + # merge the image id sets of the two conditions and use the merged set + # to filter out images if self.filter_empty_gt=True + ids_in_cat &= ids_with_ann + + valid_img_ids = [] + for i, img_info in enumerate(self.data_infos): + img_id = img_info['id'] + ann_ids = self.coco.getAnnIds(imgIds=[img_id]) + ann_info = self.coco.loadAnns(ann_ids) + all_iscrowd = all([_['iscrowd'] for _ in ann_info]) + if self.filter_empty_gt and (self.img_ids[i] not in ids_in_cat + or all_iscrowd): + continue + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + valid_img_ids.append(img_id) + self.img_ids = valid_img_ids + return valid_inds + + def _parse_ann_info(self, img_info, ann_info): + """Parse bbox and mask annotation. + + Args: + img_info (dict): Image info of an image. + ann_info (list[dict]): Annotation info of an image. 
+
+        Returns:
+            dict: A dict containing the following keys: bboxes, \
+                bboxes_ignore, labels, masks, seg_map. \
+                "masks" are already decoded into binary masks.
+        """
+        gt_bboxes = []
+        gt_labels = []
+        gt_bboxes_ignore = []
+        gt_masks_ann = []
+
+        for i, ann in enumerate(ann_info):
+            if ann.get('ignore', False):
+                continue
+            x1, y1, w, h = ann['bbox']
+            if ann['area'] <= 0 or w < 1 or h < 1:
+                continue
+            if ann['category_id'] not in self.cat_ids:
+                continue
+            bbox = [x1, y1, x1 + w, y1 + h]
+            if ann.get('iscrowd', False):
+                gt_bboxes_ignore.append(bbox)
+            else:
+                gt_bboxes.append(bbox)
+                gt_labels.append(self.cat2label[ann['category_id']])
+                gt_masks_ann.append(ann['segmentation'])
+
+        if gt_bboxes:
+            gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
+            gt_labels = np.array(gt_labels, dtype=np.int64)
+        else:
+            gt_bboxes = np.zeros((0, 4), dtype=np.float32)
+            gt_labels = np.array([], dtype=np.int64)
+
+        if gt_bboxes_ignore:
+            gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32)
+        else:
+            gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32)
+
+        ann = dict(
+            bboxes=gt_bboxes,
+            labels=gt_labels,
+            bboxes_ignore=gt_bboxes_ignore,
+            masks=gt_masks_ann,
+            seg_map=img_info['segm_file'])
+
+        return ann
+
+    def results2txt(self, results, outfile_prefix):
+        """Dump the detection results to a txt file.
+
+        Args:
+            results (list[list | tuple]): Testing results of the
+                dataset.
+            outfile_prefix (str): The filename prefix of the output txt files.
+                If the prefix is "somepath/xxx",
+                the txt files will be named "somepath/xxx.txt".
+
+        Returns:
+            list[str]: Result txt files which contain corresponding \
+                instance segmentation images.
+        """
+        try:
+            import cityscapesscripts.helpers.labels as CSLabels
+        except ImportError:
+            raise ImportError('Please run "pip install cityscapesscripts" to '
+                              'install cityscapesscripts first.')
+        result_files = []
+        os.makedirs(outfile_prefix, exist_ok=True)
+        prog_bar = mmcv.ProgressBar(len(self))
+        for idx in range(len(self)):
+            result = results[idx]
+            filename = self.data_infos[idx]['filename']
+            basename = osp.splitext(osp.basename(filename))[0]
+            pred_txt = osp.join(outfile_prefix, basename + '_pred.txt')
+
+            bbox_result, segm_result = result
+            bboxes = np.vstack(bbox_result)
+            # segm results
+            if isinstance(segm_result, tuple):
+                # Some detectors use different scores for bbox and mask,
+                # like Mask Scoring R-CNN. Score of segm will be used instead
+                # of bbox score.
+                segms = mmcv.concat_list(segm_result[0])
+                mask_score = segm_result[1]
+            else:
+                # use bbox score for mask score
+                segms = mmcv.concat_list(segm_result)
+                mask_score = [bbox[-1] for bbox in bboxes]
+            labels = [
+                np.full(bbox.shape[0], i, dtype=np.int32)
+                for i, bbox in enumerate(bbox_result)
+            ]
+            labels = np.concatenate(labels)
+
+            assert len(bboxes) == len(segms) == len(labels)
+            num_instances = len(bboxes)
+            prog_bar.update()
+            with open(pred_txt, 'w') as fout:
+                for i in range(num_instances):
+                    pred_class = labels[i]
+                    classes = self.CLASSES[pred_class]
+                    class_id = CSLabels.name2label[classes].id
+                    score = mask_score[i]
+                    mask = maskUtils.decode(segms[i]).astype(np.uint8)
+                    png_filename = osp.join(outfile_prefix,
+                                            basename + f'_{i}_{classes}.png')
+                    mmcv.imwrite(mask, png_filename)
+                    fout.write(f'{osp.basename(png_filename)} {class_id} '
+                               f'{score}\n')
+            result_files.append(pred_txt)
+
+        return result_files
+
+    def format_results(self, results, txtfile_prefix=None):
+        """Format the results to txt (standard format for Cityscapes
+        evaluation).
+
+        Args:
+            results (list): Testing results of the dataset.
+            txtfile_prefix (str | None): The prefix of txt files. It includes
+                the file path and the prefix of filename, e.g., "a/b/prefix".
+                If not specified, a temp file will be created. Default: None.
+
+        Returns:
+            tuple: (result_files, tmp_dir), result_files is a dict containing \
+                the txt filepaths, tmp_dir is the temporary directory created \
+                for saving txt/png files when txtfile_prefix is not specified.
+        """
+        assert isinstance(results, list), 'results must be a list'
+        assert len(results) == len(self), (
+            'The length of results is not equal to the dataset len: {} != {}'.
+            format(len(results), len(self)))
+
+        if txtfile_prefix is None:
+            tmp_dir = tempfile.TemporaryDirectory()
+            txtfile_prefix = osp.join(tmp_dir.name, 'results')
+        else:
+            tmp_dir = None
+        result_files = self.results2txt(results, txtfile_prefix)
+
+        return result_files, tmp_dir
+
+    def evaluate(self,
+                 results,
+                 metric='bbox',
+                 logger=None,
+                 outfile_prefix=None,
+                 classwise=False,
+                 proposal_nums=(100, 300, 1000),
+                 iou_thrs=np.arange(0.5, 0.96, 0.05)):
+        """Evaluation in Cityscapes/COCO protocol.
+
+        Args:
+            results (list[list | tuple]): Testing results of the dataset.
+            metric (str | list[str]): Metrics to be evaluated. Options are
+                'bbox', 'segm', 'proposal', 'proposal_fast'.
+            logger (logging.Logger | str | None): Logger used for printing
+                related information during evaluation. Default: None.
+            outfile_prefix (str | None): The prefix of output file. It includes
+                the file path and the prefix of filename, e.g., "a/b/prefix".
+                If results are evaluated with COCO protocol, it would be the
+                prefix of output json file. For example, the metric is 'bbox'
+                and 'segm', then json files would be "a/b/prefix.bbox.json" and
+                "a/b/prefix.segm.json".
+                If results are evaluated with cityscapes protocol, it would be
+                the prefix of output txt/png files. The output files would be
+                png images under folder "a/b/prefix/xxx/" and the file name of
+                images would be written into a txt file
+                "a/b/prefix/xxx_pred.txt", where "xxx" is the video name of
+                cityscapes. If not specified, a temp file will be created.
+                Default: None.
+            classwise (bool): Whether to evaluate the AP for each class.
+            proposal_nums (Sequence[int]): Proposal number used for evaluating
+                recalls, such as recall@100, recall@1000.
+                Default: (100, 300, 1000).
+            iou_thrs (Sequence[float]): IoU thresholds used for evaluating
+                recalls/mAPs. If set to a list, the average over all IoUs will
+                also be computed. Default: np.arange(0.5, 0.96, 0.05).
+
+        Returns:
+            dict[str, float]: COCO style evaluation metric or cityscapes mAP \
+                and AP@50.
+ """ + eval_results = dict() + + metrics = metric.copy() if isinstance(metric, list) else [metric] + + if 'cityscapes' in metrics: + eval_results.update( + self._evaluate_cityscapes(results, outfile_prefix, logger)) + metrics.remove('cityscapes') + + # left metrics are all coco metric + if len(metrics) > 0: + # create CocoDataset with CityscapesDataset annotation + self_coco = CocoDataset(self.ann_file, self.pipeline.transforms, + None, self.data_root, self.img_prefix, + self.seg_prefix, self.proposal_file, + self.test_mode, self.filter_empty_gt) + # TODO: remove this in the future + # reload annotations of correct class + self_coco.CLASSES = self.CLASSES + self_coco.data_infos = self_coco.load_annotations(self.ann_file) + eval_results.update( + self_coco.evaluate(results, metrics, logger, outfile_prefix, + classwise, proposal_nums, iou_thrs)) + + return eval_results + + def _evaluate_cityscapes(self, results, txtfile_prefix, logger): + """Evaluation in Cityscapes protocol. + + Args: + results (list): Testing results of the dataset. + txtfile_prefix (str | None): The prefix of output txt file + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. + + Returns: + dict[str: float]: Cityscapes evaluation results, contains 'mAP' \ + and 'AP@50'. + """ + + try: + import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as CSEval # noqa + except ImportError: + raise ImportError('Please run "pip install citscapesscripts" to ' + 'install cityscapesscripts first.') + msg = 'Evaluating in Cityscapes style' + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + result_files, tmp_dir = self.format_results(results, txtfile_prefix) + + if tmp_dir is None: + result_dir = osp.join(txtfile_prefix, 'results') + else: + result_dir = osp.join(tmp_dir.name, 'results') + + eval_results = OrderedDict() + print_log(f'Evaluating results under {result_dir} ...', logger=logger) + + # set global states in cityscapes evaluation API + CSEval.args.cityscapesPath = os.path.join(self.img_prefix, '../..') + CSEval.args.predictionPath = os.path.abspath(result_dir) + CSEval.args.predictionWalk = None + CSEval.args.JSONOutput = False + CSEval.args.colorized = False + CSEval.args.gtInstancesFile = os.path.join(result_dir, + 'gtInstances.json') + CSEval.args.groundTruthSearch = os.path.join( + self.img_prefix.replace('leftImg8bit', 'gtFine'), + '*/*_gtFine_instanceIds.png') + + groundTruthImgList = glob.glob(CSEval.args.groundTruthSearch) + assert len(groundTruthImgList), 'Cannot find ground truth images' \ + f' in {CSEval.args.groundTruthSearch}.' 
+ predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(CSEval.getPrediction(gt, CSEval.args)) + CSEval_results = CSEval.evaluateImgLists(predictionImgList, + groundTruthImgList, + CSEval.args)['averages'] + + eval_results['mAP'] = CSEval_results['allAp'] + eval_results['AP@50'] = CSEval_results['allAp50%'] + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results diff --git a/thirdparty/mmdetection/mmdet/datasets/coco.py b/thirdparty/mmdetection/mmdet/datasets/coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eea6a4fd35c22a383e87db415bc0171db4248de --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/coco.py @@ -0,0 +1,544 @@ +import itertools +import logging +import os.path as osp +import tempfile +from collections import OrderedDict + +import mmcv +import numpy as np +from mmcv.utils import print_log +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from terminaltables import AsciiTable + +from mmdet.core import eval_recalls +from .builder import DATASETS +from .custom import CustomDataset + +try: + import pycocotools + if not hasattr(pycocotools, '__sphinx_mock__'): # for doc generation + assert pycocotools.__version__ >= '12.0.2' +except AssertionError: + raise AssertionError('Incompatible version of pycocotools is installed. ' + 'Run pip uninstall pycocotools first. Then run pip ' + 'install mmpycocotools to install open-mmlab forked ' + 'pycocotools.') + + +@DATASETS.register_module() +class CocoDataset(CustomDataset): + + CLASSES = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', + 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', + 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', + 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush') + + def load_annotations(self, ann_file): + """Load annotation from COCO style annotation file. + + Args: + ann_file (str): Path of annotation file. + + Returns: + list[dict]: Annotation info from COCO api. + """ + + self.coco = COCO(ann_file) + self.cat_ids = self.coco.get_cat_ids(cat_names=self.CLASSES) + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + data_infos = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + info['filename'] = info['file_name'] + data_infos.append(info) + return data_infos + + def get_ann_info(self, idx): + """Get COCO annotation by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. + """ + + img_id = self.data_infos[idx]['id'] + ann_ids = self.coco.get_ann_ids(img_ids=[img_id]) + ann_info = self.coco.load_anns(ann_ids) + return self._parse_ann_info(self.data_infos[idx], ann_info) + + def get_cat_ids(self, idx): + """Get COCO category ids by index. + + Args: + idx (int): Index of data. 
+ + Returns: + list[int]: All categories in the image of specified index. + """ + + img_id = self.data_infos[idx]['id'] + ann_ids = self.coco.get_ann_ids(img_ids=[img_id]) + ann_info = self.coco.load_anns(ann_ids) + return [ann['category_id'] for ann in ann_info] + + def _filter_imgs(self, min_size=32): + """Filter images too small or without ground truths.""" + valid_inds = [] + # obtain images that contain annotation + ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values()) + # obtain images that contain annotations of the required categories + ids_in_cat = set() + for i, class_id in enumerate(self.cat_ids): + ids_in_cat |= set(self.coco.cat_img_map[class_id]) + # merge the image id sets of the two conditions and use the merged set + # to filter out images if self.filter_empty_gt=True + ids_in_cat &= ids_with_ann + + valid_img_ids = [] + for i, img_info in enumerate(self.data_infos): + img_id = self.img_ids[i] + if self.filter_empty_gt and img_id not in ids_in_cat: + continue + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + valid_img_ids.append(img_id) + self.img_ids = valid_img_ids + return valid_inds + + def _parse_ann_info(self, img_info, ann_info): + """Parse bbox and mask annotation. + + Args: + ann_info (list[dict]): Annotation info of an image. + with_mask (bool): Whether to parse mask annotations. + + Returns: + dict: A dict containing the following keys: bboxes, bboxes_ignore,\ + labels, masks, seg_map. "masks" are raw annotations and not \ + decoded into binary masks. + """ + gt_bboxes = [] + gt_labels = [] + gt_bboxes_ignore = [] + gt_masks_ann = [] + for i, ann in enumerate(ann_info): + if ann.get('ignore', False): + continue + x1, y1, w, h = ann['bbox'] + inter_w = max(0, min(x1 + w, img_info['width']) - max(x1, 0)) + inter_h = max(0, min(y1 + h, img_info['height']) - max(y1, 0)) + if inter_w * inter_h == 0: + continue + if ann['area'] <= 0 or w < 1 or h < 1: + continue + if ann['category_id'] not in self.cat_ids: + continue + bbox = [x1, y1, x1 + w, y1 + h] + if ann.get('iscrowd', False): + gt_bboxes_ignore.append(bbox) + else: + gt_bboxes.append(bbox) + gt_labels.append(self.cat2label[ann['category_id']]) + gt_masks_ann.append(ann.get('segmentation', None)) + + if gt_bboxes: + gt_bboxes = np.array(gt_bboxes, dtype=np.float32) + gt_labels = np.array(gt_labels, dtype=np.int64) + else: + gt_bboxes = np.zeros((0, 4), dtype=np.float32) + gt_labels = np.array([], dtype=np.int64) + + if gt_bboxes_ignore: + gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32) + else: + gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32) + + seg_map = img_info['filename'].replace('jpg', 'png') + + ann = dict( + bboxes=gt_bboxes, + labels=gt_labels, + bboxes_ignore=gt_bboxes_ignore, + masks=gt_masks_ann, + seg_map=seg_map) + + return ann + + def xyxy2xywh(self, bbox): + """Convert ``xyxy`` style bounding boxes to ``xywh`` style for COCO + evaluation. + + Args: + bbox (numpy.ndarray): The bounding boxes, shape (4, ), in + ``xyxy`` order. + + Returns: + list[float]: The converted bounding boxes, in ``xywh`` order. 
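`xyxy2xywh` (body follows) undoes the corner-format convention used inside the detector; COCO result json expects `[x, y, width, height]`. The conversion is just two subtractions:

```python
import numpy as np

bbox = np.array([10., 20., 50., 80.])       # x1, y1, x2, y2
xywh = [bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]]
print(xywh)                                 # [10.0, 20.0, 40.0, 60.0]
```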
+ """ + + _bbox = bbox.tolist() + return [ + _bbox[0], + _bbox[1], + _bbox[2] - _bbox[0], + _bbox[3] - _bbox[1], + ] + + def _proposal2json(self, results): + """Convert proposal results to COCO json style.""" + json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + bboxes = results[idx] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = 1 + json_results.append(data) + return json_results + + def _det2json(self, results): + """Convert detection results to COCO json style.""" + json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + result = results[idx] + for label in range(len(result)): + bboxes = result[label] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = self.cat_ids[label] + json_results.append(data) + return json_results + + def _segm2json(self, results): + """Convert instance segmentation results to COCO json style.""" + bbox_json_results = [] + segm_json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + det, seg = results[idx] + for label in range(len(det)): + # bbox results + bboxes = det[label] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = self.cat_ids[label] + bbox_json_results.append(data) + + # segm results + # some detectors use different scores for bbox and mask + if isinstance(seg, tuple): + segms = seg[0][label] + mask_score = seg[1][label] + else: + segms = seg[label] + mask_score = [bbox[4] for bbox in bboxes] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(mask_score[i]) + data['category_id'] = self.cat_ids[label] + if isinstance(segms[i]['counts'], bytes): + segms[i]['counts'] = segms[i]['counts'].decode() + data['segmentation'] = segms[i] + segm_json_results.append(data) + return bbox_json_results, segm_json_results + + def results2json(self, results, outfile_prefix): + """Dump the detection results to a COCO style json file. + + There are 3 types of results: proposals, bbox predictions, mask + predictions, and they have different data types. This method will + automatically recognize the type, and dump them to json files. + + Args: + results (list[list | tuple | ndarray]): Testing results of the + dataset. + outfile_prefix (str): The filename prefix of the json files. If the + prefix is "somepath/xxx", the json files will be named + "somepath/xxx.bbox.json", "somepath/xxx.segm.json", + "somepath/xxx.proposal.json". + + Returns: + dict[str: str]: Possible keys are "bbox", "segm", "proposal", and \ + values are corresponding filenames. 
+ """ + result_files = dict() + if isinstance(results[0], list): + json_results = self._det2json(results) + result_files['bbox'] = f'{outfile_prefix}.bbox.json' + result_files['proposal'] = f'{outfile_prefix}.bbox.json' + mmcv.dump(json_results, result_files['bbox']) + elif isinstance(results[0], tuple): + json_results = self._segm2json(results) + result_files['bbox'] = f'{outfile_prefix}.bbox.json' + result_files['proposal'] = f'{outfile_prefix}.bbox.json' + result_files['segm'] = f'{outfile_prefix}.segm.json' + mmcv.dump(json_results[0], result_files['bbox']) + mmcv.dump(json_results[1], result_files['segm']) + elif isinstance(results[0], np.ndarray): + json_results = self._proposal2json(results) + result_files['proposal'] = f'{outfile_prefix}.proposal.json' + mmcv.dump(json_results, result_files['proposal']) + else: + raise TypeError('invalid type of results') + return result_files + + def fast_eval_recall(self, results, proposal_nums, iou_thrs, logger=None): + gt_bboxes = [] + for i in range(len(self.img_ids)): + ann_ids = self.coco.get_ann_ids(img_ids=self.img_ids[i]) + ann_info = self.coco.load_anns(ann_ids) + if len(ann_info) == 0: + gt_bboxes.append(np.zeros((0, 4))) + continue + bboxes = [] + for ann in ann_info: + if ann.get('ignore', False) or ann['iscrowd']: + continue + x1, y1, w, h = ann['bbox'] + bboxes.append([x1, y1, x1 + w, y1 + h]) + bboxes = np.array(bboxes, dtype=np.float32) + if bboxes.shape[0] == 0: + bboxes = np.zeros((0, 4)) + gt_bboxes.append(bboxes) + + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thrs, logger=logger) + ar = recalls.mean(axis=1) + return ar + + def format_results(self, results, jsonfile_prefix=None, **kwargs): + """Format the results to json (standard format for COCO evaluation). + + Args: + results (list[tuple | numpy.ndarray]): Testing results of the + dataset. + jsonfile_prefix (str | None): The prefix of json files. It includes + the file path and the prefix of filename, e.g., "a/b/prefix". + If not specified, a temp file will be created. Default: None. + + Returns: + tuple: (result_files, tmp_dir), result_files is a dict containing \ + the json filepaths, tmp_dir is the temporal directory created \ + for saving json files when jsonfile_prefix is not specified. + """ + assert isinstance(results, list), 'results must be a list' + assert len(results) == len(self), ( + 'The length of results is not equal to the dataset len: {} != {}'. + format(len(results), len(self))) + + if jsonfile_prefix is None: + tmp_dir = tempfile.TemporaryDirectory() + jsonfile_prefix = osp.join(tmp_dir.name, 'results') + else: + tmp_dir = None + result_files = self.results2json(results, jsonfile_prefix) + return result_files, tmp_dir + + def evaluate(self, + results, + metric='bbox', + logger=None, + jsonfile_prefix=None, + classwise=False, + proposal_nums=(100, 300, 1000), + iou_thrs=None, + metric_items=None): + """Evaluation in COCO protocol. + + Args: + results (list[list | tuple]): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. Options are + 'bbox', 'segm', 'proposal', 'proposal_fast'. + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. + jsonfile_prefix (str | None): The prefix of json files. It includes + the file path and the prefix of filename, e.g., "a/b/prefix". + If not specified, a temp file will be created. Default: None. + classwise (bool): Whether to evaluating the AP for each class. 
+ proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thrs (Sequence[float], optional): IoU threshold used for + evaluating recalls/mAPs. If set to a list, the average of all + IoUs will also be computed. If not specified, [0.50, 0.55, + 0.60, 0.65, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95] will be used. + Default: None. + metric_items (list[str] | str, optional): Metric items that will + be returned. If not specified, ``['AR@100', 'AR@300', + 'AR@1000', 'AR_s@1000', 'AR_m@1000', 'AR_l@1000' ]`` will be + used when ``metric=='proposal'``, ``['mAP', 'mAP_50', 'mAP_75', + 'mAP_s', 'mAP_m', 'mAP_l']`` will be used when + ``metric=='bbox' or metric=='segm'``. + + Returns: + dict[str, float]: COCO style evaluation metric. + """ + + metrics = metric if isinstance(metric, list) else [metric] + allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast'] + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + if iou_thrs is None: + iou_thrs = np.linspace( + .5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True) + if metric_items is not None: + if not isinstance(metric_items, list): + metric_items = [metric_items] + + result_files, tmp_dir = self.format_results(results, jsonfile_prefix) + + eval_results = OrderedDict() + cocoGt = self.coco + for metric in metrics: + msg = f'Evaluating {metric}...' + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'proposal_fast': + ar = self.fast_eval_recall( + results, proposal_nums, iou_thrs, logger='silent') + log_msg = [] + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + log_msg.append(f'\nAR@{num}\t{ar[i]:.4f}') + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + continue + + if metric not in result_files: + raise KeyError(f'{metric} is not in results') + try: + cocoDt = cocoGt.loadRes(result_files[metric]) + except IndexError: + print_log( + 'The testing results of the whole dataset is empty.', + logger=logger, + level=logging.ERROR) + break + + iou_type = 'bbox' if metric == 'proposal' else metric + cocoEval = COCOeval(cocoGt, cocoDt, iou_type) + cocoEval.params.catIds = self.cat_ids + cocoEval.params.imgIds = self.img_ids + cocoEval.params.maxDets = list(proposal_nums) + cocoEval.params.iouThrs = iou_thrs + # mapping of cocoEval.stats + coco_metric_names = { + 'mAP': 0, + 'mAP_50': 1, + 'mAP_75': 2, + 'mAP_s': 3, + 'mAP_m': 4, + 'mAP_l': 5, + 'AR@100': 6, + 'AR@300': 7, + 'AR@1000': 8, + 'AR_s@1000': 9, + 'AR_m@1000': 10, + 'AR_l@1000': 11 + } + if metric_items is not None: + for metric_item in metric_items: + if metric_item not in coco_metric_names: + raise KeyError( + f'metric item {metric_item} is not supported') + + if metric == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if metric_items is None: + metric_items = [ + 'AR@100', 'AR@300', 'AR@1000', 'AR_s@1000', + 'AR_m@1000', 'AR_l@1000' + ] + + for item in metric_items: + val = float( + f'{cocoEval.stats[coco_metric_names[item]]:.3f}') + eval_results[item] = val + else: + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if classwise: # Compute per-category AP + # Compute per-category AP + # from https://github.com/facebookresearch/detectron2/ + precisions = cocoEval.eval['precision'] + # precision: (iou, recall, cls, area range, max dets) + assert len(self.cat_ids) == 
precisions.shape[2]
+
+ results_per_category = []
+ for idx, catId in enumerate(self.cat_ids):
+ # area range index 0: all area ranges
+ # max dets index -1: typically 100 per image
+ nm = self.coco.loadCats(catId)[0]
+ precision = precisions[:, :, idx, 0, -1]
+ precision = precision[precision > -1]
+ if precision.size:
+ ap = np.mean(precision)
+ else:
+ ap = float('nan')
+ results_per_category.append(
+ (f'{nm["name"]}', f'{float(ap):0.3f}'))
+
+ num_columns = min(6, len(results_per_category) * 2)
+ results_flatten = list(
+ itertools.chain(*results_per_category))
+ headers = ['category', 'AP'] * (num_columns // 2)
+ results_2d = itertools.zip_longest(*[
+ results_flatten[i::num_columns]
+ for i in range(num_columns)
+ ])
+ table_data = [headers]
+ table_data += [result for result in results_2d]
+ table = AsciiTable(table_data)
+ print_log('\n' + table.table, logger=logger)
+
+ if metric_items is None:
+ metric_items = [
+ 'mAP', 'mAP_50', 'mAP_75', 'mAP_s', 'mAP_m', 'mAP_l'
+ ]
+
+ for metric_item in metric_items:
+ key = f'{metric}_{metric_item}'
+ val = float(
+ f'{cocoEval.stats[coco_metric_names[metric_item]]:.3f}'
+ )
+ eval_results[key] = val
+ ap = cocoEval.stats[:6]
+ eval_results[f'{metric}_mAP_copypaste'] = (
+ f'{ap[0]:.3f} {ap[1]:.3f} {ap[2]:.3f} {ap[3]:.3f} '
+ f'{ap[4]:.3f} {ap[5]:.3f}')
+ if tmp_dir is not None:
+ tmp_dir.cleanup()
+ return eval_results
diff --git a/thirdparty/mmdetection/mmdet/datasets/custom.py b/thirdparty/mmdetection/mmdet/datasets/custom.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ca16d6bc6769663318897e96b2d8d60eb2851ce
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/datasets/custom.py
@@ -0,0 +1,321 @@
+import os.path as osp
+import warnings
+from collections import OrderedDict
+
+import mmcv
+import numpy as np
+from torch.utils.data import Dataset
+
+from mmdet.core import eval_map, eval_recalls
+from .builder import DATASETS
+from .pipelines import Compose
+
+
+@DATASETS.register_module()
+class CustomDataset(Dataset):
+ """Custom dataset for detection.
+
+ The annotation format is shown as follows. The `ann` field is optional for
+ testing.
+
+ .. code-block:: none
+
+ [
+ {
+ 'filename': 'a.jpg',
+ 'width': 1280,
+ 'height': 720,
+ 'ann': {
+ 'bboxes': (n, 4) in (x1, y1, x2, y2) order.
+ 'labels': (n, ),
+ 'bboxes_ignore': (k, 4), (optional field)
+ 'labels_ignore': (k, ) (optional field)
+ }
+ },
+ ...
+ ]
+
+ Args:
+ ann_file (str): Annotation file path.
+ pipeline (list[dict]): Processing pipeline.
+ classes (str | Sequence[str], optional): Specify classes to load.
+ If None, ``cls.CLASSES`` will be used. Default: None.
+ data_root (str, optional): Data root for ``ann_file``,
+ ``img_prefix``, ``seg_prefix``, ``proposal_file`` if specified.
+ test_mode (bool, optional): If set True, annotation will not be loaded.
+ filter_empty_gt (bool, optional): If set true, images without bounding
+ boxes of the dataset's classes will be filtered out. This option
+ only works when `test_mode=False`, i.e., we never filter images
+ during tests. 
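+
+ Example (an illustrative sketch; the annotation file and image
+ prefix are hypothetical, and the single-step pipeline is
+ deliberately minimal)::
+
+ >>> dataset = CustomDataset(
+ ... ann_file='annotations/train.pkl',
+ ... pipeline=[dict(type='LoadImageFromFile')],
+ ... img_prefix='images/')
+ >>> len(dataset) # samples left after size/empty-gt filtering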
+ """ + + CLASSES = None + + def __init__(self, + ann_file, + pipeline, + classes=None, + data_root=None, + img_prefix='', + seg_prefix=None, + proposal_file=None, + test_mode=False, + filter_empty_gt=True): + self.ann_file = ann_file + self.data_root = data_root + self.img_prefix = img_prefix + self.seg_prefix = seg_prefix + self.proposal_file = proposal_file + self.test_mode = test_mode + self.filter_empty_gt = filter_empty_gt + self.CLASSES = self.get_classes(classes) + + # join paths if data_root is specified + if self.data_root is not None: + if not osp.isabs(self.ann_file): + self.ann_file = osp.join(self.data_root, self.ann_file) + if not (self.img_prefix is None or osp.isabs(self.img_prefix)): + self.img_prefix = osp.join(self.data_root, self.img_prefix) + if not (self.seg_prefix is None or osp.isabs(self.seg_prefix)): + self.seg_prefix = osp.join(self.data_root, self.seg_prefix) + if not (self.proposal_file is None + or osp.isabs(self.proposal_file)): + self.proposal_file = osp.join(self.data_root, + self.proposal_file) + # load annotations (and proposals) + self.data_infos = self.load_annotations(self.ann_file) + + if self.proposal_file is not None: + self.proposals = self.load_proposals(self.proposal_file) + else: + self.proposals = None + + # filter images too small and containing no annotations + if not test_mode: + valid_inds = self._filter_imgs() + self.data_infos = [self.data_infos[i] for i in valid_inds] + if self.proposals is not None: + self.proposals = [self.proposals[i] for i in valid_inds] + # set group flag for the sampler + self._set_group_flag() + + # processing pipeline + self.pipeline = Compose(pipeline) + + def __len__(self): + """Total number of samples of data.""" + return len(self.data_infos) + + def load_annotations(self, ann_file): + """Load annotation from annotation file.""" + return mmcv.load(ann_file) + + def load_proposals(self, proposal_file): + """Load proposal from proposal file.""" + return mmcv.load(proposal_file) + + def get_ann_info(self, idx): + """Get annotation by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. + """ + + return self.data_infos[idx]['ann'] + + def get_cat_ids(self, idx): + """Get category ids by index. + + Args: + idx (int): Index of data. + + Returns: + list[int]: All categories in the image of specified index. + """ + + return self.data_infos[idx]['ann']['labels'].astype(np.int).tolist() + + def pre_pipeline(self, results): + """Prepare results dict for pipeline.""" + results['img_prefix'] = self.img_prefix + results['seg_prefix'] = self.seg_prefix + results['proposal_file'] = self.proposal_file + results['bbox_fields'] = [] + results['mask_fields'] = [] + results['seg_fields'] = [] + + def _filter_imgs(self, min_size=32): + """Filter images too small.""" + if self.filter_empty_gt: + warnings.warn( + 'CustomDataset does not support filtering empty gt images.') + valid_inds = [] + for i, img_info in enumerate(self.data_infos): + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + return valid_inds + + def _set_group_flag(self): + """Set flag according to image aspect ratio. + + Images with aspect ratio greater than 1 will be set as group 1, + otherwise group 0. 
+ """ + self.flag = np.zeros(len(self), dtype=np.uint8) + for i in range(len(self)): + img_info = self.data_infos[i] + if img_info['width'] / img_info['height'] > 1: + self.flag[i] = 1 + + def _rand_another(self, idx): + """Get another random index from the same group as the given index.""" + pool = np.where(self.flag == self.flag[idx])[0] + return np.random.choice(pool) + + def __getitem__(self, idx): + """Get training/test data after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Training/test data (with annotation if `test_mode` is set \ + True). + """ + + if self.test_mode: + return self.prepare_test_img(idx) + while True: + data = self.prepare_train_img(idx) + if data is None: + idx = self._rand_another(idx) + continue + return data + + def prepare_train_img(self, idx): + """Get training data and annotations after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Training data and annotation after pipeline with new keys \ + introduced by pipeline. + """ + + img_info = self.data_infos[idx] + ann_info = self.get_ann_info(idx) + results = dict(img_info=img_info, ann_info=ann_info) + if self.proposals is not None: + results['proposals'] = self.proposals[idx] + self.pre_pipeline(results) + return self.pipeline(results) + + def prepare_test_img(self, idx): + """Get testing data after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Testing data after pipeline with new keys intorduced by \ + piepline. + """ + + img_info = self.data_infos[idx] + results = dict(img_info=img_info) + if self.proposals is not None: + results['proposals'] = self.proposals[idx] + self.pre_pipeline(results) + return self.pipeline(results) + + @classmethod + def get_classes(cls, classes=None): + """Get class names of current dataset. + + Args: + classes (Sequence[str] | str | None): If classes is None, use + default CLASSES defined by builtin dataset. If classes is a + string, take it as a file name. The file contains the name of + classes where each line contains one class name. If classes is + a tuple or list, override the CLASSES defined by the dataset. + + Returns: + tuple[str] or list[str]: Names of categories of the dataset. + """ + if classes is None: + return cls.CLASSES + + if isinstance(classes, str): + # take it as a file path + class_names = mmcv.list_from_file(classes) + elif isinstance(classes, (tuple, list)): + class_names = classes + else: + raise ValueError(f'Unsupported type {type(classes)} of classes.') + + return class_names + + def format_results(self, results, **kwargs): + """Place holder to format result to dataset specific output.""" + pass + + def evaluate(self, + results, + metric='mAP', + logger=None, + proposal_nums=(100, 300, 1000), + iou_thr=0.5, + scale_ranges=None): + """Evaluate the dataset. + + Args: + results (list): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. + logger (logging.Logger | None | str): Logger used for printing + related information during evaluation. Default: None. + proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thr (float | list[float]): IoU threshold. It must be a float + when evaluating mAP, and can be a list when evaluating recall. + Default: 0.5. + scale_ranges (list[tuple] | None): Scale ranges for evaluating mAP. + Default: None. 
+ """ + + if not isinstance(metric, str): + assert len(metric) == 1 + metric = metric[0] + allowed_metrics = ['mAP', 'recall'] + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + annotations = [self.get_ann_info(i) for i in range(len(self))] + eval_results = OrderedDict() + if metric == 'mAP': + assert isinstance(iou_thr, float) + mean_ap, _ = eval_map( + results, + annotations, + scale_ranges=scale_ranges, + iou_thr=iou_thr, + dataset=self.CLASSES, + logger=logger) + eval_results['mAP'] = mean_ap + elif metric == 'recall': + gt_bboxes = [ann['bboxes'] for ann in annotations] + if isinstance(iou_thr, float): + iou_thr = [iou_thr] + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thr, logger=logger) + for i, num in enumerate(proposal_nums): + for j, iou in enumerate(iou_thr): + eval_results[f'recall@{num}@{iou}'] = recalls[i, j] + if recalls.shape[1] > 1: + ar = recalls.mean(axis=1) + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + return eval_results diff --git a/thirdparty/mmdetection/mmdet/datasets/dataset_wrappers.py b/thirdparty/mmdetection/mmdet/datasets/dataset_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..55ad5cb60e581a96bdbd1fbbeebc2f46f8c4e899 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/dataset_wrappers.py @@ -0,0 +1,282 @@ +import bisect +import math +from collections import defaultdict + +import numpy as np +from mmcv.utils import print_log +from torch.utils.data.dataset import ConcatDataset as _ConcatDataset + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class ConcatDataset(_ConcatDataset): + """A wrapper of concatenated dataset. + + Same as :obj:`torch.utils.data.dataset.ConcatDataset`, but + concat the group flag for image aspect ratio. + + Args: + datasets (list[:obj:`Dataset`]): A list of datasets. + separate_eval (bool): Whether to evaluate the results + separately if it is used as validation dataset. + Defaults to True. + """ + + def __init__(self, datasets, separate_eval=True): + super(ConcatDataset, self).__init__(datasets) + self.CLASSES = datasets[0].CLASSES + self.separate_eval = separate_eval + if not separate_eval: + if any([isinstance(ds, CocoDataset) for ds in datasets]): + raise NotImplementedError( + 'Evaluating concatenated CocoDataset as a whole is not' + ' supported! Please set "separate_eval=True"') + elif len(set([type(ds) for ds in datasets])) != 1: + raise NotImplementedError( + 'All the datasets should have same types') + + if hasattr(datasets[0], 'flag'): + flags = [] + for i in range(0, len(datasets)): + flags.append(datasets[i].flag) + self.flag = np.concatenate(flags) + + def get_cat_ids(self, idx): + """Get category ids of concatenated dataset by index. + + Args: + idx (int): Index of data. + + Returns: + list[int]: All categories in the image of specified index. + """ + + if idx < 0: + if -idx > len(self): + raise ValueError( + 'absolute value of index should not exceed dataset length') + idx = len(self) + idx + dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) + if dataset_idx == 0: + sample_idx = idx + else: + sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] + return self.datasets[dataset_idx].get_cat_ids(sample_idx) + + def evaluate(self, results, logger=None, **kwargs): + """Evaluate the results. + + Args: + results (list[list | tuple]): Testing results of the dataset. 
+ logger (logging.Logger | str | None): Logger used for printing
+ related information during evaluation. Default: None.
+
+ Returns:
+ dict[str: float]: AP results of the total dataset or each separate
+ dataset if `self.separate_eval=True`.
+ """
+ assert len(results) == self.cumulative_sizes[-1], \
+ ('Dataset and results have different sizes: '
+ f'{self.cumulative_sizes[-1]} vs. {len(results)}')
+
+ # Check whether all the datasets support evaluation
+ for dataset in self.datasets:
+ assert hasattr(dataset, 'evaluate'), \
+ f'{type(dataset)} does not implement evaluate function'
+
+ if self.separate_eval:
+ dataset_idx = -1
+ total_eval_results = dict()
+ for size, dataset in zip(self.cumulative_sizes, self.datasets):
+ start_idx = 0 if dataset_idx == -1 else \
+ self.cumulative_sizes[dataset_idx]
+ end_idx = self.cumulative_sizes[dataset_idx + 1]
+
+ results_per_dataset = results[start_idx:end_idx]
+ print_log(
+ f'\nEvaluating {dataset.ann_file} with '
+ f'{len(results_per_dataset)} images now',
+ logger=logger)
+
+ eval_results_per_dataset = dataset.evaluate(
+ results_per_dataset, logger=logger, **kwargs)
+ dataset_idx += 1
+ for k, v in eval_results_per_dataset.items():
+ total_eval_results.update({f'{dataset_idx}_{k}': v})
+
+ return total_eval_results
+ elif any([isinstance(ds, CocoDataset) for ds in self.datasets]):
+ raise NotImplementedError(
+ 'Evaluating concatenated CocoDataset as a whole is not'
+ ' supported! Please set "separate_eval=True"')
+ elif len(set([type(ds) for ds in self.datasets])) != 1:
+ raise NotImplementedError(
+ 'All the datasets should have same types')
+ else:
+ original_data_infos = self.datasets[0].data_infos
+ self.datasets[0].data_infos = sum(
+ [dataset.data_infos for dataset in self.datasets], [])
+ eval_results = self.datasets[0].evaluate(
+ results, logger=logger, **kwargs)
+ self.datasets[0].data_infos = original_data_infos
+ return eval_results
+
+
+@DATASETS.register_module()
+class RepeatDataset(object):
+ """A wrapper of repeated dataset.
+
+ The length of repeated dataset will be `times` larger than the original
+ dataset. This is useful when the data loading time is long but the dataset
+ is small. Using RepeatDataset can reduce the data loading time between
+ epochs.
+
+ Args:
+ dataset (:obj:`Dataset`): The dataset to be repeated.
+ times (int): Repeat times.
+ """
+
+ def __init__(self, dataset, times):
+ self.dataset = dataset
+ self.times = times
+ self.CLASSES = dataset.CLASSES
+ if hasattr(self.dataset, 'flag'):
+ self.flag = np.tile(self.dataset.flag, times)
+
+ self._ori_len = len(self.dataset)
+
+ def __getitem__(self, idx):
+ return self.dataset[idx % self._ori_len]
+
+ def get_cat_ids(self, idx):
+ """Get category ids of repeat dataset by index.
+
+ Args:
+ idx (int): Index of data.
+
+ Returns:
+ list[int]: All categories in the image of specified index.
+ """
+
+ return self.dataset.get_cat_ids(idx % self._ori_len)
+
+ def __len__(self):
+ """Length after repetition."""
+ return self.times * self._ori_len
+
+
+# Modified from https://github.com/facebookresearch/detectron2/blob/41d475b75a230221e21d9cac5d69655e3415e3a4/detectron2/data/samplers/distributed_sampler.py#L57 # noqa
+@DATASETS.register_module()
+class ClassBalancedDataset(object):
+ """A wrapper of repeated dataset with repeat factor.
+
+ Suitable for training on class imbalanced datasets like LVIS. Following
+ the sampling strategy in the `paper <https://arxiv.org/abs/1908.03195>`_,
+ in each epoch, an image may appear multiple times based on its
+ "repeat factor". 
+ The repeat factor for an image is a function of the frequency of the
+ rarest category labeled in that image. The "frequency of category c"
+ in [0, 1] is defined as the fraction of images in the training set
+ (without repeats) in which category c appears.
+ The dataset needs to implement :func:`self.get_cat_ids` to support
+ ClassBalancedDataset.
+
+ The repeat factor is computed as follows.
+
+ 1. For each category c, compute the fraction of images
+ that contain it: :math:`f(c)`
+ 2. For each category c, compute the category-level repeat factor:
+ :math:`r(c) = max(1, sqrt(t/f(c)))`
+ 3. For each image I, compute the image-level repeat factor:
+ :math:`r(I) = max_{c in I} r(c)`
+
+ For example, with threshold t = 0.001 and a category appearing in only
+ f(c) = 0.0001 of the images, r(c) = sqrt(0.001/0.0001), roughly 3.16,
+ so every image containing that category is repeated about three times.
+
+ Args:
+ dataset (:obj:`CustomDataset`): The dataset to be repeated.
+ oversample_thr (float): frequency threshold below which data is
+ repeated. For categories with ``f_c >= oversample_thr``, there is
+ no oversampling. For categories with ``f_c < oversample_thr``, the
+ degree of oversampling follows the square-root inverse frequency
+ heuristic above.
+ filter_empty_gt (bool, optional): If set true, images without bounding
+ boxes will not be oversampled. Otherwise, they will be categorized
+ as the pure background class and included in the oversampling.
+ Default: True.
+ """
+
+ def __init__(self, dataset, oversample_thr, filter_empty_gt=True):
+ self.dataset = dataset
+ self.oversample_thr = oversample_thr
+ self.filter_empty_gt = filter_empty_gt
+ self.CLASSES = dataset.CLASSES
+
+ repeat_factors = self._get_repeat_factors(dataset, oversample_thr)
+ repeat_indices = []
+ for dataset_idx, repeat_factor in enumerate(repeat_factors):
+ repeat_indices.extend([dataset_idx] * math.ceil(repeat_factor))
+ self.repeat_indices = repeat_indices
+
+ flags = []
+ if hasattr(self.dataset, 'flag'):
+ for flag, repeat_factor in zip(self.dataset.flag, repeat_factors):
+ flags.extend([flag] * int(math.ceil(repeat_factor)))
+ assert len(flags) == len(repeat_indices)
+ self.flag = np.asarray(flags, dtype=np.uint8)
+
+ def _get_repeat_factors(self, dataset, repeat_thr):
+ """Get the repeat factor for each image in the dataset.
+
+ Args:
+ dataset (:obj:`CustomDataset`): The dataset
+ repeat_thr (float): The frequency threshold. If an image
+ contains categories whose frequency is below the threshold,
+ it will be repeated.
+
+ Returns:
+ list[float]: The repeat factor for each image in the dataset.
+ """
+
+ # 1. For each category c, compute the fraction of images
+ # that contain it: f(c)
+ category_freq = defaultdict(int)
+ num_images = len(dataset)
+ for idx in range(num_images):
+ cat_ids = set(self.dataset.get_cat_ids(idx))
+ if len(cat_ids) == 0 and not self.filter_empty_gt:
+ cat_ids = set([len(self.CLASSES)])
+ for cat_id in cat_ids:
+ category_freq[cat_id] += 1
+ for k, v in category_freq.items():
+ category_freq[k] = v / num_images
+
+ # 2. For each category c, compute the category-level repeat factor:
+ # r(c) = max(1, sqrt(t/f(c)))
+ category_repeat = {
+ cat_id: max(1.0, math.sqrt(repeat_thr / cat_freq))
+ for cat_id, cat_freq in category_freq.items()
+ }
+
+ # 3. 
For each image I, compute the image-level repeat factor: + # r(I) = max_{c in I} r(c) + repeat_factors = [] + for idx in range(num_images): + cat_ids = set(self.dataset.get_cat_ids(idx)) + if len(cat_ids) == 0 and not self.filter_empty_gt: + cat_ids = set([len(self.CLASSES)]) + repeat_factor = 1 + if len(cat_ids) > 0: + repeat_factor = max( + {category_repeat[cat_id] + for cat_id in cat_ids}) + repeat_factors.append(repeat_factor) + + return repeat_factors + + def __getitem__(self, idx): + ori_index = self.repeat_indices[idx] + return self.dataset[ori_index] + + def __len__(self): + """Length after repetition.""" + return len(self.repeat_indices) diff --git a/thirdparty/mmdetection/mmdet/datasets/deepfashion.py b/thirdparty/mmdetection/mmdet/datasets/deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..1125376091f2d4ee6843ae4f2156b3b0453be369 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/deepfashion.py @@ -0,0 +1,10 @@ +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class DeepFashionDataset(CocoDataset): + + CLASSES = ('top', 'skirt', 'leggings', 'dress', 'outer', 'pants', 'bag', + 'neckwear', 'headwear', 'eyeglass', 'belt', 'footwear', 'hair', + 'skin', 'face') diff --git a/thirdparty/mmdetection/mmdet/datasets/lvis.py b/thirdparty/mmdetection/mmdet/datasets/lvis.py new file mode 100644 index 0000000000000000000000000000000000000000..9f3eba0663a4dcde4432ed128ba7bd31160732d7 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/lvis.py @@ -0,0 +1,744 @@ +import itertools +import logging +import os.path as osp +import tempfile +from collections import OrderedDict + +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class LVISV05Dataset(CocoDataset): + + CLASSES = ( + 'acorn', 'aerosol_can', 'air_conditioner', 'airplane', 'alarm_clock', + 'alcohol', 'alligator', 'almond', 'ambulance', 'amplifier', 'anklet', + 'antenna', 'apple', 'apple_juice', 'applesauce', 'apricot', 'apron', + 'aquarium', 'armband', 'armchair', 'armoire', 'armor', 'artichoke', + 'trash_can', 'ashtray', 'asparagus', 'atomizer', 'avocado', 'award', + 'awning', 'ax', 'baby_buggy', 'basketball_backboard', 'backpack', + 'handbag', 'suitcase', 'bagel', 'bagpipe', 'baguet', 'bait', 'ball', + 'ballet_skirt', 'balloon', 'bamboo', 'banana', 'Band_Aid', 'bandage', + 'bandanna', 'banjo', 'banner', 'barbell', 'barge', 'barrel', + 'barrette', 'barrow', 'baseball_base', 'baseball', 'baseball_bat', + 'baseball_cap', 'baseball_glove', 'basket', 'basketball_hoop', + 'basketball', 'bass_horn', 'bat_(animal)', 'bath_mat', 'bath_towel', + 'bathrobe', 'bathtub', 'batter_(food)', 'battery', 'beachball', 'bead', + 'beaker', 'bean_curd', 'beanbag', 'beanie', 'bear', 'bed', + 'bedspread', 'cow', 'beef_(food)', 'beeper', 'beer_bottle', 'beer_can', + 'beetle', 'bell', 'bell_pepper', 'belt', 'belt_buckle', 'bench', + 'beret', 'bib', 'Bible', 'bicycle', 'visor', 'binder', 'binoculars', + 'bird', 'birdfeeder', 'birdbath', 'birdcage', 'birdhouse', + 'birthday_cake', 'birthday_card', 'biscuit_(bread)', 'pirate_flag', + 'black_sheep', 'blackboard', 'blanket', 'blazer', 'blender', 'blimp', + 'blinker', 'blueberry', 'boar', 'gameboard', 'boat', 'bobbin', + 'bobby_pin', 'boiled_egg', 'bolo_tie', 'deadbolt', 'bolt', 'bonnet', + 'book', 'book_bag', 'bookcase', 'booklet', 'bookmark', + 'boom_microphone', 'boot', 'bottle', 
'bottle_opener', 'bouquet', + 'bow_(weapon)', 'bow_(decorative_ribbons)', 'bow-tie', 'bowl', + 'pipe_bowl', 'bowler_hat', 'bowling_ball', 'bowling_pin', + 'boxing_glove', 'suspenders', 'bracelet', 'brass_plaque', 'brassiere', + 'bread-bin', 'breechcloth', 'bridal_gown', 'briefcase', + 'bristle_brush', 'broccoli', 'broach', 'broom', 'brownie', + 'brussels_sprouts', 'bubble_gum', 'bucket', 'horse_buggy', 'bull', + 'bulldog', 'bulldozer', 'bullet_train', 'bulletin_board', + 'bulletproof_vest', 'bullhorn', 'corned_beef', 'bun', 'bunk_bed', + 'buoy', 'burrito', 'bus_(vehicle)', 'business_card', 'butcher_knife', + 'butter', 'butterfly', 'button', 'cab_(taxi)', 'cabana', 'cabin_car', + 'cabinet', 'locker', 'cake', 'calculator', 'calendar', 'calf', + 'camcorder', 'camel', 'camera', 'camera_lens', 'camper_(vehicle)', + 'can', 'can_opener', 'candelabrum', 'candle', 'candle_holder', + 'candy_bar', 'candy_cane', 'walking_cane', 'canister', 'cannon', + 'canoe', 'cantaloup', 'canteen', 'cap_(headwear)', 'bottle_cap', + 'cape', 'cappuccino', 'car_(automobile)', 'railcar_(part_of_a_train)', + 'elevator_car', 'car_battery', 'identity_card', 'card', 'cardigan', + 'cargo_ship', 'carnation', 'horse_carriage', 'carrot', 'tote_bag', + 'cart', 'carton', 'cash_register', 'casserole', 'cassette', 'cast', + 'cat', 'cauliflower', 'caviar', 'cayenne_(spice)', 'CD_player', + 'celery', 'cellular_telephone', 'chain_mail', 'chair', 'chaise_longue', + 'champagne', 'chandelier', 'chap', 'checkbook', 'checkerboard', + 'cherry', 'chessboard', 'chest_of_drawers_(furniture)', + 'chicken_(animal)', 'chicken_wire', 'chickpea', 'Chihuahua', + 'chili_(vegetable)', 'chime', 'chinaware', 'crisp_(potato_chip)', + 'poker_chip', 'chocolate_bar', 'chocolate_cake', 'chocolate_milk', + 'chocolate_mousse', 'choker', 'chopping_board', 'chopstick', + 'Christmas_tree', 'slide', 'cider', 'cigar_box', 'cigarette', + 'cigarette_case', 'cistern', 'clarinet', 'clasp', 'cleansing_agent', + 'clementine', 'clip', 'clipboard', 'clock', 'clock_tower', + 'clothes_hamper', 'clothespin', 'clutch_bag', 'coaster', 'coat', + 'coat_hanger', 'coatrack', 'cock', 'coconut', 'coffee_filter', + 'coffee_maker', 'coffee_table', 'coffeepot', 'coil', 'coin', + 'colander', 'coleslaw', 'coloring_material', 'combination_lock', + 'pacifier', 'comic_book', 'computer_keyboard', 'concrete_mixer', + 'cone', 'control', 'convertible_(automobile)', 'sofa_bed', 'cookie', + 'cookie_jar', 'cooking_utensil', 'cooler_(for_food)', + 'cork_(bottle_plug)', 'corkboard', 'corkscrew', 'edible_corn', + 'cornbread', 'cornet', 'cornice', 'cornmeal', 'corset', + 'romaine_lettuce', 'costume', 'cougar', 'coverall', 'cowbell', + 'cowboy_hat', 'crab_(animal)', 'cracker', 'crape', 'crate', 'crayon', + 'cream_pitcher', 'credit_card', 'crescent_roll', 'crib', 'crock_pot', + 'crossbar', 'crouton', 'crow', 'crown', 'crucifix', 'cruise_ship', + 'police_cruiser', 'crumb', 'crutch', 'cub_(animal)', 'cube', + 'cucumber', 'cufflink', 'cup', 'trophy_cup', 'cupcake', 'hair_curler', + 'curling_iron', 'curtain', 'cushion', 'custard', 'cutting_tool', + 'cylinder', 'cymbal', 'dachshund', 'dagger', 'dartboard', + 'date_(fruit)', 'deck_chair', 'deer', 'dental_floss', 'desk', + 'detergent', 'diaper', 'diary', 'die', 'dinghy', 'dining_table', 'tux', + 'dish', 'dish_antenna', 'dishrag', 'dishtowel', 'dishwasher', + 'dishwasher_detergent', 'diskette', 'dispenser', 'Dixie_cup', 'dog', + 'dog_collar', 'doll', 'dollar', 'dolphin', 'domestic_ass', 'eye_mask', + 'doorbell', 'doorknob', 'doormat', 'doughnut', 'dove', 
'dragonfly', + 'drawer', 'underdrawers', 'dress', 'dress_hat', 'dress_suit', + 'dresser', 'drill', 'drinking_fountain', 'drone', 'dropper', + 'drum_(musical_instrument)', 'drumstick', 'duck', 'duckling', + 'duct_tape', 'duffel_bag', 'dumbbell', 'dumpster', 'dustpan', + 'Dutch_oven', 'eagle', 'earphone', 'earplug', 'earring', 'easel', + 'eclair', 'eel', 'egg', 'egg_roll', 'egg_yolk', 'eggbeater', + 'eggplant', 'electric_chair', 'refrigerator', 'elephant', 'elk', + 'envelope', 'eraser', 'escargot', 'eyepatch', 'falcon', 'fan', + 'faucet', 'fedora', 'ferret', 'Ferris_wheel', 'ferry', 'fig_(fruit)', + 'fighter_jet', 'figurine', 'file_cabinet', 'file_(tool)', 'fire_alarm', + 'fire_engine', 'fire_extinguisher', 'fire_hose', 'fireplace', + 'fireplug', 'fish', 'fish_(food)', 'fishbowl', 'fishing_boat', + 'fishing_rod', 'flag', 'flagpole', 'flamingo', 'flannel', 'flash', + 'flashlight', 'fleece', 'flip-flop_(sandal)', 'flipper_(footwear)', + 'flower_arrangement', 'flute_glass', 'foal', 'folding_chair', + 'food_processor', 'football_(American)', 'football_helmet', + 'footstool', 'fork', 'forklift', 'freight_car', 'French_toast', + 'freshener', 'frisbee', 'frog', 'fruit_juice', 'fruit_salad', + 'frying_pan', 'fudge', 'funnel', 'futon', 'gag', 'garbage', + 'garbage_truck', 'garden_hose', 'gargle', 'gargoyle', 'garlic', + 'gasmask', 'gazelle', 'gelatin', 'gemstone', 'giant_panda', + 'gift_wrap', 'ginger', 'giraffe', 'cincture', + 'glass_(drink_container)', 'globe', 'glove', 'goat', 'goggles', + 'goldfish', 'golf_club', 'golfcart', 'gondola_(boat)', 'goose', + 'gorilla', 'gourd', 'surgical_gown', 'grape', 'grasshopper', 'grater', + 'gravestone', 'gravy_boat', 'green_bean', 'green_onion', 'griddle', + 'grillroom', 'grinder_(tool)', 'grits', 'grizzly', 'grocery_bag', + 'guacamole', 'guitar', 'gull', 'gun', 'hair_spray', 'hairbrush', + 'hairnet', 'hairpin', 'ham', 'hamburger', 'hammer', 'hammock', + 'hamper', 'hamster', 'hair_dryer', 'hand_glass', 'hand_towel', + 'handcart', 'handcuff', 'handkerchief', 'handle', 'handsaw', + 'hardback_book', 'harmonium', 'hat', 'hatbox', 'hatch', 'veil', + 'headband', 'headboard', 'headlight', 'headscarf', 'headset', + 'headstall_(for_horses)', 'hearing_aid', 'heart', 'heater', + 'helicopter', 'helmet', 'heron', 'highchair', 'hinge', 'hippopotamus', + 'hockey_stick', 'hog', 'home_plate_(baseball)', 'honey', 'fume_hood', + 'hook', 'horse', 'hose', 'hot-air_balloon', 'hotplate', 'hot_sauce', + 'hourglass', 'houseboat', 'hummingbird', 'hummus', 'polar_bear', + 'icecream', 'popsicle', 'ice_maker', 'ice_pack', 'ice_skate', + 'ice_tea', 'igniter', 'incense', 'inhaler', 'iPod', + 'iron_(for_clothing)', 'ironing_board', 'jacket', 'jam', 'jean', + 'jeep', 'jelly_bean', 'jersey', 'jet_plane', 'jewelry', 'joystick', + 'jumpsuit', 'kayak', 'keg', 'kennel', 'kettle', 'key', 'keycard', + 'kilt', 'kimono', 'kitchen_sink', 'kitchen_table', 'kite', 'kitten', + 'kiwi_fruit', 'knee_pad', 'knife', 'knight_(chess_piece)', + 'knitting_needle', 'knob', 'knocker_(on_a_door)', 'koala', 'lab_coat', + 'ladder', 'ladle', 'ladybug', 'lamb_(animal)', 'lamb-chop', 'lamp', + 'lamppost', 'lampshade', 'lantern', 'lanyard', 'laptop_computer', + 'lasagna', 'latch', 'lawn_mower', 'leather', 'legging_(clothing)', + 'Lego', 'lemon', 'lemonade', 'lettuce', 'license_plate', 'life_buoy', + 'life_jacket', 'lightbulb', 'lightning_rod', 'lime', 'limousine', + 'linen_paper', 'lion', 'lip_balm', 'lipstick', 'liquor', 'lizard', + 'Loafer_(type_of_shoe)', 'log', 'lollipop', 'lotion', + 'speaker_(stero_equipment)', 
'loveseat', 'machine_gun', 'magazine', + 'magnet', 'mail_slot', 'mailbox_(at_home)', 'mallet', 'mammoth', + 'mandarin_orange', 'manger', 'manhole', 'map', 'marker', 'martini', + 'mascot', 'mashed_potato', 'masher', 'mask', 'mast', + 'mat_(gym_equipment)', 'matchbox', 'mattress', 'measuring_cup', + 'measuring_stick', 'meatball', 'medicine', 'melon', 'microphone', + 'microscope', 'microwave_oven', 'milestone', 'milk', 'minivan', + 'mint_candy', 'mirror', 'mitten', 'mixer_(kitchen_tool)', 'money', + 'monitor_(computer_equipment) computer_monitor', 'monkey', 'motor', + 'motor_scooter', 'motor_vehicle', 'motorboat', 'motorcycle', + 'mound_(baseball)', 'mouse_(animal_rodent)', + 'mouse_(computer_equipment)', 'mousepad', 'muffin', 'mug', 'mushroom', + 'music_stool', 'musical_instrument', 'nailfile', 'nameplate', 'napkin', + 'neckerchief', 'necklace', 'necktie', 'needle', 'nest', 'newsstand', + 'nightshirt', 'nosebag_(for_animals)', 'noseband_(for_animals)', + 'notebook', 'notepad', 'nut', 'nutcracker', 'oar', 'octopus_(food)', + 'octopus_(animal)', 'oil_lamp', 'olive_oil', 'omelet', 'onion', + 'orange_(fruit)', 'orange_juice', 'oregano', 'ostrich', 'ottoman', + 'overalls_(clothing)', 'owl', 'packet', 'inkpad', 'pad', 'paddle', + 'padlock', 'paintbox', 'paintbrush', 'painting', 'pajamas', 'palette', + 'pan_(for_cooking)', 'pan_(metal_container)', 'pancake', 'pantyhose', + 'papaya', 'paperclip', 'paper_plate', 'paper_towel', 'paperback_book', + 'paperweight', 'parachute', 'parakeet', 'parasail_(sports)', + 'parchment', 'parka', 'parking_meter', 'parrot', + 'passenger_car_(part_of_a_train)', 'passenger_ship', 'passport', + 'pastry', 'patty_(food)', 'pea_(food)', 'peach', 'peanut_butter', + 'pear', 'peeler_(tool_for_fruit_and_vegetables)', 'pegboard', + 'pelican', 'pen', 'pencil', 'pencil_box', 'pencil_sharpener', + 'pendulum', 'penguin', 'pennant', 'penny_(coin)', 'pepper', + 'pepper_mill', 'perfume', 'persimmon', 'baby', 'pet', 'petfood', + 'pew_(church_bench)', 'phonebook', 'phonograph_record', 'piano', + 'pickle', 'pickup_truck', 'pie', 'pigeon', 'piggy_bank', 'pillow', + 'pin_(non_jewelry)', 'pineapple', 'pinecone', 'ping-pong_ball', + 'pinwheel', 'tobacco_pipe', 'pipe', 'pistol', 'pita_(bread)', + 'pitcher_(vessel_for_liquid)', 'pitchfork', 'pizza', 'place_mat', + 'plate', 'platter', 'playing_card', 'playpen', 'pliers', + 'plow_(farm_equipment)', 'pocket_watch', 'pocketknife', + 'poker_(fire_stirring_tool)', 'pole', 'police_van', 'polo_shirt', + 'poncho', 'pony', 'pool_table', 'pop_(soda)', 'portrait', + 'postbox_(public)', 'postcard', 'poster', 'pot', 'flowerpot', 'potato', + 'potholder', 'pottery', 'pouch', 'power_shovel', 'prawn', 'printer', + 'projectile_(weapon)', 'projector', 'propeller', 'prune', 'pudding', + 'puffer_(fish)', 'puffin', 'pug-dog', 'pumpkin', 'puncher', 'puppet', + 'puppy', 'quesadilla', 'quiche', 'quilt', 'rabbit', 'race_car', + 'racket', 'radar', 'radiator', 'radio_receiver', 'radish', 'raft', + 'rag_doll', 'raincoat', 'ram_(animal)', 'raspberry', 'rat', + 'razorblade', 'reamer_(juicer)', 'rearview_mirror', 'receipt', + 'recliner', 'record_player', 'red_cabbage', 'reflector', + 'remote_control', 'rhinoceros', 'rib_(food)', 'rifle', 'ring', + 'river_boat', 'road_map', 'robe', 'rocking_chair', 'roller_skate', + 'Rollerblade', 'rolling_pin', 'root_beer', + 'router_(computer_equipment)', 'rubber_band', 'runner_(carpet)', + 'plastic_bag', 'saddle_(on_an_animal)', 'saddle_blanket', 'saddlebag', + 'safety_pin', 'sail', 'salad', 'salad_plate', 'salami', + 'salmon_(fish)', 
'salmon_(food)', 'salsa', 'saltshaker', + 'sandal_(type_of_shoe)', 'sandwich', 'satchel', 'saucepan', 'saucer', + 'sausage', 'sawhorse', 'saxophone', 'scale_(measuring_instrument)', + 'scarecrow', 'scarf', 'school_bus', 'scissors', 'scoreboard', + 'scrambled_eggs', 'scraper', 'scratcher', 'screwdriver', + 'scrubbing_brush', 'sculpture', 'seabird', 'seahorse', 'seaplane', + 'seashell', 'seedling', 'serving_dish', 'sewing_machine', 'shaker', + 'shampoo', 'shark', 'sharpener', 'Sharpie', 'shaver_(electric)', + 'shaving_cream', 'shawl', 'shears', 'sheep', 'shepherd_dog', + 'sherbert', 'shield', 'shirt', 'shoe', 'shopping_bag', 'shopping_cart', + 'short_pants', 'shot_glass', 'shoulder_bag', 'shovel', 'shower_head', + 'shower_curtain', 'shredder_(for_paper)', 'sieve', 'signboard', 'silo', + 'sink', 'skateboard', 'skewer', 'ski', 'ski_boot', 'ski_parka', + 'ski_pole', 'skirt', 'sled', 'sleeping_bag', 'sling_(bandage)', + 'slipper_(footwear)', 'smoothie', 'snake', 'snowboard', 'snowman', + 'snowmobile', 'soap', 'soccer_ball', 'sock', 'soda_fountain', + 'carbonated_water', 'sofa', 'softball', 'solar_array', 'sombrero', + 'soup', 'soup_bowl', 'soupspoon', 'sour_cream', 'soya_milk', + 'space_shuttle', 'sparkler_(fireworks)', 'spatula', 'spear', + 'spectacles', 'spice_rack', 'spider', 'sponge', 'spoon', 'sportswear', + 'spotlight', 'squirrel', 'stapler_(stapling_machine)', 'starfish', + 'statue_(sculpture)', 'steak_(food)', 'steak_knife', + 'steamer_(kitchen_appliance)', 'steering_wheel', 'stencil', + 'stepladder', 'step_stool', 'stereo_(sound_system)', 'stew', 'stirrer', + 'stirrup', 'stockings_(leg_wear)', 'stool', 'stop_sign', 'brake_light', + 'stove', 'strainer', 'strap', 'straw_(for_drinking)', 'strawberry', + 'street_sign', 'streetlight', 'string_cheese', 'stylus', 'subwoofer', + 'sugar_bowl', 'sugarcane_(plant)', 'suit_(clothing)', 'sunflower', + 'sunglasses', 'sunhat', 'sunscreen', 'surfboard', 'sushi', 'mop', + 'sweat_pants', 'sweatband', 'sweater', 'sweatshirt', 'sweet_potato', + 'swimsuit', 'sword', 'syringe', 'Tabasco_sauce', 'table-tennis_table', + 'table', 'table_lamp', 'tablecloth', 'tachometer', 'taco', 'tag', + 'taillight', 'tambourine', 'army_tank', 'tank_(storage_vessel)', + 'tank_top_(clothing)', 'tape_(sticky_cloth_or_paper)', 'tape_measure', + 'tapestry', 'tarp', 'tartan', 'tassel', 'tea_bag', 'teacup', + 'teakettle', 'teapot', 'teddy_bear', 'telephone', 'telephone_booth', + 'telephone_pole', 'telephoto_lens', 'television_camera', + 'television_set', 'tennis_ball', 'tennis_racket', 'tequila', + 'thermometer', 'thermos_bottle', 'thermostat', 'thimble', 'thread', + 'thumbtack', 'tiara', 'tiger', 'tights_(clothing)', 'timer', 'tinfoil', + 'tinsel', 'tissue_paper', 'toast_(food)', 'toaster', 'toaster_oven', + 'toilet', 'toilet_tissue', 'tomato', 'tongs', 'toolbox', 'toothbrush', + 'toothpaste', 'toothpick', 'cover', 'tortilla', 'tow_truck', 'towel', + 'towel_rack', 'toy', 'tractor_(farm_equipment)', 'traffic_light', + 'dirt_bike', 'trailer_truck', 'train_(railroad_vehicle)', 'trampoline', + 'tray', 'tree_house', 'trench_coat', 'triangle_(musical_instrument)', + 'tricycle', 'tripod', 'trousers', 'truck', 'truffle_(chocolate)', + 'trunk', 'vat', 'turban', 'turkey_(bird)', 'turkey_(food)', 'turnip', + 'turtle', 'turtleneck_(clothing)', 'typewriter', 'umbrella', + 'underwear', 'unicycle', 'urinal', 'urn', 'vacuum_cleaner', 'valve', + 'vase', 'vending_machine', 'vent', 'videotape', 'vinegar', 'violin', + 'vodka', 'volleyball', 'vulture', 'waffle', 'waffle_iron', 'wagon', + 'wagon_wheel', 
'walking_stick', 'wall_clock', 'wall_socket', 'wallet', + 'walrus', 'wardrobe', 'wasabi', 'automatic_washer', 'watch', + 'water_bottle', 'water_cooler', 'water_faucet', 'water_filter', + 'water_heater', 'water_jug', 'water_gun', 'water_scooter', 'water_ski', + 'water_tower', 'watering_can', 'watermelon', 'weathervane', 'webcam', + 'wedding_cake', 'wedding_ring', 'wet_suit', 'wheel', 'wheelchair', + 'whipped_cream', 'whiskey', 'whistle', 'wick', 'wig', 'wind_chime', + 'windmill', 'window_box_(for_plants)', 'windshield_wiper', 'windsock', + 'wine_bottle', 'wine_bucket', 'wineglass', 'wing_chair', + 'blinder_(for_horses)', 'wok', 'wolf', 'wooden_spoon', 'wreath', + 'wrench', 'wristband', 'wristlet', 'yacht', 'yak', 'yogurt', + 'yoke_(animal_equipment)', 'zebra', 'zucchini') + + def load_annotations(self, ann_file): + """Load annotation from lvis style annotation file. + + Args: + ann_file (str): Path of annotation file. + + Returns: + list[dict]: Annotation info from LVIS api. + """ + + try: + import lvis + assert lvis.__version__ >= '10.5.3' + from lvis import LVIS + except AssertionError: + raise AssertionError('Incompatible version of lvis is installed. ' + 'Run pip uninstall lvis first. Then run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis. ') + except ImportError: + raise ImportError('Package lvis is not installed. Please run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis.') + self.coco = LVIS(ann_file) + assert not self.custom_classes, 'LVIS custom classes is not supported' + self.cat_ids = self.coco.get_cat_ids() + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + data_infos = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + if info['file_name'].startswith('COCO'): + # Convert form the COCO 2014 file naming convention of + # COCO_[train/val/test]2014_000000000000.jpg to the 2017 + # naming convention of 000000000000.jpg + # (LVIS v1 will fix this naming issue) + info['filename'] = info['file_name'][-16:] + else: + info['filename'] = info['file_name'] + data_infos.append(info) + return data_infos + + def evaluate(self, + results, + metric='bbox', + logger=None, + jsonfile_prefix=None, + classwise=False, + proposal_nums=(100, 300, 1000), + iou_thrs=np.arange(0.5, 0.96, 0.05)): + """Evaluation in LVIS protocol. + + Args: + results (list[list | tuple]): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. Options are + 'bbox', 'segm', 'proposal', 'proposal_fast'. + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. + jsonfile_prefix (str | None): + classwise (bool): Whether to evaluating the AP for each class. + proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thrs (Sequence[float]): IoU threshold used for evaluating + recalls. If set to a list, the average recall of all IoUs will + also be computed. Default: 0.5. + + Returns: + dict[str, float]: LVIS style metrics. + """ + + try: + import lvis + assert lvis.__version__ >= '10.5.3' + from lvis import LVISResults, LVISEval + except AssertionError: + raise AssertionError('Incompatible version of lvis is installed. ' + 'Run pip uninstall lvis first. Then run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis. ') + except ImportError: + raise ImportError('Package lvis is not installed. 
Please run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis.') + assert isinstance(results, list), 'results must be a list' + assert len(results) == len(self), ( + 'The length of results is not equal to the dataset len: {} != {}'. + format(len(results), len(self))) + + metrics = metric if isinstance(metric, list) else [metric] + allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast'] + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError('metric {} is not supported'.format(metric)) + + if jsonfile_prefix is None: + tmp_dir = tempfile.TemporaryDirectory() + jsonfile_prefix = osp.join(tmp_dir.name, 'results') + else: + tmp_dir = None + result_files = self.results2json(results, jsonfile_prefix) + + eval_results = OrderedDict() + # get original api + lvis_gt = self.coco + for metric in metrics: + msg = 'Evaluating {}...'.format(metric) + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'proposal_fast': + ar = self.fast_eval_recall( + results, proposal_nums, iou_thrs, logger='silent') + log_msg = [] + for i, num in enumerate(proposal_nums): + eval_results['AR@{}'.format(num)] = ar[i] + log_msg.append('\nAR@{}\t{:.4f}'.format(num, ar[i])) + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + continue + + if metric not in result_files: + raise KeyError('{} is not in results'.format(metric)) + try: + lvis_dt = LVISResults(lvis_gt, result_files[metric]) + except IndexError: + print_log( + 'The testing results of the whole dataset is empty.', + logger=logger, + level=logging.ERROR) + break + + iou_type = 'bbox' if metric == 'proposal' else metric + lvis_eval = LVISEval(lvis_gt, lvis_dt, iou_type) + lvis_eval.params.imgIds = self.img_ids + if metric == 'proposal': + lvis_eval.params.useCats = 0 + lvis_eval.params.maxDets = list(proposal_nums) + lvis_eval.evaluate() + lvis_eval.accumulate() + lvis_eval.summarize() + for k, v in lvis_eval.get_results().items(): + if k.startswith('AR'): + val = float('{:.3f}'.format(float(v))) + eval_results[k] = val + else: + lvis_eval.evaluate() + lvis_eval.accumulate() + lvis_eval.summarize() + lvis_results = lvis_eval.get_results() + if classwise: # Compute per-category AP + # Compute per-category AP + # from https://github.com/facebookresearch/detectron2/ + precisions = lvis_eval.eval['precision'] + # precision: (iou, recall, cls, area range, max dets) + assert len(self.cat_ids) == precisions.shape[2] + + results_per_category = [] + for idx, catId in enumerate(self.cat_ids): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + nm = self.coco.load_cats(catId)[0] + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + if precision.size: + ap = np.mean(precision) + else: + ap = float('nan') + results_per_category.append( + (f'{nm["name"]}', f'{float(ap):0.3f}')) + + num_columns = min(6, len(results_per_category) * 2) + results_flatten = list( + itertools.chain(*results_per_category)) + headers = ['category', 'AP'] * (num_columns // 2) + results_2d = itertools.zip_longest(*[ + results_flatten[i::num_columns] + for i in range(num_columns) + ]) + table_data = [headers] + table_data += [result for result in results_2d] + table = AsciiTable(table_data) + print_log('\n' + table.table, logger=logger) + + for k, v in lvis_results.items(): + if k.startswith('AP'): + key = '{}_{}'.format(metric, k) + val = float('{:.3f}'.format(float(v))) + eval_results[key] = val + ap_summary = ' '.join([ + '{}:{:.3f}'.format(k, 
float(v)) + for k, v in lvis_results.items() if k.startswith('AP') + ]) + eval_results['{}_mAP_copypaste'.format(metric)] = ap_summary + lvis_eval.print_results() + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results + + +LVISDataset = LVISV05Dataset +DATASETS.register_module(name='LVISDataset', module=LVISDataset) + + +@DATASETS.register_module() +class LVISV1Dataset(LVISDataset): + + CLASSES = ( + 'aerosol_can', 'air_conditioner', 'airplane', 'alarm_clock', 'alcohol', + 'alligator', 'almond', 'ambulance', 'amplifier', 'anklet', 'antenna', + 'apple', 'applesauce', 'apricot', 'apron', 'aquarium', + 'arctic_(type_of_shoe)', 'armband', 'armchair', 'armoire', 'armor', + 'artichoke', 'trash_can', 'ashtray', 'asparagus', 'atomizer', + 'avocado', 'award', 'awning', 'ax', 'baboon', 'baby_buggy', + 'basketball_backboard', 'backpack', 'handbag', 'suitcase', 'bagel', + 'bagpipe', 'baguet', 'bait', 'ball', 'ballet_skirt', 'balloon', + 'bamboo', 'banana', 'Band_Aid', 'bandage', 'bandanna', 'banjo', + 'banner', 'barbell', 'barge', 'barrel', 'barrette', 'barrow', + 'baseball_base', 'baseball', 'baseball_bat', 'baseball_cap', + 'baseball_glove', 'basket', 'basketball', 'bass_horn', 'bat_(animal)', + 'bath_mat', 'bath_towel', 'bathrobe', 'bathtub', 'batter_(food)', + 'battery', 'beachball', 'bead', 'bean_curd', 'beanbag', 'beanie', + 'bear', 'bed', 'bedpan', 'bedspread', 'cow', 'beef_(food)', 'beeper', + 'beer_bottle', 'beer_can', 'beetle', 'bell', 'bell_pepper', 'belt', + 'belt_buckle', 'bench', 'beret', 'bib', 'Bible', 'bicycle', 'visor', + 'billboard', 'binder', 'binoculars', 'bird', 'birdfeeder', 'birdbath', + 'birdcage', 'birdhouse', 'birthday_cake', 'birthday_card', + 'pirate_flag', 'black_sheep', 'blackberry', 'blackboard', 'blanket', + 'blazer', 'blender', 'blimp', 'blinker', 'blouse', 'blueberry', + 'gameboard', 'boat', 'bob', 'bobbin', 'bobby_pin', 'boiled_egg', + 'bolo_tie', 'deadbolt', 'bolt', 'bonnet', 'book', 'bookcase', + 'booklet', 'bookmark', 'boom_microphone', 'boot', 'bottle', + 'bottle_opener', 'bouquet', 'bow_(weapon)', 'bow_(decorative_ribbons)', + 'bow-tie', 'bowl', 'pipe_bowl', 'bowler_hat', 'bowling_ball', 'box', + 'boxing_glove', 'suspenders', 'bracelet', 'brass_plaque', 'brassiere', + 'bread-bin', 'bread', 'breechcloth', 'bridal_gown', 'briefcase', + 'broccoli', 'broach', 'broom', 'brownie', 'brussels_sprouts', + 'bubble_gum', 'bucket', 'horse_buggy', 'bull', 'bulldog', 'bulldozer', + 'bullet_train', 'bulletin_board', 'bulletproof_vest', 'bullhorn', + 'bun', 'bunk_bed', 'buoy', 'burrito', 'bus_(vehicle)', 'business_card', + 'butter', 'butterfly', 'button', 'cab_(taxi)', 'cabana', 'cabin_car', + 'cabinet', 'locker', 'cake', 'calculator', 'calendar', 'calf', + 'camcorder', 'camel', 'camera', 'camera_lens', 'camper_(vehicle)', + 'can', 'can_opener', 'candle', 'candle_holder', 'candy_bar', + 'candy_cane', 'walking_cane', 'canister', 'canoe', 'cantaloup', + 'canteen', 'cap_(headwear)', 'bottle_cap', 'cape', 'cappuccino', + 'car_(automobile)', 'railcar_(part_of_a_train)', 'elevator_car', + 'car_battery', 'identity_card', 'card', 'cardigan', 'cargo_ship', + 'carnation', 'horse_carriage', 'carrot', 'tote_bag', 'cart', 'carton', + 'cash_register', 'casserole', 'cassette', 'cast', 'cat', 'cauliflower', + 'cayenne_(spice)', 'CD_player', 'celery', 'cellular_telephone', + 'chain_mail', 'chair', 'chaise_longue', 'chalice', 'chandelier', + 'chap', 'checkbook', 'checkerboard', 'cherry', 'chessboard', + 'chicken_(animal)', 'chickpea', 'chili_(vegetable)', 'chime', + 'chinaware', 
'crisp_(potato_chip)', 'poker_chip', 'chocolate_bar', + 'chocolate_cake', 'chocolate_milk', 'chocolate_mousse', 'choker', + 'chopping_board', 'chopstick', 'Christmas_tree', 'slide', 'cider', + 'cigar_box', 'cigarette', 'cigarette_case', 'cistern', 'clarinet', + 'clasp', 'cleansing_agent', 'cleat_(for_securing_rope)', 'clementine', + 'clip', 'clipboard', 'clippers_(for_plants)', 'cloak', 'clock', + 'clock_tower', 'clothes_hamper', 'clothespin', 'clutch_bag', 'coaster', + 'coat', 'coat_hanger', 'coatrack', 'cock', 'cockroach', + 'cocoa_(beverage)', 'coconut', 'coffee_maker', 'coffee_table', + 'coffeepot', 'coil', 'coin', 'colander', 'coleslaw', + 'coloring_material', 'combination_lock', 'pacifier', 'comic_book', + 'compass', 'computer_keyboard', 'condiment', 'cone', 'control', + 'convertible_(automobile)', 'sofa_bed', 'cooker', 'cookie', + 'cooking_utensil', 'cooler_(for_food)', 'cork_(bottle_plug)', + 'corkboard', 'corkscrew', 'edible_corn', 'cornbread', 'cornet', + 'cornice', 'cornmeal', 'corset', 'costume', 'cougar', 'coverall', + 'cowbell', 'cowboy_hat', 'crab_(animal)', 'crabmeat', 'cracker', + 'crape', 'crate', 'crayon', 'cream_pitcher', 'crescent_roll', 'crib', + 'crock_pot', 'crossbar', 'crouton', 'crow', 'crowbar', 'crown', + 'crucifix', 'cruise_ship', 'police_cruiser', 'crumb', 'crutch', + 'cub_(animal)', 'cube', 'cucumber', 'cufflink', 'cup', 'trophy_cup', + 'cupboard', 'cupcake', 'hair_curler', 'curling_iron', 'curtain', + 'cushion', 'cylinder', 'cymbal', 'dagger', 'dalmatian', 'dartboard', + 'date_(fruit)', 'deck_chair', 'deer', 'dental_floss', 'desk', + 'detergent', 'diaper', 'diary', 'die', 'dinghy', 'dining_table', 'tux', + 'dish', 'dish_antenna', 'dishrag', 'dishtowel', 'dishwasher', + 'dishwasher_detergent', 'dispenser', 'diving_board', 'Dixie_cup', + 'dog', 'dog_collar', 'doll', 'dollar', 'dollhouse', 'dolphin', + 'domestic_ass', 'doorknob', 'doormat', 'doughnut', 'dove', 'dragonfly', + 'drawer', 'underdrawers', 'dress', 'dress_hat', 'dress_suit', + 'dresser', 'drill', 'drone', 'dropper', 'drum_(musical_instrument)', + 'drumstick', 'duck', 'duckling', 'duct_tape', 'duffel_bag', 'dumbbell', + 'dumpster', 'dustpan', 'eagle', 'earphone', 'earplug', 'earring', + 'easel', 'eclair', 'eel', 'egg', 'egg_roll', 'egg_yolk', 'eggbeater', + 'eggplant', 'electric_chair', 'refrigerator', 'elephant', 'elk', + 'envelope', 'eraser', 'escargot', 'eyepatch', 'falcon', 'fan', + 'faucet', 'fedora', 'ferret', 'Ferris_wheel', 'ferry', 'fig_(fruit)', + 'fighter_jet', 'figurine', 'file_cabinet', 'file_(tool)', 'fire_alarm', + 'fire_engine', 'fire_extinguisher', 'fire_hose', 'fireplace', + 'fireplug', 'first-aid_kit', 'fish', 'fish_(food)', 'fishbowl', + 'fishing_rod', 'flag', 'flagpole', 'flamingo', 'flannel', 'flap', + 'flash', 'flashlight', 'fleece', 'flip-flop_(sandal)', + 'flipper_(footwear)', 'flower_arrangement', 'flute_glass', 'foal', + 'folding_chair', 'food_processor', 'football_(American)', + 'football_helmet', 'footstool', 'fork', 'forklift', 'freight_car', + 'French_toast', 'freshener', 'frisbee', 'frog', 'fruit_juice', + 'frying_pan', 'fudge', 'funnel', 'futon', 'gag', 'garbage', + 'garbage_truck', 'garden_hose', 'gargle', 'gargoyle', 'garlic', + 'gasmask', 'gazelle', 'gelatin', 'gemstone', 'generator', + 'giant_panda', 'gift_wrap', 'ginger', 'giraffe', 'cincture', + 'glass_(drink_container)', 'globe', 'glove', 'goat', 'goggles', + 'goldfish', 'golf_club', 'golfcart', 'gondola_(boat)', 'goose', + 'gorilla', 'gourd', 'grape', 'grater', 'gravestone', 'gravy_boat', + 'green_bean', 
'green_onion', 'griddle', 'grill', 'grits', 'grizzly', + 'grocery_bag', 'guitar', 'gull', 'gun', 'hairbrush', 'hairnet', + 'hairpin', 'halter_top', 'ham', 'hamburger', 'hammer', 'hammock', + 'hamper', 'hamster', 'hair_dryer', 'hand_glass', 'hand_towel', + 'handcart', 'handcuff', 'handkerchief', 'handle', 'handsaw', + 'hardback_book', 'harmonium', 'hat', 'hatbox', 'veil', 'headband', + 'headboard', 'headlight', 'headscarf', 'headset', + 'headstall_(for_horses)', 'heart', 'heater', 'helicopter', 'helmet', + 'heron', 'highchair', 'hinge', 'hippopotamus', 'hockey_stick', 'hog', + 'home_plate_(baseball)', 'honey', 'fume_hood', 'hook', 'hookah', + 'hornet', 'horse', 'hose', 'hot-air_balloon', 'hotplate', 'hot_sauce', + 'hourglass', 'houseboat', 'hummingbird', 'hummus', 'polar_bear', + 'icecream', 'popsicle', 'ice_maker', 'ice_pack', 'ice_skate', + 'igniter', 'inhaler', 'iPod', 'iron_(for_clothing)', 'ironing_board', + 'jacket', 'jam', 'jar', 'jean', 'jeep', 'jelly_bean', 'jersey', + 'jet_plane', 'jewel', 'jewelry', 'joystick', 'jumpsuit', 'kayak', + 'keg', 'kennel', 'kettle', 'key', 'keycard', 'kilt', 'kimono', + 'kitchen_sink', 'kitchen_table', 'kite', 'kitten', 'kiwi_fruit', + 'knee_pad', 'knife', 'knitting_needle', 'knob', 'knocker_(on_a_door)', + 'koala', 'lab_coat', 'ladder', 'ladle', 'ladybug', 'lamb_(animal)', + 'lamb-chop', 'lamp', 'lamppost', 'lampshade', 'lantern', 'lanyard', + 'laptop_computer', 'lasagna', 'latch', 'lawn_mower', 'leather', + 'legging_(clothing)', 'Lego', 'legume', 'lemon', 'lemonade', 'lettuce', + 'license_plate', 'life_buoy', 'life_jacket', 'lightbulb', + 'lightning_rod', 'lime', 'limousine', 'lion', 'lip_balm', 'liquor', + 'lizard', 'log', 'lollipop', 'speaker_(stero_equipment)', 'loveseat', + 'machine_gun', 'magazine', 'magnet', 'mail_slot', 'mailbox_(at_home)', + 'mallard', 'mallet', 'mammoth', 'manatee', 'mandarin_orange', 'manger', + 'manhole', 'map', 'marker', 'martini', 'mascot', 'mashed_potato', + 'masher', 'mask', 'mast', 'mat_(gym_equipment)', 'matchbox', + 'mattress', 'measuring_cup', 'measuring_stick', 'meatball', 'medicine', + 'melon', 'microphone', 'microscope', 'microwave_oven', 'milestone', + 'milk', 'milk_can', 'milkshake', 'minivan', 'mint_candy', 'mirror', + 'mitten', 'mixer_(kitchen_tool)', 'money', + 'monitor_(computer_equipment) computer_monitor', 'monkey', 'motor', + 'motor_scooter', 'motor_vehicle', 'motorcycle', 'mound_(baseball)', + 'mouse_(computer_equipment)', 'mousepad', 'muffin', 'mug', 'mushroom', + 'music_stool', 'musical_instrument', 'nailfile', 'napkin', + 'neckerchief', 'necklace', 'necktie', 'needle', 'nest', 'newspaper', + 'newsstand', 'nightshirt', 'nosebag_(for_animals)', + 'noseband_(for_animals)', 'notebook', 'notepad', 'nut', 'nutcracker', + 'oar', 'octopus_(food)', 'octopus_(animal)', 'oil_lamp', 'olive_oil', + 'omelet', 'onion', 'orange_(fruit)', 'orange_juice', 'ostrich', + 'ottoman', 'oven', 'overalls_(clothing)', 'owl', 'packet', 'inkpad', + 'pad', 'paddle', 'padlock', 'paintbrush', 'painting', 'pajamas', + 'palette', 'pan_(for_cooking)', 'pan_(metal_container)', 'pancake', + 'pantyhose', 'papaya', 'paper_plate', 'paper_towel', 'paperback_book', + 'paperweight', 'parachute', 'parakeet', 'parasail_(sports)', 'parasol', + 'parchment', 'parka', 'parking_meter', 'parrot', + 'passenger_car_(part_of_a_train)', 'passenger_ship', 'passport', + 'pastry', 'patty_(food)', 'pea_(food)', 'peach', 'peanut_butter', + 'pear', 'peeler_(tool_for_fruit_and_vegetables)', 'wooden_leg', + 'pegboard', 'pelican', 'pen', 'pencil', 'pencil_box', 
+ 'pencil_sharpener', 'pendulum', 'penguin', 'pennant', 'penny_(coin)', + 'pepper', 'pepper_mill', 'perfume', 'persimmon', 'person', 'pet', + 'pew_(church_bench)', 'phonebook', 'phonograph_record', 'piano', + 'pickle', 'pickup_truck', 'pie', 'pigeon', 'piggy_bank', 'pillow', + 'pin_(non_jewelry)', 'pineapple', 'pinecone', 'ping-pong_ball', + 'pinwheel', 'tobacco_pipe', 'pipe', 'pistol', 'pita_(bread)', + 'pitcher_(vessel_for_liquid)', 'pitchfork', 'pizza', 'place_mat', + 'plate', 'platter', 'playpen', 'pliers', 'plow_(farm_equipment)', + 'plume', 'pocket_watch', 'pocketknife', 'poker_(fire_stirring_tool)', + 'pole', 'polo_shirt', 'poncho', 'pony', 'pool_table', 'pop_(soda)', + 'postbox_(public)', 'postcard', 'poster', 'pot', 'flowerpot', 'potato', + 'potholder', 'pottery', 'pouch', 'power_shovel', 'prawn', 'pretzel', + 'printer', 'projectile_(weapon)', 'projector', 'propeller', 'prune', + 'pudding', 'puffer_(fish)', 'puffin', 'pug-dog', 'pumpkin', 'puncher', + 'puppet', 'puppy', 'quesadilla', 'quiche', 'quilt', 'rabbit', + 'race_car', 'racket', 'radar', 'radiator', 'radio_receiver', 'radish', + 'raft', 'rag_doll', 'raincoat', 'ram_(animal)', 'raspberry', 'rat', + 'razorblade', 'reamer_(juicer)', 'rearview_mirror', 'receipt', + 'recliner', 'record_player', 'reflector', 'remote_control', + 'rhinoceros', 'rib_(food)', 'rifle', 'ring', 'river_boat', 'road_map', + 'robe', 'rocking_chair', 'rodent', 'roller_skate', 'Rollerblade', + 'rolling_pin', 'root_beer', 'router_(computer_equipment)', + 'rubber_band', 'runner_(carpet)', 'plastic_bag', + 'saddle_(on_an_animal)', 'saddle_blanket', 'saddlebag', 'safety_pin', + 'sail', 'salad', 'salad_plate', 'salami', 'salmon_(fish)', + 'salmon_(food)', 'salsa', 'saltshaker', 'sandal_(type_of_shoe)', + 'sandwich', 'satchel', 'saucepan', 'saucer', 'sausage', 'sawhorse', + 'saxophone', 'scale_(measuring_instrument)', 'scarecrow', 'scarf', + 'school_bus', 'scissors', 'scoreboard', 'scraper', 'screwdriver', + 'scrubbing_brush', 'sculpture', 'seabird', 'seahorse', 'seaplane', + 'seashell', 'sewing_machine', 'shaker', 'shampoo', 'shark', + 'sharpener', 'Sharpie', 'shaver_(electric)', 'shaving_cream', 'shawl', + 'shears', 'sheep', 'shepherd_dog', 'sherbert', 'shield', 'shirt', + 'shoe', 'shopping_bag', 'shopping_cart', 'short_pants', 'shot_glass', + 'shoulder_bag', 'shovel', 'shower_head', 'shower_cap', + 'shower_curtain', 'shredder_(for_paper)', 'signboard', 'silo', 'sink', + 'skateboard', 'skewer', 'ski', 'ski_boot', 'ski_parka', 'ski_pole', + 'skirt', 'skullcap', 'sled', 'sleeping_bag', 'sling_(bandage)', + 'slipper_(footwear)', 'smoothie', 'snake', 'snowboard', 'snowman', + 'snowmobile', 'soap', 'soccer_ball', 'sock', 'sofa', 'softball', + 'solar_array', 'sombrero', 'soup', 'soup_bowl', 'soupspoon', + 'sour_cream', 'soya_milk', 'space_shuttle', 'sparkler_(fireworks)', + 'spatula', 'spear', 'spectacles', 'spice_rack', 'spider', 'crawfish', + 'sponge', 'spoon', 'sportswear', 'spotlight', 'squid_(food)', + 'squirrel', 'stagecoach', 'stapler_(stapling_machine)', 'starfish', + 'statue_(sculpture)', 'steak_(food)', 'steak_knife', 'steering_wheel', + 'stepladder', 'step_stool', 'stereo_(sound_system)', 'stew', 'stirrer', + 'stirrup', 'stool', 'stop_sign', 'brake_light', 'stove', 'strainer', + 'strap', 'straw_(for_drinking)', 'strawberry', 'street_sign', + 'streetlight', 'string_cheese', 'stylus', 'subwoofer', 'sugar_bowl', + 'sugarcane_(plant)', 'suit_(clothing)', 'sunflower', 'sunglasses', + 'sunhat', 'surfboard', 'sushi', 'mop', 'sweat_pants', 'sweatband', + 
'sweater', 'sweatshirt', 'sweet_potato', 'swimsuit', 'sword', + 'syringe', 'Tabasco_sauce', 'table-tennis_table', 'table', + 'table_lamp', 'tablecloth', 'tachometer', 'taco', 'tag', 'taillight', + 'tambourine', 'army_tank', 'tank_(storage_vessel)', + 'tank_top_(clothing)', 'tape_(sticky_cloth_or_paper)', 'tape_measure', + 'tapestry', 'tarp', 'tartan', 'tassel', 'tea_bag', 'teacup', + 'teakettle', 'teapot', 'teddy_bear', 'telephone', 'telephone_booth', + 'telephone_pole', 'telephoto_lens', 'television_camera', + 'television_set', 'tennis_ball', 'tennis_racket', 'tequila', + 'thermometer', 'thermos_bottle', 'thermostat', 'thimble', 'thread', + 'thumbtack', 'tiara', 'tiger', 'tights_(clothing)', 'timer', 'tinfoil', + 'tinsel', 'tissue_paper', 'toast_(food)', 'toaster', 'toaster_oven', + 'toilet', 'toilet_tissue', 'tomato', 'tongs', 'toolbox', 'toothbrush', + 'toothpaste', 'toothpick', 'cover', 'tortilla', 'tow_truck', 'towel', + 'towel_rack', 'toy', 'tractor_(farm_equipment)', 'traffic_light', + 'dirt_bike', 'trailer_truck', 'train_(railroad_vehicle)', 'trampoline', + 'tray', 'trench_coat', 'triangle_(musical_instrument)', 'tricycle', + 'tripod', 'trousers', 'truck', 'truffle_(chocolate)', 'trunk', 'vat', + 'turban', 'turkey_(food)', 'turnip', 'turtle', 'turtleneck_(clothing)', + 'typewriter', 'umbrella', 'underwear', 'unicycle', 'urinal', 'urn', + 'vacuum_cleaner', 'vase', 'vending_machine', 'vent', 'vest', + 'videotape', 'vinegar', 'violin', 'vodka', 'volleyball', 'vulture', + 'waffle', 'waffle_iron', 'wagon', 'wagon_wheel', 'walking_stick', + 'wall_clock', 'wall_socket', 'wallet', 'walrus', 'wardrobe', + 'washbasin', 'automatic_washer', 'watch', 'water_bottle', + 'water_cooler', 'water_faucet', 'water_heater', 'water_jug', + 'water_gun', 'water_scooter', 'water_ski', 'water_tower', + 'watering_can', 'watermelon', 'weathervane', 'webcam', 'wedding_cake', + 'wedding_ring', 'wet_suit', 'wheel', 'wheelchair', 'whipped_cream', + 'whistle', 'wig', 'wind_chime', 'windmill', 'window_box_(for_plants)', + 'windshield_wiper', 'windsock', 'wine_bottle', 'wine_bucket', + 'wineglass', 'blinder_(for_horses)', 'wok', 'wolf', 'wooden_spoon', + 'wreath', 'wrench', 'wristband', 'wristlet', 'yacht', 'yogurt', + 'yoke_(animal_equipment)', 'zebra', 'zucchini') + + def load_annotations(self, ann_file): + try: + import lvis + assert lvis.__version__ >= '10.5.3' + from lvis import LVIS + except AssertionError: + raise AssertionError('Incompatible version of lvis is installed. ' + 'Run pip uninstall lvis first. Then run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis. ') + except ImportError: + raise ImportError('Package lvis is not installed. Please run pip ' + 'install mmlvis to install open-mmlab forked ' + 'lvis.') + self.coco = LVIS(ann_file) + assert not self.custom_classes, 'LVIS custom classes is not supported' + self.cat_ids = self.coco.get_cat_ids() + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + data_infos = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + # coco_url is used in LVISv1 instead of file_name + # e.g. 
http://images.cocodataset.org/train2017/000000391895.jpg
+            # the train/val split is specified in the url
+            info['filename'] = info['coco_url'].replace(
+                'http://images.cocodataset.org/', '')
+            data_infos.append(info)
+        return data_infos
diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/__init__.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6f424debd1623e7511dd77da464a6639d816745
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/__init__.py
@@ -0,0 +1,25 @@
+from .auto_augment import (AutoAugment, BrightnessTransform, ColorTransform,
+                           ContrastTransform, EqualizeTransform, Rotate, Shear,
+                           Translate)
+from .compose import Compose
+from .formating import (Collect, DefaultFormatBundle, ImageToTensor,
+                        ToDataContainer, ToTensor, Transpose, to_tensor)
+from .instaboost import InstaBoost
+from .loading import (LoadAnnotations, LoadImageFromFile, LoadImageFromWebcam,
+                      LoadMultiChannelImageFromFiles, LoadProposals)
+from .test_time_aug import MultiScaleFlipAug
+from .transforms import (Albu, CutOut, Expand, MinIoURandomCrop, Normalize,
+                         Pad, PhotoMetricDistortion, RandomCenterCropPad,
+                         RandomCrop, RandomFlip, Resize, SegRescale)
+
+__all__ = [
+    'Compose', 'to_tensor', 'ToTensor', 'ImageToTensor', 'ToDataContainer',
+    'Transpose', 'Collect', 'DefaultFormatBundle', 'LoadAnnotations',
+    'LoadImageFromFile', 'LoadImageFromWebcam',
+    'LoadMultiChannelImageFromFiles', 'LoadProposals', 'MultiScaleFlipAug',
+    'Resize', 'RandomFlip', 'Pad', 'RandomCrop', 'Normalize', 'SegRescale',
+    'MinIoURandomCrop', 'Expand', 'PhotoMetricDistortion', 'Albu',
+    'InstaBoost', 'RandomCenterCropPad', 'AutoAugment', 'CutOut', 'Shear',
+    'Rotate', 'ColorTransform', 'EqualizeTransform', 'BrightnessTransform',
+    'ContrastTransform', 'Translate'
+]
diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/auto_augment.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/auto_augment.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d225331afa69440587e2f13daf1aaacff9ff786
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/auto_augment.py
@@ -0,0 +1,890 @@
+import copy
+
+import cv2
+import mmcv
+import numpy as np
+
+from ..builder import PIPELINES
+from .compose import Compose
+
+_MAX_LEVEL = 10
+
+
+def level_to_value(level, max_value):
+    """Map from level to values based on max_value."""
+    return (level / _MAX_LEVEL) * max_value
+
+
+def enhance_level_to_value(level, a=1.8, b=0.1):
+    """Map from level to values."""
+    return (level / _MAX_LEVEL) * a + b
+
+
+def random_negative(value, random_negative_prob):
+    """Randomly negate value based on random_negative_prob."""
+    return -value if np.random.rand() < random_negative_prob else value
+
+
+def bbox2fields():
+    """The key correspondence from bboxes to labels, masks and
+    segmentations."""
+    bbox2label = {
+        'gt_bboxes': 'gt_labels',
+        'gt_bboxes_ignore': 'gt_labels_ignore'
+    }
+    bbox2mask = {
+        'gt_bboxes': 'gt_masks',
+        'gt_bboxes_ignore': 'gt_masks_ignore'
+    }
+    bbox2seg = {
+        'gt_bboxes': 'gt_semantic_seg',
+    }
+    return bbox2label, bbox2mask, bbox2seg
+
+
+@PIPELINES.register_module()
+class AutoAugment(object):
+    """Auto augmentation.
+
+    This data augmentation is proposed in `Learning Data Augmentation
+    Strategies for Object Detection <https://arxiv.org/abs/1906.11172>`_.
+
+    TODO: Implement 'Shear', 'Sharpness' and 'Rotate' transforms
+
+    Args:
+        policies (list[list[dict]]): The policies of auto augmentation.
Each
+            policy in ``policies`` is a specific augmentation policy, and is
+            composed of several augmentations (dict). When AutoAugment is
+            called, a random policy in ``policies`` will be selected to
+            augment images.
+
+    Examples:
+        >>> replace = (104, 116, 124)
+        >>> policies = [
+        >>>     [
+        >>>         dict(type='Sharpness', prob=0.0, level=8),
+        >>>         dict(
+        >>>             type='Shear',
+        >>>             prob=0.4,
+        >>>             level=0,
+        >>>             replace=replace,
+        >>>             axis='x')
+        >>>     ],
+        >>>     [
+        >>>         dict(
+        >>>             type='Rotate',
+        >>>             prob=0.6,
+        >>>             level=10,
+        >>>             replace=replace),
+        >>>         dict(type='Color', prob=1.0, level=6)
+        >>>     ]
+        >>> ]
+        >>> augmentation = AutoAugment(policies)
+        >>> img = np.ones((100, 100, 3))
+        >>> gt_bboxes = np.ones((10, 4))
+        >>> results = dict(img=img, gt_bboxes=gt_bboxes)
+        >>> results = augmentation(results)
+    """
+
+    def __init__(self, policies):
+        assert isinstance(policies, list) and len(policies) > 0, \
+            'Policies must be a non-empty list.'
+        for policy in policies:
+            assert isinstance(policy, list) and len(policy) > 0, \
+                'Each policy in policies must be a non-empty list.'
+            for augment in policy:
+                assert isinstance(augment, dict) and 'type' in augment, \
+                    'Each specific augmentation must be a dict with key' \
+                    ' "type".'
+
+        self.policies = copy.deepcopy(policies)
+        self.transforms = [Compose(policy) for policy in self.policies]
+
+    def __call__(self, results):
+        transform = np.random.choice(self.transforms)
+        return transform(results)
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}(policies={self.policies})'
+
+
+@PIPELINES.register_module()
+class Shear(object):
+    """Apply Shear Transformation to image (and its corresponding bbox, mask,
+    segmentation).
+
+    Args:
+        level (int | float): The level should be in range [0, _MAX_LEVEL].
+        img_fill_val (int | float | tuple): The filled values for image border.
+            If float, the same fill value will be used for all the three
+            channels of image. If tuple, it should have 3 elements.
+        seg_ignore_label (int): The fill value used for segmentation map.
+            Note this value must equal ``ignore_label`` in ``semantic_head``
+            of the corresponding config. Default 255.
+        prob (float): The probability of performing Shear, which should be in
+            range [0, 1].
+        direction (str): The direction for shear, either "horizontal"
+            or "vertical".
+        max_shear_magnitude (float): The maximum magnitude for Shear
+            transformation.
+        random_negative_prob (float): The probability that turns the
+            offset negative. Should be in range [0, 1].
+        interpolation (str): Same as in :func:`mmcv.imshear`.
+    """
+
+    def __init__(self,
+                 level,
+                 img_fill_val=128,
+                 seg_ignore_label=255,
+                 prob=0.5,
+                 direction='horizontal',
+                 max_shear_magnitude=0.3,
+                 random_negative_prob=0.5,
+                 interpolation='bilinear'):
+        assert isinstance(level, (int, float)), 'The level must be type ' \
+            f'int or float, got {type(level)}.'
+        assert 0 <= level <= _MAX_LEVEL, 'The level should be in range ' \
+            f'[0,{_MAX_LEVEL}], got {level}.'
+        if isinstance(img_fill_val, (float, int)):
+            img_fill_val = tuple([float(img_fill_val)] * 3)
+        elif isinstance(img_fill_val, tuple):
+            assert len(img_fill_val) == 3, 'img_fill_val as tuple must ' \
+                f'have 3 elements. got {len(img_fill_val)}.'
+            img_fill_val = tuple([float(val) for val in img_fill_val])
+        else:
+            raise ValueError(
+                'img_fill_val must be float or tuple with 3 elements.')
+        assert np.all([0 <= val <= 255 for val in img_fill_val]), 'all ' \
+            'elements of img_fill_val should be in range [0, 255], ' \
+            f'got {img_fill_val}.'
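All of these transforms reduce their strength to a single `level` knob through the two helpers defined at the top of `auto_augment.py`. A standalone sketch of the mapping, assuming only the module-level `_MAX_LEVEL = 10` shown above:

```python
_MAX_LEVEL = 10

def level_to_value(level, max_value):
    # Linear map: level 0 -> 0, level _MAX_LEVEL -> max_value.
    return (level / _MAX_LEVEL) * max_value

def enhance_level_to_value(level, a=1.8, b=0.1):
    # Affine map used by Color/Brightness/Contrast: level 0 -> 0.1,
    # level 5 -> 1.0 (the identity factor), level 10 -> 1.9.
    return (level / _MAX_LEVEL) * a + b

# Shear with the default max_shear_magnitude=0.3:
print(level_to_value(5, 0.3))       # -> 0.15
print(enhance_level_to_value(5))    # -> 1.0
```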
+ assert 0 <= prob <= 1.0, 'The probability of shear should be in ' \ + f'range [0,1]. got {prob}.' + assert direction in ('horizontal', 'vertical'), 'direction must ' \ + f'in be either "horizontal" or "vertical". got {direction}.' + assert isinstance(max_shear_magnitude, float), 'max_shear_magnitude ' \ + f'should be type float. got {type(max_shear_magnitude)}.' + assert 0. <= max_shear_magnitude <= 1., 'Defaultly ' \ + 'max_shear_magnitude should be in range [0,1]. ' \ + f'got {max_shear_magnitude}.' + self.level = level + self.magnitude = level_to_value(level, max_shear_magnitude) + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.prob = prob + self.direction = direction + self.max_shear_magnitude = max_shear_magnitude + self.random_negative_prob = random_negative_prob + self.interpolation = interpolation + + def _shear_img(self, + results, + magnitude, + direction='horizontal', + interpolation='bilinear'): + """Shear the image. + + Args: + results (dict): Result dict from loading pipeline. + magnitude (int | float): The magnitude used for shear. + direction (str): The direction for shear, either "horizontal" + or "vertical". + interpolation (str): Same as in :func:`mmcv.imshear`. + """ + for key in results.get('img_fields', ['img']): + img = results[key] + img_sheared = mmcv.imshear( + img, + magnitude, + direction, + border_value=self.img_fill_val, + interpolation=interpolation) + results[key] = img_sheared.astype(img.dtype) + + def _shear_bboxes(self, results, magnitude): + """Shear the bboxes.""" + h, w, c = results['img_shape'] + if self.direction == 'horizontal': + shear_matrix = np.stack([[1, magnitude], + [0, 1]]).astype(np.float32) # [2, 2] + else: + shear_matrix = np.stack([[1, 0], [magnitude, + 1]]).astype(np.float32) + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + coordinates = np.stack([[min_x, min_y], [max_x, min_y], + [min_x, max_y], + [max_x, max_y]]) # [4, 2, nb_box, 1] + coordinates = coordinates[..., 0].transpose( + (2, 1, 0)).astype(np.float32) # [nb_box, 2, 4] + new_coords = np.matmul(shear_matrix[None, :, :], + coordinates) # [nb_box, 2, 4] + min_x = np.min(new_coords[:, 0, :], axis=-1) + min_y = np.min(new_coords[:, 1, :], axis=-1) + max_x = np.max(new_coords[:, 0, :], axis=-1) + max_y = np.max(new_coords[:, 1, :], axis=-1) + min_x = np.clip(min_x, a_min=0, a_max=w) + min_y = np.clip(min_y, a_min=0, a_max=h) + max_x = np.clip(max_x, a_min=min_x, a_max=w) + max_y = np.clip(max_y, a_min=min_y, a_max=h) + results[key] = np.stack([min_x, min_y, max_x, max_y], + axis=-1).astype(results[key].dtype) + + def _shear_masks(self, + results, + magnitude, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Shear the masks.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.shear((h, w), + magnitude, + direction, + border_value=fill_val, + interpolation=interpolation) + + def _shear_seg(self, + results, + magnitude, + direction='horizontal', + fill_val=255, + interpolation='bilinear'): + """Shear the segmentation maps.""" + for key in results.get('seg_fields', []): + seg = results[key] + results[key] = mmcv.imshear( + seg, + magnitude, + direction, + border_value=fill_val, + interpolation=interpolation).astype(seg.dtype) + + def _filter_invalid(self, results, min_bbox_size=0): + """Filter bboxes and corresponding masks too small after shear + augmentation.""" 
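The corner arithmetic in `_shear_bboxes` above is easier to follow for a single box. A minimal NumPy sketch of the same idea, assuming one `[x1, y1, x2, y2]` box and omitting the clipping to the image:

```python
import numpy as np

def shear_bbox(bbox, magnitude, direction='horizontal'):
    # Same 2x2 shear matrix as Shear._shear_bboxes above.
    if direction == 'horizontal':
        m = np.array([[1, magnitude], [0, 1]], dtype=np.float32)
    else:
        m = np.array([[1, 0], [magnitude, 1]], dtype=np.float32)
    x1, y1, x2, y2 = bbox
    corners = np.array([[x1, y1], [x2, y1], [x1, y2], [x2, y2]],
                       dtype=np.float32).T   # shape (2, 4)
    sheared = m @ corners                    # shear all four corners
    # Re-fit an axis-aligned box around the sheared corners.
    return [sheared[0].min(), sheared[1].min(),
            sheared[0].max(), sheared[1].max()]

# Horizontal shear moves x by magnitude * y, so the box widens:
print(shear_bbox([10, 10, 20, 20], 0.3))  # [13.0, 10.0, 26.0, 20.0]
```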
+ bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_bbox_size) & (bbox_h > min_bbox_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + + def __call__(self, results): + """Call function to shear images, bounding boxes, masks and semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Sheared results. + """ + if np.random.rand() > self.prob: + return results + magnitude = random_negative(self.magnitude, self.random_negative_prob) + self._shear_img(results, magnitude, self.direction, self.interpolation) + self._shear_bboxes(results, magnitude) + # fill_val set to 0 for background of mask. + self._shear_masks( + results, + magnitude, + self.direction, + fill_val=0, + interpolation=self.interpolation) + self._shear_seg( + results, + magnitude, + self.direction, + fill_val=self.seg_ignore_label, + interpolation=self.interpolation) + self._filter_invalid(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'img_fill_val={self.img_fill_val}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'direction={self.direction}, ' + repr_str += f'max_shear_magnitude={self.max_shear_magnitude}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}, ' + repr_str += f'interpolation={self.interpolation})' + return repr_str + + +@PIPELINES.register_module() +class Rotate(object): + """Apply Rotate Transformation to image (and its corresponding bbox, mask, + segmentation). + + Args: + level (int | float): The level should be in range (0,_MAX_LEVEL]. + scale (int | float): Isotropic scale factor. Same in + ``mmcv.imrotate``. + center (int | float | tuple[float]): Center point (w, h) of the + rotation in the source image. If None, the center of the + image will be used. Same in ``mmcv.imrotate``. + img_fill_val (int | float | tuple): The fill value for image border. + If float, the same value will be used for all the three + channels of image. If tuple, the should be 3 elements (e.g. + equals the number of channels for image). + seg_ignore_label (int): The fill value used for segmentation map. + Note this value must equals ``ignore_label`` in ``semantic_head`` + of the corresponding config. Default 255. + prob (float): The probability for perform transformation and + should be in range 0 to 1. + max_rotate_angle (int | float): The maximum angles for rotate + transformation. + random_negative_prob (float): The probability that turns the + offset negative. + """ + + def __init__(self, + level, + scale=1, + center=None, + img_fill_val=128, + seg_ignore_label=255, + prob=0.5, + max_rotate_angle=30, + random_negative_prob=0.5): + assert isinstance(level, (int, float)), \ + f'The level must be type int or float. got {type(level)}.' + assert 0 <= level <= _MAX_LEVEL, \ + f'The level should be in range (0,{_MAX_LEVEL}]. got {level}.' 
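`Shear`, `Rotate` and `Translate` all end with a `_filter_invalid` pass like the one just shown. A condensed sketch of that bookkeeping, assuming plain NumPy arrays for boxes and labels:

```python
import numpy as np

def filter_invalid(bboxes, labels, min_bbox_size=0):
    # Keep only boxes that still have positive width and height after the
    # geometric transform, and index labels with the same mask so the two
    # arrays stay aligned (masks would be filtered identically).
    w = bboxes[:, 2] - bboxes[:, 0]
    h = bboxes[:, 3] - bboxes[:, 1]
    valid = (w > min_bbox_size) & (h > min_bbox_size)
    return bboxes[valid], labels[valid]

bboxes = np.array([[0, 0, 10, 10], [40, 5, 40, 9]], dtype=np.float32)
labels = np.array([3, 7])
kept_boxes, kept_labels = filter_invalid(bboxes, labels)
print(kept_boxes.shape, kept_labels)  # (1, 4) [3] -- zero-width box dropped
```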
+ assert isinstance(scale, (int, float)), \ + f'The scale must be type int or float. got type {type(scale)}.' + if isinstance(center, (int, float)): + center = (center, center) + elif isinstance(center, tuple): + assert len(center) == 2, 'center with type tuple must have '\ + f'2 elements. got {len(center)} elements.' + else: + assert center is None, 'center must be None or type int, '\ + f'float or tuple, got type {type(center)}.' + if isinstance(img_fill_val, (float, int)): + img_fill_val = tuple([float(img_fill_val)] * 3) + elif isinstance(img_fill_val, tuple): + assert len(img_fill_val) == 3, 'img_fill_val as tuple must '\ + f'have 3 elements. got {len(img_fill_val)}.' + img_fill_val = tuple([float(val) for val in img_fill_val]) + else: + raise ValueError( + 'img_fill_val must be float or tuple with 3 elements.') + assert np.all([0 <= val <= 255 for val in img_fill_val]), \ + 'all elements of img_fill_val should between range [0,255]. '\ + f'got {img_fill_val}.' + assert 0 <= prob <= 1.0, 'The probability should be in range [0,1]. '\ + 'got {prob}.' + assert isinstance(max_rotate_angle, (int, float)), 'max_rotate_angle '\ + f'should be type int or float. got type {type(max_rotate_angle)}.' + self.level = level + self.scale = scale + # Rotation angle in degrees. Positive values mean + # clockwise rotation. + self.angle = level_to_value(level, max_rotate_angle) + self.center = center + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.prob = prob + self.max_rotate_angle = max_rotate_angle + self.random_negative_prob = random_negative_prob + + def _rotate_img(self, results, angle, center=None, scale=1.0): + """Rotate the image. + + Args: + results (dict): Result dict from loading pipeline. + angle (float): Rotation angle in degrees, positive values + mean clockwise rotation. Same in ``mmcv.imrotate``. + center (tuple[float], optional): Center point (w, h) of the + rotation. Same in ``mmcv.imrotate``. + scale (int | float): Isotropic scale factor. Same in + ``mmcv.imrotate``. 
+ """ + for key in results.get('img_fields', ['img']): + img = results[key].copy() + img_rotated = mmcv.imrotate( + img, angle, center, scale, border_value=self.img_fill_val) + results[key] = img_rotated.astype(img.dtype) + + def _rotate_bboxes(self, results, rotate_matrix): + """Rotate the bboxes.""" + h, w, c = results['img_shape'] + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + coordinates = np.stack([[min_x, min_y], [max_x, min_y], + [min_x, max_y], + [max_x, max_y]]) # [4, 2, nb_bbox, 1] + # pad 1 to convert from format [x, y] to homogeneous + # coordinates format [x, y, 1] + coordinates = np.concatenate( + (coordinates, + np.ones((4, 1, coordinates.shape[2], 1), coordinates.dtype)), + axis=1) # [4, 3, nb_bbox, 1] + coordinates = coordinates.transpose( + (2, 0, 1, 3)) # [nb_bbox, 4, 3, 1] + rotated_coords = np.matmul(rotate_matrix, + coordinates) # [nb_bbox, 4, 2, 1] + rotated_coords = rotated_coords[..., 0] # [nb_bbox, 4, 2] + min_x, min_y = np.min( + rotated_coords[:, :, 0], axis=1), np.min( + rotated_coords[:, :, 1], axis=1) + max_x, max_y = np.max( + rotated_coords[:, :, 0], axis=1), np.max( + rotated_coords[:, :, 1], axis=1) + min_x, min_y = np.clip( + min_x, a_min=0, a_max=w), np.clip( + min_y, a_min=0, a_max=h) + max_x, max_y = np.clip( + max_x, a_min=min_x, a_max=w), np.clip( + max_y, a_min=min_y, a_max=h) + results[key] = np.stack([min_x, min_y, max_x, max_y], + axis=-1).astype(results[key].dtype) + + def _rotate_masks(self, + results, + angle, + center=None, + scale=1.0, + fill_val=0): + """Rotate the masks.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.rotate((h, w), angle, center, scale, fill_val) + + def _rotate_seg(self, + results, + angle, + center=None, + scale=1.0, + fill_val=255): + """Rotate the segmentation map.""" + for key in results.get('seg_fields', []): + seg = results[key].copy() + results[key] = mmcv.imrotate( + seg, angle, center, scale, + border_value=fill_val).astype(seg.dtype) + + def _filter_invalid(self, results, min_bbox_size=0): + """Filter bboxes and corresponding masks too small after rotate + augmentation.""" + bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_bbox_size) & (bbox_h > min_bbox_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + + def __call__(self, results): + """Call function to rotate images, bounding boxes, masks and semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Rotated results. 
+ """ + if np.random.rand() > self.prob: + return results + h, w = results['img'].shape[:2] + center = self.center + if center is None: + center = ((w - 1) * 0.5, (h - 1) * 0.5) + angle = random_negative(self.angle, self.random_negative_prob) + self._rotate_img(results, angle, center, self.scale) + rotate_matrix = cv2.getRotationMatrix2D(center, -angle, self.scale) + self._rotate_bboxes(results, rotate_matrix) + self._rotate_masks(results, angle, center, self.scale, fill_val=0) + self._rotate_seg( + results, angle, center, self.scale, fill_val=self.seg_ignore_label) + self._filter_invalid(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'scale={self.scale}, ' + repr_str += f'center={self.center}, ' + repr_str += f'img_fill_val={self.img_fill_val}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'max_rotate_angle={self.max_rotate_angle}, ' + repr_str += f'random_negative_prob={self.random_negative_prob})' + return repr_str + + +@PIPELINES.register_module() +class Translate(object): + """Translate the images, bboxes, masks and segmentation maps horizontally + or vertically. + + Args: + level (int | float): The level for Translate and should be in + range [0,_MAX_LEVEL]. + prob (float): The probability for performing translation and + should be in range [0, 1]. + img_fill_val (int | float | tuple): The filled value for image + border. If float, the same fill value will be used for all + the three channels of image. If tuple, the should be 3 + elements (e.g. equals the number of channels for image). + seg_ignore_label (int): The fill value used for segmentation map. + Note this value must equals ``ignore_label`` in ``semantic_head`` + of the corresponding config. Default 255. + direction (str): The translate direction, either "horizontal" + or "vertical". + max_translate_offset (int | float): The maximum pixel's offset for + Translate. + random_negative_prob (float): The probability that turns the + offset negative. + min_size (int | float): The minimum pixel for filtering + invalid bboxes after the translation. + """ + + def __init__(self, + level, + prob=0.5, + img_fill_val=128, + seg_ignore_label=255, + direction='horizontal', + max_translate_offset=250., + random_negative_prob=0.5, + min_size=0): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level used for calculating Translate\'s offset should be ' \ + 'in range [0,_MAX_LEVEL]' + assert 0 <= prob <= 1.0, \ + 'The probability of translation should be in range [0, 1].' + if isinstance(img_fill_val, (float, int)): + img_fill_val = tuple([float(img_fill_val)] * 3) + elif isinstance(img_fill_val, tuple): + assert len(img_fill_val) == 3, \ + 'img_fill_val as tuple must have 3 elements.' + img_fill_val = tuple([float(val) for val in img_fill_val]) + else: + raise ValueError('img_fill_val must be type float or tuple.') + assert np.all([0 <= val <= 255 for val in img_fill_val]), \ + 'all elements of img_fill_val should between range [0,255].' + assert direction in ('horizontal', 'vertical'), \ + 'direction should be "horizontal" or "vertical".' + assert isinstance(max_translate_offset, (int, float)), \ + 'The max_translate_offset must be type int or float.' 
+ # the offset used for translation + self.offset = int(level_to_value(level, max_translate_offset)) + self.level = level + self.prob = prob + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.direction = direction + self.max_translate_offset = max_translate_offset + self.random_negative_prob = random_negative_prob + self.min_size = min_size + + def _translate_img(self, results, offset, direction='horizontal'): + """Translate the image. + + Args: + results (dict): Result dict from loading pipeline. + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + """ + for key in results.get('img_fields', ['img']): + img = results[key].copy() + results[key] = mmcv.imtranslate( + img, offset, direction, self.img_fill_val).astype(img.dtype) + + def _translate_bboxes(self, results, offset): + """Shift bboxes horizontally or vertically, according to offset.""" + h, w, c = results['img_shape'] + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + if self.direction == 'horizontal': + min_x = np.maximum(0, min_x + offset) + max_x = np.minimum(w, max_x + offset) + elif self.direction == 'vertical': + min_y = np.maximum(0, min_y + offset) + max_y = np.minimum(h, max_y + offset) + + # the boxs translated outside of image will be filtered along with + # the corresponding masks, by invoking ``_filter_invalid``. + results[key] = np.concatenate([min_x, min_y, max_x, max_y], + axis=-1) + + def _translate_masks(self, + results, + offset, + direction='horizontal', + fill_val=0): + """Translate masks horizontally or vertically.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.translate((h, w), offset, direction, fill_val) + + def _translate_seg(self, + results, + offset, + direction='horizontal', + fill_val=255): + """Translate segmentation maps horizontally or vertically.""" + for key in results.get('seg_fields', []): + seg = results[key].copy() + results[key] = mmcv.imtranslate(seg, offset, direction, + fill_val).astype(seg.dtype) + + def _filter_invalid(self, results, min_size=0): + """Filter bboxes and masks too small or translated out of image.""" + bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_size) & (bbox_h > min_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + return results + + def __call__(self, results): + """Call function to translate images, bounding boxes, masks and + semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Translated results. + """ + if np.random.rand() > self.prob: + return results + offset = random_negative(self.offset, self.random_negative_prob) + self._translate_img(results, offset, self.direction) + self._translate_bboxes(results, offset) + # fill_val defaultly 0 for BitmapMasks and None for PolygonMasks. 
+        self._translate_masks(results, offset, self.direction)
+        # fill_val set to ``seg_ignore_label`` for the ignored value
+        # of segmentation map.
+        self._translate_seg(
+            results, offset, self.direction, fill_val=self.seg_ignore_label)
+        self._filter_invalid(results, min_size=self.min_size)
+        return results
+
+
+@PIPELINES.register_module()
+class ColorTransform(object):
+    """Apply Color transformation to image. The bboxes, masks, and
+    segmentations are not modified.
+
+    Args:
+        level (int | float): Should be in range [0,_MAX_LEVEL].
+        prob (float): The probability for performing Color transformation.
+    """
+
+    def __init__(self, level, prob=0.5):
+        assert isinstance(level, (int, float)), \
+            'The level must be type int or float.'
+        assert 0 <= level <= _MAX_LEVEL, \
+            'The level should be in range [0,_MAX_LEVEL].'
+        assert 0 <= prob <= 1.0, \
+            'The probability should be in range [0,1].'
+        self.level = level
+        self.prob = prob
+        self.factor = enhance_level_to_value(level)
+
+    def _adjust_color_img(self, results, factor=1.0):
+        """Apply Color transformation to image."""
+        for key in results.get('img_fields', ['img']):
+            # NOTE: by default the image is assumed to be in BGR format
+            img = results[key]
+            results[key] = mmcv.adjust_color(img, factor).astype(img.dtype)
+
+    def __call__(self, results):
+        """Call function for Color transformation.
+
+        Args:
+            results (dict): Result dict from loading pipeline.
+
+        Returns:
+            dict: Colored results.
+        """
+        if np.random.rand() > self.prob:
+            return results
+        self._adjust_color_img(results, self.factor)
+        return results
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f'(level={self.level}, '
+        repr_str += f'prob={self.prob})'
+        return repr_str
+
+
+@PIPELINES.register_module()
+class EqualizeTransform(object):
+    """Apply Equalize transformation to image. The bboxes, masks and
+    segmentations are not modified.
+
+    Args:
+        prob (float): The probability for performing Equalize transformation.
+    """
+
+    def __init__(self, prob=0.5):
+        assert 0 <= prob <= 1.0, \
+            'The probability should be in range [0,1].'
+        self.prob = prob
+
+    def _imequalize(self, results):
+        """Equalizes the histogram of one image."""
+        for key in results.get('img_fields', ['img']):
+            img = results[key]
+            results[key] = mmcv.imequalize(img).astype(img.dtype)
+
+    def __call__(self, results):
+        """Call function for Equalize transformation.
+
+        Args:
+            results (dict): Results dict from loading pipeline.
+
+        Returns:
+            dict: Results after the transformation.
+        """
+        if np.random.rand() > self.prob:
+            return results
+        self._imequalize(results)
+        return results
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f'(prob={self.prob})'
+        return repr_str
+
+
+@PIPELINES.register_module()
+class BrightnessTransform(object):
+    """Apply Brightness transformation to image. The bboxes, masks and
+    segmentations are not modified.
+
+    Args:
+        level (int | float): Should be in range [0,_MAX_LEVEL].
+        prob (float): The probability for performing Brightness transformation.
+    """
+
+    def __init__(self, level, prob=0.5):
+        assert isinstance(level, (int, float)), \
+            'The level must be type int or float.'
+        assert 0 <= level <= _MAX_LEVEL, \
+            'The level should be in range [0,_MAX_LEVEL].'
+        assert 0 <= prob <= 1.0, \
+            'The probability should be in range [0,1].'
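The three enhance-style transforms (`Color`, `Equalize` aside, plus `Brightness` and `Contrast` below) all apply an `mmcv.adjust_*` call with the factor from `enhance_level_to_value`, where 1.0 is a no-op. A simplified stand-in for the brightness case, assuming uint8 input; this mirrors, but is not, the mmcv implementation:

```python
import numpy as np

def adjust_brightness(img, factor):
    # Blend the image with an all-black image of the same shape: factor 1.0
    # keeps the input, 0.0 yields black, values above 1.0 brighten
    # (clipped back into uint8 range).
    degenerated = np.zeros_like(img, dtype=np.float32)
    out = img.astype(np.float32) * factor + degenerated * (1 - factor)
    return np.clip(out, 0, 255).astype(img.dtype)

img = np.full((2, 2, 3), 100, dtype=np.uint8)
print(adjust_brightness(img, 1.0)[0, 0])  # [100 100 100] -> identity
print(adjust_brightness(img, 1.5)[0, 0])  # [150 150 150] -> brightened
```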
+ self.level = level + self.prob = prob + self.factor = enhance_level_to_value(level) + + def _adjust_brightness_img(self, results, factor=1.0): + """Adjust the brightness of image.""" + for key in results.get('img_fields', ['img']): + img = results[key] + results[key] = mmcv.adjust_brightness(img, + factor).astype(img.dtype) + + def __call__(self, results): + """Call function for Brightness transformation. + + Args: + results (dict): Results dict from loading pipeline. + + Returns: + dict: Results after the transformation. + """ + if np.random.rand() > self.prob: + return results + self._adjust_brightness_img(results, self.factor) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'prob={self.prob})' + return repr_str + + +@PIPELINES.register_module() +class ContrastTransform(object): + """Apply Contrast transformation to image. The bboxes, masks and + segmentations are not modified. + + Args: + level (int | float): Should be in range [0,_MAX_LEVEL]. + prob (float): The probability for performing Contrast transformation. + """ + + def __init__(self, level, prob=0.5): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level should be in range [0,_MAX_LEVEL].' + assert 0 <= prob <= 1.0, \ + 'The probability should be in range [0,1].' + self.level = level + self.prob = prob + self.factor = enhance_level_to_value(level) + + def _adjust_contrast_img(self, results, factor=1.0): + """Adjust the image contrast.""" + for key in results.get('img_fields', ['img']): + img = results[key] + results[key] = mmcv.adjust_contrast(img, factor).astype(img.dtype) + + def __call__(self, results): + """Call function for Contrast transformation. + + Args: + results (dict): Results dict from loading pipeline. + + Returns: + dict: Results after the transformation. + """ + if np.random.rand() > self.prob: + return results + self._adjust_contrast_img(results, self.factor) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'prob={self.prob})' + return repr_str diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/compose.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/compose.py new file mode 100644 index 0000000000000000000000000000000000000000..ca48f1c935755c486edc2744e1713e2b5ba3cdc8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/compose.py @@ -0,0 +1,51 @@ +import collections + +from mmcv.utils import build_from_cfg + +from ..builder import PIPELINES + + +@PIPELINES.register_module() +class Compose(object): + """Compose multiple transforms sequentially. + + Args: + transforms (Sequence[dict | callable]): Sequence of transform object or + config dict to be composed. + """ + + def __init__(self, transforms): + assert isinstance(transforms, collections.abc.Sequence) + self.transforms = [] + for transform in transforms: + if isinstance(transform, dict): + transform = build_from_cfg(transform, PIPELINES) + self.transforms.append(transform) + elif callable(transform): + self.transforms.append(transform) + else: + raise TypeError('transform must be callable or a dict') + + def __call__(self, data): + """Call function to apply transforms sequentially. + + Args: + data (dict): A result dict contains the data to transform. + + Returns: + dict: Transformed data. 
+ """ + + for t in self.transforms: + data = t(data) + if data is None: + return None + return data + + def __repr__(self): + format_string = self.__class__.__name__ + '(' + for t in self.transforms: + format_string += '\n' + format_string += f' {t}' + format_string += '\n)' + return format_string diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/formating.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/formating.py new file mode 100644 index 0000000000000000000000000000000000000000..b47074121d48a4e06e389a3a13d1e6eb970ff529 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/formating.py @@ -0,0 +1,364 @@ +from collections.abc import Sequence + +import mmcv +import numpy as np +import torch +from mmcv.parallel import DataContainer as DC + +from ..builder import PIPELINES + + +def to_tensor(data): + """Convert objects of various python types to :obj:`torch.Tensor`. + + Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`, + :class:`Sequence`, :class:`int` and :class:`float`. + + Args: + data (torch.Tensor | numpy.ndarray | Sequence | int | float): Data to + be converted. + """ + + if isinstance(data, torch.Tensor): + return data + elif isinstance(data, np.ndarray): + return torch.from_numpy(data) + elif isinstance(data, Sequence) and not mmcv.is_str(data): + return torch.tensor(data) + elif isinstance(data, int): + return torch.LongTensor([data]) + elif isinstance(data, float): + return torch.FloatTensor([data]) + else: + raise TypeError(f'type {type(data)} cannot be converted to tensor.') + + +@PIPELINES.register_module() +class ToTensor(object): + """Convert some results to :obj:`torch.Tensor` by given keys. + + Args: + keys (Sequence[str]): Keys that need to be converted to Tensor. + """ + + def __init__(self, keys): + self.keys = keys + + def __call__(self, results): + """Call function to convert data in results to :obj:`torch.Tensor`. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data converted + to :obj:`torch.Tensor`. + """ + for key in self.keys: + results[key] = to_tensor(results[key]) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(keys={self.keys})' + + +@PIPELINES.register_module() +class ImageToTensor(object): + """Convert image to :obj:`torch.Tensor` by given keys. + + The dimension order of input image is (H, W, C). The pipeline will convert + it to (C, H, W). If only 2 dimension (H, W) is given, the output would be + (1, H, W). + + Args: + keys (Sequence[str]): Key of images to be converted to Tensor. + """ + + def __init__(self, keys): + self.keys = keys + + def __call__(self, results): + """Call function to convert image in results to :obj:`torch.Tensor` and + transpose the channel order. + + Args: + results (dict): Result dict contains the image data to convert. + + Returns: + dict: The result dict contains the image converted + to :obj:`torch.Tensor` and transposed to (C, H, W) order. + """ + for key in self.keys: + img = results[key] + if len(img.shape) < 3: + img = np.expand_dims(img, -1) + results[key] = to_tensor(img.transpose(2, 0, 1)) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(keys={self.keys})' + + +@PIPELINES.register_module() +class Transpose(object): + """Transpose some results by given keys. + + Args: + keys (Sequence[str]): Keys of results to be transposed. + order (Sequence[int]): Order of transpose. 
+ """ + + def __init__(self, keys, order): + self.keys = keys + self.order = order + + def __call__(self, results): + """Call function to transpose the channel order of data in results. + + Args: + results (dict): Result dict contains the data to transpose. + + Returns: + dict: The result dict contains the data transposed to \ + ``self.order``. + """ + for key in self.keys: + results[key] = results[key].transpose(self.order) + return results + + def __repr__(self): + return self.__class__.__name__ + \ + f'(keys={self.keys}, order={self.order})' + + +@PIPELINES.register_module() +class ToDataContainer(object): + """Convert results to :obj:`mmcv.DataContainer` by given fields. + + Args: + fields (Sequence[dict]): Each field is a dict like + ``dict(key='xxx', **kwargs)``. The ``key`` in result will + be converted to :obj:`mmcv.DataContainer` with ``**kwargs``. + Default: ``(dict(key='img', stack=True), dict(key='gt_bboxes'), + dict(key='gt_labels'))``. + """ + + def __init__(self, + fields=(dict(key='img', stack=True), dict(key='gt_bboxes'), + dict(key='gt_labels'))): + self.fields = fields + + def __call__(self, results): + """Call function to convert data in results to + :obj:`mmcv.DataContainer`. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data converted to \ + :obj:`mmcv.DataContainer`. + """ + + for field in self.fields: + field = field.copy() + key = field.pop('key') + results[key] = DC(results[key], **field) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(fields={self.fields})' + + +@PIPELINES.register_module() +class DefaultFormatBundle(object): + """Default formatting bundle. + + It simplifies the pipeline of formatting common fields, including "img", + "proposals", "gt_bboxes", "gt_labels", "gt_masks" and "gt_semantic_seg". + These fields are formatted as follows. + + - img: (1)transpose, (2)to tensor, (3)to DataContainer (stack=True) + - proposals: (1)to tensor, (2)to DataContainer + - gt_bboxes: (1)to tensor, (2)to DataContainer + - gt_bboxes_ignore: (1)to tensor, (2)to DataContainer + - gt_labels: (1)to tensor, (2)to DataContainer + - gt_masks: (1)to tensor, (2)to DataContainer (cpu_only=True) + - gt_semantic_seg: (1)unsqueeze dim-0 (2)to tensor, \ + (3)to DataContainer (stack=True) + """ + + def __call__(self, results): + """Call function to transform and format common fields in results. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data that is formatted with \ + default bundle. + """ + + if 'img' in results: + img = results['img'] + # add default meta keys + results = self._add_default_meta_keys(results) + if len(img.shape) < 3: + img = np.expand_dims(img, -1) + img = np.ascontiguousarray(img.transpose(2, 0, 1)) + results['img'] = DC(to_tensor(img), stack=True) + for key in ['proposals', 'gt_bboxes', 'gt_bboxes_ignore', 'gt_labels']: + if key not in results: + continue + results[key] = DC(to_tensor(results[key])) + if 'gt_masks' in results: + results['gt_masks'] = DC(results['gt_masks'], cpu_only=True) + if 'gt_semantic_seg' in results: + results['gt_semantic_seg'] = DC( + to_tensor(results['gt_semantic_seg'][None, ...]), stack=True) + return results + + def _add_default_meta_keys(self, results): + """Add default meta keys. 
+ + We set default meta keys including `pad_shape`, `scale_factor` and + `img_norm_cfg` to avoid the case where no `Resize`, `Normalize` and + `Pad` are implemented during the whole pipeline. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + results (dict): Updated result dict contains the data to convert. + """ + img = results['img'] + results.setdefault('pad_shape', img.shape) + results.setdefault('scale_factor', 1.0) + num_channels = 1 if len(img.shape) < 3 else img.shape[2] + results.setdefault( + 'img_norm_cfg', + dict( + mean=np.zeros(num_channels, dtype=np.float32), + std=np.ones(num_channels, dtype=np.float32), + to_rgb=False)) + return results + + def __repr__(self): + return self.__class__.__name__ + + +@PIPELINES.register_module() +class Collect(object): + """Collect data from the loader relevant to the specific task. + + This is usually the last stage of the data loader pipeline. Typically keys + is set to some subset of "img", "proposals", "gt_bboxes", + "gt_bboxes_ignore", "gt_labels", and/or "gt_masks". + + The "img_meta" item is always populated. The contents of the "img_meta" + dictionary depends on "meta_keys". By default this includes: + + - "img_shape": shape of the image input to the network as a tuple \ + (h, w, c). Note that images may be zero padded on the \ + bottom/right if the batch tensor is larger than this shape. + + - "scale_factor": a float indicating the preprocessing scale + + - "flip": a boolean indicating if image flip transform was used + + - "filename": path to the image file + + - "ori_shape": original shape of the image as a tuple (h, w, c) + + - "pad_shape": image shape after padding + + - "img_norm_cfg": a dict of normalization information: + + - mean - per channel mean subtraction + - std - per channel std divisor + - to_rgb - bool indicating if bgr was converted to rgb + + Args: + keys (Sequence[str]): Keys of results to be collected in ``data``. + meta_keys (Sequence[str], optional): Meta keys to be converted to + ``mmcv.DataContainer`` and collected in ``data[img_metas]``. + Default: ``('filename', 'ori_filename', 'ori_shape', 'img_shape', + 'pad_shape', 'scale_factor', 'flip', 'flip_direction', + 'img_norm_cfg')`` + """ + + def __init__(self, + keys, + meta_keys=('filename', 'ori_filename', 'ori_shape', + 'img_shape', 'pad_shape', 'scale_factor', 'flip', + 'flip_direction', 'img_norm_cfg')): + self.keys = keys + self.meta_keys = meta_keys + + def __call__(self, results): + """Call function to collect keys in results. The keys in ``meta_keys`` + will be converted to :obj:mmcv.DataContainer. + + Args: + results (dict): Result dict contains the data to collect. + + Returns: + dict: The result dict contains the following keys + + - keys in``self.keys`` + - ``img_metas`` + """ + + data = {} + img_meta = {} + for key in self.meta_keys: + img_meta[key] = results[key] + data['img_metas'] = DC(img_meta, cpu_only=True) + for key in self.keys: + data[key] = results[key] + return data + + def __repr__(self): + return self.__class__.__name__ + \ + f'(keys={self.keys}, meta_keys={self.meta_keys})' + + +@PIPELINES.register_module() +class WrapFieldsToLists(object): + """Wrap fields of the data dictionary into lists for evaluation. + + This class can be used as a last step of a test or validation + pipeline for single image evaluation or inference. 
+
+    Example:
+        >>> test_pipeline = [
+        >>>    dict(type='LoadImageFromFile'),
+        >>>    dict(type='Normalize',
+                    mean=[123.675, 116.28, 103.53],
+                    std=[58.395, 57.12, 57.375],
+                    to_rgb=True),
+        >>>    dict(type='Pad', size_divisor=32),
+        >>>    dict(type='ImageToTensor', keys=['img']),
+        >>>    dict(type='Collect', keys=['img']),
+        >>>    dict(type='WrapFieldsToLists')
+        >>> ]
+    """
+
+    def __call__(self, results):
+        """Call function to wrap fields into lists.
+
+        Args:
+            results (dict): Result dict contains the data to wrap.
+
+        Returns:
+            dict: The result dict where value of ``self.keys`` are wrapped \
+                into list.
+        """
+
+        # Wrap dict fields into lists
+        for key, val in results.items():
+            results[key] = [val]
+        return results
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}()'
diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/instaboost.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/instaboost.py
new file mode 100644
index 0000000000000000000000000000000000000000..38b6819f60587a6e0c0f6d57bfda32bb3a7a4267
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/instaboost.py
@@ -0,0 +1,98 @@
+import numpy as np
+
+from ..builder import PIPELINES
+
+
+@PIPELINES.register_module()
+class InstaBoost(object):
+    r"""Data augmentation method in `InstaBoost: Boosting Instance
+    Segmentation Via Probability Map Guided Copy-Pasting
+    <https://arxiv.org/abs/1908.07801>`_.
+
+    Refer to https://github.com/GothicAi/Instaboost for implementation details.
+    """
+
+    def __init__(self,
+                 action_candidate=('normal', 'horizontal', 'skip'),
+                 action_prob=(1, 0, 0),
+                 scale=(0.8, 1.2),
+                 dx=15,
+                 dy=15,
+                 theta=(-1, 1),
+                 color_prob=0.5,
+                 hflag=False,
+                 aug_ratio=0.5):
+        try:
+            import instaboostfast as instaboost
+        except ImportError:
+            raise ImportError(
+                'Please run "pip install instaboostfast" '
+                'to install instaboostfast first for instaboost augmentation.')
+        self.cfg = instaboost.InstaBoostConfig(action_candidate, action_prob,
+                                               scale, dx, dy, theta,
+                                               color_prob, hflag)
+        self.aug_ratio = aug_ratio
+
+    def _load_anns(self, results):
+        labels = results['ann_info']['labels']
+        masks = results['ann_info']['masks']
+        bboxes = results['ann_info']['bboxes']
+        n = len(labels)
+
+        anns = []
+        for i in range(n):
+            label = labels[i]
+            bbox = bboxes[i]
+            mask = masks[i]
+            x1, y1, x2, y2 = bbox
+            # assert (x2 - x1) >= 1 and (y2 - y1) >= 1
+            bbox = [x1, y1, x2 - x1, y2 - y1]
+            anns.append({
+                'category_id': label,
+                'segmentation': mask,
+                'bbox': bbox
+            })
+
+        return anns
+
+    def _parse_anns(self, results, anns, img):
+        gt_bboxes = []
+        gt_labels = []
+        gt_masks_ann = []
+        for ann in anns:
+            x1, y1, w, h = ann['bbox']
+            # TODO: a more essential bug needs to be fixed in instaboost
+            if w <= 0 or h <= 0:
+                continue
+            bbox = [x1, y1, x1 + w, y1 + h]
+            gt_bboxes.append(bbox)
+            gt_labels.append(ann['category_id'])
+            gt_masks_ann.append(ann['segmentation'])
+        gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
+        gt_labels = np.array(gt_labels, dtype=np.int64)
+        results['ann_info']['labels'] = gt_labels
+        results['ann_info']['bboxes'] = gt_bboxes
+        results['ann_info']['masks'] = gt_masks_ann
+        results['img'] = img
+        return results
+
+    def __call__(self, results):
+        img = results['img']
+        orig_type = img.dtype
+        anns = self._load_anns(results)
+        if np.random.choice([0, 1], p=[1 - self.aug_ratio, self.aug_ratio]):
+            try:
+                import instaboostfast as instaboost
+            except ImportError:
+                raise ImportError('Please run "pip install instaboostfast" '
+                                  'to install instaboostfast first.')
+            anns, img = instaboost.get_new_data(
+                anns,
img.astype(np.uint8), self.cfg, background=None) + + results = self._parse_anns(results, anns, img.astype(orig_type)) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(cfg={self.cfg}, aug_ratio={self.aug_ratio})' + return repr_str diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/loading.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/loading.py new file mode 100644 index 0000000000000000000000000000000000000000..6b0d9e14c07be54bea6473fd616f70bcf04255a0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/loading.py @@ -0,0 +1,458 @@ +import os.path as osp + +import mmcv +import numpy as np +import pycocotools.mask as maskUtils + +from mmdet.core import BitmapMasks, PolygonMasks +from ..builder import PIPELINES + + +@PIPELINES.register_module() +class LoadImageFromFile(object): + """Load an image from file. + + Required keys are "img_prefix" and "img_info" (a dict that must contain the + key "filename"). Added or updated keys are "filename", "img", "img_shape", + "ori_shape" (same as `img_shape`), "pad_shape" (same as `img_shape`), + "scale_factor" (1.0) and "img_norm_cfg" (means=0 and stds=1). + + Args: + to_float32 (bool): Whether to convert the loaded image to a float32 + numpy array. If set to False, the loaded image is an uint8 array. + Defaults to False. + color_type (str): The flag argument for :func:`mmcv.imfrombytes`. + Defaults to 'color'. + file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + to_float32=False, + color_type='color', + file_client_args=dict(backend='disk')): + self.to_float32 = to_float32 + self.color_type = color_type + self.file_client_args = file_client_args.copy() + self.file_client = None + + def __call__(self, results): + """Call functions to load image and get image meta information. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded image and meta information. + """ + + if self.file_client is None: + self.file_client = mmcv.FileClient(**self.file_client_args) + + if results['img_prefix'] is not None: + filename = osp.join(results['img_prefix'], + results['img_info']['filename']) + else: + filename = results['img_info']['filename'] + + img_bytes = self.file_client.get(filename) + img = mmcv.imfrombytes(img_bytes, flag=self.color_type) + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = filename + results['ori_filename'] = results['img_info']['filename'] + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + return results + + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'to_float32={self.to_float32}, ' + f"color_type='{self.color_type}', " + f'file_client_args={self.file_client_args})') + return repr_str + + +@PIPELINES.register_module() +class LoadImageFromWebcam(LoadImageFromFile): + """Load an image from webcam. + + Similar with :obj:`LoadImageFromFile`, but the image read from webcam is in + ``results['img']``. + """ + + def __call__(self, results): + """Call functions to add image meta information. + + Args: + results (dict): Result dict with Webcam read image in + ``results['img']``. + + Returns: + dict: The dict contains loaded image and meta information. 
+ """ + + img = results['img'] + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = None + results['ori_filename'] = None + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + return results + + +@PIPELINES.register_module() +class LoadMultiChannelImageFromFiles(object): + """Load multi-channel images from a list of separate channel files. + + Required keys are "img_prefix" and "img_info" (a dict that must contain the + key "filename", which is expected to be a list of filenames). + Added or updated keys are "filename", "img", "img_shape", + "ori_shape" (same as `img_shape`), "pad_shape" (same as `img_shape`), + "scale_factor" (1.0) and "img_norm_cfg" (means=0 and stds=1). + + Args: + to_float32 (bool): Whether to convert the loaded image to a float32 + numpy array. If set to False, the loaded image is an uint8 array. + Defaults to False. + color_type (str): The flag argument for :func:`mmcv.imfrombytes`. + Defaults to 'color'. + file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + to_float32=False, + color_type='unchanged', + file_client_args=dict(backend='disk')): + self.to_float32 = to_float32 + self.color_type = color_type + self.file_client_args = file_client_args.copy() + self.file_client = None + + def __call__(self, results): + """Call functions to load multiple images and get images meta + information. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded images and meta information. + """ + + if self.file_client is None: + self.file_client = mmcv.FileClient(**self.file_client_args) + + if results['img_prefix'] is not None: + filename = [ + osp.join(results['img_prefix'], fname) + for fname in results['img_info']['filename'] + ] + else: + filename = results['img_info']['filename'] + + img = [] + for name in filename: + img_bytes = self.file_client.get(name) + img.append(mmcv.imfrombytes(img_bytes, flag=self.color_type)) + img = np.stack(img, axis=-1) + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = filename + results['ori_filename'] = results['img_info']['filename'] + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + num_channels = 1 if len(img.shape) < 3 else img.shape[2] + results['img_norm_cfg'] = dict( + mean=np.zeros(num_channels, dtype=np.float32), + std=np.ones(num_channels, dtype=np.float32), + to_rgb=False) + return results + + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'to_float32={self.to_float32}, ' + f"color_type='{self.color_type}', " + f'file_client_args={self.file_client_args})') + return repr_str + + +@PIPELINES.register_module() +class LoadAnnotations(object): + """Load mutiple types of annotations. + + Args: + with_bbox (bool): Whether to parse and load the bbox annotation. + Default: True. + with_label (bool): Whether to parse and load the label annotation. + Default: True. + with_mask (bool): Whether to parse and load the mask annotation. + Default: False. + with_seg (bool): Whether to parse and load the semantic segmentation + annotation. Default: False. + poly2mask (bool): Whether to convert the instance masks from polygons + to bitmaps. Default: True. 
+ file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + with_bbox=True, + with_label=True, + with_mask=False, + with_seg=False, + poly2mask=True, + file_client_args=dict(backend='disk')): + self.with_bbox = with_bbox + self.with_label = with_label + self.with_mask = with_mask + self.with_seg = with_seg + self.poly2mask = poly2mask + self.file_client_args = file_client_args.copy() + self.file_client = None + + def _load_bboxes(self, results): + """Private function to load bounding box annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded bounding box annotations. + """ + + ann_info = results['ann_info'] + results['gt_bboxes'] = ann_info['bboxes'].copy() + + gt_bboxes_ignore = ann_info.get('bboxes_ignore', None) + if gt_bboxes_ignore is not None: + results['gt_bboxes_ignore'] = gt_bboxes_ignore.copy() + results['bbox_fields'].append('gt_bboxes_ignore') + results['bbox_fields'].append('gt_bboxes') + return results + + def _load_labels(self, results): + """Private function to load label annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded label annotations. + """ + + results['gt_labels'] = results['ann_info']['labels'].copy() + return results + + def _poly2mask(self, mask_ann, img_h, img_w): + """Private function to convert masks represented with polygon to + bitmaps. + + Args: + mask_ann (list | dict): Polygon mask annotation input. + img_h (int): The height of output mask. + img_w (int): The width of output mask. + + Returns: + numpy.ndarray: The decode bitmap mask of shape (img_h, img_w). + """ + + if isinstance(mask_ann, list): + # polygon -- a single object might consist of multiple parts + # we merge all parts into one mask rle code + rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) + rle = maskUtils.merge(rles) + elif isinstance(mask_ann['counts'], list): + # uncompressed RLE + rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) + else: + # rle + rle = mask_ann + mask = maskUtils.decode(rle) + return mask + + def process_polygons(self, polygons): + """Convert polygons to list of ndarray and filter invalid polygons. + + Args: + polygons (list[list]): Polygons of one instance. + + Returns: + list[numpy.ndarray]: Processed polygons. + """ + + polygons = [np.array(p) for p in polygons] + valid_polygons = [] + for polygon in polygons: + if len(polygon) % 2 == 0 and len(polygon) >= 6: + valid_polygons.append(polygon) + return valid_polygons + + def _load_masks(self, results): + """Private function to load mask annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded mask annotations. + If ``self.poly2mask`` is set ``True``, `gt_mask` will contain + :obj:`PolygonMasks`. Otherwise, :obj:`BitmapMasks` is used. + """ + + h, w = results['img_info']['height'], results['img_info']['width'] + gt_masks = results['ann_info']['masks'] + if self.poly2mask: + gt_masks = BitmapMasks( + [self._poly2mask(mask, h, w) for mask in gt_masks], h, w) + else: + gt_masks = PolygonMasks( + [self.process_polygons(polygons) for polygons in gt_masks], h, + w) + results['gt_masks'] = gt_masks + results['mask_fields'].append('gt_masks') + return results + + def _load_semantic_seg(self, results): + """Private function to load semantic segmentation annotations. 
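A runnable sketch of the polygon-to-bitmap conversion in `_poly2mask` below, using the same `pycocotools` calls on a toy square polygon:

```python
import numpy as np
import pycocotools.mask as maskUtils

img_h, img_w = 4, 4
# one flattened COCO-style polygon: x1, y1, x2, y2, ...
polygons = [[1.0, 1.0, 3.0, 1.0, 3.0, 3.0, 1.0, 3.0]]
rles = maskUtils.frPyObjects(polygons, img_h, img_w)  # per-part RLEs
mask = maskUtils.decode(maskUtils.merge(rles))        # merged binary mask
assert mask.shape == (img_h, img_w) and mask.dtype == np.uint8
```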
+
+        Args:
+            results (dict): Result dict from :obj:`dataset`.
+
+        Returns:
+            dict: The dict contains loaded semantic segmentation annotations.
+        """
+
+        if self.file_client is None:
+            self.file_client = mmcv.FileClient(**self.file_client_args)
+
+        filename = osp.join(results['seg_prefix'],
+                            results['ann_info']['seg_map'])
+        img_bytes = self.file_client.get(filename)
+        results['gt_semantic_seg'] = mmcv.imfrombytes(
+            img_bytes, flag='unchanged').squeeze()
+        results['seg_fields'].append('gt_semantic_seg')
+        return results
+
+    def __call__(self, results):
+        """Call function to load multiple types of annotations.
+
+        Args:
+            results (dict): Result dict from :obj:`mmdet.CustomDataset`.
+
+        Returns:
+            dict: The dict contains loaded bounding box, label, mask and
+                semantic segmentation annotations.
+        """
+
+        if self.with_bbox:
+            results = self._load_bboxes(results)
+            if results is None:
+                return None
+        if self.with_label:
+            results = self._load_labels(results)
+        if self.with_mask:
+            results = self._load_masks(results)
+        if self.with_seg:
+            results = self._load_semantic_seg(results)
+        return results
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f'(with_bbox={self.with_bbox}, '
+        repr_str += f'with_label={self.with_label}, '
+        repr_str += f'with_mask={self.with_mask}, '
+        repr_str += f'with_seg={self.with_seg}, '
+        repr_str += f'poly2mask={self.poly2mask}, '
+        repr_str += f'file_client_args={self.file_client_args})'
+        return repr_str
+
+
+@PIPELINES.register_module()
+class LoadProposals(object):
+    """Load proposal pipeline.
+
+    Required key is "proposals". Updated keys are "proposals", "bbox_fields".
+
+    Args:
+        num_max_proposals (int, optional): Maximum number of proposals to load.
+            If not specified, all proposals will be loaded.
+    """
+
+    def __init__(self, num_max_proposals=None):
+        self.num_max_proposals = num_max_proposals
+
+    def __call__(self, results):
+        """Call function to load proposals from file.
+
+        Args:
+            results (dict): Result dict from :obj:`mmdet.CustomDataset`.
+
+        Returns:
+            dict: The dict contains loaded proposal annotations.
+        """
+
+        proposals = results['proposals']
+        if proposals.shape[1] not in (4, 5):
+            raise AssertionError(
+                'proposals should have shapes (n, 4) or (n, 5), '
+                f'but found {proposals.shape}')
+        proposals = proposals[:, :4]
+
+        if self.num_max_proposals is not None:
+            proposals = proposals[:self.num_max_proposals]
+
+        if len(proposals) == 0:
+            proposals = np.array([[0, 0, 0, 0]], dtype=np.float32)
+        results['proposals'] = proposals
+        results['bbox_fields'].append('proposals')
+        return results
+
+    def __repr__(self):
+        return self.__class__.__name__ + \
+            f'(num_max_proposals={self.num_max_proposals})'
+
+
+@PIPELINES.register_module()
+class FilterAnnotations(object):
+    """Filter invalid annotations.
+
+    Args:
+        min_gt_bbox_wh (tuple[int]): Minimum width and height of ground truth
+            boxes.
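The filtering rule that `FilterAnnotations.__call__` (below) applies, extracted as a standalone numpy snippet with toy boxes:

```python
import numpy as np

gt_bboxes = np.array([[0, 0, 10, 10],   # 10x10: kept
                      [0, 0, 1, 40]],   # 1x40: too narrow, dropped
                     dtype=np.float32)
min_w, min_h = 2, 2                     # stand-in for min_gt_bbox_wh
w = gt_bboxes[:, 2] - gt_bboxes[:, 0]
h = gt_bboxes[:, 3] - gt_bboxes[:, 1]
keep = (w > min_w) & (h > min_h)
print(gt_bboxes[keep])                  # only the first box survives
```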
+ """ + + def __init__(self, min_gt_bbox_wh): + # TODO: add more filter options + self.min_gt_bbox_wh = min_gt_bbox_wh + + def __call__(self, results): + assert 'gt_bboxes' in results + gt_bboxes = results['gt_bboxes'] + w = gt_bboxes[:, 2] - gt_bboxes[:, 0] + h = gt_bboxes[:, 3] - gt_bboxes[:, 1] + keep = (w > self.min_gt_bbox_wh[0]) & (h > self.min_gt_bbox_wh[1]) + if not keep.any(): + return None + else: + keys = ('gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg') + for key in keys: + if key in results: + results[key] = results[key][keep] + return results diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/test_time_aug.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/test_time_aug.py new file mode 100644 index 0000000000000000000000000000000000000000..2b0ad4db3e0f42219c713ac899c7fd7d4368d322 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/test_time_aug.py @@ -0,0 +1,119 @@ +import warnings + +import mmcv + +from ..builder import PIPELINES +from .compose import Compose + + +@PIPELINES.register_module() +class MultiScaleFlipAug(object): + """Test-time augmentation with multiple scales and flipping. + + An example configuration is as followed: + + .. code-block:: + + img_scale=[(1333, 400), (1333, 800)], + flip=True, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ] + + After MultiScaleFLipAug with above configuration, the results are wrapped + into lists of the same length as followed: + + .. code-block:: + + dict( + img=[...], + img_shape=[...], + scale=[(1333, 400), (1333, 400), (1333, 800), (1333, 800)] + flip=[False, True, False, True] + ... + ) + + Args: + transforms (list[dict]): Transforms to apply in each augmentation. + img_scale (tuple | list[tuple] | None): Images scales for resizing. + scale_factor (float | list[float] | None): Scale factors for resizing. + flip (bool): Whether apply flip augmentation. Default: False. + flip_direction (str | list[str]): Flip augmentation directions, + options are "horizontal" and "vertical". If flip_direction is list, + multiple flip augmentations will be applied. + It has no effect when flip == False. Default: "horizontal". + """ + + def __init__(self, + transforms, + img_scale=None, + scale_factor=None, + flip=False, + flip_direction='horizontal'): + self.transforms = Compose(transforms) + assert (img_scale is None) ^ (scale_factor is None), ( + 'Must have but only one variable can be setted') + if img_scale is not None: + self.img_scale = img_scale if isinstance(img_scale, + list) else [img_scale] + self.scale_key = 'scale' + assert mmcv.is_list_of(self.img_scale, tuple) + else: + self.img_scale = scale_factor if isinstance( + scale_factor, list) else [scale_factor] + self.scale_key = 'scale_factor' + + self.flip = flip + self.flip_direction = flip_direction if isinstance( + flip_direction, list) else [flip_direction] + assert mmcv.is_list_of(self.flip_direction, str) + if not self.flip and self.flip_direction != ['horizontal']: + warnings.warn( + 'flip_direction has no effect when flip is set to False') + if (self.flip + and not any([t['type'] == 'RandomFlip' for t in transforms])): + warnings.warn( + 'flip has no effect when RandomFlip is not in transforms') + + def __call__(self, results): + """Call function to apply test time augment transforms on results. 
+
+        Args:
+            results (dict): Result dict contains the data to transform.
+
+        Returns:
+            dict[str: list]: The augmented data, where each value is wrapped
+                into a list.
+        """
+
+        aug_data = []
+        flip_args = [(False, None)]
+        if self.flip:
+            flip_args += [(True, direction)
+                          for direction in self.flip_direction]
+        for scale in self.img_scale:
+            for flip, direction in flip_args:
+                _results = results.copy()
+                _results[self.scale_key] = scale
+                _results['flip'] = flip
+                _results['flip_direction'] = direction
+                data = self.transforms(_results)
+                aug_data.append(data)
+        # list of dict to dict of list
+        aug_data_dict = {key: [] for key in aug_data[0]}
+        for data in aug_data:
+            for key, val in data.items():
+                aug_data_dict[key].append(val)
+        return aug_data_dict
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f'(transforms={self.transforms}, '
+        repr_str += f'img_scale={self.img_scale}, flip={self.flip}, '
+        repr_str += f'flip_direction={self.flip_direction})'
+        return repr_str
diff --git a/thirdparty/mmdetection/mmdet/datasets/pipelines/transforms.py b/thirdparty/mmdetection/mmdet/datasets/pipelines/transforms.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9ffda51beb3d9d527802c09c27ea4c9e4c4197a
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/datasets/pipelines/transforms.py
@@ -0,0 +1,1804 @@
+import inspect
+
+import mmcv
+import numpy as np
+from numpy import random
+
+from mmdet.core import PolygonMasks
+from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps
+from ..builder import PIPELINES
+
+try:
+    from imagecorruptions import corrupt
+except ImportError:
+    corrupt = None
+
+try:
+    import albumentations
+    from albumentations import Compose
+except ImportError:
+    albumentations = None
+    Compose = None
+
+
+@PIPELINES.register_module()
+class Resize(object):
+    """Resize images & bbox & mask.
+
+    This transform resizes the input image to some scale. Bboxes and masks
+    are then resized with the same scale factor. If the input dict contains
+    the key "scale", then the scale in the input dict is used, otherwise the
+    specified scale in the init method is used. If the input dict contains
+    the key "scale_factor" (if MultiScaleFlipAug does not give img_scale but
+    scale_factor), the actual scale will be computed by image shape and
+    scale_factor.
+
+    `img_scale` can either be a tuple (single-scale) or a list of tuple
+    (multi-scale). There are 3 multiscale modes:
+
+    - ``ratio_range is not None``: randomly sample a ratio from the ratio \
+      range and multiply it with the image scale.
+    - ``ratio_range is None`` and ``multiscale_mode == "range"``: randomly \
+      sample a scale from the multiscale range.
+    - ``ratio_range is None`` and ``multiscale_mode == "value"``: randomly \
+      sample a scale from multiple scales.
+
+    Args:
+        img_scale (tuple or list[tuple]): Images scales for resizing.
+        multiscale_mode (str): Either "range" or "value".
+        ratio_range (tuple[float]): (min_ratio, max_ratio)
+        keep_ratio (bool): Whether to keep the aspect ratio when resizing the
+            image.
+        bbox_clip_border (bool, optional): Whether to clip the objects outside
+            the border of the image. Defaults to True.
+        backend (str): Image resize backend, choices are 'cv2' and 'pillow'.
+            These two backends generate slightly different results. Defaults
+            to 'cv2'.
+        override (bool, optional): Whether to override `scale` and
+            `scale_factor` so as to call resize twice. Default False.
If True, + after the first resizing, the existed `scale` and `scale_factor` + will be ignored so the second resizing can be allowed. + This option is a work-around for multiple times of resize in DETR. + Defaults to False. + """ + + def __init__(self, + img_scale=None, + multiscale_mode='range', + ratio_range=None, + keep_ratio=True, + bbox_clip_border=True, + backend='cv2', + override=False): + if img_scale is None: + self.img_scale = None + else: + if isinstance(img_scale, list): + self.img_scale = img_scale + else: + self.img_scale = [img_scale] + assert mmcv.is_list_of(self.img_scale, tuple) + + if ratio_range is not None: + # mode 1: given a scale and a range of image ratio + assert len(self.img_scale) == 1 + else: + # mode 2: given multiple scales or a range of scales + assert multiscale_mode in ['value', 'range'] + + self.backend = backend + self.multiscale_mode = multiscale_mode + self.ratio_range = ratio_range + self.keep_ratio = keep_ratio + # TODO: refactor the override option in Resize + self.override = override + self.bbox_clip_border = bbox_clip_border + + @staticmethod + def random_select(img_scales): + """Randomly select an img_scale from given candidates. + + Args: + img_scales (list[tuple]): Images scales for selection. + + Returns: + (tuple, int): Returns a tuple ``(img_scale, scale_dix)``, \ + where ``img_scale`` is the selected image scale and \ + ``scale_idx`` is the selected index in the given candidates. + """ + + assert mmcv.is_list_of(img_scales, tuple) + scale_idx = np.random.randint(len(img_scales)) + img_scale = img_scales[scale_idx] + return img_scale, scale_idx + + @staticmethod + def random_sample(img_scales): + """Randomly sample an img_scale when ``multiscale_mode=='range'``. + + Args: + img_scales (list[tuple]): Images scale range for sampling. + There must be two tuples in img_scales, which specify the lower + and uper bound of image scales. + + Returns: + (tuple, None): Returns a tuple ``(img_scale, None)``, where \ + ``img_scale`` is sampled scale and None is just a placeholder \ + to be consistent with :func:`random_select`. + """ + + assert mmcv.is_list_of(img_scales, tuple) and len(img_scales) == 2 + img_scale_long = [max(s) for s in img_scales] + img_scale_short = [min(s) for s in img_scales] + long_edge = np.random.randint( + min(img_scale_long), + max(img_scale_long) + 1) + short_edge = np.random.randint( + min(img_scale_short), + max(img_scale_short) + 1) + img_scale = (long_edge, short_edge) + return img_scale, None + + @staticmethod + def random_sample_ratio(img_scale, ratio_range): + """Randomly sample an img_scale when ``ratio_range`` is specified. + + A ratio will be randomly sampled from the range specified by + ``ratio_range``. Then it would be multiplied with ``img_scale`` to + generate sampled scale. + + Args: + img_scale (tuple): Images scale base to multiply with ratio. + ratio_range (tuple[float]): The minimum and maximum ratio to scale + the ``img_scale``. + + Returns: + (tuple, None): Returns a tuple ``(scale, None)``, where \ + ``scale`` is sampled ratio multiplied with ``img_scale`` and \ + None is just a placeholder to be consistent with \ + :func:`random_select`. 
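A standalone rendering of the `"range"` multiscale mode implemented by `random_sample` above: the long and short edges are drawn independently between the two endpoint scales:

```python
import numpy as np

img_scales = [(1333, 640), (1333, 800)]           # endpoint scales
img_scale_long = [max(s) for s in img_scales]
img_scale_short = [min(s) for s in img_scales]
long_edge = np.random.randint(min(img_scale_long),
                              max(img_scale_long) + 1)
short_edge = np.random.randint(min(img_scale_short),
                               max(img_scale_short) + 1)
print((long_edge, short_edge))                    # e.g. (1333, 731)
```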
+ """ + + assert isinstance(img_scale, tuple) and len(img_scale) == 2 + min_ratio, max_ratio = ratio_range + assert min_ratio <= max_ratio + ratio = np.random.random_sample() * (max_ratio - min_ratio) + min_ratio + scale = int(img_scale[0] * ratio), int(img_scale[1] * ratio) + return scale, None + + def _random_scale(self, results): + """Randomly sample an img_scale according to ``ratio_range`` and + ``multiscale_mode``. + + If ``ratio_range`` is specified, a ratio will be sampled and be + multiplied with ``img_scale``. + If multiple scales are specified by ``img_scale``, a scale will be + sampled according to ``multiscale_mode``. + Otherwise, single scale will be used. + + Args: + results (dict): Result dict from :obj:`dataset`. + + Returns: + dict: Two new keys 'scale` and 'scale_idx` are added into \ + ``results``, which would be used by subsequent pipelines. + """ + + if self.ratio_range is not None: + scale, scale_idx = self.random_sample_ratio( + self.img_scale[0], self.ratio_range) + elif len(self.img_scale) == 1: + scale, scale_idx = self.img_scale[0], 0 + elif self.multiscale_mode == 'range': + scale, scale_idx = self.random_sample(self.img_scale) + elif self.multiscale_mode == 'value': + scale, scale_idx = self.random_select(self.img_scale) + else: + raise NotImplementedError + + results['scale'] = scale + results['scale_idx'] = scale_idx + + def _resize_img(self, results): + """Resize images with ``results['scale']``.""" + for key in results.get('img_fields', ['img']): + if self.keep_ratio: + img, scale_factor = mmcv.imrescale( + results[key], + results['scale'], + return_scale=True, + backend=self.backend) + # the w_scale and h_scale has minor difference + # a real fix should be done in the mmcv.imrescale in the future + new_h, new_w = img.shape[:2] + h, w = results[key].shape[:2] + w_scale = new_w / w + h_scale = new_h / h + else: + img, w_scale, h_scale = mmcv.imresize( + results[key], + results['scale'], + return_scale=True, + backend=self.backend) + results[key] = img + + scale_factor = np.array([w_scale, h_scale, w_scale, h_scale], + dtype=np.float32) + results['img_shape'] = img.shape + # in case that there is no padding + results['pad_shape'] = img.shape + results['scale_factor'] = scale_factor + results['keep_ratio'] = self.keep_ratio + + def _resize_bboxes(self, results): + """Resize bounding boxes with ``results['scale_factor']``.""" + for key in results.get('bbox_fields', []): + bboxes = results[key] * results['scale_factor'] + if self.bbox_clip_border: + img_shape = results['img_shape'] + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, img_shape[1]) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, img_shape[0]) + results[key] = bboxes + + def _resize_masks(self, results): + """Resize masks with ``results['scale']``""" + for key in results.get('mask_fields', []): + if results[key] is None: + continue + if self.keep_ratio: + results[key] = results[key].rescale(results['scale']) + else: + results[key] = results[key].resize(results['img_shape'][:2]) + + def _resize_seg(self, results): + """Resize semantic segmentation map with ``results['scale']``.""" + for key in results.get('seg_fields', []): + if self.keep_ratio: + gt_seg = mmcv.imrescale( + results[key], + results['scale'], + interpolation='nearest', + backend=self.backend) + else: + gt_seg = mmcv.imresize( + results[key], + results['scale'], + interpolation='nearest', + backend=self.backend) + results['gt_semantic_seg'] = gt_seg + + def __call__(self, results): + """Call function to resize images, bounding boxes, 
masks, semantic
+        segmentation map.
+
+        Args:
+            results (dict): Result dict from loading pipeline.
+
+        Returns:
+            dict: Resized results, 'img_shape', 'pad_shape', 'scale_factor', \
+                'keep_ratio' keys are added into result dict.
+        """
+
+        if 'scale' not in results:
+            if 'scale_factor' in results:
+                img_shape = results['img'].shape[:2]
+                scale_factor = results['scale_factor']
+                assert isinstance(scale_factor, float)
+                results['scale'] = tuple(
+                    [int(x * scale_factor) for x in img_shape][::-1])
+            else:
+                self._random_scale(results)
+        else:
+            if not self.override:
+                assert 'scale_factor' not in results, (
+                    'scale and scale_factor cannot be both set.')
+            else:
+                results.pop('scale')
+                if 'scale_factor' in results:
+                    results.pop('scale_factor')
+                self._random_scale(results)
+
+        self._resize_img(results)
+        self._resize_bboxes(results)
+        self._resize_masks(results)
+        self._resize_seg(results)
+        return results
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f'(img_scale={self.img_scale}, '
+        repr_str += f'multiscale_mode={self.multiscale_mode}, '
+        repr_str += f'ratio_range={self.ratio_range}, '
+        repr_str += f'keep_ratio={self.keep_ratio}, '
+        repr_str += f'bbox_clip_border={self.bbox_clip_border})'
+        return repr_str
+
+
+@PIPELINES.register_module()
+class RandomFlip(object):
+    """Flip the image & bbox & mask.
+
+    If the input dict contains the key "flip", then the flag will be used,
+    otherwise it will be randomly decided by a ratio specified in the init
+    method.
+
+    When random flip is enabled, ``flip_ratio``/``direction`` can either be a
+    float/string or tuple of float/string. There are 3 flip modes:
+
+    - ``flip_ratio`` is float, ``direction`` is string: the image will be
+      ``direction``ly flipped with probability of ``flip_ratio``.
+      E.g., ``flip_ratio=0.5``, ``direction='horizontal'``,
+      then image will be horizontally flipped with probability of 0.5.
+    - ``flip_ratio`` is float, ``direction`` is list of string: the image will
+      be ``direction[i]``ly flipped with probability of
+      ``flip_ratio/len(direction)``.
+      E.g., ``flip_ratio=0.5``, ``direction=['horizontal', 'vertical']``,
+      then image will be horizontally flipped with probability of 0.25,
+      vertically with probability of 0.25.
+    - ``flip_ratio`` is list of float, ``direction`` is list of string:
+      given ``len(flip_ratio) == len(direction)``, the image will
+      be ``direction[i]``ly flipped with probability of ``flip_ratio[i]``.
+      E.g., ``flip_ratio=[0.3, 0.5]``, ``direction=['horizontal',
+      'vertical']``, then image will be horizontally flipped with probability
+      of 0.3, vertically with probability of 0.5.
+
+    Args:
+        flip_ratio (float | list[float], optional): The flipping probability.
+            Default: None.
+        direction (str | list[str], optional): The flipping direction. Options
+            are 'horizontal', 'vertical', 'diagonal'. Default: 'horizontal'.
+            If input is a list, the length must equal ``flip_ratio``. Each
+            element in ``flip_ratio`` indicates the flip probability of
+            corresponding direction.
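The `scale_factor` bookkeeping used by `Resize._resize_img`/`_resize_bboxes` above, reduced to a toy example: a `[w, h, w, h]` vector rescales `[x1, y1, x2, y2]` boxes in a single elementwise multiply:

```python
import numpy as np

w_scale, h_scale = 0.5, 0.5
scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],
                        dtype=np.float32)
bboxes = np.array([[100, 40, 300, 200]], dtype=np.float32)
print(bboxes * scale_factor)  # [[ 50.  20. 150. 100.]]
```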
+ """ + + def __init__(self, flip_ratio=None, direction='horizontal'): + if isinstance(flip_ratio, list): + assert mmcv.is_list_of(flip_ratio, float) + assert 0 <= sum(flip_ratio) <= 1 + elif isinstance(flip_ratio, float): + assert 0 <= flip_ratio <= 1 + elif flip_ratio is None: + pass + else: + raise ValueError('flip_ratios must be None, float, ' + 'or list of float') + self.flip_ratio = flip_ratio + + valid_directions = ['horizontal', 'vertical', 'diagonal'] + if isinstance(direction, str): + assert direction in valid_directions + elif isinstance(direction, list): + assert mmcv.is_list_of(direction, str) + assert set(direction).issubset(set(valid_directions)) + else: + raise ValueError('direction must be either str or list of str') + self.direction = direction + + if isinstance(flip_ratio, list): + assert len(self.flip_ratio) == len(self.direction) + + def bbox_flip(self, bboxes, img_shape, direction): + """Flip bboxes horizontally. + + Args: + bboxes (numpy.ndarray): Bounding boxes, shape (..., 4*k) + img_shape (tuple[int]): Image shape (height, width) + direction (str): Flip direction. Options are 'horizontal', + 'vertical'. + + Returns: + numpy.ndarray: Flipped bounding boxes. + """ + + assert bboxes.shape[-1] % 4 == 0 + flipped = bboxes.copy() + if direction == 'horizontal': + w = img_shape[1] + flipped[..., 0::4] = w - bboxes[..., 2::4] + flipped[..., 2::4] = w - bboxes[..., 0::4] + elif direction == 'vertical': + h = img_shape[0] + flipped[..., 1::4] = h - bboxes[..., 3::4] + flipped[..., 3::4] = h - bboxes[..., 1::4] + elif direction == 'diagonal': + w = img_shape[1] + h = img_shape[0] + flipped[..., 0::4] = w - bboxes[..., 2::4] + flipped[..., 1::4] = h - bboxes[..., 3::4] + flipped[..., 2::4] = w - bboxes[..., 0::4] + flipped[..., 3::4] = h - bboxes[..., 1::4] + else: + raise ValueError(f"Invalid flipping direction '{direction}'") + return flipped + + def __call__(self, results): + """Call function to flip bounding boxes, masks, semantic segmentation + maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Flipped results, 'flip', 'flip_direction' keys are added \ + into result dict. 
+ """ + + if 'flip' not in results: + if isinstance(self.direction, list): + # None means non-flip + direction_list = self.direction + [None] + else: + # None means non-flip + direction_list = [self.direction, None] + + if isinstance(self.flip_ratio, list): + non_flip_ratio = 1 - sum(self.flip_ratio) + flip_ratio_list = self.flip_ratio + [non_flip_ratio] + else: + non_flip_ratio = 1 - self.flip_ratio + # exclude non-flip + single_ratio = self.flip_ratio / (len(direction_list) - 1) + flip_ratio_list = [single_ratio] * (len(direction_list) - + 1) + [non_flip_ratio] + + cur_dir = np.random.choice(direction_list, p=flip_ratio_list) + + results['flip'] = cur_dir is not None + if 'flip_direction' not in results: + results['flip_direction'] = cur_dir + if results['flip']: + # flip image + for key in results.get('img_fields', ['img']): + results[key] = mmcv.imflip( + results[key], direction=results['flip_direction']) + # flip bboxes + for key in results.get('bbox_fields', []): + results[key] = self.bbox_flip(results[key], + results['img_shape'], + results['flip_direction']) + # flip masks + for key in results.get('mask_fields', []): + results[key] = results[key].flip(results['flip_direction']) + + # flip segs + for key in results.get('seg_fields', []): + results[key] = mmcv.imflip( + results[key], direction=results['flip_direction']) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(flip_ratio={self.flip_ratio})' + + +@PIPELINES.register_module() +class Pad(object): + """Pad the image & mask. + + There are two padding modes: (1) pad to a fixed size and (2) pad to the + minimum size that is divisible by some number. + Added keys are "pad_shape", "pad_fixed_size", "pad_size_divisor", + + Args: + size (tuple, optional): Fixed padding size. + size_divisor (int, optional): The divisor of padded size. + pad_val (float, optional): Padding value, 0 by default. + """ + + def __init__(self, size=None, size_divisor=None, pad_val=0): + self.size = size + self.size_divisor = size_divisor + self.pad_val = pad_val + # only one of size and size_divisor should be valid + assert size is not None or size_divisor is not None + assert size is None or size_divisor is None + + def _pad_img(self, results): + """Pad images according to ``self.size``.""" + for key in results.get('img_fields', ['img']): + if self.size is not None: + padded_img = mmcv.impad( + results[key], shape=self.size, pad_val=self.pad_val) + elif self.size_divisor is not None: + padded_img = mmcv.impad_to_multiple( + results[key], self.size_divisor, pad_val=self.pad_val) + results[key] = padded_img + results['pad_shape'] = padded_img.shape + results['pad_fixed_size'] = self.size + results['pad_size_divisor'] = self.size_divisor + + def _pad_masks(self, results): + """Pad masks according to ``results['pad_shape']``.""" + pad_shape = results['pad_shape'][:2] + for key in results.get('mask_fields', []): + results[key] = results[key].pad(pad_shape, pad_val=self.pad_val) + + def _pad_seg(self, results): + """Pad semantic segmentation map according to + ``results['pad_shape']``.""" + for key in results.get('seg_fields', []): + results[key] = mmcv.impad( + results[key], shape=results['pad_shape'][:2]) + + def __call__(self, results): + """Call function to pad images, masks, semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Updated result dict. 
+ """ + self._pad_img(results) + self._pad_masks(results) + self._pad_seg(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(size={self.size}, ' + repr_str += f'size_divisor={self.size_divisor}, ' + repr_str += f'pad_val={self.pad_val})' + return repr_str + + +@PIPELINES.register_module() +class Normalize(object): + """Normalize the image. + + Added key is "img_norm_cfg". + + Args: + mean (sequence): Mean values of 3 channels. + std (sequence): Std values of 3 channels. + to_rgb (bool): Whether to convert the image from BGR to RGB, + default is true. + """ + + def __init__(self, mean, std, to_rgb=True): + self.mean = np.array(mean, dtype=np.float32) + self.std = np.array(std, dtype=np.float32) + self.to_rgb = to_rgb + + def __call__(self, results): + """Call function to normalize images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Normalized results, 'img_norm_cfg' key is added into + result dict. + """ + for key in results.get('img_fields', ['img']): + results[key] = mmcv.imnormalize(results[key], self.mean, self.std, + self.to_rgb) + results['img_norm_cfg'] = dict( + mean=self.mean, std=self.std, to_rgb=self.to_rgb) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(mean={self.mean}, std={self.std}, to_rgb={self.to_rgb})' + return repr_str + + +@PIPELINES.register_module() +class RandomCrop(object): + """Random crop the image & bboxes & masks. + + The absolute `crop_size` is sampled based on `crop_type` and `image_size`, + then the cropped results are generated. + + Args: + crop_size (tuple): The relative ratio or absolute pixels of + height and width. + crop_type (str, optional): one of "relative_range", "relative", + "absolute", "absolute_range". "relative" randomly crops + (h * crop_size[0], w * crop_size[1]) part from an input of size + (h, w). "relative_range" uniformly samples relative crop size from + range [crop_size[0], 1] and [crop_size[1], 1] for height and width + respectively. "absolute" crops from an input with absolute size + (crop_size[0], crop_size[1]). "absolute_range" uniformly samples + crop_h in range [crop_size[0], min(h, crop_size[1])] and crop_w + in range [crop_size[0], min(w, crop_size[1])]. Default "absolute". + allow_negative_crop (bool, optional): Whether to allow a crop that does + not contain any bbox area. Default False. + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. + + Note: + - If the image is smaller than the absolute crop size, return the + original image. + - The keys for bboxes, labels and masks must be aligned. That is, + `gt_bboxes` corresponds to `gt_labels` and `gt_masks`, and + `gt_bboxes_ignore` corresponds to `gt_labels_ignore` and + `gt_masks_ignore`. + - If the crop does not contain any gt-bbox region and + `allow_negative_crop` is set to False, skip this image. 
+ """ + + def __init__(self, + crop_size, + crop_type='absolute', + allow_negative_crop=False, + bbox_clip_border=True): + if crop_type not in [ + 'relative_range', 'relative', 'absolute', 'absolute_range' + ]: + raise ValueError(f'Invalid crop_type {crop_type}.') + if crop_type in ['absolute', 'absolute_range']: + assert crop_size[0] > 0 and crop_size[1] > 0 + assert isinstance(crop_size[0], int) and isinstance( + crop_size[1], int) + else: + assert 0 < crop_size[0] <= 1 and 0 < crop_size[1] <= 1 + self.crop_size = crop_size + self.crop_type = crop_type + self.allow_negative_crop = allow_negative_crop + self.bbox_clip_border = bbox_clip_border + # The key correspondence from bboxes to labels and masks. + self.bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + self.bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def _crop_data(self, results, crop_size, allow_negative_crop): + """Function to randomly crop images, bounding boxes, masks, semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + crop_size (tuple): Expected absolute size after cropping, (h, w). + allow_negative_crop (bool): Whether to allow a crop that does not + contain any bbox area. Default to False. + + Returns: + dict: Randomly cropped results, 'img_shape' key in result dict is + updated according to crop size. + """ + assert crop_size[0] > 0 and crop_size[1] > 0 + for key in results.get('img_fields', ['img']): + img = results[key] + margin_h = max(img.shape[0] - crop_size[0], 0) + margin_w = max(img.shape[1] - crop_size[1], 0) + offset_h = np.random.randint(0, margin_h + 1) + offset_w = np.random.randint(0, margin_w + 1) + crop_y1, crop_y2 = offset_h, offset_h + crop_size[0] + crop_x1, crop_x2 = offset_w, offset_w + crop_size[1] + + # crop the image + img = img[crop_y1:crop_y2, crop_x1:crop_x2, ...] + img_shape = img.shape + results[key] = img + results['img_shape'] = img_shape + + # crop bboxes accordingly and clip to the image boundary + for key in results.get('bbox_fields', []): + # e.g. gt_bboxes and gt_bboxes_ignore + bbox_offset = np.array([offset_w, offset_h, offset_w, offset_h], + dtype=np.float32) + bboxes = results[key] - bbox_offset + if self.bbox_clip_border: + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, img_shape[1]) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, img_shape[0]) + valid_inds = (bboxes[:, 2] > bboxes[:, 0]) & ( + bboxes[:, 3] > bboxes[:, 1]) + # If the crop does not contain any gt-bbox area and + # allow_negative_crop is False, skip this image. + if (key == 'gt_bboxes' and not valid_inds.any() + and not allow_negative_crop): + return None + results[key] = bboxes[valid_inds, :] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = self.bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = self.bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][ + valid_inds.nonzero()[0]].crop( + np.asarray([crop_x1, crop_y1, crop_x2, crop_y2])) + + # crop semantic seg + for key in results.get('seg_fields', []): + results[key] = results[key][crop_y1:crop_y2, crop_x1:crop_x2] + + return results + + def _get_crop_size(self, image_size): + """Randomly generates the absolute crop size based on `crop_type` and + `image_size`. + + Args: + image_size (tuple): (h, w). + + Returns: + crop_size (tuple): (crop_h, crop_w) in absolute pixels. 
+ """ + h, w = image_size + if self.crop_type == 'absolute': + return (min(self.crop_size[0], h), min(self.crop_size[1], w)) + elif self.crop_type == 'absolute_range': + assert self.crop_size[0] <= self.crop_size[1] + crop_h = np.random.randint( + min(h, self.crop_size[0]), + min(h, self.crop_size[1]) + 1) + crop_w = np.random.randint( + min(w, self.crop_size[0]), + min(w, self.crop_size[1]) + 1) + return crop_h, crop_w + elif self.crop_type == 'relative': + crop_h, crop_w = self.crop_size + return int(h * crop_h + 0.5), int(w * crop_w + 0.5) + elif self.crop_type == 'relative_range': + crop_size = np.asarray(self.crop_size, dtype=np.float32) + crop_h, crop_w = crop_size + np.random.rand(2) * (1 - crop_size) + return int(h * crop_h + 0.5), int(w * crop_w + 0.5) + + def __call__(self, results): + """Call function to randomly crop images, bounding boxes, masks, + semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Randomly cropped results, 'img_shape' key in result dict is + updated according to crop size. + """ + image_size = results['img'].shape[:2] + crop_size = self._get_crop_size(image_size) + results = self._crop_data(results, crop_size, self.allow_negative_crop) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(crop_size={self.crop_size}, ' + repr_str += f'crop_type={self.crop_type}, ' + repr_str += f'allow_negative_crop={self.allow_negative_crop}, ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class SegRescale(object): + """Rescale semantic segmentation maps. + + Args: + scale_factor (float): The scale factor of the final output. + backend (str): Image rescale backend, choices are 'cv2' and 'pillow'. + These two backends generates slightly different results. Defaults + to 'cv2'. + """ + + def __init__(self, scale_factor=1, backend='cv2'): + self.scale_factor = scale_factor + self.backend = backend + + def __call__(self, results): + """Call function to scale the semantic segmentation map. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with semantic segmentation map scaled. + """ + + for key in results.get('seg_fields', []): + if self.scale_factor != 1: + results[key] = mmcv.imrescale( + results[key], + self.scale_factor, + interpolation='nearest', + backend=self.backend) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(scale_factor={self.scale_factor})' + + +@PIPELINES.register_module() +class PhotoMetricDistortion(object): + """Apply photometric distortion to image sequentially, every transformation + is applied with a probability of 0.5. The position of random contrast is in + second or second to last. + + 1. random brightness + 2. random contrast (mode 0) + 3. convert color from BGR to HSV + 4. random saturation + 5. random hue + 6. convert color from HSV to BGR + 7. random contrast (mode 1) + 8. randomly swap channels + + Args: + brightness_delta (int): delta of brightness. + contrast_range (tuple): range of contrast. + saturation_range (tuple): range of saturation. + hue_delta (int): delta of hue. 
+ """ + + def __init__(self, + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18): + self.brightness_delta = brightness_delta + self.contrast_lower, self.contrast_upper = contrast_range + self.saturation_lower, self.saturation_upper = saturation_range + self.hue_delta = hue_delta + + def __call__(self, results): + """Call function to perform photometric distortion on images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images distorted. + """ + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + assert img.dtype == np.float32, \ + 'PhotoMetricDistortion needs the input image of dtype np.float32,'\ + ' please set "to_float32=True" in "LoadImageFromFile" pipeline' + # random brightness + if random.randint(2): + delta = random.uniform(-self.brightness_delta, + self.brightness_delta) + img += delta + + # mode == 0 --> do random contrast first + # mode == 1 --> do random contrast last + mode = random.randint(2) + if mode == 1: + if random.randint(2): + alpha = random.uniform(self.contrast_lower, + self.contrast_upper) + img *= alpha + + # convert color from BGR to HSV + img = mmcv.bgr2hsv(img) + + # random saturation + if random.randint(2): + img[..., 1] *= random.uniform(self.saturation_lower, + self.saturation_upper) + + # random hue + if random.randint(2): + img[..., 0] += random.uniform(-self.hue_delta, self.hue_delta) + img[..., 0][img[..., 0] > 360] -= 360 + img[..., 0][img[..., 0] < 0] += 360 + + # convert color from HSV to BGR + img = mmcv.hsv2bgr(img) + + # random contrast + if mode == 0: + if random.randint(2): + alpha = random.uniform(self.contrast_lower, + self.contrast_upper) + img *= alpha + + # randomly swap channels + if random.randint(2): + img = img[..., random.permutation(3)] + + results['img'] = img + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(\nbrightness_delta={self.brightness_delta},\n' + repr_str += 'contrast_range=' + repr_str += f'{(self.contrast_lower, self.contrast_upper)},\n' + repr_str += 'saturation_range=' + repr_str += f'{(self.saturation_lower, self.saturation_upper)},\n' + repr_str += f'hue_delta={self.hue_delta})' + return repr_str + + +@PIPELINES.register_module() +class Expand(object): + """Random expand the image & bboxes. + + Randomly place the original image on a canvas of 'ratio' x original image + size filled with mean values. The ratio is in the range of ratio_range. + + Args: + mean (tuple): mean value of dataset. + to_rgb (bool): if need to convert the order of mean to align with RGB. + ratio_range (tuple): range of expand ratio. + prob (float): probability of applying this transformation + """ + + def __init__(self, + mean=(0, 0, 0), + to_rgb=True, + ratio_range=(1, 4), + seg_ignore_label=None, + prob=0.5): + self.to_rgb = to_rgb + self.ratio_range = ratio_range + if to_rgb: + self.mean = mean[::-1] + else: + self.mean = mean + self.min_ratio, self.max_ratio = ratio_range + self.seg_ignore_label = seg_ignore_label + self.prob = prob + + def __call__(self, results): + """Call function to expand images, bounding boxes. + + Args: + results (dict): Result dict from loading pipeline. 
+ + Returns: + dict: Result dict with images, bounding boxes expanded + """ + + if random.uniform(0, 1) > self.prob: + return results + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + + h, w, c = img.shape + ratio = random.uniform(self.min_ratio, self.max_ratio) + # speedup expand when meets large image + if np.all(self.mean == self.mean[0]): + expand_img = np.empty((int(h * ratio), int(w * ratio), c), + img.dtype) + expand_img.fill(self.mean[0]) + else: + expand_img = np.full((int(h * ratio), int(w * ratio), c), + self.mean, + dtype=img.dtype) + left = int(random.uniform(0, w * ratio - w)) + top = int(random.uniform(0, h * ratio - h)) + expand_img[top:top + h, left:left + w] = img + + results['img'] = expand_img + # expand bboxes + for key in results.get('bbox_fields', []): + results[key] = results[key] + np.tile( + (left, top), 2).astype(results[key].dtype) + + # expand masks + for key in results.get('mask_fields', []): + results[key] = results[key].expand( + int(h * ratio), int(w * ratio), top, left) + + # expand segs + for key in results.get('seg_fields', []): + gt_seg = results[key] + expand_gt_seg = np.full((int(h * ratio), int(w * ratio)), + self.seg_ignore_label, + dtype=gt_seg.dtype) + expand_gt_seg[top:top + h, left:left + w] = gt_seg + results[key] = expand_gt_seg + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(mean={self.mean}, to_rgb={self.to_rgb}, ' + repr_str += f'ratio_range={self.ratio_range}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label})' + return repr_str + + +@PIPELINES.register_module() +class MinIoURandomCrop(object): + """Random crop the image & bboxes, the cropped patches have minimum IoU + requirement with original image & bboxes, the IoU threshold is randomly + selected from min_ious. + + Args: + min_ious (tuple): minimum IoU threshold for all intersections with + bounding boxes + min_crop_size (float): minimum crop's size (i.e. h,w := a*h, a*w, + where a >= min_crop_size). + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. + + Note: + The keys for bboxes, labels and masks should be paired. That is, \ + `gt_bboxes` corresponds to `gt_labels` and `gt_masks`, and \ + `gt_bboxes_ignore` to `gt_labels_ignore` and `gt_masks_ignore`. + """ + + def __init__(self, + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3, + bbox_clip_border=True): + # 1: return ori img + self.min_ious = min_ious + self.sample_mode = (1, *min_ious, 0) + self.min_crop_size = min_crop_size + self.bbox_clip_border = bbox_clip_border + self.bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + self.bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def __call__(self, results): + """Call function to crop images and bounding boxes with minimum IoU + constraint. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images and bounding boxes cropped, \ + 'img_shape' key is updated. 
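A standalone sketch of what `Expand.__call__` does geometrically: paste the image at a random offset on a mean-filled canvas `ratio` times larger, then shift the boxes by the same offset:

```python
import numpy as np

h, w, c = 100, 100, 3
ratio = 2.0
img = np.ones((h, w, c), dtype=np.uint8)
canvas = np.full((int(h * ratio), int(w * ratio), c), 127,
                 dtype=np.uint8)           # mean-filled canvas
left, top = 30, 60                         # paste offset (random in Expand)
canvas[top:top + h, left:left + w] = img
bboxes = np.array([[10, 10, 50, 50]], dtype=np.float32)
bboxes += np.tile((left, top), 2)          # shift boxes with the image
print(canvas.shape, bboxes)                # (200, 200, 3) [[40. 70. 80. 110.]]
```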
+ """ + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + assert 'bbox_fields' in results + boxes = [results[key] for key in results['bbox_fields']] + boxes = np.concatenate(boxes, 0) + h, w, c = img.shape + while True: + mode = random.choice(self.sample_mode) + self.mode = mode + if mode == 1: + return results + + min_iou = mode + for i in range(50): + new_w = random.uniform(self.min_crop_size * w, w) + new_h = random.uniform(self.min_crop_size * h, h) + + # h / w in [0.5, 2] + if new_h / new_w < 0.5 or new_h / new_w > 2: + continue + + left = random.uniform(w - new_w) + top = random.uniform(h - new_h) + + patch = np.array( + (int(left), int(top), int(left + new_w), int(top + new_h))) + # Line or point crop is not allowed + if patch[2] == patch[0] or patch[3] == patch[1]: + continue + overlaps = bbox_overlaps( + patch.reshape(-1, 4), boxes.reshape(-1, 4)).reshape(-1) + if len(overlaps) > 0 and overlaps.min() < min_iou: + continue + + # center of boxes should inside the crop img + # only adjust boxes and instance masks when the gt is not empty + if len(overlaps) > 0: + # adjust boxes + def is_center_of_bboxes_in_patch(boxes, patch): + center = (boxes[:, :2] + boxes[:, 2:]) / 2 + mask = ((center[:, 0] > patch[0]) * + (center[:, 1] > patch[1]) * + (center[:, 0] < patch[2]) * + (center[:, 1] < patch[3])) + return mask + + mask = is_center_of_bboxes_in_patch(boxes, patch) + if not mask.any(): + continue + for key in results.get('bbox_fields', []): + boxes = results[key].copy() + mask = is_center_of_bboxes_in_patch(boxes, patch) + boxes = boxes[mask] + if self.bbox_clip_border: + boxes[:, 2:] = boxes[:, 2:].clip(max=patch[2:]) + boxes[:, :2] = boxes[:, :2].clip(min=patch[:2]) + boxes -= np.tile(patch[:2], 2) + + results[key] = boxes + # labels + label_key = self.bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][mask] + + # mask fields + mask_key = self.bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][ + mask.nonzero()[0]].crop(patch) + # adjust the img no matter whether the gt is empty before crop + img = img[patch[1]:patch[3], patch[0]:patch[2]] + results['img'] = img + results['img_shape'] = img.shape + + # seg fields + for key in results.get('seg_fields', []): + results[key] = results[key][patch[1]:patch[3], + patch[0]:patch[2]] + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(min_ious={self.min_ious}, ' + repr_str += f'min_crop_size={self.min_crop_size}), ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class Corrupt(object): + """Corruption augmentation. + + Corruption transforms implemented based on + `imagecorruptions `_. + + Args: + corruption (str): Corruption name. + severity (int, optional): The severity of corruption. Default: 1. + """ + + def __init__(self, corruption, severity=1): + self.corruption = corruption + self.severity = severity + + def __call__(self, results): + """Call function to corrupt image. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images corrupted. 
+ """ + + if corrupt is None: + raise RuntimeError('imagecorruptions is not installed') + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + results['img'] = corrupt( + results['img'].astype(np.uint8), + corruption_name=self.corruption, + severity=self.severity) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(corruption={self.corruption}, ' + repr_str += f'severity={self.severity})' + return repr_str + + +@PIPELINES.register_module() +class Albu(object): + """Albumentation augmentation. + + Adds custom transformations from Albumentations library. + Please, visit `https://albumentations.readthedocs.io` + to get more information. + + An example of ``transforms`` is as followed: + + .. code-block:: + + [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), + ] + + Args: + transforms (list[dict]): A list of albu transformations + bbox_params (dict): Bbox_params for albumentation `Compose` + keymap (dict): Contains {'input key':'albumentation-style key'} + skip_img_without_anno (bool): Whether to skip the image if no ann left + after aug + """ + + def __init__(self, + transforms, + bbox_params=None, + keymap=None, + update_pad_shape=False, + skip_img_without_anno=False): + if Compose is None: + raise RuntimeError('albumentations is not installed') + + self.transforms = transforms + self.filter_lost_elements = False + self.update_pad_shape = update_pad_shape + self.skip_img_without_anno = skip_img_without_anno + + # A simple workaround to remove masks without boxes + if (isinstance(bbox_params, dict) and 'label_fields' in bbox_params + and 'filter_lost_elements' in bbox_params): + self.filter_lost_elements = True + self.origin_label_fields = bbox_params['label_fields'] + bbox_params['label_fields'] = ['idx_mapper'] + del bbox_params['filter_lost_elements'] + + self.bbox_params = ( + self.albu_builder(bbox_params) if bbox_params else None) + self.aug = Compose([self.albu_builder(t) for t in self.transforms], + bbox_params=self.bbox_params) + + if not keymap: + self.keymap_to_albu = { + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + } + else: + self.keymap_to_albu = keymap + self.keymap_back = {v: k for k, v in self.keymap_to_albu.items()} + + def albu_builder(self, cfg): + """Import a module from albumentations. + + It inherits some of :func:`build_from_cfg` logic. + + Args: + cfg (dict): Config dict. It should at least contain the key "type". + + Returns: + obj: The constructed object. + """ + + assert isinstance(cfg, dict) and 'type' in cfg + args = cfg.copy() + + obj_type = args.pop('type') + if mmcv.is_str(obj_type): + if albumentations is None: + raise RuntimeError('albumentations is not installed') + obj_cls = getattr(albumentations, obj_type) + elif inspect.isclass(obj_type): + obj_cls = obj_type + else: + raise TypeError( + f'type must be a str or valid type, but got {type(obj_type)}') + + if 'transforms' in args: + args['transforms'] = [ + self.albu_builder(transform) + for transform in args['transforms'] + ] + + return obj_cls(**args) + + @staticmethod + def mapper(d, keymap): + """Dictionary mapper. 
Renames keys according to keymap provided. + + Args: + d (dict): old dict + keymap (dict): {'old_key':'new_key'} + Returns: + dict: new dict. + """ + + updated_dict = {} + for k, v in zip(d.keys(), d.values()): + new_k = keymap.get(k, k) + updated_dict[new_k] = d[k] + return updated_dict + + def __call__(self, results): + # dict to albumentations format + results = self.mapper(results, self.keymap_to_albu) + # TODO: add bbox_fields + if 'bboxes' in results: + # to list of boxes + if isinstance(results['bboxes'], np.ndarray): + results['bboxes'] = [x for x in results['bboxes']] + # add pseudo-field for filtration + if self.filter_lost_elements: + results['idx_mapper'] = np.arange(len(results['bboxes'])) + + # TODO: Support mask structure in albu + if 'masks' in results: + if isinstance(results['masks'], PolygonMasks): + raise NotImplementedError( + 'Albu only supports BitMap masks now') + ori_masks = results['masks'] + if albumentations.__version__ < '0.5': + results['masks'] = results['masks'].masks + else: + results['masks'] = [mask for mask in results['masks'].masks] + + results = self.aug(**results) + + if 'bboxes' in results: + if isinstance(results['bboxes'], list): + results['bboxes'] = np.array( + results['bboxes'], dtype=np.float32) + results['bboxes'] = results['bboxes'].reshape(-1, 4) + + # filter label_fields + if self.filter_lost_elements: + + for label in self.origin_label_fields: + results[label] = np.array( + [results[label][i] for i in results['idx_mapper']]) + if 'masks' in results: + results['masks'] = np.array( + [results['masks'][i] for i in results['idx_mapper']]) + results['masks'] = ori_masks.__class__( + results['masks'], results['image'].shape[0], + results['image'].shape[1]) + + if (not len(results['idx_mapper']) + and self.skip_img_without_anno): + return None + + if 'gt_labels' in results: + if isinstance(results['gt_labels'], list): + results['gt_labels'] = np.array(results['gt_labels']) + results['gt_labels'] = results['gt_labels'].astype(np.int64) + + # back to the original format + results = self.mapper(results, self.keymap_back) + + # update final shape + if self.update_pad_shape: + results['pad_shape'] = results['img'].shape + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + f'(transforms={self.transforms})' + return repr_str + + +@PIPELINES.register_module() +class RandomCenterCropPad(object): + """Random center crop and random around padding for CornerNet. + + This operation generates randomly cropped image from the original image and + pads it simultaneously. Different from :class:`RandomCrop`, the output + shape may not equal to ``crop_size`` strictly. We choose a random value + from ``ratios`` and the output shape could be larger or smaller than + ``crop_size``. The padding operation is also different from :class:`Pad`, + here we use around padding instead of right-bottom padding. + + The relation between output image (padding image) and original image: + + .. code:: text + + output image + + +----------------------------+ + | padded area | + +------|----------------------------|----------+ + | | cropped area | | + | | +---------------+ | | + | | | . center | | | original image + | | | range | | | + | | +---------------+ | | + +------|----------------------------|----------+ + | padded area | + +----------------------------+ + + There are 5 main areas in the figure: + + - output image: output image of this operation, also called padding + image in following instruction. + - original image: input image of this operation. 
+    - padded area: non-intersect area of output image and original image.
+    - cropped area: the overlap of output image and original image.
+    - center range: a smaller area where the random center is chosen from.
+      The center range is computed from ``border`` and the original image's
+      shape to avoid the random center being too close to the original
+      image's border.
+
+    Also, this operation acts differently in train and test modes; the
+    summary pipelines are listed below.
+
+    Train pipeline:
+
+    1. Choose a ``random_ratio`` from ``ratios``; the shape of the padding
+       image will be ``random_ratio * crop_size``.
+    2. Choose a ``random_center`` in the center range.
+    3. Generate the padding image so that its center matches the
+       ``random_center``.
+    4. Initialize the padding image with pixel values equal to ``mean``.
+    5. Copy the cropped area to the padding image.
+    6. Refine annotations.
+
+    Test pipeline:
+
+    1. Compute the output shape according to ``test_pad_mode``.
+    2. Generate the padding image so that its center matches the original
+       image center.
+    3. Initialize the padding image with pixel values equal to ``mean``.
+    4. Copy the ``cropped area`` to the padding image.
+
+    Args:
+        crop_size (tuple | None): expected size after crop; the final size
+            will be computed according to the ratio. Requires (h, w) in
+            train mode, and None in test mode.
+        ratios (tuple): randomly select a ratio from the tuple and crop the
+            image to (crop_size[0] * ratio) * (crop_size[1] * ratio).
+            Only available in train mode.
+        border (int): max distance from the center-select area to the image
+            border. Only available in train mode.
+        mean (sequence): Mean values of 3 channels.
+        std (sequence): Std values of 3 channels.
+        to_rgb (bool): Whether to convert the image from BGR to RGB.
+        test_mode (bool): whether to involve random variables in the
+            transform. In train mode, crop_size is fixed, and center coords
+            and ratio are randomly selected from predefined lists. In test
+            mode, crop_size is the image's original shape, and center coords
+            and ratio are fixed.
+        test_pad_mode (tuple): padding method and padding shape value, only
+            available in test mode. Default is using 'logical_or' with
+            127 as padding shape value.
+
+            - 'logical_or': final_shape = input_shape | padding_shape_value
+            - 'size_divisor': final_shape = int(
+              ceil(input_shape / padding_shape_value) * padding_shape_value)
+        bbox_clip_border (bool, optional): Whether to clip objects outside
+            the border of the image. Defaults to True.
+    """
+
+    def __init__(self,
+                 crop_size=None,
+                 ratios=(0.9, 1.0, 1.1),
+                 border=128,
+                 mean=None,
+                 std=None,
+                 to_rgb=None,
+                 test_mode=False,
+                 test_pad_mode=('logical_or', 127),
+                 bbox_clip_border=True):
+        if test_mode:
+            assert crop_size is None, 'crop_size must be None in test mode'
+            assert ratios is None, 'ratios must be None in test mode'
+            assert border is None, 'border must be None in test mode'
+            assert isinstance(test_pad_mode, (list, tuple))
+            assert test_pad_mode[0] in ['logical_or', 'size_divisor']
+        else:
+            assert isinstance(crop_size, (list, tuple))
+            assert crop_size[0] > 0 and crop_size[1] > 0, (
+                'crop_size must > 0 in train mode')
+            assert isinstance(ratios, (list, tuple))
+            assert test_pad_mode is None, (
+                'test_pad_mode must be None in train mode')
+
+        self.crop_size = crop_size
+        self.ratios = ratios
+        self.border = border
+        # We do not set default values for mean, std and to_rgb because these
+        # hyper-parameters are easy to forget but could affect performance.
+        # Please use the same setting as Normalize for performance assurance.
+        assert mean is not None and std is not None and to_rgb is not None
+        self.to_rgb = to_rgb
+        self.input_mean = mean
+        self.input_std = std
+        if to_rgb:
+            self.mean = mean[::-1]
+            self.std = std[::-1]
+        else:
+            self.mean = mean
+            self.std = std
+        self.test_mode = test_mode
+        self.test_pad_mode = test_pad_mode
+        self.bbox_clip_border = bbox_clip_border
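The two `test_pad_mode` options documented above fully determine the padded output shape at test time. As an illustration, here is a minimal standalone sketch (plain Python; `test_pad_shape` and the input sizes are hypothetical, not part of the patch):

```python
import math

def test_pad_shape(input_shape, mode, value):
    """Compute the padded (h, w) the way the docstring above describes."""
    h, w = input_shape
    if mode == 'logical_or':
        # bitwise OR with the padding value, e.g. 127 snaps each side
        # up to the next value of the form 128k - 1
        return h | value, w | value
    elif mode == 'size_divisor':
        # round each side up to the nearest multiple of the divisor
        return math.ceil(h / value) * value, math.ceil(w / value) * value
    raise ValueError(mode)

print(test_pad_shape((500, 375), 'logical_or', 127))   # (511, 383)
print(test_pad_shape((500, 375), 'size_divisor', 32))  # (512, 384)
```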
+    def _get_border(self, border, size):
+        """Get the final border for the target size.
+
+        This function generates a ``final_border`` according to the image's
+        shape. The area between ``final_border`` and ``size - final_border``
+        is the ``center range``. We randomly choose the center from the
+        ``center range`` to avoid the random center being too close to the
+        original image's border. Also, the ``center range`` should be larger
+        than 0.
+
+        Args:
+            border (int): The initial border, default is 128.
+            size (int): The width or height of the original image.
+        Returns:
+            int: The final border.
+        """
+        k = 2 * border / size
+        i = pow(2, np.ceil(np.log2(np.ceil(k))) + (k == int(k)))
+        return border // i
+
+    def _filter_boxes(self, patch, boxes):
+        """Check whether the center of each box is in the patch.
+
+        Args:
+            patch (list[int]): The cropped area, [left, top, right, bottom].
+            boxes (numpy array, (N x 4)): Ground truth boxes.
+
+        Returns:
+            mask (numpy array, (N,)): Each box is inside or outside the patch.
+        """
+        center = (boxes[:, :2] + boxes[:, 2:]) / 2
+        mask = (center[:, 0] > patch[0]) * (center[:, 1] > patch[1]) * (
+            center[:, 0] < patch[2]) * (
+                center[:, 1] < patch[3])
+        return mask
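`_get_border` above repeatedly halves the initial border until the sampling interval `[final_border, size - final_border]` for the random center is non-empty. A standalone check of that arithmetic with hypothetical image sizes:

```python
import numpy as np

def get_border(border, size):
    # same arithmetic as RandomCenterCropPad._get_border above
    k = 2 * border / size
    i = pow(2, np.ceil(np.log2(np.ceil(k))) + (k == int(k)))
    return border // i

# border=128: large images keep it, small images shrink it
print(get_border(128, 512))  # 128.0 -> center range [128, 384]
print(get_border(128, 200))  # 64.0  -> center range [64, 136]
print(get_border(128, 100))  # 32.0  -> center range [32, 68]
```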
+ """ + center_y, center_x = center + target_h, target_w = size + img_h, img_w, img_c = image.shape + + x0 = max(0, center_x - target_w // 2) + x1 = min(center_x + target_w // 2, img_w) + y0 = max(0, center_y - target_h // 2) + y1 = min(center_y + target_h // 2, img_h) + patch = np.array((int(x0), int(y0), int(x1), int(y1))) + + left, right = center_x - x0, x1 - center_x + top, bottom = center_y - y0, y1 - center_y + + cropped_center_y, cropped_center_x = target_h // 2, target_w // 2 + cropped_img = np.zeros((target_h, target_w, img_c), dtype=image.dtype) + for i in range(img_c): + cropped_img[:, :, i] += self.mean[i] + y_slice = slice(cropped_center_y - top, cropped_center_y + bottom) + x_slice = slice(cropped_center_x - left, cropped_center_x + right) + cropped_img[y_slice, x_slice, :] = image[y0:y1, x0:x1, :] + + border = np.array([ + cropped_center_y - top, cropped_center_y + bottom, + cropped_center_x - left, cropped_center_x + right + ], + dtype=np.float32) + + return cropped_img, border, patch + + def _train_aug(self, results): + """Random crop and around padding the original image. + + Args: + results (dict): Image infomations in the augment pipeline. + + Returns: + results (dict): The updated dict. + """ + img = results['img'] + h, w, c = img.shape + boxes = results['gt_bboxes'] + while True: + scale = random.choice(self.ratios) + new_h = int(self.crop_size[0] * scale) + new_w = int(self.crop_size[1] * scale) + h_border = self._get_border(self.border, h) + w_border = self._get_border(self.border, w) + + for i in range(50): + center_x = random.randint(low=w_border, high=w - w_border) + center_y = random.randint(low=h_border, high=h - h_border) + + cropped_img, border, patch = self._crop_image_and_paste( + img, [center_y, center_x], [new_h, new_w]) + + mask = self._filter_boxes(patch, boxes) + # if image do not have valid bbox, any crop patch is valid. + if not mask.any() and len(boxes) > 0: + continue + + results['img'] = cropped_img + results['img_shape'] = cropped_img.shape + results['pad_shape'] = cropped_img.shape + + x0, y0, x1, y1 = patch + + left_w, top_h = center_x - x0, center_y - y0 + cropped_center_x, cropped_center_y = new_w // 2, new_h // 2 + + # crop bboxes accordingly and clip to the image boundary + for key in results.get('bbox_fields', []): + mask = self._filter_boxes(patch, results[key]) + bboxes = results[key][mask] + bboxes[:, 0:4:2] += cropped_center_x - left_w - x0 + bboxes[:, 1:4:2] += cropped_center_y - top_h - y0 + if self.bbox_clip_border: + bboxes[:, 0:4:2] = np.clip(bboxes[:, 0:4:2], 0, new_w) + bboxes[:, 1:4:2] = np.clip(bboxes[:, 1:4:2], 0, new_h) + keep = (bboxes[:, 2] > bboxes[:, 0]) & ( + bboxes[:, 3] > bboxes[:, 1]) + bboxes = bboxes[keep] + results[key] = bboxes + if key in ['gt_bboxes']: + if 'gt_labels' in results: + labels = results['gt_labels'][mask] + labels = labels[keep] + results['gt_labels'] = labels + if 'gt_masks' in results: + raise NotImplementedError( + 'RandomCenterCropPad only supports bbox.') + + # crop semantic seg + for key in results.get('seg_fields', []): + raise NotImplementedError( + 'RandomCenterCropPad only supports bbox.') + return results + + def _test_aug(self, results): + """Around padding the original image without cropping. + + The padding mode and value are from ``test_pad_mode``. + + Args: + results (dict): Image infomations in the augment pipeline. + + Returns: + results (dict): The updated dict. 
+ """ + img = results['img'] + h, w, c = img.shape + results['img_shape'] = img.shape + if self.test_pad_mode[0] in ['logical_or']: + target_h = h | self.test_pad_mode[1] + target_w = w | self.test_pad_mode[1] + elif self.test_pad_mode[0] in ['size_divisor']: + divisor = self.test_pad_mode[1] + target_h = int(np.ceil(h / divisor)) * divisor + target_w = int(np.ceil(w / divisor)) * divisor + else: + raise NotImplementedError( + 'RandomCenterCropPad only support two testing pad mode:' + 'logical-or and size_divisor.') + + cropped_img, border, _ = self._crop_image_and_paste( + img, [h // 2, w // 2], [target_h, target_w]) + results['img'] = cropped_img + results['pad_shape'] = cropped_img.shape + results['border'] = border + return results + + def __call__(self, results): + img = results['img'] + assert img.dtype == np.float32, ( + 'RandomCenterCropPad needs the input image of dtype np.float32,' + ' please set "to_float32=True" in "LoadImageFromFile" pipeline') + h, w, c = img.shape + assert c == len(self.mean) + if self.test_mode: + return self._test_aug(results) + else: + return self._train_aug(results) + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(crop_size={self.crop_size}, ' + repr_str += f'ratios={self.ratios}, ' + repr_str += f'border={self.border}, ' + repr_str += f'mean={self.input_mean}, ' + repr_str += f'std={self.input_std}, ' + repr_str += f'to_rgb={self.to_rgb}, ' + repr_str += f'test_mode={self.test_mode}, ' + repr_str += f'test_pad_mode={self.test_pad_mode}), ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class CutOut(object): + """CutOut operation. + + Randomly drop some regions of image used in + `Cutout `_. + + Args: + n_holes (int | tuple[int, int]): Number of regions to be dropped. + If it is given as a list, number of holes will be randomly + selected from the closed interval [`n_holes[0]`, `n_holes[1]`]. + cutout_shape (tuple[int, int] | list[tuple[int, int]]): The candidate + shape of dropped regions. It can be `tuple[int, int]` to use a + fixed cutout shape, or `list[tuple[int, int]]` to randomly choose + shape from the list. + cutout_ratio (tuple[float, float] | list[tuple[float, float]]): The + candidate ratio of dropped regions. It can be `tuple[float, float]` + to use a fixed ratio or `list[tuple[float, float]]` to randomly + choose ratio from the list. Please note that `cutout_shape` + and `cutout_ratio` cannot be both given at the same time. + fill_in (tuple[float, float, float] | tuple[int, int, int]): The value + of pixel to fill in the dropped regions. Default: (0, 0, 0). + """ + + def __init__(self, + n_holes, + cutout_shape=None, + cutout_ratio=None, + fill_in=(0, 0, 0)): + + assert (cutout_shape is None) ^ (cutout_ratio is None), \ + 'Either cutout_shape or cutout_ratio should be specified.' 
+ assert (isinstance(cutout_shape, (list, tuple)) + or isinstance(cutout_ratio, (list, tuple))) + if isinstance(n_holes, tuple): + assert len(n_holes) == 2 and 0 <= n_holes[0] < n_holes[1] + else: + n_holes = (n_holes, n_holes) + self.n_holes = n_holes + self.fill_in = fill_in + self.with_ratio = cutout_ratio is not None + self.candidates = cutout_ratio if self.with_ratio else cutout_shape + if not isinstance(self.candidates, list): + self.candidates = [self.candidates] + + def __call__(self, results): + """Call function to drop some regions of image.""" + h, w, c = results['img'].shape + n_holes = np.random.randint(self.n_holes[0], self.n_holes[1] + 1) + for _ in range(n_holes): + x1 = np.random.randint(0, w) + y1 = np.random.randint(0, h) + index = np.random.randint(0, len(self.candidates)) + if not self.with_ratio: + cutout_w, cutout_h = self.candidates[index] + else: + cutout_w = int(self.candidates[index][0] * w) + cutout_h = int(self.candidates[index][1] * h) + + x2 = np.clip(x1 + cutout_w, 0, w) + y2 = np.clip(y1 + cutout_h, 0, h) + results['img'][y1:y2, x1:x2, :] = self.fill_in + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(n_holes={self.n_holes}, ' + repr_str += (f'cutout_ratio={self.candidates}, ' if self.with_ratio + else f'cutout_shape={self.candidates}, ') + repr_str += f'fill_in={self.fill_in})' + return repr_str diff --git a/thirdparty/mmdetection/mmdet/datasets/samplers/__init__.py b/thirdparty/mmdetection/mmdet/datasets/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2596aeb2ccfc85b58624713c04453d34e94a4062 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/samplers/__init__.py @@ -0,0 +1,4 @@ +from .distributed_sampler import DistributedSampler +from .group_sampler import DistributedGroupSampler, GroupSampler + +__all__ = ['DistributedSampler', 'DistributedGroupSampler', 'GroupSampler'] diff --git a/thirdparty/mmdetection/mmdet/datasets/samplers/distributed_sampler.py b/thirdparty/mmdetection/mmdet/datasets/samplers/distributed_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..2a85619cfa0d067ed7be79d5d4fcfd770314ec0f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/samplers/distributed_sampler.py @@ -0,0 +1,28 @@ +import torch +from torch.utils.data import DistributedSampler as _DistributedSampler + + +class DistributedSampler(_DistributedSampler): + + def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True): + super().__init__(dataset, num_replicas=num_replicas, rank=rank) + self.shuffle = shuffle + + def __iter__(self): + # deterministically shuffle based on epoch + if self.shuffle: + g = torch.Generator() + g.manual_seed(self.epoch) + indices = torch.randperm(len(self.dataset), generator=g).tolist() + else: + indices = torch.arange(len(self.dataset)).tolist() + + # add extra samples to make it evenly divisible + indices += indices[:(self.total_size - len(indices))] + assert len(indices) == self.total_size + + # subsample + indices = indices[self.rank:self.total_size:self.num_replicas] + assert len(indices) == self.num_samples + + return iter(indices) diff --git a/thirdparty/mmdetection/mmdet/datasets/samplers/group_sampler.py b/thirdparty/mmdetection/mmdet/datasets/samplers/group_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..a691b949d73b067fcfa95f192e91d28195425a98 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/samplers/group_sampler.py @@ -0,0 +1,143 @@ +from __future__ 
import division +import math + +import numpy as np +import torch +from mmcv.runner import get_dist_info +from torch.utils.data import Sampler + + +class GroupSampler(Sampler): + + def __init__(self, dataset, samples_per_gpu=1): + assert hasattr(dataset, 'flag') + self.dataset = dataset + self.samples_per_gpu = samples_per_gpu + self.flag = dataset.flag.astype(np.int64) + self.group_sizes = np.bincount(self.flag) + self.num_samples = 0 + for i, size in enumerate(self.group_sizes): + self.num_samples += int(np.ceil( + size / self.samples_per_gpu)) * self.samples_per_gpu + + def __iter__(self): + indices = [] + for i, size in enumerate(self.group_sizes): + if size == 0: + continue + indice = np.where(self.flag == i)[0] + assert len(indice) == size + np.random.shuffle(indice) + num_extra = int(np.ceil(size / self.samples_per_gpu) + ) * self.samples_per_gpu - len(indice) + indice = np.concatenate( + [indice, np.random.choice(indice, num_extra)]) + indices.append(indice) + indices = np.concatenate(indices) + indices = [ + indices[i * self.samples_per_gpu:(i + 1) * self.samples_per_gpu] + for i in np.random.permutation( + range(len(indices) // self.samples_per_gpu)) + ] + indices = np.concatenate(indices) + indices = indices.astype(np.int64).tolist() + assert len(indices) == self.num_samples + return iter(indices) + + def __len__(self): + return self.num_samples + + +class DistributedGroupSampler(Sampler): + """Sampler that restricts data loading to a subset of the dataset. + + It is especially useful in conjunction with + :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each + process can pass a DistributedSampler instance as a DataLoader sampler, + and load a subset of the original dataset that is exclusive to it. + + .. note:: + Dataset is assumed to be of constant size. + + Arguments: + dataset: Dataset used for sampling. + num_replicas (optional): Number of processes participating in + distributed training. + rank (optional): Rank of the current process within num_replicas. + """ + + def __init__(self, + dataset, + samples_per_gpu=1, + num_replicas=None, + rank=None): + _rank, _num_replicas = get_dist_info() + if num_replicas is None: + num_replicas = _num_replicas + if rank is None: + rank = _rank + self.dataset = dataset + self.samples_per_gpu = samples_per_gpu + self.num_replicas = num_replicas + self.rank = rank + self.epoch = 0 + + assert hasattr(self.dataset, 'flag') + self.flag = self.dataset.flag + self.group_sizes = np.bincount(self.flag) + + self.num_samples = 0 + for i, j in enumerate(self.group_sizes): + self.num_samples += int( + math.ceil(self.group_sizes[i] * 1.0 / self.samples_per_gpu / + self.num_replicas)) * self.samples_per_gpu + self.total_size = self.num_samples * self.num_replicas + + def __iter__(self): + # deterministically shuffle based on epoch + g = torch.Generator() + g.manual_seed(self.epoch) + + indices = [] + for i, size in enumerate(self.group_sizes): + if size > 0: + indice = np.where(self.flag == i)[0] + assert len(indice) == size + # add .numpy() to avoid bug when selecting indice in parrots. + # TODO: check whether torch.randperm() can be replaced by + # numpy.random.permutation(). 
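`GroupSampler` above pads every aspect-ratio group (encoded in the dataset's `flag` array) up to a multiple of `samples_per_gpu` by re-drawing indices from the same group, so each GPU batch mixes images of one orientation only. A small NumPy sketch of just that padding step, with a made-up flag array:

```python
import numpy as np

flag = np.array([0, 0, 0, 1, 1])   # 3 landscape images, 2 portrait images
samples_per_gpu = 2
group_sizes = np.bincount(flag)    # [3 2]

for i, size in enumerate(group_sizes):
    indice = np.where(flag == i)[0]
    np.random.shuffle(indice)
    num_extra = int(np.ceil(size / samples_per_gpu)) * samples_per_gpu - size
    padded = np.concatenate([indice, np.random.choice(indice, num_extra)])
    print(i, padded)   # group 0 grows to 4 indices, group 1 stays at 2
```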
+ indice = indice[list( + torch.randperm(int(size), generator=g).numpy())].tolist() + extra = int( + math.ceil( + size * 1.0 / self.samples_per_gpu / self.num_replicas) + ) * self.samples_per_gpu * self.num_replicas - len(indice) + # pad indice + tmp = indice.copy() + for _ in range(extra // size): + indice.extend(tmp) + indice.extend(tmp[:extra % size]) + indices.extend(indice) + + assert len(indices) == self.total_size + + indices = [ + indices[j] for i in list( + torch.randperm( + len(indices) // self.samples_per_gpu, generator=g)) + for j in range(i * self.samples_per_gpu, (i + 1) * + self.samples_per_gpu) + ] + + # subsample + offset = self.num_samples * self.rank + indices = indices[offset:offset + self.num_samples] + assert len(indices) == self.num_samples + + return iter(indices) + + def __len__(self): + return self.num_samples + + def set_epoch(self, epoch): + self.epoch = epoch diff --git a/thirdparty/mmdetection/mmdet/datasets/utils.py b/thirdparty/mmdetection/mmdet/datasets/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..04a4c4c926a0ea82adf807d2614e62cc29a78df9 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/utils.py @@ -0,0 +1,62 @@ +import copy +import warnings + + +def replace_ImageToTensor(pipelines): + """Replace the ImageToTensor transform in a data pipeline to + DefaultFormatBundle, which is normally useful in batch inference. + + Args: + pipelines (list[dict]): Data pipeline configs. + + Returns: + list: The new pipeline list with all ImageToTensor replaced by + DefaultFormatBundle. + + Examples: + >>> pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict( + ... type='MultiScaleFlipAug', + ... img_scale=(1333, 800), + ... flip=False, + ... transforms=[ + ... dict(type='Resize', keep_ratio=True), + ... dict(type='RandomFlip'), + ... dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]), + ... dict(type='Pad', size_divisor=32), + ... dict(type='ImageToTensor', keys=['img']), + ... dict(type='Collect', keys=['img']), + ... ]) + ... ] + >>> expected_pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict( + ... type='MultiScaleFlipAug', + ... img_scale=(1333, 800), + ... flip=False, + ... transforms=[ + ... dict(type='Resize', keep_ratio=True), + ... dict(type='RandomFlip'), + ... dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]), + ... dict(type='Pad', size_divisor=32), + ... dict(type='DefaultFormatBundle'), + ... dict(type='Collect', keys=['img']), + ... ]) + ... ] + >>> assert expected_pipelines == replace_ImageToTensor(pipelines) + """ + pipelines = copy.deepcopy(pipelines) + for i, pipeline in enumerate(pipelines): + if pipeline['type'] == 'MultiScaleFlipAug': + assert 'transforms' in pipeline + pipeline['transforms'] = replace_ImageToTensor( + pipeline['transforms']) + elif pipeline['type'] == 'ImageToTensor': + warnings.warn( + '"ImageToTensor" pipeline is replaced by ' + '"DefaultFormatBundle" for batch inference. 
It is ' + 'recommended to manually replace it in the test ' + 'data pipeline in your config file.', UserWarning) + pipelines[i] = {'type': 'DefaultFormatBundle'} + return pipelines diff --git a/thirdparty/mmdetection/mmdet/datasets/voc.py b/thirdparty/mmdetection/mmdet/datasets/voc.py new file mode 100644 index 0000000000000000000000000000000000000000..4d1e861ea09d67739512991ba0166c93cd2b7d55 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/voc.py @@ -0,0 +1,89 @@ +from collections import OrderedDict + +from mmdet.core import eval_map, eval_recalls +from .builder import DATASETS +from .xml_style import XMLDataset + + +@DATASETS.register_module() +class VOCDataset(XMLDataset): + + CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', + 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', + 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', + 'tvmonitor') + + def __init__(self, **kwargs): + super(VOCDataset, self).__init__(**kwargs) + if 'VOC2007' in self.img_prefix: + self.year = 2007 + elif 'VOC2012' in self.img_prefix: + self.year = 2012 + else: + raise ValueError('Cannot infer dataset year from img_prefix') + + def evaluate(self, + results, + metric='mAP', + logger=None, + proposal_nums=(100, 300, 1000), + iou_thr=0.5, + scale_ranges=None): + """Evaluate in VOC protocol. + + Args: + results (list[list | tuple]): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. Options are + 'mAP', 'recall'. + logger (logging.Logger | str, optional): Logger used for printing + related information during evaluation. Default: None. + proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thr (float | list[float]): IoU threshold. It must be a float + when evaluating mAP, and can be a list when evaluating recall. + Default: 0.5. + scale_ranges (list[tuple], optional): Scale ranges for evaluating + mAP. If not specified, all bounding boxes would be included in + evaluation. Default: None. + + Returns: + dict[str, float]: AP/recall metrics. 
+ """ + + if not isinstance(metric, str): + assert len(metric) == 1 + metric = metric[0] + allowed_metrics = ['mAP', 'recall'] + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + annotations = [self.get_ann_info(i) for i in range(len(self))] + eval_results = OrderedDict() + if metric == 'mAP': + assert isinstance(iou_thr, float) + if self.year == 2007: + ds_name = 'voc07' + else: + ds_name = self.CLASSES + mean_ap, _ = eval_map( + results, + annotations, + scale_ranges=None, + iou_thr=iou_thr, + dataset=ds_name, + logger=logger) + eval_results['mAP'] = mean_ap + elif metric == 'recall': + gt_bboxes = [ann['bboxes'] for ann in annotations] + if isinstance(iou_thr, float): + iou_thr = [iou_thr] + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thr, logger=logger) + for i, num in enumerate(proposal_nums): + for j, iou in enumerate(iou_thr): + eval_results[f'recall@{num}@{iou}'] = recalls[i, j] + if recalls.shape[1] > 1: + ar = recalls.mean(axis=1) + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + return eval_results diff --git a/thirdparty/mmdetection/mmdet/datasets/wider_face.py b/thirdparty/mmdetection/mmdet/datasets/wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..3a13907db87a9986a7d701837259a0b712fc9dca --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/wider_face.py @@ -0,0 +1,51 @@ +import os.path as osp +import xml.etree.ElementTree as ET + +import mmcv + +from .builder import DATASETS +from .xml_style import XMLDataset + + +@DATASETS.register_module() +class WIDERFaceDataset(XMLDataset): + """Reader for the WIDER Face dataset in PASCAL VOC format. + + Conversion scripts can be found in + https://github.com/sovrasov/wider-face-pascal-voc-annotations + """ + CLASSES = ('face', ) + + def __init__(self, **kwargs): + super(WIDERFaceDataset, self).__init__(**kwargs) + + def load_annotations(self, ann_file): + """Load annotation from WIDERFace XML style annotation file. + + Args: + ann_file (str): Path of XML file. + + Returns: + list[dict]: Annotation info from XML file. + """ + + data_infos = [] + img_ids = mmcv.list_from_file(ann_file) + for img_id in img_ids: + filename = f'{img_id}.jpg' + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + size = root.find('size') + width = int(size.find('width').text) + height = int(size.find('height').text) + folder = root.find('folder').text + data_infos.append( + dict( + id=img_id, + filename=osp.join(folder, filename), + width=width, + height=height)) + + return data_infos diff --git a/thirdparty/mmdetection/mmdet/datasets/xml_style.py b/thirdparty/mmdetection/mmdet/datasets/xml_style.py new file mode 100644 index 0000000000000000000000000000000000000000..b912de38d12d1e146e34eac61ff2e09c4a989706 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/datasets/xml_style.py @@ -0,0 +1,169 @@ +import os.path as osp +import xml.etree.ElementTree as ET + +import mmcv +import numpy as np +from PIL import Image + +from .builder import DATASETS +from .custom import CustomDataset + + +@DATASETS.register_module() +class XMLDataset(CustomDataset): + """XML dataset for detection. + + Args: + min_size (int | float, optional): The minimum size of bounding + boxes in the images. If the size of a bounding box is less than + ``min_size``, it would be add to ignored field. 
+ """ + + def __init__(self, min_size=None, **kwargs): + super(XMLDataset, self).__init__(**kwargs) + self.cat2label = {cat: i for i, cat in enumerate(self.CLASSES)} + self.min_size = min_size + + def load_annotations(self, ann_file): + """Load annotation from XML style ann_file. + + Args: + ann_file (str): Path of XML file. + + Returns: + list[dict]: Annotation info from XML file. + """ + + data_infos = [] + img_ids = mmcv.list_from_file(ann_file) + for img_id in img_ids: + filename = f'JPEGImages/{img_id}.jpg' + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + size = root.find('size') + width = 0 + height = 0 + if size is not None: + width = int(size.find('width').text) + height = int(size.find('height').text) + else: + img_path = osp.join(self.img_prefix, 'JPEGImages', + '{}.jpg'.format(img_id)) + img = Image.open(img_path) + width, height = img.size + data_infos.append( + dict(id=img_id, filename=filename, width=width, height=height)) + + return data_infos + + def _filter_imgs(self, min_size=32): + """Filter images too small or without annotation.""" + valid_inds = [] + for i, img_info in enumerate(self.data_infos): + if min(img_info['width'], img_info['height']) < min_size: + continue + if self.filter_empty_gt: + img_id = img_info['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + for obj in root.findall('object'): + name = obj.find('name').text + if name in self.CLASSES: + valid_inds.append(i) + break + else: + valid_inds.append(i) + return valid_inds + + def get_ann_info(self, idx): + """Get annotation from XML file by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. + """ + + img_id = self.data_infos[idx]['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + bboxes = [] + labels = [] + bboxes_ignore = [] + labels_ignore = [] + for obj in root.findall('object'): + name = obj.find('name').text + if name not in self.CLASSES: + continue + label = self.cat2label[name] + difficult = int(obj.find('difficult').text) + bnd_box = obj.find('bndbox') + # TODO: check whether it is necessary to use int + # Coordinates may be float type + bbox = [ + int(float(bnd_box.find('xmin').text)), + int(float(bnd_box.find('ymin').text)), + int(float(bnd_box.find('xmax').text)), + int(float(bnd_box.find('ymax').text)) + ] + ignore = False + if self.min_size: + assert not self.test_mode + w = bbox[2] - bbox[0] + h = bbox[3] - bbox[1] + if w < self.min_size or h < self.min_size: + ignore = True + if difficult or ignore: + bboxes_ignore.append(bbox) + labels_ignore.append(label) + else: + bboxes.append(bbox) + labels.append(label) + if not bboxes: + bboxes = np.zeros((0, 4)) + labels = np.zeros((0, )) + else: + bboxes = np.array(bboxes, ndmin=2) - 1 + labels = np.array(labels) + if not bboxes_ignore: + bboxes_ignore = np.zeros((0, 4)) + labels_ignore = np.zeros((0, )) + else: + bboxes_ignore = np.array(bboxes_ignore, ndmin=2) - 1 + labels_ignore = np.array(labels_ignore) + ann = dict( + bboxes=bboxes.astype(np.float32), + labels=labels.astype(np.int64), + bboxes_ignore=bboxes_ignore.astype(np.float32), + labels_ignore=labels_ignore.astype(np.int64)) + return ann + + def get_cat_ids(self, idx): + """Get category ids in XML file by index. + + Args: + idx (int): Index of data. 
+ + Returns: + list[int]: All categories in the image of specified index. + """ + + cat_ids = [] + img_id = self.data_infos[idx]['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + for obj in root.findall('object'): + name = obj.find('name').text + if name not in self.CLASSES: + continue + label = self.cat2label[name] + cat_ids.append(label) + + return cat_ids diff --git a/thirdparty/mmdetection/mmdet/models/__init__.py b/thirdparty/mmdetection/mmdet/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..44ac99855ae52101c91be167fa78d8219fc47259 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/__init__.py @@ -0,0 +1,16 @@ +from .backbones import * # noqa: F401,F403 +from .builder import (BACKBONES, DETECTORS, HEADS, LOSSES, NECKS, + ROI_EXTRACTORS, SHARED_HEADS, build_backbone, + build_detector, build_head, build_loss, build_neck, + build_roi_extractor, build_shared_head) +from .dense_heads import * # noqa: F401,F403 +from .detectors import * # noqa: F401,F403 +from .losses import * # noqa: F401,F403 +from .necks import * # noqa: F401,F403 +from .roi_heads import * # noqa: F401,F403 + +__all__ = [ + 'BACKBONES', 'NECKS', 'ROI_EXTRACTORS', 'SHARED_HEADS', 'HEADS', 'LOSSES', + 'DETECTORS', 'build_backbone', 'build_neck', 'build_roi_extractor', + 'build_shared_head', 'build_head', 'build_loss', 'build_detector' +] diff --git a/thirdparty/mmdetection/mmdet/models/backbones/__init__.py b/thirdparty/mmdetection/mmdet/models/backbones/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..873a3c9893b3edf9d0c28ccc201f1df53f5c89f5 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/__init__.py @@ -0,0 +1,17 @@ +from .darknet import Darknet +from .detectors_resnet import DetectoRS_ResNet +from .detectors_resnext import DetectoRS_ResNeXt +from .hourglass import HourglassNet +from .hrnet import HRNet +from .regnet import RegNet +from .res2net import Res2Net +from .resnest import ResNeSt +from .resnet import ResNet, ResNetV1d +from .resnext import ResNeXt +from .ssd_vgg import SSDVGG + +__all__ = [ + 'RegNet', 'ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet', 'Res2Net', + 'HourglassNet', 'DetectoRS_ResNet', 'DetectoRS_ResNeXt', 'Darknet', + 'ResNeSt' +] diff --git a/thirdparty/mmdetection/mmdet/models/backbones/darknet.py b/thirdparty/mmdetection/mmdet/models/backbones/darknet.py new file mode 100644 index 0000000000000000000000000000000000000000..517fe26259217792e0dad80ca3824d914cfe3904 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/darknet.py @@ -0,0 +1,199 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import logging + +import torch.nn as nn +from mmcv.cnn import ConvModule, constant_init, kaiming_init +from mmcv.runner import load_checkpoint +from torch.nn.modules.batchnorm import _BatchNorm + +from ..builder import BACKBONES + + +class ResBlock(nn.Module): + """The basic residual block used in Darknet. Each ResBlock consists of two + ConvModules and the input is added to the final output. Each ConvModule is + composed of Conv, BN, and LeakyReLU. In YoloV3 paper, the first convLayer + has half of the number of the filters as much as the second convLayer. The + first convLayer has filter size of 1x1 and the second one has the filter + size of 3x3. + + Args: + in_channels (int): The input channels. Must be even. + conv_cfg (dict): Config dict for convolution layer. Default: None. 
+ norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + """ + + def __init__(self, + in_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)): + super(ResBlock, self).__init__() + assert in_channels % 2 == 0 # ensure the in_channels is even + half_in_channels = in_channels // 2 + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + self.conv1 = ConvModule(in_channels, half_in_channels, 1, **cfg) + self.conv2 = ConvModule( + half_in_channels, in_channels, 3, padding=1, **cfg) + + def forward(self, x): + residual = x + out = self.conv1(x) + out = self.conv2(out) + out = out + residual + + return out + + +@BACKBONES.register_module() +class Darknet(nn.Module): + """Darknet backbone. + + Args: + depth (int): Depth of Darknet. Currently only support 53. + out_indices (Sequence[int]): Output from which stages. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + + Example: + >>> from mmdet.models import Darknet + >>> import torch + >>> self = Darknet(depth=53) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 416, 416) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + ... 
+ (1, 256, 52, 52) + (1, 512, 26, 26) + (1, 1024, 13, 13) + """ + + # Dict(depth: (layers, channels)) + arch_settings = { + 53: ((1, 2, 8, 8, 4), ((32, 64), (64, 128), (128, 256), (256, 512), + (512, 1024))) + } + + def __init__(self, + depth=53, + out_indices=(3, 4, 5), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + norm_eval=True): + super(Darknet, self).__init__() + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for darknet') + self.depth = depth + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.layers, self.channels = self.arch_settings[depth] + + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + self.conv1 = ConvModule(3, 32, 3, padding=1, **cfg) + + self.cr_blocks = ['conv1'] + for i, n_layers in enumerate(self.layers): + layer_name = f'conv_res_block{i + 1}' + in_c, out_c = self.channels[i] + self.add_module( + layer_name, + self.make_conv_res_block(in_c, out_c, n_layers, **cfg)) + self.cr_blocks.append(layer_name) + + self.norm_eval = norm_eval + + def forward(self, x): + outs = [] + for i, layer_name in enumerate(self.cr_blocks): + cr_block = getattr(self, layer_name) + x = cr_block(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def init_weights(self, pretrained=None): + if isinstance(pretrained, str): + logger = logging.getLogger() + load_checkpoint(self, pretrained, strict=False, logger=logger) + elif pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, (_BatchNorm, nn.GroupNorm)): + constant_init(m, 1) + + else: + raise TypeError('pretrained must be a str or None') + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for i in range(self.frozen_stages): + m = getattr(self, self.cr_blocks[i]) + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(Darknet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + @staticmethod + def make_conv_res_block(in_channels, + out_channels, + res_repeat, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', + negative_slope=0.1)): + """In Darknet backbone, ConvLayer is usually followed by ResBlock. This + function will make that. The Conv layers always have 3x3 filters with + stride=2. The number of the filters in Conv layer is the same as the + out channels of the ResBlock. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + res_repeat (int): The number of ResBlocks. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). 
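`make_conv_res_block` above builds each Darknet stage as one stride-2 3x3 conv followed by `res_repeat` residual blocks. For illustration, here is the same structure written in plain PyTorch (Conv-BN-LeakyReLU spelled out, since `ConvModule` comes from mmcv; the helper names are illustrative):

```python
import torch
import torch.nn as nn

def conv_bn_leaky(c_in, c_out, k, stride=1, padding=0):
    return nn.Sequential(
        nn.Conv2d(c_in, c_out, k, stride=stride, padding=padding, bias=False),
        nn.BatchNorm2d(c_out),
        nn.LeakyReLU(0.1, inplace=True))

class PlainResBlock(nn.Module):
    # 1x1 conv halves the channels, 3x3 conv restores them, then add
    def __init__(self, channels):
        super().__init__()
        self.conv1 = conv_bn_leaky(channels, channels // 2, 1)
        self.conv2 = conv_bn_leaky(channels // 2, channels, 3, padding=1)

    def forward(self, x):
        return x + self.conv2(self.conv1(x))

stage = nn.Sequential(conv_bn_leaky(32, 64, 3, stride=2, padding=1),
                      *[PlainResBlock(64) for _ in range(2)])
print(stage(torch.rand(1, 32, 64, 64)).shape)  # torch.Size([1, 64, 32, 32])
```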
+ """ + + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + model = nn.Sequential() + model.add_module( + 'conv', + ConvModule( + in_channels, out_channels, 3, stride=2, padding=1, **cfg)) + for idx in range(res_repeat): + model.add_module('res{}'.format(idx), + ResBlock(out_channels, **cfg)) + return model diff --git a/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnet.py b/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..324e737ded5858dddd074539a67aacf25dc0c77f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnet.py @@ -0,0 +1,305 @@ +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer, constant_init + +from ..builder import BACKBONES +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNet + + +class Bottleneck(_Bottleneck): + r"""Bottleneck for the ResNet backbone in `DetectoRS + `_. + + This bottleneck allows the users to specify whether to use + SAC (Switchable Atrous Convolution) and RFP (Recursive Feature Pyramid). + + Args: + inplanes (int): The number of input channels. + planes (int): The number of output channels before expansion. + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. + sac (dict, optional): Dictionary to construct SAC. Default: None. + """ + expansion = 4 + + def __init__(self, + inplanes, + planes, + rfp_inplanes=None, + sac=None, + **kwargs): + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + assert sac is None or isinstance(sac, dict) + self.sac = sac + self.with_sac = sac is not None + if self.with_sac: + self.conv2 = build_conv_layer( + self.sac, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False) + + self.rfp_inplanes = rfp_inplanes + if self.rfp_inplanes: + self.rfp_conv = build_conv_layer( + None, + self.rfp_inplanes, + planes * self.expansion, + 1, + stride=1, + bias=True) + self.init_weights() + + def init_weights(self): + """Initialize the weights.""" + if self.rfp_inplanes: + constant_init(self.rfp_conv, 0) + + def rfp_forward(self, x, rfp_feat): + """The forward function that also takes the RFP features as input.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + out = self.conv2(out) + out = self.norm2(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + if self.rfp_inplanes: + rfp_feat = self.rfp_conv(rfp_feat) + out = out + rfp_feat + + out = self.relu(out) + + return out + + +class ResLayer(nn.Sequential): + """ResLayer to build ResNet style backbone for RPF in detectoRS. + + The difference between this module and base class is that we pass + ``rfp_inplanes`` to the first block. 
+ + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + downsample_first (bool): Downsample at the first block or last block. + False for Hourglass, True for ResNet. Default: True + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. + """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + downsample_first=True, + rfp_inplanes=None, + **kwargs): + self.block = block + assert downsample_first, f'downsampel_first={downsample_first} is ' \ + 'not supported in DetectoRS' + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = [] + conv_stride = stride + if avg_down and stride != 1: + conv_stride = 1 + downsample.append( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False)) + downsample.extend([ + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + rfp_inplanes=rfp_inplanes, + **kwargs)) + inplanes = planes * block.expansion + for _ in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + + super(ResLayer, self).__init__(*layers) + + +@BACKBONES.register_module() +class DetectoRS_ResNet(ResNet): + """ResNet backbone for DetectoRS. + + Args: + sac (dict, optional): Dictionary to construct SAC (Switchable Atrous + Convolution). Default: None. + stage_with_sac (list): Which stage to use sac. Default: (False, False, + False, False). + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. + output_img (bool): If ``True``, the input image will be inserted into + the starting position of output. Default: False. + pretrained (str, optional): The pretrained model to load. 
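With `avg_down=True`, the shortcut built in `ResLayer` above downsamples with a stride-`s` average pool followed by a stride-1 1x1 projection, instead of a strided 1x1 conv that reads only one pixel per stride window. A plain-PyTorch sketch of the two shortcut variants (channel counts are hypothetical):

```python
import torch
import torch.nn as nn

inplanes, planes_exp, stride = 256, 512, 2

# default shortcut: strided 1x1 conv (samples 1 pixel per stride window)
plain = nn.Sequential(
    nn.Conv2d(inplanes, planes_exp, 1, stride=stride, bias=False),
    nn.BatchNorm2d(planes_exp))

# avg_down shortcut: pool first, then a stride-1 projection
avg_down = nn.Sequential(
    nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True,
                 count_include_pad=False),
    nn.Conv2d(inplanes, planes_exp, 1, stride=1, bias=False),
    nn.BatchNorm2d(planes_exp))

x = torch.rand(1, inplanes, 64, 64)
print(plain(x).shape, avg_down(x).shape)  # both torch.Size([1, 512, 32, 32])
```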
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + sac=None, + stage_with_sac=(False, False, False, False), + rfp_inplanes=None, + output_img=False, + pretrained=None, + **kwargs): + self.sac = sac + self.stage_with_sac = stage_with_sac + self.rfp_inplanes = rfp_inplanes + self.output_img = output_img + self.pretrained = pretrained + super(DetectoRS_ResNet, self).__init__(**kwargs) + + self.inplanes = self.stem_channels + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = self.strides[i] + dilation = self.dilations[i] + dcn = self.dcn if self.stage_with_dcn[i] else None + sac = self.sac if self.stage_with_sac[i] else None + if self.plugins is not None: + stage_plugins = self.make_stage_plugins(self.plugins, i) + else: + stage_plugins = None + planes = self.base_channels * 2**i + res_layer = self.make_res_layer( + block=self.block, + inplanes=self.inplanes, + planes=planes, + num_blocks=num_blocks, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=dcn, + sac=sac, + rfp_inplanes=rfp_inplanes if i > 0 else None, + plugins=stage_plugins) + self.inplanes = planes * self.block.expansion + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer`` for DetectoRS.""" + return ResLayer(**kwargs) + + def forward(self, x): + """Forward function.""" + outs = list(super(DetectoRS_ResNet, self).forward(x)) + if self.output_img: + outs.insert(0, x) + return tuple(outs) + + def rfp_forward(self, x, rfp_feats): + """Forward function for RFP.""" + if self.deep_stem: + x = self.stem(x) + else: + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + x = self.maxpool(x) + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + rfp_feat = rfp_feats[i] if i > 0 else None + for layer in res_layer: + x = layer.rfp_forward(x, rfp_feat) + if i in self.out_indices: + outs.append(x) + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnext.py b/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..57d032fe37ed82d5ba24e761bdc014cc0ee5ac64 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/detectors_resnext.py @@ -0,0 +1,122 @@ +import math + +from mmcv.cnn import build_conv_layer, build_norm_layer + +from ..builder import BACKBONES +from .detectors_resnet import Bottleneck as _Bottleneck +from .detectors_resnet import DetectoRS_ResNet + + +class Bottleneck(_Bottleneck): + expansion = 4 + + def __init__(self, + inplanes, + planes, + groups=1, + base_width=4, + base_channels=64, + **kwargs): + """Bottleneck block for ResNeXt. + + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. 
+ """ + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, width, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + self.with_modulated_dcn = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if self.with_sac: + self.conv2 = build_conv_layer( + self.sac, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + elif not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + +@BACKBONES.register_module() +class DetectoRS_ResNeXt(DetectoRS_ResNet): + """ResNeXt backbone for DetectoRS. + + Args: + groups (int): The number of groups in ResNeXt. + base_width (int): The base width of ResNeXt. + """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, groups=1, base_width=4, **kwargs): + self.groups = groups + self.base_width = base_width + super(DetectoRS_ResNeXt, self).__init__(**kwargs) + + def make_res_layer(self, **kwargs): + return super().make_res_layer( + groups=self.groups, + base_width=self.base_width, + base_channels=self.base_channels, + **kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/backbones/hourglass.py b/thirdparty/mmdetection/mmdet/models/backbones/hourglass.py new file mode 100644 index 0000000000000000000000000000000000000000..3422acee35e3c6f8731cdb310f188e671b5be12f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/hourglass.py @@ -0,0 +1,198 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule + +from ..builder import BACKBONES +from ..utils import ResLayer +from .resnet import BasicBlock + + +class HourglassModule(nn.Module): + """Hourglass Module for HourglassNet backbone. + + Generate module recursively and use BasicBlock as the base unit. + + Args: + depth (int): Depth of current HourglassModule. + stage_channels (list[int]): Feature channels of sub-modules in current + and follow-up HourglassModule. + stage_blocks (list[int]): Number of sub-modules stacked in current and + follow-up HourglassModule. + norm_cfg (dict): Dictionary to construct and config norm layer. 
+ """ + + def __init__(self, + depth, + stage_channels, + stage_blocks, + norm_cfg=dict(type='BN', requires_grad=True)): + super(HourglassModule, self).__init__() + + self.depth = depth + + cur_block = stage_blocks[0] + next_block = stage_blocks[1] + + cur_channel = stage_channels[0] + next_channel = stage_channels[1] + + self.up1 = ResLayer( + BasicBlock, cur_channel, cur_channel, cur_block, norm_cfg=norm_cfg) + + self.low1 = ResLayer( + BasicBlock, + cur_channel, + next_channel, + cur_block, + stride=2, + norm_cfg=norm_cfg) + + if self.depth > 1: + self.low2 = HourglassModule(depth - 1, stage_channels[1:], + stage_blocks[1:]) + else: + self.low2 = ResLayer( + BasicBlock, + next_channel, + next_channel, + next_block, + norm_cfg=norm_cfg) + + self.low3 = ResLayer( + BasicBlock, + next_channel, + cur_channel, + cur_block, + norm_cfg=norm_cfg, + downsample_first=False) + + self.up2 = nn.Upsample(scale_factor=2) + + def forward(self, x): + """Forward function.""" + up1 = self.up1(x) + low1 = self.low1(x) + low2 = self.low2(low1) + low3 = self.low3(low2) + up2 = self.up2(low3) + return up1 + up2 + + +@BACKBONES.register_module() +class HourglassNet(nn.Module): + """HourglassNet backbone. + + Stacked Hourglass Networks for Human Pose Estimation. + More details can be found in the `paper + `_ . + + Args: + downsample_times (int): Downsample times in a HourglassModule. + num_stacks (int): Number of HourglassModule modules stacked, + 1 for Hourglass-52, 2 for Hourglass-104. + stage_channels (list[int]): Feature channel of each sub-module in a + HourglassModule. + stage_blocks (list[int]): Number of sub-modules stacked in a + HourglassModule. + feat_channel (int): Feature channel of conv after a HourglassModule. + norm_cfg (dict): Dictionary to construct and config norm layer. + + Example: + >>> from mmdet.models import HourglassNet + >>> import torch + >>> self = HourglassNet() + >>> self.eval() + >>> inputs = torch.rand(1, 3, 511, 511) + >>> level_outputs = self.forward(inputs) + >>> for level_output in level_outputs: + ... 
print(tuple(level_output.shape)) + (1, 256, 128, 128) + (1, 256, 128, 128) + """ + + def __init__(self, + downsample_times=5, + num_stacks=2, + stage_channels=(256, 256, 384, 384, 384, 512), + stage_blocks=(2, 2, 2, 2, 2, 4), + feat_channel=256, + norm_cfg=dict(type='BN', requires_grad=True)): + super(HourglassNet, self).__init__() + + self.num_stacks = num_stacks + assert self.num_stacks >= 1 + assert len(stage_channels) == len(stage_blocks) + assert len(stage_channels) > downsample_times + + cur_channel = stage_channels[0] + + self.stem = nn.Sequential( + ConvModule(3, 128, 7, padding=3, stride=2, norm_cfg=norm_cfg), + ResLayer(BasicBlock, 128, 256, 1, stride=2, norm_cfg=norm_cfg)) + + self.hourglass_modules = nn.ModuleList([ + HourglassModule(downsample_times, stage_channels, stage_blocks) + for _ in range(num_stacks) + ]) + + self.inters = ResLayer( + BasicBlock, + cur_channel, + cur_channel, + num_stacks - 1, + norm_cfg=norm_cfg) + + self.conv1x1s = nn.ModuleList([ + ConvModule( + cur_channel, cur_channel, 1, norm_cfg=norm_cfg, act_cfg=None) + for _ in range(num_stacks - 1) + ]) + + self.out_convs = nn.ModuleList([ + ConvModule( + cur_channel, feat_channel, 3, padding=1, norm_cfg=norm_cfg) + for _ in range(num_stacks) + ]) + + self.remap_convs = nn.ModuleList([ + ConvModule( + feat_channel, cur_channel, 1, norm_cfg=norm_cfg, act_cfg=None) + for _ in range(num_stacks - 1) + ]) + + self.relu = nn.ReLU(inplace=True) + + def init_weights(self, pretrained=None): + """Init module weights. + + We do nothing in this function because all modules we used + (ConvModule, BasicBlock and etc.) have default initialization, and + currently we don't provide pretrained model of HourglassNet. + + Detector's __init__() will call backbone's init_weights() with + pretrained as input, so we keep this function. + """ + # Training Centripetal Model needs to reset parameters for Conv2d + for m in self.modules(): + if isinstance(m, nn.Conv2d): + m.reset_parameters() + + def forward(self, x): + """Forward function.""" + inter_feat = self.stem(x) + out_feats = [] + + for ind in range(self.num_stacks): + single_hourglass = self.hourglass_modules[ind] + out_conv = self.out_convs[ind] + + hourglass_feat = single_hourglass(inter_feat) + out_feat = out_conv(hourglass_feat) + out_feats.append(out_feat) + + if ind < self.num_stacks - 1: + inter_feat = self.conv1x1s[ind]( + inter_feat) + self.remap_convs[ind]( + out_feat) + inter_feat = self.inters[ind](self.relu(inter_feat)) + + return out_feats diff --git a/thirdparty/mmdetection/mmdet/models/backbones/hrnet.py b/thirdparty/mmdetection/mmdet/models/backbones/hrnet.py new file mode 100644 index 0000000000000000000000000000000000000000..1ecc79f125f7e95fb5518c9c654b47ef40a81010 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/hrnet.py @@ -0,0 +1,537 @@ +import torch.nn as nn +from mmcv.cnn import (build_conv_layer, build_norm_layer, constant_init, + kaiming_init) +from mmcv.runner import load_checkpoint +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.utils import get_root_logger +from ..builder import BACKBONES +from .resnet import BasicBlock, Bottleneck + + +class HRModule(nn.Module): + """High-Resolution Module for HRNet. + + In this module, every branch has 4 BasicBlocks/Bottlenecks. Fusion/Exchange + is in this module. 
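The fusion the `HRModule` docstring above refers to exchanges information across branches: a lower-resolution branch `j` is fused into a higher-resolution branch `i` via a 1x1 conv plus nearest-neighbour upsampling with `scale_factor=2**(j - i)`, while the reverse direction stacks stride-2 3x3 convs. A plain-PyTorch sketch of one upsampling fuse path (channel counts are hypothetical):

```python
import torch
import torch.nn as nn

in_channels = [32, 64]  # branch 0 (high res), branch 1 (low res)

# fuse branch j=1 into branch i=0: 1x1 conv, then upsample by 2**(1-0)
up_path = nn.Sequential(
    nn.Conv2d(in_channels[1], in_channels[0], 1, bias=False),
    nn.BatchNorm2d(in_channels[0]),
    nn.Upsample(scale_factor=2, mode='nearest'))

x1 = torch.rand(1, 64, 16, 16)   # low-res branch feature
print(up_path(x1).shape)         # torch.Size([1, 32, 32, 32])
```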
+ """ + + def __init__(self, + num_branches, + blocks, + num_blocks, + in_channels, + num_channels, + multiscale_output=True, + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN')): + super(HRModule, self).__init__() + self._check_branches(num_branches, num_blocks, in_channels, + num_channels) + + self.in_channels = in_channels + self.num_branches = num_branches + + self.multiscale_output = multiscale_output + self.norm_cfg = norm_cfg + self.conv_cfg = conv_cfg + self.with_cp = with_cp + self.branches = self._make_branches(num_branches, blocks, num_blocks, + num_channels) + self.fuse_layers = self._make_fuse_layers() + self.relu = nn.ReLU(inplace=False) + + def _check_branches(self, num_branches, num_blocks, in_channels, + num_channels): + if num_branches != len(num_blocks): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_BLOCKS({len(num_blocks)})' + raise ValueError(error_msg) + + if num_branches != len(num_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_CHANNELS({len(num_channels)})' + raise ValueError(error_msg) + + if num_branches != len(in_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_INCHANNELS({len(in_channels)})' + raise ValueError(error_msg) + + def _make_one_branch(self, + branch_index, + block, + num_blocks, + num_channels, + stride=1): + downsample = None + if stride != 1 or \ + self.in_channels[branch_index] != \ + num_channels[branch_index] * block.expansion: + downsample = nn.Sequential( + build_conv_layer( + self.conv_cfg, + self.in_channels[branch_index], + num_channels[branch_index] * block.expansion, + kernel_size=1, + stride=stride, + bias=False), + build_norm_layer(self.norm_cfg, num_channels[branch_index] * + block.expansion)[1]) + + layers = [] + layers.append( + block( + self.in_channels[branch_index], + num_channels[branch_index], + stride, + downsample=downsample, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg)) + self.in_channels[branch_index] = \ + num_channels[branch_index] * block.expansion + for i in range(1, num_blocks[branch_index]): + layers.append( + block( + self.in_channels[branch_index], + num_channels[branch_index], + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg)) + + return nn.Sequential(*layers) + + def _make_branches(self, num_branches, block, num_blocks, num_channels): + branches = [] + + for i in range(num_branches): + branches.append( + self._make_one_branch(i, block, num_blocks, num_channels)) + + return nn.ModuleList(branches) + + def _make_fuse_layers(self): + if self.num_branches == 1: + return None + + num_branches = self.num_branches + in_channels = self.in_channels + fuse_layers = [] + num_out_branches = num_branches if self.multiscale_output else 1 + for i in range(num_out_branches): + fuse_layer = [] + for j in range(num_branches): + if j > i: + fuse_layer.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=1, + stride=1, + padding=0, + bias=False), + build_norm_layer(self.norm_cfg, in_channels[i])[1], + nn.Upsample( + scale_factor=2**(j - i), mode='nearest'))) + elif j == i: + fuse_layer.append(None) + else: + conv_downsamples = [] + for k in range(i - j): + if k == i - j - 1: + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[i])[1])) + else: + conv_downsamples.append( + nn.Sequential( + 
build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[j], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[j])[1], + nn.ReLU(inplace=False))) + fuse_layer.append(nn.Sequential(*conv_downsamples)) + fuse_layers.append(nn.ModuleList(fuse_layer)) + + return nn.ModuleList(fuse_layers) + + def forward(self, x): + """Forward function.""" + if self.num_branches == 1: + return [self.branches[0](x[0])] + + for i in range(self.num_branches): + x[i] = self.branches[i](x[i]) + + x_fuse = [] + for i in range(len(self.fuse_layers)): + y = 0 + for j in range(self.num_branches): + if i == j: + y += x[j] + else: + y += self.fuse_layers[i][j](x[j]) + x_fuse.append(self.relu(y)) + return x_fuse + + +@BACKBONES.register_module() +class HRNet(nn.Module): + """HRNet backbone. + + High-Resolution Representations for Labeling Pixels and Regions + arXiv: https://arxiv.org/abs/1904.04514 + + Args: + extra (dict): detailed configuration for each stage of HRNet. + in_channels (int): Number of input image channels. Default: 3. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. + + Example: + >>> from mmdet.models import HRNet + >>> import torch + >>> extra = dict( + >>> stage1=dict( + >>> num_modules=1, + >>> num_branches=1, + >>> block='BOTTLENECK', + >>> num_blocks=(4, ), + >>> num_channels=(64, )), + >>> stage2=dict( + >>> num_modules=1, + >>> num_branches=2, + >>> block='BASIC', + >>> num_blocks=(4, 4), + >>> num_channels=(32, 64)), + >>> stage3=dict( + >>> num_modules=4, + >>> num_branches=3, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4), + >>> num_channels=(32, 64, 128)), + >>> stage4=dict( + >>> num_modules=3, + >>> num_branches=4, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4, 4), + >>> num_channels=(32, 64, 128, 256))) + >>> self = HRNet(extra, in_channels=1) + >>> self.eval() + >>> inputs = torch.rand(1, 1, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... 
print(tuple(level_out.shape)) + (1, 32, 8, 8) + (1, 64, 4, 4) + (1, 128, 2, 2) + (1, 256, 1, 1) + """ + + blocks_dict = {'BASIC': BasicBlock, 'BOTTLENECK': Bottleneck} + + def __init__(self, + extra, + in_channels=3, + conv_cfg=None, + norm_cfg=dict(type='BN'), + norm_eval=True, + with_cp=False, + zero_init_residual=False): + super(HRNet, self).__init__() + self.extra = extra + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + self.zero_init_residual = zero_init_residual + + # stem net + self.norm1_name, norm1 = build_norm_layer(self.norm_cfg, 64, postfix=1) + self.norm2_name, norm2 = build_norm_layer(self.norm_cfg, 64, postfix=2) + + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + 64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + self.conv_cfg, + 64, + 64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.relu = nn.ReLU(inplace=True) + + # stage 1 + self.stage1_cfg = self.extra['stage1'] + num_channels = self.stage1_cfg['num_channels'][0] + block_type = self.stage1_cfg['block'] + num_blocks = self.stage1_cfg['num_blocks'][0] + + block = self.blocks_dict[block_type] + stage1_out_channels = num_channels * block.expansion + self.layer1 = self._make_layer(block, 64, num_channels, num_blocks) + + # stage 2 + self.stage2_cfg = self.extra['stage2'] + num_channels = self.stage2_cfg['num_channels'] + block_type = self.stage2_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition1 = self._make_transition_layer([stage1_out_channels], + num_channels) + self.stage2, pre_stage_channels = self._make_stage( + self.stage2_cfg, num_channels) + + # stage 3 + self.stage3_cfg = self.extra['stage3'] + num_channels = self.stage3_cfg['num_channels'] + block_type = self.stage3_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition2 = self._make_transition_layer(pre_stage_channels, + num_channels) + self.stage3, pre_stage_channels = self._make_stage( + self.stage3_cfg, num_channels) + + # stage 4 + self.stage4_cfg = self.extra['stage4'] + num_channels = self.stage4_cfg['num_channels'] + block_type = self.stage4_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition3 = self._make_transition_layer(pre_stage_channels, + num_channels) + self.stage4, pre_stage_channels = self._make_stage( + self.stage4_cfg, num_channels) + + @property + def norm1(self): + """nn.Module: the normalization layer named "norm1" """ + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: the normalization layer named "norm2" """ + return getattr(self, self.norm2_name) + + def _make_transition_layer(self, num_channels_pre_layer, + num_channels_cur_layer): + num_branches_cur = len(num_channels_cur_layer) + num_branches_pre = len(num_channels_pre_layer) + + transition_layers = [] + for i in range(num_branches_cur): + if i < num_branches_pre: + if num_channels_cur_layer[i] != num_channels_pre_layer[i]: + transition_layers.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + num_channels_pre_layer[i], + num_channels_cur_layer[i], + kernel_size=3, + stride=1, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + 
num_channels_cur_layer[i])[1], + nn.ReLU(inplace=True))) + else: + transition_layers.append(None) + else: + conv_downsamples = [] + for j in range(i + 1 - num_branches_pre): + in_channels = num_channels_pre_layer[-1] + out_channels = num_channels_cur_layer[i] \ + if j == i - num_branches_pre else in_channels + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels, + out_channels, + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, out_channels)[1], + nn.ReLU(inplace=True))) + transition_layers.append(nn.Sequential(*conv_downsamples)) + + return nn.ModuleList(transition_layers) + + def _make_layer(self, block, inplanes, planes, blocks, stride=1): + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = nn.Sequential( + build_conv_layer( + self.conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False), + build_norm_layer(self.norm_cfg, planes * block.expansion)[1]) + + layers = [] + layers.append( + block( + inplanes, + planes, + stride, + downsample=downsample, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg)) + inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append( + block( + inplanes, + planes, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg)) + + return nn.Sequential(*layers) + + def _make_stage(self, layer_config, in_channels, multiscale_output=True): + num_modules = layer_config['num_modules'] + num_branches = layer_config['num_branches'] + num_blocks = layer_config['num_blocks'] + num_channels = layer_config['num_channels'] + block = self.blocks_dict[layer_config['block']] + + hr_modules = [] + for i in range(num_modules): + # multi_scale_output is only used for the last module + if not multiscale_output and i == num_modules - 1: + reset_multiscale_output = False + else: + reset_multiscale_output = True + + hr_modules.append( + HRModule( + num_branches, + block, + num_blocks, + in_channels, + num_channels, + reset_multiscale_output, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg)) + + return nn.Sequential(*hr_modules), in_channels + + def init_weights(self, pretrained=None): + """Initialize the weights in backbone. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. 
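The transition and fuse layers built above follow one rule per branch pair. A small stand-alone sketch of the fusion rule (plain `torch.nn` instead of the mmcv builders; `fuse_path` is a hypothetical helper, not part of mmdet): to align source branch `j` to target branch `i`, HRNet uses a 1x1 conv plus nearest upsampling when `j > i`, and a chain of stride-2 3x3 convs when `j < i`, where only the last conv in the chain changes the channel count.

```python
import torch
import torch.nn as nn

def fuse_path(j, i, channels):
    """Op that maps branch j (resolution 1/2**j) onto branch i's shape."""
    if j == i:
        return nn.Identity()
    if j > i:  # lower resolution -> higher: 1x1 conv, then nearest upsample
        return nn.Sequential(
            nn.Conv2d(channels[j], channels[i], 1, bias=False),
            nn.BatchNorm2d(channels[i]),
            nn.Upsample(scale_factor=2**(j - i), mode='nearest'))
    downs = []  # higher resolution -> lower: (i - j) stride-2 3x3 convs
    for k in range(i - j):
        last = (k == i - j - 1)
        out_c = channels[i] if last else channels[j]
        downs += [
            nn.Conv2d(channels[j], out_c, 3, stride=2, padding=1, bias=False),
            nn.BatchNorm2d(out_c)]
        if not last:  # the final conv in the chain carries no ReLU, as above
            downs.append(nn.ReLU(inplace=False))
    return nn.Sequential(*downs)

y = fuse_path(2, 0, [32, 64, 128])(torch.rand(1, 128, 8, 8))
assert y.shape == (1, 32, 32, 32)  # 1/16-res branch lifted to 1/4 res
```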
+        """
+        if isinstance(pretrained, str):
+            logger = get_root_logger()
+            load_checkpoint(self, pretrained, strict=False, logger=logger)
+        elif pretrained is None:
+            for m in self.modules():
+                if isinstance(m, nn.Conv2d):
+                    kaiming_init(m)
+                elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
+                    constant_init(m, 1)
+
+            if self.zero_init_residual:
+                for m in self.modules():
+                    if isinstance(m, Bottleneck):
+                        constant_init(m.norm3, 0)
+                    elif isinstance(m, BasicBlock):
+                        constant_init(m.norm2, 0)
+        else:
+            raise TypeError('pretrained must be a str or None')
+
+    def forward(self, x):
+        """Forward function."""
+        x = self.conv1(x)
+        x = self.norm1(x)
+        x = self.relu(x)
+        x = self.conv2(x)
+        x = self.norm2(x)
+        x = self.relu(x)
+        x = self.layer1(x)
+
+        x_list = []
+        for i in range(self.stage2_cfg['num_branches']):
+            if self.transition1[i] is not None:
+                x_list.append(self.transition1[i](x))
+            else:
+                x_list.append(x)
+        y_list = self.stage2(x_list)
+
+        x_list = []
+        for i in range(self.stage3_cfg['num_branches']):
+            if self.transition2[i] is not None:
+                x_list.append(self.transition2[i](y_list[-1]))
+            else:
+                x_list.append(y_list[i])
+        y_list = self.stage3(x_list)
+
+        x_list = []
+        for i in range(self.stage4_cfg['num_branches']):
+            if self.transition3[i] is not None:
+                x_list.append(self.transition3[i](y_list[-1]))
+            else:
+                x_list.append(y_list[i])
+        y_list = self.stage4(x_list)
+
+        return y_list
+
+    def train(self, mode=True):
+        """Convert the model into training mode while keeping the
+        normalization layers frozen."""
+        super(HRNet, self).train(mode)
+        if mode and self.norm_eval:
+            for m in self.modules():
+                # trick: eval() has an effect on BatchNorm only
+                if isinstance(m, _BatchNorm):
+                    m.eval()
diff --git a/thirdparty/mmdetection/mmdet/models/backbones/regnet.py b/thirdparty/mmdetection/mmdet/models/backbones/regnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..b786a3f8add4456f5d5f9f7660cc30958d966ae1
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/backbones/regnet.py
@@ -0,0 +1,325 @@
+import numpy as np
+import torch.nn as nn
+from mmcv.cnn import build_conv_layer, build_norm_layer
+
+from ..builder import BACKBONES
+from .resnet import ResNet
+from .resnext import Bottleneck
+
+
+@BACKBONES.register_module()
+class RegNet(ResNet):
+    """RegNet backbone.
+
+    More details can be found in `paper
+    <https://arxiv.org/abs/2003.13678>`_ .
+
+    Args:
+        arch (dict): The parameter of RegNets.
+
+            - w0 (int): initial width
+            - wa (float): slope of width
+            - wm (float): quantization parameter to quantize the width
+            - depth (int): depth of the backbone
+            - group_w (int): width of group
+            - bot_mul (float): bottleneck ratio, i.e. expansion of bottleneck.
+        strides (Sequence[int]): Strides of the first block of each stage.
+        base_channels (int): Base channels after stem layer.
+        in_channels (int): Number of input image channels. Default: 3.
+        dilations (Sequence[int]): Dilation of each stage.
+        out_indices (Sequence[int]): Output from which stages.
+        style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
+            layer is the 3x3 conv layer, otherwise the stride-two layer is
+            the first 1x1 conv layer.
+        frozen_stages (int): Stages to be frozen (all param fixed). -1 means
+            not freezing any parameters.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+        norm_eval (bool): Whether to set norm layers to eval mode, namely,
+            freeze running stats (mean and var). Note: Effect on Batch Norm
+            and its variants only.
+        with_cp (bool): Use checkpoint or not.
Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. + + Example: + >>> from mmdet.models import RegNet + >>> import torch + >>> self = RegNet( + arch=dict( + w0=88, + wa=26.31, + wm=2.25, + group_w=48, + depth=25, + bot_mul=1.0)) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 96, 8, 8) + (1, 192, 4, 4) + (1, 432, 2, 2) + (1, 1008, 1, 1) + """ + arch_settings = { + 'regnetx_400mf': + dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22, bot_mul=1.0), + 'regnetx_800mf': + dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16, bot_mul=1.0), + 'regnetx_1.6gf': + dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18, bot_mul=1.0), + 'regnetx_3.2gf': + dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25, bot_mul=1.0), + 'regnetx_4.0gf': + dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23, bot_mul=1.0), + 'regnetx_6.4gf': + dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17, bot_mul=1.0), + 'regnetx_8.0gf': + dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23, bot_mul=1.0), + 'regnetx_12gf': + dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19, bot_mul=1.0), + } + + def __init__(self, + arch, + in_channels=3, + stem_channels=32, + base_channels=32, + strides=(2, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(0, 1, 2, 3), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + dcn=None, + stage_with_dcn=(False, False, False, False), + plugins=None, + with_cp=False, + zero_init_residual=True): + super(ResNet, self).__init__() + + # Generate RegNet parameters first + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'"arch": "{arch}" is not one of the' \ + ' arch_settings' + arch = self.arch_settings[arch] + elif not isinstance(arch, dict): + raise ValueError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + widths, num_stages = self.generate_regnet( + arch['w0'], + arch['wa'], + arch['wm'], + arch['depth'], + ) + # Convert to per stage format + stage_widths, stage_blocks = self.get_stages_from_blocks(widths) + # Generate group widths and bot muls + group_widths = [arch['group_w'] for _ in range(num_stages)] + self.bottleneck_ratio = [arch['bot_mul'] for _ in range(num_stages)] + # Adjust the compatibility of stage_widths and group_widths + stage_widths, group_widths = self.adjust_width_group( + stage_widths, self.bottleneck_ratio, group_widths) + + # Group params by stage + self.stage_widths = stage_widths + self.group_widths = group_widths + self.depth = sum(stage_blocks) + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.dcn = dcn + self.stage_with_dcn = stage_with_dcn + if dcn is not None: + assert len(stage_with_dcn) == num_stages + self.plugins = plugins + 
        self.zero_init_residual = zero_init_residual
+        self.block = Bottleneck
+        expansion_bak = self.block.expansion
+        self.block.expansion = 1
+        self.stage_blocks = stage_blocks[:num_stages]
+
+        self._make_stem_layer(in_channels, stem_channels)
+
+        self.inplanes = stem_channels
+        self.res_layers = []
+        for i, num_blocks in enumerate(self.stage_blocks):
+            stride = self.strides[i]
+            dilation = self.dilations[i]
+            group_width = self.group_widths[i]
+            width = int(round(self.stage_widths[i] * self.bottleneck_ratio[i]))
+            stage_groups = width // group_width
+
+            dcn = self.dcn if self.stage_with_dcn[i] else None
+            if self.plugins is not None:
+                stage_plugins = self.make_stage_plugins(self.plugins, i)
+            else:
+                stage_plugins = None
+
+            res_layer = self.make_res_layer(
+                block=self.block,
+                inplanes=self.inplanes,
+                planes=self.stage_widths[i],
+                num_blocks=num_blocks,
+                stride=stride,
+                dilation=dilation,
+                style=self.style,
+                avg_down=self.avg_down,
+                with_cp=self.with_cp,
+                conv_cfg=self.conv_cfg,
+                norm_cfg=self.norm_cfg,
+                dcn=dcn,
+                plugins=stage_plugins,
+                groups=stage_groups,
+                base_width=group_width,
+                base_channels=self.stage_widths[i])
+            self.inplanes = self.stage_widths[i]
+            layer_name = f'layer{i + 1}'
+            self.add_module(layer_name, res_layer)
+            self.res_layers.append(layer_name)
+
+        self._freeze_stages()
+
+        self.feat_dim = stage_widths[-1]
+        self.block.expansion = expansion_bak
+
+    def _make_stem_layer(self, in_channels, base_channels):
+        self.conv1 = build_conv_layer(
+            self.conv_cfg,
+            in_channels,
+            base_channels,
+            kernel_size=3,
+            stride=2,
+            padding=1,
+            bias=False)
+        self.norm1_name, norm1 = build_norm_layer(
+            self.norm_cfg, base_channels, postfix=1)
+        self.add_module(self.norm1_name, norm1)
+        self.relu = nn.ReLU(inplace=True)
+
+    def generate_regnet(self,
+                        initial_width,
+                        width_slope,
+                        width_parameter,
+                        depth,
+                        divisor=8):
+        """Generates per block width from RegNet parameters.
+
+        Args:
+            initial_width ([int]): Initial width of the backbone
+            width_slope ([float]): Slope of the quantized linear function
+            width_parameter ([int]): Parameter used to quantize the width.
+            depth ([int]): Depth of the backbone.
+            divisor (int, optional): The divisor of channels. Defaults to 8.
+
+        Returns:
+            list, int: return a list of widths of each stage and the number \
+                of stages
+        """
+        assert width_slope >= 0
+        assert initial_width > 0
+        assert width_parameter > 1
+        assert initial_width % divisor == 0
+        widths_cont = np.arange(depth) * width_slope + initial_width
+        ks = np.round(
+            np.log(widths_cont / initial_width) / np.log(width_parameter))
+        widths = initial_width * np.power(width_parameter, ks)
+        widths = np.round(np.divide(widths, divisor)) * divisor
+        num_stages = len(np.unique(widths))
+        widths, widths_cont = widths.astype(int).tolist(), widths_cont.tolist()
+        return widths, num_stages
+
+    @staticmethod
+    def quantize_float(number, divisor):
+        """Converts a float to the closest non-zero int divisible by divisor.
+
+        Args:
+            number (int): Original number to be quantized.
+            divisor (int): Divisor used to quantize the number.
+
+        Returns:
+            int: quantized number that is divisible by divisor.
+        """
+        return int(round(number / divisor) * divisor)
+
+    def adjust_width_group(self, widths, bottleneck_ratio, groups):
+        """Adjusts the compatibility of widths and groups.
+
+        Args:
+            widths (list[int]): Width of each stage.
+            bottleneck_ratio (float): Bottleneck ratio.
+            groups (int): number of groups in each stage
+
+        Returns:
+            tuple(list): The adjusted widths and groups of each stage.
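The width generation above is easier to follow with concrete numbers. Below is a runnable replica (numpy only; the function names are local to this sketch, not the class methods) evaluated with the regnetx_3.2gf parameters; after the group-width adjustment, the per-stage widths it produces agree with the RegNet doctest shown earlier in this file.

```python
import numpy as np

def generate_widths(w0, wa, wm, depth, divisor=8):
    widths_cont = np.arange(depth) * wa + w0               # u_j = w0 + wa * j
    ks = np.round(np.log(widths_cont / w0) / np.log(wm))   # quantized exponents
    widths = np.round(w0 * np.power(wm, ks) / divisor) * divisor
    return widths.astype(int).tolist()

def quantize(n, d):  # mirrors RegNet.quantize_float
    return int(round(n / d) * d)

widths = generate_widths(w0=88, wa=26.31, wm=2.25, depth=25)
stage_widths = sorted(set(widths))                # [88, 200, 448, 1000]
group_w = 48                                      # regnetx_3.2gf group width
adjusted = [quantize(w, min(group_w, w)) for w in stage_widths]
print(adjusted)                                   # [96, 192, 432, 1008]
```

The adjusted values 96/192/432/1008 are exactly the channel counts in the doctest output above, which is the point of `adjust_width_group`: stage widths must be divisible by their group width.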
+ """ + bottleneck_width = [ + int(w * b) for w, b in zip(widths, bottleneck_ratio) + ] + groups = [min(g, w_bot) for g, w_bot in zip(groups, bottleneck_width)] + bottleneck_width = [ + self.quantize_float(w_bot, g) + for w_bot, g in zip(bottleneck_width, groups) + ] + widths = [ + int(w_bot / b) + for w_bot, b in zip(bottleneck_width, bottleneck_ratio) + ] + return widths, groups + + def get_stages_from_blocks(self, widths): + """Gets widths/stage_blocks of network at each stage. + + Args: + widths (list[int]): Width in each stage. + + Returns: + tuple(list): width and depth of each stage + """ + width_diff = [ + width != width_prev + for width, width_prev in zip(widths + [0], [0] + widths) + ] + stage_widths = [ + width for width, diff in zip(widths, width_diff[:-1]) if diff + ] + stage_blocks = np.diff([ + depth for depth, diff in zip(range(len(width_diff)), width_diff) + if diff + ]).tolist() + return stage_widths, stage_blocks + + def forward(self, x): + """Forward function.""" + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + x = res_layer(x) + if i in self.out_indices: + outs.append(x) + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/backbones/res2net.py b/thirdparty/mmdetection/mmdet/models/backbones/res2net.py new file mode 100644 index 0000000000000000000000000000000000000000..7901b7f2fa29741d72328bdbdbf92fc4d5c5f847 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/res2net.py @@ -0,0 +1,351 @@ +import math + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import (build_conv_layer, build_norm_layer, constant_init, + kaiming_init) +from mmcv.runner import load_checkpoint +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.utils import get_root_logger +from ..builder import BACKBONES +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNet + + +class Bottle2neck(_Bottleneck): + expansion = 4 + + def __init__(self, + inplanes, + planes, + scales=4, + base_width=26, + base_channels=64, + stage_type='normal', + **kwargs): + """Bottle2neck block for Res2Net. + + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. + """ + super(Bottle2neck, self).__init__(inplanes, planes, **kwargs) + assert scales > 1, 'Res2Net degenerates to ResNet when scales = 1.' 
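The constructor below builds `scales - 1` parallel 3x3 convs over channel splits of size `width`; the hierarchical connection itself lives in `forward`. Here is a minimal sketch of that split-and-concat pattern for the 'normal' stage type (plain PyTorch tensors; the BN/ReLU between convs is omitted for brevity):

```python
import torch
import torch.nn as nn

scales, width = 4, 26
convs = nn.ModuleList(
    nn.Conv2d(width, width, 3, padding=1) for _ in range(scales - 1))

x = torch.rand(1, scales * width, 8, 8)
spx = torch.split(x, width, dim=1)     # `scales` chunks of `width` channels
sp = convs[0](spx[0])
outs = [sp]
for i in range(1, scales - 1):
    sp = convs[i](sp + spx[i])         # 'normal' stage: add previous output
    outs.append(sp)
outs.append(spx[-1])                   # last split passes through untouched
out = torch.cat(outs, dim=1)
assert out.shape == x.shape            # channel count is preserved
```

Each successive split therefore sees a progressively larger receptive field, which is the multi-scale effect the `scales > 1` assertion guards.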
+ width = int(math.floor(self.planes * (base_width / base_channels))) + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width * scales, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width * scales, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + + if stage_type == 'stage' and self.conv2_stride != 1: + self.pool = nn.AvgPool2d( + kernel_size=3, stride=self.conv2_stride, padding=1) + convs = [] + bns = [] + + fallback_on_stride = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + for i in range(scales - 1): + convs.append( + build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False)) + bns.append( + build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1]) + self.convs = nn.ModuleList(convs) + self.bns = nn.ModuleList(bns) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + for i in range(scales - 1): + convs.append( + build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False)) + bns.append( + build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1]) + self.convs = nn.ModuleList(convs) + self.bns = nn.ModuleList(bns) + + self.conv3 = build_conv_layer( + self.conv_cfg, + width * scales, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.stage_type = stage_type + self.scales = scales + self.width = width + delattr(self, 'conv2') + delattr(self, self.norm2_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + spx = torch.split(out, self.width, 1) + sp = self.convs[0](spx[0].contiguous()) + sp = self.relu(self.bns[0](sp)) + out = sp + for i in range(1, self.scales - 1): + if self.stage_type == 'stage': + sp = spx[i] + else: + sp = sp + spx[i] + sp = self.convs[i](sp.contiguous()) + sp = self.relu(self.bns[i](sp)) + out = torch.cat((out, sp), 1) + + if self.stage_type == 'normal' or self.conv2_stride == 1: + out = torch.cat((out, spx[self.scales - 1]), 1) + elif self.stage_type == 'stage': + out = torch.cat((out, self.pool(spx[self.scales - 1])), 1) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Res2Layer(nn.Sequential): + """Res2Layer to build Res2Net style backbone. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. 
Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + scales (int): Scales used in Res2Net. Default: 4 + base_width (int): Basic width of each scale. Default: 26 + """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + scales=4, + base_width=26, + **kwargs): + self.block = block + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False), + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=1, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1], + ) + + layers = [] + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + stage_type='stage', + **kwargs)) + inplanes = planes * block.expansion + for i in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + **kwargs)) + super(Res2Layer, self).__init__(*layers) + + +@BACKBONES.register_module() +class Res2Net(ResNet): + """Res2Net backbone. + + Args: + scales (int): Scales used in Res2Net. Default: 4 + base_width (int): Basic width of each scale. Default: 26 + depth (int): Depth of res2net, from {50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + num_stages (int): Res2net stages. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. + norm_cfg (dict): Dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + plugins (list[dict]): List of plugins for stages, each dict contains: + + - cfg (dict, required): Cfg dict to build plugin. + - position (str, required): Position inside block to insert + plugin, options are 'after_conv1', 'after_conv2', 'after_conv3'. + - stages (tuple[bool], optional): Stages to apply plugin, length + should be same as 'num_stages'. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. 
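The `avg_down` shortcut mentioned in the Args is the main structural difference from a vanilla ResNet downsample. A small sketch of the shortcut branch Res2Layer builds (plain `torch.nn` stand-ins for the mmcv builders):

```python
import torch
import torch.nn as nn

def avg_down_shortcut(in_c, out_c, stride):
    # AvgPool handles the stride, so the 1x1 conv stays stride-1 and
    # no input pixels are skipped, unlike a strided 1x1 conv.
    return nn.Sequential(
        nn.AvgPool2d(kernel_size=stride, stride=stride,
                     ceil_mode=True, count_include_pad=False),
        nn.Conv2d(in_c, out_c, kernel_size=1, stride=1, bias=False),
        nn.BatchNorm2d(out_c))

y = avg_down_shortcut(64, 256, stride=2)(torch.rand(1, 64, 32, 32))
assert y.shape == (1, 256, 16, 16)
```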
+ + Example: + >>> from mmdet.models import Res2Net + >>> import torch + >>> self = Res2Net(depth=50, scales=4, base_width=26) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 256, 8, 8) + (1, 512, 4, 4) + (1, 1024, 2, 2) + (1, 2048, 1, 1) + """ + + arch_settings = { + 50: (Bottle2neck, (3, 4, 6, 3)), + 101: (Bottle2neck, (3, 4, 23, 3)), + 152: (Bottle2neck, (3, 8, 36, 3)) + } + + def __init__(self, + scales=4, + base_width=26, + style='pytorch', + deep_stem=True, + avg_down=True, + **kwargs): + self.scales = scales + self.base_width = base_width + super(Res2Net, self).__init__( + style='pytorch', deep_stem=True, avg_down=True, **kwargs) + + def make_res_layer(self, **kwargs): + return Res2Layer( + scales=self.scales, + base_width=self.base_width, + base_channels=self.base_channels, + **kwargs) + + def init_weights(self, pretrained=None): + """Initialize the weights in backbone. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + if isinstance(pretrained, str): + logger = get_root_logger() + load_checkpoint(self, pretrained, strict=False, logger=logger) + elif pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, (_BatchNorm, nn.GroupNorm)): + constant_init(m, 1) + + if self.dcn is not None: + for m in self.modules(): + if isinstance(m, Bottle2neck): + # dcn in Res2Net bottle2neck is in ModuleList + for n in m.convs: + if hasattr(n, 'conv_offset'): + constant_init(n.conv_offset, 0) + + if self.zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottle2neck): + constant_init(m.norm3, 0) + else: + raise TypeError('pretrained must be a str or None') diff --git a/thirdparty/mmdetection/mmdet/models/backbones/resnest.py b/thirdparty/mmdetection/mmdet/models/backbones/resnest.py new file mode 100644 index 0000000000000000000000000000000000000000..48e1d8bfa47348a13f0da0b9ecf32354fa270340 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/resnest.py @@ -0,0 +1,317 @@ +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer + +from ..builder import BACKBONES +from ..utils import ResLayer +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNetV1d + + +class RSoftmax(nn.Module): + """Radix Softmax module in ``SplitAttentionConv2d``. + + Args: + radix (int): Radix of input. + groups (int): Groups of input. + """ + + def __init__(self, radix, groups): + super().__init__() + self.radix = radix + self.groups = groups + + def forward(self, x): + batch = x.size(0) + if self.radix > 1: + x = x.view(batch, self.groups, self.radix, -1).transpose(1, 2) + x = F.softmax(x, dim=1) + x = x.reshape(batch, -1) + else: + x = torch.sigmoid(x) + return x + + +class SplitAttentionConv2d(nn.Module): + """Split-Attention Conv2d in ResNeSt. + + Args: + in_channels (int): Number of channels in the input feature map. + channels (int): Number of intermediate channels. + kernel_size (int | tuple[int]): Size of the convolution kernel. + stride (int | tuple[int]): Stride of the convolution. + padding (int | tuple[int]): Zero-padding added to both sides of + dilation (int | tuple[int]): Spacing between kernel elements. + groups (int): Number of blocked connections from input channels to + output channels. 
+        radix (int): Radix of SplitAttentionConv2d. Default: 2
+        reduction_factor (int): Reduction factor of inter_channels. Default: 4.
+        conv_cfg (dict): Config dict for convolution layer. Default: None,
+            which means using conv2d.
+        norm_cfg (dict): Config dict for normalization layer.
+            Default: dict(type='BN').
+        dcn (dict): Config dict for DCN. Default: None.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 channels,
+                 kernel_size,
+                 stride=1,
+                 padding=0,
+                 dilation=1,
+                 groups=1,
+                 radix=2,
+                 reduction_factor=4,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN'),
+                 dcn=None):
+        super(SplitAttentionConv2d, self).__init__()
+        inter_channels = max(in_channels * radix // reduction_factor, 32)
+        self.radix = radix
+        self.groups = groups
+        self.channels = channels
+        self.with_dcn = dcn is not None
+        self.dcn = dcn
+        fallback_on_stride = False
+        if self.with_dcn:
+            fallback_on_stride = self.dcn.pop('fallback_on_stride', False)
+        if self.with_dcn and not fallback_on_stride:
+            assert conv_cfg is None, 'conv_cfg must be None for DCN'
+            conv_cfg = dcn
+        self.conv = build_conv_layer(
+            conv_cfg,
+            in_channels,
+            channels * radix,
+            kernel_size,
+            stride=stride,
+            padding=padding,
+            dilation=dilation,
+            groups=groups * radix,
+            bias=False)
+        # To be consistent with original implementation, starting from 0
+        self.norm0_name, norm0 = build_norm_layer(
+            norm_cfg, channels * radix, postfix=0)
+        self.add_module(self.norm0_name, norm0)
+        self.relu = nn.ReLU(inplace=True)
+        self.fc1 = build_conv_layer(
+            None, channels, inter_channels, 1, groups=self.groups)
+        self.norm1_name, norm1 = build_norm_layer(
+            norm_cfg, inter_channels, postfix=1)
+        self.add_module(self.norm1_name, norm1)
+        self.fc2 = build_conv_layer(
+            None, inter_channels, channels * radix, 1, groups=self.groups)
+        self.rsoftmax = RSoftmax(radix, groups)
+
+    @property
+    def norm0(self):
+        """nn.Module: the normalization layer named "norm0" """
+        return getattr(self, self.norm0_name)
+
+    @property
+    def norm1(self):
+        """nn.Module: the normalization layer named "norm1" """
+        return getattr(self, self.norm1_name)
+
+    def forward(self, x):
+        x = self.conv(x)
+        x = self.norm0(x)
+        x = self.relu(x)
+
+        batch, rchannel = x.shape[:2]
+        if self.radix > 1:
+            splits = x.view(batch, self.radix, -1, *x.shape[2:])
+            gap = splits.sum(dim=1)
+        else:
+            gap = x
+        gap = F.adaptive_avg_pool2d(gap, 1)
+        gap = self.fc1(gap)
+
+        gap = self.norm1(gap)
+        gap = self.relu(gap)
+
+        atten = self.fc2(gap)
+        atten = self.rsoftmax(atten).view(batch, -1, 1, 1)
+
+        if self.radix > 1:
+            attens = atten.view(batch, self.radix, -1, *atten.shape[2:])
+            out = torch.sum(attens * splits, dim=1)
+        else:
+            out = atten * x
+        return out.contiguous()
+
+
+class Bottleneck(_Bottleneck):
+    """Bottleneck block for ResNeSt.
+
+    Args:
+        inplanes (int): Input planes of this block.
+        planes (int): Middle planes of this block.
+        groups (int): Groups of conv2.
+        base_width (int): Base of width in terms of base channels. Default: 4.
+        base_channels (int): Base of channels for calculating width.
+            Default: 64.
+        radix (int): Radix of SplitAttentionConv2d. Default: 2
+        reduction_factor (int): Reduction factor of inter_channels in
+            SplitAttentionConv2d. Default: 4.
+        avg_down_stride (bool): Whether to use average pool for stride in
+            Bottleneck. Default: True.
+        kwargs (dict): Keyword arguments for base class.
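A shape walk-through of the split-attention computation above, with toy tensors in place of the conv/fc modules (the `atten` tensor here stands in for the `fc1 -> fc2` output):

```python
import torch
import torch.nn.functional as F

batch, radix, groups, channels = 2, 2, 1, 8
x = torch.rand(batch, channels * radix, 4, 4)       # output of the grouped conv

splits = x.view(batch, radix, channels, 4, 4)
gap = F.adaptive_avg_pool2d(splits.sum(dim=1), 1)   # channel descriptor that
                                                    # fc1 -> fc2 would consume
atten = torch.rand(batch, channels * radix, 1, 1)   # stand-in for fc2 output

# RSoftmax: softmax over the radix dimension, computed per group
a = atten.view(batch, groups, radix, -1).transpose(1, 2)
a = F.softmax(a, dim=1).reshape(batch, -1, 1, 1)

attens = a.view(batch, radix, channels, 1, 1)
out = (attens * splits).sum(dim=1)                  # weighted sum over radix
assert out.shape == (batch, channels, 4, 4)
```

Because the softmax runs over the radix axis, the attention weights for each channel sum to one across the radix groups, so the output is a convex combination of the splits.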
+ """ + expansion = 4 + + def __init__(self, + inplanes, + planes, + groups=1, + base_width=4, + base_channels=64, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + """Bottleneck block for ResNeSt.""" + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.avg_down_stride = avg_down_stride and self.conv2_stride > 1 + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.with_modulated_dcn = False + self.conv2 = SplitAttentionConv2d( + width, + width, + kernel_size=3, + stride=1 if self.avg_down_stride else self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + radix=radix, + reduction_factor=reduction_factor, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=self.dcn) + delattr(self, self.norm2_name) + + if self.avg_down_stride: + self.avd_layer = nn.AvgPool2d(3, self.conv2_stride, padding=1) + + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + out = self.conv2(out) + + if self.avg_down_stride: + out = self.avd_layer(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +@BACKBONES.register_module() +class ResNeSt(ResNetV1d): + """ResNeSt backbone. + + Args: + groups (int): Number of groups of Bottleneck. Default: 1 + base_width (int): Base width of Bottleneck. Default: 4 + radix (int): Radix of SplitAttentionConv2d. Default: 2 + reduction_factor (int): Reduction factor of inter_channels in + SplitAttentionConv2d. Default: 4. + avg_down_stride (bool): Whether to use average pool for stride in + Bottleneck. Default: True. + kwargs (dict): Keyword arguments for ResNet. 
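The ResNeSt class below ships without a usage example, unlike the other backbones in this diff. A hedged one, assuming the mmdet package from this diff is importable; the expected output channels follow the same 256/512/1024/2048 layout as the ResNet-50 and Res2Net doctests, since `expansion` is 4:

```python
import torch
from mmdet.models import ResNeSt

model = ResNeSt(depth=50, radix=2, reduction_factor=4, avg_down_stride=True)
model.eval()
with torch.no_grad():
    outs = model(torch.rand(1, 3, 32, 32))
for out in outs:
    print(tuple(out.shape))
# expected, by analogy with the Res2Net doctest above:
# (1, 256, 8, 8), (1, 512, 4, 4), (1, 1024, 2, 2), (1, 2048, 1, 1)
```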
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)), + 200: (Bottleneck, (3, 24, 36, 3)) + } + + def __init__(self, + groups=1, + base_width=4, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + self.groups = groups + self.base_width = base_width + self.radix = radix + self.reduction_factor = reduction_factor + self.avg_down_stride = avg_down_stride + super(ResNeSt, self).__init__(**kwargs) + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer``.""" + return ResLayer( + groups=self.groups, + base_width=self.base_width, + base_channels=self.base_channels, + radix=self.radix, + reduction_factor=self.reduction_factor, + avg_down_stride=self.avg_down_stride, + **kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/backbones/resnet.py b/thirdparty/mmdetection/mmdet/models/backbones/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..195d5fe9eb19a34abf32249c05451023e209d5f8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/resnet.py @@ -0,0 +1,664 @@ +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import (build_conv_layer, build_norm_layer, build_plugin_layer, + constant_init, kaiming_init) +from mmcv.runner import load_checkpoint +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.utils import get_root_logger +from ..builder import BACKBONES +from ..utils import ResLayer + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, + inplanes, + planes, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + plugins=None): + super(BasicBlock, self).__init__() + assert dcn is None, 'Not implemented yet.' + assert plugins is None, 'Not implemented yet.' + + self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1) + self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2) + + self.conv1 = build_conv_layer( + conv_cfg, + inplanes, + planes, + 3, + stride=stride, + padding=dilation, + dilation=dilation, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + conv_cfg, planes, planes, 3, padding=1, bias=False) + self.add_module(self.norm2_name, norm2) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + self.dilation = dilation + self.with_cp = with_cp + + @property + def norm1(self): + """nn.Module: normalization layer after the first convolution layer""" + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: normalization layer after the second convolution layer""" + return getattr(self, self.norm2_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.norm2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, + inplanes, + planes, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + plugins=None): + """Bottleneck block for ResNet. 
+ + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. + """ + super(Bottleneck, self).__init__() + assert style in ['pytorch', 'caffe'] + assert dcn is None or isinstance(dcn, dict) + assert plugins is None or isinstance(plugins, list) + if plugins is not None: + allowed_position = ['after_conv1', 'after_conv2', 'after_conv3'] + assert all(p['position'] in allowed_position for p in plugins) + + self.inplanes = inplanes + self.planes = planes + self.stride = stride + self.dilation = dilation + self.style = style + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.dcn = dcn + self.with_dcn = dcn is not None + self.plugins = plugins + self.with_plugins = plugins is not None + + if self.with_plugins: + # collect plugins for conv1/conv2/conv3 + self.after_conv1_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv1' + ] + self.after_conv2_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv2' + ] + self.after_conv3_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv3' + ] + + if self.style == 'pytorch': + self.conv1_stride = 1 + self.conv2_stride = stride + else: + self.conv1_stride = stride + self.conv2_stride = 1 + + self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1) + self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + norm_cfg, planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + conv_cfg, + inplanes, + planes, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + if self.with_dcn: + fallback_on_stride = dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + conv_cfg, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=dilation, + dilation=dilation, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + dcn, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=dilation, + dilation=dilation, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + conv_cfg, + planes, + planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + + if self.with_plugins: + self.after_conv1_plugin_names = self.make_block_plugins( + planes, self.after_conv1_plugins) + self.after_conv2_plugin_names = self.make_block_plugins( + planes, self.after_conv2_plugins) + self.after_conv3_plugin_names = self.make_block_plugins( + planes * self.expansion, self.after_conv3_plugins) + + def make_block_plugins(self, in_channels, plugins): + """make plugins for block. + + Args: + in_channels (int): Input channels of plugin. + plugins (list[dict]): List of plugins cfg to build. + + Returns: + list[str]: List of the names of plugin. 
+        """
+        assert isinstance(plugins, list)
+        plugin_names = []
+        for plugin in plugins:
+            plugin = plugin.copy()
+            name, layer = build_plugin_layer(
+                plugin,
+                in_channels=in_channels,
+                postfix=plugin.pop('postfix', ''))
+            assert not hasattr(self, name), f'duplicate plugin {name}'
+            self.add_module(name, layer)
+            plugin_names.append(name)
+        return plugin_names
+
+    def forward_plugin(self, x, plugin_names):
+        out = x
+        for name in plugin_names:
+            # Chain the plugins: each one consumes the previous plugin's
+            # output. (The original code passed `x` here, which silently
+            # discarded every plugin's output except the last.)
+            out = getattr(self, name)(out)
+        return out
+
+    @property
+    def norm1(self):
+        """nn.Module: normalization layer after the first convolution layer"""
+        return getattr(self, self.norm1_name)
+
+    @property
+    def norm2(self):
+        """nn.Module: normalization layer after the second convolution layer"""
+        return getattr(self, self.norm2_name)
+
+    @property
+    def norm3(self):
+        """nn.Module: normalization layer after the third convolution layer"""
+        return getattr(self, self.norm3_name)
+
+    def forward(self, x):
+        """Forward function."""
+
+        def _inner_forward(x):
+            identity = x
+
+            out = self.conv1(x)
+            out = self.norm1(out)
+            out = self.relu(out)
+
+            if self.with_plugins:
+                out = self.forward_plugin(out, self.after_conv1_plugin_names)
+
+            out = self.conv2(out)
+            out = self.norm2(out)
+            out = self.relu(out)
+
+            if self.with_plugins:
+                out = self.forward_plugin(out, self.after_conv2_plugin_names)
+
+            out = self.conv3(out)
+            out = self.norm3(out)
+
+            if self.with_plugins:
+                out = self.forward_plugin(out, self.after_conv3_plugin_names)
+
+            if self.downsample is not None:
+                identity = self.downsample(x)
+
+            out += identity
+
+            return out
+
+        if self.with_cp and x.requires_grad:
+            out = cp.checkpoint(_inner_forward, x)
+        else:
+            out = _inner_forward(x)
+
+        out = self.relu(out)
+
+        return out
+
+
+@BACKBONES.register_module()
+class ResNet(nn.Module):
+    """ResNet backbone.
+
+    Args:
+        depth (int): Depth of resnet, from {18, 34, 50, 101, 152}.
+        stem_channels (int | None): Number of stem channels. If not specified,
+            it will be the same as `base_channels`. Default: None.
+        base_channels (int): Number of base channels of res layer. Default: 64.
+        in_channels (int): Number of input image channels. Default: 3.
+        num_stages (int): Resnet stages. Default: 4.
+        strides (Sequence[int]): Strides of the first block of each stage.
+        dilations (Sequence[int]): Dilation of each stage.
+        out_indices (Sequence[int]): Output from which stages.
+        style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
+            layer is the 3x3 conv layer, otherwise the stride-two layer is
+            the first 1x1 conv layer.
+        deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv
+        avg_down (bool): Use AvgPool instead of stride conv when
+            downsampling in the bottleneck.
+        frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
+            -1 means not freezing any parameters.
+        norm_cfg (dict): Dictionary to construct and config norm layer.
+        norm_eval (bool): Whether to set norm layers to eval mode, namely,
+            freeze running stats (mean and var). Note: Effect on Batch Norm
+            and its variants only.
+        plugins (list[dict]): List of plugins for stages, each dict contains:
+
+            - cfg (dict, required): Cfg dict to build plugin.
+            - position (str, required): Position inside block to insert
+              plugin, options are 'after_conv1', 'after_conv2', 'after_conv3'.
+            - stages (tuple[bool], optional): Stages to apply plugin, length
+              should be same as 'num_stages'.
+        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+            memory while slowing down the training speed.
+ zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. + + Example: + >>> from mmdet.models import ResNet + >>> import torch + >>> self = ResNet(depth=18) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 64, 8, 8) + (1, 128, 4, 4) + (1, 256, 2, 2) + (1, 512, 1, 1) + """ + + arch_settings = { + 18: (BasicBlock, (2, 2, 2, 2)), + 34: (BasicBlock, (3, 4, 6, 3)), + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + depth, + in_channels=3, + stem_channels=None, + base_channels=64, + num_stages=4, + strides=(1, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(0, 1, 2, 3), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + dcn=None, + stage_with_dcn=(False, False, False, False), + plugins=None, + with_cp=False, + zero_init_residual=True): + super(ResNet, self).__init__() + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for resnet') + self.depth = depth + if stem_channels is None: + stem_channels = base_channels + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.dcn = dcn + self.stage_with_dcn = stage_with_dcn + if dcn is not None: + assert len(stage_with_dcn) == num_stages + self.plugins = plugins + self.zero_init_residual = zero_init_residual + self.block, stage_blocks = self.arch_settings[depth] + self.stage_blocks = stage_blocks[:num_stages] + self.inplanes = stem_channels + + self._make_stem_layer(in_channels, stem_channels) + + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = strides[i] + dilation = dilations[i] + dcn = self.dcn if self.stage_with_dcn[i] else None + if plugins is not None: + stage_plugins = self.make_stage_plugins(plugins, i) + else: + stage_plugins = None + planes = base_channels * 2**i + res_layer = self.make_res_layer( + block=self.block, + inplanes=self.inplanes, + planes=planes, + num_blocks=num_blocks, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=with_cp, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + dcn=dcn, + plugins=stage_plugins) + self.inplanes = planes * self.block.expansion + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + self.feat_dim = self.block.expansion * base_channels * 2**( + len(self.stage_blocks) - 1) + + def make_stage_plugins(self, plugins, stage_idx): + """Make plugins for ResNet ``stage_idx`` th stage. + + Currently we support to insert ``context_block``, + ``empirical_attention_block``, ``nonlocal_block`` into the backbone + like ResNet/ResNeXt. They could be inserted after conv1/conv2/conv3 of + Bottleneck. 
+ + An example of plugins format could be: + + Examples: + >>> plugins=[ + ... dict(cfg=dict(type='xxx', arg1='xxx'), + ... stages=(False, True, True, True), + ... position='after_conv2'), + ... dict(cfg=dict(type='yyy'), + ... stages=(True, True, True, True), + ... position='after_conv3'), + ... dict(cfg=dict(type='zzz', postfix='1'), + ... stages=(True, True, True, True), + ... position='after_conv3'), + ... dict(cfg=dict(type='zzz', postfix='2'), + ... stages=(True, True, True, True), + ... position='after_conv3') + ... ] + >>> self = ResNet(depth=18) + >>> stage_plugins = self.make_stage_plugins(plugins, 0) + >>> assert len(stage_plugins) == 3 + + Suppose ``stage_idx=0``, the structure of blocks in the stage would be: + + .. code-block:: none + + conv1-> conv2->conv3->yyy->zzz1->zzz2 + + Suppose 'stage_idx=1', the structure of blocks in the stage would be: + + .. code-block:: none + + conv1-> conv2->xxx->conv3->yyy->zzz1->zzz2 + + If stages is missing, the plugin would be applied to all stages. + + Args: + plugins (list[dict]): List of plugins cfg to build. The postfix is + required if multiple same type plugins are inserted. + stage_idx (int): Index of stage to build + + Returns: + list[dict]: Plugins for current stage + """ + stage_plugins = [] + for plugin in plugins: + plugin = plugin.copy() + stages = plugin.pop('stages', None) + assert stages is None or len(stages) == self.num_stages + # whether to insert plugin into current stage + if stages is None or stages[stage_idx]: + stage_plugins.append(plugin) + + return stage_plugins + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer``.""" + return ResLayer(**kwargs) + + @property + def norm1(self): + """nn.Module: the normalization layer named "norm1" """ + return getattr(self, self.norm1_name) + + def _make_stem_layer(self, in_channels, stem_channels): + if self.deep_stem: + self.stem = nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels, + stem_channels // 2, + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, stem_channels // 2)[1], + nn.ReLU(inplace=True), + build_conv_layer( + self.conv_cfg, + stem_channels // 2, + stem_channels // 2, + kernel_size=3, + stride=1, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, stem_channels // 2)[1], + nn.ReLU(inplace=True), + build_conv_layer( + self.conv_cfg, + stem_channels // 2, + stem_channels, + kernel_size=3, + stride=1, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, stem_channels)[1], + nn.ReLU(inplace=True)) + else: + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + stem_channels, + kernel_size=7, + stride=2, + padding=3, + bias=False) + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, stem_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + if self.deep_stem: + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + else: + self.norm1.eval() + for m in [self.conv1, self.norm1]: + for param in m.parameters(): + param.requires_grad = False + + for i in range(1, self.frozen_stages + 1): + m = getattr(self, f'layer{i}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def init_weights(self, pretrained=None): + """Initialize the weights in backbone. + + Args: + pretrained (str, optional): Path to pre-trained weights. 
+ Defaults to None.
+ """
+ if isinstance(pretrained, str):
+ logger = get_root_logger()
+ load_checkpoint(self, pretrained, strict=False, logger=logger)
+ elif pretrained is None:
+ for m in self.modules():
+ if isinstance(m, nn.Conv2d):
+ kaiming_init(m)
+ elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
+ constant_init(m, 1)
+
+ if self.dcn is not None:
+ for m in self.modules():
+ if isinstance(m, Bottleneck) and hasattr(
+ m.conv2, 'conv_offset'):
+ constant_init(m.conv2.conv_offset, 0)
+
+ if self.zero_init_residual:
+ for m in self.modules():
+ if isinstance(m, Bottleneck):
+ constant_init(m.norm3, 0)
+ elif isinstance(m, BasicBlock):
+ constant_init(m.norm2, 0)
+ else:
+ raise TypeError('pretrained must be a str or None')
+
+ def forward(self, x):
+ """Forward function."""
+ if self.deep_stem:
+ x = self.stem(x)
+ else:
+ x = self.conv1(x)
+ x = self.norm1(x)
+ x = self.relu(x)
+ x = self.maxpool(x)
+ outs = []
+ for i, layer_name in enumerate(self.res_layers):
+ res_layer = getattr(self, layer_name)
+ x = res_layer(x)
+ if i in self.out_indices:
+ outs.append(x)
+ return tuple(outs)
+
+ def train(self, mode=True):
+ """Convert the model into training mode while keeping the
+ normalization layers frozen."""
+ super(ResNet, self).train(mode)
+ self._freeze_stages()
+ if mode and self.norm_eval:
+ for m in self.modules():
+ # trick: eval() has an effect on BatchNorm only
+ if isinstance(m, _BatchNorm):
+ m.eval()
+
+
+@BACKBONES.register_module()
+class ResNetV1d(ResNet):
+ r"""ResNetV1d variant described in `Bag of Tricks
+ <https://arxiv.org/abs/1812.01187>`_.
+
+ Compared with the default ResNet (ResNetV1b), ResNetV1d replaces the 7x7
+ conv in the input stem with three 3x3 convs, and in the downsampling
+ block a 2x2 avg_pool with stride 2 is added before the conv, whose stride
+ is changed to 1.
+ """
+
+ def __init__(self, **kwargs):
+ super(ResNetV1d, self).__init__(
+ deep_stem=True, avg_down=True, **kwargs)
diff --git a/thirdparty/mmdetection/mmdet/models/backbones/resnext.py b/thirdparty/mmdetection/mmdet/models/backbones/resnext.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf0360ea7e67d475bb4e10ae87d7accc5e9988c6
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/backbones/resnext.py
@@ -0,0 +1,132 @@
+import math
+
+from mmcv.cnn import build_conv_layer, build_norm_layer
+
+from ..builder import BACKBONES
+from ..utils import ResLayer
+from .resnet import Bottleneck as _Bottleneck
+from .resnet import ResNet
+
+
+class Bottleneck(_Bottleneck):
+ expansion = 4
+
+ def __init__(self,
+ inplanes,
+ planes,
+ groups=1,
+ base_width=4,
+ base_channels=64,
+ **kwargs):
+ """Bottleneck block for ResNeXt.
+
+ If style is "pytorch", the stride-two layer is the 3x3 conv layer;
+ if it is "caffe", the stride-two layer is the first 1x1 conv layer.
+ """ + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, width, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + self.with_modulated_dcn = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + +@BACKBONES.register_module() +class ResNeXt(ResNet): + """ResNeXt backbone. + + Args: + depth (int): Depth of resnet, from {18, 34, 50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + num_stages (int): Resnet stages. Default: 4. + groups (int): Group of resnext. + base_width (int): Base width of resnext. + strides (Sequence[int]): Strides of the first block of each stage. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. + norm_cfg (dict): dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. 
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, groups=1, base_width=4, **kwargs): + self.groups = groups + self.base_width = base_width + super(ResNeXt, self).__init__(**kwargs) + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer``""" + return ResLayer( + groups=self.groups, + base_width=self.base_width, + base_channels=self.base_channels, + **kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/backbones/ssd_vgg.py b/thirdparty/mmdetection/mmdet/models/backbones/ssd_vgg.py new file mode 100644 index 0000000000000000000000000000000000000000..cbc4fbb2301afc002f47abb9ed133a500d6cf23f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/backbones/ssd_vgg.py @@ -0,0 +1,169 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import VGG, constant_init, kaiming_init, normal_init, xavier_init +from mmcv.runner import load_checkpoint + +from mmdet.utils import get_root_logger +from ..builder import BACKBONES + + +@BACKBONES.register_module() +class SSDVGG(VGG): + """VGG Backbone network for single-shot-detection. + + Args: + input_size (int): width and height of input, from {300, 512}. + depth (int): Depth of vgg, from {11, 13, 16, 19}. + out_indices (Sequence[int]): Output from which stages. + + Example: + >>> self = SSDVGG(input_size=300, depth=11) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 300, 300) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 1024, 19, 19) + (1, 512, 10, 10) + (1, 256, 5, 5) + (1, 256, 3, 3) + (1, 256, 1, 1) + """ + extra_setting = { + 300: (256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256), + 512: (256, 'S', 512, 128, 'S', 256, 128, 'S', 256, 128, 'S', 256, 128), + } + + def __init__(self, + input_size, + depth, + with_last_pool=False, + ceil_mode=True, + out_indices=(3, 4), + out_feature_indices=(22, 34), + l2_norm_scale=20.): + # TODO: in_channels for mmcv.VGG + super(SSDVGG, self).__init__( + depth, + with_last_pool=with_last_pool, + ceil_mode=ceil_mode, + out_indices=out_indices) + assert input_size in (300, 512) + self.input_size = input_size + + self.features.add_module( + str(len(self.features)), + nn.MaxPool2d(kernel_size=3, stride=1, padding=1)) + self.features.add_module( + str(len(self.features)), + nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6)) + self.features.add_module( + str(len(self.features)), nn.ReLU(inplace=True)) + self.features.add_module( + str(len(self.features)), nn.Conv2d(1024, 1024, kernel_size=1)) + self.features.add_module( + str(len(self.features)), nn.ReLU(inplace=True)) + self.out_feature_indices = out_feature_indices + + self.inplanes = 1024 + self.extra = self._make_extra_layers(self.extra_setting[input_size]) + self.l2_norm = L2Norm( + self.features[out_feature_indices[0] - 1].out_channels, + l2_norm_scale) + + def init_weights(self, pretrained=None): + """Initialize the weights in backbone. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. 
+ """ + if isinstance(pretrained, str): + logger = get_root_logger() + load_checkpoint(self, pretrained, strict=False, logger=logger) + elif pretrained is None: + for m in self.features.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, nn.BatchNorm2d): + constant_init(m, 1) + elif isinstance(m, nn.Linear): + normal_init(m, std=0.01) + else: + raise TypeError('pretrained must be a str or None') + + for m in self.extra.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + + constant_init(self.l2_norm, self.l2_norm.scale) + + def forward(self, x): + """Forward function.""" + outs = [] + for i, layer in enumerate(self.features): + x = layer(x) + if i in self.out_feature_indices: + outs.append(x) + for i, layer in enumerate(self.extra): + x = F.relu(layer(x), inplace=True) + if i % 2 == 1: + outs.append(x) + outs[0] = self.l2_norm(outs[0]) + if len(outs) == 1: + return outs[0] + else: + return tuple(outs) + + def _make_extra_layers(self, outplanes): + layers = [] + kernel_sizes = (1, 3) + num_layers = 0 + outplane = None + for i in range(len(outplanes)): + if self.inplanes == 'S': + self.inplanes = outplane + continue + k = kernel_sizes[num_layers % 2] + if outplanes[i] == 'S': + outplane = outplanes[i + 1] + conv = nn.Conv2d( + self.inplanes, outplane, k, stride=2, padding=1) + else: + outplane = outplanes[i] + conv = nn.Conv2d( + self.inplanes, outplane, k, stride=1, padding=0) + layers.append(conv) + self.inplanes = outplanes[i] + num_layers += 1 + if self.input_size == 512: + layers.append(nn.Conv2d(self.inplanes, 256, 4, padding=1)) + + return nn.Sequential(*layers) + + +class L2Norm(nn.Module): + + def __init__(self, n_dims, scale=20., eps=1e-10): + """L2 normalization layer. + + Args: + n_dims (int): Number of dimensions to be normalized + scale (float, optional): Defaults to 20.. + eps (float, optional): Used to avoid division by zero. + Defaults to 1e-10. + """ + super(L2Norm, self).__init__() + self.n_dims = n_dims + self.weight = nn.Parameter(torch.Tensor(self.n_dims)) + self.eps = eps + self.scale = scale + + def forward(self, x): + """Forward function.""" + # normalization layer convert to FP32 in FP16 training + x_float = x.float() + norm = x_float.pow(2).sum(1, keepdim=True).sqrt() + self.eps + return (self.weight[None, :, None, None].float().expand_as(x_float) * + x_float / norm).type_as(x) diff --git a/thirdparty/mmdetection/mmdet/models/builder.py b/thirdparty/mmdetection/mmdet/models/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..899e787449d735cde42c0e2e717007a9778cda85 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/builder.py @@ -0,0 +1,67 @@ +from mmcv.utils import Registry, build_from_cfg +from torch import nn + +BACKBONES = Registry('backbone') +NECKS = Registry('neck') +ROI_EXTRACTORS = Registry('roi_extractor') +SHARED_HEADS = Registry('shared_head') +HEADS = Registry('head') +LOSSES = Registry('loss') +DETECTORS = Registry('detector') + + +def build(cfg, registry, default_args=None): + """Build a module. + + Args: + cfg (dict, list[dict]): The config of modules, is is either a dict + or a list of configs. + registry (:obj:`Registry`): A registry the module belongs to. + default_args (dict, optional): Default arguments to build the module. + Defaults to None. + + Returns: + nn.Module: A built nn module. 
+ """ + if isinstance(cfg, list): + modules = [ + build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg + ] + return nn.Sequential(*modules) + else: + return build_from_cfg(cfg, registry, default_args) + + +def build_backbone(cfg): + """Build backbone.""" + return build(cfg, BACKBONES) + + +def build_neck(cfg): + """Build neck.""" + return build(cfg, NECKS) + + +def build_roi_extractor(cfg): + """Build roi extractor.""" + return build(cfg, ROI_EXTRACTORS) + + +def build_shared_head(cfg): + """Build shared head.""" + return build(cfg, SHARED_HEADS) + + +def build_head(cfg): + """Build head.""" + return build(cfg, HEADS) + + +def build_loss(cfg): + """Build loss.""" + return build(cfg, LOSSES) + + +def build_detector(cfg, train_cfg=None, test_cfg=None): + """Build detector.""" + return build(cfg, DETECTORS, dict(train_cfg=train_cfg, test_cfg=test_cfg)) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/__init__.py b/thirdparty/mmdetection/mmdet/models/dense_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..74750546b58c96afcaca624b6ec85dfba0f38997 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/__init__.py @@ -0,0 +1,37 @@ +from .anchor_free_head import AnchorFreeHead +from .anchor_head import AnchorHead +from .atss_head import ATSSHead +from .centripetal_head import CentripetalHead +from .corner_head import CornerHead +from .fcos_head import FCOSHead +from .fovea_head import FoveaHead +from .free_anchor_retina_head import FreeAnchorRetinaHead +from .fsaf_head import FSAFHead +from .ga_retina_head import GARetinaHead +from .ga_rpn_head import GARPNHead +from .gfl_head import GFLHead +from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead +from .nasfcos_head import NASFCOSHead +from .paa_head import PAAHead +from .pisa_retinanet_head import PISARetinaHead +from .pisa_ssd_head import PISASSDHead +from .reppoints_head import RepPointsHead +from .retina_head import RetinaHead +from .retina_sepbn_head import RetinaSepBNHead +from .rpn_head import RPNHead +from .sabl_retina_head import SABLRetinaHead +from .ssd_head import SSDHead +from .transformer_head import TransformerHead +from .vfnet_head import VFNetHead +from .yolact_head import YOLACTHead, YOLACTProtonet, YOLACTSegmHead +from .yolo_head import YOLOV3Head + +__all__ = [ + 'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption', + 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead', + 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead', + 'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead', + 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YOLACTHead', + 'YOLACTSegmHead', 'YOLACTProtonet', 'YOLOV3Head', 'PAAHead', + 'SABLRetinaHead', 'CentripetalHead', 'VFNetHead', 'TransformerHead' +] diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_free_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_free_head.py new file mode 100644 index 0000000000000000000000000000000000000000..917acde637ab723dbee91eb8a74aca036380180f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_free_head.py @@ -0,0 +1,340 @@ +from abc import abstractmethod + +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + 
+@HEADS.register_module() +class AnchorFreeHead(BaseDenseHead, BBoxTestMixin): + """Anchor-free head (FCOS, Fovea, RepPoints, etc.). + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + feat_channels (int): Number of hidden channels. Used in child classes. + stacked_convs (int): Number of stacking convs of the head. + strides (tuple): Downsample factor of each feature map. + dcn_on_last_conv (bool): If true, use dcn in the last layer of + towers. Default: False. + conv_bias (bool | str): If specified as `auto`, it will be decided by + the norm_cfg. Bias of conv will be set as True if `norm_cfg` is + None, otherwise False. Default: "auto". + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Config dict for normalization layer. Default: None. + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. + """ # noqa: W605 + + _version = 1 + + def __init__(self, + num_classes, + in_channels, + feat_channels=256, + stacked_convs=4, + strides=(4, 8, 16, 32, 64), + dcn_on_last_conv=False, + conv_bias='auto', + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + conv_cfg=None, + norm_cfg=None, + train_cfg=None, + test_cfg=None): + super(AnchorFreeHead, self).__init__() + self.num_classes = num_classes + self.cls_out_channels = num_classes + self.in_channels = in_channels + self.feat_channels = feat_channels + self.stacked_convs = stacked_convs + self.strides = strides + self.dcn_on_last_conv = dcn_on_last_conv + assert conv_bias == 'auto' or isinstance(conv_bias, bool) + self.conv_bias = conv_bias + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.fp16_enabled = False + + self._init_layers() + + def _init_layers(self): + """Initialize layers of the head.""" + self._init_cls_convs() + self._init_reg_convs() + self._init_predictor() + + def _init_cls_convs(self): + """Initialize classification conv layers of the head.""" + self.cls_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + if self.dcn_on_last_conv and i == self.stacked_convs - 1: + conv_cfg = dict(type='DCNv2') + else: + conv_cfg = self.conv_cfg + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias)) + + def _init_reg_convs(self): + """Initialize bbox regression conv layers of the head.""" + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + if self.dcn_on_last_conv and i == self.stacked_convs - 1: + conv_cfg = dict(type='DCNv2') + else: + conv_cfg = self.conv_cfg + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias)) + + def _init_predictor(self): + """Initialize predictor layers of the head.""" + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + 
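A small sketch of how the `conv_bias='auto'` convention documented above resolves, mirroring the behavior the docstring describes for `ConvModule`: the conv bias is enabled only when there is no normalization layer to absorb it.

```python
# Resolution of the 'auto' conv bias: True when norm_cfg is None,
# False otherwise; explicit booleans pass through unchanged.
def resolve_bias(conv_bias, norm_cfg):
    if conv_bias == 'auto':
        return norm_cfg is None
    return conv_bias


assert resolve_bias('auto', None) is True
assert resolve_bias('auto', dict(type='GN', num_groups=32)) is False
assert resolve_bias(False, None) is False
```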
+ def init_weights(self):
+ """Initialize weights of the head."""
+ for m in self.cls_convs:
+ if isinstance(m.conv, nn.Conv2d):
+ normal_init(m.conv, std=0.01)
+ for m in self.reg_convs:
+ if isinstance(m.conv, nn.Conv2d):
+ normal_init(m.conv, std=0.01)
+ bias_cls = bias_init_with_prob(0.01)
+ normal_init(self.conv_cls, std=0.01, bias=bias_cls)
+ normal_init(self.conv_reg, std=0.01)
+
+ def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
+ missing_keys, unexpected_keys, error_msgs):
+ """Hack some keys of the model state dict so that it can load
+ checkpoints of previous versions."""
+ version = local_metadata.get('version', None)
+ if version is None:
+ # the key is different in early versions
+ # for example, 'fcos_cls' became 'conv_cls' now
+ bbox_head_keys = [
+ k for k in state_dict.keys() if k.startswith(prefix)
+ ]
+ ori_predictor_keys = []
+ new_predictor_keys = []
+ # e.g. 'fcos_cls' or 'fcos_reg'
+ for key in bbox_head_keys:
+ ori_predictor_keys.append(key)
+ key = key.split('.')
+ conv_name = None
+ if key[1].endswith('cls'):
+ conv_name = 'conv_cls'
+ elif key[1].endswith('reg'):
+ conv_name = 'conv_reg'
+ elif key[1].endswith('centerness'):
+ conv_name = 'conv_centerness'
+ # non-predictor keys need no renaming and are removed from
+ # the renaming list below
+ if conv_name is not None:
+ key[1] = conv_name
+ new_predictor_keys.append('.'.join(key))
+ else:
+ ori_predictor_keys.pop(-1)
+ for i in range(len(new_predictor_keys)):
+ state_dict[new_predictor_keys[i]] = state_dict.pop(
+ ori_predictor_keys[i])
+ super()._load_from_state_dict(state_dict, prefix, local_metadata,
+ strict, missing_keys, unexpected_keys,
+ error_msgs)
+
+ def forward(self, feats):
+ """Forward features from the upstream network.
+
+ Args:
+ feats (tuple[Tensor]): Features from the upstream network, each is
+ a 4D-tensor.
+
+ Returns:
+ tuple: Usually contains classification scores and bbox predictions.
+ cls_scores (list[Tensor]): Box scores for each scale level,
+ each is a 4D-tensor, the channel number is
+ num_points * num_classes.
+ bbox_preds (list[Tensor]): Box energies / deltas for each scale
+ level, each is a 4D-tensor, the channel number is
+ num_points * 4.
+ """
+ return multi_apply(self.forward_single, feats)[:2]
+
+ def forward_single(self, x):
+ """Forward features of a single scale level.
+
+ Args:
+ x (Tensor): FPN feature maps of the specified stride.
+
+ Returns:
+ tuple: Scores for each class, bbox predictions, and features
+ after the classification and regression conv layers; some
+ models (e.g. FCOS) need these features.
+ """
+ cls_feat = x
+ reg_feat = x
+
+ for cls_layer in self.cls_convs:
+ cls_feat = cls_layer(cls_feat)
+ cls_score = self.conv_cls(cls_feat)
+
+ for reg_layer in self.reg_convs:
+ reg_feat = reg_layer(reg_feat)
+ bbox_pred = self.conv_reg(reg_feat)
+ return cls_score, bbox_pred, cls_feat, reg_feat
+
+ @abstractmethod
+ @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+ def loss(self,
+ cls_scores,
+ bbox_preds,
+ gt_bboxes,
+ gt_labels,
+ img_metas,
+ gt_bboxes_ignore=None):
+ """Compute loss of the head.
+
+ Args:
+ cls_scores (list[Tensor]): Box scores for each scale level,
+ each is a 4D-tensor, the channel number is
+ num_points * num_classes.
+ bbox_preds (list[Tensor]): Box energies / deltas for each scale
+ level, each is a 4D-tensor, the channel number is
+ num_points * 4.
+ gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+ shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels (list[Tensor]): class indices corresponding to each box
+ img_metas (list[dict]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+ gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+ boxes can be ignored when computing the loss.
+ """
+
+ raise NotImplementedError
+
+ @abstractmethod
+ @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+ def get_bboxes(self,
+ cls_scores,
+ bbox_preds,
+ img_metas,
+ cfg=None,
+ rescale=None):
+ """Transform network output for a batch into bbox predictions.
+
+ Args:
+ cls_scores (list[Tensor]): Box scores for each scale level
+ Has shape (N, num_points * num_classes, H, W)
+ bbox_preds (list[Tensor]): Box energies / deltas for each scale
+ level with shape (N, num_points * 4, H, W)
+ img_metas (list[dict]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+ cfg (mmcv.Config): Test / postprocessing configuration,
+ if None, test_cfg would be used
+ rescale (bool): If True, return boxes in original image space
+ """
+
+ raise NotImplementedError
+
+ @abstractmethod
+ def get_targets(self, points, gt_bboxes_list, gt_labels_list):
+ """Compute regression, classification and centerness targets for
+ points in multiple images.
+
+ Args:
+ points (list[Tensor]): Points of each fpn level, each has shape
+ (num_points, 2).
+ gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image,
+ each has shape (num_gt, 4).
+ gt_labels_list (list[Tensor]): Ground truth labels of each box,
+ each has shape (num_gt,).
+ """
+ raise NotImplementedError
+
+ def _get_points_single(self,
+ featmap_size,
+ stride,
+ dtype,
+ device,
+ flatten=False):
+ """Get points of a single scale level."""
+ h, w = featmap_size
+ x_range = torch.arange(w, dtype=dtype, device=device)
+ y_range = torch.arange(h, dtype=dtype, device=device)
+ y, x = torch.meshgrid(y_range, x_range)
+ if flatten:
+ y = y.flatten()
+ x = x.flatten()
+ return y, x
+
+ def get_points(self, featmap_sizes, dtype, device, flatten=False):
+ """Get points according to feature map sizes.
+
+ Args:
+ featmap_sizes (list[tuple]): Multi-level feature map sizes.
+ dtype (torch.dtype): Type of points.
+ device (torch.device): Device of points.
+
+ Returns:
+ list: Points of each feature map level.
+ """
+ mlvl_points = []
+ for i in range(len(featmap_sizes)):
+ mlvl_points.append(
+ self._get_points_single(featmap_sizes[i], self.strides[i],
+ dtype, device, flatten))
+ return mlvl_points
+
+ def aug_test(self, feats, img_metas, rescale=False):
+ """Test function with test time augmentation.
+
+ Args:
+ feats (list[Tensor]): the outer list indicates test-time
+ augmentations and inner Tensor should have a shape NxCxHxW,
+ which contains features for all images in the batch.
+ img_metas (list[list[dict]]): the outer list indicates test-time
+ augs (multiscale, flip, etc.) and the inner list indicates
+ images in a batch. each dict has image information.
+ rescale (bool, optional): Whether to rescale the results.
+ Defaults to False.
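For reference, this is what `_get_points_single` above computes for one level: a per-pixel `(y, x)` index grid over an `H x W` feature map (multiplying by the level's `stride` maps the grid back to image coordinates).

```python
import torch

# Toy 2x3 feature map: build the (y, x) grids the head uses as point
# locations for one scale level.
h, w = 2, 3
y_range = torch.arange(h)
x_range = torch.arange(w)
y, x = torch.meshgrid(y_range, x_range)
assert y.shape == x.shape == (h, w)
assert x[0].tolist() == [0, 1, 2]   # x varies along each row
assert y[:, 0].tolist() == [0, 1]   # y varies down each column
```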
+ + Returns: + list[ndarray]: bbox results of each class + """ + return self.aug_test_bboxes(feats, img_metas, rescale=rescale) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a5bb413738840b286a38de4acfa1fd28dcaee131 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/anchor_head.py @@ -0,0 +1,682 @@ +import torch +import torch.nn as nn +from mmcv.cnn import normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, build_anchor_generator, + build_assigner, build_bbox_coder, build_sampler, + images_to_levels, multi_apply, multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class AnchorHead(BaseDenseHead, BBoxTestMixin): + """Anchor-based head (RPN, RetinaNet, SSD, etc.). + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + feat_channels (int): Number of hidden channels. Used in child classes. + anchor_generator (dict): Config dict for anchor generator + bbox_coder (dict): Config of bounding box coder. + reg_decoded_bbox (bool): If true, the regression loss would be + applied on decoded bounding boxes. Default: False + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. + """ # noqa: W605 + + def __init__(self, + num_classes, + in_channels, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=(.0, .0, .0, .0), + target_stds=(1.0, 1.0, 1.0, 1.0)), + reg_decoded_bbox=False, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0), + train_cfg=None, + test_cfg=None): + super(AnchorHead, self).__init__() + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + # TODO better way to determine whether sample or not + self.sampling = loss_cls['type'] not in [ + 'FocalLoss', 'GHMC', 'QualityFocalLoss' + ] + if self.use_sigmoid_cls: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + + if self.cls_out_channels <= 0: + raise ValueError(f'num_classes={num_classes} is too small') + self.reg_decoded_bbox = reg_decoded_bbox + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.fp16_enabled = False + + self.anchor_generator = build_anchor_generator(anchor_generator) + # usually the numbers of anchors for each level are the same + # 
except SSD detectors + self.num_anchors = self.anchor_generator.num_base_anchors[0] + self._init_layers() + + def _init_layers(self): + """Initialize layers of the head.""" + self.conv_cls = nn.Conv2d(self.in_channels, + self.num_anchors * self.cls_out_channels, 1) + self.conv_reg = nn.Conv2d(self.in_channels, self.num_anchors * 4, 1) + + def init_weights(self): + """Initialize weights of the head.""" + normal_init(self.conv_cls, std=0.01) + normal_init(self.conv_reg, std=0.01) + + def forward_single(self, x): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level \ + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale \ + level, the channels number is num_anchors * 4. + """ + cls_score = self.conv_cls(x) + bbox_pred = self.conv_reg(x) + return cls_score, bbox_pred + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: A tuple of classification scores and bbox prediction. + + - cls_scores (list[Tensor]): Classification scores for all \ + scale levels, each is a 4D-tensor, the channels number \ + is num_anchors * num_classes. + - bbox_preds (list[Tensor]): Box energies / deltas for all \ + scale levels, each is a 4D-tensor, the channels number \ + is num_anchors * 4. + """ + return multi_apply(self.forward_single, feats) + + def get_anchors(self, featmap_sizes, img_metas, device='cuda'): + """Get anchors according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. + device (torch.device | str): Device for returned tensors + + Returns: + tuple: + anchor_list (list[Tensor]): Anchors of each image. + valid_flag_list (list[Tensor]): Valid flags of each image. + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # anchors for one time + multi_level_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device) + anchor_list = [multi_level_anchors for _ in range(num_imgs)] + + # for each image, we compute valid flags of multi level anchors + valid_flag_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_flags = self.anchor_generator.valid_flags( + featmap_sizes, img_meta['pad_shape'], device) + valid_flag_list.append(multi_level_flags) + + return anchor_list, valid_flag_list + + def _get_targets_single(self, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Args: + flat_anchors (Tensor): Multi-level anchors of the image, which are + concatenated into a single tensor of shape (num_anchors ,4) + valid_flags (Tensor): Multi level valid flags of the image, + which are concatenated into a single tensor of + shape (num_anchors,). + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + img_meta (dict): Meta info of the image. + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + img_meta (dict): Meta info of the image. + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + label_channels (int): Channel of label. 
+ unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level + label_weights_list (list[Tensor]): Label weights of each level + bbox_targets_list (list[Tensor]): BBox targets of each level + bbox_weights_list (list[Tensor]): BBox weights of each level + num_total_pos (int): Number of positive samples in all images + num_total_neg (int): Number of negative samples in all images + """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + assign_result = self.assigner.assign( + anchors, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class since v2.5.0 + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + labels = unmap( + labels, num_total_anchors, inside_flags, + fill=self.num_classes) # fill bg label + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds, sampling_result) + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True, + return_sampling_results=False): + """Compute regression and classification targets for anchors in + multiple images. + + Args: + anchor_list (list[list[Tensor]]): Multi level anchors of each + image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, 4). + valid_flag_list (list[list[Tensor]]): Multi level valid flags of + each image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be + ignored. 
+ gt_labels_list (list[Tensor]): Ground truth labels of each box. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - labels_list (list[Tensor]): Labels of each level. + - label_weights_list (list[Tensor]): Label weights of each \ + level. + - bbox_targets_list (list[Tensor]): BBox targets of each level. + - bbox_weights_list (list[Tensor]): BBox weights of each level. + - num_total_pos (int): Number of positive samples in all \ + images. + - num_total_neg (int): Number of negative samples in all \ + images. + additional_returns: This function enables user-defined returns from + `self._get_targets_single`. These returns are currently refined + to properties at each feature map (i.e. having HxW dimension). + The results will be concatenated after the end + """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors to a single tensor + concat_anchor_list = [] + concat_valid_flag_list = [] + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + concat_anchor_list.append(torch.cat(anchor_list[i])) + concat_valid_flag_list.append(torch.cat(valid_flag_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + results = multi_apply( + self._get_targets_single, + concat_anchor_list, + concat_valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + (all_labels, all_label_weights, all_bbox_targets, all_bbox_weights, + pos_inds_list, neg_inds_list, sampling_results_list) = results[:7] + rest_results = list(results[7:]) # user-added return values + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + res = (labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) + if return_sampling_results: + res = res + (sampling_results_list, ) + for i, r in enumerate(rest_results): # user-added return values + rest_results[i] = images_to_levels(r, num_level_anchors) + + return res + tuple(rest_results) + + def loss_single(self, cls_score, bbox_pred, anchors, labels, label_weights, + bbox_targets, bbox_weights, num_total_samples): + """Compute loss of a single scale level. + + Args: + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + anchors (Tensor): Box reference for each scale level with shape + (N, num_total_anchors, 4). 
+ labels (Tensor): Labels of each anchor with shape
+ (N, num_total_anchors).
+ label_weights (Tensor): Label weights of each anchor with shape
+ (N, num_total_anchors)
+ bbox_targets (Tensor): BBox regression targets of each anchor with
+ shape (N, num_total_anchors, 4).
+ bbox_weights (Tensor): BBox regression loss weights of each anchor
+ with shape (N, num_total_anchors, 4).
+ num_total_samples (int): If sampling is used, the total number of
+ samples equals the number of total anchors; otherwise, it is
+ the number of positive anchors.
+
+ Returns:
+ tuple[Tensor, Tensor]: Classification loss and regression loss.
+ """
+ # classification loss
+ labels = labels.reshape(-1)
+ label_weights = label_weights.reshape(-1)
+ cls_score = cls_score.permute(0, 2, 3,
+ 1).reshape(-1, self.cls_out_channels)
+ loss_cls = self.loss_cls(
+ cls_score, labels, label_weights, avg_factor=num_total_samples)
+ # regression loss
+ bbox_targets = bbox_targets.reshape(-1, 4)
+ bbox_weights = bbox_weights.reshape(-1, 4)
+ bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4)
+ if self.reg_decoded_bbox:
+ anchors = anchors.reshape(-1, 4)
+ bbox_pred = self.bbox_coder.decode(anchors, bbox_pred)
+ loss_bbox = self.loss_bbox(
+ bbox_pred,
+ bbox_targets,
+ bbox_weights,
+ avg_factor=num_total_samples)
+ return loss_cls, loss_bbox
+
+ @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+ def loss(self,
+ cls_scores,
+ bbox_preds,
+ gt_bboxes,
+ gt_labels,
+ img_metas,
+ gt_bboxes_ignore=None):
+ """Compute losses of the head.
+
+ Args:
+ cls_scores (list[Tensor]): Box scores for each scale level
+ Has shape (N, num_anchors * num_classes, H, W)
+ bbox_preds (list[Tensor]): Box energies / deltas for each scale
+ level with shape (N, num_anchors * 4, H, W)
+ gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+ shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels (list[Tensor]): class indices corresponding to each box
+ img_metas (list[dict]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+ gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+ boxes can be ignored when computing the loss. Default: None
+
+ Returns:
+ dict[str, Tensor]: A dictionary of loss components.
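The `loss` method below (like `get_targets` above) relies on `mmdet.core.images_to_levels` to regroup concatenated per-image targets into per-level tensors. A rough sketch of that regrouping, assuming it behaves like the mmdet utility (the real function may differ in small details such as squeezing):

```python
import torch

# Per-image tensors over all anchors are stacked, then split back into
# per-level tensors along the anchor axis.
def images_to_levels(target, num_level_anchors):
    target = torch.stack(target, 0)  # (num_imgs, total_anchors, ...)
    level_targets, start = [], 0
    for n in num_level_anchors:
        level_targets.append(target[:, start:start + n])
        start += n
    return level_targets


per_image = [torch.zeros(6, 4), torch.zeros(6, 4)]  # 2 images, 6 anchors
levels = images_to_levels(per_image, [4, 2])        # two levels: 4 + 2
assert levels[0].shape == (2, 4, 4) and levels[1].shape == (2, 2, 4)
```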
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors and flags to a single tensor + concat_anchor_list = [] + for i in range(len(anchor_list)): + concat_anchor_list.append(torch.cat(anchor_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + losses_cls, losses_bbox = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + all_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + return dict(loss_cls=losses_cls, loss_bbox=losses_bbox) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. The second item is a + (n,) tensor where each item is the predicted class labelof the + corresponding box. 
+ + Example: + >>> import mmcv + >>> self = AnchorHead( + >>> num_classes=9, + >>> in_channels=1, + >>> anchor_generator=dict( + >>> type='AnchorGenerator', + >>> scales=[8], + >>> ratios=[0.5, 1.0, 2.0], + >>> strides=[4,])) + >>> img_metas = [{'img_shape': (32, 32, 3), 'scale_factor': 1}] + >>> cfg = mmcv.Config(dict( + >>> score_thr=0.00, + >>> nms=dict(type='nms', iou_thr=1.0), + >>> max_per_img=10)) + >>> feat = torch.rand(1, 1, 3, 3) + >>> cls_score, bbox_pred = self.forward_single(feat) + >>> # note the input lists are over different levels, not images + >>> cls_scores, bbox_preds = [cls_score], [bbox_pred] + >>> result_list = self.get_bboxes(cls_scores, bbox_preds, + >>> img_metas, cfg) + >>> det_bboxes, det_labels = result_list[0] + >>> assert len(result_list) == 1 + >>> assert det_bboxes.shape[1] == 5 + >>> assert len(det_bboxes) == len(det_labels) == cfg.max_per_img + """ + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + if with_nms: + # some heads don't support with_nms argument + proposals = self._get_bboxes_single(cls_score_list, + bbox_pred_list, + mlvl_anchors, img_shape, + scale_factor, cfg, rescale) + else: + proposals = self._get_bboxes_single(cls_score_list, + bbox_pred_list, + mlvl_anchors, img_shape, + scale_factor, cfg, rescale, + with_nms) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_score_list, + bbox_pred_list, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_score_list (list[Tensor]): Box scores for a single scale level + Has shape (num_anchors * num_classes, H, W). + bbox_pred_list (list[Tensor]): Box energies / deltas for a single + scale level with shape (num_anchors * 4, H, W). + mlvl_anchors (list[Tensor]): Box reference for a single scale level + with shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + Tensor: Labeled boxes in shape (n, 5), where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. 
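The single-image path below caps the number of candidates per level before running NMS. A toy sketch of that `nms_pre` top-k step, ranking anchors by their best foreground score:

```python
import torch

# 3 anchors, 2 foreground classes; keep only the nms_pre strongest anchors.
scores = torch.tensor([[0.1, 0.9], [0.8, 0.2], [0.3, 0.3]])
bbox_pred = torch.zeros(3, 4)
nms_pre = 2
max_scores, _ = scores.max(dim=1)        # best class score per anchor
_, topk_inds = max_scores.topk(nms_pre)  # indices of the top-k anchors
scores = scores[topk_inds, :]
bbox_pred = bbox_pred[topk_inds, :]
assert scores.shape[0] == nms_pre  # only the 2 strongest anchors survive
```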
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_score_list) == len(bbox_pred_list) == len(mlvl_anchors) + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, anchors in zip(cls_score_list, + bbox_pred_list, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + # Get maximum scores for foreground classes. + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + + if with_nms: + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores + + def aug_test(self, feats, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + feats (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains features for all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[ndarray]: bbox results of each class + """ + return self.aug_test_bboxes(feats, img_metas, rescale=rescale) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/atss_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/atss_head.py new file mode 100644 index 0000000000000000000000000000000000000000..7649386293557dd14f7cb70a5b681a2609b9c97b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/atss_head.py @@ -0,0 +1,650 @@ +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, Scale, bias_init_with_prob, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, build_assigner, build_sampler, + images_to_levels, multi_apply, multiclass_nms, + reduce_mean, unmap) +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + +EPS = 1e-12 + + +@HEADS.register_module() +class ATSSHead(AnchorHead): + """Bridging the Gap Between Anchor-based and Anchor-free Detection via + Adaptive Training Sample Selection. 
+
+ ATSS head structure is similar to FCOS, but ATSS uses anchor boxes and
+ assigns labels via Adaptive Training Sample Selection instead of max-IoU
+ assignment.
+
+ https://arxiv.org/abs/1912.02424
+ """
+
+ def __init__(self,
+ num_classes,
+ in_channels,
+ stacked_convs=4,
+ conv_cfg=None,
+ norm_cfg=dict(type='GN', num_groups=32, requires_grad=True),
+ loss_centerness=dict(
+ type='CrossEntropyLoss',
+ use_sigmoid=True,
+ loss_weight=1.0),
+ **kwargs):
+ self.stacked_convs = stacked_convs
+ self.conv_cfg = conv_cfg
+ self.norm_cfg = norm_cfg
+ super(ATSSHead, self).__init__(num_classes, in_channels, **kwargs)
+
+ self.sampling = False
+ if self.train_cfg:
+ self.assigner = build_assigner(self.train_cfg.assigner)
+ # ATSS uses sampling=False, so a PseudoSampler is enough
+ sampler_cfg = dict(type='PseudoSampler')
+ self.sampler = build_sampler(sampler_cfg, context=self)
+ self.loss_centerness = build_loss(loss_centerness)
+
+ def _init_layers(self):
+ """Initialize layers of the head."""
+ self.relu = nn.ReLU(inplace=True)
+ self.cls_convs = nn.ModuleList()
+ self.reg_convs = nn.ModuleList()
+ for i in range(self.stacked_convs):
+ chn = self.in_channels if i == 0 else self.feat_channels
+ self.cls_convs.append(
+ ConvModule(
+ chn,
+ self.feat_channels,
+ 3,
+ stride=1,
+ padding=1,
+ conv_cfg=self.conv_cfg,
+ norm_cfg=self.norm_cfg))
+ self.reg_convs.append(
+ ConvModule(
+ chn,
+ self.feat_channels,
+ 3,
+ stride=1,
+ padding=1,
+ conv_cfg=self.conv_cfg,
+ norm_cfg=self.norm_cfg))
+ self.atss_cls = nn.Conv2d(
+ self.feat_channels,
+ self.num_anchors * self.cls_out_channels,
+ 3,
+ padding=1)
+ self.atss_reg = nn.Conv2d(
+ self.feat_channels, self.num_anchors * 4, 3, padding=1)
+ self.atss_centerness = nn.Conv2d(
+ self.feat_channels, self.num_anchors * 1, 3, padding=1)
+ self.scales = nn.ModuleList(
+ [Scale(1.0) for _ in self.anchor_generator.strides])
+
+ def init_weights(self):
+ """Initialize weights of the head."""
+ for m in self.cls_convs:
+ normal_init(m.conv, std=0.01)
+ for m in self.reg_convs:
+ normal_init(m.conv, std=0.01)
+ bias_cls = bias_init_with_prob(0.01)
+ normal_init(self.atss_cls, std=0.01, bias=bias_cls)
+ normal_init(self.atss_reg, std=0.01)
+ normal_init(self.atss_centerness, std=0.01)
+
+ def forward(self, feats):
+ """Forward features from the upstream network.
+
+ Args:
+ feats (tuple[Tensor]): Features from the upstream network, each is
+ a 4D-tensor.
+
+ Returns:
+ tuple: Usually a tuple of classification scores and bbox prediction
+ cls_scores (list[Tensor]): Classification scores for all scale
+ levels, each is a 4D-tensor, the channels number is
+ num_anchors * num_classes.
+ bbox_preds (list[Tensor]): Box energies / deltas for all scale
+ levels, each is a 4D-tensor, the channels number is
+ num_anchors * 4.
+ """
+ return multi_apply(self.forward_single, feats, self.scales)
+
+ def forward_single(self, x, scale):
+ """Forward feature of a single scale level.
+
+ Args:
+ x (Tensor): Features of a single scale level.
+ scale (:obj:`mmcv.cnn.Scale`): Learnable scale module to resize
+ the bbox prediction.
+
+ Returns:
+ tuple:
+ cls_score (Tensor): Cls scores for a single scale level
+ the channels number is num_anchors * num_classes.
+ bbox_pred (Tensor): Box energies / deltas for a single scale
+ level, the channels number is num_anchors * 4.
+ centerness (Tensor): Centerness for a single scale level, the
+ channel number is (N, num_anchors * 1, H, W).
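The per-level `Scale` module used above is a single learnable scalar. A sketch mirroring what `mmcv.cnn.Scale` does:

```python
import torch
import torch.nn as nn

# One trainable scalar multiplied onto the input; initialized so the
# module is an identity, letting each FPN level learn its own range.
class Scale(nn.Module):
    def __init__(self, scale=1.0):
        super().__init__()
        self.scale = nn.Parameter(torch.tensor(scale, dtype=torch.float))

    def forward(self, x):
        return x * self.scale


s = Scale(1.0)
assert torch.allclose(s(torch.ones(2, 2)), torch.ones(2, 2))
```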
+ """ + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.atss_cls(cls_feat) + # we just follow atss, not apply exp in bbox_pred + bbox_pred = scale(self.atss_reg(reg_feat)).float() + centerness = self.atss_centerness(reg_feat) + return cls_score, bbox_pred, centerness + + def loss_single(self, anchors, cls_score, bbox_pred, centerness, labels, + label_weights, bbox_targets, num_total_samples): + """Compute loss of a single scale level. + + Args: + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + anchors (Tensor): Box reference for each scale level with shape + (N, num_total_anchors, 4). + labels (Tensor): Labels of each anchors with shape + (N, num_total_anchors). + label_weights (Tensor): Label weights of each anchor with shape + (N, num_total_anchors) + bbox_targets (Tensor): BBox regression targets of each anchor wight + shape (N, num_total_anchors, 4). + num_total_samples (int): Number os positive samples that is + reduced over all GPUs. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + + anchors = anchors.reshape(-1, 4) + cls_score = cls_score.permute(0, 2, 3, 1).reshape( + -1, self.cls_out_channels).contiguous() + bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + centerness = centerness.permute(0, 2, 3, 1).reshape(-1) + bbox_targets = bbox_targets.reshape(-1, 4) + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + + # classification loss + loss_cls = self.loss_cls( + cls_score, labels, label_weights, avg_factor=num_total_samples) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((labels >= 0) + & (labels < bg_class_ind)).nonzero().squeeze(1) + + if len(pos_inds) > 0: + pos_bbox_targets = bbox_targets[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_anchors = anchors[pos_inds] + pos_centerness = centerness[pos_inds] + + centerness_targets = self.centerness_target( + pos_anchors, pos_bbox_targets) + pos_decode_bbox_pred = self.bbox_coder.decode( + pos_anchors, pos_bbox_pred) + pos_decode_bbox_targets = self.bbox_coder.decode( + pos_anchors, pos_bbox_targets) + + # regression loss + loss_bbox = self.loss_bbox( + pos_decode_bbox_pred, + pos_decode_bbox_targets, + weight=centerness_targets, + avg_factor=1.0) + + # centerness loss + loss_centerness = self.loss_centerness( + pos_centerness, + centerness_targets, + avg_factor=num_total_samples) + + else: + loss_bbox = bbox_pred.sum() * 0 + loss_centerness = centerness.sum() * 0 + centerness_targets = bbox_targets.new_tensor(0.) + + return loss_cls, loss_bbox, loss_centerness, centerness_targets.sum() + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def loss(self, + cls_scores, + bbox_preds, + centernesses, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. 
+ + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + centernesses (list[Tensor]): Centerness for each scale + level with shape (N, num_anchors * 1, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + num_total_samples = reduce_mean( + torch.tensor(num_total_pos).cuda()).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_cls, losses_bbox, loss_centerness,\ + bbox_avg_factor = multi_apply( + self.loss_single, + anchor_list, + cls_scores, + bbox_preds, + centernesses, + labels_list, + label_weights_list, + bbox_targets_list, + num_total_samples=num_total_samples) + + bbox_avg_factor = sum(bbox_avg_factor) + bbox_avg_factor = reduce_mean(bbox_avg_factor).item() + if bbox_avg_factor < EPS: + bbox_avg_factor = 1 + losses_bbox = list(map(lambda x: x / bbox_avg_factor, losses_bbox)) + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + loss_centerness=loss_centerness) + + def centerness_target(self, anchors, bbox_targets): + # only calculate pos centerness targets, otherwise there may be nan + gts = self.bbox_coder.decode(anchors, bbox_targets) + anchors_cx = (anchors[:, 2] + anchors[:, 0]) / 2 + anchors_cy = (anchors[:, 3] + anchors[:, 1]) / 2 + l_ = anchors_cx - gts[:, 0] + t_ = anchors_cy - gts[:, 1] + r_ = gts[:, 2] - anchors_cx + b_ = gts[:, 3] - anchors_cy + + left_right = torch.stack([l_, r_], dim=1) + top_bottom = torch.stack([t_, b_], dim=1) + centerness = torch.sqrt( + (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * + (top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0])) + assert not torch.isnan(centerness).any() + return centerness + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + with shape (N, num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + centernesses (list[Tensor]): Centerness for each scale level with + shape (N, num_anchors * 1, H, W). 
+ img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. The second item is a + (n,) tensor where each item is the predicted class label of the + corresponding box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + centerness_pred_list = [ + centernesses[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, bbox_pred_list, + centerness_pred_list, + mlvl_anchors, img_shape, + scale_factor, cfg, rescale, + with_nms) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + centernesses, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into labeled boxes. + + Args: + cls_scores (list[Tensor]): Box scores for a single scale level + with shape (num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for a single + scale level with shape (num_anchors * 4, H, W). + centernesses (list[Tensor]): Centerness for a single scale level + with shape (num_anchors * 1, H, W). + mlvl_anchors (list[Tensor]): Box reference for a single scale level + with shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + tuple(Tensor): + det_bboxes (Tensor): BBox predictions in shape (n, 5), where + the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. + det_labels (Tensor): A (n,) tensor where each item is the + predicted class label of the corresponding box. 
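+
+        Example:
+            Illustrative sketch of the score fusion used below (not part of
+            the original docstring): candidates are ranked by
+            `cls_score * centerness` before the `nms_pre` top-k selection,
+            so boxes far from object centers are down-weighted even when
+            their class score is high.
+
+            >>> import torch
+            >>> scores = torch.tensor([[0.9, 0.1], [0.6, 0.2]])
+            >>> centerness = torch.tensor([0.2, 0.9])
+            >>> (scores * centerness[:, None]).max(dim=1)[0]
+            tensor([0.1800, 0.5400])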
+ """ + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_centerness = [] + for cls_score, bbox_pred, centerness, anchors in zip( + cls_scores, bbox_preds, centernesses, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + centerness = centerness.permute(1, 2, 0).reshape(-1).sigmoid() + + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + max_scores, _ = (scores * centerness[:, None]).max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + centerness = centerness[topk_inds] + + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_centerness.append(centerness) + + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + mlvl_centerness = torch.cat(mlvl_centerness) + + if with_nms: + det_bboxes, det_labels = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_centerness) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores, mlvl_centerness + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): + """Get targets for ATSS head. + + This method is almost the same as `AnchorHead.get_targets()`. Besides + returning the targets as the parent method does, it also returns the + anchors as the first element of the returned tuple. + """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + num_level_anchors_list = [num_level_anchors] * num_imgs + + # concat all level anchors and flags to a single tensor + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + anchor_list[i] = torch.cat(anchor_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_anchors, all_labels, all_label_weights, all_bbox_targets, + all_bbox_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + anchor_list, + valid_flag_list, + num_level_anchors_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. 
multiple levels
+        anchors_list = images_to_levels(all_anchors, num_level_anchors)
+        labels_list = images_to_levels(all_labels, num_level_anchors)
+        label_weights_list = images_to_levels(all_label_weights,
+                                              num_level_anchors)
+        bbox_targets_list = images_to_levels(all_bbox_targets,
+                                             num_level_anchors)
+        bbox_weights_list = images_to_levels(all_bbox_weights,
+                                             num_level_anchors)
+        return (anchors_list, labels_list, label_weights_list,
+                bbox_targets_list, bbox_weights_list, num_total_pos,
+                num_total_neg)
+
+    def _get_target_single(self,
+                           flat_anchors,
+                           valid_flags,
+                           num_level_anchors,
+                           gt_bboxes,
+                           gt_bboxes_ignore,
+                           gt_labels,
+                           img_meta,
+                           label_channels=1,
+                           unmap_outputs=True):
+        """Compute regression, classification targets for anchors in a single
+        image.
+
+        Args:
+            flat_anchors (Tensor): Multi-level anchors of the image, which are
+                concatenated into a single tensor of shape (num_anchors, 4).
+            valid_flags (Tensor): Multi level valid flags of the image,
+                which are concatenated into a single tensor of
+                shape (num_anchors,).
+            num_level_anchors (Tensor): Number of anchors of each scale level.
+            gt_bboxes (Tensor): Ground truth bboxes of the image,
+                shape (num_gts, 4).
+            gt_bboxes_ignore (Tensor): Ground truth bboxes to be
+                ignored, shape (num_ignored_gts, 4).
+            gt_labels (Tensor): Ground truth labels of each box,
+                shape (num_gts,).
+            img_meta (dict): Meta info of the image.
+            label_channels (int): Channel of label.
+            unmap_outputs (bool): Whether to map outputs back to the original
+                set of anchors.
+
+        Returns:
+            tuple: N is the number of total anchors in the image.
+                labels (Tensor): Labels of all anchors in the image with shape
+                    (N,).
+                label_weights (Tensor): Label weights of all anchors in the
+                    image with shape (N,).
+                bbox_targets (Tensor): BBox targets of all anchors in the
+                    image with shape (N, 4).
+                bbox_weights (Tensor): BBox weights of all anchors in the
+                    image with shape (N, 4).
+                pos_inds (Tensor): Indices of positive anchors with shape
+                    (num_pos,).
+                neg_inds (Tensor): Indices of negative anchors with shape
+                    (num_neg,).
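+
+        Example:
+            Illustrative sketch of the `unmap_outputs` step performed below
+            (the `unmap` helper itself comes from mmdet.core; not part of
+            the original docstring): targets computed only for anchors
+            inside the image border are scattered back to the full anchor
+            set, with the rest filled as background.
+
+            >>> import torch
+            >>> inside_flags = torch.tensor([True, False, True])
+            >>> labels_inside = torch.tensor([3, 7])
+            >>> num_classes = 80  # background label since mmdet v2.0
+            >>> full = torch.full((3,), num_classes, dtype=torch.long)
+            >>> full[inside_flags] = labels_inside
+            >>> full
+            tensor([ 3, 80,  7])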
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + num_level_anchors_inside = self.get_num_level_anchors_inside( + num_level_anchors, inside_flags) + assign_result = self.assigner.assign(anchors, num_level_anchors_inside, + gt_bboxes, gt_bboxes_ignore, + gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if hasattr(self, 'bbox_coder'): + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + # used in VFNetHead + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class since v2.5.0 + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + anchors = unmap(anchors, num_total_anchors, inside_flags) + labels = unmap( + labels, num_total_anchors, inside_flags, fill=self.num_classes) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (anchors, labels, label_weights, bbox_targets, bbox_weights, + pos_inds, neg_inds) + + def get_num_level_anchors_inside(self, num_level_anchors, inside_flags): + split_inside_flags = torch.split(inside_flags, num_level_anchors) + num_level_anchors_inside = [ + int(flags.sum()) for flags in split_inside_flags + ] + return num_level_anchors_inside diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/base_dense_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/base_dense_head.py new file mode 100644 index 0000000000000000000000000000000000000000..de11e4a2197b1dfe241ce7a66daa1907a8fc5661 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/base_dense_head.py @@ -0,0 +1,59 @@ +from abc import ABCMeta, abstractmethod + +import torch.nn as nn + + +class BaseDenseHead(nn.Module, metaclass=ABCMeta): + """Base class for DenseHeads.""" + + def __init__(self): + super(BaseDenseHead, self).__init__() + + @abstractmethod + def loss(self, **kwargs): + """Compute losses of the head.""" + pass + + @abstractmethod + def get_bboxes(self, **kwargs): + """Transform network output for a batch into bbox predictions.""" + pass + + def forward_train(self, + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=None, + proposal_cfg=None, + **kwargs): + """ + Args: + x (list[Tensor]): Features from FPN. 
+ img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + proposal_cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used + + Returns: + tuple: + losses: (dict[str, Tensor]): A dictionary of loss components. + proposal_list (list[Tensor]): Proposals of each image. + """ + outs = self(x) + if gt_labels is None: + loss_inputs = outs + (gt_bboxes, img_metas) + else: + loss_inputs = outs + (gt_bboxes, gt_labels, img_metas) + losses = self.loss(*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + if proposal_cfg is None: + return losses + else: + proposal_list = self.get_bboxes(*outs, img_metas, cfg=proposal_cfg) + return losses, proposal_list diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/centripetal_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/centripetal_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6728218b60539a71f6353645635f741a1ad7263d --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/centripetal_head.py @@ -0,0 +1,421 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, normal_init +from mmcv.ops import DeformConv2d + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from .corner_head import CornerHead + + +@HEADS.register_module() +class CentripetalHead(CornerHead): + """Head of CentripetalNet: Pursuing High-quality Keypoint Pairs for Object + Detection. + + CentripetalHead inherits from :class:`CornerHead`. It removes the + embedding branch and adds guiding shift and centripetal shift branches. + More details can be found in the `paper + `_ . + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + num_feat_levels (int): Levels of feature from the previous module. 2 + for HourglassNet-104 and 1 for HourglassNet-52. HourglassNet-104 + outputs the final feature and intermediate supervision feature and + HourglassNet-52 only outputs the final feature. Default: 2. + corner_emb_channels (int): Channel of embedding vector. Default: 1. + train_cfg (dict | None): Training config. Useless in CornerHead, + but we keep this variable for SingleStageDetector. Default: None. + test_cfg (dict | None): Testing config of CornerHead. Default: None. + loss_heatmap (dict | None): Config of corner heatmap loss. Default: + GaussianFocalLoss. + loss_embedding (dict | None): Config of corner embedding loss. Default: + AssociativeEmbeddingLoss. + loss_offset (dict | None): Config of corner offset loss. Default: + SmoothL1Loss. + loss_guiding_shift (dict): Config of guiding shift loss. Default: + SmoothL1Loss. + loss_centripetal_shift (dict): Config of centripetal shift loss. + Default: SmoothL1Loss. 
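+
+    Example:
+        Illustrative relation only (not from the original docstring): a
+        corner plus its predicted centripetal shift should point at the box
+        center, which is how top-left / bottom-right corners are matched at
+        test time. The network actually predicts the shift in log scale;
+        see `get_targets` and `decode_heatmap`, which applies `.exp()`.
+
+        >>> import torch
+        >>> tl = torch.tensor([10., 20.])     # top-left corner (x, y)
+        >>> shift = torch.tensor([15., 5.])   # hypothetical decoded shift
+        >>> tl + shift                        # approximate box center
+        tensor([25., 25.])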
+ """ + + def __init__(self, + *args, + centripetal_shift_channels=2, + guiding_shift_channels=2, + feat_adaption_conv_kernel=3, + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1), + **kwargs): + assert centripetal_shift_channels == 2, ( + 'CentripetalHead only support centripetal_shift_channels == 2') + self.centripetal_shift_channels = centripetal_shift_channels + assert guiding_shift_channels == 2, ( + 'CentripetalHead only support guiding_shift_channels == 2') + self.guiding_shift_channels = guiding_shift_channels + self.feat_adaption_conv_kernel = feat_adaption_conv_kernel + super(CentripetalHead, self).__init__(*args, **kwargs) + self.loss_guiding_shift = build_loss(loss_guiding_shift) + self.loss_centripetal_shift = build_loss(loss_centripetal_shift) + + def _init_centripetal_layers(self): + """Initialize centripetal layers. + + Including feature adaption deform convs (feat_adaption), deform offset + prediction convs (dcn_off), guiding shift (guiding_shift) and + centripetal shift ( centripetal_shift). Each branch has two parts: + prefix `tl_` for top-left and `br_` for bottom-right. + """ + self.tl_feat_adaption = nn.ModuleList() + self.br_feat_adaption = nn.ModuleList() + self.tl_dcn_offset = nn.ModuleList() + self.br_dcn_offset = nn.ModuleList() + self.tl_guiding_shift = nn.ModuleList() + self.br_guiding_shift = nn.ModuleList() + self.tl_centripetal_shift = nn.ModuleList() + self.br_centripetal_shift = nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_feat_adaption.append( + DeformConv2d(self.in_channels, self.in_channels, + self.feat_adaption_conv_kernel, 1, 1)) + self.br_feat_adaption.append( + DeformConv2d(self.in_channels, self.in_channels, + self.feat_adaption_conv_kernel, 1, 1)) + + self.tl_guiding_shift.append( + self._make_layers( + out_channels=self.guiding_shift_channels, + in_channels=self.in_channels)) + self.br_guiding_shift.append( + self._make_layers( + out_channels=self.guiding_shift_channels, + in_channels=self.in_channels)) + + self.tl_dcn_offset.append( + ConvModule( + self.guiding_shift_channels, + self.feat_adaption_conv_kernel**2 * + self.guiding_shift_channels, + 1, + bias=False, + act_cfg=None)) + self.br_dcn_offset.append( + ConvModule( + self.guiding_shift_channels, + self.feat_adaption_conv_kernel**2 * + self.guiding_shift_channels, + 1, + bias=False, + act_cfg=None)) + + self.tl_centripetal_shift.append( + self._make_layers( + out_channels=self.centripetal_shift_channels, + in_channels=self.in_channels)) + self.br_centripetal_shift.append( + self._make_layers( + out_channels=self.centripetal_shift_channels, + in_channels=self.in_channels)) + + def _init_layers(self): + """Initialize layers for CentripetalHead. 
+ + Including two parts: CornerHead layers and CentripetalHead layers + """ + super()._init_layers() # using _init_layers in CornerHead + self._init_centripetal_layers() + + def init_weights(self): + """Initialize weights of the head.""" + super().init_weights() + for i in range(self.num_feat_levels): + normal_init(self.tl_feat_adaption[i], std=0.01) + normal_init(self.br_feat_adaption[i], std=0.01) + normal_init(self.tl_dcn_offset[i].conv, std=0.1) + normal_init(self.br_dcn_offset[i].conv, std=0.1) + _ = [x.conv.reset_parameters() for x in self.tl_guiding_shift[i]] + _ = [x.conv.reset_parameters() for x in self.br_guiding_shift[i]] + _ = [ + x.conv.reset_parameters() for x in self.tl_centripetal_shift[i] + ] + _ = [ + x.conv.reset_parameters() for x in self.br_centripetal_shift[i] + ] + + def forward_single(self, x, lvl_ind): + """Forward feature of a single level. + + Args: + x (Tensor): Feature of a single level. + lvl_ind (int): Level index of current feature. + + Returns: + tuple[Tensor]: A tuple of CentripetalHead's output for current + feature level. Containing the following Tensors: + + - tl_heat (Tensor): Predicted top-left corner heatmap. + - br_heat (Tensor): Predicted bottom-right corner heatmap. + - tl_off (Tensor): Predicted top-left offset heatmap. + - br_off (Tensor): Predicted bottom-right offset heatmap. + - tl_guiding_shift (Tensor): Predicted top-left guiding shift + heatmap. + - br_guiding_shift (Tensor): Predicted bottom-right guiding + shift heatmap. + - tl_centripetal_shift (Tensor): Predicted top-left centripetal + shift heatmap. + - br_centripetal_shift (Tensor): Predicted bottom-right + centripetal shift heatmap. + """ + tl_heat, br_heat, _, _, tl_off, br_off, tl_pool, br_pool = super( + ).forward_single( + x, lvl_ind, return_pool=True) + + tl_guiding_shift = self.tl_guiding_shift[lvl_ind](tl_pool) + br_guiding_shift = self.br_guiding_shift[lvl_ind](br_pool) + + tl_dcn_offset = self.tl_dcn_offset[lvl_ind](tl_guiding_shift.detach()) + br_dcn_offset = self.br_dcn_offset[lvl_ind](br_guiding_shift.detach()) + + tl_feat_adaption = self.tl_feat_adaption[lvl_ind](tl_pool, + tl_dcn_offset) + br_feat_adaption = self.br_feat_adaption[lvl_ind](br_pool, + br_dcn_offset) + + tl_centripetal_shift = self.tl_centripetal_shift[lvl_ind]( + tl_feat_adaption) + br_centripetal_shift = self.br_centripetal_shift[lvl_ind]( + br_feat_adaption) + + result_list = [ + tl_heat, br_heat, tl_off, br_off, tl_guiding_shift, + br_guiding_shift, tl_centripetal_shift, br_centripetal_shift + ] + return result_list + + def loss(self, + tl_heats, + br_heats, + tl_offs, + br_offs, + tl_guiding_shifts, + br_guiding_shifts, + tl_centripetal_shifts, + br_centripetal_shifts, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + tl_guiding_shifts (list[Tensor]): Top-left guiding shifts for each + level with shape (N, guiding_shift_channels, H, W). + br_guiding_shifts (list[Tensor]): Bottom-right guiding shifts for + each level with shape (N, guiding_shift_channels, H, W). 
+            tl_centripetal_shifts (list[Tensor]): Top-left centripetal shifts
+                for each level with shape (N, centripetal_shift_channels, H,
+                W).
+            br_centripetal_shifts (list[Tensor]): Bottom-right centripetal
+                shifts for each level with shape (N,
+                centripetal_shift_channels, H, W).
+            gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+                shape (num_gts, 4) in [left, top, right, bottom] format.
+            gt_labels (list[Tensor]): Class indices corresponding to each box.
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (list[Tensor] | None): Specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components. Containing the
+                following losses:
+
+                - det_loss (list[Tensor]): Corner keypoint losses of all
+                  feature levels.
+                - off_loss (list[Tensor]): Corner offset losses of all feature
+                  levels.
+                - guiding_loss (list[Tensor]): Guiding shift losses of all
+                  feature levels.
+                - centripetal_loss (list[Tensor]): Centripetal shift losses of
+                  all feature levels.
+        """
+        targets = self.get_targets(
+            gt_bboxes,
+            gt_labels,
+            tl_heats[-1].shape,
+            img_metas[0]['pad_shape'],
+            with_corner_emb=self.with_corner_emb,
+            with_guiding_shift=True,
+            with_centripetal_shift=True)
+        mlvl_targets = [targets for _ in range(self.num_feat_levels)]
+        [det_losses, off_losses, guiding_losses, centripetal_losses
+         ] = multi_apply(self.loss_single, tl_heats, br_heats, tl_offs,
+                         br_offs, tl_guiding_shifts, br_guiding_shifts,
+                         tl_centripetal_shifts, br_centripetal_shifts,
+                         mlvl_targets)
+        loss_dict = dict(
+            det_loss=det_losses,
+            off_loss=off_losses,
+            guiding_loss=guiding_losses,
+            centripetal_loss=centripetal_losses)
+        return loss_dict
+
+    def loss_single(self, tl_hmp, br_hmp, tl_off, br_off, tl_guiding_shift,
+                    br_guiding_shift, tl_centripetal_shift,
+                    br_centripetal_shift, targets):
+        """Compute losses for a single level.
+
+        Args:
+            tl_hmp (Tensor): Top-left corner heatmap for current level with
+                shape (N, num_classes, H, W).
+            br_hmp (Tensor): Bottom-right corner heatmap for current level with
+                shape (N, num_classes, H, W).
+            tl_off (Tensor): Top-left corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            br_off (Tensor): Bottom-right corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            tl_guiding_shift (Tensor): Top-left guiding shift for current level
+                with shape (N, guiding_shift_channels, H, W).
+            br_guiding_shift (Tensor): Bottom-right guiding shift for current
+                level with shape (N, guiding_shift_channels, H, W).
+            tl_centripetal_shift (Tensor): Top-left centripetal shift for
+                current level with shape (N, centripetal_shift_channels, H, W).
+            br_centripetal_shift (Tensor): Bottom-right centripetal shift for
+                current level with shape (N, centripetal_shift_channels, H, W).
+            targets (dict): Corner target generated by `get_targets`.
+
+        Returns:
+            tuple[torch.Tensor]: Losses of the head's different branches
+                containing the following losses:
+
+                - det_loss (Tensor): Corner keypoint loss.
+                - off_loss (Tensor): Corner offset loss.
+                - guiding_loss (Tensor): Guiding shift loss.
+                - centripetal_loss (Tensor): Centripetal shift loss.
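+
+        Example:
+            Illustrative sketch (not part of the original docstring) of the
+            class-agnostic corner mask built below: shift losses are only
+            computed at positions whose heatmap ground truth equals 1 in
+            any class channel.
+
+            >>> import torch
+            >>> gt = torch.zeros(1, 2, 2, 2)  # (N, num_classes, H, W)
+            >>> gt[0, 1, 0, 1] = 1.
+            >>> gt.eq(1).sum(1).gt(0).unsqueeze(1).type_as(gt)
+            tensor([[[[0., 1.],
+                      [0., 0.]]]])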
+ """ + targets['corner_embedding'] = None + + det_loss, _, _, off_loss = super().loss_single(tl_hmp, br_hmp, None, + None, tl_off, br_off, + targets) + + gt_tl_guiding_shift = targets['topleft_guiding_shift'] + gt_br_guiding_shift = targets['bottomright_guiding_shift'] + gt_tl_centripetal_shift = targets['topleft_centripetal_shift'] + gt_br_centripetal_shift = targets['bottomright_centripetal_shift'] + + gt_tl_heatmap = targets['topleft_heatmap'] + gt_br_heatmap = targets['bottomright_heatmap'] + # We only compute the offset loss at the real corner position. + # The value of real corner would be 1 in heatmap ground truth. + # The mask is computed in class agnostic mode and its shape is + # batch * 1 * width * height. + tl_mask = gt_tl_heatmap.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_tl_heatmap) + br_mask = gt_br_heatmap.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_br_heatmap) + + # Guiding shift loss + tl_guiding_loss = self.loss_guiding_shift( + tl_guiding_shift, + gt_tl_guiding_shift, + tl_mask, + avg_factor=tl_mask.sum()) + br_guiding_loss = self.loss_guiding_shift( + br_guiding_shift, + gt_br_guiding_shift, + br_mask, + avg_factor=br_mask.sum()) + guiding_loss = (tl_guiding_loss + br_guiding_loss) / 2.0 + # Centripetal shift loss + tl_centripetal_loss = self.loss_centripetal_shift( + tl_centripetal_shift, + gt_tl_centripetal_shift, + tl_mask, + avg_factor=tl_mask.sum()) + br_centripetal_loss = self.loss_centripetal_shift( + br_centripetal_shift, + gt_br_centripetal_shift, + br_mask, + avg_factor=br_mask.sum()) + centripetal_loss = (tl_centripetal_loss + br_centripetal_loss) / 2.0 + + return det_loss, off_loss, guiding_loss, centripetal_loss + + def get_bboxes(self, + tl_heats, + br_heats, + tl_offs, + br_offs, + tl_guiding_shifts, + br_guiding_shifts, + tl_centripetal_shifts, + br_centripetal_shifts, + img_metas, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + tl_guiding_shifts (list[Tensor]): Top-left guiding shifts for each + level with shape (N, guiding_shift_channels, H, W). Useless in + this function, we keep this arg because it's the raw output + from CentripetalHead. + br_guiding_shifts (list[Tensor]): Bottom-right guiding shifts for + each level with shape (N, guiding_shift_channels, H, W). + Useless in this function, we keep this arg because it's the + raw output from CentripetalHead. + tl_centripetal_shifts (list[Tensor]): Top-left centripetal shifts + for each level with shape (N, centripetal_shift_channels, H, + W). + br_centripetal_shifts (list[Tensor]): Bottom-right centripetal + shifts for each level with shape (N, + centripetal_shift_channels, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. 
+ """ + assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas) + result_list = [] + for img_id in range(len(img_metas)): + result_list.append( + self._get_bboxes_single( + tl_heats[-1][img_id:img_id + 1, :], + br_heats[-1][img_id:img_id + 1, :], + tl_offs[-1][img_id:img_id + 1, :], + br_offs[-1][img_id:img_id + 1, :], + img_metas[img_id], + tl_emb=None, + br_emb=None, + tl_centripetal_shift=tl_centripetal_shifts[-1][ + img_id:img_id + 1, :], + br_centripetal_shift=br_centripetal_shifts[-1][ + img_id:img_id + 1, :], + rescale=rescale, + with_nms=with_nms)) + + return result_list diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/corner_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/corner_head.py new file mode 100644 index 0000000000000000000000000000000000000000..cdaeca43188e0eeb8302bf9cc66933ed12a8e801 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/corner_head.py @@ -0,0 +1,1064 @@ +from math import ceil, log + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, bias_init_with_prob +from mmcv.ops import CornerPool, batched_nms + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from ..utils import gaussian_radius, gen_gaussian_target +from .base_dense_head import BaseDenseHead + + +class BiCornerPool(nn.Module): + """Bidirectional Corner Pooling Module (TopLeft, BottomRight, etc.) + + Args: + in_channels (int): Input channels of module. + out_channels (int): Output channels of module. + feat_channels (int): Feature channels of module. + directions (list[str]): Directions of two CornerPools. + norm_cfg (dict): Dictionary to construct and config norm layer. + """ + + def __init__(self, + in_channels, + directions, + feat_channels=128, + out_channels=128, + norm_cfg=dict(type='BN', requires_grad=True)): + super(BiCornerPool, self).__init__() + self.direction1_conv = ConvModule( + in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg) + self.direction2_conv = ConvModule( + in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg) + + self.aftpool_conv = ConvModule( + feat_channels, + out_channels, + 3, + padding=1, + norm_cfg=norm_cfg, + act_cfg=None) + + self.conv1 = ConvModule( + in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None) + self.conv2 = ConvModule( + in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg) + + self.direction1_pool = CornerPool(directions[0]) + self.direction2_pool = CornerPool(directions[1]) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + """Forward features from the upstream network. + + Args: + x (tensor): Input feature of BiCornerPool. + + Returns: + conv2 (tensor): Output feature of BiCornerPool. + """ + direction1_conv = self.direction1_conv(x) + direction2_conv = self.direction2_conv(x) + direction1_feat = self.direction1_pool(direction1_conv) + direction2_feat = self.direction2_pool(direction2_conv) + aftpool_conv = self.aftpool_conv(direction1_feat + direction2_feat) + conv1 = self.conv1(x) + relu = self.relu(aftpool_conv + conv1) + conv2 = self.conv2(relu) + return conv2 + + +@HEADS.register_module() +class CornerHead(BaseDenseHead): + """Head of CornerNet: Detecting Objects as Paired Keypoints. + + Code is modified from the `official github repo + `_ . + + More details can be found in the `paper + `_ . + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. 
+ num_feat_levels (int): Levels of feature from the previous module. 2 + for HourglassNet-104 and 1 for HourglassNet-52. Because + HourglassNet-104 outputs the final feature and intermediate + supervision feature and HourglassNet-52 only outputs the final + feature. Default: 2. + corner_emb_channels (int): Channel of embedding vector. Default: 1. + train_cfg (dict | None): Training config. Useless in CornerHead, + but we keep this variable for SingleStageDetector. Default: None. + test_cfg (dict | None): Testing config of CornerHead. Default: None. + loss_heatmap (dict | None): Config of corner heatmap loss. Default: + GaussianFocalLoss. + loss_embedding (dict | None): Config of corner embedding loss. Default: + AssociativeEmbeddingLoss. + loss_offset (dict | None): Config of corner offset loss. Default: + SmoothL1Loss. + """ + + def __init__(self, + num_classes, + in_channels, + num_feat_levels=2, + corner_emb_channels=1, + train_cfg=None, + test_cfg=None, + loss_heatmap=dict( + type='GaussianFocalLoss', + alpha=2.0, + gamma=4.0, + loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.25, + push_weight=0.25), + loss_offset=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1)): + super(CornerHead, self).__init__() + self.num_classes = num_classes + self.in_channels = in_channels + self.corner_emb_channels = corner_emb_channels + self.with_corner_emb = self.corner_emb_channels > 0 + self.corner_offset_channels = 2 + self.num_feat_levels = num_feat_levels + self.loss_heatmap = build_loss( + loss_heatmap) if loss_heatmap is not None else None + self.loss_embedding = build_loss( + loss_embedding) if loss_embedding is not None else None + self.loss_offset = build_loss( + loss_offset) if loss_offset is not None else None + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + self._init_layers() + + def _make_layers(self, out_channels, in_channels=256, feat_channels=256): + """Initialize conv sequential for CornerHead.""" + return nn.Sequential( + ConvModule(in_channels, feat_channels, 3, padding=1), + ConvModule( + feat_channels, out_channels, 1, norm_cfg=None, act_cfg=None)) + + def _init_corner_kpt_layers(self): + """Initialize corner keypoint layers. + + Including corner heatmap branch and corner offset branch. Each branch + has two parts: prefix `tl_` for top-left and `br_` for bottom-right. + """ + self.tl_pool, self.br_pool = nn.ModuleList(), nn.ModuleList() + self.tl_heat, self.br_heat = nn.ModuleList(), nn.ModuleList() + self.tl_off, self.br_off = nn.ModuleList(), nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_pool.append( + BiCornerPool( + self.in_channels, ['top', 'left'], + out_channels=self.in_channels)) + self.br_pool.append( + BiCornerPool( + self.in_channels, ['bottom', 'right'], + out_channels=self.in_channels)) + + self.tl_heat.append( + self._make_layers( + out_channels=self.num_classes, + in_channels=self.in_channels)) + self.br_heat.append( + self._make_layers( + out_channels=self.num_classes, + in_channels=self.in_channels)) + + self.tl_off.append( + self._make_layers( + out_channels=self.corner_offset_channels, + in_channels=self.in_channels)) + self.br_off.append( + self._make_layers( + out_channels=self.corner_offset_channels, + in_channels=self.in_channels)) + + def _init_corner_emb_layers(self): + """Initialize corner embedding layers. + + Only include corner embedding branch with two parts: prefix `tl_` for + top-left and `br_` for bottom-right. 
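+
+        Example:
+            Why 1-D embeddings (illustrative note, not part of the original
+            docstring): corners of the same object are trained to emit
+            similar scalars and corners of different objects dissimilar
+            ones, so test-time pairing reduces to comparing the embedding
+            distance against `distance_threshold`.
+
+            >>> import torch
+            >>> tl_emb, br_emb = torch.tensor(0.52), torch.tensor(0.49)
+            >>> bool((tl_emb - br_emb).abs() < 0.5)  # same object
+            True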
+ """ + self.tl_emb, self.br_emb = nn.ModuleList(), nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_emb.append( + self._make_layers( + out_channels=self.corner_emb_channels, + in_channels=self.in_channels)) + self.br_emb.append( + self._make_layers( + out_channels=self.corner_emb_channels, + in_channels=self.in_channels)) + + def _init_layers(self): + """Initialize layers for CornerHead. + + Including two parts: corner keypoint layers and corner embedding layers + """ + self._init_corner_kpt_layers() + if self.with_corner_emb: + self._init_corner_emb_layers() + + def init_weights(self): + """Initialize weights of the head.""" + bias_init = bias_init_with_prob(0.1) + for i in range(self.num_feat_levels): + # The initialization of parameters are different between nn.Conv2d + # and ConvModule. Our experiments show that using the original + # initialization of nn.Conv2d increases the final mAP by about 0.2% + self.tl_heat[i][-1].conv.reset_parameters() + self.tl_heat[i][-1].conv.bias.data.fill_(bias_init) + self.br_heat[i][-1].conv.reset_parameters() + self.br_heat[i][-1].conv.bias.data.fill_(bias_init) + self.tl_off[i][-1].conv.reset_parameters() + self.br_off[i][-1].conv.reset_parameters() + if self.with_corner_emb: + self.tl_emb[i][-1].conv.reset_parameters() + self.br_emb[i][-1].conv.reset_parameters() + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of corner heatmaps, offset heatmaps and + embedding heatmaps. + - tl_heats (list[Tensor]): Top-left corner heatmaps for all + levels, each is a 4D-tensor, the channels number is + num_classes. + - br_heats (list[Tensor]): Bottom-right corner heatmaps for all + levels, each is a 4D-tensor, the channels number is + num_classes. + - tl_embs (list[Tensor] | list[None]): Top-left embedding + heatmaps for all levels, each is a 4D-tensor or None. + If not None, the channels number is corner_emb_channels. + - br_embs (list[Tensor] | list[None]): Bottom-right embedding + heatmaps for all levels, each is a 4D-tensor or None. + If not None, the channels number is corner_emb_channels. + - tl_offs (list[Tensor]): Top-left offset heatmaps for all + levels, each is a 4D-tensor. The channels number is + corner_offset_channels. + - br_offs (list[Tensor]): Bottom-right offset heatmaps for all + levels, each is a 4D-tensor. The channels number is + corner_offset_channels. + """ + lvl_ind = list(range(self.num_feat_levels)) + return multi_apply(self.forward_single, feats, lvl_ind) + + def forward_single(self, x, lvl_ind, return_pool=False): + """Forward feature of a single level. + + Args: + x (Tensor): Feature of a single level. + lvl_ind (int): Level index of current feature. + return_pool (bool): Return corner pool feature or not. + + Returns: + tuple[Tensor]: A tuple of CornerHead's output for current feature + level. Containing the following Tensors: + + - tl_heat (Tensor): Predicted top-left corner heatmap. + - br_heat (Tensor): Predicted bottom-right corner heatmap. + - tl_emb (Tensor | None): Predicted top-left embedding heatmap. + None for `self.with_corner_emb == False`. + - br_emb (Tensor | None): Predicted bottom-right embedding + heatmap. None for `self.with_corner_emb == False`. + - tl_off (Tensor): Predicted top-left offset heatmap. + - br_off (Tensor): Predicted bottom-right offset heatmap. + - tl_pool (Tensor): Top-left corner pool feature. Not must + have. 
+ - br_pool (Tensor): Bottom-right corner pool feature. Not must + have. + """ + tl_pool = self.tl_pool[lvl_ind](x) + tl_heat = self.tl_heat[lvl_ind](tl_pool) + br_pool = self.br_pool[lvl_ind](x) + br_heat = self.br_heat[lvl_ind](br_pool) + + tl_emb, br_emb = None, None + if self.with_corner_emb: + tl_emb = self.tl_emb[lvl_ind](tl_pool) + br_emb = self.br_emb[lvl_ind](br_pool) + + tl_off = self.tl_off[lvl_ind](tl_pool) + br_off = self.br_off[lvl_ind](br_pool) + + result_list = [tl_heat, br_heat, tl_emb, br_emb, tl_off, br_off] + if return_pool: + result_list.append(tl_pool) + result_list.append(br_pool) + + return result_list + + def get_targets(self, + gt_bboxes, + gt_labels, + feat_shape, + img_shape, + with_corner_emb=False, + with_guiding_shift=False, + with_centripetal_shift=False): + """Generate corner targets. + + Including corner heatmap, corner offset. + + Optional: corner embedding, corner guiding shift, centripetal shift. + + For CornerNet, we generate corner heatmap, corner offset and corner + embedding from this function. + + For CentripetalNet, we generate corner heatmap, corner offset, guiding + shift and centripetal shift from this function. + + Args: + gt_bboxes (list[Tensor]): Ground truth bboxes of each image, each + has shape (num_gt, 4). + gt_labels (list[Tensor]): Ground truth labels of each box, each has + shape (num_gt,). + feat_shape (list[int]): Shape of output feature, + [batch, channel, height, width]. + img_shape (list[int]): Shape of input image, + [height, width, channel]. + with_corner_emb (bool): Generate corner embedding target or not. + Default: False. + with_guiding_shift (bool): Generate guiding shift target or not. + Default: False. + with_centripetal_shift (bool): Generate centripetal shift target or + not. Default: False. + + Returns: + dict: Ground truth of corner heatmap, corner offset, corner + embedding, guiding shift and centripetal shift. Containing the + following keys: + + - topleft_heatmap (Tensor): Ground truth top-left corner + heatmap. + - bottomright_heatmap (Tensor): Ground truth bottom-right + corner heatmap. + - topleft_offset (Tensor): Ground truth top-left corner offset. + - bottomright_offset (Tensor): Ground truth bottom-right corner + offset. + - corner_embedding (list[list[list[int]]]): Ground truth corner + embedding. Not must have. + - topleft_guiding_shift (Tensor): Ground truth top-left corner + guiding shift. Not must have. + - bottomright_guiding_shift (Tensor): Ground truth bottom-right + corner guiding shift. Not must have. + - topleft_centripetal_shift (Tensor): Ground truth top-left + corner centripetal shift. Not must have. + - bottomright_centripetal_shift (Tensor): Ground truth + bottom-right corner centripetal shift. Not must have. 
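+
+        Example:
+            Illustrative sketch of the image-to-feature coordinate mapping
+            performed below (not part of the original docstring): corner
+            coordinates are scaled by the feature/image ratio, floored to an
+            integer index for the gaussian peak, and the sub-pixel remainder
+            becomes the offset target.
+
+            >>> img_w, feat_w = 511, 128
+            >>> scale_left = 100.0 * (feat_w / img_w)
+            >>> left_idx = int(min(scale_left, feat_w - 1))
+            >>> left_offset = scale_left - left_idx
+            >>> left_idx, round(left_offset, 3)
+            (25, 0.049)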
+ """ + batch_size, _, height, width = feat_shape + img_h, img_w = img_shape[:2] + + width_ratio = float(width / img_w) + height_ratio = float(height / img_h) + + gt_tl_heatmap = gt_bboxes[-1].new_zeros( + [batch_size, self.num_classes, height, width]) + gt_br_heatmap = gt_bboxes[-1].new_zeros( + [batch_size, self.num_classes, height, width]) + gt_tl_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width]) + gt_br_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width]) + + if with_corner_emb: + match = [] + + # Guiding shift is a kind of offset, from center to corner + if with_guiding_shift: + gt_tl_guiding_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + gt_br_guiding_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + # Centripetal shift is also a kind of offset, from center to corner + # and normalized by log. + if with_centripetal_shift: + gt_tl_centripetal_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + gt_br_centripetal_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + + for batch_id in range(batch_size): + # Ground truth of corner embedding per image is a list of coord set + corner_match = [] + for box_id in range(len(gt_labels[batch_id])): + left, top, right, bottom = gt_bboxes[batch_id][box_id] + center_x = (left + right) / 2.0 + center_y = (top + bottom) / 2.0 + label = gt_labels[batch_id][box_id] + + # Use coords in the feature level to generate ground truth + scale_left = left * width_ratio + scale_right = right * width_ratio + scale_top = top * height_ratio + scale_bottom = bottom * height_ratio + scale_center_x = center_x * width_ratio + scale_center_y = center_y * height_ratio + + # Int coords on feature map/ground truth tensor + left_idx = int(min(scale_left, width - 1)) + right_idx = int(min(scale_right, width - 1)) + top_idx = int(min(scale_top, height - 1)) + bottom_idx = int(min(scale_bottom, height - 1)) + + # Generate gaussian heatmap + scale_box_width = ceil(scale_right - scale_left) + scale_box_height = ceil(scale_bottom - scale_top) + radius = gaussian_radius((scale_box_height, scale_box_width), + min_overlap=0.3) + radius = max(0, int(radius)) + gt_tl_heatmap[batch_id, label] = gen_gaussian_target( + gt_tl_heatmap[batch_id, label], [left_idx, top_idx], + radius) + gt_br_heatmap[batch_id, label] = gen_gaussian_target( + gt_br_heatmap[batch_id, label], [right_idx, bottom_idx], + radius) + + # Generate corner offset + left_offset = scale_left - left_idx + top_offset = scale_top - top_idx + right_offset = scale_right - right_idx + bottom_offset = scale_bottom - bottom_idx + gt_tl_offset[batch_id, 0, top_idx, left_idx] = left_offset + gt_tl_offset[batch_id, 1, top_idx, left_idx] = top_offset + gt_br_offset[batch_id, 0, bottom_idx, right_idx] = right_offset + gt_br_offset[batch_id, 1, bottom_idx, + right_idx] = bottom_offset + + # Generate corner embedding + if with_corner_emb: + corner_match.append([[top_idx, left_idx], + [bottom_idx, right_idx]]) + # Generate guiding shift + if with_guiding_shift: + gt_tl_guiding_shift[batch_id, 0, top_idx, + left_idx] = scale_center_x - left_idx + gt_tl_guiding_shift[batch_id, 1, top_idx, + left_idx] = scale_center_y - top_idx + gt_br_guiding_shift[batch_id, 0, bottom_idx, + right_idx] = right_idx - scale_center_x + gt_br_guiding_shift[ + batch_id, 1, bottom_idx, + right_idx] = bottom_idx - scale_center_y + # Generate centripetal shift + if with_centripetal_shift: + gt_tl_centripetal_shift[batch_id, 0, top_idx, + left_idx] = 
log(scale_center_x - + scale_left) + gt_tl_centripetal_shift[batch_id, 1, top_idx, + left_idx] = log(scale_center_y - + scale_top) + gt_br_centripetal_shift[batch_id, 0, bottom_idx, + right_idx] = log(scale_right - + scale_center_x) + gt_br_centripetal_shift[batch_id, 1, bottom_idx, + right_idx] = log(scale_bottom - + scale_center_y) + + if with_corner_emb: + match.append(corner_match) + + target_result = dict( + topleft_heatmap=gt_tl_heatmap, + topleft_offset=gt_tl_offset, + bottomright_heatmap=gt_br_heatmap, + bottomright_offset=gt_br_offset) + + if with_corner_emb: + target_result.update(corner_embedding=match) + if with_guiding_shift: + target_result.update( + topleft_guiding_shift=gt_tl_guiding_shift, + bottomright_guiding_shift=gt_br_guiding_shift) + if with_centripetal_shift: + target_result.update( + topleft_centripetal_shift=gt_tl_centripetal_shift, + bottomright_centripetal_shift=gt_br_centripetal_shift) + + return target_result + + def loss(self, + tl_heats, + br_heats, + tl_embs, + br_embs, + tl_offs, + br_offs, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_embs (list[Tensor]): Top-left corner embeddings for each level + with shape (N, corner_emb_channels, H, W). + br_embs (list[Tensor]): Bottom-right corner embeddings for each + level with shape (N, corner_emb_channels, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [left, top, right, bottom] format. + gt_labels (list[Tensor]): Class indices corresponding to each box. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. Containing the + following losses: + + - det_loss (list[Tensor]): Corner keypoint losses of all + feature levels. + - pull_loss (list[Tensor]): Part one of AssociativeEmbedding + losses of all feature levels. + - push_loss (list[Tensor]): Part two of AssociativeEmbedding + losses of all feature levels. + - off_loss (list[Tensor]): Corner offset losses of all feature + levels. + """ + targets = self.get_targets( + gt_bboxes, + gt_labels, + tl_heats[-1].shape, + img_metas[0]['pad_shape'], + with_corner_emb=self.with_corner_emb) + mlvl_targets = [targets for _ in range(self.num_feat_levels)] + det_losses, pull_losses, push_losses, off_losses = multi_apply( + self.loss_single, tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, mlvl_targets) + loss_dict = dict(det_loss=det_losses, off_loss=off_losses) + if self.with_corner_emb: + loss_dict.update(pull_loss=pull_losses, push_loss=push_losses) + return loss_dict + + def loss_single(self, tl_hmp, br_hmp, tl_emb, br_emb, tl_off, br_off, + targets): + """Compute losses for single level. + + Args: + tl_hmp (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_hmp (Tensor): Bottom-right corner heatmap for current level with + shape (N, num_classes, H, W). 
+            tl_emb (Tensor): Top-left corner embedding for current level with
+                shape (N, corner_emb_channels, H, W).
+            br_emb (Tensor): Bottom-right corner embedding for current level
+                with shape (N, corner_emb_channels, H, W).
+            tl_off (Tensor): Top-left corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            br_off (Tensor): Bottom-right corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            targets (dict): Corner target generated by `get_targets`.
+
+        Returns:
+            tuple[torch.Tensor]: Losses of the head's different branches
+                containing the following losses:
+
+                - det_loss (Tensor): Corner keypoint loss.
+                - pull_loss (Tensor): Part one of AssociativeEmbedding loss.
+                - push_loss (Tensor): Part two of AssociativeEmbedding loss.
+                - off_loss (Tensor): Corner offset loss.
+        """
+        gt_tl_hmp = targets['topleft_heatmap']
+        gt_br_hmp = targets['bottomright_heatmap']
+        gt_tl_off = targets['topleft_offset']
+        gt_br_off = targets['bottomright_offset']
+        gt_embedding = targets['corner_embedding']
+
+        # Detection loss
+        tl_det_loss = self.loss_heatmap(
+            tl_hmp.sigmoid(),
+            gt_tl_hmp,
+            avg_factor=max(1,
+                           gt_tl_hmp.eq(1).sum()))
+        br_det_loss = self.loss_heatmap(
+            br_hmp.sigmoid(),
+            gt_br_hmp,
+            avg_factor=max(1,
+                           gt_br_hmp.eq(1).sum()))
+        det_loss = (tl_det_loss + br_det_loss) / 2.0
+
+        # AssociativeEmbedding loss
+        if self.with_corner_emb and self.loss_embedding is not None:
+            pull_loss, push_loss = self.loss_embedding(tl_emb, br_emb,
+                                                       gt_embedding)
+        else:
+            pull_loss, push_loss = None, None
+
+        # Offset loss
+        # We only compute the offset loss at the real corner position.
+        # The value of real corner would be 1 in heatmap ground truth.
+        # The mask is computed in class agnostic mode and its shape is
+        # batch * 1 * width * height.
+        tl_off_mask = gt_tl_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
+            gt_tl_hmp)
+        br_off_mask = gt_br_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
+            gt_br_hmp)
+        tl_off_loss = self.loss_offset(
+            tl_off,
+            gt_tl_off,
+            tl_off_mask,
+            avg_factor=max(1, tl_off_mask.sum()))
+        br_off_loss = self.loss_offset(
+            br_off,
+            gt_br_off,
+            br_off_mask,
+            avg_factor=max(1, br_off_mask.sum()))
+
+        off_loss = (tl_off_loss + br_off_loss) / 2.0
+
+        return det_loss, pull_loss, push_loss, off_loss
+
+    def get_bboxes(self,
+                   tl_heats,
+                   br_heats,
+                   tl_embs,
+                   br_embs,
+                   tl_offs,
+                   br_offs,
+                   img_metas,
+                   rescale=False,
+                   with_nms=True):
+        """Transform network output for a batch into bbox predictions.
+
+        Args:
+            tl_heats (list[Tensor]): Top-left corner heatmaps for each level
+                with shape (N, num_classes, H, W).
+            br_heats (list[Tensor]): Bottom-right corner heatmaps for each
+                level with shape (N, num_classes, H, W).
+            tl_embs (list[Tensor]): Top-left corner embeddings for each level
+                with shape (N, corner_emb_channels, H, W).
+            br_embs (list[Tensor]): Bottom-right corner embeddings for each
+                level with shape (N, corner_emb_channels, H, W).
+            tl_offs (list[Tensor]): Top-left corner offsets for each level
+                with shape (N, corner_offset_channels, H, W).
+            br_offs (list[Tensor]): Bottom-right corner offsets for each level
+                with shape (N, corner_offset_channels, H, W).
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            rescale (bool): If True, return boxes in original image space.
+                Default: False.
+            with_nms (bool): If True, do nms before return boxes.
+                Default: True.
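+
+        Example:
+            Illustrative note (not part of the original docstring): only the
+            last hourglass level is decoded, one image at a time, using
+            `img_id:img_id + 1` slices so the batch dimension is preserved.
+
+            >>> import torch
+            >>> tl_heats = [torch.rand(2, 80, 128, 128)]
+            >>> tl_heats[-1][0:1, :].shape
+            torch.Size([1, 80, 128, 128])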
+ """ + assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas) + result_list = [] + for img_id in range(len(img_metas)): + result_list.append( + self._get_bboxes_single( + tl_heats[-1][img_id:img_id + 1, :], + br_heats[-1][img_id:img_id + 1, :], + tl_offs[-1][img_id:img_id + 1, :], + br_offs[-1][img_id:img_id + 1, :], + img_metas[img_id], + tl_emb=tl_embs[-1][img_id:img_id + 1, :], + br_emb=br_embs[-1][img_id:img_id + 1, :], + rescale=rescale, + with_nms=with_nms)) + + return result_list + + def _get_bboxes_single(self, + tl_heat, + br_heat, + tl_off, + br_off, + img_meta, + tl_emb=None, + br_emb=None, + tl_centripetal_shift=None, + br_centripetal_shift=None, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + tl_heat (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_heat (Tensor): Bottom-right corner heatmap for current level + with shape (N, num_classes, H, W). + tl_off (Tensor): Top-left corner offset for current level with + shape (N, corner_offset_channels, H, W). + br_off (Tensor): Bottom-right corner offset for current level with + shape (N, corner_offset_channels, H, W). + img_meta (dict): Meta information of current image, e.g., + image size, scaling factor, etc. + tl_emb (Tensor): Top-left corner embedding for current level with + shape (N, corner_emb_channels, H, W). + br_emb (Tensor): Bottom-right corner embedding for current level + with shape (N, corner_emb_channels, H, W). + tl_centripetal_shift: Top-left corner's centripetal shift for + current level with shape (N, 2, H, W). + br_centripetal_shift: Bottom-right corner's centripetal shift for + current level with shape (N, 2, H, W). + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + """ + if isinstance(img_meta, (list, tuple)): + img_meta = img_meta[0] + + batch_bboxes, batch_scores, batch_clses = self.decode_heatmap( + tl_heat=tl_heat.sigmoid(), + br_heat=br_heat.sigmoid(), + tl_off=tl_off, + br_off=br_off, + tl_emb=tl_emb, + br_emb=br_emb, + tl_centripetal_shift=tl_centripetal_shift, + br_centripetal_shift=br_centripetal_shift, + img_meta=img_meta, + k=self.test_cfg.corner_topk, + kernel=self.test_cfg.local_maximum_kernel, + distance_threshold=self.test_cfg.distance_threshold) + + if rescale: + batch_bboxes /= img_meta['scale_factor'] + + bboxes = batch_bboxes.view([-1, 4]) + scores = batch_scores.view([-1, 1]) + clses = batch_clses.view([-1, 1]) + + idx = scores.argsort(dim=0, descending=True) + bboxes = bboxes[idx].view([-1, 4]) + scores = scores[idx].view(-1) + clses = clses[idx].view(-1) + + detections = torch.cat([bboxes, scores.unsqueeze(-1)], -1) + keepinds = (detections[:, -1] > -0.1) + detections = detections[keepinds] + labels = clses[keepinds] + + if with_nms: + detections, labels = self._bboxes_nms(detections, labels, + self.test_cfg) + + return detections, labels + + def _bboxes_nms(self, bboxes, labels, cfg): + out_bboxes, keep = batched_nms(bboxes[:, :4], bboxes[:, -1], labels, + cfg.nms_cfg) + out_labels = labels[keep] + + if len(out_bboxes) > 0: + idx = torch.argsort(out_bboxes[:, -1], descending=True) + idx = idx[:cfg.max_per_img] + out_bboxes = out_bboxes[idx] + out_labels = out_labels[idx] + + return out_bboxes, out_labels + + def _gather_feat(self, feat, ind, mask=None): + """Gather feature according to index. + + Args: + feat (Tensor): Target feature map. 
+            ind (Tensor): Target coord index.
+            mask (Tensor | None): Mask of featuremap. Default: None.
+
+        Returns:
+            feat (Tensor): Gathered feature.
+        """
+        dim = feat.size(2)
+        ind = ind.unsqueeze(2).repeat(1, 1, dim)
+        feat = feat.gather(1, ind)
+        if mask is not None:
+            mask = mask.unsqueeze(2).expand_as(feat)
+            feat = feat[mask]
+            feat = feat.view(-1, dim)
+        return feat
+
+    def _local_maximum(self, heat, kernel=3):
+        """Extract local maximum pixels with the given kernel.
+
+        Args:
+            heat (Tensor): Target heatmap.
+            kernel (int): Kernel size of max pooling. Default: 3.
+
+        Returns:
+            heat (Tensor): A heatmap where local maximum pixels keep their
+                own values and other positions are 0.
+        """
+        pad = (kernel - 1) // 2
+        hmax = F.max_pool2d(heat, kernel, stride=1, padding=pad)
+        keep = (hmax == heat).float()
+        return heat * keep
+
+    def _transpose_and_gather_feat(self, feat, ind):
+        """Transpose and gather feature according to index.
+
+        Args:
+            feat (Tensor): Target feature map.
+            ind (Tensor): Target coord index.
+
+        Returns:
+            feat (Tensor): Transposed and gathered feature.
+        """
+        feat = feat.permute(0, 2, 3, 1).contiguous()
+        feat = feat.view(feat.size(0), -1, feat.size(3))
+        feat = self._gather_feat(feat, ind)
+        return feat
+
+    def _topk(self, scores, k=20):
+        """Get top k positions from heatmap.
+
+        Args:
+            scores (Tensor): Target heatmap with shape
+                [batch, num_classes, height, width].
+            k (int): Target number. Default: 20.
+
+        Returns:
+            tuple[torch.Tensor]: Scores, indexes, categories and coords of
+                topk keypoint. Containing the following Tensors:
+
+            - topk_scores (Tensor): Max scores of each topk keypoint.
+            - topk_inds (Tensor): Indexes of each topk keypoint.
+            - topk_clses (Tensor): Categories of each topk keypoint.
+            - topk_ys (Tensor): Y-coord of each topk keypoint.
+            - topk_xs (Tensor): X-coord of each topk keypoint.
+        """
+        batch, _, height, width = scores.size()
+        topk_scores, topk_inds = torch.topk(scores.view(batch, -1), k)
+        topk_clses = topk_inds // (height * width)
+        topk_inds = topk_inds % (height * width)
+        topk_ys = topk_inds // width
+        topk_xs = (topk_inds % width).int().float()
+        return topk_scores, topk_inds, topk_clses, topk_ys, topk_xs
+
+    def decode_heatmap(self,
+                       tl_heat,
+                       br_heat,
+                       tl_off,
+                       br_off,
+                       tl_emb=None,
+                       br_emb=None,
+                       tl_centripetal_shift=None,
+                       br_centripetal_shift=None,
+                       img_meta=None,
+                       k=100,
+                       kernel=3,
+                       distance_threshold=0.5,
+                       num_dets=1000):
+        """Transform outputs for a single batch item into raw bbox predictions.
+
+        Args:
+            tl_heat (Tensor): Top-left corner heatmap for current level with
+                shape (N, num_classes, H, W).
+            br_heat (Tensor): Bottom-right corner heatmap for current level
+                with shape (N, num_classes, H, W).
+            tl_off (Tensor): Top-left corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            br_off (Tensor): Bottom-right corner offset for current level with
+                shape (N, corner_offset_channels, H, W).
+            tl_emb (Tensor | None): Top-left corner embedding for current
+                level with shape (N, corner_emb_channels, H, W).
+            br_emb (Tensor | None): Bottom-right corner embedding for current
+                level with shape (N, corner_emb_channels, H, W).
+            tl_centripetal_shift (Tensor | None): Top-left centripetal shift
+                for current level with shape (N, 2, H, W).
+            br_centripetal_shift (Tensor | None): Bottom-right centripetal
+                shift for current level with shape (N, 2, H, W).
+            img_meta (dict): Meta information of current image, e.g.,
+                image size, scaling factor, etc.
+            k (int): Get top k corner keypoints from heatmap.
+ kernel (int): Max pooling kernel for extract local maximum pixels. + distance_threshold (float): Distance threshold. Top-left and + bottom-right corner keypoints with feature distance less than + the threshold will be regarded as keypoints from same object. + num_dets (int): Num of raw boxes before doing nms. + + Returns: + tuple[torch.Tensor]: Decoded output of CornerHead, containing the + following Tensors: + + - bboxes (Tensor): Coords of each box. + - scores (Tensor): Scores of each box. + - clses (Tensor): Categories of each box. + """ + with_embedding = tl_emb is not None and br_emb is not None + with_centripetal_shift = ( + tl_centripetal_shift is not None + and br_centripetal_shift is not None) + assert with_embedding + with_centripetal_shift == 1 + batch, _, height, width = tl_heat.size() + inp_h, inp_w, _ = img_meta['pad_shape'] + + # perform nms on heatmaps + tl_heat = self._local_maximum(tl_heat, kernel=kernel) + br_heat = self._local_maximum(br_heat, kernel=kernel) + + tl_scores, tl_inds, tl_clses, tl_ys, tl_xs = self._topk(tl_heat, k=k) + br_scores, br_inds, br_clses, br_ys, br_xs = self._topk(br_heat, k=k) + + # We use repeat instead of expand here because expand is a + # shallow-copy function. Thus it could cause unexpected testing result + # sometimes. Using expand will decrease about 10% mAP during testing + # compared to repeat. + tl_ys = tl_ys.view(batch, k, 1).repeat(1, 1, k) + tl_xs = tl_xs.view(batch, k, 1).repeat(1, 1, k) + br_ys = br_ys.view(batch, 1, k).repeat(1, k, 1) + br_xs = br_xs.view(batch, 1, k).repeat(1, k, 1) + + tl_off = self._transpose_and_gather_feat(tl_off, tl_inds) + tl_off = tl_off.view(batch, k, 1, 2) + br_off = self._transpose_and_gather_feat(br_off, br_inds) + br_off = br_off.view(batch, 1, k, 2) + + tl_xs = tl_xs + tl_off[..., 0] + tl_ys = tl_ys + tl_off[..., 1] + br_xs = br_xs + br_off[..., 0] + br_ys = br_ys + br_off[..., 1] + + if with_centripetal_shift: + tl_centripetal_shift = self._transpose_and_gather_feat( + tl_centripetal_shift, tl_inds).view(batch, k, 1, 2).exp() + br_centripetal_shift = self._transpose_and_gather_feat( + br_centripetal_shift, br_inds).view(batch, 1, k, 2).exp() + + tl_ctxs = tl_xs + tl_centripetal_shift[..., 0] + tl_ctys = tl_ys + tl_centripetal_shift[..., 1] + br_ctxs = br_xs - br_centripetal_shift[..., 0] + br_ctys = br_ys - br_centripetal_shift[..., 1] + + # all possible boxes based on top k corners (ignoring class) + tl_xs *= (inp_w / width) + tl_ys *= (inp_h / height) + br_xs *= (inp_w / width) + br_ys *= (inp_h / height) + + if with_centripetal_shift: + tl_ctxs *= (inp_w / width) + tl_ctys *= (inp_h / height) + br_ctxs *= (inp_w / width) + br_ctys *= (inp_h / height) + + x_off = img_meta['border'][2] + y_off = img_meta['border'][0] + + tl_xs -= x_off + tl_ys -= y_off + br_xs -= x_off + br_ys -= y_off + + tl_xs *= tl_xs.gt(0.0).type_as(tl_xs) + tl_ys *= tl_ys.gt(0.0).type_as(tl_ys) + br_xs *= br_xs.gt(0.0).type_as(br_xs) + br_ys *= br_ys.gt(0.0).type_as(br_ys) + + bboxes = torch.stack((tl_xs, tl_ys, br_xs, br_ys), dim=3) + area_bboxes = ((br_xs - tl_xs) * (br_ys - tl_ys)).abs() + + if with_centripetal_shift: + tl_ctxs -= x_off + tl_ctys -= y_off + br_ctxs -= x_off + br_ctys -= y_off + + tl_ctxs *= tl_ctxs.gt(0.0).type_as(tl_ctxs) + tl_ctys *= tl_ctys.gt(0.0).type_as(tl_ctys) + br_ctxs *= br_ctxs.gt(0.0).type_as(br_ctxs) + br_ctys *= br_ctys.gt(0.0).type_as(br_ctys) + + ct_bboxes = torch.stack((tl_ctxs, tl_ctys, br_ctxs, br_ctys), + dim=3) + area_ct_bboxes = ((br_ctxs - tl_ctxs) * (br_ctys - tl_ctys)).abs() + + 
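+            # The following central-region test comes from the centripetal
+            # shift formulation (the "magic nums" below are from Section 4.1
+            # of that paper): a candidate box survives only if both of its
+            # shifted corners fall inside ``rcentral``, a scaled central
+            # region of the box whose relative size ``mu`` depends on the
+            # box area.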
+            rcentral = torch.zeros_like(ct_bboxes)
+            # magic nums from paper section 4.1
+            mu = torch.ones_like(area_bboxes) / 2.4
+            mu[area_bboxes > 3500] = 1 / 2.1  # larger bboxes have a smaller mu
+
+            bboxes_center_x = (bboxes[..., 0] + bboxes[..., 2]) / 2
+            bboxes_center_y = (bboxes[..., 1] + bboxes[..., 3]) / 2
+            rcentral[..., 0] = bboxes_center_x - mu * (bboxes[..., 2] -
+                                                       bboxes[..., 0]) / 2
+            rcentral[..., 1] = bboxes_center_y - mu * (bboxes[..., 3] -
+                                                       bboxes[..., 1]) / 2
+            rcentral[..., 2] = bboxes_center_x + mu * (bboxes[..., 2] -
+                                                       bboxes[..., 0]) / 2
+            rcentral[..., 3] = bboxes_center_y + mu * (bboxes[..., 3] -
+                                                       bboxes[..., 1]) / 2
+            area_rcentral = ((rcentral[..., 2] - rcentral[..., 0]) *
+                             (rcentral[..., 3] - rcentral[..., 1])).abs()
+            dists = area_ct_bboxes / area_rcentral
+
+            tl_ctx_inds = (ct_bboxes[..., 0] <= rcentral[..., 0]) | (
+                ct_bboxes[..., 0] >= rcentral[..., 2])
+            tl_cty_inds = (ct_bboxes[..., 1] <= rcentral[..., 1]) | (
+                ct_bboxes[..., 1] >= rcentral[..., 3])
+            br_ctx_inds = (ct_bboxes[..., 2] <= rcentral[..., 0]) | (
+                ct_bboxes[..., 2] >= rcentral[..., 2])
+            br_cty_inds = (ct_bboxes[..., 3] <= rcentral[..., 1]) | (
+                ct_bboxes[..., 3] >= rcentral[..., 3])
+
+        if with_embedding:
+            tl_emb = self._transpose_and_gather_feat(tl_emb, tl_inds)
+            tl_emb = tl_emb.view(batch, k, 1)
+            br_emb = self._transpose_and_gather_feat(br_emb, br_inds)
+            br_emb = br_emb.view(batch, 1, k)
+            dists = torch.abs(tl_emb - br_emb)
+
+        tl_scores = tl_scores.view(batch, k, 1).repeat(1, 1, k)
+        br_scores = br_scores.view(batch, 1, k).repeat(1, k, 1)
+
+        scores = (tl_scores + br_scores) / 2  # scores for all possible boxes
+
+        # tl and br should have same class
+        tl_clses = tl_clses.view(batch, k, 1).repeat(1, 1, k)
+        br_clses = br_clses.view(batch, 1, k).repeat(1, k, 1)
+        cls_inds = (tl_clses != br_clses)
+
+        # reject boxes based on distances
+        dist_inds = dists > distance_threshold
+
+        # reject boxes based on widths and heights
+        width_inds = (br_xs <= tl_xs)
+        height_inds = (br_ys <= tl_ys)
+
+        scores[cls_inds] = -1
+        scores[width_inds] = -1
+        scores[height_inds] = -1
+        scores[dist_inds] = -1
+        if with_centripetal_shift:
+            scores[tl_ctx_inds] = -1
+            scores[tl_cty_inds] = -1
+            scores[br_ctx_inds] = -1
+            scores[br_cty_inds] = -1
+
+        scores = scores.view(batch, -1)
+        scores, inds = torch.topk(scores, num_dets)
+        scores = scores.unsqueeze(2)
+
+        bboxes = bboxes.view(batch, -1, 4)
+        bboxes = self._gather_feat(bboxes, inds)
+
+        clses = tl_clses.contiguous().view(batch, -1, 1)
+        clses = self._gather_feat(clses, inds).float()
+
+        return bboxes, scores, clses
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/dense_test_mixins.py b/thirdparty/mmdetection/mmdet/models/dense_heads/dense_test_mixins.py
new file mode 100644
index 0000000000000000000000000000000000000000..a07c9d4236a1f1f823cb3d659ea1f04c64524745
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/dense_test_mixins.py
@@ -0,0 +1,97 @@
+from inspect import signature
+
+import torch
+
+from mmdet.core import bbox2result, bbox_mapping_back, multiclass_nms
+
+
+class BBoxTestMixin(object):
+    """Mixin class for test time augmentation of bboxes."""
+
+    def merge_aug_bboxes(self, aug_bboxes, aug_scores, img_metas):
+        """Merge augmented detection bboxes and scores.
+
+        Args:
+            aug_bboxes (list[Tensor]): shape (n, 4*#class)
+            aug_scores (list[Tensor] or None): shape (n, #class)
+            img_metas (list[list[dict]]): Meta information of each augmented
+                image, e.g., image size, scaling factor, flip flags, etc.
+ + Returns: + tuple: (bboxes, scores) + """ + recovered_bboxes = [] + for bboxes, img_info in zip(aug_bboxes, img_metas): + img_shape = img_info[0]['img_shape'] + scale_factor = img_info[0]['scale_factor'] + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip, + flip_direction) + recovered_bboxes.append(bboxes) + bboxes = torch.cat(recovered_bboxes, dim=0) + if aug_scores is None: + return bboxes + else: + scores = torch.cat(aug_scores, dim=0) + return bboxes, scores + + def aug_test_bboxes(self, feats, img_metas, rescale=False): + """Test det bboxes with test time augmentation. + + Args: + feats (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains features for all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[ndarray]: bbox results of each class + """ + # check with_nms argument + gb_sig = signature(self.get_bboxes) + gb_args = [p.name for p in gb_sig.parameters.values()] + gbs_sig = signature(self._get_bboxes_single) + gbs_args = [p.name for p in gbs_sig.parameters.values()] + assert ('with_nms' in gb_args) and ('with_nms' in gbs_args), \ + f'{self.__class__.__name__}' \ + ' does not support test-time augmentation' + + aug_bboxes = [] + aug_scores = [] + aug_factors = [] # score_factors for NMS + for x, img_meta in zip(feats, img_metas): + # only one image in the batch + outs = self.forward(x) + bbox_inputs = outs + (img_meta, self.test_cfg, False, False) + bbox_outputs = self.get_bboxes(*bbox_inputs)[0] + aug_bboxes.append(bbox_outputs[0]) + aug_scores.append(bbox_outputs[1]) + # bbox_outputs of some detectors (e.g., ATSS, FCOS, YOLOv3) + # contains additional element to adjust scores before NMS + if len(bbox_outputs) >= 3: + aug_factors.append(bbox_outputs[2]) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = self.merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas) + merged_factors = torch.cat(aug_factors, dim=0) if aug_factors else None + det_bboxes, det_labels = multiclass_nms( + merged_bboxes, + merged_scores, + self.test_cfg.score_thr, + self.test_cfg.nms, + self.test_cfg.max_per_img, + score_factors=merged_factors) + + if rescale: + _det_bboxes = det_bboxes + else: + _det_bboxes = det_bboxes.clone() + _det_bboxes[:, :4] *= det_bboxes.new_tensor( + img_metas[0][0]['scale_factor']) + bbox_results = bbox2result(_det_bboxes, det_labels, self.num_classes) + return bbox_results diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/fcos_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/fcos_head.py new file mode 100644 index 0000000000000000000000000000000000000000..51639c984c2c44a29b41655fd55030722b6736d7 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/fcos_head.py @@ -0,0 +1,574 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Scale, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import distance2bbox, multi_apply, multiclass_nms +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + +INF = 1e8 + + +@HEADS.register_module() +class FCOSHead(AnchorFreeHead): + """Anchor-free head 
used in `FCOS <https://arxiv.org/abs/1904.01355>`_.
+
+    The FCOS head does not use anchor boxes. Instead bounding boxes are
+    predicted at each pixel and a centerness measure is used to suppress
+    low-quality predictions.
+    Here norm_on_bbox, centerness_on_reg, dcn_on_last_conv are training
+    tricks used in official repo, which will bring remarkable mAP gains
+    of up to 4.9. Please see https://github.com/tianzhi0549/FCOS for
+    more detail.
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        strides (list[int] | list[tuple[int, int]]): Strides of points
+            in multiple feature levels. Default: (4, 8, 16, 32, 64).
+        regress_ranges (tuple[tuple[int, int]]): Regress range of multiple
+            level points.
+        center_sampling (bool): If true, use center sampling. Default: False.
+        center_sample_radius (float): Radius of center sampling. Default: 1.5.
+        norm_on_bbox (bool): If true, normalize the regression targets
+            with FPN strides. Default: False.
+        centerness_on_reg (bool): If true, position centerness on the
+            regress branch. Please refer to
+            https://github.com/tianzhi0549/FCOS/issues/89#issuecomment-516877042.
+            Default: False.
+        conv_bias (bool | str): If specified as `auto`, it will be decided by
+            the norm_cfg. Bias of conv will be set as True if `norm_cfg` is
+            None, otherwise False. Default: "auto".
+        loss_cls (dict): Config of classification loss.
+        loss_bbox (dict): Config of localization loss.
+        loss_centerness (dict): Config of centerness loss.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+            Default: norm_cfg=dict(type='GN', num_groups=32,
+            requires_grad=True).
+
+    Example:
+        >>> self = FCOSHead(11, 7)
+        >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]]
+        >>> cls_score, bbox_pred, centerness = self.forward(feats)
+        >>> assert len(cls_score) == len(self.scales)
+    """  # noqa: E501
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512),
+                                 (512, INF)),
+                 center_sampling=False,
+                 center_sample_radius=1.5,
+                 norm_on_bbox=False,
+                 centerness_on_reg=False,
+                 loss_cls=dict(
+                     type='FocalLoss',
+                     use_sigmoid=True,
+                     gamma=2.0,
+                     alpha=0.25,
+                     loss_weight=1.0),
+                 loss_bbox=dict(type='IoULoss', loss_weight=1.0),
+                 loss_centerness=dict(
+                     type='CrossEntropyLoss',
+                     use_sigmoid=True,
+                     loss_weight=1.0),
+                 norm_cfg=dict(type='GN', num_groups=32, requires_grad=True),
+                 **kwargs):
+        self.regress_ranges = regress_ranges
+        self.center_sampling = center_sampling
+        self.center_sample_radius = center_sample_radius
+        self.norm_on_bbox = norm_on_bbox
+        self.centerness_on_reg = centerness_on_reg
+        super().__init__(
+            num_classes,
+            in_channels,
+            loss_cls=loss_cls,
+            loss_bbox=loss_bbox,
+            norm_cfg=norm_cfg,
+            **kwargs)
+        self.loss_centerness = build_loss(loss_centerness)
+
+    def _init_layers(self):
+        """Initialize layers of the head."""
+        super()._init_layers()
+        self.conv_centerness = nn.Conv2d(self.feat_channels, 1, 3, padding=1)
+        self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides])
+
+    def init_weights(self):
+        """Initialize weights of the head."""
+        super().init_weights()
+        normal_init(self.conv_centerness, std=0.01)
+
+    def forward(self, feats):
+        """Forward features from the upstream network.
+
+        Args:
+            feats (tuple[Tensor]): Features from the upstream network, each is
+                a 4D-tensor.
+
+        Returns:
+            tuple:
+                cls_scores (list[Tensor]): Box scores for each scale level, \
+                    each is a 4D-tensor, the channel number is \
+                    num_points * num_classes.
+                bbox_preds (list[Tensor]): Box energies / deltas for each \
+                    scale level, each is a 4D-tensor, the channel number is \
+                    num_points * 4.
+                centernesses (list[Tensor]): Centerness for each scale \
+                    level, each is a 4D-tensor, the channel number is \
+                    num_points * 1.
+        """
+        return multi_apply(self.forward_single, feats, self.scales,
+                           self.strides)
+
+    def forward_single(self, x, scale, stride):
+        """Forward features of a single scale level.
+
+        Args:
+            x (Tensor): FPN feature maps of the specified stride.
+            scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize
+                the bbox prediction.
+            stride (int): The corresponding stride for feature maps, only
+                used to normalize the bbox prediction when self.norm_on_bbox
+                is True.
+
+        Returns:
+            tuple: scores for each class, bbox predictions and centerness \
+                predictions of input feature maps.
+        """
+        cls_score, bbox_pred, cls_feat, reg_feat = super().forward_single(x)
+        if self.centerness_on_reg:
+            centerness = self.conv_centerness(reg_feat)
+        else:
+            centerness = self.conv_centerness(cls_feat)
+        # scale the bbox_pred of different level
+        # float to avoid overflow when enabling FP16
+        bbox_pred = scale(bbox_pred).float()
+        if self.norm_on_bbox:
+            bbox_pred = F.relu(bbox_pred)
+            if not self.training:
+                bbox_pred *= stride
+        else:
+            bbox_pred = bbox_pred.exp()
+        return cls_score, bbox_pred, centerness
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses'))
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             centernesses,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute loss of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level,
+                each is a 4D-tensor, the channel number is
+                num_points * num_classes.
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level, each is a 4D-tensor, the channel number is
+                num_points * 4.
+            centernesses (list[Tensor]): Centerness for each scale level, each
+                is a 4D-tensor, the channel number is num_points * 1.
+            gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+                shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): class indices corresponding to each box
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+ """ + assert len(cls_scores) == len(bbox_preds) == len(centernesses) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + labels, bbox_targets = self.get_targets(all_level_points, gt_bboxes, + gt_labels) + + num_imgs = cls_scores[0].size(0) + # flatten cls_scores, bbox_preds and centerness + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels) + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + for bbox_pred in bbox_preds + ] + flatten_centerness = [ + centerness.permute(0, 2, 3, 1).reshape(-1) + for centerness in centernesses + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_centerness = torch.cat(flatten_centerness) + flatten_labels = torch.cat(labels) + flatten_bbox_targets = torch.cat(bbox_targets) + # repeat points to align with bbox_preds + flatten_points = torch.cat( + [points.repeat(num_imgs, 1) for points in all_level_points]) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((flatten_labels >= 0) + & (flatten_labels < bg_class_ind)).nonzero().reshape(-1) + num_pos = len(pos_inds) + loss_cls = self.loss_cls( + flatten_cls_scores, flatten_labels, + avg_factor=num_pos + num_imgs) # avoid num_pos is 0 + + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_centerness = flatten_centerness[pos_inds] + + if num_pos > 0: + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_centerness_targets = self.centerness_target(pos_bbox_targets) + pos_points = flatten_points[pos_inds] + pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds) + pos_decoded_target_preds = distance2bbox(pos_points, + pos_bbox_targets) + # centerness weighted iou loss + loss_bbox = self.loss_bbox( + pos_decoded_bbox_preds, + pos_decoded_target_preds, + weight=pos_centerness_targets, + avg_factor=pos_centerness_targets.sum()) + loss_centerness = self.loss_centerness(pos_centerness, + pos_centerness_targets) + else: + loss_bbox = pos_bbox_preds.sum() + loss_centerness = pos_centerness.sum() + + return dict( + loss_cls=loss_cls, + loss_bbox=loss_bbox, + loss_centerness=loss_centerness) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + with shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). + centernesses (list[Tensor]): Centerness for each scale level with + shape (N, num_points * 1, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. 
+                The first item is an (n, 5) tensor, where the first 4 columns
+                are bounding box positions (tl_x, tl_y, br_x, br_y) and the
+                5-th column is a score between 0 and 1. The second item is a
+                (n,) tensor where each item is the predicted class label of
+                the corresponding box.
+        """
+        assert len(cls_scores) == len(bbox_preds)
+        num_levels = len(cls_scores)
+
+        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
+        mlvl_points = self.get_points(featmap_sizes, bbox_preds[0].dtype,
+                                      bbox_preds[0].device)
+        result_list = []
+        for img_id in range(len(img_metas)):
+            cls_score_list = [
+                cls_scores[i][img_id].detach() for i in range(num_levels)
+            ]
+            bbox_pred_list = [
+                bbox_preds[i][img_id].detach() for i in range(num_levels)
+            ]
+            centerness_pred_list = [
+                centernesses[i][img_id].detach() for i in range(num_levels)
+            ]
+            img_shape = img_metas[img_id]['img_shape']
+            scale_factor = img_metas[img_id]['scale_factor']
+            det_bboxes = self._get_bboxes_single(
+                cls_score_list, bbox_pred_list, centerness_pred_list,
+                mlvl_points, img_shape, scale_factor, cfg, rescale, with_nms)
+            result_list.append(det_bboxes)
+        return result_list
+
+    def _get_bboxes_single(self,
+                           cls_scores,
+                           bbox_preds,
+                           centernesses,
+                           mlvl_points,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False,
+                           with_nms=True):
+        """Transform outputs for a single batch item into bbox predictions.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for a single scale level
+                with shape (num_points * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for a single
+                scale level with shape (num_points * 4, H, W).
+            centernesses (list[Tensor]): Centerness for a single scale level
+                with shape (num_points * 1, H, W).
+            mlvl_points (list[Tensor]): Box reference for a single scale level
+                with shape (num_total_points, 2).
+            img_shape (tuple[int]): Shape of the input image,
+                (height, width, 3).
+            scale_factor (ndarray): Scale factor of the image arrange as
+                (w_scale, h_scale, w_scale, h_scale).
+            cfg (mmcv.Config | None): Test / postprocessing configuration,
+                if None, test_cfg would be used.
+            rescale (bool): If True, return boxes in original image space.
+                Default: False.
+            with_nms (bool): If True, do nms before return boxes.
+                Default: True.
+
+        Returns:
+            tuple(Tensor):
+                det_bboxes (Tensor): BBox predictions in shape (n, 5), where
+                    the first 4 columns are bounding box positions
+                    (tl_x, tl_y, br_x, br_y) and the 5-th column is a score
+                    between 0 and 1.
+                det_labels (Tensor): A (n,) tensor where each item is the
+                    predicted class label of the corresponding box.
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_points) + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_centerness = [] + for cls_score, bbox_pred, centerness, points in zip( + cls_scores, bbox_preds, centernesses, mlvl_points): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).sigmoid() + centerness = centerness.permute(1, 2, 0).reshape(-1).sigmoid() + + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + max_scores, _ = (scores * centerness[:, None]).max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + points = points[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + centerness = centerness[topk_inds] + bboxes = distance2bbox(points, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_centerness.append(centerness) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + mlvl_centerness = torch.cat(mlvl_centerness) + + if with_nms: + det_bboxes, det_labels = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_centerness) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores, mlvl_centerness + + def _get_points_single(self, + featmap_size, + stride, + dtype, + device, + flatten=False): + """Get points according to feature map sizes.""" + y, x = super()._get_points_single(featmap_size, stride, dtype, device) + points = torch.stack((x.reshape(-1) * stride, y.reshape(-1) * stride), + dim=-1) + stride // 2 + return points + + def get_targets(self, points, gt_bboxes_list, gt_labels_list): + """Compute regression, classification and centerss targets for points + in multiple images. + + Args: + points (list[Tensor]): Points of each fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels_list (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + + Returns: + tuple: + concat_lvl_labels (list[Tensor]): Labels of each level. \ + concat_lvl_bbox_targets (list[Tensor]): BBox targets of each \ + level. 
+ """ + assert len(points) == len(self.regress_ranges) + num_levels = len(points) + # expand regress ranges to align with points + expanded_regress_ranges = [ + points[i].new_tensor(self.regress_ranges[i])[None].expand_as( + points[i]) for i in range(num_levels) + ] + # concat all levels points and regress ranges + concat_regress_ranges = torch.cat(expanded_regress_ranges, dim=0) + concat_points = torch.cat(points, dim=0) + + # the number of points per img, per lvl + num_points = [center.size(0) for center in points] + + # get labels and bbox_targets of each image + labels_list, bbox_targets_list = multi_apply( + self._get_target_single, + gt_bboxes_list, + gt_labels_list, + points=concat_points, + regress_ranges=concat_regress_ranges, + num_points_per_lvl=num_points) + + # split to per img, per level + labels_list = [labels.split(num_points, 0) for labels in labels_list] + bbox_targets_list = [ + bbox_targets.split(num_points, 0) + for bbox_targets in bbox_targets_list + ] + + # concat per level image + concat_lvl_labels = [] + concat_lvl_bbox_targets = [] + for i in range(num_levels): + concat_lvl_labels.append( + torch.cat([labels[i] for labels in labels_list])) + bbox_targets = torch.cat( + [bbox_targets[i] for bbox_targets in bbox_targets_list]) + if self.norm_on_bbox: + bbox_targets = bbox_targets / self.strides[i] + concat_lvl_bbox_targets.append(bbox_targets) + return concat_lvl_labels, concat_lvl_bbox_targets + + def _get_target_single(self, gt_bboxes, gt_labels, points, regress_ranges, + num_points_per_lvl): + """Compute regression and classification targets for a single image.""" + num_points = points.size(0) + num_gts = gt_labels.size(0) + if num_gts == 0: + return gt_labels.new_full((num_points,), self.num_classes), \ + gt_bboxes.new_zeros((num_points, 4)) + + areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * ( + gt_bboxes[:, 3] - gt_bboxes[:, 1]) + # TODO: figure out why these two are different + # areas = areas[None].expand(num_points, num_gts) + areas = areas[None].repeat(num_points, 1) + regress_ranges = regress_ranges[:, None, :].expand( + num_points, num_gts, 2) + gt_bboxes = gt_bboxes[None].expand(num_points, num_gts, 4) + xs, ys = points[:, 0], points[:, 1] + xs = xs[:, None].expand(num_points, num_gts) + ys = ys[:, None].expand(num_points, num_gts) + + left = xs - gt_bboxes[..., 0] + right = gt_bboxes[..., 2] - xs + top = ys - gt_bboxes[..., 1] + bottom = gt_bboxes[..., 3] - ys + bbox_targets = torch.stack((left, top, right, bottom), -1) + + if self.center_sampling: + # condition1: inside a `center bbox` + radius = self.center_sample_radius + center_xs = (gt_bboxes[..., 0] + gt_bboxes[..., 2]) / 2 + center_ys = (gt_bboxes[..., 1] + gt_bboxes[..., 3]) / 2 + center_gts = torch.zeros_like(gt_bboxes) + stride = center_xs.new_zeros(center_xs.shape) + + # project the points on current lvl back to the `original` sizes + lvl_begin = 0 + for lvl_idx, num_points_lvl in enumerate(num_points_per_lvl): + lvl_end = lvl_begin + num_points_lvl + stride[lvl_begin:lvl_end] = self.strides[lvl_idx] * radius + lvl_begin = lvl_end + + x_mins = center_xs - stride + y_mins = center_ys - stride + x_maxs = center_xs + stride + y_maxs = center_ys + stride + center_gts[..., 0] = torch.where(x_mins > gt_bboxes[..., 0], + x_mins, gt_bboxes[..., 0]) + center_gts[..., 1] = torch.where(y_mins > gt_bboxes[..., 1], + y_mins, gt_bboxes[..., 1]) + center_gts[..., 2] = torch.where(x_maxs > gt_bboxes[..., 2], + gt_bboxes[..., 2], x_maxs) + center_gts[..., 3] = torch.where(y_maxs > gt_bboxes[..., 3], + 
gt_bboxes[..., 3], y_maxs) + + cb_dist_left = xs - center_gts[..., 0] + cb_dist_right = center_gts[..., 2] - xs + cb_dist_top = ys - center_gts[..., 1] + cb_dist_bottom = center_gts[..., 3] - ys + center_bbox = torch.stack( + (cb_dist_left, cb_dist_top, cb_dist_right, cb_dist_bottom), -1) + inside_gt_bbox_mask = center_bbox.min(-1)[0] > 0 + else: + # condition1: inside a gt bbox + inside_gt_bbox_mask = bbox_targets.min(-1)[0] > 0 + + # condition2: limit the regression range for each location + max_regress_distance = bbox_targets.max(-1)[0] + inside_regress_range = ( + (max_regress_distance >= regress_ranges[..., 0]) + & (max_regress_distance <= regress_ranges[..., 1])) + + # if there are still more than one objects for a location, + # we choose the one with minimal area + areas[inside_gt_bbox_mask == 0] = INF + areas[inside_regress_range == 0] = INF + min_area, min_area_inds = areas.min(dim=1) + + labels = gt_labels[min_area_inds] + labels[min_area == INF] = self.num_classes # set as BG + bbox_targets = bbox_targets[range(num_points), min_area_inds] + + return labels, bbox_targets + + def centerness_target(self, pos_bbox_targets): + """Compute centerness targets. + + Args: + pos_bbox_targets (Tensor): BBox targets of positive bboxes in shape + (num_pos, 4) + + Returns: + Tensor: Centerness target. + """ + # only calculate pos centerness targets, otherwise there may be nan + left_right = pos_bbox_targets[:, [0, 2]] + top_bottom = pos_bbox_targets[:, [1, 3]] + centerness_targets = ( + left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * ( + top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0]) + return torch.sqrt(centerness_targets) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/fovea_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/fovea_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c8ccea787cba3d092284d4a5e209adaf6521c86a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/fovea_head.py @@ -0,0 +1,341 @@ +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, normal_init +from mmcv.ops import DeformConv2d + +from mmdet.core import multi_apply, multiclass_nms +from ..builder import HEADS +from .anchor_free_head import AnchorFreeHead + +INF = 1e8 + + +class FeatureAlign(nn.Module): + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + deform_groups=4): + super(FeatureAlign, self).__init__() + offset_channels = kernel_size * kernel_size * 2 + self.conv_offset = nn.Conv2d( + 4, deform_groups * offset_channels, 1, bias=False) + self.conv_adaption = DeformConv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=(kernel_size - 1) // 2, + deform_groups=deform_groups) + self.relu = nn.ReLU(inplace=True) + + def init_weights(self): + normal_init(self.conv_offset, std=0.1) + normal_init(self.conv_adaption, std=0.01) + + def forward(self, x, shape): + offset = self.conv_offset(shape) + x = self.relu(self.conv_adaption(x, offset)) + return x + + +@HEADS.register_module() +class FoveaHead(AnchorFreeHead): + """FoveaBox: Beyond Anchor-based Object Detector + https://arxiv.org/abs/1904.03797 + """ + + def __init__(self, + num_classes, + in_channels, + base_edge_list=(16, 32, 64, 128, 256), + scale_ranges=((8, 32), (16, 64), (32, 128), (64, 256), (128, + 512)), + sigma=0.4, + with_deform=False, + deform_groups=4, + **kwargs): + self.base_edge_list = base_edge_list + self.scale_ranges = scale_ranges + self.sigma = sigma + self.with_deform = with_deform + self.deform_groups = 
deform_groups + super().__init__(num_classes, in_channels, **kwargs) + + def _init_layers(self): + # box branch + super()._init_reg_convs() + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + + # cls branch + if not self.with_deform: + super()._init_cls_convs() + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + else: + self.cls_convs = nn.ModuleList() + self.cls_convs.append( + ConvModule( + self.feat_channels, (self.feat_channels * 4), + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None)) + self.cls_convs.append( + ConvModule((self.feat_channels * 4), (self.feat_channels * 4), + 1, + stride=1, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None)) + self.feature_adaption = FeatureAlign( + self.feat_channels, + self.feat_channels, + kernel_size=3, + deform_groups=self.deform_groups) + self.conv_cls = nn.Conv2d( + int(self.feat_channels * 4), + self.cls_out_channels, + 3, + padding=1) + + def init_weights(self): + super().init_weights() + if self.with_deform: + self.feature_adaption.init_weights() + + def forward_single(self, x): + cls_feat = x + reg_feat = x + for reg_layer in self.reg_convs: + reg_feat = reg_layer(reg_feat) + bbox_pred = self.conv_reg(reg_feat) + if self.with_deform: + cls_feat = self.feature_adaption(cls_feat, bbox_pred.exp()) + for cls_layer in self.cls_convs: + cls_feat = cls_layer(cls_feat) + cls_score = self.conv_cls(cls_feat) + return cls_score, bbox_pred + + def _get_points_single(self, *args, **kwargs): + y, x = super()._get_points_single(*args, **kwargs) + return y + 0.5, x + 0.5 + + def loss(self, + cls_scores, + bbox_preds, + gt_bbox_list, + gt_label_list, + img_metas, + gt_bboxes_ignore=None): + assert len(cls_scores) == len(bbox_preds) + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + num_imgs = cls_scores[0].size(0) + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels) + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + for bbox_pred in bbox_preds + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_labels, flatten_bbox_targets = self.get_targets( + gt_bbox_list, gt_label_list, featmap_sizes, points) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + pos_inds = ((flatten_labels >= 0) + & (flatten_labels < self.num_classes)).nonzero().view(-1) + num_pos = len(pos_inds) + + loss_cls = self.loss_cls( + flatten_cls_scores, flatten_labels, avg_factor=num_pos + num_imgs) + if num_pos > 0: + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_weights = pos_bbox_targets.new_zeros( + pos_bbox_targets.size()) + 1.0 + loss_bbox = self.loss_bbox( + pos_bbox_preds, + pos_bbox_targets, + pos_weights, + avg_factor=num_pos) + else: + loss_bbox = torch.tensor( + 0, + dtype=flatten_bbox_preds.dtype, + device=flatten_bbox_preds.device) + return dict(loss_cls=loss_cls, loss_bbox=loss_bbox) + + def get_targets(self, gt_bbox_list, gt_label_list, featmap_sizes, points): + label_list, bbox_target_list = multi_apply( + self._get_target_single, + gt_bbox_list, + gt_label_list, + featmap_size_list=featmap_sizes, + point_list=points) + flatten_labels = [ + torch.cat([ + labels_level_img.flatten() for 
labels_level_img in labels_level + ]) for labels_level in zip(*label_list) + ] + flatten_bbox_targets = [ + torch.cat([ + bbox_targets_level_img.reshape(-1, 4) + for bbox_targets_level_img in bbox_targets_level + ]) for bbox_targets_level in zip(*bbox_target_list) + ] + flatten_labels = torch.cat(flatten_labels) + flatten_bbox_targets = torch.cat(flatten_bbox_targets) + return flatten_labels, flatten_bbox_targets + + def _get_target_single(self, + gt_bboxes_raw, + gt_labels_raw, + featmap_size_list=None, + point_list=None): + + gt_areas = torch.sqrt((gt_bboxes_raw[:, 2] - gt_bboxes_raw[:, 0]) * + (gt_bboxes_raw[:, 3] - gt_bboxes_raw[:, 1])) + label_list = [] + bbox_target_list = [] + # for each pyramid, find the cls and box target + for base_len, (lower_bound, upper_bound), stride, featmap_size, \ + (y, x) in zip(self.base_edge_list, self.scale_ranges, + self.strides, featmap_size_list, point_list): + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + labels = gt_labels_raw.new_zeros(featmap_size) + self.num_classes + bbox_targets = gt_bboxes_raw.new(featmap_size[0], featmap_size[1], + 4) + 1 + # scale assignment + hit_indices = ((gt_areas >= lower_bound) & + (gt_areas <= upper_bound)).nonzero().flatten() + if len(hit_indices) == 0: + label_list.append(labels) + bbox_target_list.append(torch.log(bbox_targets)) + continue + _, hit_index_order = torch.sort(-gt_areas[hit_indices]) + hit_indices = hit_indices[hit_index_order] + gt_bboxes = gt_bboxes_raw[hit_indices, :] / stride + gt_labels = gt_labels_raw[hit_indices] + half_w = 0.5 * (gt_bboxes[:, 2] - gt_bboxes[:, 0]) + half_h = 0.5 * (gt_bboxes[:, 3] - gt_bboxes[:, 1]) + # valid fovea area: left, right, top, down + pos_left = torch.ceil( + gt_bboxes[:, 0] + (1 - self.sigma) * half_w - 0.5).long().\ + clamp(0, featmap_size[1] - 1) + pos_right = torch.floor( + gt_bboxes[:, 0] + (1 + self.sigma) * half_w - 0.5).long().\ + clamp(0, featmap_size[1] - 1) + pos_top = torch.ceil( + gt_bboxes[:, 1] + (1 - self.sigma) * half_h - 0.5).long().\ + clamp(0, featmap_size[0] - 1) + pos_down = torch.floor( + gt_bboxes[:, 1] + (1 + self.sigma) * half_h - 0.5).long().\ + clamp(0, featmap_size[0] - 1) + for px1, py1, px2, py2, label, (gt_x1, gt_y1, gt_x2, gt_y2) in \ + zip(pos_left, pos_top, pos_right, pos_down, gt_labels, + gt_bboxes_raw[hit_indices, :]): + labels[py1:py2 + 1, px1:px2 + 1] = label + bbox_targets[py1:py2 + 1, px1:px2 + 1, 0] = \ + (stride * x[py1:py2 + 1, px1:px2 + 1] - gt_x1) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 1] = \ + (stride * y[py1:py2 + 1, px1:px2 + 1] - gt_y1) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 2] = \ + (gt_x2 - stride * x[py1:py2 + 1, px1:px2 + 1]) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 3] = \ + (gt_y2 - stride * y[py1:py2 + 1, px1:px2 + 1]) / base_len + bbox_targets = bbox_targets.clamp(min=1. / 16, max=16.) 
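+            # Targets are clamped to [1/16, 16] and stored in log space
+            # (``torch.log`` below); ``_get_bboxes_single`` undoes this with
+            # ``bbox_pred.exp()`` at inference time.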
+ label_list.append(labels) + bbox_target_list.append(torch.log(bbox_targets)) + return label_list, bbox_target_list + + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=None): + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + points = self.get_points( + featmap_sizes, + bbox_preds[0].dtype, + bbox_preds[0].device, + flatten=True) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + det_bboxes = self._get_bboxes_single(cls_score_list, + bbox_pred_list, featmap_sizes, + points, img_shape, + scale_factor, cfg, rescale) + result_list.append(det_bboxes) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + featmap_sizes, + point_list, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(point_list) + det_bboxes = [] + det_scores = [] + for cls_score, bbox_pred, featmap_size, stride, base_len, (y, x) \ + in zip(cls_scores, bbox_preds, featmap_sizes, self.strides, + self.base_edge_list, point_list): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4).exp() + nms_pre = cfg.get('nms_pre', -1) + if (nms_pre > 0) and (scores.shape[0] > nms_pre): + max_scores, _ = scores.max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + y = y[topk_inds] + x = x[topk_inds] + x1 = (stride * x - base_len * bbox_pred[:, 0]).\ + clamp(min=0, max=img_shape[1] - 1) + y1 = (stride * y - base_len * bbox_pred[:, 1]).\ + clamp(min=0, max=img_shape[0] - 1) + x2 = (stride * x + base_len * bbox_pred[:, 2]).\ + clamp(min=0, max=img_shape[1] - 1) + y2 = (stride * y + base_len * bbox_pred[:, 3]).\ + clamp(min=0, max=img_shape[0] - 1) + bboxes = torch.stack([x1, y1, x2, y2], -1) + det_bboxes.append(bboxes) + det_scores.append(scores) + det_bboxes = torch.cat(det_bboxes) + if rescale: + det_bboxes /= det_bboxes.new_tensor(scale_factor) + det_scores = torch.cat(det_scores) + padding = det_scores.new_zeros(det_scores.shape[0], 1) + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + det_scores = torch.cat([det_scores, padding], dim=1) + det_bboxes, det_labels = multiclass_nms(det_bboxes, det_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/free_anchor_retina_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/free_anchor_retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..79879fdc3171b8e34b606b27eb1ceb67f4473e3e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/free_anchor_retina_head.py @@ -0,0 +1,270 @@ +import torch +import torch.nn.functional as F + +from mmdet.core import bbox_overlaps +from ..builder import HEADS +from .retina_head import RetinaHead + +EPS = 1e-12 + + +@HEADS.register_module() +class FreeAnchorRetinaHead(RetinaHead): + """FreeAnchor RetinaHead used in https://arxiv.org/abs/1909.02466. 
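+
+    FreeAnchor replaces the fixed IoU-based anchor matching of RetinaHead
+    with a learning-to-match scheme: each ground-truth box selects anchors
+    from a bag of its ``pre_anchor_topk`` best-overlapping candidates, and
+    the matching is optimized jointly with detection through a
+    maximum-likelihood objective (the positive/negative bag losses below).
+
+    Example:
+        >>> import torch
+        >>> # a minimal smoke test, mirroring the RetinaHead example
+        >>> self = FreeAnchorRetinaHead(11, 7)
+        >>> x = torch.rand(1, 7, 32, 32)
+        >>> cls_score, bbox_pred = self.forward_single(x)
+        >>> assert cls_score.shape[1] == self.num_anchors * self.num_classes
+        >>> assert bbox_pred.shape[1] == self.num_anchors * 4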
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        stacked_convs (int): Number of conv layers in cls and reg tower.
+            Default: 4.
+        conv_cfg (dict): dictionary to construct and config conv layer.
+            Default: None.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+            Default: norm_cfg=dict(type='GN', num_groups=32,
+            requires_grad=True).
+        pre_anchor_topk (int): Number of boxes taken in each bag.
+        bbox_thr (float): The threshold of the saturated linear function. It
+            is usually the same as the IoU threshold used in NMS.
+        gamma (float): Gamma parameter in focal loss.
+        alpha (float): Alpha parameter in focal loss.
+    """  # noqa: W605
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 stacked_convs=4,
+                 conv_cfg=None,
+                 norm_cfg=None,
+                 pre_anchor_topk=50,
+                 bbox_thr=0.6,
+                 gamma=2.0,
+                 alpha=0.5,
+                 **kwargs):
+        super(FreeAnchorRetinaHead,
+              self).__init__(num_classes, in_channels, stacked_convs,
+                             conv_cfg, norm_cfg, **kwargs)
+
+        self.pre_anchor_topk = pre_anchor_topk
+        self.bbox_thr = bbox_thr
+        self.gamma = gamma
+        self.alpha = alpha
+
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level
+                Has shape (N, num_anchors * num_classes, H, W)
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W)
+            gt_bboxes (list[Tensor]): Each item is the ground-truth boxes for
+                each image in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): class indices corresponding to each box
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == len(self.anchor_generator.base_anchors) + + anchor_list, _ = self.get_anchors(featmap_sizes, img_metas) + anchors = [torch.cat(anchor) for anchor in anchor_list] + + # concatenate each level + cls_scores = [ + cls.permute(0, 2, 3, + 1).reshape(cls.size(0), -1, self.cls_out_channels) + for cls in cls_scores + ] + bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(bbox_pred.size(0), -1, 4) + for bbox_pred in bbox_preds + ] + cls_scores = torch.cat(cls_scores, dim=1) + bbox_preds = torch.cat(bbox_preds, dim=1) + + cls_prob = torch.sigmoid(cls_scores) + box_prob = [] + num_pos = 0 + positive_losses = [] + for _, (anchors_, gt_labels_, gt_bboxes_, cls_prob_, + bbox_preds_) in enumerate( + zip(anchors, gt_labels, gt_bboxes, cls_prob, bbox_preds)): + + with torch.no_grad(): + if len(gt_bboxes_) == 0: + image_box_prob = torch.zeros( + anchors_.size(0), + self.cls_out_channels).type_as(bbox_preds_) + else: + # box_localization: a_{j}^{loc}, shape: [j, 4] + pred_boxes = self.bbox_coder.decode(anchors_, bbox_preds_) + + # object_box_iou: IoU_{ij}^{loc}, shape: [i, j] + object_box_iou = bbox_overlaps(gt_bboxes_, pred_boxes) + + # object_box_prob: P{a_{j} -> b_{i}}, shape: [i, j] + t1 = self.bbox_thr + t2 = object_box_iou.max( + dim=1, keepdim=True).values.clamp(min=t1 + 1e-12) + object_box_prob = ((object_box_iou - t1) / + (t2 - t1)).clamp( + min=0, max=1) + + # object_cls_box_prob: P{a_{j} -> b_{i}}, shape: [i, c, j] + num_obj = gt_labels_.size(0) + indices = torch.stack([ + torch.arange(num_obj).type_as(gt_labels_), gt_labels_ + ], + dim=0) + object_cls_box_prob = torch.sparse_coo_tensor( + indices, object_box_prob) + + # image_box_iou: P{a_{j} \in A_{+}}, shape: [c, j] + """ + from "start" to "end" implement: + image_box_iou = torch.sparse.max(object_cls_box_prob, + dim=0).t() + + """ + # start + box_cls_prob = torch.sparse.sum( + object_cls_box_prob, dim=0).to_dense() + + indices = torch.nonzero(box_cls_prob, as_tuple=False).t_() + if indices.numel() == 0: + image_box_prob = torch.zeros( + anchors_.size(0), + self.cls_out_channels).type_as(object_box_prob) + else: + nonzero_box_prob = torch.where( + (gt_labels_.unsqueeze(dim=-1) == indices[0]), + object_box_prob[:, indices[1]], + torch.tensor([ + 0 + ]).type_as(object_box_prob)).max(dim=0).values + + # upmap to shape [j, c] + image_box_prob = torch.sparse_coo_tensor( + indices.flip([0]), + nonzero_box_prob, + size=(anchors_.size(0), + self.cls_out_channels)).to_dense() + # end + + box_prob.append(image_box_prob) + + # construct bags for objects + match_quality_matrix = bbox_overlaps(gt_bboxes_, anchors_) + _, matched = torch.topk( + match_quality_matrix, + self.pre_anchor_topk, + dim=1, + sorted=False) + del match_quality_matrix + + # matched_cls_prob: P_{ij}^{cls} + matched_cls_prob = torch.gather( + cls_prob_[matched], 2, + gt_labels_.view(-1, 1, 1).repeat(1, self.pre_anchor_topk, + 1)).squeeze(2) + + # matched_box_prob: P_{ij}^{loc} + matched_anchors = anchors_[matched] + matched_object_targets = self.bbox_coder.encode( + matched_anchors, + gt_bboxes_.unsqueeze(dim=1).expand_as(matched_anchors)) + loss_bbox = self.loss_bbox( + bbox_preds_[matched], + matched_object_targets, + reduction_override='none').sum(-1) + matched_box_prob = torch.exp(-loss_bbox) + + # positive_losses: {-log( Mean-max(P_{ij}^{cls} * P_{ij}^{loc}) )} + num_pos += len(gt_bboxes_) + positive_losses.append( + self.positive_bag_loss(matched_cls_prob, matched_box_prob)) + 
+        positive_loss = torch.cat(positive_losses).sum() / max(1, num_pos)
+
+        # box_prob: P{a_{j} \in A_{+}}
+        box_prob = torch.stack(box_prob, dim=0)
+
+        # negative_loss:
+        # \sum_{j}{ FL((1 - P{a_{j} \in A_{+}}) * (1 - P_{j}^{bg})) } / n||B||
+        negative_loss = self.negative_bag_loss(cls_prob, box_prob).sum() / max(
+            1, num_pos * self.pre_anchor_topk)
+
+        # avoid the absence of gradients in regression subnet
+        # when no ground-truth in a batch
+        if num_pos == 0:
+            positive_loss = bbox_preds.sum() * 0
+
+        losses = {
+            'positive_bag_loss': positive_loss,
+            'negative_bag_loss': negative_loss
+        }
+        return losses
+
+    def positive_bag_loss(self, matched_cls_prob, matched_box_prob):
+        """Compute positive bag loss.
+
+        :math:`-log( Mean-max(P_{ij}^{cls} * P_{ij}^{loc}) )`.
+
+        :math:`P_{ij}^{cls}`: matched_cls_prob, classification probability of
+        matched samples.
+
+        :math:`P_{ij}^{loc}`: matched_box_prob, box probability of matched
+        samples.
+
+        Args:
+            matched_cls_prob (Tensor): Classification probability of matched
+                samples in shape (num_gt, pre_anchor_topk).
+            matched_box_prob (Tensor): BBox probability of matched samples,
+                in shape (num_gt, pre_anchor_topk).
+
+        Returns:
+            Tensor: Positive bag loss in shape (num_gt,).
+        """  # noqa: E501, W605
+        # bag_prob = Mean-max(matched_prob)
+        matched_prob = matched_cls_prob * matched_box_prob
+        weight = 1 / torch.clamp(1 - matched_prob, 1e-12, None)
+        weight /= weight.sum(dim=1).unsqueeze(dim=-1)
+        bag_prob = (weight * matched_prob).sum(dim=1)
+        # positive_bag_loss = -self.alpha * log(bag_prob)
+        return self.alpha * F.binary_cross_entropy(
+            bag_prob, torch.ones_like(bag_prob), reduction='none')
+
+    def negative_bag_loss(self, cls_prob, box_prob):
+        """Compute negative bag loss.
+
+        :math:`FL((1 - P_{a_{j} \in A_{+}}) * (1 - P_{j}^{bg}))`.
+
+        :math:`P_{a_{j} \in A_{+}}`: Box probability of matched samples.
+
+        :math:`P_{j}^{bg}`: Classification probability of negative samples.
+
+        Args:
+            cls_prob (Tensor): Classification probability, in shape
+                (num_img, num_anchors, num_classes).
+            box_prob (Tensor): Box probability, in shape
+                (num_img, num_anchors, num_classes).
+
+        Returns:
+            Tensor: Negative bag loss in shape
+                (num_img, num_anchors, num_classes).
+        """  # noqa: E501, W605
+        prob = cls_prob * (1 - box_prob)
+        # There are some cases when neg_prob = 0.
+        # This will cause the neg_prob.log() to be inf without clamp.
+        prob = prob.clamp(min=EPS, max=1 - EPS)
+        negative_bag_loss = prob**self.gamma * F.binary_cross_entropy(
+            prob, torch.zeros_like(prob), reduction='none')
+        return (1 - self.alpha) * negative_bag_loss
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/fsaf_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/fsaf_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..c23e3699f76892989bb06bba1fb25cd43c39da12
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/fsaf_head.py
@@ -0,0 +1,418 @@
+import numpy as np
+import torch
+from mmcv.cnn import normal_init
+from mmcv.runner import force_fp32
+
+from mmdet.core import (anchor_inside_flags, images_to_levels, multi_apply,
+                        unmap)
+from ..builder import HEADS
+from ..losses.accuracy import accuracy
+from ..losses.utils import weight_reduce_loss
+from .retina_head import RetinaHead
+
+
+@HEADS.register_module()
+class FSAFHead(RetinaHead):
+    """Anchor-free head used in `FSAF <https://arxiv.org/abs/1903.00621>`_.
+
+    The head contains two subnetworks.
The first classifies anchor boxes and + the second regresses deltas for the anchors (num_anchors is 1 for anchor- + free methods) + + Args: + *args: Same as its base class in :class:`RetinaHead` + score_threshold (float, optional): The score_threshold to calculate + positive recall. If given, prediction scores lower than this value + is counted as incorrect prediction. Default to None. + **kwargs: Same as its base class in :class:`RetinaHead` + + Example: + >>> import torch + >>> self = FSAFHead(11, 7) + >>> x = torch.rand(1, 7, 32, 32) + >>> cls_score, bbox_pred = self.forward_single(x) + >>> # Each anchor predicts a score for each class except background + >>> cls_per_anchor = cls_score.shape[1] / self.num_anchors + >>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors + >>> assert cls_per_anchor == self.num_classes + >>> assert box_per_anchor == 4 + """ + + def __init__(self, *args, score_threshold=None, **kwargs): + super().__init__(*args, **kwargs) + self.score_threshold = score_threshold + + def forward_single(self, x): + """Forward feature map of a single scale level. + + Args: + x (Tensor): Feature map of a single scale level. + + Returns: + tuple (Tensor): + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_points * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). + """ + cls_score, bbox_pred = super().forward_single(x) + # relu: TBLR encoder only accepts positive bbox_pred + return cls_score, self.relu(bbox_pred) + + def init_weights(self): + """Initialize weights of the head.""" + super(FSAFHead, self).init_weights() + # The positive bias in self.retina_reg conv is to prevent predicted \ + # bbox with 0 area + normal_init(self.retina_reg, std=0.01, bias=0.25) + + def _get_targets_single(self, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Most of the codes are the same with the base class + :obj: `AnchorHead`, except that it also collects and returns + the matched gt index in the image (from 0 to num_gt-1). If the + anchor bbox is not matched to any gt, the corresponding value in + pos_gt_inds is -1. 
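+
+        Returns:
+            tuple: The usual returns of :obj:`AnchorHead` plus the matched
+                gt indices: (labels, label_weights, bbox_targets,
+                bbox_weights, pos_inds, neg_inds, sampling_result,
+                pos_gt_inds).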
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # Assign gt and sample anchors + anchors = flat_anchors[inside_flags.type(torch.bool), :] + assign_result = self.assigner.assign( + anchors, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros((num_valid_anchors, label_channels), + dtype=torch.float) + pos_gt_inds = anchors.new_full((num_valid_anchors, ), + -1, + dtype=torch.long) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + + if len(pos_inds) > 0: + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + # The assigned gt_index for each anchor. (0-based) + pos_gt_inds[pos_inds] = sampling_result.pos_assigned_gt_inds + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # shadowed_labels is a tensor composed of tuples + # (anchor_inds, class_label) that indicate those anchors lying in the + # outer region of a gt or overlapped by another gt with a smaller + # area. + # + # Therefore, only the shadowed labels are ignored for loss calculation. + # the key `shadowed_labels` is defined in :obj:`CenterRegionAssigner` + shadowed_labels = assign_result.get_extra_property('shadowed_labels') + if shadowed_labels is not None and shadowed_labels.numel(): + if len(shadowed_labels.shape) == 2: + idx_, label_ = shadowed_labels[:, 0], shadowed_labels[:, 1] + assert (labels[idx_] != label_).all(), \ + 'One label cannot be both positive and ignored' + label_weights[idx_, label_] = 0 + else: + label_weights[shadowed_labels] = 0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + labels = unmap(labels, num_total_anchors, inside_flags) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + pos_gt_inds = unmap( + pos_gt_inds, num_total_anchors, inside_flags, fill=-1) + + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds, sampling_result, pos_gt_inds) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). 
+            gt_bboxes (list[Tensor]): Each item is the ground-truth boxes for
+                one image, in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): Class indices corresponding to each box.
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): Specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+        """
+        for i in range(len(bbox_preds)):  # loop over fpn level
+            # avoid 0 area of the predicted bbox
+            bbox_preds[i] = bbox_preds[i].clamp(min=1e-4)
+        # TODO: It may directly use the base-class loss function.
+        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
+        assert len(featmap_sizes) == self.anchor_generator.num_levels
+        batch_size = len(gt_bboxes)
+        device = cls_scores[0].device
+        anchor_list, valid_flag_list = self.get_anchors(
+            featmap_sizes, img_metas, device=device)
+        label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1
+        cls_reg_targets = self.get_targets(
+            anchor_list,
+            valid_flag_list,
+            gt_bboxes,
+            img_metas,
+            gt_bboxes_ignore_list=gt_bboxes_ignore,
+            gt_labels_list=gt_labels,
+            label_channels=label_channels)
+        if cls_reg_targets is None:
+            return None
+        (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list,
+         num_total_pos, num_total_neg,
+         pos_assigned_gt_inds_list) = cls_reg_targets
+
+        num_gts = np.array(list(map(len, gt_labels)))
+        num_total_samples = (
+            num_total_pos + num_total_neg if self.sampling else num_total_pos)
+        # anchor number of multi levels
+        num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]]
+        # concat all level anchors and flags to a single tensor
+        concat_anchor_list = []
+        for i in range(len(anchor_list)):
+            concat_anchor_list.append(torch.cat(anchor_list[i]))
+        all_anchor_list = images_to_levels(concat_anchor_list,
+                                           num_level_anchors)
+        losses_cls, losses_bbox = multi_apply(
+            self.loss_single,
+            cls_scores,
+            bbox_preds,
+            all_anchor_list,
+            labels_list,
+            label_weights_list,
+            bbox_targets_list,
+            bbox_weights_list,
+            num_total_samples=num_total_samples)
+
+        # `pos_assigned_gt_inds_list` (length: fpn_levels) stores the assigned
+        # gt index of each anchor bbox in each fpn level.
+        cum_num_gts = list(np.cumsum(num_gts))  # length of batch_size
+        for i, assign in enumerate(pos_assigned_gt_inds_list):
+            # loop over fpn levels
+            for j in range(1, batch_size):
+                # loop over batch size
+                # Convert gt indices in each img to those in the batch
+                assign[j][assign[j] >= 0] += int(cum_num_gts[j - 1])
+            pos_assigned_gt_inds_list[i] = assign.flatten()
+            labels_list[i] = labels_list[i].flatten()
+        num_gts = sum(map(len, gt_labels))  # total number of gt in the batch
+        # The unique label index of each gt in the batch
+        label_sequence = torch.arange(num_gts, device=device)
+        # Collect the average loss of each gt in each level
+        with torch.no_grad():
+            loss_levels, = multi_apply(
+                self.collect_loss_level_single,
+                losses_cls,
+                losses_bbox,
+                pos_assigned_gt_inds_list,
+                labels_seq=label_sequence)
+            # Shape: (fpn_levels, num_gts). Loss of each gt at each fpn level
+            loss_levels = torch.stack(loss_levels, dim=0)
+            # Locate the best fpn level for loss back-propagation
+            if loss_levels.numel() == 0:  # zero gt
+                argmin = loss_levels.new_empty((num_gts, ), dtype=torch.long)
+            else:
+                _, argmin = loss_levels.min(dim=0)
+
+        # Reweight the loss of each (anchor, label) pair, so that only those
+        # at the best gt level are back-propagated.
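+        # Illustrative shape note (assuming 5 FPN levels and G gts in the
+        # batch): `loss_levels` above is (5, G) and `argmin[g]` is the level
+        # whose anchors currently yield the smallest loss for gt g, i.e.
+        # FSAF's online feature selection. The reweighting below zeroes the
+        # positive weights of every other level for that gt.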
+ losses_cls, losses_bbox, pos_inds = multi_apply( + self.reweight_loss_single, + losses_cls, + losses_bbox, + pos_assigned_gt_inds_list, + labels_list, + list(range(len(losses_cls))), + min_levels=argmin) + num_pos = torch.cat(pos_inds, 0).sum().float() + pos_recall = self.calculate_pos_recall(cls_scores, labels_list, + pos_inds) + + if num_pos == 0: # No gt + avg_factor = num_pos + float(num_total_neg) + else: + avg_factor = num_pos + for i in range(len(losses_cls)): + losses_cls[i] /= avg_factor + losses_bbox[i] /= avg_factor + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + num_pos=num_pos / batch_size, + pos_recall=pos_recall) + + def calculate_pos_recall(self, cls_scores, labels_list, pos_inds): + """Calculate positive recall with score threshold. + + Args: + cls_scores (list[Tensor]): Classification scores at all fpn levels. + Each tensor is in shape (N, num_classes * num_anchors, H, W) + labels_list (list[Tensor]): The label that each anchor is assigned + to. Shape (N * H * W * num_anchors, ) + pos_inds (list[Tensor]): List of bool tensors indicating whether + the anchor is assigned to a positive label. + Shape (N * H * W * num_anchors, ) + + Returns: + Tensor: A single float number indicating the positive recall. + """ + with torch.no_grad(): + num_class = self.num_classes + scores = [ + cls.permute(0, 2, 3, 1).reshape(-1, num_class)[pos] + for cls, pos in zip(cls_scores, pos_inds) + ] + labels = [ + label.reshape(-1)[pos] + for label, pos in zip(labels_list, pos_inds) + ] + scores = torch.cat(scores, dim=0) + labels = torch.cat(labels, dim=0) + if self.use_sigmoid_cls: + scores = scores.sigmoid() + else: + scores = scores.softmax(dim=1) + + return accuracy(scores, labels, thresh=self.score_threshold) + + def collect_loss_level_single(self, cls_loss, reg_loss, assigned_gt_inds, + labels_seq): + """Get the average loss in each FPN level w.r.t. each gt label. + + Args: + cls_loss (Tensor): Classification loss of each feature map pixel, + shape (num_anchor, num_class) + reg_loss (Tensor): Regression loss of each feature map pixel, + shape (num_anchor, 4) + assigned_gt_inds (Tensor): It indicates which gt the prior is + assigned to (0-based, -1: no assignment). shape (num_anchor), + labels_seq: The rank of labels. shape (num_gt) + + Returns: + shape: (num_gt), average loss of each gt in this level + """ + if len(reg_loss.shape) == 2: # iou loss has shape (num_prior, 4) + reg_loss = reg_loss.sum(dim=-1) # sum loss in tblr dims + if len(cls_loss.shape) == 2: + cls_loss = cls_loss.sum(dim=-1) # sum loss in class dims + loss = cls_loss + reg_loss + assert loss.size(0) == assigned_gt_inds.size(0) + # Default loss value is 1e6 for a layer where no anchor is positive + # to ensure it will not be chosen to back-propagate gradient + losses_ = loss.new_full(labels_seq.shape, 1e6) + for i, l in enumerate(labels_seq): + match = assigned_gt_inds == l + if match.any(): + losses_[i] = loss[match].mean() + return losses_, + + def reweight_loss_single(self, cls_loss, reg_loss, assigned_gt_inds, + labels, level, min_levels): + """Reweight loss values at each level. + + Reassign loss values at each level by masking those where the + pre-calculated loss is too large. Then return the reduced losses. + + Args: + cls_loss (Tensor): Element-wise classification loss. + Shape: (num_anchors, num_classes) + reg_loss (Tensor): Element-wise regression loss. + Shape: (num_anchors, 4) + assigned_gt_inds (Tensor): The gt indices that each anchor bbox + is assigned to. 
+                -1 denotes a negative anchor, otherwise it is the
+                gt index (0-based). Shape: (num_anchors, ).
+            labels (Tensor): Label assigned to anchors. Shape: (num_anchors, ).
+            level (int): The current level index in the pyramid
+                (0-4 for RetinaNet).
+            min_levels (Tensor): The best-matching level for each gt.
+                Shape: (num_gts, ).
+
+        Returns:
+            tuple:
+                - cls_loss: Reduced corrected classification loss. Scalar.
+                - reg_loss: Reduced corrected regression loss. Scalar.
+                - pos_flags (Tensor): Corrected bool tensor indicating the
+                  final positive anchors. Shape: (num_anchors, ).
+        """
+        loc_weight = torch.ones_like(reg_loss)
+        cls_weight = torch.ones_like(cls_loss)
+        pos_flags = assigned_gt_inds >= 0  # positive pixel flag
+        pos_indices = torch.nonzero(pos_flags, as_tuple=False).flatten()
+
+        if pos_flags.any():  # pos pixels exist
+            pos_assigned_gt_inds = assigned_gt_inds[pos_flags]
+            zeroing_indices = (min_levels[pos_assigned_gt_inds] != level)
+            neg_indices = pos_indices[zeroing_indices]
+
+            if neg_indices.numel():
+                pos_flags[neg_indices] = 0
+                loc_weight[neg_indices] = 0
+                # Only the weight corresponding to the label is
+                # zeroed out if not selected
+                zeroing_labels = labels[neg_indices]
+                assert (zeroing_labels >= 0).all()
+                cls_weight[neg_indices, zeroing_labels] = 0
+
+        # Weighted loss for both cls and reg loss
+        cls_loss = weight_reduce_loss(cls_loss, cls_weight, reduction='sum')
+        reg_loss = weight_reduce_loss(reg_loss, loc_weight, reduction='sum')
+
+        return cls_loss, reg_loss, pos_flags
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/ga_retina_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/ga_retina_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..8822d1ca78ee2fa2f304a0649e81274830383533
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/ga_retina_head.py
@@ -0,0 +1,109 @@
+import torch.nn as nn
+from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init
+from mmcv.ops import MaskedConv2d
+
+from ..builder import HEADS
+from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead
+
+
+@HEADS.register_module()
+class GARetinaHead(GuidedAnchorHead):
+    """Guided-Anchor-based RetinaNet head."""
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 stacked_convs=4,
+                 conv_cfg=None,
+                 norm_cfg=None,
+                 **kwargs):
+        self.stacked_convs = stacked_convs
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+        super(GARetinaHead, self).__init__(num_classes, in_channels, **kwargs)
+
+    def _init_layers(self):
+        """Initialize layers of the head."""
+        self.relu = nn.ReLU(inplace=True)
+        self.cls_convs = nn.ModuleList()
+        self.reg_convs = nn.ModuleList()
+        for i in range(self.stacked_convs):
+            chn = self.in_channels if i == 0 else self.feat_channels
+            self.cls_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+            self.reg_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+
+        self.conv_loc = nn.Conv2d(self.feat_channels, 1, 1)
+        self.conv_shape = nn.Conv2d(self.feat_channels, self.num_anchors * 2,
+                                    1)
+        self.feature_adaption_cls = FeatureAdaption(
+            self.feat_channels,
+            self.feat_channels,
+            kernel_size=3,
+            deform_groups=self.deform_groups)
+        self.feature_adaption_reg = FeatureAdaption(
+            self.feat_channels,
+            self.feat_channels,
+            kernel_size=3,
+            deform_groups=self.deform_groups)
+        self.retina_cls = MaskedConv2d(
+            self.feat_channels,
self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.retina_reg = MaskedConv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + + def init_weights(self): + """Initialize weights of the layer.""" + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, std=0.01) + + self.feature_adaption_cls.init_weights() + self.feature_adaption_reg.init_weights() + + bias_cls = bias_init_with_prob(0.01) + normal_init(self.conv_loc, std=0.01, bias=bias_cls) + normal_init(self.conv_shape, std=0.01) + normal_init(self.retina_cls, std=0.01, bias=bias_cls) + normal_init(self.retina_reg, std=0.01) + + def forward_single(self, x): + """Forward feature map of a single scale level.""" + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + + loc_pred = self.conv_loc(cls_feat) + shape_pred = self.conv_shape(reg_feat) + + cls_feat = self.feature_adaption_cls(cls_feat, shape_pred) + reg_feat = self.feature_adaption_reg(reg_feat, shape_pred) + + if not self.training: + mask = loc_pred.sigmoid()[0] >= self.loc_filter_thr + else: + mask = None + cls_score = self.retina_cls(cls_feat, mask) + bbox_pred = self.retina_reg(reg_feat, mask) + return cls_score, bbox_pred, shape_pred, loc_pred diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/ga_rpn_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/ga_rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d3c3a84b24ee1057198f3c3c581d5887608ff48e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/ga_rpn_head.py @@ -0,0 +1,133 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import normal_init +from mmcv.ops import nms + +from ..builder import HEADS +from .guided_anchor_head import GuidedAnchorHead +from .rpn_test_mixin import RPNTestMixin + + +@HEADS.register_module() +class GARPNHead(RPNTestMixin, GuidedAnchorHead): + """Guided-Anchor-based RPN head.""" + + def __init__(self, in_channels, **kwargs): + super(GARPNHead, self).__init__(1, in_channels, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.rpn_conv = nn.Conv2d( + self.in_channels, self.feat_channels, 3, padding=1) + super(GARPNHead, self)._init_layers() + + def init_weights(self): + """Initialize weights of the head.""" + normal_init(self.rpn_conv, std=0.01) + super(GARPNHead, self).init_weights() + + def forward_single(self, x): + """Forward feature of a single scale level.""" + + x = self.rpn_conv(x) + x = F.relu(x, inplace=True) + (cls_score, bbox_pred, shape_pred, + loc_pred) = super(GARPNHead, self).forward_single(x) + return cls_score, bbox_pred, shape_pred, loc_pred + + def loss(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + img_metas, + gt_bboxes_ignore=None): + losses = super(GARPNHead, self).loss( + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + None, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + return dict( + loss_rpn_cls=losses['loss_cls'], + loss_rpn_bbox=losses['loss_bbox'], + loss_anchor_shape=losses['loss_shape'], + loss_anchor_loc=losses['loss_loc']) + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + mlvl_masks, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + mlvl_proposals = [] + for idx in range(len(cls_scores)): + rpn_cls_score = cls_scores[idx] + 
rpn_bbox_pred = bbox_preds[idx] + anchors = mlvl_anchors[idx] + mask = mlvl_masks[idx] + assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:] + # if no location is kept, end. + if mask.sum() == 0: + continue + rpn_cls_score = rpn_cls_score.permute(1, 2, 0) + if self.use_sigmoid_cls: + rpn_cls_score = rpn_cls_score.reshape(-1) + scores = rpn_cls_score.sigmoid() + else: + rpn_cls_score = rpn_cls_score.reshape(-1, 2) + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + scores = rpn_cls_score.softmax(dim=1)[:, :-1] + # filter scores, bbox_pred w.r.t. mask. + # anchors are filtered in get_anchors() beforehand. + scores = scores[mask] + rpn_bbox_pred = rpn_bbox_pred.permute(1, 2, 0).reshape(-1, + 4)[mask, :] + if scores.dim() == 0: + rpn_bbox_pred = rpn_bbox_pred.unsqueeze(0) + anchors = anchors.unsqueeze(0) + scores = scores.unsqueeze(0) + # filter anchors, bbox_pred, scores w.r.t. scores + if cfg.nms_pre > 0 and scores.shape[0] > cfg.nms_pre: + _, topk_inds = scores.topk(cfg.nms_pre) + rpn_bbox_pred = rpn_bbox_pred[topk_inds, :] + anchors = anchors[topk_inds, :] + scores = scores[topk_inds] + # get proposals w.r.t. anchors and rpn_bbox_pred + proposals = self.bbox_coder.decode( + anchors, rpn_bbox_pred, max_shape=img_shape) + # filter out too small bboxes + if cfg.min_bbox_size > 0: + w = proposals[:, 2] - proposals[:, 0] + h = proposals[:, 3] - proposals[:, 1] + valid_inds = torch.nonzero( + (w >= cfg.min_bbox_size) & (h >= cfg.min_bbox_size), + as_tuple=False).squeeze() + proposals = proposals[valid_inds, :] + scores = scores[valid_inds] + # NMS in current level + proposals, _ = nms(proposals, scores, cfg.nms_thr) + proposals = proposals[:cfg.nms_post, :] + mlvl_proposals.append(proposals) + proposals = torch.cat(mlvl_proposals, 0) + if cfg.nms_across_levels: + # NMS across multi levels + proposals, _ = nms(proposals[:, :4], proposals[:, -1], cfg.nms_thr) + proposals = proposals[:cfg.max_num, :] + else: + scores = proposals[:, 4] + num = min(cfg.max_num, proposals.shape[0]) + _, topk_inds = scores.topk(num) + proposals = proposals[topk_inds, :] + return proposals diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/gfl_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/gfl_head.py new file mode 100644 index 0000000000000000000000000000000000000000..7ee02992152ae8242ffb057808ce785c18c7034e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/gfl_head.py @@ -0,0 +1,631 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, Scale, bias_init_with_prob, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, bbox2distance, bbox_overlaps, + build_assigner, build_sampler, distance2bbox, + images_to_levels, multi_apply, multiclass_nms, + reduce_mean, unmap) +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +class Integral(nn.Module): + """A fixed layer for calculating integral result from distribution. + + This layer calculates the target location by :math: `sum{P(y_i) * y_i}`, + P(y_i) denotes the softmax vector that represents the discrete distribution + y_i denotes the discrete set, usually {0, 1, 2, ..., reg_max} + + Args: + reg_max (int): The maximal value of the discrete set. Default: 16. You + may want to reset it according to your new dataset or related + settings. 
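+
+    Example:
+        A minimal doctest-style sketch (shapes only; the random input here
+        is illustrative, not from the original module):
+
+        >>> import torch
+        >>> layer = Integral(reg_max=16)
+        >>> x = torch.rand(2, 4 * 17)   # 4 sides, reg_max + 1 = 17 bins each
+        >>> layer(x).shape              # every value lies in [0, reg_max]
+        torch.Size([2, 4])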
+ """ + + def __init__(self, reg_max=16): + super(Integral, self).__init__() + self.reg_max = reg_max + self.register_buffer('project', + torch.linspace(0, self.reg_max, self.reg_max + 1)) + + def forward(self, x): + """Forward feature from the regression head to get integral result of + bounding box location. + + Args: + x (Tensor): Features of the regression head, shape (N, 4*(n+1)), + n is self.reg_max. + + Returns: + x (Tensor): Integral result of box locations, i.e., distance + offsets from the box center in four directions, shape (N, 4). + """ + x = F.softmax(x.reshape(-1, self.reg_max + 1), dim=1) + x = F.linear(x, self.project.type_as(x)).reshape(-1, 4) + return x + + +@HEADS.register_module() +class GFLHead(AnchorHead): + """Generalized Focal Loss: Learning Qualified and Distributed Bounding + Boxes for Dense Object Detection. + + GFL head structure is similar with ATSS, however GFL uses + 1) joint representation for classification and localization quality, and + 2) flexible General distribution for bounding box locations, + which are supervised by + Quality Focal Loss (QFL) and Distribution Focal Loss (DFL), respectively + + https://arxiv.org/abs/2006.04388 + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + stacked_convs (int): Number of conv layers in cls and reg tower. + Default: 4. + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='GN', num_groups=32, requires_grad=True). + loss_qfl (dict): Config of Quality Focal Loss (QFL). + reg_max (int): Max value of integral set :math: `{0, ..., reg_max}` + in QFL setting. Default: 16. 
+ Example: + >>> self = GFLHead(11, 7) + >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]] + >>> cls_quality_score, bbox_pred = self.forward(feats) + >>> assert len(cls_quality_score) == len(self.scales) + """ + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + reg_max=16, + **kwargs): + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.reg_max = reg_max + super(GFLHead, self).__init__(num_classes, in_channels, **kwargs) + + self.sampling = False + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # SSD sampling=False so use PseudoSampler + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.integral = Integral(self.reg_max) + self.loss_dfl = build_loss(loss_dfl) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + assert self.num_anchors == 1, 'anchor free version' + self.gfl_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.gfl_reg = nn.Conv2d( + self.feat_channels, 4 * (self.reg_max + 1), 3, padding=1) + self.scales = nn.ModuleList( + [Scale(1.0) for _ in self.anchor_generator.strides]) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, std=0.01) + bias_cls = bias_init_with_prob(0.01) + normal_init(self.gfl_cls, std=0.01, bias=bias_cls) + normal_init(self.gfl_reg, std=0.01) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of classification scores and bbox prediction + cls_scores (list[Tensor]): Classification and quality (IoU) + joint scores for all scale levels, each is a 4D-tensor, + the channel number is num_classes. + bbox_preds (list[Tensor]): Box distribution logits for all + scale levels, each is a 4D-tensor, the channel number is + 4*(n+1), n is max value of integral set. + """ + return multi_apply(self.forward_single, feats, self.scales) + + def forward_single(self, x, scale): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + + Returns: + tuple: + cls_score (Tensor): Cls and quality joint scores for a single + scale level the channel number is num_classes. + bbox_pred (Tensor): Box distribution logits for a single scale + level, the channel number is 4*(n+1), n is max value of + integral set. 
+ """ + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.gfl_cls(cls_feat) + bbox_pred = scale(self.gfl_reg(reg_feat)).float() + return cls_score, bbox_pred + + def anchor_center(self, anchors): + """Get anchor centers from anchors. + + Args: + anchors (Tensor): Anchor list with shape (N, 4), "xyxy" format. + + Returns: + Tensor: Anchor centers with shape (N, 2), "xy" format. + """ + anchors_cx = (anchors[:, 2] + anchors[:, 0]) / 2 + anchors_cy = (anchors[:, 3] + anchors[:, 1]) / 2 + return torch.stack([anchors_cx, anchors_cy], dim=-1) + + def loss_single(self, anchors, cls_score, bbox_pred, labels, label_weights, + bbox_targets, stride, num_total_samples): + """Compute loss of a single scale level. + + Args: + anchors (Tensor): Box reference for each scale level with shape + (N, num_total_anchors, 4). + cls_score (Tensor): Cls and quality joint scores for each scale + level has shape (N, num_classes, H, W). + bbox_pred (Tensor): Box distribution logits for each scale + level with shape (N, 4*(n+1), H, W), n is max value of integral + set. + labels (Tensor): Labels of each anchors with shape + (N, num_total_anchors). + label_weights (Tensor): Label weights of each anchor with shape + (N, num_total_anchors) + bbox_targets (Tensor): BBox regression targets of each anchor wight + shape (N, num_total_anchors, 4). + stride (tuple): Stride in this scale level. + num_total_samples (int): Number of positive samples that is + reduced over all GPUs. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + assert stride[0] == stride[1], 'h stride is not equal to w stride!' + anchors = anchors.reshape(-1, 4) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(-1, 4 * (self.reg_max + 1)) + bbox_targets = bbox_targets.reshape(-1, 4) + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((labels >= 0) + & (labels < bg_class_ind)).nonzero().squeeze(1) + score = label_weights.new_zeros(labels.shape) + + if len(pos_inds) > 0: + pos_bbox_targets = bbox_targets[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_anchors = anchors[pos_inds] + pos_anchor_centers = self.anchor_center(pos_anchors) / stride[0] + + weight_targets = cls_score.detach().sigmoid() + weight_targets = weight_targets.max(dim=1)[0][pos_inds] + pos_bbox_pred_corners = self.integral(pos_bbox_pred) + pos_decode_bbox_pred = distance2bbox(pos_anchor_centers, + pos_bbox_pred_corners) + pos_decode_bbox_targets = pos_bbox_targets / stride[0] + score[pos_inds] = bbox_overlaps( + pos_decode_bbox_pred.detach(), + pos_decode_bbox_targets, + is_aligned=True) + pred_corners = pos_bbox_pred.reshape(-1, self.reg_max + 1) + target_corners = bbox2distance(pos_anchor_centers, + pos_decode_bbox_targets, + self.reg_max).reshape(-1) + + # regression loss + loss_bbox = self.loss_bbox( + pos_decode_bbox_pred, + pos_decode_bbox_targets, + weight=weight_targets, + avg_factor=1.0) + + # dfl loss + loss_dfl = self.loss_dfl( + pred_corners, + target_corners, + weight=weight_targets[:, None].expand(-1, 4).reshape(-1), + avg_factor=4.0) + else: + loss_bbox = bbox_pred.sum() * 0 + loss_dfl = bbox_pred.sum() * 0 + weight_targets = torch.tensor(0).cuda() + + # cls (qfl) loss + loss_cls = self.loss_cls( + cls_score, 
(labels, score), + weight=label_weights, + avg_factor=num_total_samples) + + return loss_cls, loss_bbox, loss_dfl, weight_targets.sum() + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Cls and quality scores for each scale + level has shape (N, num_classes, H, W). + bbox_preds (list[Tensor]): Box distribution logits for each scale + level with shape (N, 4*(n+1), H, W), n is max value of integral + set. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + num_total_samples = reduce_mean( + torch.tensor(num_total_pos).cuda()).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_cls, losses_bbox, losses_dfl,\ + avg_factor = multi_apply( + self.loss_single, + anchor_list, + cls_scores, + bbox_preds, + labels_list, + label_weights_list, + bbox_targets_list, + self.anchor_generator.strides, + num_total_samples=num_total_samples) + + avg_factor = sum(avg_factor) + avg_factor = reduce_mean(avg_factor).item() + losses_bbox = list(map(lambda x: x / avg_factor, losses_bbox)) + losses_dfl = list(map(lambda x: x / avg_factor, losses_dfl)) + return dict( + loss_cls=losses_cls, loss_bbox=losses_bbox, loss_dfl=losses_dfl) + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into labeled boxes. + + Args: + cls_scores (list[Tensor]): Box scores for a single scale level + has shape (num_classes, H, W). + bbox_preds (list[Tensor]): Box distribution logits for a single + scale level with shape (4*(n+1), H, W), n is max value of + integral set. + mlvl_anchors (list[Tensor]): Box reference for a single scale level + with shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. 
+ + Returns: + tuple(Tensor): + det_bboxes (Tensor): Bbox predictions in shape (N, 5), where + the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. + det_labels (Tensor): A (N,) tensor where each item is the + predicted class label of the corresponding box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, stride, anchors in zip( + cls_scores, bbox_preds, self.anchor_generator.strides, + mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + assert stride[0] == stride[1] + + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(1, 2, 0) + bbox_pred = self.integral(bbox_pred) * stride[0] + + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + max_scores, _ = scores.max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + + bboxes = distance2bbox( + self.anchor_center(anchors), bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + + mlvl_scores = torch.cat(mlvl_scores) + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + + if with_nms: + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): + """Get targets for GFL head. + + This method is almost the same as `AnchorHead.get_targets()`. Besides + returning the targets as the parent method does, it also returns the + anchors as the first element of the returned tuple. 
+ """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + num_level_anchors_list = [num_level_anchors] * num_imgs + + # concat all level anchors and flags to a single tensor + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + anchor_list[i] = torch.cat(anchor_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_anchors, all_labels, all_label_weights, all_bbox_targets, + all_bbox_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + anchor_list, + valid_flag_list, + num_level_anchors_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + anchors_list = images_to_levels(all_anchors, num_level_anchors) + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + return (anchors_list, labels_list, label_weights_list, + bbox_targets_list, bbox_weights_list, num_total_pos, + num_total_neg) + + def _get_target_single(self, + flat_anchors, + valid_flags, + num_level_anchors, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression, classification targets for anchors in a single + image. + + Args: + flat_anchors (Tensor): Multi-level anchors of the image, which are + concatenated into a single tensor of shape (num_anchors, 4) + valid_flags (Tensor): Multi level valid flags of the image, + which are concatenated into a single tensor of + shape (num_anchors,). + num_level_anchors Tensor): Number of anchors of each scale level. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + img_meta (dict): Meta info of the image. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: N is the number of total anchors in the image. + anchors (Tensor): All anchors in the image with shape (N, 4). + labels (Tensor): Labels of all anchors in the image with shape + (N,). + label_weights (Tensor): Label weights of all anchor in the + image with shape (N,). + bbox_targets (Tensor): BBox targets of all anchors in the + image with shape (N, 4). + bbox_weights (Tensor): BBox weights of all anchors in the + image with shape (N, 4). + pos_inds (Tensor): Indices of postive anchor with shape + (num_pos,). + neg_inds (Tensor): Indices of negative anchor with shape + (num_neg,). 
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + num_level_anchors_inside = self.get_num_level_anchors_inside( + num_level_anchors, inside_flags) + assign_result = self.assigner.assign(anchors, num_level_anchors_inside, + gt_bboxes, gt_bboxes_ignore, + gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + anchors = unmap(anchors, num_total_anchors, inside_flags) + labels = unmap( + labels, num_total_anchors, inside_flags, fill=self.num_classes) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (anchors, labels, label_weights, bbox_targets, bbox_weights, + pos_inds, neg_inds) + + def get_num_level_anchors_inside(self, num_level_anchors, inside_flags): + split_inside_flags = torch.split(inside_flags, num_level_anchors) + num_level_anchors_inside = [ + int(flags.sum()) for flags in split_inside_flags + ] + return num_level_anchors_inside diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/guided_anchor_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/guided_anchor_head.py new file mode 100644 index 0000000000000000000000000000000000000000..062df486495acdaf01160a5ed7514c1e77f28741 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/guided_anchor_head.py @@ -0,0 +1,855 @@ +import torch +import torch.nn as nn +from mmcv.cnn import bias_init_with_prob, normal_init +from mmcv.ops import DeformConv2d, MaskedConv2d +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, build_anchor_generator, + build_assigner, build_bbox_coder, build_sampler, + calc_region, images_to_levels, multi_apply, + multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +class FeatureAdaption(nn.Module): + """Feature Adaption Module. + + Feature Adaption Module is implemented based on DCN v1. + It uses anchor shape prediction rather than feature map to + predict offsets of deform conv layer. + + Args: + in_channels (int): Number of channels in the input feature map. + out_channels (int): Number of channels in the output feature map. + kernel_size (int): Deformable conv kernel size. 
+ deform_groups (int): Deformable conv group size. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + deform_groups=4): + super(FeatureAdaption, self).__init__() + offset_channels = kernel_size * kernel_size * 2 + self.conv_offset = nn.Conv2d( + 2, deform_groups * offset_channels, 1, bias=False) + self.conv_adaption = DeformConv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=(kernel_size - 1) // 2, + deform_groups=deform_groups) + self.relu = nn.ReLU(inplace=True) + + def init_weights(self): + normal_init(self.conv_offset, std=0.1) + normal_init(self.conv_adaption, std=0.01) + + def forward(self, x, shape): + offset = self.conv_offset(shape.detach()) + x = self.relu(self.conv_adaption(x, offset)) + return x + + +@HEADS.register_module() +class GuidedAnchorHead(AnchorHead): + """Guided-Anchor-based head (GA-RPN, GA-RetinaNet, etc.). + + This GuidedAnchorHead will predict high-quality feature guided + anchors and locations where anchors will be kept in inference. + There are mainly 3 categories of bounding-boxes. + + - Sampled 9 pairs for target assignment. (approxes) + - The square boxes where the predicted anchors are based on. (squares) + - Guided anchors. + + Please refer to https://arxiv.org/abs/1901.03278 for more details. + + Args: + num_classes (int): Number of classes. + in_channels (int): Number of channels in the input feature map. + feat_channels (int): Number of hidden channels. + approx_anchor_generator (dict): Config dict for approx generator + square_anchor_generator (dict): Config dict for square generator + anchor_coder (dict): Config dict for anchor coder + bbox_coder (dict): Config dict for bbox coder + deform_groups: (int): Group number of DCN in + FeatureAdaption module. + loc_filter_thr (float): Threshold to filter out unconcerned regions. + loss_loc (dict): Config of location loss. + loss_shape (dict): Config of anchor shape loss. + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of bbox regression loss. 
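+
+    Example:
+        A minimal construction sketch with the default anchor generators and
+        ``train_cfg=None`` (illustrative only; the deformable conv inside
+        ``FeatureAdaption`` may require a CUDA-enabled mmcv build to run):
+
+        >>> import torch
+        >>> head = GuidedAnchorHead(num_classes=9, in_channels=256)
+        >>> feats = [torch.rand(1, 256, 2 ** (5 - i), 2 ** (5 - i))
+        ...          for i in range(5)]
+        >>> cls_scores, bbox_preds, shape_preds, loc_preds = head(feats)
+        >>> len(cls_scores)  # one prediction per feature level
+        5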
+ """ + + def __init__( + self, + num_classes, + in_channels, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0] + ), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0] + ), + reg_decoded_bbox=False, + deform_groups=4, + loc_filter_thr=0.01, + train_cfg=None, + test_cfg=None, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)): # yapf: disable + super(AnchorHead, self).__init__() + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.deform_groups = deform_groups + self.loc_filter_thr = loc_filter_thr + + # build approx_anchor_generator and square_anchor_generator + assert (approx_anchor_generator['octave_base_scale'] == + square_anchor_generator['scales'][0]) + assert (approx_anchor_generator['strides'] == + square_anchor_generator['strides']) + self.approx_anchor_generator = build_anchor_generator( + approx_anchor_generator) + self.square_anchor_generator = build_anchor_generator( + square_anchor_generator) + self.approxs_per_octave = self.approx_anchor_generator \ + .num_base_anchors[0] + + self.reg_decoded_bbox = reg_decoded_bbox + + # one anchor per location + self.num_anchors = 1 + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + self.loc_focal_loss = loss_loc['type'] in ['FocalLoss'] + self.sampling = loss_cls['type'] not in ['FocalLoss'] + self.ga_sampling = train_cfg is not None and hasattr( + train_cfg, 'ga_sampler') + if self.use_sigmoid_cls: + self.cls_out_channels = self.num_classes + else: + self.cls_out_channels = self.num_classes + 1 + + # build bbox_coder + self.anchor_coder = build_bbox_coder(anchor_coder) + self.bbox_coder = build_bbox_coder(bbox_coder) + + # build losses + self.loss_loc = build_loss(loss_loc) + self.loss_shape = build_loss(loss_shape) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.ga_assigner = build_assigner(self.train_cfg.ga_assigner) + if self.ga_sampling: + ga_sampler_cfg = self.train_cfg.ga_sampler + else: + ga_sampler_cfg = dict(type='PseudoSampler') + self.ga_sampler = build_sampler(ga_sampler_cfg, context=self) + + self.fp16_enabled = False + + self._init_layers() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.conv_loc = nn.Conv2d(self.in_channels, 1, 1) + self.conv_shape = nn.Conv2d(self.in_channels, self.num_anchors * 2, 1) + self.feature_adaption = FeatureAdaption( + self.in_channels, + self.feat_channels, + kernel_size=3, + 
deform_groups=self.deform_groups) + self.conv_cls = MaskedConv2d(self.feat_channels, + self.num_anchors * self.cls_out_channels, + 1) + self.conv_reg = MaskedConv2d(self.feat_channels, self.num_anchors * 4, + 1) + + def init_weights(self): + normal_init(self.conv_cls, std=0.01) + normal_init(self.conv_reg, std=0.01) + + bias_cls = bias_init_with_prob(0.01) + normal_init(self.conv_loc, std=0.01, bias=bias_cls) + normal_init(self.conv_shape, std=0.01) + + self.feature_adaption.init_weights() + + def forward_single(self, x): + loc_pred = self.conv_loc(x) + shape_pred = self.conv_shape(x) + x = self.feature_adaption(x, shape_pred) + # masked conv is only used during inference for speed-up + if not self.training: + mask = loc_pred.sigmoid()[0] >= self.loc_filter_thr + else: + mask = None + cls_score = self.conv_cls(x, mask) + bbox_pred = self.conv_reg(x, mask) + return cls_score, bbox_pred, shape_pred, loc_pred + + def forward(self, feats): + return multi_apply(self.forward_single, feats) + + def get_sampled_approxs(self, featmap_sizes, img_metas, device='cuda'): + """Get sampled approxs and inside flags according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. + device (torch.device | str): device for returned tensors + + Returns: + tuple: approxes of each image, inside flags of each image + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # approxes for one time + multi_level_approxs = self.approx_anchor_generator.grid_anchors( + featmap_sizes, device=device) + approxs_list = [multi_level_approxs for _ in range(num_imgs)] + + # for each image, we compute inside flags of multi level approxes + inside_flag_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_flags = [] + multi_level_approxs = approxs_list[img_id] + + # obtain valid flags for each approx first + multi_level_approx_flags = self.approx_anchor_generator \ + .valid_flags(featmap_sizes, + img_meta['pad_shape'], + device=device) + + for i, flags in enumerate(multi_level_approx_flags): + approxs = multi_level_approxs[i] + inside_flags_list = [] + for i in range(self.approxs_per_octave): + split_valid_flags = flags[i::self.approxs_per_octave] + split_approxs = approxs[i::self.approxs_per_octave, :] + inside_flags = anchor_inside_flags( + split_approxs, split_valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + inside_flags_list.append(inside_flags) + # inside_flag for a position is true if any anchor in this + # position is true + inside_flags = ( + torch.stack(inside_flags_list, 0).sum(dim=0) > 0) + multi_level_flags.append(inside_flags) + inside_flag_list.append(multi_level_flags) + return approxs_list, inside_flag_list + + def get_anchors(self, + featmap_sizes, + shape_preds, + loc_preds, + img_metas, + use_loc_filter=False, + device='cuda'): + """Get squares according to feature map sizes and guided anchors. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + shape_preds (list[tensor]): Multi-level shape predictions. + loc_preds (list[tensor]): Multi-level location predictions. + img_metas (list[dict]): Image meta info. + use_loc_filter (bool): Use loc filter or not. 
+            device (torch.device | str): device for returned tensors
+
+        Returns:
+            tuple: square approxs of each image, guided anchors of each image,
+                loc masks of each image
+        """
+        num_imgs = len(img_metas)
+        num_levels = len(featmap_sizes)
+
+        # since feature map sizes of all images are the same, we only compute
+        # squares for one time
+        multi_level_squares = self.square_anchor_generator.grid_anchors(
+            featmap_sizes, device=device)
+        squares_list = [multi_level_squares for _ in range(num_imgs)]
+
+        # for each image, we compute multi level guided anchors
+        guided_anchors_list = []
+        loc_mask_list = []
+        for img_id, img_meta in enumerate(img_metas):
+            multi_level_guided_anchors = []
+            multi_level_loc_mask = []
+            for i in range(num_levels):
+                squares = squares_list[img_id][i]
+                shape_pred = shape_preds[i][img_id]
+                loc_pred = loc_preds[i][img_id]
+                guided_anchors, loc_mask = self._get_guided_anchors_single(
+                    squares,
+                    shape_pred,
+                    loc_pred,
+                    use_loc_filter=use_loc_filter)
+                multi_level_guided_anchors.append(guided_anchors)
+                multi_level_loc_mask.append(loc_mask)
+            guided_anchors_list.append(multi_level_guided_anchors)
+            loc_mask_list.append(multi_level_loc_mask)
+        return squares_list, guided_anchors_list, loc_mask_list
+
+    def _get_guided_anchors_single(self,
+                                   squares,
+                                   shape_pred,
+                                   loc_pred,
+                                   use_loc_filter=False):
+        """Get guided anchors and loc masks for a single level.
+
+        Args:
+            squares (tensor): Squares of a single level.
+            shape_pred (tensor): Shape predictions of a single level.
+            loc_pred (tensor): Loc predictions of a single level.
+            use_loc_filter (bool): Use loc filter or not.
+
+        Returns:
+            tuple: guided anchors, location masks
+        """
+        # calculate location filtering mask
+        loc_pred = loc_pred.sigmoid().detach()
+        if use_loc_filter:
+            loc_mask = loc_pred >= self.loc_filter_thr
+        else:
+            loc_mask = loc_pred >= 0.0
+        mask = loc_mask.permute(1, 2, 0).expand(-1, -1, self.num_anchors)
+        mask = mask.contiguous().view(-1)
+        # calculate guided anchors
+        squares = squares[mask]
+        anchor_deltas = shape_pred.permute(1, 2, 0).contiguous().view(
+            -1, 2).detach()[mask]
+        bbox_deltas = anchor_deltas.new_full(squares.size(), 0)
+        bbox_deltas[:, 2:] = anchor_deltas
+        guided_anchors = self.anchor_coder.decode(
+            squares, bbox_deltas, wh_ratio_clip=1e-6)
+        return guided_anchors, mask
+
+    def ga_loc_targets(self, gt_bboxes_list, featmap_sizes):
+        """Compute location targets for guided anchoring.
+
+        Each feature map is divided into positive, negative and ignore regions.
+        - positive regions: target 1, weight 1
+        - ignore regions: target 0, weight 0
+        - negative regions: target 0, weight 0.1
+
+        Args:
+            gt_bboxes_list (list[Tensor]): Gt bboxes of each image.
+            featmap_sizes (list[tuple]): Multi level sizes of each feature
+                maps.
+
+        Returns:
+            tuple
+        """
+        anchor_scale = self.approx_anchor_generator.octave_base_scale
+        anchor_strides = self.approx_anchor_generator.strides
+        # Currently only supports same stride in x and y direction.
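+        # Illustrative numbers (assuming the common GA train_cfg of
+        # center_ratio=0.2 and ignore_ratio=0.5): r1 = 0.4 and r2 = 0.25
+        # below, so each gt marks its central 20% region positive
+        # (target 1, weight 1), the band out to 50% ignored (weight 0),
+        # and everything else negative with a down-weighted loss (0.1).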
+ for stride in anchor_strides: + assert (stride[0] == stride[1]) + anchor_strides = [stride[0] for stride in anchor_strides] + + center_ratio = self.train_cfg.center_ratio + ignore_ratio = self.train_cfg.ignore_ratio + img_per_gpu = len(gt_bboxes_list) + num_lvls = len(featmap_sizes) + r1 = (1 - center_ratio) / 2 + r2 = (1 - ignore_ratio) / 2 + all_loc_targets = [] + all_loc_weights = [] + all_ignore_map = [] + for lvl_id in range(num_lvls): + h, w = featmap_sizes[lvl_id] + loc_targets = torch.zeros( + img_per_gpu, + 1, + h, + w, + device=gt_bboxes_list[0].device, + dtype=torch.float32) + loc_weights = torch.full_like(loc_targets, -1) + ignore_map = torch.zeros_like(loc_targets) + all_loc_targets.append(loc_targets) + all_loc_weights.append(loc_weights) + all_ignore_map.append(ignore_map) + for img_id in range(img_per_gpu): + gt_bboxes = gt_bboxes_list[img_id] + scale = torch.sqrt((gt_bboxes[:, 2] - gt_bboxes[:, 0]) * + (gt_bboxes[:, 3] - gt_bboxes[:, 1])) + min_anchor_size = scale.new_full( + (1, ), float(anchor_scale * anchor_strides[0])) + # assign gt bboxes to different feature levels w.r.t. their scales + target_lvls = torch.floor( + torch.log2(scale) - torch.log2(min_anchor_size) + 0.5) + target_lvls = target_lvls.clamp(min=0, max=num_lvls - 1).long() + for gt_id in range(gt_bboxes.size(0)): + lvl = target_lvls[gt_id].item() + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[lvl] + # calculate ignore regions + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[lvl]) + # calculate positive (center) regions + ctr_x1, ctr_y1, ctr_x2, ctr_y2 = calc_region( + gt_, r1, featmap_sizes[lvl]) + all_loc_targets[lvl][img_id, 0, ctr_y1:ctr_y2 + 1, + ctr_x1:ctr_x2 + 1] = 1 + all_loc_weights[lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 0 + all_loc_weights[lvl][img_id, 0, ctr_y1:ctr_y2 + 1, + ctr_x1:ctr_x2 + 1] = 1 + # calculate ignore map on nearby low level feature + if lvl > 0: + d_lvl = lvl - 1 + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[d_lvl] + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[d_lvl]) + all_ignore_map[d_lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 1 + # calculate ignore map on nearby high level feature + if lvl < num_lvls - 1: + u_lvl = lvl + 1 + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[u_lvl] + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[u_lvl]) + all_ignore_map[u_lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 1 + for lvl_id in range(num_lvls): + # ignore negative regions w.r.t. ignore map + all_loc_weights[lvl_id][(all_loc_weights[lvl_id] < 0) + & (all_ignore_map[lvl_id] > 0)] = 0 + # set negative regions with weight 0.1 + all_loc_weights[lvl_id][all_loc_weights[lvl_id] < 0] = 0.1 + # loc average factor to balance loss + loc_avg_factor = sum( + [t.size(0) * t.size(-1) * t.size(-2) + for t in all_loc_targets]) / 200 + return all_loc_targets, all_loc_weights, loc_avg_factor + + def _ga_shape_target_single(self, + flat_approxs, + inside_flags, + flat_squares, + gt_bboxes, + gt_bboxes_ignore, + img_meta, + unmap_outputs=True): + """Compute guided anchoring targets. + + This function returns sampled anchors and gt bboxes directly + rather than calculates regression targets. 
+ + Args: + flat_approxs (Tensor): flat approxs of a single image, + shape (n, 4) + inside_flags (Tensor): inside flags of a single image, + shape (n, ). + flat_squares (Tensor): flat squares of a single image, + shape (approxs_per_octave * n, 4) + gt_bboxes (Tensor): Ground truth bboxes of a single image. + img_meta (dict): Meta info of a single image. + approxs_per_octave (int): number of approxs per octave + cfg (dict): RPN train configs. + unmap_outputs (bool): unmap outputs or not. + + Returns: + tuple + """ + if not inside_flags.any(): + return (None, ) * 5 + # assign gt and sample anchors + expand_inside_flags = inside_flags[:, None].expand( + -1, self.approxs_per_octave).reshape(-1) + approxs = flat_approxs[expand_inside_flags, :] + squares = flat_squares[inside_flags, :] + + assign_result = self.ga_assigner.assign(approxs, squares, + self.approxs_per_octave, + gt_bboxes, gt_bboxes_ignore) + sampling_result = self.ga_sampler.sample(assign_result, squares, + gt_bboxes) + + bbox_anchors = torch.zeros_like(squares) + bbox_gts = torch.zeros_like(squares) + bbox_weights = torch.zeros_like(squares) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + bbox_anchors[pos_inds, :] = sampling_result.pos_bboxes + bbox_gts[pos_inds, :] = sampling_result.pos_gt_bboxes + bbox_weights[pos_inds, :] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_squares.size(0) + bbox_anchors = unmap(bbox_anchors, num_total_anchors, inside_flags) + bbox_gts = unmap(bbox_gts, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (bbox_anchors, bbox_gts, bbox_weights, pos_inds, neg_inds) + + def ga_shape_targets(self, + approx_list, + inside_flag_list, + square_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + unmap_outputs=True): + """Compute guided anchoring targets. + + Args: + approx_list (list[list]): Multi level approxs of each image. + inside_flag_list (list[list]): Multi level inside flags of each + image. + square_list (list[list]): Multi level squares of each image. + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): ignore list of gt bboxes. + unmap_outputs (bool): unmap outputs or not. 
+ + Returns: + tuple + """ + num_imgs = len(img_metas) + assert len(approx_list) == len(inside_flag_list) == len( + square_list) == num_imgs + # anchor number of multi levels + num_level_squares = [squares.size(0) for squares in square_list[0]] + # concat all level anchors and flags to a single tensor + inside_flag_flat_list = [] + approx_flat_list = [] + square_flat_list = [] + for i in range(num_imgs): + assert len(square_list[i]) == len(inside_flag_list[i]) + inside_flag_flat_list.append(torch.cat(inside_flag_list[i])) + approx_flat_list.append(torch.cat(approx_list[i])) + square_flat_list.append(torch.cat(square_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + (all_bbox_anchors, all_bbox_gts, all_bbox_weights, pos_inds_list, + neg_inds_list) = multi_apply( + self._ga_shape_target_single, + approx_flat_list, + inside_flag_flat_list, + square_flat_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + img_metas, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([bbox_anchors is None for bbox_anchors in all_bbox_anchors]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + bbox_anchors_list = images_to_levels(all_bbox_anchors, + num_level_squares) + bbox_gts_list = images_to_levels(all_bbox_gts, num_level_squares) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_squares) + return (bbox_anchors_list, bbox_gts_list, bbox_weights_list, + num_total_pos, num_total_neg) + + def loss_shape_single(self, shape_pred, bbox_anchors, bbox_gts, + anchor_weights, anchor_total_num): + shape_pred = shape_pred.permute(0, 2, 3, 1).contiguous().view(-1, 2) + bbox_anchors = bbox_anchors.contiguous().view(-1, 4) + bbox_gts = bbox_gts.contiguous().view(-1, 4) + anchor_weights = anchor_weights.contiguous().view(-1, 4) + bbox_deltas = bbox_anchors.new_full(bbox_anchors.size(), 0) + bbox_deltas[:, 2:] += shape_pred + # filter out negative samples to speed-up weighted_bounded_iou_loss + inds = torch.nonzero( + anchor_weights[:, 0] > 0, as_tuple=False).squeeze(1) + bbox_deltas_ = bbox_deltas[inds] + bbox_anchors_ = bbox_anchors[inds] + bbox_gts_ = bbox_gts[inds] + anchor_weights_ = anchor_weights[inds] + pred_anchors_ = self.anchor_coder.decode( + bbox_anchors_, bbox_deltas_, wh_ratio_clip=1e-6) + loss_shape = self.loss_shape( + pred_anchors_, + bbox_gts_, + anchor_weights_, + avg_factor=anchor_total_num) + return loss_shape + + def loss_loc_single(self, loc_pred, loc_target, loc_weight, + loc_avg_factor): + loss_loc = self.loss_loc( + loc_pred.reshape(-1, 1), + loc_target.reshape(-1).long(), + loc_weight.reshape(-1), + avg_factor=loc_avg_factor) + return loss_loc + + @force_fp32( + apply_to=('cls_scores', 'bbox_preds', 'shape_preds', 'loc_preds')) + def loss(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.approx_anchor_generator.num_levels + + device = cls_scores[0].device + + # get loc targets + loc_targets, loc_weights, loc_avg_factor = self.ga_loc_targets( + gt_bboxes, featmap_sizes) + + # get sampled approxes + approxs_list, inside_flag_list = self.get_sampled_approxs( + featmap_sizes, img_metas, device=device) + # get 
squares and guided anchors + squares_list, guided_anchors_list, _ = self.get_anchors( + featmap_sizes, shape_preds, loc_preds, img_metas, device=device) + + # get shape targets + shape_targets = self.ga_shape_targets(approxs_list, inside_flag_list, + squares_list, gt_bboxes, + img_metas) + if shape_targets is None: + return None + (bbox_anchors_list, bbox_gts_list, anchor_weights_list, anchor_fg_num, + anchor_bg_num) = shape_targets + anchor_total_num = ( + anchor_fg_num if not self.ga_sampling else anchor_fg_num + + anchor_bg_num) + + # get anchor targets + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + guided_anchors_list, + inside_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [ + anchors.size(0) for anchors in guided_anchors_list[0] + ] + # concat all level anchors to a single tensor + concat_anchor_list = [] + for i in range(len(guided_anchors_list)): + concat_anchor_list.append(torch.cat(guided_anchors_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + # get classification and bbox regression losses + losses_cls, losses_bbox = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + all_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + + # get anchor location loss + losses_loc = [] + for i in range(len(loc_preds)): + loss_loc = self.loss_loc_single( + loc_preds[i], + loc_targets[i], + loc_weights[i], + loc_avg_factor=loc_avg_factor) + losses_loc.append(loss_loc) + + # get anchor shape loss + losses_shape = [] + for i in range(len(shape_preds)): + loss_shape = self.loss_shape_single( + shape_preds[i], + bbox_anchors_list[i], + bbox_gts_list[i], + anchor_weights_list[i], + anchor_total_num=anchor_total_num) + losses_shape.append(loss_shape) + + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + loss_shape=losses_shape, + loss_loc=losses_loc) + + @force_fp32( + apply_to=('cls_scores', 'bbox_preds', 'shape_preds', 'loc_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + img_metas, + cfg=None, + rescale=False): + assert len(cls_scores) == len(bbox_preds) == len(shape_preds) == len( + loc_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + device = cls_scores[0].device + # get guided anchors + _, guided_anchors, loc_masks = self.get_anchors( + featmap_sizes, + shape_preds, + loc_preds, + img_metas, + use_loc_filter=not self.training, + device=device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + guided_anchor_list = [ + guided_anchors[img_id][i].detach() for i in range(num_levels) + ] + loc_mask_list = [ + loc_masks[img_id][i].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, 
bbox_pred_list, + guided_anchor_list, + loc_mask_list, img_shape, + scale_factor, cfg, rescale) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + mlvl_masks, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, anchors, mask in zip(cls_scores, bbox_preds, + mlvl_anchors, + mlvl_masks): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + # if no location is kept, end. + if mask.sum() == 0: + continue + # reshape scores and bbox_pred + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + # filter scores, bbox_pred w.r.t. mask. + # anchors are filtered in get_anchors() beforehand. + scores = scores[mask, :] + bbox_pred = bbox_pred[mask, :] + if scores.dim() == 0: + anchors = anchors.unsqueeze(0) + scores = scores.unsqueeze(0) + bbox_pred = bbox_pred.unsqueeze(0) + # filter anchors, bbox_pred, scores w.r.t. scores + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + # multi class NMS + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/nasfcos_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/nasfcos_head.py new file mode 100644 index 0000000000000000000000000000000000000000..994ce0455e1982110f237b3958a81394c319bb47 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/nasfcos_head.py @@ -0,0 +1,75 @@ +import copy + +import torch.nn as nn +from mmcv.cnn import (ConvModule, Scale, bias_init_with_prob, + caffe2_xavier_init, normal_init) + +from mmdet.models.dense_heads.fcos_head import FCOSHead +from ..builder import HEADS + + +@HEADS.register_module() +class NASFCOSHead(FCOSHead): + """Anchor-free head used in `NASFCOS `_. + + It is quite similar with FCOS head, except for the searched structure of + classification branch and bbox regression branch, where a structure of + "dconv3x3, conv3x3, dconv3x3, conv1x1" is utilized instead. 
+ """ + + def _init_layers(self): + """Initialize layers of the head.""" + dconv3x3_config = dict( + type='DCNv2', + kernel_size=3, + use_bias=True, + deform_groups=2, + padding=1) + conv3x3_config = dict(type='Conv', kernel_size=3, padding=1) + conv1x1_config = dict(type='Conv', kernel_size=1) + + self.arch_config = [ + dconv3x3_config, conv3x3_config, dconv3x3_config, conv1x1_config + ] + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i, op_ in enumerate(self.arch_config): + op = copy.deepcopy(op_) + chn = self.in_channels if i == 0 else self.feat_channels + assert isinstance(op, dict) + use_bias = op.pop('use_bias', False) + padding = op.pop('padding', 0) + kernel_size = op.pop('kernel_size') + module = ConvModule( + chn, + self.feat_channels, + kernel_size, + stride=1, + padding=padding, + norm_cfg=self.norm_cfg, + bias=use_bias, + conv_cfg=op) + + self.cls_convs.append(copy.deepcopy(module)) + self.reg_convs.append(copy.deepcopy(module)) + + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.conv_centerness = nn.Conv2d(self.feat_channels, 1, 3, padding=1) + + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + def init_weights(self): + """Initialize weights of the head.""" + # retinanet_bias_init + bias_cls = bias_init_with_prob(0.01) + normal_init(self.conv_reg, std=0.01) + normal_init(self.conv_centerness, std=0.01) + normal_init(self.conv_cls, std=0.01, bias=bias_cls) + + for branch in [self.cls_convs, self.reg_convs]: + for module in branch.modules(): + if isinstance(module, ConvModule) \ + and isinstance(module.conv, nn.Conv2d): + caffe2_xavier_init(module.conv) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/paa_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/paa_head.py new file mode 100644 index 0000000000000000000000000000000000000000..db28abc7e62c3eaf8305289b760df1c33bf8bf05 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/paa_head.py @@ -0,0 +1,653 @@ +import numpy as np +import torch +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply, multiclass_nms +from mmdet.core.bbox.iou_calculators import bbox_overlaps +from mmdet.models import HEADS +from mmdet.models.dense_heads import ATSSHead + +EPS = 1e-12 +try: + import sklearn.mixture as skm +except ImportError: + skm = None + + +def levels_to_images(mlvl_tensor): + """Concat multi-level feature maps by image. + + [feature_level0, feature_level1...] -> [feature_image0, feature_image1...] + Convert the shape of each element in mlvl_tensor from (N, C, H, W) to + (N, H*W , C), then split the element to N elements with shape (H*W, C), and + concat elements in same image of all level along first dimension. + + Args: + mlvl_tensor (list[torch.Tensor]): list of Tensor which collect from + corresponding level. 
Each element is of shape (N, C, H, W) + + Returns: + list[torch.Tensor]: A list that contains N tensors and each tensor is + of shape (num_elements, C) + """ + batch_size = mlvl_tensor[0].size(0) + batch_list = [[] for _ in range(batch_size)] + channels = mlvl_tensor[0].size(1) + for t in mlvl_tensor: + t = t.permute(0, 2, 3, 1) + t = t.view(batch_size, -1, channels).contiguous() + for img in range(batch_size): + batch_list[img].append(t[img]) + return [torch.cat(item, 0) for item in batch_list] + + +@HEADS.register_module() +class PAAHead(ATSSHead): + """Head of PAAAssignment: Probabilistic Anchor Assignment with IoU + Prediction for Object Detection. + + Code is modified from the `official github repo + `_. + + More details can be found in the `paper + `_ . + + Args: + topk (int): Select topk samples with smallest loss in + each level. + score_voting (bool): Whether to use score voting in post-process. + covariance_type : String describing the type of covariance parameters + to be used in :class:`sklearn.mixture.GaussianMixture`. + It must be one of: + + - 'full': each component has its own general covariance matrix + - 'tied': all components share the same general covariance matrix + - 'diag': each component has its own diagonal covariance matrix + - 'spherical': each component has its own single variance + Default: 'diag'. From 'full' to 'spherical', the gmm fitting + process is faster yet the performance could be influenced. For most + cases, 'diag' should be a good choice. + """ + + def __init__(self, + *args, + topk=9, + score_voting=True, + covariance_type='diag', + **kwargs): + # topk used in paa reassign process + self.topk = topk + self.with_score_voting = score_voting + self.covariance_type = covariance_type + super(PAAHead, self).__init__(*args, **kwargs) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'iou_preds')) + def loss(self, + cls_scores, + bbox_preds, + iou_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + iou_preds (list[Tensor]): iou_preds for each scale + level with shape (N, num_anchors * 1, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): Specify which bounding + boxes can be ignored when are computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss gmm_assignment. 
+ """ + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + ) + (labels, labels_weight, bboxes_target, bboxes_weight, pos_inds, + pos_gt_index) = cls_reg_targets + cls_scores = levels_to_images(cls_scores) + cls_scores = [ + item.reshape(-1, self.cls_out_channels) for item in cls_scores + ] + bbox_preds = levels_to_images(bbox_preds) + bbox_preds = [item.reshape(-1, 4) for item in bbox_preds] + iou_preds = levels_to_images(iou_preds) + iou_preds = [item.reshape(-1, 1) for item in iou_preds] + pos_losses_list, = multi_apply(self.get_pos_loss, anchor_list, + cls_scores, bbox_preds, labels, + labels_weight, bboxes_target, + bboxes_weight, pos_inds) + + with torch.no_grad(): + labels, label_weights, bbox_weights, num_pos = multi_apply( + self.paa_reassign, + pos_losses_list, + labels, + labels_weight, + bboxes_weight, + pos_inds, + pos_gt_index, + anchor_list, + ) + num_pos = sum(num_pos) + # convert all tensor list to a flatten tensor + cls_scores = torch.cat(cls_scores, 0).view(-1, cls_scores[0].size(-1)) + bbox_preds = torch.cat(bbox_preds, 0).view(-1, bbox_preds[0].size(-1)) + iou_preds = torch.cat(iou_preds, 0).view(-1, iou_preds[0].size(-1)) + labels = torch.cat(labels, 0).view(-1) + flatten_anchors = torch.cat( + [torch.cat(item, 0) for item in anchor_list]) + labels_weight = torch.cat(labels_weight, 0).view(-1) + bboxes_target = torch.cat(bboxes_target, + 0).view(-1, bboxes_target[0].size(-1)) + + pos_inds_flatten = ((labels >= 0) + & + (labels < self.num_classes)).nonzero().reshape(-1) + + losses_cls = self.loss_cls( + cls_scores, + labels, + labels_weight, + avg_factor=max(num_pos, len(img_metas))) # avoid num_pos=0 + if num_pos: + pos_bbox_pred = self.bbox_coder.decode( + flatten_anchors[pos_inds_flatten], + bbox_preds[pos_inds_flatten]) + pos_bbox_target = bboxes_target[pos_inds_flatten] + iou_target = bbox_overlaps( + pos_bbox_pred.detach(), pos_bbox_target, is_aligned=True) + losses_iou = self.loss_centerness( + iou_preds[pos_inds_flatten], + iou_target.unsqueeze(-1), + avg_factor=num_pos) + losses_bbox = self.loss_bbox( + pos_bbox_pred, + pos_bbox_target, + iou_target.clamp(min=EPS), + avg_factor=iou_target.sum()) + else: + losses_iou = iou_preds.sum() * 0 + losses_bbox = bbox_preds.sum() * 0 + + return dict( + loss_cls=losses_cls, loss_bbox=losses_bbox, loss_iou=losses_iou) + + def get_pos_loss(self, anchors, cls_score, bbox_pred, label, label_weight, + bbox_target, bbox_weight, pos_inds): + """Calculate loss of all potential positive samples obtained from first + match process. + + Args: + anchors (list[Tensor]): Anchors of each scale. + cls_score (Tensor): Box scores of single image with shape + (num_anchors, num_classes) + bbox_pred (Tensor): Box energies / deltas of single image + with shape (num_anchors, 4) + label (Tensor): classification target of each anchor with + shape (num_anchors,) + label_weight (Tensor): Classification loss weight of each + anchor with shape (num_anchors). + bbox_target (dict): Regression target of each anchor with + shape (num_anchors, 4). 
+ bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + pos_inds (Tensor): Index of all positive samples got from + first assign process. + + Returns: + Tensor: Losses of all positive samples in single image. + """ + if not len(pos_inds): + return cls_score.new([]), + anchors_all_level = torch.cat(anchors, 0) + pos_scores = cls_score[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_label = label[pos_inds] + pos_label_weight = label_weight[pos_inds] + pos_bbox_target = bbox_target[pos_inds] + pos_bbox_weight = bbox_weight[pos_inds] + pos_anchors = anchors_all_level[pos_inds] + pos_bbox_pred = self.bbox_coder.decode(pos_anchors, pos_bbox_pred) + + # to keep loss dimension + loss_cls = self.loss_cls( + pos_scores, + pos_label, + pos_label_weight, + avg_factor=self.loss_cls.loss_weight, + reduction_override='none') + + loss_bbox = self.loss_bbox( + pos_bbox_pred, + pos_bbox_target, + pos_bbox_weight, + avg_factor=self.loss_cls.loss_weight, + reduction_override='none') + + loss_cls = loss_cls.sum(-1) + pos_loss = loss_bbox + loss_cls + return pos_loss, + + def paa_reassign(self, pos_losses, label, label_weight, bbox_weight, + pos_inds, pos_gt_inds, anchors): + """Fit loss to GMM distribution and separate positive, ignore, negative + samples again with GMM model. + + Args: + pos_losses (Tensor): Losses of all positive samples in + single image. + label (Tensor): classification target of each anchor with + shape (num_anchors,) + label_weight (Tensor): Classification loss weight of each + anchor with shape (num_anchors). + bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + pos_inds (Tensor): Index of all positive samples got from + first assign process. + pos_gt_inds (Tensor): Gt_index of all positive samples got + from first assign process. + anchors (list[Tensor]): Anchors of each scale. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - label (Tensor): classification target of each anchor after + paa assign, with shape (num_anchors,) + - label_weight (Tensor): Classification loss weight of each + anchor after paa assign, with shape (num_anchors). + - bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + - num_pos (int): The number of positive samples after paa + assign. 
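+
+        Note:
+            A sketch of the procedure applied below, per gt box: the
+            ``self.topk`` lowest-loss candidates of each level are pooled,
+            a 2-component GMM is fit to their sorted losses, and only the
+            candidates assigned to the low-loss component are kept as
+            positives; every other first-round positive is reset to the
+            background label.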
+ """ + if not len(pos_inds): + return label, label_weight, bbox_weight, 0 + + num_gt = pos_gt_inds.max() + 1 + num_level = len(anchors) + num_anchors_each_level = [item.size(0) for item in anchors] + num_anchors_each_level.insert(0, 0) + inds_level_interval = np.cumsum(num_anchors_each_level) + pos_level_mask = [] + for i in range(num_level): + mask = (pos_inds >= inds_level_interval[i]) & ( + pos_inds < inds_level_interval[i + 1]) + pos_level_mask.append(mask) + pos_inds_after_paa = [label.new_tensor([])] + ignore_inds_after_paa = [label.new_tensor([])] + for gt_ind in range(num_gt): + pos_inds_gmm = [] + pos_loss_gmm = [] + gt_mask = pos_gt_inds == gt_ind + for level in range(num_level): + level_mask = pos_level_mask[level] + level_gt_mask = level_mask & gt_mask + value, topk_inds = pos_losses[level_gt_mask].topk( + min(level_gt_mask.sum(), self.topk), largest=False) + pos_inds_gmm.append(pos_inds[level_gt_mask][topk_inds]) + pos_loss_gmm.append(value) + pos_inds_gmm = torch.cat(pos_inds_gmm) + pos_loss_gmm = torch.cat(pos_loss_gmm) + # fix gmm need at least two sample + if len(pos_inds_gmm) < 2: + continue + device = pos_inds_gmm.device + pos_loss_gmm, sort_inds = pos_loss_gmm.sort() + pos_inds_gmm = pos_inds_gmm[sort_inds] + pos_loss_gmm = pos_loss_gmm.view(-1, 1).cpu().numpy() + min_loss, max_loss = pos_loss_gmm.min(), pos_loss_gmm.max() + means_init = np.array([min_loss, max_loss]).reshape(2, 1) + weights_init = np.array([0.5, 0.5]) + precisions_init = np.array([1.0, 1.0]).reshape(2, 1, 1) # full + if self.covariance_type == 'spherical': + precisions_init = precisions_init.reshape(2) + elif self.covariance_type == 'diag': + precisions_init = precisions_init.reshape(2, 1) + elif self.covariance_type == 'tied': + precisions_init = np.array([[1.0]]) + if skm is None: + raise ImportError('Please run "pip install sklearn" ' + 'to install sklearn first.') + gmm = skm.GaussianMixture( + 2, + weights_init=weights_init, + means_init=means_init, + precisions_init=precisions_init, + covariance_type=self.covariance_type) + gmm.fit(pos_loss_gmm) + gmm_assignment = gmm.predict(pos_loss_gmm) + scores = gmm.score_samples(pos_loss_gmm) + gmm_assignment = torch.from_numpy(gmm_assignment).to(device) + scores = torch.from_numpy(scores).to(device) + + pos_inds_temp, ignore_inds_temp = self.gmm_separation_scheme( + gmm_assignment, scores, pos_inds_gmm) + pos_inds_after_paa.append(pos_inds_temp) + ignore_inds_after_paa.append(ignore_inds_temp) + + pos_inds_after_paa = torch.cat(pos_inds_after_paa) + ignore_inds_after_paa = torch.cat(ignore_inds_after_paa) + reassign_mask = (pos_inds.unsqueeze(1) != pos_inds_after_paa).all(1) + reassign_ids = pos_inds[reassign_mask] + label[reassign_ids] = self.num_classes + label_weight[ignore_inds_after_paa] = 0 + bbox_weight[reassign_ids] = 0 + num_pos = len(pos_inds_after_paa) + return label, label_weight, bbox_weight, num_pos + + def gmm_separation_scheme(self, gmm_assignment, scores, pos_inds_gmm): + """A general separation scheme for gmm model. + + It separates a GMM distribution of candidate samples into three + parts, 0 1 and uncertain areas, and you can implement other + separation schemes by rewriting this function. + + Args: + gmm_assignment (Tensor): The prediction of GMM which is of shape + (num_samples,). The 0/1 value indicates the distribution + that each sample comes from. + scores (Tensor): The probability of sample coming from the + fit GMM distribution. The tensor is of shape (num_samples,). 
+            pos_inds_gmm (Tensor): All the indexes of samples which are used
+                to fit the GMM model. The tensor is of shape (num_samples,)
+
+        Returns:
+            tuple[Tensor]: The indices of positive and ignored samples.
+
+                - pos_inds_temp (Tensor): Indices of positive samples.
+                - ignore_inds_temp (Tensor): Indices of ignore samples.
+        """
+        # The implementation is (c) in Fig.3 in the original paper instead
+        # of (b). You can refer to issues such as
+        # https://github.com/kkhoot/PAA/issues/8 and
+        # https://github.com/kkhoot/PAA/issues/9.
+        fgs = gmm_assignment == 0
+        pos_inds_temp = fgs.new_tensor([], dtype=torch.long)
+        ignore_inds_temp = fgs.new_tensor([], dtype=torch.long)
+        if fgs.nonzero().numel():
+            _, pos_thr_ind = scores[fgs].topk(1)
+            pos_inds_temp = pos_inds_gmm[fgs][:pos_thr_ind + 1]
+            ignore_inds_temp = pos_inds_gmm.new_tensor([])
+        return pos_inds_temp, ignore_inds_temp
+
+    def get_targets(
+        self,
+        anchor_list,
+        valid_flag_list,
+        gt_bboxes_list,
+        img_metas,
+        gt_bboxes_ignore_list=None,
+        gt_labels_list=None,
+        label_channels=1,
+        unmap_outputs=True,
+    ):
+        """Get targets for PAA head.
+
+        This method is almost the same as `AnchorHead.get_targets()`. We
+        directly return the results from `_get_targets_single` instead of
+        mapping them to levels with the `images_to_levels` function.
+
+        Args:
+            anchor_list (list[list[Tensor]]): Multi level anchors of each
+                image. The outer list indicates images, and the inner list
+                corresponds to feature levels of the image. Each element of
+                the inner list is a tensor of shape (num_anchors, 4).
+            valid_flag_list (list[list[Tensor]]): Multi level valid flags of
+                each image. The outer list indicates images, and the inner list
+                corresponds to feature levels of the image. Each element of
+                the inner list is a tensor of shape (num_anchors, )
+            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image.
+            img_metas (list[dict]): Meta info of each image.
+            gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be
+                ignored.
+            gt_labels_list (list[Tensor]): Ground truth labels of each box.
+            label_channels (int): Channel of label.
+            unmap_outputs (bool): Whether to map outputs back to the original
+                set of anchors.
+
+        Returns:
+            tuple: Usually returns a tuple containing learning targets.
+
+                - labels (list[Tensor]): Labels of all anchors, each with
+                    shape (num_anchors,).
+                - label_weights (list[Tensor]): Label weights of all anchors,
+                    each with shape (num_anchors,).
+                - bbox_targets (list[Tensor]): BBox targets of all anchors,
+                    each with shape (num_anchors, 4).
+                - bbox_weights (list[Tensor]): BBox weights of all anchors,
+                    each with shape (num_anchors, 4).
+                - pos_inds (list[Tensor]): Contains all indices of positive
+                    samples in all anchors.
+                - gt_inds (list[Tensor]): Contains the gt index of each
+                    positive sample in all anchors.
+        """
+
+        num_imgs = len(img_metas)
+        assert len(anchor_list) == len(valid_flag_list) == num_imgs
+        concat_anchor_list = []
+        concat_valid_flag_list = []
+        for i in range(num_imgs):
+            assert len(anchor_list[i]) == len(valid_flag_list[i])
+            concat_anchor_list.append(torch.cat(anchor_list[i]))
+            concat_valid_flag_list.append(torch.cat(valid_flag_list[i]))
+
+        # compute targets for each image
+        if gt_bboxes_ignore_list is None:
+            gt_bboxes_ignore_list = [None for _ in range(num_imgs)]
+        if gt_labels_list is None:
+            gt_labels_list = [None for _ in range(num_imgs)]
+        results = multi_apply(
+            self._get_targets_single,
+            concat_anchor_list,
+            concat_valid_flag_list,
+            gt_bboxes_list,
+            gt_bboxes_ignore_list,
+            gt_labels_list,
+            img_metas,
+            label_channels=label_channels,
+            unmap_outputs=unmap_outputs)
+
+        (labels, label_weights, bbox_targets, bbox_weights, valid_pos_inds,
+         valid_neg_inds, sampling_result) = results
+
+        # Due to the valid flags of anchors, we have to calculate the real
+        # pos_inds in the original anchor set.
+        pos_inds = []
+        for i, single_labels in enumerate(labels):
+            pos_mask = (0 <= single_labels) & (
+                single_labels < self.num_classes)
+            pos_inds.append(pos_mask.nonzero().view(-1))
+
+        gt_inds = [item.pos_assigned_gt_inds for item in sampling_result]
+        return (labels, label_weights, bbox_targets, bbox_weights, pos_inds,
+                gt_inds)
+
+    def _get_targets_single(self,
+                            flat_anchors,
+                            valid_flags,
+                            gt_bboxes,
+                            gt_bboxes_ignore,
+                            gt_labels,
+                            img_meta,
+                            label_channels=1,
+                            unmap_outputs=True):
+        """Compute regression and classification targets for anchors in a
+        single image.
+
+        This method is the same as `AnchorHead._get_targets_single()`.
+        """
+        assert unmap_outputs, 'We must map outputs back to the original ' \
+            'set of anchors in PAAHead'
+        return super(ATSSHead, self)._get_targets_single(
+            flat_anchors,
+            valid_flags,
+            gt_bboxes,
+            gt_bboxes_ignore,
+            gt_labels,
+            img_meta,
+            label_channels=1,
+            unmap_outputs=True)
+
+    def _get_bboxes_single(self,
+                           cls_scores,
+                           bbox_preds,
+                           iou_preds,
+                           mlvl_anchors,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False,
+                           with_nms=True):
+        """Transform outputs for a single batch item into labeled boxes.
+
+        This method is almost the same as `ATSSHead._get_bboxes_single()`.
+        We use sqrt(iou_preds * cls_scores) in the NMS process instead of just
+        cls_scores. Besides, score voting is used when ``score_voting``
+        is set to True.
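+        For example (illustrative numbers), a box with classification score
+        0.81 and predicted IoU 0.49 is ranked by sqrt(0.81 * 0.49) = 0.63
+        during the NMS pre-selection.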
+        """
+        assert with_nms, 'PAA only supports "with_nms=True" now'
+        assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors)
+        mlvl_bboxes = []
+        mlvl_scores = []
+        mlvl_iou_preds = []
+        for cls_score, bbox_pred, iou_preds, anchors in zip(
+                cls_scores, bbox_preds, iou_preds, mlvl_anchors):
+            assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
+
+            scores = cls_score.permute(1, 2, 0).reshape(
+                -1, self.cls_out_channels).sigmoid()
+            bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4)
+            iou_preds = iou_preds.permute(1, 2, 0).reshape(-1).sigmoid()
+            nms_pre = cfg.get('nms_pre', -1)
+            if nms_pre > 0 and scores.shape[0] > nms_pre:
+                max_scores, _ = (scores * iou_preds[:, None]).sqrt().max(dim=1)
+                _, topk_inds = max_scores.topk(nms_pre)
+                anchors = anchors[topk_inds, :]
+                bbox_pred = bbox_pred[topk_inds, :]
+                scores = scores[topk_inds, :]
+                iou_preds = iou_preds[topk_inds]
+
+            bboxes = self.bbox_coder.decode(
+                anchors, bbox_pred, max_shape=img_shape)
+            mlvl_bboxes.append(bboxes)
+            mlvl_scores.append(scores)
+            mlvl_iou_preds.append(iou_preds)
+
+        mlvl_bboxes = torch.cat(mlvl_bboxes)
+        if rescale:
+            mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor)
+        mlvl_scores = torch.cat(mlvl_scores)
+        # Add a dummy background class to the backend when using sigmoid
+        # remind that we set FG labels to [0, num_class-1] since mmdet v2.0
+        # BG cat_id: num_class
+        padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1)
+        mlvl_scores = torch.cat([mlvl_scores, padding], dim=1)
+        mlvl_iou_preds = torch.cat(mlvl_iou_preds)
+        mlvl_nms_scores = (mlvl_scores * mlvl_iou_preds[:, None]).sqrt()
+        det_bboxes, det_labels = multiclass_nms(
+            mlvl_bboxes,
+            mlvl_nms_scores,
+            cfg.score_thr,
+            cfg.nms,
+            cfg.max_per_img,
+            score_factors=None)
+        if self.with_score_voting:
+            det_bboxes, det_labels = self.score_voting(det_bboxes, det_labels,
+                                                       mlvl_bboxes,
+                                                       mlvl_nms_scores,
+                                                       cfg.score_thr)
+
+        return det_bboxes, det_labels
+
+    def score_voting(self, det_bboxes, det_labels, mlvl_bboxes,
+                     mlvl_nms_scores, score_thr):
+        """Implementation of the score voting method, which works on the
+        boxes remaining after the NMS procedure.
+
+        Args:
+            det_bboxes (Tensor): Remaining boxes after NMS procedure,
+                with shape (k, 5), each dimension means
+                (x1, y1, x2, y2, score).
+            det_labels (Tensor): The label of remaining boxes, with shape
+                (k, 1). Labels are 0-based.
+            mlvl_bboxes (Tensor): All boxes before the NMS procedure,
+                with shape (num_anchors, 4).
+            mlvl_nms_scores (Tensor): The scores of all boxes used
+                in the NMS procedure, with shape (num_anchors, num_class)
+            mlvl_iou_preds (Tensor): The predictions of IOU of all boxes
+                before the NMS procedure, with shape (num_anchors, 1)
+            score_thr (float): The score threshold of bboxes.
+
+        Returns:
+            tuple: Usually returns a tuple containing voting results.
+
+                - det_bboxes_voted (Tensor): Remaining boxes after
+                    score voting procedure, with shape (k, 5), each
+                    dimension means (x1, y1, x2, y2, score).
+                - det_labels_voted (Tensor): Label of remaining bboxes
+                    after voting, with shape (num_anchors,).
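+
+        Note:
+            Each remaining box is refined as a weighted average of the
+            same-class candidate boxes with IoU > 0.01, using weights
+            ``p_i = exp(-(1 - IoU_i)**2 / 0.025) * score_i`` (see the inner
+            loop below).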
+        """
+        candidate_mask = mlvl_nms_scores > score_thr
+        candidate_mask_nozeros = candidate_mask.nonzero()
+        candidate_inds = candidate_mask_nozeros[:, 0]
+        candidate_labels = candidate_mask_nozeros[:, 1]
+        candidate_bboxes = mlvl_bboxes[candidate_inds]
+        candidate_scores = mlvl_nms_scores[candidate_mask]
+        det_bboxes_voted = []
+        det_labels_voted = []
+        for cls in range(self.cls_out_channels):
+            candidate_cls_mask = candidate_labels == cls
+            if not candidate_cls_mask.any():
+                continue
+            candidate_cls_scores = candidate_scores[candidate_cls_mask]
+            candidate_cls_bboxes = candidate_bboxes[candidate_cls_mask]
+            det_cls_mask = det_labels == cls
+            det_cls_bboxes = det_bboxes[det_cls_mask].view(
+                -1, det_bboxes.size(-1))
+            det_candidate_ious = bbox_overlaps(det_cls_bboxes[:, :4],
+                                               candidate_cls_bboxes)
+            for det_ind in range(len(det_cls_bboxes)):
+                single_det_ious = det_candidate_ious[det_ind]
+                pos_ious_mask = single_det_ious > 0.01
+                pos_ious = single_det_ious[pos_ious_mask]
+                pos_bboxes = candidate_cls_bboxes[pos_ious_mask]
+                pos_scores = candidate_cls_scores[pos_ious_mask]
+                pis = (torch.exp(-(1 - pos_ious)**2 / 0.025) *
+                       pos_scores)[:, None]
+                voted_box = torch.sum(
+                    pis * pos_bboxes, dim=0) / torch.sum(
+                        pis, dim=0)
+                voted_score = det_cls_bboxes[det_ind][-1:][None, :]
+                det_bboxes_voted.append(
+                    torch.cat((voted_box[None, :], voted_score), dim=1))
+                det_labels_voted.append(cls)
+
+        det_bboxes_voted = torch.cat(det_bboxes_voted, dim=0)
+        det_labels_voted = det_labels.new_tensor(det_labels_voted)
+        return det_bboxes_voted, det_labels_voted
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_retinanet_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_retinanet_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd87b9aeb07e05ff94b444ac8999eca3f616711a
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_retinanet_head.py
@@ -0,0 +1,154 @@
+import torch
+from mmcv.runner import force_fp32
+
+from mmdet.core import images_to_levels
+from ..builder import HEADS
+from ..losses import carl_loss, isr_p
+from .retina_head import RetinaHead
+
+
+@HEADS.register_module()
+class PISARetinaHead(RetinaHead):
+    """PISA Retinanet Head.
+
+    The head has the same structure as the RetinaNet head, but differs in two
+    aspects:
+    1. Importance-based Sample Reweighting for positive samples (ISR-P) is
+        applied to change the positive loss weights.
+    2. Classification-aware regression loss is adopted as a third loss.
+    """
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level
+                Has shape (N, num_anchors * num_classes, H, W)
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W)
+            gt_bboxes (list[Tensor]): Ground truth bboxes of each image
+                with shape (num_obj, 4).
+            gt_labels (list[Tensor]): Ground truth labels of each image
+                with shape (num_obj,).
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (list[Tensor]): Ignored gt bboxes of each image.
+                Default: None.
+
+        Returns:
+            dict: Loss dict, comprising classification loss, regression loss
+                and carl loss.
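+
+        Note:
+            Compared with ``RetinaHead.loss()``, two optional steps are
+            inserted below: ``isr_p()`` reweights the flattened positive
+            targets when ``train_cfg.isr`` is set, and ``carl_loss()`` adds
+            a classification-aware regression term when ``train_cfg.carl``
+            is set.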
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + return_sampling_results=True) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, sampling_results_list) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors and flags to a single tensor + concat_anchor_list = [] + for i in range(len(anchor_list)): + concat_anchor_list.append(torch.cat(anchor_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + num_imgs = len(img_metas) + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(num_imgs, -1, label_channels) + for cls_score in cls_scores + ] + flatten_cls_scores = torch.cat( + flatten_cls_scores, dim=1).reshape(-1, + flatten_cls_scores[0].size(-1)) + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(num_imgs, -1, 4) + for bbox_pred in bbox_preds + ] + flatten_bbox_preds = torch.cat( + flatten_bbox_preds, dim=1).view(-1, flatten_bbox_preds[0].size(-1)) + flatten_labels = torch.cat(labels_list, dim=1).reshape(-1) + flatten_label_weights = torch.cat( + label_weights_list, dim=1).reshape(-1) + flatten_anchors = torch.cat(all_anchor_list, dim=1).reshape(-1, 4) + flatten_bbox_targets = torch.cat( + bbox_targets_list, dim=1).reshape(-1, 4) + flatten_bbox_weights = torch.cat( + bbox_weights_list, dim=1).reshape(-1, 4) + + # Apply ISR-P + isr_cfg = self.train_cfg.get('isr', None) + if isr_cfg is not None: + all_targets = (flatten_labels, flatten_label_weights, + flatten_bbox_targets, flatten_bbox_weights) + with torch.no_grad(): + all_targets = isr_p( + flatten_cls_scores, + flatten_bbox_preds, + all_targets, + flatten_anchors, + sampling_results_list, + bbox_coder=self.bbox_coder, + loss_cls=self.loss_cls, + num_class=self.num_classes, + **self.train_cfg.isr) + (flatten_labels, flatten_label_weights, flatten_bbox_targets, + flatten_bbox_weights) = all_targets + + # For convenience we compute loss once instead separating by fpn level, + # so that we don't need to separate the weights by level again. 
+ # The result should be the same + losses_cls = self.loss_cls( + flatten_cls_scores, + flatten_labels, + flatten_label_weights, + avg_factor=num_total_samples) + losses_bbox = self.loss_bbox( + flatten_bbox_preds, + flatten_bbox_targets, + flatten_bbox_weights, + avg_factor=num_total_samples) + loss_dict = dict(loss_cls=losses_cls, loss_bbox=losses_bbox) + + # CARL Loss + carl_cfg = self.train_cfg.get('carl', None) + if carl_cfg is not None: + loss_carl = carl_loss( + flatten_cls_scores, + flatten_labels, + flatten_bbox_preds, + flatten_bbox_targets, + self.loss_bbox, + **self.train_cfg.carl, + avg_factor=num_total_pos, + sigmoid=True, + num_class=self.num_classes) + loss_dict.update(loss_carl) + + return loss_dict diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_ssd_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_ssd_head.py new file mode 100644 index 0000000000000000000000000000000000000000..90ef3c83ed62d8346c8daef01f18ad7bd236623c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/pisa_ssd_head.py @@ -0,0 +1,139 @@ +import torch + +from mmdet.core import multi_apply +from ..builder import HEADS +from ..losses import CrossEntropyLoss, SmoothL1Loss, carl_loss, isr_p +from .ssd_head import SSDHead + + +# TODO: add loss evaluator for SSD +@HEADS.register_module() +class PISASSDHead(SSDHead): + + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes of each image + with shape (num_obj, 4). + gt_labels (list[Tensor]): Ground truth labels of each image + with shape (num_obj, 4). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor]): Ignored gt bboxes of each image. + Default: None. + + Returns: + dict: Loss dict, comprise classification loss regression loss and + carl loss. 
+        """
+        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
+        assert len(featmap_sizes) == self.anchor_generator.num_levels
+
+        device = cls_scores[0].device
+
+        anchor_list, valid_flag_list = self.get_anchors(
+            featmap_sizes, img_metas, device=device)
+        cls_reg_targets = self.get_targets(
+            anchor_list,
+            valid_flag_list,
+            gt_bboxes,
+            img_metas,
+            gt_bboxes_ignore_list=gt_bboxes_ignore,
+            gt_labels_list=gt_labels,
+            label_channels=1,
+            unmap_outputs=False,
+            return_sampling_results=True)
+        if cls_reg_targets is None:
+            return None
+        (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list,
+         num_total_pos, num_total_neg, sampling_results_list) = cls_reg_targets
+
+        num_images = len(img_metas)
+        all_cls_scores = torch.cat([
+            s.permute(0, 2, 3, 1).reshape(
+                num_images, -1, self.cls_out_channels) for s in cls_scores
+        ], 1)
+        all_labels = torch.cat(labels_list, -1).view(num_images, -1)
+        all_label_weights = torch.cat(label_weights_list,
+                                      -1).view(num_images, -1)
+        all_bbox_preds = torch.cat([
+            b.permute(0, 2, 3, 1).reshape(num_images, -1, 4)
+            for b in bbox_preds
+        ], -2)
+        all_bbox_targets = torch.cat(bbox_targets_list,
+                                     -2).view(num_images, -1, 4)
+        all_bbox_weights = torch.cat(bbox_weights_list,
+                                     -2).view(num_images, -1, 4)
+
+        # concat all level anchors to a single tensor
+        all_anchors = []
+        for i in range(num_images):
+            all_anchors.append(torch.cat(anchor_list[i]))
+
+        isr_cfg = self.train_cfg.get('isr', None)
+        all_targets = (all_labels.view(-1), all_label_weights.view(-1),
+                       all_bbox_targets.view(-1,
+                                             4), all_bbox_weights.view(-1, 4))
+        # apply ISR-P
+        if isr_cfg is not None:
+            all_targets = isr_p(
+                all_cls_scores.view(-1, all_cls_scores.size(-1)),
+                all_bbox_preds.view(-1, 4),
+                all_targets,
+                torch.cat(all_anchors),
+                sampling_results_list,
+                loss_cls=CrossEntropyLoss(),
+                bbox_coder=self.bbox_coder,
+                **self.train_cfg.isr,
+                num_class=self.num_classes)
+            (new_labels, new_label_weights, new_bbox_targets,
+             new_bbox_weights) = all_targets
+            all_labels = new_labels.view(all_labels.shape)
+            all_label_weights = new_label_weights.view(all_label_weights.shape)
+            all_bbox_targets = new_bbox_targets.view(all_bbox_targets.shape)
+            all_bbox_weights = new_bbox_weights.view(all_bbox_weights.shape)
+
+        # add CARL loss
+        carl_loss_cfg = self.train_cfg.get('carl', None)
+        if carl_loss_cfg is not None:
+            loss_carl = carl_loss(
+                all_cls_scores.view(-1, all_cls_scores.size(-1)),
+                all_targets[0],
+                all_bbox_preds.view(-1, 4),
+                all_targets[2],
+                SmoothL1Loss(beta=1.),
+                **self.train_cfg.carl,
+                avg_factor=num_total_pos,
+                num_class=self.num_classes)
+
+        # check NaN and Inf
+        assert torch.isfinite(all_cls_scores).all().item(), \
+            'classification scores become infinite or NaN!'
+        assert torch.isfinite(all_bbox_preds).all().item(), \
+            'bbox predictions become infinite or NaN!'
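+
+        # Note: the all_* tensors above keep an explicit image dimension
+        # (num_images, ...), so the multi_apply call below evaluates
+        # loss_single once per image rather than once per FPN level.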
+ + losses_cls, losses_bbox = multi_apply( + self.loss_single, + all_cls_scores, + all_bbox_preds, + all_anchors, + all_labels, + all_label_weights, + all_bbox_targets, + all_bbox_weights, + num_total_samples=num_total_pos) + loss_dict = dict(loss_cls=losses_cls, loss_bbox=losses_bbox) + if carl_loss_cfg is not None: + loss_dict.update(loss_carl) + return loss_dict diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/reppoints_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/reppoints_head.py new file mode 100644 index 0000000000000000000000000000000000000000..03e3fa0f19e575ec22ee6edef8af5a0a7ccf345e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/reppoints_head.py @@ -0,0 +1,763 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init +from mmcv.ops import DeformConv2d + +from mmdet.core import (PointGenerator, build_assigner, build_sampler, + images_to_levels, multi_apply, multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + + +@HEADS.register_module() +class RepPointsHead(AnchorFreeHead): + """RepPoint head. + + Args: + point_feat_channels (int): Number of channels of points features. + gradient_mul (float): The multiplier to gradients from + points refinement and recognition. + point_strides (Iterable): points strides. + point_base_scale (int): bbox scale for assigning labels. + loss_cls (dict): Config of classification loss. + loss_bbox_init (dict): Config of initial points loss. + loss_bbox_refine (dict): Config of points loss in refinement. + use_grid_points (bool): If we use bounding box representation, the + reppoints is represented as grid points on the bounding box. + center_init (bool): Whether to use center point assignment. + transform_method (str): The methods to transform RepPoints to bbox. + """ # noqa: W605 + + def __init__(self, + num_classes, + in_channels, + point_feat_channels=256, + num_points=9, + gradient_mul=0.1, + point_strides=[8, 16, 32, 64, 128], + point_base_scale=4, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_init=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=0.5), + loss_bbox_refine=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0), + use_grid_points=False, + center_init=True, + transform_method='moment', + moment_mul=0.01, + **kwargs): + self.num_points = num_points + self.point_feat_channels = point_feat_channels + self.use_grid_points = use_grid_points + self.center_init = center_init + + # we use deform conv to extract points features + self.dcn_kernel = int(np.sqrt(num_points)) + self.dcn_pad = int((self.dcn_kernel - 1) / 2) + assert self.dcn_kernel * self.dcn_kernel == num_points, \ + 'The points number should be a square number.' + assert self.dcn_kernel % 2 == 1, \ + 'The points number should be an odd square number.' 
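+        # For example, the default num_points=9 gives a 3x3 DCN kernel
+        # (dcn_kernel=3, dcn_pad=1); the base offsets built below enumerate
+        # the (dy, dx) pairs from (-1, -1) to (1, 1) in row-major order.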
+ dcn_base = np.arange(-self.dcn_pad, + self.dcn_pad + 1).astype(np.float64) + dcn_base_y = np.repeat(dcn_base, self.dcn_kernel) + dcn_base_x = np.tile(dcn_base, self.dcn_kernel) + dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape( + (-1)) + self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1) + + super().__init__(num_classes, in_channels, loss_cls=loss_cls, **kwargs) + + self.gradient_mul = gradient_mul + self.point_base_scale = point_base_scale + self.point_strides = point_strides + self.point_generators = [PointGenerator() for _ in self.point_strides] + + self.sampling = loss_cls['type'] not in ['FocalLoss'] + if self.train_cfg: + self.init_assigner = build_assigner(self.train_cfg.init.assigner) + self.refine_assigner = build_assigner( + self.train_cfg.refine.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.transform_method = transform_method + if self.transform_method == 'moment': + self.moment_transfer = nn.Parameter( + data=torch.zeros(2), requires_grad=True) + self.moment_mul = moment_mul + + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + if self.use_sigmoid_cls: + self.cls_out_channels = self.num_classes + else: + self.cls_out_channels = self.num_classes + 1 + self.loss_bbox_init = build_loss(loss_bbox_init) + self.loss_bbox_refine = build_loss(loss_bbox_refine) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + pts_out_dim = 4 if self.use_grid_points else 2 * self.num_points + self.reppoints_cls_conv = DeformConv2d(self.feat_channels, + self.point_feat_channels, + self.dcn_kernel, 1, + self.dcn_pad) + self.reppoints_cls_out = nn.Conv2d(self.point_feat_channels, + self.cls_out_channels, 1, 1, 0) + self.reppoints_pts_init_conv = nn.Conv2d(self.feat_channels, + self.point_feat_channels, 3, + 1, 1) + self.reppoints_pts_init_out = nn.Conv2d(self.point_feat_channels, + pts_out_dim, 1, 1, 0) + self.reppoints_pts_refine_conv = DeformConv2d(self.feat_channels, + self.point_feat_channels, + self.dcn_kernel, 1, + self.dcn_pad) + self.reppoints_pts_refine_out = nn.Conv2d(self.point_feat_channels, + pts_out_dim, 1, 1, 0) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, std=0.01) + bias_cls = bias_init_with_prob(0.01) + normal_init(self.reppoints_cls_conv, std=0.01) + normal_init(self.reppoints_cls_out, std=0.01, bias=bias_cls) + normal_init(self.reppoints_pts_init_conv, std=0.01) + normal_init(self.reppoints_pts_init_out, std=0.01) + normal_init(self.reppoints_pts_refine_conv, std=0.01) + normal_init(self.reppoints_pts_refine_out, std=0.01) + + def points2bbox(self, pts, y_first=True): + """Converting the points set into bounding box. 
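+
+        Three transform methods are supported: ``minmax``,
+        ``partial_minmax`` and ``moment``. Under ``moment``, for instance,
+        the bbox is centered at the mean of the points with half extents
+        ``std * exp(moment_transfer)`` (see the branches below).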
+
+        :param pts: the input points sets (fields), each point
+            set (fields) is represented as 2n scalars.
+        :param y_first: if y_first=True, the point set is represented as
+            [y1, x1, y2, x2 ... yn, xn], otherwise the point set is
+            represented as [x1, y1, x2, y2 ... xn, yn].
+        :return: each point set converted to a bbox [x1, y1, x2, y2].
+        """
+        pts_reshape = pts.view(pts.shape[0], -1, 2, *pts.shape[2:])
+        pts_y = pts_reshape[:, :, 0, ...] if y_first else pts_reshape[:, :, 1,
+                                                                      ...]
+        pts_x = pts_reshape[:, :, 1, ...] if y_first else pts_reshape[:, :, 0,
+                                                                      ...]
+        if self.transform_method == 'minmax':
+            bbox_left = pts_x.min(dim=1, keepdim=True)[0]
+            bbox_right = pts_x.max(dim=1, keepdim=True)[0]
+            bbox_up = pts_y.min(dim=1, keepdim=True)[0]
+            bbox_bottom = pts_y.max(dim=1, keepdim=True)[0]
+            bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom],
+                             dim=1)
+        elif self.transform_method == 'partial_minmax':
+            pts_y = pts_y[:, :4, ...]
+            pts_x = pts_x[:, :4, ...]
+            bbox_left = pts_x.min(dim=1, keepdim=True)[0]
+            bbox_right = pts_x.max(dim=1, keepdim=True)[0]
+            bbox_up = pts_y.min(dim=1, keepdim=True)[0]
+            bbox_bottom = pts_y.max(dim=1, keepdim=True)[0]
+            bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom],
+                             dim=1)
+        elif self.transform_method == 'moment':
+            pts_y_mean = pts_y.mean(dim=1, keepdim=True)
+            pts_x_mean = pts_x.mean(dim=1, keepdim=True)
+            pts_y_std = torch.std(pts_y - pts_y_mean, dim=1, keepdim=True)
+            pts_x_std = torch.std(pts_x - pts_x_mean, dim=1, keepdim=True)
+            moment_transfer = (self.moment_transfer * self.moment_mul) + (
+                self.moment_transfer.detach() * (1 - self.moment_mul))
+            moment_width_transfer = moment_transfer[0]
+            moment_height_transfer = moment_transfer[1]
+            half_width = pts_x_std * torch.exp(moment_width_transfer)
+            half_height = pts_y_std * torch.exp(moment_height_transfer)
+            bbox = torch.cat([
+                pts_x_mean - half_width, pts_y_mean - half_height,
+                pts_x_mean + half_width, pts_y_mean + half_height
+            ],
+                             dim=1)
+        else:
+            raise NotImplementedError
+        return bbox
+
+    def gen_grid_from_reg(self, reg, previous_boxes):
+        """Based on the previous bboxes and regression values, compute the
+        regressed bboxes and generate the grids on the bboxes.
+
+        :param reg: the regression values for the previous bboxes.
+        :param previous_boxes: previous bboxes.
+        :return: the grids generated on the regressed bboxes.
+        """
+        b, _, h, w = reg.shape
+        bxy = (previous_boxes[:, :2, ...] + previous_boxes[:, 2:, ...]) / 2.
+        bwh = (previous_boxes[:, 2:, ...] -
+               previous_boxes[:, :2, ...]).clamp(min=1e-6)
+        grid_topleft = bxy + bwh * reg[:, :2, ...] - 0.5 * bwh * torch.exp(
+            reg[:, 2:, ...])
+        grid_wh = bwh * torch.exp(reg[:, 2:, ...])
+        grid_left = grid_topleft[:, [0], ...]
+        grid_top = grid_topleft[:, [1], ...]
+        grid_width = grid_wh[:, [0], ...]
+        grid_height = grid_wh[:, [1], ...]
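+        # torch.linspace(0., 1., dcn_kernel) yields dcn_kernel evenly spaced
+        # fractions along each side of the regressed box (e.g. [0.0, 0.5,
+        # 1.0] for a 3x3 kernel); the grid points are built from them below.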
+ intervel = torch.linspace(0., 1., self.dcn_kernel).view( + 1, self.dcn_kernel, 1, 1).type_as(reg) + grid_x = grid_left + grid_width * intervel + grid_x = grid_x.unsqueeze(1).repeat(1, self.dcn_kernel, 1, 1, 1) + grid_x = grid_x.view(b, -1, h, w) + grid_y = grid_top + grid_height * intervel + grid_y = grid_y.unsqueeze(2).repeat(1, 1, self.dcn_kernel, 1, 1) + grid_y = grid_y.view(b, -1, h, w) + grid_yx = torch.stack([grid_y, grid_x], dim=2) + grid_yx = grid_yx.view(b, -1, h, w) + regressed_bbox = torch.cat([ + grid_left, grid_top, grid_left + grid_width, grid_top + grid_height + ], 1) + return grid_yx, regressed_bbox + + def forward(self, feats): + return multi_apply(self.forward_single, feats) + + def forward_single(self, x): + """Forward feature map of a single FPN level.""" + dcn_base_offset = self.dcn_base_offset.type_as(x) + # If we use center_init, the initial reppoints is from center points. + # If we use bounding bbox representation, the initial reppoints is + # from regular grid placed on a pre-defined bbox. + if self.use_grid_points or not self.center_init: + scale = self.point_base_scale / 2 + points_init = dcn_base_offset / dcn_base_offset.max() * scale + bbox_init = x.new_tensor([-scale, -scale, scale, + scale]).view(1, 4, 1, 1) + else: + points_init = 0 + cls_feat = x + pts_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + pts_feat = reg_conv(pts_feat) + # initialize reppoints + pts_out_init = self.reppoints_pts_init_out( + self.relu(self.reppoints_pts_init_conv(pts_feat))) + if self.use_grid_points: + pts_out_init, bbox_out_init = self.gen_grid_from_reg( + pts_out_init, bbox_init.detach()) + else: + pts_out_init = pts_out_init + points_init + # refine and classify reppoints + pts_out_init_grad_mul = (1 - self.gradient_mul) * pts_out_init.detach( + ) + self.gradient_mul * pts_out_init + dcn_offset = pts_out_init_grad_mul - dcn_base_offset + cls_out = self.reppoints_cls_out( + self.relu(self.reppoints_cls_conv(cls_feat, dcn_offset))) + pts_out_refine = self.reppoints_pts_refine_out( + self.relu(self.reppoints_pts_refine_conv(pts_feat, dcn_offset))) + if self.use_grid_points: + pts_out_refine, bbox_out_refine = self.gen_grid_from_reg( + pts_out_refine, bbox_out_init.detach()) + else: + pts_out_refine = pts_out_refine + pts_out_init.detach() + return cls_out, pts_out_init, pts_out_refine + + def get_points(self, featmap_sizes, img_metas, device): + """Get points according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. 
+
+        Returns:
+            tuple: points of each image, valid flags of each image
+        """
+        num_imgs = len(img_metas)
+        num_levels = len(featmap_sizes)
+
+        # since all images share the same feature map sizes, we only compute
+        # the point centers once
+        multi_level_points = []
+        for i in range(num_levels):
+            points = self.point_generators[i].grid_points(
+                featmap_sizes[i], self.point_strides[i], device)
+            multi_level_points.append(points)
+        points_list = [[point.clone() for point in multi_level_points]
+                       for _ in range(num_imgs)]
+
+        # for each image, we compute valid flags of multi level grids
+        valid_flag_list = []
+        for img_id, img_meta in enumerate(img_metas):
+            multi_level_flags = []
+            for i in range(num_levels):
+                point_stride = self.point_strides[i]
+                feat_h, feat_w = featmap_sizes[i]
+                h, w = img_meta['pad_shape'][:2]
+                valid_feat_h = min(int(np.ceil(h / point_stride)), feat_h)
+                valid_feat_w = min(int(np.ceil(w / point_stride)), feat_w)
+                flags = self.point_generators[i].valid_flags(
+                    (feat_h, feat_w), (valid_feat_h, valid_feat_w), device)
+                multi_level_flags.append(flags)
+            valid_flag_list.append(multi_level_flags)
+
+        return points_list, valid_flag_list
+
+    def centers_to_bboxes(self, point_list):
+        """Get bboxes according to center points.
+
+        Only used in :class:`MaxIoUAssigner`.
+        """
+        bbox_list = []
+        for i_img, point in enumerate(point_list):
+            bbox = []
+            for i_lvl in range(len(self.point_strides)):
+                scale = self.point_base_scale * self.point_strides[i_lvl] * 0.5
+                bbox_shift = torch.Tensor([-scale, -scale, scale,
+                                           scale]).view(1, 4).type_as(point[0])
+                bbox_center = torch.cat(
+                    [point[i_lvl][:, :2], point[i_lvl][:, :2]], dim=1)
+                bbox.append(bbox_center + bbox_shift)
+            bbox_list.append(bbox)
+        return bbox_list
+
+    def offset_to_pts(self, center_list, pred_list):
+        """Convert point offsets to point coordinates."""
+        pts_list = []
+        for i_lvl in range(len(self.point_strides)):
+            pts_lvl = []
+            for i_img in range(len(center_list)):
+                pts_center = center_list[i_img][i_lvl][:, :2].repeat(
+                    1, self.num_points)
+                pts_shift = pred_list[i_lvl][i_img]
+                yx_pts_shift = pts_shift.permute(1, 2, 0).view(
+                    -1, 2 * self.num_points)
+                y_pts_shift = yx_pts_shift[..., 0::2]
+                x_pts_shift = yx_pts_shift[..., 1::2]
+                xy_pts_shift = torch.stack([x_pts_shift, y_pts_shift], -1)
+                xy_pts_shift = xy_pts_shift.view(*yx_pts_shift.shape[:-1], -1)
+                pts = xy_pts_shift * self.point_strides[i_lvl] + pts_center
+                pts_lvl.append(pts)
+            pts_lvl = torch.stack(pts_lvl, 0)
+            pts_list.append(pts_lvl)
+        return pts_list
+
+    def _point_target_single(self,
+                             flat_proposals,
+                             valid_flags,
+                             gt_bboxes,
+                             gt_bboxes_ignore,
+                             gt_labels,
+                             label_channels=1,
+                             stage='init',
+                             unmap_outputs=True):
+        inside_flags = valid_flags
+        if not inside_flags.any():
+            return (None, ) * 7
+        # assign gt and sample proposals
+        proposals = flat_proposals[inside_flags, :]
+
+        if stage == 'init':
+            assigner = self.init_assigner
+            pos_weight = self.train_cfg.init.pos_weight
+        else:
+            assigner = self.refine_assigner
+            pos_weight = self.train_cfg.refine.pos_weight
+        assign_result = assigner.assign(proposals, gt_bboxes, gt_bboxes_ignore,
+                                        None if self.sampling else gt_labels)
+        sampling_result = self.sampler.sample(assign_result, proposals,
+                                              gt_bboxes)
+
+        num_valid_proposals = proposals.shape[0]
+        bbox_gt = proposals.new_zeros([num_valid_proposals, 4])
+        pos_proposals = torch.zeros_like(proposals)
+        proposals_weights = proposals.new_zeros([num_valid_proposals, 4])
+        labels = proposals.new_full((num_valid_proposals, ),
+                                    self.num_classes,
+                                    dtype=torch.long)
+        label_weights = proposals.new_zeros(
+            num_valid_proposals, dtype=torch.float)
+
+        pos_inds = sampling_result.pos_inds
+        neg_inds = sampling_result.neg_inds
+        if len(pos_inds) > 0:
+            pos_gt_bboxes = sampling_result.pos_gt_bboxes
+            bbox_gt[pos_inds, :] = pos_gt_bboxes
+            pos_proposals[pos_inds, :] = proposals[pos_inds, :]
+            proposals_weights[pos_inds, :] = 1.0
+            if gt_labels is None:
+                # Only rpn gives gt_labels as None
+                # Foreground is the first class
+                labels[pos_inds] = 0
+            else:
+                labels[pos_inds] = gt_labels[
+                    sampling_result.pos_assigned_gt_inds]
+            if pos_weight <= 0:
+                label_weights[pos_inds] = 1.0
+            else:
+                label_weights[pos_inds] = pos_weight
+        if len(neg_inds) > 0:
+            label_weights[neg_inds] = 1.0
+
+        # map up to original set of proposals
+        if unmap_outputs:
+            num_total_proposals = flat_proposals.size(0)
+            labels = unmap(labels, num_total_proposals, inside_flags)
+            label_weights = unmap(label_weights, num_total_proposals,
+                                  inside_flags)
+            bbox_gt = unmap(bbox_gt, num_total_proposals, inside_flags)
+            pos_proposals = unmap(pos_proposals, num_total_proposals,
+                                  inside_flags)
+            proposals_weights = unmap(proposals_weights, num_total_proposals,
+                                      inside_flags)
+
+        return (labels, label_weights, bbox_gt, pos_proposals,
+                proposals_weights, pos_inds, neg_inds)
+
+    def get_targets(self,
+                    proposals_list,
+                    valid_flag_list,
+                    gt_bboxes_list,
+                    img_metas,
+                    gt_bboxes_ignore_list=None,
+                    gt_labels_list=None,
+                    stage='init',
+                    label_channels=1,
+                    unmap_outputs=True):
+        """Compute corresponding GT box and classification targets for
+        proposals.
+
+        Args:
+            proposals_list (list[list]): Multi level points/bboxes of each
+                image.
+            valid_flag_list (list[list]): Multi level valid flags of each
+                image.
+            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image.
+            img_metas (list[dict]): Meta info of each image.
+            gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be
+                ignored.
+            gt_labels_list (list[Tensor]): Ground truth labels of each box.
+            stage (str): `init` or `refine`. Generate target for the init
+                stage or the refine stage.
+            label_channels (int): Channel of label.
+            unmap_outputs (bool): Whether to map outputs back to the original
+                set of anchors.
+
+        Returns:
+            tuple:
+                - labels_list (list[Tensor]): Labels of each level.
+                - label_weights_list (list[Tensor]): Label weights of each level.  # noqa: E501
+                - bbox_gt_list (list[Tensor]): Ground truth bbox of each level.
+                - proposal_list (list[Tensor]): Proposals(points/bboxes) of each level.  # noqa: E501
+                - proposal_weights_list (list[Tensor]): Proposal weights of each level.  # noqa: E501
+                - num_total_pos (int): Number of positive samples in all images.  # noqa: E501
+                - num_total_neg (int): Number of negative samples in all images.
# noqa: E501 + """ + assert stage in ['init', 'refine'] + num_imgs = len(img_metas) + assert len(proposals_list) == len(valid_flag_list) == num_imgs + + # points number of multi levels + num_level_proposals = [points.size(0) for points in proposals_list[0]] + + # concat all level points and flags to a single tensor + for i in range(num_imgs): + assert len(proposals_list[i]) == len(valid_flag_list[i]) + proposals_list[i] = torch.cat(proposals_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_labels, all_label_weights, all_bbox_gt, all_proposals, + all_proposal_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._point_target_single, + proposals_list, + valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + stage=stage, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + # no valid points + if any([labels is None for labels in all_labels]): + return None + # sampled points of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + labels_list = images_to_levels(all_labels, num_level_proposals) + label_weights_list = images_to_levels(all_label_weights, + num_level_proposals) + bbox_gt_list = images_to_levels(all_bbox_gt, num_level_proposals) + proposals_list = images_to_levels(all_proposals, num_level_proposals) + proposal_weights_list = images_to_levels(all_proposal_weights, + num_level_proposals) + return (labels_list, label_weights_list, bbox_gt_list, proposals_list, + proposal_weights_list, num_total_pos, num_total_neg) + + def loss_single(self, cls_score, pts_pred_init, pts_pred_refine, labels, + label_weights, bbox_gt_init, bbox_weights_init, + bbox_gt_refine, bbox_weights_refine, stride, + num_total_samples_init, num_total_samples_refine): + # classification loss + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + cls_score = cls_score.contiguous() + loss_cls = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=num_total_samples_refine) + + # points loss + bbox_gt_init = bbox_gt_init.reshape(-1, 4) + bbox_weights_init = bbox_weights_init.reshape(-1, 4) + bbox_pred_init = self.points2bbox( + pts_pred_init.reshape(-1, 2 * self.num_points), y_first=False) + bbox_gt_refine = bbox_gt_refine.reshape(-1, 4) + bbox_weights_refine = bbox_weights_refine.reshape(-1, 4) + bbox_pred_refine = self.points2bbox( + pts_pred_refine.reshape(-1, 2 * self.num_points), y_first=False) + normalize_term = self.point_base_scale * stride + loss_pts_init = self.loss_bbox_init( + bbox_pred_init / normalize_term, + bbox_gt_init / normalize_term, + bbox_weights_init, + avg_factor=num_total_samples_init) + loss_pts_refine = self.loss_bbox_refine( + bbox_pred_refine / normalize_term, + bbox_gt_refine / normalize_term, + bbox_weights_refine, + avg_factor=num_total_samples_refine) + return loss_cls, loss_pts_init, loss_pts_refine + + def loss(self, + cls_scores, + pts_preds_init, + pts_preds_refine, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == len(self.point_generators) + device = cls_scores[0].device + 
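+        # RepPoints is supervised in two stages: the init stage assigns
+        # targets to the raw point centers (or to bboxes derived from them),
+        # while the refine stage assigns targets to bboxes decoded from the
+        # detached init-stage point predictions.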
label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + # target for initial stage + center_list, valid_flag_list = self.get_points(featmap_sizes, + img_metas, device) + pts_coordinate_preds_init = self.offset_to_pts(center_list, + pts_preds_init) + if self.train_cfg.init.assigner['type'] == 'PointAssigner': + # Assign target for center list + candidate_list = center_list + else: + # transform center list to bbox list and + # assign target for bbox list + bbox_list = self.centers_to_bboxes(center_list) + candidate_list = bbox_list + cls_reg_targets_init = self.get_targets( + candidate_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + stage='init', + label_channels=label_channels) + (*_, bbox_gt_list_init, candidate_list_init, bbox_weights_list_init, + num_total_pos_init, num_total_neg_init) = cls_reg_targets_init + num_total_samples_init = ( + num_total_pos_init + + num_total_neg_init if self.sampling else num_total_pos_init) + + # target for refinement stage + center_list, valid_flag_list = self.get_points(featmap_sizes, + img_metas, device) + pts_coordinate_preds_refine = self.offset_to_pts( + center_list, pts_preds_refine) + bbox_list = [] + for i_img, center in enumerate(center_list): + bbox = [] + for i_lvl in range(len(pts_preds_refine)): + bbox_preds_init = self.points2bbox( + pts_preds_init[i_lvl].detach()) + bbox_shift = bbox_preds_init * self.point_strides[i_lvl] + bbox_center = torch.cat( + [center[i_lvl][:, :2], center[i_lvl][:, :2]], dim=1) + bbox.append(bbox_center + + bbox_shift[i_img].permute(1, 2, 0).reshape(-1, 4)) + bbox_list.append(bbox) + cls_reg_targets_refine = self.get_targets( + bbox_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + stage='refine', + label_channels=label_channels) + (labels_list, label_weights_list, bbox_gt_list_refine, + candidate_list_refine, bbox_weights_list_refine, num_total_pos_refine, + num_total_neg_refine) = cls_reg_targets_refine + num_total_samples_refine = ( + num_total_pos_refine + + num_total_neg_refine if self.sampling else num_total_pos_refine) + + # compute loss + losses_cls, losses_pts_init, losses_pts_refine = multi_apply( + self.loss_single, + cls_scores, + pts_coordinate_preds_init, + pts_coordinate_preds_refine, + labels_list, + label_weights_list, + bbox_gt_list_init, + bbox_weights_list_init, + bbox_gt_list_refine, + bbox_weights_list_refine, + self.point_strides, + num_total_samples_init=num_total_samples_init, + num_total_samples_refine=num_total_samples_refine) + loss_dict_all = { + 'loss_cls': losses_cls, + 'loss_pts_init': losses_pts_init, + 'loss_pts_refine': losses_pts_refine + } + return loss_dict_all + + def get_bboxes(self, + cls_scores, + pts_preds_init, + pts_preds_refine, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + assert len(cls_scores) == len(pts_preds_refine) + device = cls_scores[0].device + bbox_preds_refine = [ + self.points2bbox(pts_pred_refine) + for pts_pred_refine in pts_preds_refine + ] + num_levels = len(cls_scores) + mlvl_points = [ + self.point_generators[i].grid_points(cls_scores[i].size()[-2:], + self.point_strides[i], device) + for i in range(num_levels) + ] + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds_refine[i][img_id].detach() + for i in range(num_levels) + ] + img_shape = 
img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, bbox_pred_list, + mlvl_points, img_shape, + scale_factor, cfg, rescale, + with_nms) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_points, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_points) + mlvl_bboxes = [] + mlvl_scores = [] + for i_lvl, (cls_score, bbox_pred, points) in enumerate( + zip(cls_scores, bbox_preds, mlvl_points)): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + points = points[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + bbox_pos_center = torch.cat([points[:, :2], points[:, :2]], dim=1) + bboxes = bbox_pred * self.point_strides[i_lvl] + bbox_pos_center + x1 = bboxes[:, 0].clamp(min=0, max=img_shape[1]) + y1 = bboxes[:, 1].clamp(min=0, max=img_shape[0]) + x2 = bboxes[:, 2].clamp(min=0, max=img_shape[1]) + y2 = bboxes[:, 3].clamp(min=0, max=img_shape[0]) + bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + if with_nms: + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/retina_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..b12416fa8332f02b9a04bbfc7926f6d13875e61b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/retina_head.py @@ -0,0 +1,114 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init + +from ..builder import HEADS +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class RetinaHead(AnchorHead): + r"""An anchor-based head used in `RetinaNet + `_. + + The head contains two subnetworks. The first classifies anchor boxes and + the second regresses deltas for the anchors. 
+
+    Example:
+        >>> import torch
+        >>> self = RetinaHead(11, 7)
+        >>> x = torch.rand(1, 7, 32, 32)
+        >>> cls_score, bbox_pred = self.forward_single(x)
+        >>> # Each anchor predicts a score for each class except background
+        >>> cls_per_anchor = cls_score.shape[1] / self.num_anchors
+        >>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors
+        >>> assert cls_per_anchor == self.num_classes
+        >>> assert box_per_anchor == 4
+    """
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 stacked_convs=4,
+                 conv_cfg=None,
+                 norm_cfg=None,
+                 anchor_generator=dict(
+                     type='AnchorGenerator',
+                     octave_base_scale=4,
+                     scales_per_octave=3,
+                     ratios=[0.5, 1.0, 2.0],
+                     strides=[8, 16, 32, 64, 128]),
+                 **kwargs):
+        self.stacked_convs = stacked_convs
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+        super(RetinaHead, self).__init__(
+            num_classes,
+            in_channels,
+            anchor_generator=anchor_generator,
+            **kwargs)
+
+    def _init_layers(self):
+        """Initialize layers of the head."""
+        self.relu = nn.ReLU(inplace=True)
+        self.cls_convs = nn.ModuleList()
+        self.reg_convs = nn.ModuleList()
+        for i in range(self.stacked_convs):
+            chn = self.in_channels if i == 0 else self.feat_channels
+            self.cls_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+            self.reg_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+        self.retina_cls = nn.Conv2d(
+            self.feat_channels,
+            self.num_anchors * self.cls_out_channels,
+            3,
+            padding=1)
+        self.retina_reg = nn.Conv2d(
+            self.feat_channels, self.num_anchors * 4, 3, padding=1)
+
+    def init_weights(self):
+        """Initialize weights of the head."""
+        for m in self.cls_convs:
+            normal_init(m.conv, std=0.01)
+        for m in self.reg_convs:
+            normal_init(m.conv, std=0.01)
+        bias_cls = bias_init_with_prob(0.01)
+        normal_init(self.retina_cls, std=0.01, bias=bias_cls)
+        normal_init(self.retina_reg, std=0.01)
+
+    def forward_single(self, x):
+        """Forward feature of a single scale level.
+
+        Args:
+            x (Tensor): Features of a single scale level.
+
+        Returns:
+            tuple:
+                cls_score (Tensor): Cls scores for a single scale level,
+                    the channel number is num_anchors * num_classes.
+                bbox_pred (Tensor): Box energies / deltas for a single scale
+                    level, the channel number is num_anchors * 4.
+        """
+        cls_feat = x
+        reg_feat = x
+        for cls_conv in self.cls_convs:
+            cls_feat = cls_conv(cls_feat)
+        for reg_conv in self.reg_convs:
+            reg_feat = reg_conv(reg_feat)
+        cls_score = self.retina_cls(cls_feat)
+        bbox_pred = self.retina_reg(reg_feat)
+        return cls_score, bbox_pred
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/retina_sepbn_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/retina_sepbn_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b8ce7f0104b90af4b128e0f245473a1c0219fcd
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/retina_sepbn_head.py
@@ -0,0 +1,113 @@
+import torch.nn as nn
+from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init
+
+from ..builder import HEADS
+from .anchor_head import AnchorHead
+
+
+@HEADS.register_module()
+class RetinaSepBNHead(AnchorHead):
+    """RetinaHead with separate BN.
+
+    In RetinaHead, conv/norm layers are shared across different FPN levels,
+    while in RetinaSepBNHead, conv layers are shared across different FPN
+    levels, but BN layers are separated.
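+
+    Example (an illustrative sketch, assuming the default AnchorHead anchor
+    and loss settings; not part of the upstream docstring):
+        >>> import torch
+        >>> self = RetinaSepBNHead(80, num_ins=5, in_channels=4)
+        >>> feats = [torch.rand(1, 4, 2 ** (5 - i), 2 ** (5 - i))
+        ...          for i in range(5)]
+        >>> cls_scores, bbox_preds = self.forward(feats)
+        >>> assert len(cls_scores) == len(bbox_preds) == 5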
+ """ + + def __init__(self, + num_classes, + num_ins, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=None, + **kwargs): + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.num_ins = num_ins + super(RetinaSepBNHead, self).__init__(num_classes, in_channels, + **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.num_ins): + cls_convs = nn.ModuleList() + reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.cls_convs.append(cls_convs) + self.reg_convs.append(reg_convs) + for i in range(self.stacked_convs): + for j in range(1, self.num_ins): + self.cls_convs[j][i].conv = self.cls_convs[0][i].conv + self.reg_convs[j][i].conv = self.reg_convs[0][i].conv + self.retina_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.retina_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.cls_convs[0]: + normal_init(m.conv, std=0.01) + for m in self.reg_convs[0]: + normal_init(m.conv, std=0.01) + bias_cls = bias_init_with_prob(0.01) + normal_init(self.retina_cls, std=0.01, bias=bias_cls) + normal_init(self.retina_reg, std=0.01) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of classification scores and bbox prediction + cls_scores (list[Tensor]): Classification scores for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * 4. + """ + cls_scores = [] + bbox_preds = [] + for i, x in enumerate(feats): + cls_feat = feats[i] + reg_feat = feats[i] + for cls_conv in self.cls_convs[i]: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs[i]: + reg_feat = reg_conv(reg_feat) + cls_score = self.retina_cls(cls_feat) + bbox_pred = self.retina_reg(reg_feat) + cls_scores.append(cls_score) + bbox_preds.append(bbox_pred) + return cls_scores, bbox_preds diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..f565d1a41b06c92eaeae2e5418fad54dd27ae656 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_head.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import normal_init +from mmcv.ops import batched_nms + +from ..builder import HEADS +from .anchor_head import AnchorHead +from .rpn_test_mixin import RPNTestMixin + + +@HEADS.register_module() +class RPNHead(RPNTestMixin, AnchorHead): + """RPN head. + + Args: + in_channels (int): Number of channels in the input feature map. 
+ """ # noqa: W605 + + def __init__(self, in_channels, **kwargs): + super(RPNHead, self).__init__(1, in_channels, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.rpn_conv = nn.Conv2d( + self.in_channels, self.feat_channels, 3, padding=1) + self.rpn_cls = nn.Conv2d(self.feat_channels, + self.num_anchors * self.cls_out_channels, 1) + self.rpn_reg = nn.Conv2d(self.feat_channels, self.num_anchors * 4, 1) + + def init_weights(self): + """Initialize weights of the head.""" + normal_init(self.rpn_conv, std=0.01) + normal_init(self.rpn_cls, std=0.01) + normal_init(self.rpn_reg, std=0.01) + + def forward_single(self, x): + """Forward feature map of a single scale level.""" + x = self.rpn_conv(x) + x = F.relu(x, inplace=True) + rpn_cls_score = self.rpn_cls(x) + rpn_bbox_pred = self.rpn_reg(x) + return rpn_cls_score, rpn_bbox_pred + + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + losses = super(RPNHead, self).loss( + cls_scores, + bbox_preds, + gt_bboxes, + None, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + return dict( + loss_rpn_cls=losses['loss_cls'], loss_rpn_bbox=losses['loss_bbox']) + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (num_anchors * 4, H, W). + mlvl_anchors (list[Tensor]): Box reference for each scale level + with shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + + Returns: + Tensor: Labeled boxes in shape (n, 5), where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. 
+ """ + cfg = self.test_cfg if cfg is None else cfg + # bboxes from different level should be independent during NMS, + # level_ids are used as labels for batched NMS to separate them + level_ids = [] + mlvl_scores = [] + mlvl_bbox_preds = [] + mlvl_valid_anchors = [] + for idx in range(len(cls_scores)): + rpn_cls_score = cls_scores[idx] + rpn_bbox_pred = bbox_preds[idx] + assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:] + rpn_cls_score = rpn_cls_score.permute(1, 2, 0) + if self.use_sigmoid_cls: + rpn_cls_score = rpn_cls_score.reshape(-1) + scores = rpn_cls_score.sigmoid() + else: + rpn_cls_score = rpn_cls_score.reshape(-1, 2) + # We set FG labels to [0, num_class-1] and BG label to + # num_class in RPN head since mmdet v2.5, which is unified to + # be consistent with other head since mmdet v2.0. In mmdet v2.0 + # to v2.4 we keep BG label as 0 and FG label as 1 in rpn head. + scores = rpn_cls_score.softmax(dim=1)[:, 0] + rpn_bbox_pred = rpn_bbox_pred.permute(1, 2, 0).reshape(-1, 4) + anchors = mlvl_anchors[idx] + if cfg.nms_pre > 0 and scores.shape[0] > cfg.nms_pre: + # sort is faster than topk + # _, topk_inds = scores.topk(cfg.nms_pre) + ranked_scores, rank_inds = scores.sort(descending=True) + topk_inds = rank_inds[:cfg.nms_pre] + scores = ranked_scores[:cfg.nms_pre] + rpn_bbox_pred = rpn_bbox_pred[topk_inds, :] + anchors = anchors[topk_inds, :] + mlvl_scores.append(scores) + mlvl_bbox_preds.append(rpn_bbox_pred) + mlvl_valid_anchors.append(anchors) + level_ids.append( + scores.new_full((scores.size(0), ), idx, dtype=torch.long)) + + scores = torch.cat(mlvl_scores) + anchors = torch.cat(mlvl_valid_anchors) + rpn_bbox_pred = torch.cat(mlvl_bbox_preds) + proposals = self.bbox_coder.decode( + anchors, rpn_bbox_pred, max_shape=img_shape) + ids = torch.cat(level_ids) + + if cfg.min_bbox_size > 0: + w = proposals[:, 2] - proposals[:, 0] + h = proposals[:, 3] - proposals[:, 1] + valid_inds = torch.nonzero( + (w >= cfg.min_bbox_size) + & (h >= cfg.min_bbox_size), + as_tuple=False).squeeze() + if valid_inds.sum().item() != len(proposals): + proposals = proposals[valid_inds, :] + scores = scores[valid_inds] + ids = ids[valid_inds] + + # TODO: remove the hard coded nms type + nms_cfg = dict(type='nms', iou_threshold=cfg.nms_thr) + dets, keep = batched_nms(proposals, scores, ids, nms_cfg) + return dets[:cfg.nms_post] diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_test_mixin.py b/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_test_mixin.py new file mode 100644 index 0000000000000000000000000000000000000000..4ce5c66f82595f496e6e55719c1caee75150d568 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/rpn_test_mixin.py @@ -0,0 +1,59 @@ +import sys + +from mmdet.core import merge_aug_proposals + +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import completed + + +class RPNTestMixin(object): + """Test methods of RPN.""" + + if sys.version_info >= (3, 7): + + async def async_simple_test_rpn(self, x, img_metas): + sleep_interval = self.test_cfg.pop('async_sleep_interval', 0.025) + async with completed( + __name__, 'rpn_head_forward', + sleep_interval=sleep_interval): + rpn_outs = self(x) + + proposal_list = self.get_bboxes(*rpn_outs, img_metas) + return proposal_list + + def simple_test_rpn(self, x, img_metas): + """Test without augmentation. + + Args: + x (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Proposals of each image. 
+ """ + rpn_outs = self(x) + proposal_list = self.get_bboxes(*rpn_outs, img_metas) + return proposal_list + + def aug_test_rpn(self, feats, img_metas): + samples_per_gpu = len(img_metas[0]) + aug_proposals = [[] for _ in range(samples_per_gpu)] + for x, img_meta in zip(feats, img_metas): + proposal_list = self.simple_test_rpn(x, img_meta) + for i, proposals in enumerate(proposal_list): + aug_proposals[i].append(proposals) + # reorganize the order of 'img_metas' to match the dimensions + # of 'aug_proposals' + aug_img_metas = [] + for i in range(samples_per_gpu): + aug_img_meta = [] + for j in range(len(img_metas)): + aug_img_meta.append(img_metas[j][i]) + aug_img_metas.append(aug_img_meta) + # after merging, proposals will be rescaled to the original image size + merged_proposals = [ + merge_aug_proposals(proposals, aug_img_meta, self.test_cfg) + for proposals, aug_img_meta in zip(aug_proposals, aug_img_metas) + ] + return merged_proposals diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/sabl_retina_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/sabl_retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..73143da4a5a1545c24fcbf60d42b12615da14efd --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/sabl_retina_head.py @@ -0,0 +1,618 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, images_to_levels, + multi_apply, multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .guided_anchor_head import GuidedAnchorHead + + +@HEADS.register_module() +class SABLRetinaHead(BaseDenseHead): + """Side-Aware Boundary Localization (SABL) for RetinaNet. + + The anchor generation, assigning and sampling in SABLRetinaHead + are the same as GuidedAnchorHead for guided anchoring. + + Please refer to https://arxiv.org/abs/1912.04260 for more details. + + Args: + num_classes (int): Number of classes. + in_channels (int): Number of channels in the input feature map. + stacked_convs (int): Number of Convs for classification \ + and regression branches. Defaults to 4. + feat_channels (int): Number of hidden channels. \ + Defaults to 256. + approx_anchor_generator (dict): Config dict for approx generator. + square_anchor_generator (dict): Config dict for square generator. + conv_cfg (dict): Config dict for ConvModule. Defaults to None. + norm_cfg (dict): Config dict for Norm Layer. Defaults to None. + bbox_coder (dict): Config dict for bbox coder. + reg_decoded_bbox (bool): Whether to regress decoded bbox. \ + Defaults to False. + train_cfg (dict): Training config of SABLRetinaHead. + test_cfg (dict): Testing config of SABLRetinaHead. + loss_cls (dict): Config of classification loss. + loss_bbox_cls (dict): Config of classification loss for bbox branch. + loss_bbox_reg (dict): Config of regression loss for bbox branch. 
+ """ + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + conv_cfg=None, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', + num_buckets=14, + scale_factor=3.0), + reg_decoded_bbox=False, + train_cfg=None, + test_cfg=None, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)): + super(SABLRetinaHead, self).__init__() + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.num_buckets = bbox_coder['num_buckets'] + self.side_num = int(np.ceil(self.num_buckets / 2)) + + assert (approx_anchor_generator['octave_base_scale'] == + square_anchor_generator['scales'][0]) + assert (approx_anchor_generator['strides'] == + square_anchor_generator['strides']) + + self.approx_anchor_generator = build_anchor_generator( + approx_anchor_generator) + self.square_anchor_generator = build_anchor_generator( + square_anchor_generator) + self.approxs_per_octave = ( + self.approx_anchor_generator.num_base_anchors[0]) + + # one anchor per location + self.num_anchors = 1 + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.reg_decoded_bbox = reg_decoded_bbox + + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + self.sampling = loss_cls['type'] not in [ + 'FocalLoss', 'GHMC', 'QualityFocalLoss' + ] + if self.use_sigmoid_cls: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox_cls = build_loss(loss_bbox_cls) + self.loss_bbox_reg = build_loss(loss_bbox_reg) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.fp16_enabled = False + self._init_layers() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.retina_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.retina_bbox_reg = nn.Conv2d( + self.feat_channels, self.side_num * 4, 3, padding=1) + self.retina_bbox_cls = nn.Conv2d( + self.feat_channels, self.side_num * 4, 3, padding=1) + + def init_weights(self): + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, 
std=0.01)
+        bias_cls = bias_init_with_prob(0.01)
+        normal_init(self.retina_cls, std=0.01, bias=bias_cls)
+        normal_init(self.retina_bbox_reg, std=0.01)
+        normal_init(self.retina_bbox_cls, std=0.01)
+
+    def forward_single(self, x):
+        cls_feat = x
+        reg_feat = x
+        for cls_conv in self.cls_convs:
+            cls_feat = cls_conv(cls_feat)
+        for reg_conv in self.reg_convs:
+            reg_feat = reg_conv(reg_feat)
+        cls_score = self.retina_cls(cls_feat)
+        bbox_cls_pred = self.retina_bbox_cls(reg_feat)
+        bbox_reg_pred = self.retina_bbox_reg(reg_feat)
+        bbox_pred = (bbox_cls_pred, bbox_reg_pred)
+        return cls_score, bbox_pred
+
+    def forward(self, feats):
+        return multi_apply(self.forward_single, feats)
+
+    def get_anchors(self, featmap_sizes, img_metas, device='cuda'):
+        """Get squares according to feature map sizes and guided anchors.
+
+        Args:
+            featmap_sizes (list[tuple]): Multi-level feature map sizes.
+            img_metas (list[dict]): Image meta info.
+            device (torch.device | str): device for returned tensors
+
+        Returns:
+            tuple: square approxs of each image
+        """
+        num_imgs = len(img_metas)
+
+        # since all images share the same feature map sizes, we only compute
+        # the squares once
+        multi_level_squares = self.square_anchor_generator.grid_anchors(
+            featmap_sizes, device=device)
+        squares_list = [multi_level_squares for _ in range(num_imgs)]
+
+        return squares_list
+
+    def get_target(self,
+                   approx_list,
+                   inside_flag_list,
+                   square_list,
+                   gt_bboxes_list,
+                   img_metas,
+                   gt_bboxes_ignore_list=None,
+                   gt_labels_list=None,
+                   label_channels=None,
+                   sampling=True,
+                   unmap_outputs=True):
+        """Compute bucketing targets.
+
+        Args:
+            approx_list (list[list]): Multi level approxs of each image.
+            inside_flag_list (list[list]): Multi level inside flags of each
+                image.
+            square_list (list[list]): Multi level squares of each image.
+            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image.
+            img_metas (list[dict]): Meta info of each image.
+            gt_bboxes_ignore_list (list[Tensor]): ignore list of gt bboxes.
+            gt_labels_list (list[Tensor]): Gt labels of each image.
+            label_channels (int): Channel of label.
+            sampling (bool): Sample Anchors or not.
+            unmap_outputs (bool): unmap outputs or not.
+
+        Returns:
+            tuple: Returns a tuple containing learning targets.
+
+                - labels_list (list[Tensor]): Labels of each level.
+                - label_weights_list (list[Tensor]): Label weights of each \
+                    level.
+                - bbox_cls_targets_list (list[Tensor]): BBox cls targets of \
+                    each level.
+                - bbox_cls_weights_list (list[Tensor]): BBox cls weights of \
+                    each level.
+                - bbox_reg_targets_list (list[Tensor]): BBox reg targets of \
+                    each level.
+                - bbox_reg_weights_list (list[Tensor]): BBox reg weights of \
+                    each level.
+                - num_total_pos (int): Number of positive samples in all \
+                    images.
+                - num_total_neg (int): Number of negative samples in all \
+                    images.
+ """ + num_imgs = len(img_metas) + assert len(approx_list) == len(inside_flag_list) == len( + square_list) == num_imgs + # anchor number of multi levels + num_level_squares = [squares.size(0) for squares in square_list[0]] + # concat all level anchors and flags to a single tensor + inside_flag_flat_list = [] + approx_flat_list = [] + square_flat_list = [] + for i in range(num_imgs): + assert len(square_list[i]) == len(inside_flag_list[i]) + inside_flag_flat_list.append(torch.cat(inside_flag_list[i])) + approx_flat_list.append(torch.cat(approx_list[i])) + square_flat_list.append(torch.cat(square_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_labels, all_label_weights, all_bbox_cls_targets, + all_bbox_cls_weights, all_bbox_reg_targets, all_bbox_reg_weights, + pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + approx_flat_list, + inside_flag_flat_list, + square_flat_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + sampling=sampling, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + labels_list = images_to_levels(all_labels, num_level_squares) + label_weights_list = images_to_levels(all_label_weights, + num_level_squares) + bbox_cls_targets_list = images_to_levels(all_bbox_cls_targets, + num_level_squares) + bbox_cls_weights_list = images_to_levels(all_bbox_cls_weights, + num_level_squares) + bbox_reg_targets_list = images_to_levels(all_bbox_reg_targets, + num_level_squares) + bbox_reg_weights_list = images_to_levels(all_bbox_reg_weights, + num_level_squares) + return (labels_list, label_weights_list, bbox_cls_targets_list, + bbox_cls_weights_list, bbox_reg_targets_list, + bbox_reg_weights_list, num_total_pos, num_total_neg) + + def _get_target_single(self, + flat_approxs, + inside_flags, + flat_squares, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=None, + sampling=True, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Args: + flat_approxs (Tensor): flat approxs of a single image, + shape (n, 4) + inside_flags (Tensor): inside flags of a single image, + shape (n, ). + flat_squares (Tensor): flat squares of a single image, + shape (approxs_per_octave * n, 4) + gt_bboxes (Tensor): Ground truth bboxes of a single image, \ + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + img_meta (dict): Meta info of the image. + label_channels (int): Channel of label. + sampling (bool): Sample Anchors or not. + unmap_outputs (bool): unmap outputs or not. 
+
+        Returns:
+            tuple:
+
+                - labels (Tensor): Labels in a single image.
+                - label_weights (Tensor): Label weights in a single image.
+                - bbox_cls_targets (Tensor): BBox cls targets in a single image.
+                - bbox_cls_weights (Tensor): BBox cls weights in a single image.
+                - bbox_reg_targets (Tensor): BBox reg targets in a single image.
+                - bbox_reg_weights (Tensor): BBox reg weights in a single image.
+                - pos_inds (Tensor): Indices of positive samples \
+                    in a single image.
+                - neg_inds (Tensor): Indices of negative samples \
+                    in a single image.
+        """
+        if not inside_flags.any():
+            return (None, ) * 8
+        # assign gt and sample anchors
+        expand_inside_flags = inside_flags[:, None].expand(
+            -1, self.approxs_per_octave).reshape(-1)
+        approxs = flat_approxs[expand_inside_flags, :]
+        squares = flat_squares[inside_flags, :]
+
+        assign_result = self.assigner.assign(approxs, squares,
+                                             self.approxs_per_octave,
+                                             gt_bboxes, gt_bboxes_ignore)
+        sampling_result = self.sampler.sample(assign_result, squares,
+                                              gt_bboxes)
+
+        num_valid_squares = squares.shape[0]
+        bbox_cls_targets = squares.new_zeros(
+            (num_valid_squares, self.side_num * 4))
+        bbox_cls_weights = squares.new_zeros(
+            (num_valid_squares, self.side_num * 4))
+        bbox_reg_targets = squares.new_zeros(
+            (num_valid_squares, self.side_num * 4))
+        bbox_reg_weights = squares.new_zeros(
+            (num_valid_squares, self.side_num * 4))
+        labels = squares.new_full((num_valid_squares, ),
+                                  self.num_classes,
+                                  dtype=torch.long)
+        label_weights = squares.new_zeros(num_valid_squares, dtype=torch.float)
+
+        pos_inds = sampling_result.pos_inds
+        neg_inds = sampling_result.neg_inds
+        if len(pos_inds) > 0:
+            (pos_bbox_reg_targets, pos_bbox_reg_weights, pos_bbox_cls_targets,
+             pos_bbox_cls_weights) = self.bbox_coder.encode(
+                 sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes)
+
+            bbox_cls_targets[pos_inds, :] = pos_bbox_cls_targets
+            bbox_reg_targets[pos_inds, :] = pos_bbox_reg_targets
+            bbox_cls_weights[pos_inds, :] = pos_bbox_cls_weights
+            bbox_reg_weights[pos_inds, :] = pos_bbox_reg_weights
+            if gt_labels is None:
+                # Only rpn gives gt_labels as None
+                # Foreground is the first class
+                labels[pos_inds] = 0
+            else:
+                labels[pos_inds] = gt_labels[
+                    sampling_result.pos_assigned_gt_inds]
+            if self.train_cfg.pos_weight <= 0:
+                label_weights[pos_inds] = 1.0
+            else:
+                label_weights[pos_inds] = self.train_cfg.pos_weight
+        if len(neg_inds) > 0:
+            label_weights[neg_inds] = 1.0
+
+        # map up to original set of anchors
+        if unmap_outputs:
+            num_total_anchors = flat_squares.size(0)
+            labels = unmap(
+                labels, num_total_anchors, inside_flags, fill=self.num_classes)
+            label_weights = unmap(label_weights, num_total_anchors,
+                                  inside_flags)
+            bbox_cls_targets = unmap(bbox_cls_targets, num_total_anchors,
+                                     inside_flags)
+            bbox_cls_weights = unmap(bbox_cls_weights, num_total_anchors,
+                                     inside_flags)
+            bbox_reg_targets = unmap(bbox_reg_targets, num_total_anchors,
+                                     inside_flags)
+            bbox_reg_weights = unmap(bbox_reg_weights, num_total_anchors,
+                                     inside_flags)
+        return (labels, label_weights, bbox_cls_targets, bbox_cls_weights,
+                bbox_reg_targets, bbox_reg_weights, pos_inds, neg_inds)
+
+    def loss_single(self, cls_score, bbox_pred, labels, label_weights,
+                    bbox_cls_targets, bbox_cls_weights, bbox_reg_targets,
+                    bbox_reg_weights, num_total_samples):
+        # classification loss
+        labels = labels.reshape(-1)
+        label_weights = label_weights.reshape(-1)
+        cls_score = cls_score.permute(0, 2, 3,
+                                      1).reshape(-1, self.cls_out_channels)
+        loss_cls = self.loss_cls(
+            cls_score, labels,
label_weights, avg_factor=num_total_samples) + # regression loss + bbox_cls_targets = bbox_cls_targets.reshape(-1, self.side_num * 4) + bbox_cls_weights = bbox_cls_weights.reshape(-1, self.side_num * 4) + bbox_reg_targets = bbox_reg_targets.reshape(-1, self.side_num * 4) + bbox_reg_weights = bbox_reg_weights.reshape(-1, self.side_num * 4) + (bbox_cls_pred, bbox_reg_pred) = bbox_pred + bbox_cls_pred = bbox_cls_pred.permute(0, 2, 3, 1).reshape( + -1, self.side_num * 4) + bbox_reg_pred = bbox_reg_pred.permute(0, 2, 3, 1).reshape( + -1, self.side_num * 4) + loss_bbox_cls = self.loss_bbox_cls( + bbox_cls_pred, + bbox_cls_targets.long(), + bbox_cls_weights, + avg_factor=num_total_samples * 4 * self.side_num) + loss_bbox_reg = self.loss_bbox_reg( + bbox_reg_pred, + bbox_reg_targets, + bbox_reg_weights, + avg_factor=num_total_samples * 4 * self.bbox_coder.offset_topk) + return loss_cls, loss_bbox_cls, loss_bbox_reg + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.approx_anchor_generator.num_levels + + device = cls_scores[0].device + + # get sampled approxes + approxs_list, inside_flag_list = GuidedAnchorHead.get_sampled_approxs( + self, featmap_sizes, img_metas, device=device) + + square_list = self.get_anchors(featmap_sizes, img_metas, device=device) + + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_target( + approxs_list, + inside_flag_list, + square_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + sampling=self.sampling) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_cls_targets_list, + bbox_cls_weights_list, bbox_reg_targets_list, bbox_reg_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + losses_cls, losses_bbox_cls, losses_bbox_reg = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + labels_list, + label_weights_list, + bbox_cls_targets_list, + bbox_cls_weights_list, + bbox_reg_targets_list, + bbox_reg_weights_list, + num_total_samples=num_total_samples) + return dict( + loss_cls=losses_cls, + loss_bbox_cls=losses_bbox_cls, + loss_bbox_reg=losses_bbox_reg) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=False): + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + + device = cls_scores[0].device + mlvl_anchors = self.get_anchors( + featmap_sizes, img_metas, device=device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_cls_pred_list = [ + bbox_preds[i][0][img_id].detach() for i in range(num_levels) + ] + bbox_reg_pred_list = [ + bbox_preds[i][1][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self.get_bboxes_single(cls_score_list, + bbox_cls_pred_list, + bbox_reg_pred_list, + mlvl_anchors[img_id], img_shape, + scale_factor, cfg, rescale) + result_list.append(proposals) + return result_list + + 
def get_bboxes_single(self, + cls_scores, + bbox_cls_preds, + bbox_reg_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_confids = [] + assert len(cls_scores) == len(bbox_cls_preds) == len( + bbox_reg_preds) == len(mlvl_anchors) + for cls_score, bbox_cls_pred, bbox_reg_pred, anchors in zip( + cls_scores, bbox_cls_preds, bbox_reg_preds, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_cls_pred.size( + )[-2:] == bbox_reg_pred.size()[-2::] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_cls_pred = bbox_cls_pred.permute(1, 2, 0).reshape( + -1, self.side_num * 4) + bbox_reg_pred = bbox_reg_pred.permute(1, 2, 0).reshape( + -1, self.side_num * 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_cls_pred = bbox_cls_pred[topk_inds, :] + bbox_reg_pred = bbox_reg_pred[topk_inds, :] + scores = scores[topk_inds, :] + bbox_preds = [ + bbox_cls_pred.contiguous(), + bbox_reg_pred.contiguous() + ] + bboxes, confids = self.bbox_coder.decode( + anchors.contiguous(), bbox_preds, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_confids.append(confids) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + mlvl_confids = torch.cat(mlvl_confids) + if self.use_sigmoid_cls: + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + det_bboxes, det_labels = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_confids) + return det_bboxes, det_labels diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/ssd_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/ssd_head.py new file mode 100644 index 0000000000000000000000000000000000000000..42554c12c6f19ce48af1b49bc19f029b849250d6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/ssd_head.py @@ -0,0 +1,259 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import xavier_init +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, multi_apply) +from ..builder import HEADS +from ..losses import smooth_l1_loss +from .anchor_head import AnchorHead + + +# TODO: add loss evaluator for SSD +@HEADS.register_module() +class SSDHead(AnchorHead): + """SSD head used in https://arxiv.org/abs/1512.02325. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + anchor_generator (dict): Config dict for anchor generator + bbox_coder (dict): Config of bounding box coder. + reg_decoded_bbox (bool): If true, the regression loss would be + applied on decoded bounding boxes. Default: False + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. 
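+
+    Example (an illustrative sketch with the default SSD300 settings; not
+    part of the upstream docstring):
+        >>> import torch
+        >>> self = SSDHead()
+        >>> # six feature maps with the channel counts SSD300 expects
+        >>> sizes = (38, 19, 10, 5, 3, 1)
+        >>> feats = [torch.rand(1, c, s, s)
+        ...          for c, s in zip(self.in_channels, sizes)]
+        >>> cls_scores, bbox_preds = self.forward(feats)
+        >>> assert len(cls_scores) == len(bbox_preds) == 6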
+ """ # noqa: W605 + + def __init__(self, + num_classes=80, + in_channels=(512, 1024, 512, 256, 256, 256), + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + strides=[8, 16, 32, 64, 100, 300], + ratios=([2], [2, 3], [2, 3], [2, 3], [2], [2]), + basesize_ratio_range=(0.1, 0.9)), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0], + ), + reg_decoded_bbox=False, + train_cfg=None, + test_cfg=None): + super(AnchorHead, self).__init__() + self.num_classes = num_classes + self.in_channels = in_channels + self.cls_out_channels = num_classes + 1 # add background class + self.anchor_generator = build_anchor_generator(anchor_generator) + num_anchors = self.anchor_generator.num_base_anchors + + reg_convs = [] + cls_convs = [] + for i in range(len(in_channels)): + reg_convs.append( + nn.Conv2d( + in_channels[i], + num_anchors[i] * 4, + kernel_size=3, + padding=1)) + cls_convs.append( + nn.Conv2d( + in_channels[i], + num_anchors[i] * (num_classes + 1), + kernel_size=3, + padding=1)) + self.reg_convs = nn.ModuleList(reg_convs) + self.cls_convs = nn.ModuleList(cls_convs) + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.reg_decoded_bbox = reg_decoded_bbox + self.use_sigmoid_cls = False + self.cls_focal_loss = False + self.train_cfg = train_cfg + self.test_cfg = test_cfg + # set sampling=False for archor_target + self.sampling = False + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # SSD sampling=False so use PseudoSampler + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.fp16_enabled = False + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform', bias=0) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: + cls_scores (list[Tensor]): Classification scores for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * 4. + """ + cls_scores = [] + bbox_preds = [] + for feat, reg_conv, cls_conv in zip(feats, self.reg_convs, + self.cls_convs): + cls_scores.append(cls_conv(feat)) + bbox_preds.append(reg_conv(feat)) + return cls_scores, bbox_preds + + def loss_single(self, cls_score, bbox_pred, anchor, labels, label_weights, + bbox_targets, bbox_weights, num_total_samples): + """Compute loss of a single image. + + Args: + cls_score (Tensor): Box scores for eachimage + Has shape (num_total_anchors, num_classes). + bbox_pred (Tensor): Box energies / deltas for each image + level with shape (num_total_anchors, 4). + anchors (Tensor): Box reference for each scale level with shape + (num_total_anchors, 4). + labels (Tensor): Labels of each anchors with shape + (num_total_anchors,). + label_weights (Tensor): Label weights of each anchor with shape + (num_total_anchors,) + bbox_targets (Tensor): BBox regression targets of each anchor wight + shape (num_total_anchors, 4). + bbox_weights (Tensor): BBox regression loss weights of each anchor + with shape (num_total_anchors, 4). 
+            num_total_samples (int): If sampling, the number of total samples
+                equals the total number of anchors; otherwise, it is the
+                number of positive anchors.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+        """
+
+        loss_cls_all = F.cross_entropy(
+            cls_score, labels, reduction='none') * label_weights
+        # FG cat_id: [0, num_classes -1], BG cat_id: num_classes
+        pos_inds = ((labels >= 0) &
+                    (labels < self.num_classes)).nonzero().reshape(-1)
+        neg_inds = (labels == self.num_classes).nonzero().view(-1)
+
+        num_pos_samples = pos_inds.size(0)
+        # hard negative mining: keep at most neg_pos_ratio negatives per
+        # positive, choosing the negatives with the largest cls loss
+        num_neg_samples = self.train_cfg.neg_pos_ratio * num_pos_samples
+        if num_neg_samples > neg_inds.size(0):
+            num_neg_samples = neg_inds.size(0)
+        topk_loss_cls_neg, _ = loss_cls_all[neg_inds].topk(num_neg_samples)
+        loss_cls_pos = loss_cls_all[pos_inds].sum()
+        loss_cls_neg = topk_loss_cls_neg.sum()
+        loss_cls = (loss_cls_pos + loss_cls_neg) / num_total_samples
+
+        if self.reg_decoded_bbox:
+            bbox_pred = self.bbox_coder.decode(anchor, bbox_pred)
+
+        loss_bbox = smooth_l1_loss(
+            bbox_pred,
+            bbox_targets,
+            bbox_weights,
+            beta=self.train_cfg.smoothl1_beta,
+            avg_factor=num_total_samples)
+        return loss_cls[None], loss_bbox
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level.
+                Has shape (N, num_anchors * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W).
+            gt_bboxes (list[Tensor]): each item is the ground truth boxes of
+                one image in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): class indices corresponding to each box
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): Specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=1, + unmap_outputs=False) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + + num_images = len(img_metas) + all_cls_scores = torch.cat([ + s.permute(0, 2, 3, 1).reshape( + num_images, -1, self.cls_out_channels) for s in cls_scores + ], 1) + all_labels = torch.cat(labels_list, -1).view(num_images, -1) + all_label_weights = torch.cat(label_weights_list, + -1).view(num_images, -1) + all_bbox_preds = torch.cat([ + b.permute(0, 2, 3, 1).reshape(num_images, -1, 4) + for b in bbox_preds + ], -2) + all_bbox_targets = torch.cat(bbox_targets_list, + -2).view(num_images, -1, 4) + all_bbox_weights = torch.cat(bbox_weights_list, + -2).view(num_images, -1, 4) + + # concat all level anchors to a single tensor + all_anchors = [] + for i in range(num_images): + all_anchors.append(torch.cat(anchor_list[i])) + + # check NaN and Inf + assert torch.isfinite(all_cls_scores).all().item(), \ + 'classification scores become infinite or NaN!' + assert torch.isfinite(all_bbox_preds).all().item(), \ + 'bbox predications become infinite or NaN!' + + losses_cls, losses_bbox = multi_apply( + self.loss_single, + all_cls_scores, + all_bbox_preds, + all_anchors, + all_labels, + all_label_weights, + all_bbox_targets, + all_bbox_weights, + num_total_samples=num_total_pos) + return dict(loss_cls=losses_cls, loss_bbox=losses_bbox) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/transformer_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/transformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..da3b035609bdd4a67612f38ba66205f6f8f65694 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/transformer_head.py @@ -0,0 +1,655 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Conv2d, Linear, build_activation_layer +from mmcv.runner import force_fp32 + +from mmdet.core import (bbox_cxcywh_to_xyxy, bbox_xyxy_to_cxcywh, + build_assigner, build_sampler, multi_apply, + reduce_mean) +from mmdet.models.utils import (FFN, build_positional_encoding, + build_transformer) +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + + +@HEADS.register_module() +class TransformerHead(AnchorFreeHead): + """Implements the DETR transformer head. + + See `paper: End-to-End Object Detection with Transformers + `_ for details. + + Args: + num_classes (int): Number of categories excluding the background. + in_channels (int): Number of channels in the input feature map. + num_fcs (int, optional): Number of fully-connected layers used in + `FFN`, which is then used for the regression head. Default 2. + transformer (dict, optional): Config for transformer. + positional_encoding (dict, optional): Config for position encoding. + loss_cls (dict, optional): Config of the classification loss. + Default `CrossEntropyLoss`. + loss_bbox (dict, optional): Config of the regression loss. + Default `L1Loss`. 
+ loss_iou (dict, optional): Config of the regression iou loss.
+ Default `GIoULoss`.
+ train_cfg (dict, optional): Training config of transformer head.
+ test_cfg (dict, optional): Testing config of transformer head.
+
+ Example:
+ >>> import torch
+ >>> self = TransformerHead(80, 2048)
+ >>> x = torch.rand(1, 2048, 32, 32)
+ >>> mask = torch.ones(1, 32, 32).to(x.dtype)
+ >>> mask[:, :16, :15] = 0
+ >>> all_cls_scores, all_bbox_preds = self(x, mask)
+ """
+
+ def __init__(self,
+ num_classes,
+ in_channels,
+ num_fcs=2,
+ transformer=dict(
+ type='Transformer',
+ embed_dims=256,
+ num_heads=8,
+ num_encoder_layers=6,
+ num_decoder_layers=6,
+ feedforward_channels=2048,
+ dropout=0.1,
+ act_cfg=dict(type='ReLU', inplace=True),
+ norm_cfg=dict(type='LN'),
+ num_fcs=2,
+ pre_norm=False,
+ return_intermediate_dec=True),
+ positional_encoding=dict(
+ type='SinePositionalEncoding',
+ num_feats=128,
+ normalize=True),
+ loss_cls=dict(
+ type='CrossEntropyLoss',
+ bg_cls_weight=0.1,
+ use_sigmoid=False,
+ loss_weight=1.0,
+ class_weight=1.0),
+ loss_bbox=dict(type='L1Loss', loss_weight=5.0),
+ loss_iou=dict(type='GIoULoss', loss_weight=2.0),
+ train_cfg=dict(
+ assigner=dict(
+ type='HungarianAssigner',
+ cls_weight=1.,
+ bbox_weight=5.,
+ iou_weight=2.,
+ iou_calculator=dict(type='BboxOverlaps2D'),
+ iou_mode='giou')),
+ test_cfg=dict(max_per_img=100),
+ **kwargs):
+ # NOTE `AnchorFreeHead` is passed to super() here instead of
+ # `TransformerHead`, since running `AnchorFreeHead.__init__`
+ # here would be inconvenient.
+ super(AnchorFreeHead, self).__init__()
+ use_sigmoid_cls = loss_cls.get('use_sigmoid', False)
+ assert not use_sigmoid_cls, 'setting use_sigmoid_cls as True is ' \
+ 'not supported in DETR, since background is needed for the ' \
+ 'matching process.'
+ assert 'embed_dims' in transformer \
+ and 'num_feats' in positional_encoding
+ num_feats = positional_encoding['num_feats']
+ embed_dims = transformer['embed_dims']
+ assert num_feats * 2 == embed_dims, 'embed_dims should' \
+ f' be exactly 2 times num_feats. Found {embed_dims}' \
+ f' and {num_feats}.'
+ assert test_cfg is not None and 'max_per_img' in test_cfg
+
+ class_weight = loss_cls.get('class_weight', None)
+ if class_weight is not None:
+ assert isinstance(class_weight, float), 'Expected ' \
+ 'class_weight to have type float. Found ' \
+ f'{type(class_weight)}.'
+ # NOTE following the official DETR repo, bg_cls_weight means
+ # relative classification weight of the no-object class.
+ bg_cls_weight = loss_cls.get('bg_cls_weight', class_weight)
+ assert isinstance(bg_cls_weight, float), 'Expected ' \
+ 'bg_cls_weight to have type float. Found ' \
+ f'{type(bg_cls_weight)}.'
+ class_weight = torch.ones(num_classes + 1) * class_weight
+ # set the background class as the last index
+ class_weight[num_classes] = bg_cls_weight
+ loss_cls.update({'class_weight': class_weight})
+ if 'bg_cls_weight' in loss_cls:
+ loss_cls.pop('bg_cls_weight')
+ self.bg_cls_weight = bg_cls_weight
+
+ if train_cfg:
+ assert 'assigner' in train_cfg, 'assigner should be provided '\
+ 'when train_cfg is set.'
+ assigner = train_cfg['assigner']
+ assert loss_cls['loss_weight'] == assigner['cls_weight'], \
+ 'The classification weight for loss and matcher should be ' \
+ 'exactly the same.'
+ assert loss_bbox['loss_weight'] == assigner['bbox_weight'], \
+ 'The regression L1 weight for loss and matcher should be ' \
+ 'exactly the same.'
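+ # Why the weights must match: the Hungarian matcher and the final
+ # losses should optimise the same objective. With the default config
+ # above, the matching cost of a (prediction, gt) pair is roughly
+ # cost = 1.0 * cls_cost + 5.0 * L1_cost + 2.0 * (-GIoU),
+ # mirroring the loss_cls / loss_bbox / loss_iou weights.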
+ assert loss_iou['loss_weight'] == assigner['iou_weight'], \
+ 'The regression iou weight for loss and matcher should be ' \
+ 'exactly the same.'
+ self.assigner = build_assigner(assigner)
+ # DETR sampling=False, so use PseudoSampler
+ sampler_cfg = dict(type='PseudoSampler')
+ self.sampler = build_sampler(sampler_cfg, context=self)
+ self.num_classes = num_classes
+ self.cls_out_channels = num_classes + 1
+ self.in_channels = in_channels
+ self.num_fcs = num_fcs
+ self.train_cfg = train_cfg
+ self.test_cfg = test_cfg
+ self.use_sigmoid_cls = use_sigmoid_cls
+ self.embed_dims = embed_dims
+ self.num_query = test_cfg['max_per_img']
+ self.fp16_enabled = False
+ self.loss_cls = build_loss(loss_cls)
+ self.loss_bbox = build_loss(loss_bbox)
+ self.loss_iou = build_loss(loss_iou)
+ self.act_cfg = transformer.get('act_cfg',
+ dict(type='ReLU', inplace=True))
+ self.activate = build_activation_layer(self.act_cfg)
+ self.positional_encoding = build_positional_encoding(
+ positional_encoding)
+ self.transformer = build_transformer(transformer)
+ self._init_layers()
+
+ def _init_layers(self):
+ """Initialize layers of the transformer head."""
+ self.input_proj = Conv2d(
+ self.in_channels, self.embed_dims, kernel_size=1)
+ self.fc_cls = Linear(self.embed_dims, self.cls_out_channels)
+ self.reg_ffn = FFN(
+ self.embed_dims,
+ self.embed_dims,
+ self.num_fcs,
+ self.act_cfg,
+ dropout=0.0,
+ add_residual=False)
+ self.fc_reg = Linear(self.embed_dims, 4)
+ self.query_embedding = nn.Embedding(self.num_query, self.embed_dims)
+
+ def init_weights(self, distribution='uniform'):
+ """Initialize weights of the transformer head."""
+ # The initialization for transformer is important
+ self.transformer.init_weights()
+
+ def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
+ missing_keys, unexpected_keys, error_msgs):
+ """Load checkpoints."""
+ # NOTE here use `AnchorFreeHead` instead of `TransformerHead`,
+ # since `AnchorFreeHead._load_from_state_dict` should not be
+ # called here. Invoking the default `Module._load_from_state_dict`
+ # is enough.
+ super(AnchorFreeHead,
+ self)._load_from_state_dict(state_dict, prefix, local_metadata,
+ strict, missing_keys,
+ unexpected_keys, error_msgs)
+
+ def forward(self, feats, img_metas):
+ """Forward function.
+
+ Args:
+ feats (tuple[Tensor]): Features from the upstream network, each is
+ a 4D-tensor.
+ img_metas (list[dict]): List of image information.
+
+ Returns:
+ tuple[list[Tensor], list[Tensor]]: Outputs for all scale levels.
+
+ - all_cls_scores_list (list[Tensor]): Classification scores \
+ for each scale level. Each is a 4D-tensor with shape \
+ [nb_dec, bs, num_query, cls_out_channels]. Note \
+ `cls_out_channels` should include background.
+ - all_bbox_preds_list (list[Tensor]): Sigmoid regression \
+ outputs for each scale level. Each is a 4D-tensor with \
+ normalized coordinate format (cx, cy, w, h) and shape \
+ [nb_dec, bs, num_query, 4].
+ """
+ num_levels = len(feats)
+ img_metas_list = [img_metas for _ in range(num_levels)]
+ return multi_apply(self.forward_single, feats, img_metas_list)
+
+ def forward_single(self, x, img_metas):
+ """Forward function for a single feature level.
+
+ Args:
+ x (Tensor): Input feature from backbone's single stage, shape
+ [bs, c, h, w].
+ img_metas (list[dict]): List of image information.
+
+ Returns:
+ all_cls_scores (Tensor): Outputs from the classification head,
+ shape [nb_dec, bs, num_query, cls_out_channels]. Note
+ cls_out_channels should include background.
+ all_bbox_preds (Tensor): Sigmoid outputs from the regression
+ head with normalized coordinate format (cx, cy, w, h).
+ Shape [nb_dec, bs, num_query, 4].
+ """
+ # construct binary masks which are used by the transformer.
+ # NOTE following the official DETR repo, non-zero values represent
+ # ignored positions, while zero values mean valid positions.
+ batch_size = x.size(0)
+ input_img_h, input_img_w = img_metas[0]['batch_input_shape']
+ masks = x.new_ones((batch_size, input_img_h, input_img_w))
+ for img_id in range(batch_size):
+ img_h, img_w, _ = img_metas[img_id]['img_shape']
+ masks[img_id, :img_h, :img_w] = 0
+
+ x = self.input_proj(x)
+ # interpolate masks to have the same spatial shape as x
+ masks = F.interpolate(
+ masks.unsqueeze(1), size=x.shape[-2:]).to(torch.bool).squeeze(1)
+ # position encoding
+ pos_embed = self.positional_encoding(masks) # [bs, embed_dim, h, w]
+ # outs_dec: [nb_dec, bs, num_query, embed_dim]
+ outs_dec, _ = self.transformer(x, masks, self.query_embedding.weight,
+ pos_embed)
+
+ all_cls_scores = self.fc_cls(outs_dec)
+ all_bbox_preds = self.fc_reg(self.activate(
+ self.reg_ffn(outs_dec))).sigmoid()
+ return all_cls_scores, all_bbox_preds
+
+ @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list'))
+ def loss(self,
+ all_cls_scores_list,
+ all_bbox_preds_list,
+ gt_bboxes_list,
+ gt_labels_list,
+ img_metas,
+ gt_bboxes_ignore=None):
+ """Loss function.
+
+ Only outputs from the last feature level are used for computing
+ losses by default.
+
+ Args:
+ all_cls_scores_list (list[Tensor]): Classification outputs
+ for each feature level. Each is a 4D-tensor with shape
+ [nb_dec, bs, num_query, cls_out_channels].
+ all_bbox_preds_list (list[Tensor]): Sigmoid regression
+ outputs for each feature level. Each is a 4D-tensor with
+ normalized coordinate format (cx, cy, w, h) and shape
+ [nb_dec, bs, num_query, 4].
+ gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image
+ with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels_list (list[Tensor]): Ground truth class indices for each
+ image with shape (num_gts, ).
+ img_metas (list[dict]): List of image meta information.
+ gt_bboxes_ignore (list[Tensor], optional): Bounding boxes
+ which can be ignored for each image. Default None.
+
+ Returns:
+ dict[str, Tensor]: A dictionary of loss components.
+ """
+ # NOTE by default only the outputs from the last feature scale are
+ # used.
+ all_cls_scores = all_cls_scores_list[-1]
+ all_bbox_preds = all_bbox_preds_list[-1]
+ assert gt_bboxes_ignore is None, \
+ 'Only supports gt_bboxes_ignore set to None.'
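+ # Deep supervision: DETR applies the same loss to the output of every
+ # decoder layer, so the ground truth lists are simply replicated once
+ # per decoder layer below. With the default num_decoder_layers=6 and
+ # return_intermediate_dec=True, loss_single runs six times; the last
+ # layer yields loss_*, the others the auxiliary d0.loss_* ... d4.loss_*.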
+
+ num_dec_layers = len(all_cls_scores)
+ all_gt_bboxes_list = [gt_bboxes_list for _ in range(num_dec_layers)]
+ all_gt_labels_list = [gt_labels_list for _ in range(num_dec_layers)]
+ all_gt_bboxes_ignore_list = [
+ gt_bboxes_ignore for _ in range(num_dec_layers)
+ ]
+ img_metas_list = [img_metas for _ in range(num_dec_layers)]
+
+ losses_cls, losses_bbox, losses_iou = multi_apply(
+ self.loss_single, all_cls_scores, all_bbox_preds,
+ all_gt_bboxes_list, all_gt_labels_list, img_metas_list,
+ all_gt_bboxes_ignore_list)
+
+ loss_dict = dict()
+ # loss from the last decoder layer
+ loss_dict['loss_cls'] = losses_cls[-1]
+ loss_dict['loss_bbox'] = losses_bbox[-1]
+ loss_dict['loss_iou'] = losses_iou[-1]
+ # loss from other decoder layers
+ num_dec_layer = 0
+ for loss_cls_i, loss_bbox_i, loss_iou_i in zip(losses_cls[:-1],
+ losses_bbox[:-1],
+ losses_iou[:-1]):
+ loss_dict[f'd{num_dec_layer}.loss_cls'] = loss_cls_i
+ loss_dict[f'd{num_dec_layer}.loss_bbox'] = loss_bbox_i
+ loss_dict[f'd{num_dec_layer}.loss_iou'] = loss_iou_i
+ num_dec_layer += 1
+ return loss_dict
+
+ def loss_single(self,
+ cls_scores,
+ bbox_preds,
+ gt_bboxes_list,
+ gt_labels_list,
+ img_metas,
+ gt_bboxes_ignore_list=None):
+ """Loss function for outputs from a single decoder layer of a single
+ feature level.
+
+ Args:
+ cls_scores (Tensor): Box score logits from a single decoder layer
+ for all images. Shape [bs, num_query, cls_out_channels].
+ bbox_preds (Tensor): Sigmoid outputs from a single decoder layer
+ for all images, with normalized coordinate (cx, cy, w, h) and
+ shape [bs, num_query, 4].
+ gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image
+ with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels_list (list[Tensor]): Ground truth class indices for each
+ image with shape (num_gts, ).
+ img_metas (list[dict]): List of image meta information.
+ gt_bboxes_ignore_list (list[Tensor], optional): Bounding
+ boxes which can be ignored for each image. Default None.
+
+ Returns:
+ dict[str, Tensor]: A dictionary of loss components for outputs from
+ a single decoder layer.
+ """ + num_imgs = cls_scores.size(0) + cls_scores_list = [cls_scores[i] for i in range(num_imgs)] + bbox_preds_list = [bbox_preds[i] for i in range(num_imgs)] + cls_reg_targets = self.get_targets(cls_scores_list, bbox_preds_list, + gt_bboxes_list, gt_labels_list, + img_metas, gt_bboxes_ignore_list) + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + labels = torch.cat(labels_list, 0) + label_weights = torch.cat(label_weights_list, 0) + bbox_targets = torch.cat(bbox_targets_list, 0) + bbox_weights = torch.cat(bbox_weights_list, 0) + + # classification loss + cls_scores = cls_scores.reshape(-1, self.cls_out_channels) + # construct weighted avg_factor to match with the official DETR repo + cls_avg_factor = num_total_pos * 1.0 + \ + num_total_neg * self.bg_cls_weight + loss_cls = self.loss_cls( + cls_scores, labels, label_weights, avg_factor=cls_avg_factor) + + # Compute the average number of gt boxes accross all gpus, for + # normalization purposes + num_total_pos = loss_cls.new_tensor([num_total_pos]) + num_total_pos = torch.clamp(reduce_mean(num_total_pos), min=1).item() + + # construct factors used for rescale bboxes + factors = [] + for img_meta, bbox_pred in zip(img_metas, bbox_preds): + img_h, img_w, _ = img_meta['img_shape'] + factor = bbox_pred.new_tensor([img_w, img_h, img_w, + img_h]).unsqueeze(0).repeat( + bbox_pred.size(0), 1) + factors.append(factor) + factors = torch.cat(factors, 0) + + # DETR regress the relative position of boxes (cxcywh) in the image, + # thus the learning target is normalized by the image size. So here + # we need to re-scale them for calculating IoU loss + bbox_preds = bbox_preds.reshape(-1, 4) + bboxes = bbox_cxcywh_to_xyxy(bbox_preds) * factors + bboxes_gt = bbox_cxcywh_to_xyxy(bbox_targets) * factors + + # regression IoU loss, defaultly GIoU loss + loss_iou = self.loss_iou( + bboxes, bboxes_gt, bbox_weights, avg_factor=num_total_pos) + + # regression L1 loss + loss_bbox = self.loss_bbox( + bbox_preds, bbox_targets, bbox_weights, avg_factor=num_total_pos) + return loss_cls, loss_bbox, loss_iou + + def get_targets(self, + cls_scores_list, + bbox_preds_list, + gt_bboxes_list, + gt_labels_list, + img_metas, + gt_bboxes_ignore_list=None): + """"Compute regression and classification targets for a batch image. + + Outputs from a single decoder layer of a single feature level are used. + + Args: + cls_scores_list (list[Tensor]): Box score logits from a single + decoder layer for each image with shape [num_query, + cls_out_channels]. + bbox_preds_list (list[Tensor]): Sigmoid outputs from a single + decoder layer for each image, with normalized coordinate + (cx, cy, w, h) and shape [num_query, 4]. + gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image + with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels_list (list[Tensor]): Ground truth class indices for each + image with shape (num_gts, ). + img_metas (list[dict]): List of image meta information. + gt_bboxes_ignore_list (list[Tensor], optional): Bounding + boxes which can be ignored for each image. Default None. + + Returns: + tuple: a tuple containing the following targets. + + - labels_list (list[Tensor]): Labels for all images. + - label_weights_list (list[Tensor]): Label weights for all \ + images. + - bbox_targets_list (list[Tensor]): BBox targets for all \ + images. + - bbox_weights_list (list[Tensor]): BBox weights for all \ + images. 
+ - num_total_pos (int): Number of positive samples in all \
+ images.
+ - num_total_neg (int): Number of negative samples in all \
+ images.
+ """
+ assert gt_bboxes_ignore_list is None, \
+ 'Only supports gt_bboxes_ignore set to None.'
+ num_imgs = len(cls_scores_list)
+ gt_bboxes_ignore_list = [
+ gt_bboxes_ignore_list for _ in range(num_imgs)
+ ]
+
+ (labels_list, label_weights_list, bbox_targets_list,
+ bbox_weights_list, pos_inds_list, neg_inds_list) = multi_apply(
+ self._get_target_single, cls_scores_list, bbox_preds_list,
+ gt_bboxes_list, gt_labels_list, img_metas, gt_bboxes_ignore_list)
+ num_total_pos = sum((inds.numel() for inds in pos_inds_list))
+ num_total_neg = sum((inds.numel() for inds in neg_inds_list))
+ return (labels_list, label_weights_list, bbox_targets_list,
+ bbox_weights_list, num_total_pos, num_total_neg)
+
+ def _get_target_single(self,
+ cls_score,
+ bbox_pred,
+ gt_bboxes,
+ gt_labels,
+ img_meta,
+ gt_bboxes_ignore=None):
+ """Compute regression and classification targets for one image.
+
+ Outputs from a single decoder layer of a single feature level are used.
+
+ Args:
+ cls_score (Tensor): Box score logits from a single decoder layer
+ for one image. Shape [num_query, cls_out_channels].
+ bbox_pred (Tensor): Sigmoid outputs from a single decoder layer
+ for one image, with normalized coordinate (cx, cy, w, h) and
+ shape [num_query, 4].
+ gt_bboxes (Tensor): Ground truth bboxes for one image with
+ shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels (Tensor): Ground truth class indices for one image
+ with shape (num_gts, ).
+ img_meta (dict): Meta information for one image.
+ gt_bboxes_ignore (Tensor, optional): Bounding boxes
+ which can be ignored. Default None.
+
+ Returns:
+ tuple[Tensor]: a tuple containing the following for one image.
+
+ - labels (Tensor): Labels of each image.
+ - label_weights (Tensor): Label weights of each image.
+ - bbox_targets (Tensor): BBox targets of each image.
+ - bbox_weights (Tensor): BBox weights of each image.
+ - pos_inds (Tensor): Sampled positive indices for each image.
+ - neg_inds (Tensor): Sampled negative indices for each image.
+ """
+
+ num_bboxes = bbox_pred.size(0)
+ # assigner and sampler
+ assign_result = self.assigner.assign(bbox_pred, cls_score, gt_bboxes,
+ gt_labels, img_meta,
+ gt_bboxes_ignore)
+ sampling_result = self.sampler.sample(assign_result, bbox_pred,
+ gt_bboxes)
+ pos_inds = sampling_result.pos_inds
+ neg_inds = sampling_result.neg_inds
+
+ # label targets
+ labels = gt_bboxes.new_full((num_bboxes, ),
+ self.num_classes,
+ dtype=torch.long)
+ labels[pos_inds] = gt_labels[sampling_result.pos_assigned_gt_inds]
+ label_weights = gt_bboxes.new_ones(num_bboxes)
+
+ # bbox targets
+ bbox_targets = torch.zeros_like(bbox_pred)
+ bbox_weights = torch.zeros_like(bbox_pred)
+ bbox_weights[pos_inds] = 1.0
+ img_h, img_w, _ = img_meta['img_shape']
+
+ # DETR regresses the relative positions of boxes (cxcywh) in the image.
+ # Thus the learning targets should be normalized by the image size, and
+ # the box format should be converted from the default x1y1x2y2 to
+ # cxcywh.
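+ # For example (illustrative numbers only): a gt box
+ # (x1, y1, x2, y2) = (100, 50, 300, 250) in an 800x600 image is first
+ # divided by the factor (800, 600, 800, 600) and then converted,
+ # giving the normalized target cxcywh = (0.25, 0.25, 0.25, 0.333).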
+ factor = bbox_pred.new_tensor([img_w, img_h, img_w,
+ img_h]).unsqueeze(0)
+ pos_gt_bboxes_normalized = sampling_result.pos_gt_bboxes / factor
+ pos_gt_bboxes_targets = bbox_xyxy_to_cxcywh(pos_gt_bboxes_normalized)
+ bbox_targets[pos_inds] = pos_gt_bboxes_targets
+ return (labels, label_weights, bbox_targets, bbox_weights, pos_inds,
+ neg_inds)
+
+ # overridden because img_metas are needed as inputs for bbox_head.
+ def forward_train(self,
+ x,
+ img_metas,
+ gt_bboxes,
+ gt_labels=None,
+ gt_bboxes_ignore=None,
+ proposal_cfg=None,
+ **kwargs):
+ """Forward function for training mode.
+
+ Args:
+ x (list[Tensor]): Features from backbone.
+ img_metas (list[dict]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+ gt_bboxes (Tensor): Ground truth bboxes of the image,
+ shape (num_gts, 4).
+ gt_labels (Tensor): Ground truth labels of each box,
+ shape (num_gts,).
+ gt_bboxes_ignore (Tensor): Ground truth bboxes to be
+ ignored, shape (num_ignored_gts, 4).
+ proposal_cfg (mmcv.Config): Test / postprocessing configuration,
+ if None, test_cfg would be used.
+
+ Returns:
+ dict[str, Tensor]: A dictionary of loss components.
+ """
+ assert proposal_cfg is None, '"proposal_cfg" must be None'
+ outs = self(x, img_metas)
+ if gt_labels is None:
+ loss_inputs = outs + (gt_bboxes, img_metas)
+ else:
+ loss_inputs = outs + (gt_bboxes, gt_labels, img_metas)
+ losses = self.loss(*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
+ return losses
+
+ @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list'))
+ def get_bboxes(self,
+ all_cls_scores_list,
+ all_bbox_preds_list,
+ img_metas,
+ rescale=False):
+ """Transform network outputs for a batch into bbox predictions.
+
+ Args:
+ all_cls_scores_list (list[Tensor]): Classification outputs
+ for each feature level. Each is a 4D-tensor with shape
+ [nb_dec, bs, num_query, cls_out_channels].
+ all_bbox_preds_list (list[Tensor]): Sigmoid regression
+ outputs for each feature level. Each is a 4D-tensor with
+ normalized coordinate format (cx, cy, w, h) and shape
+ [nb_dec, bs, num_query, 4].
+ img_metas (list[dict]): Meta information of each image.
+ rescale (bool, optional): If True, return boxes in original
+ image space. Default False.
+
+ Returns:
+ list[list[Tensor, Tensor]]: Each item in result_list is 2-tuple. \
+ The first item is an (n, 5) tensor, where the first 4 columns \
+ are bounding box positions (tl_x, tl_y, br_x, br_y) and the \
+ 5-th column is a score between 0 and 1. The second item is a \
+ (n,) tensor where each item is the predicted class label of \
+ the corresponding box.
+ """
+ # NOTE by default only outputs from the last feature level are used,
+ # and only the outputs from the last decoder layer are used.
+ cls_scores = all_cls_scores_list[-1][-1]
+ bbox_preds = all_bbox_preds_list[-1][-1]
+
+ result_list = []
+ for img_id in range(len(img_metas)):
+ cls_score = cls_scores[img_id]
+ bbox_pred = bbox_preds[img_id]
+ img_shape = img_metas[img_id]['img_shape']
+ scale_factor = img_metas[img_id]['scale_factor']
+ proposals = self._get_bboxes_single(cls_score, bbox_pred,
+ img_shape, scale_factor,
+ rescale)
+ result_list.append(proposals)
+ return result_list
+
+ def _get_bboxes_single(self,
+ cls_score,
+ bbox_pred,
+ img_shape,
+ scale_factor,
+ rescale=False):
+ """Transform outputs from the last decoder layer into bbox predictions
+ for each image.
+
+ Args:
+ cls_score (Tensor): Box score logits from the last decoder layer
+ for each image. Shape [num_query, cls_out_channels].
+ bbox_pred (Tensor): Sigmoid outputs from the last decoder layer
+ for each image, with coordinate format (cx, cy, w, h) and
+ shape [num_query, 4].
+ img_shape (tuple[int]): Shape of input image, (height, width, 3).
+ scale_factor (ndarray, optional): Scale factor of the image
+ arranged as (w_scale, h_scale, w_scale, h_scale).
+ rescale (bool, optional): If True, return boxes in original image
+ space. Default False.
+
+ Returns:
+ tuple[Tensor]: Results of detected bboxes and labels.
+
+ - det_bboxes: Predicted bboxes with shape [num_query, 5], \
+ where the first 4 columns are bounding box positions \
+ (tl_x, tl_y, br_x, br_y) and the 5-th column contains \
+ scores between 0 and 1.
+ - det_labels: Predicted labels of the corresponding box with \
+ shape [num_query].
+ """
+ assert len(cls_score) == len(bbox_pred)
+ # exclude background
+ scores, det_labels = F.softmax(cls_score, dim=-1)[..., :-1].max(-1)
+ det_bboxes = bbox_cxcywh_to_xyxy(bbox_pred)
+ det_bboxes[:, 0::2] = det_bboxes[:, 0::2] * img_shape[1]
+ det_bboxes[:, 1::2] = det_bboxes[:, 1::2] * img_shape[0]
+ det_bboxes[:, 0::2].clamp_(min=0, max=img_shape[1])
+ det_bboxes[:, 1::2].clamp_(min=0, max=img_shape[0])
+ if rescale:
+ det_bboxes /= det_bboxes.new_tensor(scale_factor)
+ det_bboxes = torch.cat((det_bboxes, scores.unsqueeze(1)), -1)
+ return det_bboxes, det_labels
diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/vfnet_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/vfnet_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..7243bb62893839568ec51928d88a5ad40b02a66c
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/dense_heads/vfnet_head.py
@@ -0,0 +1,794 @@
+import numpy as np
+import torch
+import torch.nn as nn
+from mmcv.cnn import ConvModule, Scale, bias_init_with_prob, normal_init
+from mmcv.ops import DeformConv2d
+from mmcv.runner import force_fp32
+
+from mmdet.core import (bbox2distance, bbox_overlaps, build_anchor_generator,
+ build_assigner, build_sampler, distance2bbox,
+ multi_apply, multiclass_nms, reduce_mean)
+from ..builder import HEADS, build_loss
+from .atss_head import ATSSHead
+from .fcos_head import FCOSHead
+
+INF = 1e8
+
+
+@HEADS.register_module()
+class VFNetHead(ATSSHead, FCOSHead):
+ """Head of `VarifocalNet (VFNet): An IoU-aware Dense Object
+ Detector. <https://arxiv.org/abs/2008.13367>`_.
+
+ The VFNet predicts IoU-aware classification scores which mix the
+ object presence confidence and object localization accuracy as the
+ detection score. It is built on the FCOS architecture and uses ATSS
+ for defining positive/negative training examples. The VFNet is trained
+ with Varifocal Loss and employs star-shaped deformable convolution to
+ extract features for a bbox.
+
+ Args:
+ num_classes (int): Number of categories excluding the background
+ category.
+ in_channels (int): Number of channels in the input feature map.
+ regress_ranges (tuple[tuple[int, int]]): Regress range of multiple
+ level points.
+ center_sampling (bool): If true, use center sampling. Default: False.
+ center_sample_radius (float): Radius of center sampling. Default: 1.5.
+ sync_num_pos (bool): If true, synchronize the number of positive
+ examples across GPUs. Default: True.
+ gradient_mul (float): The multiplier to gradients from bbox refinement
+ and recognition. Default: 0.1.
+ bbox_norm_type (str): The bbox normalization type, 'reg_denom' or
+ 'stride'. Default: reg_denom
+ loss_cls_fl (dict): Config of focal loss.
+ use_vfl (bool): If true, use varifocal loss for training.
+ Default: True.
+ loss_cls (dict): Config of varifocal loss.
+ loss_bbox (dict): Config of localization loss, GIoU Loss.
+ loss_bbox_refine (dict): Config of localization refinement loss,
+ GIoU Loss.
+ norm_cfg (dict): Dictionary to construct and config norm layer.
+ Default: norm_cfg=dict(type='GN', num_groups=32,
+ requires_grad=True).
+ use_atss (bool): If true, use ATSS to define positive/negative
+ examples. Default: True.
+ anchor_generator (dict): Config of anchor generator for ATSS.
+
+ Example:
+ >>> self = VFNetHead(11, 7)
+ >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]]
+ >>> cls_score, bbox_pred, bbox_pred_refine = self.forward(feats)
+ >>> assert len(cls_score) == len(self.scales)
+ """ # noqa: E501
+
+ def __init__(self,
+ num_classes,
+ in_channels,
+ regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512),
+ (512, INF)),
+ center_sampling=False,
+ center_sample_radius=1.5,
+ sync_num_pos=True,
+ gradient_mul=0.1,
+ bbox_norm_type='reg_denom',
+ loss_cls_fl=dict(
+ type='FocalLoss',
+ use_sigmoid=True,
+ gamma=2.0,
+ alpha=0.25,
+ loss_weight=1.0),
+ use_vfl=True,
+ loss_cls=dict(
+ type='VarifocalLoss',
+ use_sigmoid=True,
+ alpha=0.75,
+ gamma=2.0,
+ iou_weighted=True,
+ loss_weight=1.0),
+ loss_bbox=dict(type='GIoULoss', loss_weight=1.5),
+ loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0),
+ norm_cfg=dict(type='GN', num_groups=32, requires_grad=True),
+ use_atss=True,
+ anchor_generator=dict(
+ type='AnchorGenerator',
+ ratios=[1.0],
+ octave_base_scale=8,
+ scales_per_octave=1,
+ center_offset=0.0,
+ strides=[8, 16, 32, 64, 128]),
+ **kwargs):
+ # dcn base offsets, adapted from reppoints_head.py
+ self.num_dconv_points = 9
+ self.dcn_kernel = int(np.sqrt(self.num_dconv_points))
+ self.dcn_pad = int((self.dcn_kernel - 1) / 2)
+ dcn_base = np.arange(-self.dcn_pad,
+ self.dcn_pad + 1).astype(np.float64)
+ dcn_base_y = np.repeat(dcn_base, self.dcn_kernel)
+ dcn_base_x = np.tile(dcn_base, self.dcn_kernel)
+ dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape(
+ (-1))
+ self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1)
+
+ super(FCOSHead, self).__init__(
+ num_classes, in_channels, norm_cfg=norm_cfg, **kwargs)
+ self.regress_ranges = regress_ranges
+ self.reg_denoms = [
+ regress_range[-1] for regress_range in regress_ranges
+ ]
+ self.reg_denoms[-1] = self.reg_denoms[-2] * 2
+ self.center_sampling = center_sampling
+ self.center_sample_radius = center_sample_radius
+ self.sync_num_pos = sync_num_pos
+ self.bbox_norm_type = bbox_norm_type
+ self.gradient_mul = gradient_mul
+ self.use_vfl = use_vfl
+ if self.use_vfl:
+ self.loss_cls = build_loss(loss_cls)
+ else:
+ self.loss_cls = build_loss(loss_cls_fl)
+ self.loss_bbox = build_loss(loss_bbox)
+ self.loss_bbox_refine = build_loss(loss_bbox_refine)
+
+ # for getting ATSS targets
+ self.use_atss = use_atss
+ self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False)
+ self.anchor_generator = build_anchor_generator(anchor_generator)
+ self.anchor_center_offset = anchor_generator['center_offset']
+ self.num_anchors = self.anchor_generator.num_base_anchors[0]
+ self.sampling = False
+ if self.train_cfg:
+ self.assigner = build_assigner(self.train_cfg.assigner)
+ sampler_cfg = dict(type='PseudoSampler')
+ self.sampler = build_sampler(sampler_cfg, context=self)
+
+ def _init_layers(self):
+ """Initialize layers of the head."""
+ super(FCOSHead, self)._init_cls_convs()
+ super(FCOSHead, self)._init_reg_convs()
+ self.relu = nn.ReLU(inplace=True)
+ self.vfnet_reg_conv = ConvModule(
+
self.feat_channels, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias) + self.vfnet_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + self.vfnet_reg_refine_dconv = DeformConv2d( + self.feat_channels, + self.feat_channels, + self.dcn_kernel, + 1, + padding=self.dcn_pad) + self.vfnet_reg_refine = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.scales_refine = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + self.vfnet_cls_dconv = DeformConv2d( + self.feat_channels, + self.feat_channels, + self.dcn_kernel, + 1, + padding=self.dcn_pad) + self.vfnet_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.cls_convs: + if isinstance(m.conv, nn.Conv2d): + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + if isinstance(m.conv, nn.Conv2d): + normal_init(m.conv, std=0.01) + normal_init(self.vfnet_reg_conv.conv, std=0.01) + normal_init(self.vfnet_reg, std=0.01) + normal_init(self.vfnet_reg_refine_dconv, std=0.01) + normal_init(self.vfnet_reg_refine, std=0.01) + normal_init(self.vfnet_cls_dconv, std=0.01) + bias_cls = bias_init_with_prob(0.01) + normal_init(self.vfnet_cls, std=0.01, bias=bias_cls) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level, each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box offsets for each + scale level, each is a 4D-tensor, the channel number is + num_points * 4. + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level, each is a 4D-tensor, the channel + number is num_points * 4. + """ + return multi_apply(self.forward_single, feats, self.scales, + self.scales_refine, self.strides, self.reg_denoms) + + def forward_single(self, x, scale, scale_refine, stride, reg_denom): + """Forward features of a single scale level. + + Args: + x (Tensor): FPN feature maps of the specified stride. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + scale_refine (:obj: `mmcv.cnn.Scale`): Learnable scale module to + resize the refined bbox prediction. + stride (int): The corresponding stride for feature maps, + used to normalize the bbox prediction when + bbox_norm_type = 'stride'. + reg_denom (int): The corresponding regression range for feature + maps, only used to normalize the bbox prediction when + bbox_norm_type = 'reg_denom'. + + Returns: + tuple: iou-aware cls scores for each box, bbox predictions and + refined bbox predictions of input feature maps. 
+ """ + cls_feat = x + reg_feat = x + + for cls_layer in self.cls_convs: + cls_feat = cls_layer(cls_feat) + + for reg_layer in self.reg_convs: + reg_feat = reg_layer(reg_feat) + + # predict the bbox_pred of different level + reg_feat_init = self.vfnet_reg_conv(reg_feat) + if self.bbox_norm_type == 'reg_denom': + bbox_pred = scale( + self.vfnet_reg(reg_feat_init)).float().exp() * reg_denom + elif self.bbox_norm_type == 'stride': + bbox_pred = scale( + self.vfnet_reg(reg_feat_init)).float().exp() * stride + else: + raise NotImplementedError + + # compute star deformable convolution offsets + # converting dcn_offset to reg_feat.dtype thus VFNet can be + # trained with FP16 + dcn_offset = self.star_dcn_offset(bbox_pred, self.gradient_mul, + stride).to(reg_feat.dtype) + + # refine the bbox_pred + reg_feat = self.relu(self.vfnet_reg_refine_dconv(reg_feat, dcn_offset)) + bbox_pred_refine = scale_refine( + self.vfnet_reg_refine(reg_feat)).float().exp() + bbox_pred_refine = bbox_pred_refine * bbox_pred.detach() + + # predict the iou-aware cls score + cls_feat = self.relu(self.vfnet_cls_dconv(cls_feat, dcn_offset)) + cls_score = self.vfnet_cls(cls_feat) + + return cls_score, bbox_pred, bbox_pred_refine + + def star_dcn_offset(self, bbox_pred, gradient_mul, stride): + """Compute the star deformable conv offsets. + + Args: + bbox_pred (Tensor): Predicted bbox distance offsets (l, r, t, b). + gradient_mul (float): Gradient multiplier. + stride (int): The corresponding stride for feature maps, + used to project the bbox onto the feature map. + + Returns: + dcn_offsets (Tensor): The offsets for deformable convolution. + """ + dcn_base_offset = self.dcn_base_offset.type_as(bbox_pred) + bbox_pred_grad_mul = (1 - gradient_mul) * bbox_pred.detach() + \ + gradient_mul * bbox_pred + # map to the feature map scale + bbox_pred_grad_mul = bbox_pred_grad_mul / stride + N, C, H, W = bbox_pred.size() + + x1 = bbox_pred_grad_mul[:, 0, :, :] + y1 = bbox_pred_grad_mul[:, 1, :, :] + x2 = bbox_pred_grad_mul[:, 2, :, :] + y2 = bbox_pred_grad_mul[:, 3, :, :] + bbox_pred_grad_mul_offset = bbox_pred.new_zeros( + N, 2 * self.num_dconv_points, H, W) + bbox_pred_grad_mul_offset[:, 0, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 1, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 2, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 4, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 5, :, :] = x2 # x2 + bbox_pred_grad_mul_offset[:, 7, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 11, :, :] = x2 # x2 + bbox_pred_grad_mul_offset[:, 12, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 13, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 14, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 16, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 17, :, :] = x2 # x2 + dcn_offset = bbox_pred_grad_mul_offset - dcn_base_offset + + return dcn_offset + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine')) + def loss(self, + cls_scores, + bbox_preds, + bbox_preds_refine, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level, each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box offsets for each + scale level, each is a 4D-tensor, the channel number is + num_points * 4. + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level, each is a 4D-tensor, the channel + number is num_points * 4. 
+ gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + Default: None. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + labels, label_weights, bbox_targets, bbox_weights = self.get_targets( + cls_scores, all_level_points, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + + num_imgs = cls_scores[0].size(0) + # flatten cls_scores, bbox_preds and bbox_preds_refine + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, + 1).reshape(-1, + self.cls_out_channels).contiguous() + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4).contiguous() + for bbox_pred in bbox_preds + ] + flatten_bbox_preds_refine = [ + bbox_pred_refine.permute(0, 2, 3, 1).reshape(-1, 4).contiguous() + for bbox_pred_refine in bbox_preds_refine + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_bbox_preds_refine = torch.cat(flatten_bbox_preds_refine) + flatten_labels = torch.cat(labels) + flatten_bbox_targets = torch.cat(bbox_targets) + # repeat points to align with bbox_preds + flatten_points = torch.cat( + [points.repeat(num_imgs, 1) for points in all_level_points]) + + # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = torch.where( + ((flatten_labels >= 0) & (flatten_labels < bg_class_ind)) > 0)[0] + num_pos = len(pos_inds) + + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_bbox_preds_refine = flatten_bbox_preds_refine[pos_inds] + pos_labels = flatten_labels[pos_inds] + + # sync num_pos across all gpus + if self.sync_num_pos: + num_pos_avg_per_gpu = reduce_mean( + pos_inds.new_tensor(num_pos).float()).item() + num_pos_avg_per_gpu = max(num_pos_avg_per_gpu, 1.0) + else: + num_pos_avg_per_gpu = num_pos + + if num_pos > 0: + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_points = flatten_points[pos_inds] + + pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds) + pos_decoded_target_preds = distance2bbox(pos_points, + pos_bbox_targets) + iou_targets_ini = bbox_overlaps( + pos_decoded_bbox_preds, + pos_decoded_target_preds.detach(), + is_aligned=True).clamp(min=1e-6) + bbox_weights_ini = iou_targets_ini.clone().detach() + iou_targets_ini_avg_per_gpu = reduce_mean( + bbox_weights_ini.sum()).item() + bbox_avg_factor_ini = max(iou_targets_ini_avg_per_gpu, 1.0) + loss_bbox = self.loss_bbox( + pos_decoded_bbox_preds, + pos_decoded_target_preds.detach(), + weight=bbox_weights_ini, + avg_factor=bbox_avg_factor_ini) + + pos_decoded_bbox_preds_refine = \ + distance2bbox(pos_points, pos_bbox_preds_refine) + iou_targets_rf = bbox_overlaps( + pos_decoded_bbox_preds_refine, + pos_decoded_target_preds.detach(), + is_aligned=True).clamp(min=1e-6) + bbox_weights_rf = iou_targets_rf.clone().detach() + iou_targets_rf_avg_per_gpu = reduce_mean( + bbox_weights_rf.sum()).item() + bbox_avg_factor_rf = max(iou_targets_rf_avg_per_gpu, 1.0) + 
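+ # The refine loss below is an IoU-weighted mean: each positive box is
+ # weighted by the IoU between its refined prediction and the target,
+ # and the sum of those weights (averaged across GPUs by reduce_mean)
+ # serves as the normaliser, so better-localised boxes contribute
+ # proportionally more.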
loss_bbox_refine = self.loss_bbox_refine( + pos_decoded_bbox_preds_refine, + pos_decoded_target_preds.detach(), + weight=bbox_weights_rf, + avg_factor=bbox_avg_factor_rf) + + # build IoU-aware cls_score targets + if self.use_vfl: + pos_ious = iou_targets_rf.clone().detach() + cls_iou_targets = torch.zeros_like(flatten_cls_scores) + cls_iou_targets[pos_inds, pos_labels] = pos_ious + else: + loss_bbox = pos_bbox_preds.sum() * 0 + loss_bbox_refine = pos_bbox_preds_refine.sum() * 0 + if self.use_vfl: + cls_iou_targets = torch.zeros_like(flatten_cls_scores) + + if self.use_vfl: + loss_cls = self.loss_cls( + flatten_cls_scores, + cls_iou_targets, + avg_factor=num_pos_avg_per_gpu) + else: + loss_cls = self.loss_cls( + flatten_cls_scores, + flatten_labels, + weight=label_weights, + avg_factor=num_pos_avg_per_gpu) + + return dict( + loss_cls=loss_cls, + loss_bbox=loss_bbox, + loss_bbox_rf=loss_bbox_refine) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine')) + def get_bboxes(self, + cls_scores, + bbox_preds, + bbox_preds_refine, + img_metas, + cfg=None, + rescale=None, + with_nms=True): + """Transform network outputs for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level with shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box offsets for each scale + level with shape (N, num_points * 4, H, W). + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level with shape (N, num_points * 4, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before returning boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. The second item is a + (n,) tensor where each item is the predicted class label of + the corresponding box. + """ + assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine) + num_levels = len(cls_scores) + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + mlvl_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds_refine[i][img_id].detach() + for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + det_bboxes = self._get_bboxes_single(cls_score_list, + bbox_pred_list, mlvl_points, + img_shape, scale_factor, cfg, + rescale, with_nms) + result_list.append(det_bboxes) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_points, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for a single scale + level with shape (num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box offsets for a single scale + level with shape (num_points * 4, H, W). 
+ mlvl_points (list[Tensor]): Box reference for a single scale level
+ with shape (num_total_points, 4).
+ img_shape (tuple[int]): Shape of the input image,
+ (height, width, 3).
+ scale_factor (ndarray): Scale factor of the image arranged as
+ (w_scale, h_scale, w_scale, h_scale).
+ cfg (mmcv.Config | None): Test / postprocessing configuration,
+ if None, test_cfg would be used.
+ rescale (bool): If True, return boxes in original image space.
+ Default: False.
+ with_nms (bool): If True, do nms before returning boxes.
+ Default: True.
+
+ Returns:
+ tuple(Tensor):
+ det_bboxes (Tensor): BBox predictions in shape (n, 5), where
+ the first 4 columns are bounding box positions
+ (tl_x, tl_y, br_x, br_y) and the 5-th column is a score
+ between 0 and 1.
+ det_labels (Tensor): A (n,) tensor where each item is the
+ predicted class label of the corresponding box.
+ """
+ cfg = self.test_cfg if cfg is None else cfg
+ assert len(cls_scores) == len(bbox_preds) == len(mlvl_points)
+ mlvl_bboxes = []
+ mlvl_scores = []
+ for cls_score, bbox_pred, points in zip(cls_scores, bbox_preds,
+ mlvl_points):
+ assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
+ scores = cls_score.permute(1, 2, 0).reshape(
+ -1, self.cls_out_channels).contiguous().sigmoid()
+ bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4).contiguous()
+
+ nms_pre = cfg.get('nms_pre', -1)
+ if 0 < nms_pre < scores.shape[0]:
+ max_scores, _ = scores.max(dim=1)
+ _, topk_inds = max_scores.topk(nms_pre)
+ points = points[topk_inds, :]
+ bbox_pred = bbox_pred[topk_inds, :]
+ scores = scores[topk_inds, :]
+ bboxes = distance2bbox(points, bbox_pred, max_shape=img_shape)
+ mlvl_bboxes.append(bboxes)
+ mlvl_scores.append(scores)
+ mlvl_bboxes = torch.cat(mlvl_bboxes)
+ if rescale:
+ mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor)
+ mlvl_scores = torch.cat(mlvl_scores)
+ padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1)
+ # note that we set FG labels to [0, num_class-1] since mmdet v2.0
+ # BG cat_id: num_class
+ mlvl_scores = torch.cat([mlvl_scores, padding], dim=1)
+ if with_nms:
+ det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores,
+ cfg.score_thr, cfg.nms,
+ cfg.max_per_img)
+ return det_bboxes, det_labels
+ else:
+ return mlvl_bboxes, mlvl_scores
+
+ def _get_points_single(self,
+ featmap_size,
+ stride,
+ dtype,
+ device,
+ flatten=False):
+ """Get points according to feature map sizes."""
+ h, w = featmap_size
+ x_range = torch.arange(
+ 0, w * stride, stride, dtype=dtype, device=device)
+ y_range = torch.arange(
+ 0, h * stride, stride, dtype=dtype, device=device)
+ y, x = torch.meshgrid(y_range, x_range)
+ # to be compatible with anchor points in ATSS
+ if self.use_atss:
+ points = torch.stack(
+ (x.reshape(-1), y.reshape(-1)), dim=-1) + \
+ stride * self.anchor_center_offset
+ else:
+ points = torch.stack(
+ (x.reshape(-1), y.reshape(-1)), dim=-1) + stride // 2
+ return points
+
+ def get_targets(self, cls_scores, mlvl_points, gt_bboxes, gt_labels,
+ img_metas, gt_bboxes_ignore):
+ """A wrapper for computing ATSS and FCOS targets for points in multiple
+ images.
+
+ Args:
+ cls_scores (list[Tensor]): Box iou-aware scores for each scale
+ level with shape (N, num_points * num_classes, H, W).
+ mlvl_points (list[Tensor]): Points of each fpn level, each has
+ shape (num_points, 2).
+ gt_bboxes (list[Tensor]): Ground truth bboxes of each image,
+ each has shape (num_gt, 4).
+ gt_labels (list[Tensor]): Ground truth labels of each box,
+ each has shape (num_gt,).
+ img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level. + label_weights (Tensor/None): Label weights of all levels. + bbox_targets_list (list[Tensor]): Regression targets of each + level, (l, t, r, b). + bbox_weights (Tensor/None): Bbox weights of all levels. + """ + if self.use_atss: + return self.get_atss_targets(cls_scores, mlvl_points, gt_bboxes, + gt_labels, img_metas, + gt_bboxes_ignore) + else: + self.norm_on_bbox = False + return self.get_fcos_targets(mlvl_points, gt_bboxes, gt_labels) + + def _get_target_single(self, *args, **kwargs): + """Avoid ambiguity in multiple inheritance.""" + if self.use_atss: + return ATSSHead._get_target_single(self, *args, **kwargs) + else: + return FCOSHead._get_target_single(self, *args, **kwargs) + + def get_fcos_targets(self, points, gt_bboxes_list, gt_labels_list): + """Compute FCOS regression and classification targets for points in + multiple images. + + Args: + points (list[Tensor]): Points of each fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels_list (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + + Returns: + tuple: + labels (list[Tensor]): Labels of each level. + label_weights: None, to be compatible with ATSS targets. + bbox_targets (list[Tensor]): BBox targets of each level. + bbox_weights: None, to be compatible with ATSS targets. + """ + labels, bbox_targets = FCOSHead.get_targets(self, points, + gt_bboxes_list, + gt_labels_list) + label_weights = None + bbox_weights = None + return labels, label_weights, bbox_targets, bbox_weights + + def get_atss_targets(self, + cls_scores, + mlvl_points, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """A wrapper for computing ATSS targets for points in multiple images. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level with shape (N, num_points * num_classes, H, W). + mlvl_points (list[Tensor]): Points of each fpn level, each has + shape (num_points, 2). + gt_bboxes (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). Default: None. + + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level. + label_weights (Tensor): Label weights of all levels. + bbox_targets_list (list[Tensor]): Regression targets of each + level, (l, t, r, b). + bbox_weights (Tensor): Bbox weights of all levels. 
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = ATSSHead.get_targets( + self, + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + unmap_outputs=True) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + bbox_targets_list = [ + bbox_targets.reshape(-1, 4) for bbox_targets in bbox_targets_list + ] + + num_imgs = len(img_metas) + # transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format + bbox_targets_list = self.transform_bbox_targets( + bbox_targets_list, mlvl_points, num_imgs) + + labels_list = [labels.reshape(-1) for labels in labels_list] + label_weights_list = [ + label_weights.reshape(-1) for label_weights in label_weights_list + ] + bbox_weights_list = [ + bbox_weights.reshape(-1) for bbox_weights in bbox_weights_list + ] + label_weights = torch.cat(label_weights_list) + bbox_weights = torch.cat(bbox_weights_list) + return labels_list, label_weights, bbox_targets_list, bbox_weights + + def transform_bbox_targets(self, decoded_bboxes, mlvl_points, num_imgs): + """Transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format. + + Args: + decoded_bboxes (list[Tensor]): Regression targets of each level, + in the form of (x1, y1, x2, y2). + mlvl_points (list[Tensor]): Points of each fpn level, each has + shape (num_points, 2). + num_imgs (int): the number of images in a batch. + + Returns: + bbox_targets (list[Tensor]): Regression targets of each level in + the form of (l, t, r, b). + """ + # TODO: Re-implemented in Class PointCoder + assert len(decoded_bboxes) == len(mlvl_points) + num_levels = len(decoded_bboxes) + mlvl_points = [points.repeat(num_imgs, 1) for points in mlvl_points] + bbox_targets = [] + for i in range(num_levels): + bbox_target = bbox2distance(mlvl_points[i], decoded_bboxes[i]) + bbox_targets.append(bbox_target) + + return bbox_targets + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + """Override the method in the parent class to avoid changing para's + name.""" + pass diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/yolact_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/yolact_head.py new file mode 100644 index 0000000000000000000000000000000000000000..824246ce7a888adaa2dd545fc4553b82c035e099 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/yolact_head.py @@ -0,0 +1,939 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, xavier_init +from mmcv.runner import force_fp32 + +from mmdet.core import build_sampler, fast_nms, images_to_levels, multi_apply +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class YOLACTHead(AnchorHead): + """YOLACT box head used in https://arxiv.org/abs/1904.02689. + + Note that YOLACT head is a light version of RetinaNet head. + Four differences are described as follows: + + 1. YOLACT box head has three-times fewer anchors. + 2. 
YOLACT box head shares the convs for box and cls branches. + 3. YOLACT box head uses OHEM instead of Focal loss. + 4. YOLACT box head predicts a set of mask coefficients for each box. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + anchor_generator (dict): Config dict for anchor generator + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + num_head_convs (int): Number of the conv layers shared by + box and cls branches. + num_protos (int): Number of the mask coefficients. + use_ohem (bool): If true, ``loss_single_OHEM`` will be used for + cls loss calculation. If false, ``loss_single`` will be used. + conv_cfg (dict): Dictionary to construct and config conv layer. + norm_cfg (dict): Dictionary to construct and config norm layer. + """ + + def __init__(self, + num_classes, + in_channels, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True, + conv_cfg=None, + norm_cfg=None, + **kwargs): + self.num_head_convs = num_head_convs + self.num_protos = num_protos + self.use_ohem = use_ohem + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + super(YOLACTHead, self).__init__( + num_classes, + in_channels, + loss_cls=loss_cls, + loss_bbox=loss_bbox, + anchor_generator=anchor_generator, + **kwargs) + if self.use_ohem: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.sampling = False + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.head_convs = nn.ModuleList() + for i in range(self.num_head_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.head_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.conv_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.conv_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + self.conv_coeff = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.num_protos, + 3, + padding=1) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.head_convs: + xavier_init(m.conv, distribution='uniform', bias=0) + xavier_init(self.conv_cls, distribution='uniform', bias=0) + xavier_init(self.conv_reg, distribution='uniform', bias=0) + xavier_init(self.conv_coeff, distribution='uniform', bias=0) + + def forward_single(self, x): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level \ + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale \ + level, the channels number is num_anchors * 4. + coeff_pred (Tensor): Mask coefficients for a single scale \ + level, the channels number is num_anchors * num_protos. 
+ """ + for head_conv in self.head_convs: + x = head_conv(x) + cls_score = self.conv_cls(x) + bbox_pred = self.conv_reg(x) + coeff_pred = self.conv_coeff(x).tanh() + return cls_score, bbox_pred, coeff_pred + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """A combination of the func:``AnchorHead.loss`` and + func:``SSDHead.loss``. + + When ``self.use_ohem == True``, it functions like ``SSDHead.loss``, + otherwise, it follows ``AnchorHead.loss``. Besides, it additionally + returns ``sampling_results``. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + tuple: + dict[str, Tensor]: A dictionary of loss components. + List[:obj:``SamplingResult``]: Sampler results for each image. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + unmap_outputs=not self.use_ohem, + return_sampling_results=True) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, sampling_results) = cls_reg_targets + + if self.use_ohem: + num_images = len(img_metas) + all_cls_scores = torch.cat([ + s.permute(0, 2, 3, 1).reshape( + num_images, -1, self.cls_out_channels) for s in cls_scores + ], 1) + all_labels = torch.cat(labels_list, -1).view(num_images, -1) + all_label_weights = torch.cat(label_weights_list, + -1).view(num_images, -1) + all_bbox_preds = torch.cat([ + b.permute(0, 2, 3, 1).reshape(num_images, -1, 4) + for b in bbox_preds + ], -2) + all_bbox_targets = torch.cat(bbox_targets_list, + -2).view(num_images, -1, 4) + all_bbox_weights = torch.cat(bbox_weights_list, + -2).view(num_images, -1, 4) + + # concat all level anchors to a single tensor + all_anchors = [] + for i in range(num_images): + all_anchors.append(torch.cat(anchor_list[i])) + + # check NaN and Inf + assert torch.isfinite(all_cls_scores).all().item(), \ + 'classification scores become infinite or NaN!' + assert torch.isfinite(all_bbox_preds).all().item(), \ + 'bbox predications become infinite or NaN!' 
+
+            losses_cls, losses_bbox = multi_apply(
+                self.loss_single_OHEM,
+                all_cls_scores,
+                all_bbox_preds,
+                all_anchors,
+                all_labels,
+                all_label_weights,
+                all_bbox_targets,
+                all_bbox_weights,
+                num_total_samples=num_total_pos)
+        else:
+            num_total_samples = (
+                num_total_pos +
+                num_total_neg if self.sampling else num_total_pos)
+
+            # anchor number of multi levels
+            num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]]
+            # concat all level anchors and flags to a single tensor
+            concat_anchor_list = []
+            for i in range(len(anchor_list)):
+                concat_anchor_list.append(torch.cat(anchor_list[i]))
+            all_anchor_list = images_to_levels(concat_anchor_list,
+                                               num_level_anchors)
+            losses_cls, losses_bbox = multi_apply(
+                self.loss_single,
+                cls_scores,
+                bbox_preds,
+                all_anchor_list,
+                labels_list,
+                label_weights_list,
+                bbox_targets_list,
+                bbox_weights_list,
+                num_total_samples=num_total_samples)
+
+        return dict(
+            loss_cls=losses_cls, loss_bbox=losses_bbox), sampling_results
+
+    def loss_single_OHEM(self, cls_score, bbox_pred, anchors, labels,
+                         label_weights, bbox_targets, bbox_weights,
+                         num_total_samples):
+        """See func:``SSDHead.loss``."""
+        loss_cls_all = self.loss_cls(cls_score, labels, label_weights)
+
+        # FG cat_id: [0, num_classes -1], BG cat_id: num_classes
+        pos_inds = ((labels >= 0) &
+                    (labels < self.num_classes)).nonzero().reshape(-1)
+        neg_inds = (labels == self.num_classes).nonzero().view(-1)
+
+        num_pos_samples = pos_inds.size(0)
+        if num_pos_samples == 0:
+            num_neg_samples = neg_inds.size(0)
+        else:
+            num_neg_samples = self.train_cfg.neg_pos_ratio * num_pos_samples
+            if num_neg_samples > neg_inds.size(0):
+                num_neg_samples = neg_inds.size(0)
+        topk_loss_cls_neg, _ = loss_cls_all[neg_inds].topk(num_neg_samples)
+        loss_cls_pos = loss_cls_all[pos_inds].sum()
+        loss_cls_neg = topk_loss_cls_neg.sum()
+        loss_cls = (loss_cls_pos + loss_cls_neg) / num_total_samples
+        if self.reg_decoded_bbox:
+            bbox_pred = self.bbox_coder.decode(anchors, bbox_pred)
+        loss_bbox = self.loss_bbox(
+            bbox_pred,
+            bbox_targets,
+            bbox_weights,
+            avg_factor=num_total_samples)
+        return loss_cls[None], loss_bbox
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'coeff_preds'))
+    def get_bboxes(self,
+                   cls_scores,
+                   bbox_preds,
+                   coeff_preds,
+                   img_metas,
+                   cfg=None,
+                   rescale=False):
+        """Similar to func:``AnchorHead.get_bboxes``, but additionally
+        processes coeff_preds.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level
+                with shape (N, num_anchors * num_classes, H, W)
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W)
+            coeff_preds (list[Tensor]): Mask coefficients for each scale
+                level with shape (N, num_anchors * num_protos, H, W)
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            cfg (mmcv.Config | None): Test / postprocessing configuration,
+                if None, test_cfg would be used
+            rescale (bool): If True, return boxes in original image space.
+                Default: False.
+
+        Returns:
+            list[tuple[Tensor, Tensor, Tensor]]: Each item in result_list is
+                a 3-tuple. The first item is an (n, 5) tensor, where the
+                first 4 columns are bounding box positions
+                (tl_x, tl_y, br_x, br_y) and the 5-th column is a score
+                between 0 and 1. The second item is an (n,) tensor where each
+                item is the predicted class label of the corresponding box.
+                The third item is an (n, num_protos) tensor where each item
+                is the predicted mask coefficients of instance inside the
+                corresponding box.
+        """
+        assert len(cls_scores) == len(bbox_preds)
+        num_levels = len(cls_scores)
+
+        device = cls_scores[0].device
+        featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)]
+        mlvl_anchors = self.anchor_generator.grid_anchors(
+            featmap_sizes, device=device)
+
+        det_bboxes = []
+        det_labels = []
+        det_coeffs = []
+        for img_id in range(len(img_metas)):
+            cls_score_list = [
+                cls_scores[i][img_id].detach() for i in range(num_levels)
+            ]
+            bbox_pred_list = [
+                bbox_preds[i][img_id].detach() for i in range(num_levels)
+            ]
+            coeff_pred_list = [
+                coeff_preds[i][img_id].detach() for i in range(num_levels)
+            ]
+            img_shape = img_metas[img_id]['img_shape']
+            scale_factor = img_metas[img_id]['scale_factor']
+            bbox_res = self._get_bboxes_single(cls_score_list, bbox_pred_list,
+                                               coeff_pred_list, mlvl_anchors,
+                                               img_shape, scale_factor, cfg,
+                                               rescale)
+            det_bboxes.append(bbox_res[0])
+            det_labels.append(bbox_res[1])
+            det_coeffs.append(bbox_res[2])
+        return det_bboxes, det_labels, det_coeffs
+
+    def _get_bboxes_single(self,
+                           cls_score_list,
+                           bbox_pred_list,
+                           coeff_preds_list,
+                           mlvl_anchors,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False):
+        """Similar to func:``AnchorHead._get_bboxes_single``, but
+        additionally processes coeff_preds_list and uses fast NMS instead of
+        traditional NMS.
+
+        Args:
+            cls_score_list (list[Tensor]): Box scores for a single scale level
+                Has shape (num_anchors * num_classes, H, W).
+            bbox_pred_list (list[Tensor]): Box energies / deltas for a single
+                scale level with shape (num_anchors * 4, H, W).
+            coeff_preds_list (list[Tensor]): Mask coefficients for a single
+                scale level with shape (num_anchors * num_protos, H, W).
+            mlvl_anchors (list[Tensor]): Box reference for a single scale level
+                with shape (num_total_anchors, 4).
+            img_shape (tuple[int]): Shape of the input image,
+                (height, width, 3).
+            scale_factor (ndarray): Scale factor of the image arranged as
+                (w_scale, h_scale, w_scale, h_scale).
+            cfg (mmcv.Config): Test / postprocessing configuration,
+                if None, test_cfg would be used.
+            rescale (bool): If True, return boxes in original image space.
+
+        Returns:
+            tuple[Tensor, Tensor, Tensor]: The first item is an (n, 5) tensor,
+                where the first 4 columns are bounding box positions
+                (tl_x, tl_y, br_x, br_y) and the 5-th column is a score between
+                0 and 1. The second item is an (n,) tensor where each item is
+                the predicted class label of the corresponding box. The third
+                item is an (n, num_protos) tensor where each item is the
+                predicted mask coefficients of instance inside the
+                corresponding box.
+        """
+        cfg = self.test_cfg if cfg is None else cfg
+        assert len(cls_score_list) == len(bbox_pred_list) == len(mlvl_anchors)
+        mlvl_bboxes = []
+        mlvl_scores = []
+        mlvl_coeffs = []
+        for cls_score, bbox_pred, coeff_pred, anchors in \
+                zip(cls_score_list, bbox_pred_list,
+                    coeff_preds_list, mlvl_anchors):
+            assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
+            cls_score = cls_score.permute(1, 2,
+                                          0).reshape(-1, self.cls_out_channels)
+            if self.use_sigmoid_cls:
+                scores = cls_score.sigmoid()
+            else:
+                scores = cls_score.softmax(-1)
+            bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4)
+            coeff_pred = coeff_pred.permute(1, 2,
+                                            0).reshape(-1, self.num_protos)
+            nms_pre = cfg.get('nms_pre', -1)
+            if nms_pre > 0 and scores.shape[0] > nms_pre:
+                # Get maximum scores for foreground classes.
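+                # (the per-anchor max score is only used to rank and keep
+                # the top ``nms_pre`` candidates; the full per-class scores
+                # of the kept anchors are still passed on to fast NMS)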
+ if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + coeff_pred = coeff_pred[topk_inds, :] + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_coeffs.append(coeff_pred) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + mlvl_coeffs = torch.cat(mlvl_coeffs) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + det_bboxes, det_labels, det_coeffs = fast_nms(mlvl_bboxes, mlvl_scores, + mlvl_coeffs, + cfg.score_thr, + cfg.iou_thr, cfg.top_k, + cfg.max_per_img) + return det_bboxes, det_labels, det_coeffs + + +@HEADS.register_module() +class YOLACTSegmHead(nn.Module): + """YOLACT segmentation head used in https://arxiv.org/abs/1904.02689. + + Apply a semantic segmentation loss on feature space using layers that are + only evaluated during training to increase performance with no speed + penalty. + + Args: + in_channels (int): Number of channels in the input feature map. + num_classes (int): Number of categories excluding the background + category. + loss_segm (dict): Config of semantic segmentation loss. + """ + + def __init__(self, + num_classes, + in_channels=256, + loss_segm=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0)): + super(YOLACTSegmHead, self).__init__() + self.in_channels = in_channels + self.num_classes = num_classes + self.loss_segm = build_loss(loss_segm) + self._init_layers() + self.fp16_enabled = False + + def _init_layers(self): + """Initialize layers of the head.""" + self.segm_conv = nn.Conv2d( + self.in_channels, self.num_classes, kernel_size=1) + + def init_weights(self): + """Initialize weights of the head.""" + xavier_init(self.segm_conv, distribution='uniform') + + def forward(self, x): + """Forward feature from the upstream network. + + Args: + x (Tensor): Feature from the upstream network, which is + a 4D-tensor. + + Returns: + Tensor: Predicted semantic segmentation map with shape + (N, num_classes, H, W). + """ + return self.segm_conv(x) + + @force_fp32(apply_to=('segm_pred', )) + def loss(self, segm_pred, gt_masks, gt_labels): + """Compute loss of the head. + + Args: + segm_pred (list[Tensor]): Predicted semantic segmentation map + with shape (N, num_classes, H, W). + gt_masks (list[Tensor]): Ground truth masks for each image with + the same shape of the input image. + gt_labels (list[Tensor]): Class indices corresponding to each box. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
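+
+        Note:
+            When an image has no ground truth masks, ``get_targets`` returns
+            None and a zero-weighted loss on a zero target is computed
+            instead; this keeps the segmentation conv in the computation
+            graph (presumably so that all parameters receive gradients, e.g.
+            under distributed training) without adding to the objective.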
+ """ + loss_segm = [] + num_imgs, num_classes, mask_h, mask_w = segm_pred.size() + for idx in range(num_imgs): + cur_segm_pred = segm_pred[idx] + cur_gt_masks = gt_masks[idx].float() + cur_gt_labels = gt_labels[idx] + segm_targets = self.get_targets(cur_segm_pred, cur_gt_masks, + cur_gt_labels) + if segm_targets is None: + loss = self.loss_segm(cur_segm_pred, + torch.zeros_like(cur_segm_pred), + torch.zeros_like(cur_segm_pred)) + else: + loss = self.loss_segm( + cur_segm_pred, + segm_targets, + avg_factor=num_imgs * mask_h * mask_w) + loss_segm.append(loss) + return dict(loss_segm=loss_segm) + + def get_targets(self, segm_pred, gt_masks, gt_labels): + """Compute semantic segmentation targets for each image. + + Args: + segm_pred (Tensor): Predicted semantic segmentation map + with shape (num_classes, H, W). + gt_masks (Tensor): Ground truth masks for each image with + the same shape of the input image. + gt_labels (Tensor): Class indices corresponding to each box. + + Returns: + Tensor: Semantic segmentation targets with shape + (num_classes, H, W). + """ + if gt_masks.size(0) == 0: + return None + num_classes, mask_h, mask_w = segm_pred.size() + with torch.no_grad(): + downsampled_masks = F.interpolate( + gt_masks.unsqueeze(0), (mask_h, mask_w), + mode='bilinear', + align_corners=False).squeeze(0) + downsampled_masks = downsampled_masks.gt(0.5).float() + segm_targets = torch.zeros_like(segm_pred, requires_grad=False) + for obj_idx in range(downsampled_masks.size(0)): + segm_targets[gt_labels[obj_idx] - 1] = torch.max( + segm_targets[gt_labels[obj_idx] - 1], + downsampled_masks[obj_idx]) + return segm_targets + + +@HEADS.register_module() +class YOLACTProtonet(nn.Module): + """YOLACT mask head used in https://arxiv.org/abs/1904.02689. + + This head outputs the mask prototypes for YOLACT. + + Args: + in_channels (int): Number of channels in the input feature map. + proto_channels (tuple[int]): Output channels of protonet convs. + proto_kernel_sizes (tuple[int]): Kernel sizes of protonet convs. + include_last_relu (Bool): If keep the last relu of protonet. + num_protos (int): Number of prototypes. + num_classes (int): Number of categories excluding the background + category. + loss_mask_weight (float): Reweight the mask loss by this factor. + max_masks_to_train (int): Maximum number of masks to train for + each image. 
+ """ + + def __init__(self, + num_classes, + in_channels=256, + proto_channels=(256, 256, 256, None, 256, 32), + proto_kernel_sizes=(3, 3, 3, -2, 3, 1), + include_last_relu=True, + num_protos=32, + loss_mask_weight=1.0, + max_masks_to_train=100): + super(YOLACTProtonet, self).__init__() + self.in_channels = in_channels + self.proto_channels = proto_channels + self.proto_kernel_sizes = proto_kernel_sizes + self.include_last_relu = include_last_relu + self.protonet = self._init_layers() + + self.loss_mask_weight = loss_mask_weight + self.num_protos = num_protos + self.num_classes = num_classes + self.max_masks_to_train = max_masks_to_train + self.fp16_enabled = False + + def _init_layers(self): + """A helper function to take a config setting and turn it into a + network.""" + # Possible patterns: + # ( 256, 3) -> conv + # ( 256,-2) -> deconv + # (None,-2) -> bilinear interpolate + in_channels = self.in_channels + protonets = nn.ModuleList() + for num_channels, kernel_size in zip(self.proto_channels, + self.proto_kernel_sizes): + if kernel_size > 0: + layer = nn.Conv2d( + in_channels, + num_channels, + kernel_size, + padding=kernel_size // 2) + else: + if num_channels is None: + layer = InterpolateModule( + scale_factor=-kernel_size, + mode='bilinear', + align_corners=False) + else: + layer = nn.ConvTranspose2d( + in_channels, + num_channels, + -kernel_size, + padding=kernel_size // 2) + protonets.append(layer) + protonets.append(nn.ReLU(inplace=True)) + in_channels = num_channels if num_channels is not None \ + else in_channels + if not self.include_last_relu: + protonets = protonets[:-1] + return nn.Sequential(*protonets) + + def init_weights(self): + """Initialize weights of the head.""" + for m in self.protonet: + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + + def forward(self, x, coeff_pred, bboxes, img_meta, sampling_results=None): + """Forward feature from the upstream network to get prototypes and + linearly combine the prototypes, using masks coefficients, into + instance masks. Finally, crop the instance masks with given bboxes. + + Args: + x (Tensor): Feature from the upstream network, which is + a 4D-tensor. + coeff_pred (list[Tensor]): Mask coefficients for each scale + level with shape (N, num_anchors * num_protos, H, W). + bboxes (list[Tensor]): Box used for cropping with shape + (N, num_anchors * 4, H, W). During training, they are + ground truth boxes. During testing, they are predicted + boxes. + img_meta (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + sampling_results (List[:obj:``SamplingResult``]): Sampler results + for each image. + + Returns: + list[Tensor]: Predicted instance segmentation masks. 
+ """ + prototypes = self.protonet(x) + prototypes = prototypes.permute(0, 2, 3, 1).contiguous() + + num_imgs = x.size(0) + # Training state + if self.training: + coeff_pred_list = [] + for coeff_pred_per_level in coeff_pred: + coeff_pred_per_level = \ + coeff_pred_per_level.permute(0, 2, 3, 1)\ + .reshape(num_imgs, -1, self.num_protos) + coeff_pred_list.append(coeff_pred_per_level) + coeff_pred = torch.cat(coeff_pred_list, dim=1) + + mask_pred_list = [] + for idx in range(num_imgs): + cur_prototypes = prototypes[idx] + cur_coeff_pred = coeff_pred[idx] + cur_bboxes = bboxes[idx] + cur_img_meta = img_meta[idx] + + # Testing state + if not self.training: + bboxes_for_cropping = cur_bboxes + else: + cur_sampling_results = sampling_results[idx] + pos_assigned_gt_inds = \ + cur_sampling_results.pos_assigned_gt_inds + bboxes_for_cropping = cur_bboxes[pos_assigned_gt_inds].clone() + pos_inds = cur_sampling_results.pos_inds + cur_coeff_pred = cur_coeff_pred[pos_inds] + + # Linearly combine the prototypes with the mask coefficients + mask_pred = cur_prototypes @ cur_coeff_pred.t() + mask_pred = torch.sigmoid(mask_pred) + + h, w = cur_img_meta['img_shape'][:2] + bboxes_for_cropping[:, 0] /= w + bboxes_for_cropping[:, 1] /= h + bboxes_for_cropping[:, 2] /= w + bboxes_for_cropping[:, 3] /= h + + mask_pred = self.crop(mask_pred, bboxes_for_cropping) + mask_pred = mask_pred.permute(2, 0, 1).contiguous() + mask_pred_list.append(mask_pred) + return mask_pred_list + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, gt_masks, gt_bboxes, img_meta, sampling_results): + """Compute loss of the head. + + Args: + mask_pred (list[Tensor]): Predicted prototypes with shape + (num_classes, H, W). + gt_masks (list[Tensor]): Ground truth masks for each image with + the same shape of the input image. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + img_meta (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + sampling_results (List[:obj:``SamplingResult``]): Sampler results + for each image. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + loss_mask = [] + num_imgs = len(mask_pred) + total_pos = 0 + for idx in range(num_imgs): + cur_mask_pred = mask_pred[idx] + cur_gt_masks = gt_masks[idx].float() + cur_gt_bboxes = gt_bboxes[idx] + cur_img_meta = img_meta[idx] + cur_sampling_results = sampling_results[idx] + + pos_assigned_gt_inds = cur_sampling_results.pos_assigned_gt_inds + num_pos = pos_assigned_gt_inds.size(0) + # Since we're producing (near) full image masks, + # it'd take too much vram to backprop on every single mask. + # Thus we select only a subset. + if num_pos > self.max_masks_to_train: + perm = torch.randperm(num_pos) + select = perm[:self.max_masks_to_train] + cur_mask_pred = cur_mask_pred[select] + pos_assigned_gt_inds = pos_assigned_gt_inds[select] + num_pos = self.max_masks_to_train + total_pos += num_pos + + gt_bboxes_for_reweight = cur_gt_bboxes[pos_assigned_gt_inds] + + mask_targets = self.get_targets(cur_mask_pred, cur_gt_masks, + pos_assigned_gt_inds) + if num_pos == 0: + loss = cur_mask_pred.sum() * 0. 
+ elif mask_targets is None: + loss = F.binary_cross_entropy(cur_mask_pred, + torch.zeros_like(cur_mask_pred), + torch.zeros_like(cur_mask_pred)) + else: + cur_mask_pred = torch.clamp(cur_mask_pred, 0, 1) + loss = F.binary_cross_entropy( + cur_mask_pred, mask_targets, + reduction='none') * self.loss_mask_weight + + h, w = cur_img_meta['img_shape'][:2] + gt_bboxes_width = (gt_bboxes_for_reweight[:, 2] - + gt_bboxes_for_reweight[:, 0]) / w + gt_bboxes_height = (gt_bboxes_for_reweight[:, 3] - + gt_bboxes_for_reweight[:, 1]) / h + loss = loss.mean(dim=(1, + 2)) / gt_bboxes_width / gt_bboxes_height + loss = torch.sum(loss) + loss_mask.append(loss) + + if total_pos == 0: + total_pos += 1 # avoid nan + loss_mask = [x / total_pos for x in loss_mask] + + return dict(loss_mask=loss_mask) + + def get_targets(self, mask_pred, gt_masks, pos_assigned_gt_inds): + """Compute instance segmentation targets for each image. + + Args: + mask_pred (Tensor): Predicted prototypes with shape + (num_classes, H, W). + gt_masks (Tensor): Ground truth masks for each image with + the same shape of the input image. + pos_assigned_gt_inds (Tensor): GT indices of the corresponding + positive samples. + Returns: + Tensor: Instance segmentation targets with shape + (num_instances, H, W). + """ + if gt_masks.size(0) == 0: + return None + mask_h, mask_w = mask_pred.shape[-2:] + gt_masks = F.interpolate( + gt_masks.unsqueeze(0), (mask_h, mask_w), + mode='bilinear', + align_corners=False).squeeze(0) + gt_masks = gt_masks.gt(0.5).float() + mask_targets = gt_masks[pos_assigned_gt_inds] + return mask_targets + + def get_seg_masks(self, mask_pred, label_pred, img_meta, rescale): + """Resize, binarize, and format the instance mask predictions. + + Args: + mask_pred (Tensor): shape (N, H, W). + label_pred (Tensor): shape (N, ). + img_meta (dict): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If rescale is False, then returned masks will + fit the scale of imgs[0]. + Returns: + list[ndarray]: Mask predictions grouped by their predicted classes. + """ + ori_shape = img_meta['ori_shape'] + scale_factor = img_meta['scale_factor'] + if rescale: + img_h, img_w = ori_shape[:2] + else: + img_h = np.round(ori_shape[0] * scale_factor[1]).astype(np.int32) + img_w = np.round(ori_shape[1] * scale_factor[0]).astype(np.int32) + + cls_segms = [[] for _ in range(self.num_classes)] + if mask_pred.size(0) == 0: + return cls_segms + + mask_pred = F.interpolate( + mask_pred.unsqueeze(0), (img_h, img_w), + mode='bilinear', + align_corners=False).squeeze(0) > 0.5 + mask_pred = mask_pred.cpu().numpy().astype(np.uint8) + + for m, l in zip(mask_pred, label_pred): + cls_segms[l].append(m) + return cls_segms + + def crop(self, masks, boxes, padding=1): + """Crop predicted masks by zeroing out everything not in the predicted + bbox. + + Args: + masks (Tensor): shape [H, W, N]. + boxes (Tensor): bbox coords in relative point form with + shape [N, 4]. + + Return: + Tensor: The cropped masks. 
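+
+        Example (illustrative, using relative box coordinates and the
+            default ``padding=1``):
+            >>> self = YOLACTProtonet(num_classes=80)
+            >>> masks = torch.ones(8, 8, 1)
+            >>> boxes = torch.tensor([[0.25, 0.25, 0.75, 0.75]])
+            >>> # the box [2, 6) padded by 1 pixel keeps a 6x6 region
+            >>> self.crop(masks, boxes).sum()
+            tensor(36.)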
+ """ + h, w, n = masks.size() + x1, x2 = self.sanitize_coordinates( + boxes[:, 0], boxes[:, 2], w, padding, cast=False) + y1, y2 = self.sanitize_coordinates( + boxes[:, 1], boxes[:, 3], h, padding, cast=False) + + rows = torch.arange( + w, device=masks.device, dtype=x1.dtype).view(1, -1, + 1).expand(h, w, n) + cols = torch.arange( + h, device=masks.device, dtype=x1.dtype).view(-1, 1, + 1).expand(h, w, n) + + masks_left = rows >= x1.view(1, 1, -1) + masks_right = rows < x2.view(1, 1, -1) + masks_up = cols >= y1.view(1, 1, -1) + masks_down = cols < y2.view(1, 1, -1) + + crop_mask = masks_left * masks_right * masks_up * masks_down + + return masks * crop_mask.float() + + def sanitize_coordinates(self, x1, x2, img_size, padding=0, cast=True): + """Sanitizes the input coordinates so that x1 < x2, x1 != x2, x1 >= 0, + and x2 <= image_size. Also converts from relative to absolute + coordinates and casts the results to long tensors. + + Warning: this does things in-place behind the scenes so + copy if necessary. + + Args: + _x1 (Tensor): shape (N, ). + _x2 (Tensor): shape (N, ). + img_size (int): Size of the input image. + padding (int): x1 >= padding, x2 <= image_size-padding. + cast (bool): If cast is false, the result won't be cast to longs. + + Returns: + tuple: + x1 (Tensor): Sanitized _x1. + x2 (Tensor): Sanitized _x2. + """ + x1 = x1 * img_size + x2 = x2 * img_size + if cast: + x1 = x1.long() + x2 = x2.long() + x1 = torch.min(x1, x2) + x2 = torch.max(x1, x2) + x1 = torch.clamp(x1 - padding, min=0) + x2 = torch.clamp(x2 + padding, max=img_size) + return x1, x2 + + +class InterpolateModule(nn.Module): + """This is a module version of F.interpolate. + + Any arguments you give it just get passed along for the ride. + """ + + def __init__(self, *args, **kwargs): + super().__init__() + + self.args = args + self.kwargs = kwargs + + def forward(self, x): + """Forward features from the upstream network.""" + return F.interpolate(x, *self.args, **self.kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/dense_heads/yolo_head.py b/thirdparty/mmdetection/mmdet/models/dense_heads/yolo_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d59e93897377c230fbb49a3fe8f89d2557c3e325 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/dense_heads/yolo_head.py @@ -0,0 +1,533 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import warnings + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, images_to_levels, + multi_apply, multiclass_nms) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class YOLOV3Head(BaseDenseHead, BBoxTestMixin): + """YOLOV3Head Paper link: https://arxiv.org/abs/1804.02767. + + Args: + num_classes (int): The number of object classes (w/o background) + in_channels (List[int]): Number of input channels per scale. + out_channels (List[int]): The number of output channels per scale + before the final 1x1 layer. Default: (1024, 512, 256). + anchor_generator (dict): Config dict for anchor generator + bbox_coder (dict): Config of bounding box coder. + featmap_strides (List[int]): The stride of each scale. + Should be in descending order. Default: (32, 16, 8). 
+ one_hot_smoother (float): Set a non-zero value to enable label-smooth + Default: 0. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + loss_cls (dict): Config of classification loss. + loss_conf (dict): Config of confidence loss. + loss_xy (dict): Config of xy coordinate loss. + loss_wh (dict): Config of wh coordinate loss. + train_cfg (dict): Training config of YOLOV3 head. Default: None. + test_cfg (dict): Testing config of YOLOV3 head. Default: None. + """ + + def __init__(self, + num_classes, + in_channels, + out_channels=(1024, 512, 256), + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + one_hot_smoother=0., + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_wh=dict(type='MSELoss', loss_weight=1.0), + train_cfg=None, + test_cfg=None): + super(YOLOV3Head, self).__init__() + # Check params + assert (len(in_channels) == len(out_channels) == len(featmap_strides)) + + self.num_classes = num_classes + self.in_channels = in_channels + self.out_channels = out_channels + self.featmap_strides = featmap_strides + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + if hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.one_hot_smoother = one_hot_smoother + + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.anchor_generator = build_anchor_generator(anchor_generator) + + self.loss_cls = build_loss(loss_cls) + self.loss_conf = build_loss(loss_conf) + self.loss_xy = build_loss(loss_xy) + self.loss_wh = build_loss(loss_wh) + # usually the numbers of anchors for each level are the same + # except SSD detectors + self.num_anchors = self.anchor_generator.num_base_anchors[0] + assert len( + self.anchor_generator.num_base_anchors) == len(featmap_strides) + self._init_layers() + + @property + def num_levels(self): + return len(self.featmap_strides) + + @property + def num_attrib(self): + """int: number of attributes in pred_map, bboxes (4) + + objectness (1) + num_classes""" + + return 5 + self.num_classes + + def _init_layers(self): + self.convs_bridge = nn.ModuleList() + self.convs_pred = nn.ModuleList() + for i in range(self.num_levels): + conv_bridge = ConvModule( + self.in_channels[i], + self.out_channels[i], + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + conv_pred = nn.Conv2d(self.out_channels[i], + self.num_anchors * self.num_attrib, 1) + + self.convs_bridge.append(conv_bridge) + self.convs_pred.append(conv_pred) + + def init_weights(self): + """Initialize weights of the 
head.""" + for m in self.convs_pred: + normal_init(m, std=0.01) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple[Tensor]: A tuple of multi-level predication map, each is a + 4D-tensor of shape (batch_size, 5+num_classes, height, width). + """ + + assert len(feats) == self.num_levels + pred_maps = [] + for i in range(self.num_levels): + x = feats[i] + x = self.convs_bridge[i](x) + pred_map = self.convs_pred[i](x) + pred_maps.append(pred_map) + + return tuple(pred_maps), + + @force_fp32(apply_to=('pred_maps', )) + def get_bboxes(self, + pred_maps, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + pred_maps (list[Tensor]): Raw predictions for a batch of images. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. The second item is a + (n,) tensor where each item is the predicted class label of the + corresponding box. + """ + result_list = [] + num_levels = len(pred_maps) + for img_id in range(len(img_metas)): + pred_maps_list = [ + pred_maps[i][img_id].detach() for i in range(num_levels) + ] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(pred_maps_list, scale_factor, + cfg, rescale, with_nms) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + pred_maps_list, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + pred_maps_list (list[Tensor]): Prediction maps for different scales + of each single image in the batch. + scale_factor (ndarray): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + tuple(Tensor): + det_bboxes (Tensor): BBox predictions in shape (n, 5), where + the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. + det_labels (Tensor): A (n,) tensor where each item is the + predicted class label of the corresponding box. 
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(pred_maps_list) == self.num_levels + multi_lvl_bboxes = [] + multi_lvl_cls_scores = [] + multi_lvl_conf_scores = [] + num_levels = len(pred_maps_list) + featmap_sizes = [ + pred_maps_list[i].shape[-2:] for i in range(num_levels) + ] + multi_lvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, pred_maps_list[0][0].device) + for i in range(self.num_levels): + # get some key info for current scale + pred_map = pred_maps_list[i] + stride = self.featmap_strides[i] + + # (h, w, num_anchors*num_attrib) -> (h*w*num_anchors, num_attrib) + pred_map = pred_map.permute(1, 2, 0).reshape(-1, self.num_attrib) + + pred_map[..., :2] = torch.sigmoid(pred_map[..., :2]) + bbox_pred = self.bbox_coder.decode(multi_lvl_anchors[i], + pred_map[..., :4], stride) + # conf and cls + conf_pred = torch.sigmoid(pred_map[..., 4]).view(-1) + cls_pred = torch.sigmoid(pred_map[..., 5:]).view( + -1, self.num_classes) # Cls pred one-hot. + + # Filtering out all predictions with conf < conf_thr + conf_thr = cfg.get('conf_thr', -1) + if conf_thr > 0: + # add as_tuple=False for compatibility in Pytorch 1.6 + conf_inds = conf_pred.ge(conf_thr).nonzero( + as_tuple=False).flatten() + bbox_pred = bbox_pred[conf_inds, :] + cls_pred = cls_pred[conf_inds, :] + conf_pred = conf_pred[conf_inds] + + # Get top-k prediction + nms_pre = cfg.get('nms_pre', -1) + if 0 < nms_pre < conf_pred.size(0) and ( + not torch.onnx.is_in_onnx_export()): + _, topk_inds = conf_pred.topk(nms_pre) + bbox_pred = bbox_pred[topk_inds, :] + cls_pred = cls_pred[topk_inds, :] + conf_pred = conf_pred[topk_inds] + + # Save the result of current scale + multi_lvl_bboxes.append(bbox_pred) + multi_lvl_cls_scores.append(cls_pred) + multi_lvl_conf_scores.append(conf_pred) + + # Merge the results of different scales together + multi_lvl_bboxes = torch.cat(multi_lvl_bboxes) + multi_lvl_cls_scores = torch.cat(multi_lvl_cls_scores) + multi_lvl_conf_scores = torch.cat(multi_lvl_conf_scores) + + if with_nms and (multi_lvl_conf_scores.size(0) == 0): + return torch.zeros((0, 5)), torch.zeros((0, )) + + if rescale: + multi_lvl_bboxes /= multi_lvl_bboxes.new_tensor(scale_factor) + + # In mmdet 2.x, the class_id for background is num_classes. + # i.e., the last column. + padding = multi_lvl_cls_scores.new_zeros(multi_lvl_cls_scores.shape[0], + 1) + multi_lvl_cls_scores = torch.cat([multi_lvl_cls_scores, padding], + dim=1) + + # Support exporting to onnx without nms + if with_nms and cfg.get('nms', None) is not None: + det_bboxes, det_labels = multiclass_nms( + multi_lvl_bboxes, + multi_lvl_cls_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=multi_lvl_conf_scores) + return det_bboxes, det_labels + else: + return (multi_lvl_bboxes, multi_lvl_cls_scores, + multi_lvl_conf_scores) + + @force_fp32(apply_to=('pred_maps', )) + def loss(self, + pred_maps, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + pred_maps (list[Tensor]): Prediction map for each scale level, + shape (N, num_anchors * num_attrib, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. 
+ + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + num_imgs = len(img_metas) + device = pred_maps[0][0].device + + featmap_sizes = [ + pred_maps[i].shape[-2:] for i in range(self.num_levels) + ] + multi_level_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device) + anchor_list = [multi_level_anchors for _ in range(num_imgs)] + + responsible_flag_list = [] + for img_id in range(len(img_metas)): + responsible_flag_list.append( + self.anchor_generator.responsible_flags( + featmap_sizes, gt_bboxes[img_id], device)) + + target_maps_list, neg_maps_list = self.get_targets( + anchor_list, responsible_flag_list, gt_bboxes, gt_labels) + + losses_cls, losses_conf, losses_xy, losses_wh = multi_apply( + self.loss_single, pred_maps, target_maps_list, neg_maps_list) + + return dict( + loss_cls=losses_cls, + loss_conf=losses_conf, + loss_xy=losses_xy, + loss_wh=losses_wh) + + def loss_single(self, pred_map, target_map, neg_map): + """Compute loss of a single image from a batch. + + Args: + pred_map (Tensor): Raw predictions for a single level. + target_map (Tensor): The Ground-Truth target for a single level. + neg_map (Tensor): The negative masks for a single level. + + Returns: + tuple: + loss_cls (Tensor): Classification loss. + loss_conf (Tensor): Confidence loss. + loss_xy (Tensor): Regression loss of x, y coordinate. + loss_wh (Tensor): Regression loss of w, h coordinate. + """ + + num_imgs = len(pred_map) + pred_map = pred_map.permute(0, 2, 3, + 1).reshape(num_imgs, -1, self.num_attrib) + neg_mask = neg_map.float() + pos_mask = target_map[..., 4] + pos_and_neg_mask = neg_mask + pos_mask + pos_mask = pos_mask.unsqueeze(dim=-1) + if torch.max(pos_and_neg_mask) > 1.: + warnings.warn('There is overlap between pos and neg sample.') + pos_and_neg_mask = pos_and_neg_mask.clamp(min=0., max=1.) + + pred_xy = pred_map[..., :2] + pred_wh = pred_map[..., 2:4] + pred_conf = pred_map[..., 4] + pred_label = pred_map[..., 5:] + + target_xy = target_map[..., :2] + target_wh = target_map[..., 2:4] + target_conf = target_map[..., 4] + target_label = target_map[..., 5:] + + loss_cls = self.loss_cls(pred_label, target_label, weight=pos_mask) + loss_conf = self.loss_conf( + pred_conf, target_conf, weight=pos_and_neg_mask) + loss_xy = self.loss_xy(pred_xy, target_xy, weight=pos_mask) + loss_wh = self.loss_wh(pred_wh, target_wh, weight=pos_mask) + + return loss_cls, loss_conf, loss_xy, loss_wh + + def get_targets(self, anchor_list, responsible_flag_list, gt_bboxes_list, + gt_labels_list): + """Compute target maps for anchors in multiple images. + + Args: + anchor_list (list[list[Tensor]]): Multi level anchors of each + image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_total_anchors, 4). + responsible_flag_list (list[list[Tensor]]): Multi level responsible + flags of each image. Each element is a tensor of shape + (num_total_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + + Returns: + tuple: Usually returns a tuple containing learning targets. + - target_map_list (list[Tensor]): Target map of each level. + - neg_map_list (list[Tensor]): Negative map of each level. 
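+
+        Note:
+            Each anchor's row in a target map is laid out as
+            ``[x, y, w, h, objectness, class one-hot]``, i.e.
+            ``num_attrib == 5 + num_classes`` entries per anchor.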
+ """ + num_imgs = len(anchor_list) + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + + results = multi_apply(self._get_targets_single, anchor_list, + responsible_flag_list, gt_bboxes_list, + gt_labels_list) + + all_target_maps, all_neg_maps = results + assert num_imgs == len(all_target_maps) == len(all_neg_maps) + target_maps_list = images_to_levels(all_target_maps, num_level_anchors) + neg_maps_list = images_to_levels(all_neg_maps, num_level_anchors) + + return target_maps_list, neg_maps_list + + def _get_targets_single(self, anchors, responsible_flags, gt_bboxes, + gt_labels): + """Generate matching bounding box prior and converted GT. + + Args: + anchors (list[Tensor]): Multi-level anchors of the image. + responsible_flags (list[Tensor]): Multi-level responsible flags of + anchors + gt_bboxes (Tensor): Ground truth bboxes of single image. + gt_labels (Tensor): Ground truth labels of single image. + + Returns: + tuple: + target_map (Tensor): Predication target map of each + scale level, shape (num_total_anchors, + 5+num_classes) + neg_map (Tensor): Negative map of each scale level, + shape (num_total_anchors,) + """ + + anchor_strides = [] + for i in range(len(anchors)): + anchor_strides.append( + torch.tensor(self.featmap_strides[i], + device=gt_bboxes.device).repeat(len(anchors[i]))) + concat_anchors = torch.cat(anchors) + concat_responsible_flags = torch.cat(responsible_flags) + + anchor_strides = torch.cat(anchor_strides) + assert len(anchor_strides) == len(concat_anchors) == \ + len(concat_responsible_flags) + assign_result = self.assigner.assign(concat_anchors, + concat_responsible_flags, + gt_bboxes) + sampling_result = self.sampler.sample(assign_result, concat_anchors, + gt_bboxes) + + target_map = concat_anchors.new_zeros( + concat_anchors.size(0), self.num_attrib) + + target_map[sampling_result.pos_inds, :4] = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes, + anchor_strides[sampling_result.pos_inds]) + + target_map[sampling_result.pos_inds, 4] = 1 + + gt_labels_one_hot = F.one_hot( + gt_labels, num_classes=self.num_classes).float() + if self.one_hot_smoother != 0: # label smooth + gt_labels_one_hot = gt_labels_one_hot * ( + 1 - self.one_hot_smoother + ) + self.one_hot_smoother / self.num_classes + target_map[sampling_result.pos_inds, 5:] = gt_labels_one_hot[ + sampling_result.pos_assigned_gt_inds] + + neg_map = concat_anchors.new_zeros( + concat_anchors.size(0), dtype=torch.uint8) + neg_map[sampling_result.neg_inds] = 1 + + return target_map, neg_map + + def aug_test(self, feats, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + feats (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains features for all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. 
+ + Returns: + list[ndarray]: bbox results of each class + """ + return self.aug_test_bboxes(feats, img_metas, rescale=rescale) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/__init__.py b/thirdparty/mmdetection/mmdet/models/detectors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e819f7fd0b821e0a396b816a53ab0c905a9bf52e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/__init__.py @@ -0,0 +1,34 @@ +from .atss import ATSS +from .base import BaseDetector +from .cascade_rcnn import CascadeRCNN +from .cornernet import CornerNet +from .detr import DETR +from .fast_rcnn import FastRCNN +from .faster_rcnn import FasterRCNN +from .fcos import FCOS +from .fovea import FOVEA +from .fsaf import FSAF +from .gfl import GFL +from .grid_rcnn import GridRCNN +from .htc import HybridTaskCascade +from .mask_rcnn import MaskRCNN +from .mask_scoring_rcnn import MaskScoringRCNN +from .nasfcos import NASFCOS +from .paa import PAA +from .point_rend import PointRend +from .reppoints_detector import RepPointsDetector +from .retinanet import RetinaNet +from .rpn import RPN +from .single_stage import SingleStageDetector +from .two_stage import TwoStageDetector +from .vfnet import VFNet +from .yolact import YOLACT +from .yolo import YOLOV3 + +__all__ = [ + 'ATSS', 'BaseDetector', 'SingleStageDetector', 'TwoStageDetector', 'RPN', + 'FastRCNN', 'FasterRCNN', 'MaskRCNN', 'CascadeRCNN', 'HybridTaskCascade', + 'RetinaNet', 'FCOS', 'GridRCNN', 'MaskScoringRCNN', 'RepPointsDetector', + 'FOVEA', 'FSAF', 'NASFCOS', 'PointRend', 'GFL', 'CornerNet', 'PAA', + 'YOLOV3', 'YOLACT', 'VFNet', 'DETR' +] diff --git a/thirdparty/mmdetection/mmdet/models/detectors/atss.py b/thirdparty/mmdetection/mmdet/models/detectors/atss.py new file mode 100644 index 0000000000000000000000000000000000000000..db7139c6b4fcd7e83007cdb785520743ddae7066 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/atss.py @@ -0,0 +1,17 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class ATSS(SingleStageDetector): + """Implementation of `ATSS `_.""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(ATSS, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/base.py b/thirdparty/mmdetection/mmdet/models/detectors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..627e3912fce306db196d092a29078ea3dcaefa82 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/base.py @@ -0,0 +1,355 @@ +from abc import ABCMeta, abstractmethod +from collections import OrderedDict + +import mmcv +import numpy as np +import torch +import torch.distributed as dist +import torch.nn as nn +from mmcv.runner import auto_fp16 +from mmcv.utils import print_log + +from mmdet.utils import get_root_logger + + +class BaseDetector(nn.Module, metaclass=ABCMeta): + """Base class for detectors.""" + + def __init__(self): + super(BaseDetector, self).__init__() + self.fp16_enabled = False + + @property + def with_neck(self): + """bool: whether the detector has a neck""" + return hasattr(self, 'neck') and self.neck is not None + + # TODO: these properties need to be carefully handled + # for both single stage & two stage detectors + @property + def with_shared_head(self): + """bool: whether the detector has a shared head in the RoI Head""" + return hasattr(self, 'roi_head') and 
self.roi_head.with_shared_head + + @property + def with_bbox(self): + """bool: whether the detector has a bbox head""" + return ((hasattr(self, 'roi_head') and self.roi_head.with_bbox) + or (hasattr(self, 'bbox_head') and self.bbox_head is not None)) + + @property + def with_mask(self): + """bool: whether the detector has a mask head""" + return ((hasattr(self, 'roi_head') and self.roi_head.with_mask) + or (hasattr(self, 'mask_head') and self.mask_head is not None)) + + @abstractmethod + def extract_feat(self, imgs): + """Extract features from images.""" + pass + + def extract_feats(self, imgs): + """Extract features from multiple images. + + Args: + imgs (list[torch.Tensor]): A list of images. The images are + augmented from the same image but in different ways. + + Returns: + list[torch.Tensor]: Features of different images + """ + assert isinstance(imgs, list) + return [self.extract_feat(img) for img in imgs] + + def forward_train(self, imgs, img_metas, **kwargs): + """ + Args: + img (list[Tensor]): List of tensors of shape (1, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys, see + :class:`mmdet.datasets.pipelines.Collect`. + kwargs (keyword arguments): Specific to concrete implementation. + """ + # NOTE the batched image size information may be useful, e.g. + # in DETR, this is needed for the construction of masks, which is + # then used for the transformer_head. + batch_intput_shape = tuple(imgs[0].size()[-2:]) + for img_meta in img_metas: + img_meta['batch_intput_shape'] = batch_intput_shape + + async def async_simple_test(self, img, img_metas, **kwargs): + raise NotImplementedError + + @abstractmethod + def simple_test(self, img, img_metas, **kwargs): + pass + + @abstractmethod + def aug_test(self, imgs, img_metas, **kwargs): + """Test function with test time augmentation.""" + pass + + def init_weights(self, pretrained=None): + """Initialize the weights in detector. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + if pretrained is not None: + logger = get_root_logger() + print_log(f'load model from: {pretrained}', logger=logger) + + async def aforward_test(self, *, img, img_metas, **kwargs): + for var, name in [(img, 'img'), (img_metas, 'img_metas')]: + if not isinstance(var, list): + raise TypeError(f'{name} must be a list, but got {type(var)}') + + num_augs = len(img) + if num_augs != len(img_metas): + raise ValueError(f'num of augmentations ({len(img)}) ' + f'!= num of image metas ({len(img_metas)})') + # TODO: remove the restriction of samples_per_gpu == 1 when prepared + samples_per_gpu = img[0].size(0) + assert samples_per_gpu == 1 + + if num_augs == 1: + return await self.async_simple_test(img[0], img_metas[0], **kwargs) + else: + raise NotImplementedError + + def forward_test(self, imgs, img_metas, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. 
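+
+        Note:
+            With a single augmentation (``num_augs == 1``) this dispatches to
+            :func:`simple_test`; otherwise :func:`aug_test` is called, which
+            currently only supports a batch size of 1.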
+ """ + for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]: + if not isinstance(var, list): + raise TypeError(f'{name} must be a list, but got {type(var)}') + + num_augs = len(imgs) + if num_augs != len(img_metas): + raise ValueError(f'num of augmentations ({len(imgs)}) ' + f'!= num of image meta ({len(img_metas)})') + + # NOTE the batched image size information may be useful, e.g. + # in DETR, this is needed for the construction of masks, which is + # then used for the transformer_head. + for img, img_meta in zip(imgs, img_metas): + batch_size = len(img_meta) + for img_id in range(batch_size): + img_meta[img_id]['batch_intput_shape'] = tuple(img.size()[-2:]) + + if num_augs == 1: + # proposals (List[List[Tensor]]): the outer list indicates + # test-time augs (multiscale, flip, etc.) and the inner list + # indicates images in a batch. + # The Tensor should have a shape Px4, where P is the number of + # proposals. + if 'proposals' in kwargs: + kwargs['proposals'] = kwargs['proposals'][0] + return self.simple_test(imgs[0], img_metas[0], **kwargs) + else: + assert imgs[0].size(0) == 1, 'aug test does not support ' \ + 'inference with batch size ' \ + f'{imgs[0].size(0)}' + # TODO: support test augmentation for predefined proposals + assert 'proposals' not in kwargs + return self.aug_test(imgs, img_metas, **kwargs) + + @auto_fp16(apply_to=('img', )) + def forward(self, img, img_metas, return_loss=True, **kwargs): + """Calls either :func:`forward_train` or :func:`forward_test` depending + on whether ``return_loss`` is ``True``. + + Note this setting will change the expected inputs. When + ``return_loss=True``, img and img_meta are single-nested (i.e. Tensor + and List[dict]), and when ``resturn_loss=False``, img and img_meta + should be double nested (i.e. List[Tensor], List[List[dict]]), with + the outer list indicating test time augmentations. + """ + if return_loss: + return self.forward_train(img, img_metas, **kwargs) + else: + return self.forward_test(img, img_metas, **kwargs) + + def _parse_losses(self, losses): + """Parse the raw outputs (losses) of the network. + + Args: + losses (dict): Raw output of the network, which usually contain + losses and other necessary infomation. + + Returns: + tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor \ + which may be a weighted sum of all losses, log_vars contains \ + all the variables to be sent to the logger. + """ + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError( + f'{loss_name} is not a tensor or list of tensors') + + loss = sum(_value for _key, _value in log_vars.items() + if 'loss' in _key) + + log_vars['loss'] = loss + for loss_name, loss_value in log_vars.items(): + # reduce loss when distributed training + if dist.is_available() and dist.is_initialized(): + loss_value = loss_value.data.clone() + dist.all_reduce(loss_value.div_(dist.get_world_size())) + log_vars[loss_name] = loss_value.item() + + return loss, log_vars + + def train_step(self, data, optimizer): + """The iteration step during training. + + This method defines an iteration step during training, except for the + back propagation and optimizer updating, which are done in an optimizer + hook. 
Note that in some complicated cases or models, the whole process + including back propagation and optimizer updating is also defined in + this method, such as GAN. + + Args: + data (dict): The output of dataloader. + optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of + runner is passed to ``train_step()``. This argument is unused + and reserved. + + Returns: + dict: It should contain at least 3 keys: ``loss``, ``log_vars``, \ + ``num_samples``. + + - ``loss`` is a tensor for back propagation, which can be a \ + weighted sum of multiple losses. + - ``log_vars`` contains all the variables to be sent to the + logger. + - ``num_samples`` indicates the batch size (when the model is \ + DDP, it means the batch size on each GPU), which is used for \ + averaging the logs. + """ + losses = self(**data) + loss, log_vars = self._parse_losses(losses) + + outputs = dict( + loss=loss, log_vars=log_vars, num_samples=len(data['img_metas'])) + + return outputs + + def val_step(self, data, optimizer): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. + """ + losses = self(**data) + loss, log_vars = self._parse_losses(losses) + + outputs = dict( + loss=loss, log_vars=log_vars, num_samples=len(data['img_metas'])) + + return outputs + + def show_result(self, + img, + result, + score_thr=0.3, + bbox_color='green', + text_color='green', + thickness=1, + font_scale=0.5, + win_name='', + show=False, + wait_time=0, + out_file=None): + """Draw `result` over `img`. + + Args: + img (str or Tensor): The image to be displayed. + result (Tensor or tuple): The results to draw over `img` + bbox_result or (bbox_result, segm_result). + score_thr (float, optional): Minimum score of bboxes to be shown. + Default: 0.3. + bbox_color (str or tuple or :obj:`Color`): Color of bbox lines. + text_color (str or tuple or :obj:`Color`): Color of texts. + thickness (int): Thickness of lines. + font_scale (float): Font scales of texts. + win_name (str): The window name. + wait_time (int): Value of waitKey param. + Default: 0. + show (bool): Whether to show the image. + Default: False. + out_file (str or None): The filename to write the image. + Default: None. 
+ + Returns: + img (Tensor): Only if not `show` or `out_file` + """ + img = mmcv.imread(img) + img = img.copy() + if isinstance(result, tuple): + bbox_result, segm_result = result + if isinstance(segm_result, tuple): + segm_result = segm_result[0] # ms rcnn + else: + bbox_result, segm_result = result, None + bboxes = np.vstack(bbox_result) + labels = [ + np.full(bbox.shape[0], i, dtype=np.int32) + for i, bbox in enumerate(bbox_result) + ] + labels = np.concatenate(labels) + # draw segmentation masks + if segm_result is not None and len(labels) > 0: # non empty + segms = mmcv.concat_list(segm_result) + inds = np.where(bboxes[:, -1] > score_thr)[0] + np.random.seed(42) + color_masks = [ + np.random.randint(0, 256, (1, 3), dtype=np.uint8) + for _ in range(max(labels) + 1) + ] + for i in inds: + i = int(i) + color_mask = color_masks[labels[i]] + sg = segms[i] + if isinstance(sg, torch.Tensor): + sg = sg.detach().cpu().numpy() + mask = sg.astype(bool) + img[mask] = img[mask] * 0.5 + color_mask * 0.5 + # if out_file specified, do not show image in window + if out_file is not None: + show = False + # draw bounding boxes + mmcv.imshow_det_bboxes( + img, + bboxes, + labels, + class_names=self.CLASSES, + score_thr=score_thr, + bbox_color=bbox_color, + text_color=text_color, + thickness=thickness, + font_scale=font_scale, + win_name=win_name, + show=show, + wait_time=wait_time, + out_file=out_file) + + if not (show or out_file): + return img diff --git a/thirdparty/mmdetection/mmdet/models/detectors/cascade_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/cascade_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..47cc7cef984123804c4f99900d496807cde3c0e6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/cascade_rcnn.py @@ -0,0 +1,37 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class CascadeRCNN(TwoStageDetector): + r"""Implementation of `Cascade R-CNN: Delving into High Quality Object + Detection `_""" + + def __init__(self, + backbone, + neck=None, + rpn_head=None, + roi_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(CascadeRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) + + def show_result(self, data, result, **kwargs): + """Show prediction results of the detector.""" + if self.with_mask: + ms_bbox_result, ms_segm_result = result + if isinstance(ms_bbox_result, dict): + result = (ms_bbox_result['ensemble'], + ms_segm_result['ensemble']) + else: + if isinstance(result, dict): + result = result['ensemble'] + return super(CascadeRCNN, self).show_result(data, result, **kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/cornernet.py b/thirdparty/mmdetection/mmdet/models/detectors/cornernet.py new file mode 100644 index 0000000000000000000000000000000000000000..bb8ccc1465ab66d1615ca16701a533a22b156295 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/cornernet.py @@ -0,0 +1,95 @@ +import torch + +from mmdet.core import bbox2result, bbox_mapping_back +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class CornerNet(SingleStageDetector): + """CornerNet. + + This detector is the implementation of the paper `CornerNet: Detecting + Objects as Paired Keypoints `_ . 
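The mask painting in `show_result` above is plain 50/50 alpha blending: only pixels inside each instance mask are averaged with a per-class color, everything else is left untouched. A tiny self-contained illustration (the 4x4 image, mask, and color are made up for the demo):

```python
import numpy as np

# hypothetical tiny image and instance mask, to illustrate the 50/50 blend above
img = np.full((4, 4, 3), 200, dtype=np.uint8)     # light-gray image
mask = np.zeros((4, 4), dtype=bool)
mask[1:3, 1:3] = True                             # instance covers a 2x2 patch
color = np.array([[255, 0, 0]], dtype=np.uint8)   # per-class color, shape (1, 3)

img[mask] = img[mask] * 0.5 + color * 0.5         # alpha-blend only the masked pixels
print(img[1, 1], img[0, 0])                       # [227 100 100] vs untouched [200 200 200]
```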
+ """
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(CornerNet, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
+
+ def merge_aug_results(self, aug_results, img_metas):
+ """Merge augmented detection bboxes and scores.
+
+ Args:
+ aug_results (list[list[Tensor]]): Det_bboxes and det_labels of each
+ image.
+ img_metas (list[list[dict]]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+
+ Returns:
+ tuple: (bboxes, labels)
+ """
+ recovered_bboxes, aug_labels = [], []
+ for bboxes_labels, img_info in zip(aug_results, img_metas):
+ img_shape = img_info[0]['img_shape'] # using shape before padding
+ scale_factor = img_info[0]['scale_factor']
+ flip = img_info[0]['flip']
+ bboxes, labels = bboxes_labels
+ bboxes, scores = bboxes[:, :4], bboxes[:, -1:]
+ bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip)
+ recovered_bboxes.append(torch.cat([bboxes, scores], dim=-1))
+ aug_labels.append(labels)
+
+ bboxes = torch.cat(recovered_bboxes, dim=0)
+ labels = torch.cat(aug_labels)
+
+ if bboxes.shape[0] > 0:
+ out_bboxes, out_labels = self.bbox_head._bboxes_nms(
+ bboxes, labels, self.bbox_head.test_cfg)
+ else:
+ out_bboxes, out_labels = bboxes, labels
+
+ return out_bboxes, out_labels
+
+ def aug_test(self, imgs, img_metas, rescale=False):
+ """Augmented testing of CornerNet.
+
+ Args:
+ imgs (list[Tensor]): Augmented images.
+ img_metas (list[list[dict]]): Meta information of each image, e.g.,
+ image size, scaling factor, etc.
+ rescale (bool): If True, return boxes in original image space.
+ Default: False.
+
+ Note:
+ ``imgs`` must include flipped image pairs.
+
+ Returns:
+ list[list[np.ndarray]]: BBox results of each image and classes.
+ The outer list corresponds to each image. The inner list
+ corresponds to each class.
+ """
+ img_inds = list(range(len(imgs)))
+
+ assert img_metas[0][0]['flip'] + img_metas[1][0]['flip'], (
+ 'aug test must have flipped image pair')
+ aug_results = []
+ for ind, flip_ind in zip(img_inds[0::2], img_inds[1::2]):
+ img_pair = torch.cat([imgs[ind], imgs[flip_ind]])
+ x = self.extract_feat(img_pair)
+ outs = self.bbox_head(x)
+ bbox_list = self.bbox_head.get_bboxes(
+ *outs, [img_metas[ind], img_metas[flip_ind]], False, False)
+ aug_results.append(bbox_list[0])
+ aug_results.append(bbox_list[1])
+
+ bboxes, labels = self.merge_aug_results(aug_results, img_metas)
+ bbox_results = bbox2result(bboxes, labels, self.bbox_head.num_classes)
+
+ return [bbox_results]
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/detr.py b/thirdparty/mmdetection/mmdet/models/detectors/detr.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ff82a280daa0a015f662bdf2509fa11542d46d4
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/detr.py
@@ -0,0 +1,46 @@
+from mmdet.core import bbox2result
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class DETR(SingleStageDetector):
+ r"""Implementation of `DETR: End-to-End Object Detection with
+ Transformers `_"""
+
+ def __init__(self,
+ backbone,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(DETR, self).__init__(backbone, None, bbox_head, train_cfg,
+ test_cfg, pretrained)
+
+ def simple_test(self, img, img_metas, rescale=False):
+ """Test function without test time augmentation.
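`bbox_mapping_back` in `merge_aug_results` above undoes the test-time transform so detections from flipped and rescaled views land in a common frame before they are concatenated and NMS-ed. A sketch of just the horizontal-unflip step under the usual `[x1, y1, x2, y2]` convention (an assumption here; the real helper also handles scale factors and flip direction):

```python
import torch

def unflip_boxes_h(bboxes: torch.Tensor, img_shape) -> torch.Tensor:
    """Map [x1, y1, x2, y2] boxes predicted on a horizontally flipped image back."""
    h, w = img_shape[:2]
    out = bboxes.clone()
    out[:, 0] = w - bboxes[:, 2]  # new x1 mirrors the old x2
    out[:, 2] = w - bboxes[:, 0]  # new x2 mirrors the old x1
    return out

boxes = torch.tensor([[10., 20., 50., 60.]])
print(unflip_boxes_h(boxes, (100, 100)))  # tensor([[50., 20., 90., 60.]])
```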
+ + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. + """ + batch_size = len(img_metas) + assert batch_size == 1, 'Currently only batch_size 1 for inference ' \ + f'mode is supported. Found batch_size {batch_size}.' + x = self.extract_feat(img) + outs = self.bbox_head(x, img_metas) + bbox_list = self.bbox_head.get_bboxes( + *outs, img_metas, rescale=rescale) + + bbox_results = [ + bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) + for det_bboxes, det_labels in bbox_list + ] + return bbox_results diff --git a/thirdparty/mmdetection/mmdet/models/detectors/fast_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/fast_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..3d6e242767b927ed37198b6bc7862abecef99a33 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/fast_rcnn.py @@ -0,0 +1,52 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class FastRCNN(TwoStageDetector): + """Implementation of `Fast R-CNN `_""" + + def __init__(self, + backbone, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None): + super(FastRCNN, self).__init__( + backbone=backbone, + neck=neck, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) + + def forward_test(self, imgs, img_metas, proposals, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. + proposals (List[List[Tensor]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. The Tensor should have a shape Px4, where + P is the number of proposals. 
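`bbox2result`, used in `simple_test` above, only regroups a flat `(n, 5)` tensor of `[x1, y1, x2, y2, score]` rows into one numpy array per class, which is the per-image result format the evaluation code consumes. A hedged reimplementation sketch:

```python
import numpy as np
import torch

def bbox2result_sketch(bboxes: torch.Tensor, labels: torch.Tensor, num_classes: int):
    """Split (n, 5) detections into a per-class list of numpy arrays."""
    if bboxes.shape[0] == 0:
        return [np.zeros((0, 5), dtype=np.float32) for _ in range(num_classes)]
    bboxes, labels = bboxes.cpu().numpy(), labels.cpu().numpy()
    return [bboxes[labels == i, :] for i in range(num_classes)]

dets = torch.tensor([[0., 0., 10., 10., 0.9],
                     [5., 5., 20., 20., 0.8]])
lbls = torch.tensor([1, 0])
res = bbox2result_sketch(dets, lbls, num_classes=3)
print([r.shape[0] for r in res])  # [1, 1, 0] -- one detection each for classes 0 and 1
```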
+ """
+ for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]:
+ if not isinstance(var, list):
+ raise TypeError(f'{name} must be a list, but got {type(var)}')
+
+ num_augs = len(imgs)
+ if num_augs != len(img_metas):
+ raise ValueError(f'num of augmentations ({len(imgs)}) '
+ f'!= num of image meta ({len(img_metas)})')
+
+ if num_augs == 1:
+ return self.simple_test(imgs[0], img_metas[0], proposals[0],
+ **kwargs)
+ else:
+ # TODO: support test-time augmentation
+ raise NotImplementedError
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/faster_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/faster_rcnn.py
new file mode 100644
index 0000000000000000000000000000000000000000..81bad0f43a48b1022c4cd996e26d6c90be93d4d0
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/faster_rcnn.py
@@ -0,0 +1,24 @@
+from ..builder import DETECTORS
+from .two_stage import TwoStageDetector
+
+
+@DETECTORS.register_module()
+class FasterRCNN(TwoStageDetector):
+ """Implementation of `Faster R-CNN `_"""
+
+ def __init__(self,
+ backbone,
+ rpn_head,
+ roi_head,
+ train_cfg,
+ test_cfg,
+ neck=None,
+ pretrained=None):
+ super(FasterRCNN, self).__init__(
+ backbone=backbone,
+ neck=neck,
+ rpn_head=rpn_head,
+ roi_head=roi_head,
+ train_cfg=train_cfg,
+ test_cfg=test_cfg,
+ pretrained=pretrained)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/fcos.py b/thirdparty/mmdetection/mmdet/models/detectors/fcos.py
new file mode 100644
index 0000000000000000000000000000000000000000..58485c1864a11a66168b7597f345ea759ce20551
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/fcos.py
@@ -0,0 +1,17 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class FCOS(SingleStageDetector):
+ """Implementation of `FCOS `_"""
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(FCOS, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/fovea.py b/thirdparty/mmdetection/mmdet/models/detectors/fovea.py
new file mode 100644
index 0000000000000000000000000000000000000000..22a578efffbd108db644d907bae95c7c8df31f2e
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/fovea.py
@@ -0,0 +1,17 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class FOVEA(SingleStageDetector):
+ """Implementation of `FoveaBox `_"""
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(FOVEA, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/fsaf.py b/thirdparty/mmdetection/mmdet/models/detectors/fsaf.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f10fa1ae10f31e6cb5de65505b14a4fc97dd022
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/fsaf.py
@@ -0,0 +1,17 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class FSAF(SingleStageDetector):
+ """Implementation of `FSAF `_"""
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(FSAF, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/gfl.py
b/thirdparty/mmdetection/mmdet/models/detectors/gfl.py new file mode 100644 index 0000000000000000000000000000000000000000..64d65cb2dfb7a56f57e08c3fcad67e1539e1e841 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/gfl.py @@ -0,0 +1,16 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class GFL(SingleStageDetector): + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(GFL, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/grid_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/grid_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..b6145a1464cd940bd4f98eaa15f6f9ecf6a10a20 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/grid_rcnn.py @@ -0,0 +1,29 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class GridRCNN(TwoStageDetector): + """Grid R-CNN. + + This detector is the implementation of: + - Grid R-CNN (https://arxiv.org/abs/1811.12030) + - Grid R-CNN Plus: Faster and Better (https://arxiv.org/abs/1906.05688) + """ + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None): + super(GridRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/htc.py b/thirdparty/mmdetection/mmdet/models/detectors/htc.py new file mode 100644 index 0000000000000000000000000000000000000000..d9efdf420fa7373f7f1d116f8d97836d73b457bf --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/htc.py @@ -0,0 +1,15 @@ +from ..builder import DETECTORS +from .cascade_rcnn import CascadeRCNN + + +@DETECTORS.register_module() +class HybridTaskCascade(CascadeRCNN): + """Implementation of `HTC `_""" + + def __init__(self, **kwargs): + super(HybridTaskCascade, self).__init__(**kwargs) + + @property + def with_semantic(self): + """bool: whether the detector has a semantic head""" + return self.roi_head.with_semantic diff --git a/thirdparty/mmdetection/mmdet/models/detectors/mask_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/mask_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..c15a7733170e059d2825138b3812319915b7cad6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/mask_rcnn.py @@ -0,0 +1,24 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class MaskRCNN(TwoStageDetector): + """Implementation of `Mask R-CNN `_""" + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None): + super(MaskRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/mask_scoring_rcnn.py b/thirdparty/mmdetection/mmdet/models/detectors/mask_scoring_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..b6252b6e1d234a201725342a5780fade7e21957c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/mask_scoring_rcnn.py @@ -0,0 +1,27 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + 
+@DETECTORS.register_module() +class MaskScoringRCNN(TwoStageDetector): + """Mask Scoring RCNN. + + https://arxiv.org/abs/1903.00241 + """ + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None): + super(MaskScoringRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/nasfcos.py b/thirdparty/mmdetection/mmdet/models/detectors/nasfcos.py new file mode 100644 index 0000000000000000000000000000000000000000..fb0148351546f45a451ef5f7a2a9ef4024e85b7c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/nasfcos.py @@ -0,0 +1,20 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class NASFCOS(SingleStageDetector): + """NAS-FCOS: Fast Neural Architecture Search for Object Detection. + + https://arxiv.org/abs/1906.0442 + """ + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(NASFCOS, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/paa.py b/thirdparty/mmdetection/mmdet/models/detectors/paa.py new file mode 100644 index 0000000000000000000000000000000000000000..9b4bb5e0939b824d9fef7fc3bd49a0164c29613a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/paa.py @@ -0,0 +1,17 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class PAA(SingleStageDetector): + """Implementation of `PAA `_.""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(PAA, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/point_rend.py b/thirdparty/mmdetection/mmdet/models/detectors/point_rend.py new file mode 100644 index 0000000000000000000000000000000000000000..808ef2258ae88301d349db3aaa2711f223e5c971 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/point_rend.py @@ -0,0 +1,29 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class PointRend(TwoStageDetector): + """PointRend: Image Segmentation as Rendering + + This detector is the implementation of + `PointRend `_. + + """ + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None): + super(PointRend, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/reppoints_detector.py b/thirdparty/mmdetection/mmdet/models/detectors/reppoints_detector.py new file mode 100644 index 0000000000000000000000000000000000000000..a5f6be31e14488e4b8a006b7142a82c872388d82 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/reppoints_detector.py @@ -0,0 +1,22 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class RepPointsDetector(SingleStageDetector): + """RepPoints: Point Set Representation for Object Detection. 
+ + This detector is the implementation of: + - RepPoints detector (https://arxiv.org/pdf/1904.11490) + """ + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(RepPointsDetector, + self).__init__(backbone, neck, bbox_head, train_cfg, test_cfg, + pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/retinanet.py b/thirdparty/mmdetection/mmdet/models/detectors/retinanet.py new file mode 100644 index 0000000000000000000000000000000000000000..41378e8bc74bf9d5cbc7e3e6630bb1e6657049f9 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/retinanet.py @@ -0,0 +1,17 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class RetinaNet(SingleStageDetector): + """Implementation of `RetinaNet `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(RetinaNet, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/rpn.py b/thirdparty/mmdetection/mmdet/models/detectors/rpn.py new file mode 100644 index 0000000000000000000000000000000000000000..36c38afc5aaed8219d80eb1170f444410f4c8135 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/rpn.py @@ -0,0 +1,153 @@ +import mmcv +from mmcv.image import tensor2imgs + +from mmdet.core import bbox_mapping +from ..builder import DETECTORS, build_backbone, build_head, build_neck +from .base import BaseDetector + + +@DETECTORS.register_module() +class RPN(BaseDetector): + """Implementation of Region Proposal Network.""" + + def __init__(self, + backbone, + neck, + rpn_head, + train_cfg, + test_cfg, + pretrained=None): + super(RPN, self).__init__() + self.backbone = build_backbone(backbone) + self.neck = build_neck(neck) if neck is not None else None + rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None + rpn_head.update(train_cfg=rpn_train_cfg) + rpn_head.update(test_cfg=test_cfg.rpn) + self.rpn_head = build_head(rpn_head) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.init_weights(pretrained=pretrained) + + def init_weights(self, pretrained=None): + """Initialize the weights in detector. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + super(RPN, self).init_weights(pretrained) + self.backbone.init_weights(pretrained=pretrained) + if self.with_neck: + self.neck.init_weights() + self.rpn_head.init_weights() + + def extract_feat(self, img): + """Extract features. + + Args: + img (torch.Tensor): Image tensor with shape (n, c, h ,w). + + Returns: + list[torch.Tensor]: Multi-level features that may have + different resolutions. + """ + x = self.backbone(img) + if self.with_neck: + x = self.neck(x) + return x + + def forward_dummy(self, img): + """Dummy forward function.""" + x = self.extract_feat(img) + rpn_outs = self.rpn_head(x) + return rpn_outs + + def forward_train(self, + img, + img_metas, + gt_bboxes=None, + gt_bboxes_ignore=None): + """ + Args: + img (Tensor): Input images of shape (N, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): A List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. 
+ gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + if self.train_cfg.rpn.get('debug', False): + self.rpn_head.debug_imgs = tensor2imgs(img) + + x = self.extract_feat(img) + losses = self.rpn_head.forward_train(x, img_metas, gt_bboxes, None, + gt_bboxes_ignore) + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[np.ndarray]: proposals + """ + x = self.extract_feat(img) + proposal_list = self.rpn_head.simple_test_rpn(x, img_metas) + if rescale: + for proposals, meta in zip(proposal_list, img_metas): + proposals[:, :4] /= proposals.new_tensor(meta['scale_factor']) + + return [proposal.cpu().numpy() for proposal in proposal_list] + + def aug_test(self, imgs, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[np.ndarray]: proposals + """ + proposal_list = self.rpn_head.aug_test_rpn( + self.extract_feats(imgs), img_metas) + if not rescale: + for proposals, img_meta in zip(proposal_list, img_metas[0]): + img_shape = img_meta['img_shape'] + scale_factor = img_meta['scale_factor'] + flip = img_meta['flip'] + flip_direction = img_meta['flip_direction'] + proposals[:, :4] = bbox_mapping(proposals[:, :4], img_shape, + scale_factor, flip, + flip_direction) + return [proposal.cpu().numpy() for proposal in proposal_list] + + def show_result(self, data, result, dataset=None, top_k=20): + """Show RPN proposals on the image. + + Although we assume batch size is 1, this method supports arbitrary + batch size. + """ + img_tensor = data['img'][0] + img_metas = data['img_metas'][0].data[0] + imgs = tensor2imgs(img_tensor, **img_metas[0]['img_norm_cfg']) + assert len(imgs) == len(img_metas) + for img, img_meta in zip(imgs, img_metas): + h, w, _ = img_meta['img_shape'] + img_show = img[:h, :w, :] + mmcv.imshow_bboxes(img_show, result, top_k=top_k) diff --git a/thirdparty/mmdetection/mmdet/models/detectors/single_stage.py b/thirdparty/mmdetection/mmdet/models/detectors/single_stage.py new file mode 100644 index 0000000000000000000000000000000000000000..96c4acac08f26255654354703b66d1fa482e2dc2 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/single_stage.py @@ -0,0 +1,149 @@ +import torch +import torch.nn as nn + +from mmdet.core import bbox2result +from ..builder import DETECTORS, build_backbone, build_head, build_neck +from .base import BaseDetector + + +@DETECTORS.register_module() +class SingleStageDetector(BaseDetector): + """Base class for single-stage detectors. + + Single-stage detectors directly and densely predict bounding boxes on the + output features of the backbone+neck. 
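That one-sentence summary is the entire architecture: features flow straight from the backbone (and optional neck) into a dense head that emits one prediction per feature-map location. A toy skeleton of the same data flow (the conv layers are illustrative stand-ins, not the mmdet modules):

```python
import torch
import torch.nn as nn

class ToySingleStage(nn.Module):
    """Backbone -> (optional neck) -> dense head, as described above."""
    def __init__(self):
        super().__init__()
        self.backbone = nn.Conv2d(3, 16, 3, stride=2, padding=1)  # stand-in feature extractor
        self.neck = nn.Conv2d(16, 16, 1)                          # stand-in FPN-ish neck
        self.head = nn.Conv2d(16, 4 + 1, 1)                       # 4 box deltas + 1 score per location

    def forward(self, img):
        x = self.backbone(img)
        x = self.neck(x)
        return self.head(x)  # dense per-location predictions

out = ToySingleStage()(torch.randn(1, 3, 64, 64))
print(out.shape)  # torch.Size([1, 5, 32, 32]) -- one prediction per feature location
```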
+ """ + + def __init__(self, + backbone, + neck=None, + bbox_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(SingleStageDetector, self).__init__() + self.backbone = build_backbone(backbone) + if neck is not None: + self.neck = build_neck(neck) + bbox_head.update(train_cfg=train_cfg) + bbox_head.update(test_cfg=test_cfg) + self.bbox_head = build_head(bbox_head) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.init_weights(pretrained=pretrained) + + def init_weights(self, pretrained=None): + """Initialize the weights in detector. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + super(SingleStageDetector, self).init_weights(pretrained) + self.backbone.init_weights(pretrained=pretrained) + if self.with_neck: + if isinstance(self.neck, nn.Sequential): + for m in self.neck: + m.init_weights() + else: + self.neck.init_weights() + self.bbox_head.init_weights() + + def extract_feat(self, img): + """Directly extract features from the backbone+neck.""" + x = self.backbone(img) + if self.with_neck: + x = self.neck(x) + return x + + def forward_dummy(self, img): + """Used for computing network flops. + + See `mmdetection/tools/get_flops.py` + """ + x = self.extract_feat(img) + outs = self.bbox_head(x) + return outs + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None): + """ + Args: + img (Tensor): Input images of shape (N, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): A List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. + gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + super(SingleStageDetector, self).forward_train(img, img_metas) + x = self.extract_feat(img) + losses = self.bbox_head.forward_train(x, img_metas, gt_bboxes, + gt_labels, gt_bboxes_ignore) + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. + """ + x = self.extract_feat(img) + outs = self.bbox_head(x) + bbox_list = self.bbox_head.get_bboxes( + *outs, img_metas, rescale=rescale) + # skip post-processing when exporting to ONNX + if torch.onnx.is_in_onnx_export(): + return bbox_list + + bbox_results = [ + bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) + for det_bboxes, det_labels in bbox_list + ] + return bbox_results + + def aug_test(self, imgs, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + imgs (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. 
+
+ img_metas (list[list[dict]]): the outer list indicates test-time
+ augs (multiscale, flip, etc.) and the inner list indicates
+ images in a batch. Each dict has image information.
+ rescale (bool, optional): Whether to rescale the results.
+ Defaults to False.
+
+ Returns:
+ list[list[np.ndarray]]: BBox results of each image and classes.
+ The outer list corresponds to each image. The inner list
+ corresponds to each class.
+ """
+ assert hasattr(self.bbox_head, 'aug_test'), \
+ f'{self.bbox_head.__class__.__name__}' \
+ ' does not support test-time augmentation'
+
+ feats = self.extract_feats(imgs)
+ return [self.bbox_head.aug_test(feats, img_metas, rescale=rescale)]
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/two_stage.py b/thirdparty/mmdetection/mmdet/models/detectors/two_stage.py
new file mode 100644
index 0000000000000000000000000000000000000000..d66923f32605dc58f0d78a1eed92a846577cae0d
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/two_stage.py
@@ -0,0 +1,210 @@
+import torch
+import torch.nn as nn
+
+# from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler
+from ..builder import DETECTORS, build_backbone, build_head, build_neck
+from .base import BaseDetector
+
+
+@DETECTORS.register_module()
+class TwoStageDetector(BaseDetector):
+ """Base class for two-stage detectors.
+
+ Two-stage detectors typically consist of a region proposal network and a
+ task-specific regression head.
+ """
+
+ def __init__(self,
+ backbone,
+ neck=None,
+ rpn_head=None,
+ roi_head=None,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(TwoStageDetector, self).__init__()
+ self.backbone = build_backbone(backbone)
+
+ if neck is not None:
+ self.neck = build_neck(neck)
+
+ if rpn_head is not None:
+ rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None
+ rpn_head_ = rpn_head.copy()
+ rpn_head_.update(train_cfg=rpn_train_cfg, test_cfg=test_cfg.rpn)
+ self.rpn_head = build_head(rpn_head_)
+
+ if roi_head is not None:
+ # update train and test cfg here for now
+ # TODO: refactor assigner & sampler
+ rcnn_train_cfg = train_cfg.rcnn if train_cfg is not None else None
+ roi_head.update(train_cfg=rcnn_train_cfg)
+ roi_head.update(test_cfg=test_cfg.rcnn)
+ self.roi_head = build_head(roi_head)
+
+ self.train_cfg = train_cfg
+ self.test_cfg = test_cfg
+
+ self.init_weights(pretrained=pretrained)
+
+ @property
+ def with_rpn(self):
+ """bool: whether the detector has RPN"""
+ return hasattr(self, 'rpn_head') and self.rpn_head is not None
+
+ @property
+ def with_roi_head(self):
+ """bool: whether the detector has a RoI head"""
+ return hasattr(self, 'roi_head') and self.roi_head is not None
+
+ def init_weights(self, pretrained=None):
+ """Initialize the weights in detector.
+
+ Args:
+ pretrained (str, optional): Path to pre-trained weights.
+ Defaults to None.
+ """
+ super(TwoStageDetector, self).init_weights(pretrained)
+ self.backbone.init_weights(pretrained=pretrained)
+ if self.with_neck:
+ if isinstance(self.neck, nn.Sequential):
+ for m in self.neck:
+ m.init_weights()
+ else:
+ self.neck.init_weights()
+ if self.with_rpn:
+ self.rpn_head.init_weights()
+ if self.with_roi_head:
+ self.roi_head.init_weights(pretrained)
+
+ def extract_feat(self, img):
+ """Directly extract features from the backbone+neck."""
+ x = self.backbone(img)
+ if self.with_neck:
+ x = self.neck(x)
+ return x
+
+ def forward_dummy(self, img):
+ """Used for computing network flops.
+ + See `mmdetection/tools/get_flops.py` + """ + outs = () + # backbone + x = self.extract_feat(img) + # rpn + if self.with_rpn: + rpn_outs = self.rpn_head(x) + outs = outs + (rpn_outs, ) + proposals = torch.randn(1000, 4).to(img.device) + # roi_head + roi_outs = self.roi_head.forward_dummy(x, proposals) + outs = outs + (roi_outs, ) + return outs + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + proposals=None, + **kwargs): + """ + Args: + img (Tensor): of shape (N, C, H, W) encoding input images. + Typically these should be mean centered and std scaled. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + proposals : override rpn proposals with custom proposals. Use when + `with_rpn` is False. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + x = self.extract_feat(img) + + losses = dict() + + # RPN forward and loss + if self.with_rpn: + proposal_cfg = self.train_cfg.get('rpn_proposal', + self.test_cfg.rpn) + rpn_losses, proposal_list = self.rpn_head.forward_train( + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=gt_bboxes_ignore, + proposal_cfg=proposal_cfg) + losses.update(rpn_losses) + else: + proposal_list = proposals + + roi_losses = self.roi_head.forward_train(x, img_metas, proposal_list, + gt_bboxes, gt_labels, + gt_bboxes_ignore, gt_masks, + **kwargs) + losses.update(roi_losses) + + return losses + + async def async_simple_test(self, + img, + img_meta, + proposals=None, + rescale=False): + """Async test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + x = self.extract_feat(img) + + if proposals is None: + proposal_list = await self.rpn_head.async_simple_test_rpn( + x, img_meta) + else: + proposal_list = proposals + + return await self.roi_head.async_simple_test( + x, proposal_list, img_meta, rescale=rescale) + + def simple_test(self, img, img_metas, proposals=None, rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + x = self.extract_feat(img) + + if proposals is None: + proposal_list = self.rpn_head.simple_test_rpn(x, img_metas) + else: + proposal_list = proposals + + return self.roi_head.simple_test( + x, proposal_list, img_metas, rescale=rescale) + + def aug_test(self, imgs, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
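`forward_train` above is the canonical two-stage recipe: the RPN both contributes its own losses and emits the proposals the RoI head is trained on, and the two loss dicts are merged into one. A stub-level sketch of that control flow (the stand-in functions and loss values are made up; only the wiring mirrors the code above):

```python
# stand-in heads, each mirroring one stage of the flow above
def rpn_forward_train(feats, gt_bboxes):
    rpn_losses = {'loss_rpn_cls': 0.3, 'loss_rpn_bbox': 0.2}
    proposals = [[0, 0, 10, 10]]  # boxes the RoI head will be trained on
    return rpn_losses, proposals

def roi_forward_train(feats, proposals, gt_bboxes, gt_labels):
    return {'loss_cls': 0.5, 'loss_bbox': 0.4}

def two_stage_train_step(feats, gt_bboxes, gt_labels):
    losses = {}
    rpn_losses, proposal_list = rpn_forward_train(feats, gt_bboxes)
    losses.update(rpn_losses)  # the RPN supervises itself...
    losses.update(roi_forward_train(feats, proposal_list, gt_bboxes, gt_labels))
    return losses              # ...and feeds its proposals to the RoI head

print(two_stage_train_step(None, [], []))
# {'loss_rpn_cls': 0.3, 'loss_rpn_bbox': 0.2, 'loss_cls': 0.5, 'loss_bbox': 0.4}
```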
+ """
+ x = self.extract_feats(imgs)
+ proposal_list = self.rpn_head.aug_test_rpn(x, img_metas)
+ return self.roi_head.aug_test(
+ x, proposal_list, img_metas, rescale=rescale)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/vfnet.py b/thirdparty/mmdetection/mmdet/models/detectors/vfnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..e23f89674c919921219ffd3486587a2d3c318fbd
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/vfnet.py
@@ -0,0 +1,18 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class VFNet(SingleStageDetector):
+ """Implementation of `VarifocalNet
+ (VFNet).`_"""
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(VFNet, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
diff --git a/thirdparty/mmdetection/mmdet/models/detectors/yolact.py b/thirdparty/mmdetection/mmdet/models/detectors/yolact.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f6c7ff09b5adb792a2560cd5b4ee374f42f2a59
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/detectors/yolact.py
@@ -0,0 +1,146 @@
+import torch
+
+from mmdet.core import bbox2result
+from ..builder import DETECTORS, build_head
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class YOLACT(SingleStageDetector):
+ """Implementation of `YOLACT `_"""
+
+ def __init__(self,
+ backbone,
+ neck,
+ bbox_head,
+ segm_head,
+ mask_head,
+ train_cfg=None,
+ test_cfg=None,
+ pretrained=None):
+ super(YOLACT, self).__init__(backbone, neck, bbox_head, train_cfg,
+ test_cfg, pretrained)
+ self.segm_head = build_head(segm_head)
+ self.mask_head = build_head(mask_head)
+ self.init_segm_mask_weights()
+
+ def init_segm_mask_weights(self):
+ """Initialize weights of the YOLACT segm head and YOLACT mask head."""
+ self.segm_head.init_weights()
+ self.mask_head.init_weights()
+
+ def forward_dummy(self, img):
+ """Used for computing network flops.
+
+ See `mmdetection/tools/get_flops.py`
+ """
+ raise NotImplementedError
+
+ def forward_train(self,
+ img,
+ img_metas,
+ gt_bboxes,
+ gt_labels,
+ gt_bboxes_ignore=None,
+ gt_masks=None):
+ """
+ Args:
+ img (Tensor): of shape (N, C, H, W) encoding input images.
+ Typically these should be mean centered and std scaled.
+ img_metas (list[dict]): list of image info dict where each dict
+ has: 'img_shape', 'scale_factor', 'flip', and may also contain
+ 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
+ For details on the values of these keys see
+ `mmdet/datasets/pipelines/formatting.py:Collect`.
+ gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+ shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+ gt_labels (list[Tensor]): class indices corresponding to each box
+ gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+ boxes can be ignored when computing the loss.
+ gt_masks (None | Tensor) : true segmentation masks for each box
+ used if the architecture supports a segmentation task.
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # convert Bitmap mask or Polygon Mask to Tensor here + gt_masks = [ + gt_mask.to_tensor(dtype=torch.uint8, device=img.device) + for gt_mask in gt_masks + ] + + x = self.extract_feat(img) + + cls_score, bbox_pred, coeff_pred = self.bbox_head(x) + bbox_head_loss_inputs = (cls_score, bbox_pred) + (gt_bboxes, gt_labels, + img_metas) + losses, sampling_results = self.bbox_head.loss( + *bbox_head_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + + segm_head_outs = self.segm_head(x[0]) + loss_segm = self.segm_head.loss(segm_head_outs, gt_masks, gt_labels) + losses.update(loss_segm) + + mask_pred = self.mask_head(x[0], coeff_pred, gt_bboxes, img_metas, + sampling_results) + loss_mask = self.mask_head.loss(mask_pred, gt_masks, gt_bboxes, + img_metas, sampling_results) + losses.update(loss_mask) + + # check NaN and Inf + for loss_name in losses.keys(): + assert torch.isfinite(torch.stack(losses[loss_name]))\ + .all().item(), '{} becomes infinite or NaN!'\ + .format(loss_name) + + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test time augmentation.""" + x = self.extract_feat(img) + + cls_score, bbox_pred, coeff_pred = self.bbox_head(x) + + bbox_inputs = (cls_score, bbox_pred, + coeff_pred) + (img_metas, self.test_cfg, rescale) + det_bboxes, det_labels, det_coeffs = self.bbox_head.get_bboxes( + *bbox_inputs) + bbox_results = [ + bbox2result(det_bbox, det_label, self.bbox_head.num_classes) + for det_bbox, det_label in zip(det_bboxes, det_labels) + ] + + num_imgs = len(img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.mask_head.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_preds = self.mask_head(x[0], det_coeffs, _bboxes, img_metas) + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], det_labels[i], img_metas[i], rescale) + segm_results.append(segm_result) + return list(zip(bbox_results, segm_results)) + + def aug_test(self, imgs, img_metas, rescale=False): + """Test with augmentations.""" + raise NotImplementedError diff --git a/thirdparty/mmdetection/mmdet/models/detectors/yolo.py b/thirdparty/mmdetection/mmdet/models/detectors/yolo.py new file mode 100644 index 0000000000000000000000000000000000000000..240aab20f857befe25e64114300ebb15a66c6a70 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/detectors/yolo.py @@ -0,0 +1,18 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. 
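The NaN/Inf assertion at the end of YOLACT's `forward_train` above is cheap insurance: a single non-finite loss silently poisons every subsequent gradient step, so failing fast with the offending loss name is worth the check. A generic standalone version of the same guard (illustrative; handles both tensors and lists of tensors, like the loss dicts here):

```python
import torch

def assert_losses_finite(losses: dict) -> None:
    """Raise early, naming the loss, if any value is NaN or Inf."""
    for name, value in losses.items():
        values = value if isinstance(value, (list, tuple)) else [value]
        stacked = torch.stack([v.float() for v in values])
        assert torch.isfinite(stacked).all().item(), \
            f'{name} becomes infinite or NaN!'

assert_losses_finite({'loss_cls': torch.tensor(0.5),
                      'loss_mask': [torch.tensor(0.1), torch.tensor(0.2)]})  # passes
# assert_losses_finite({'loss_bbox': torch.tensor(float('nan'))})  # would raise
```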
+ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class YOLOV3(SingleStageDetector): + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(YOLOV3, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/thirdparty/mmdetection/mmdet/models/losses/__init__.py b/thirdparty/mmdetection/mmdet/models/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bb887d3735df692aa0c7b3496c18add6b9c52391 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/__init__.py @@ -0,0 +1,28 @@ +from .accuracy import Accuracy, accuracy +from .ae_loss import AssociativeEmbeddingLoss +from .balanced_l1_loss import BalancedL1Loss, balanced_l1_loss +from .cross_entropy_loss import (CrossEntropyLoss, binary_cross_entropy, + cross_entropy, mask_cross_entropy) +from .focal_loss import FocalLoss, sigmoid_focal_loss +from .gaussian_focal_loss import GaussianFocalLoss +from .gfocal_loss import DistributionFocalLoss, QualityFocalLoss +from .ghm_loss import GHMC, GHMR +from .iou_loss import (BoundedIoULoss, CIoULoss, DIoULoss, GIoULoss, IoULoss, + bounded_iou_loss, iou_loss) +from .mse_loss import MSELoss, mse_loss +from .pisa_loss import carl_loss, isr_p +from .smooth_l1_loss import L1Loss, SmoothL1Loss, l1_loss, smooth_l1_loss +from .utils import reduce_loss, weight_reduce_loss, weighted_loss +from .varifocal_loss import VarifocalLoss + +__all__ = [ + 'accuracy', 'Accuracy', 'cross_entropy', 'binary_cross_entropy', + 'mask_cross_entropy', 'CrossEntropyLoss', 'sigmoid_focal_loss', + 'FocalLoss', 'smooth_l1_loss', 'SmoothL1Loss', 'balanced_l1_loss', + 'BalancedL1Loss', 'mse_loss', 'MSELoss', 'iou_loss', 'bounded_iou_loss', + 'IoULoss', 'BoundedIoULoss', 'GIoULoss', 'DIoULoss', 'CIoULoss', 'GHMC', + 'GHMR', 'reduce_loss', 'weight_reduce_loss', 'weighted_loss', 'L1Loss', + 'l1_loss', 'isr_p', 'carl_loss', 'AssociativeEmbeddingLoss', + 'GaussianFocalLoss', 'QualityFocalLoss', 'DistributionFocalLoss', + 'VarifocalLoss' +] diff --git a/thirdparty/mmdetection/mmdet/models/losses/accuracy.py b/thirdparty/mmdetection/mmdet/models/losses/accuracy.py new file mode 100644 index 0000000000000000000000000000000000000000..924ebbed7739b47515e27c03cd814a7c5db200d1 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/accuracy.py @@ -0,0 +1,76 @@ +import torch.nn as nn + + +def accuracy(pred, target, topk=1, thresh=None): + """Calculate accuracy according to the prediction and target. + + Args: + pred (torch.Tensor): The model prediction, shape (N, num_class) + target (torch.Tensor): The target of each prediction, shape (N, ) + topk (int | tuple[int], optional): If the predictions in ``topk`` + matches the target, the predictions will be regarded as + correct ones. Defaults to 1. + thresh (float, optional): If not None, predictions with scores under + this threshold are considered incorrect. Default to None. + + Returns: + float | tuple[float]: If the input ``topk`` is a single integer, + the function will return a single float as accuracy. If + ``topk`` is a tuple containing multiple integers, the + function will return a tuple containing accuracies of + each ``topk`` number. + """ + assert isinstance(topk, (int, tuple)) + if isinstance(topk, int): + topk = (topk, ) + return_single = True + else: + return_single = False + + maxk = max(topk) + if pred.size(0) == 0: + accu = [pred.new_tensor(0.) 
for i in range(len(topk))]
+ return accu[0] if return_single else accu
+ assert pred.ndim == 2 and target.ndim == 1
+ assert pred.size(0) == target.size(0)
+ assert maxk <= pred.size(1), \
+ f'maxk {maxk} exceeds pred dimension {pred.size(1)}'
+ pred_value, pred_label = pred.topk(maxk, dim=1)
+ pred_label = pred_label.t() # transpose to shape (maxk, N)
+ correct = pred_label.eq(target.view(1, -1).expand_as(pred_label))
+ if thresh is not None:
+ # Only prediction values larger than thresh are counted as correct
+ correct = correct & (pred_value > thresh).t()
+ res = []
+ for k in topk:
+ correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
+ res.append(correct_k.mul_(100.0 / pred.size(0)))
+ return res[0] if return_single else res
+
+
+class Accuracy(nn.Module):
+
+ def __init__(self, topk=(1, ), thresh=None):
+ """Module to calculate the accuracy.
+
+ Args:
+ topk (tuple, optional): The criterion used to calculate the
+ accuracy. Defaults to (1,).
+ thresh (float, optional): If not None, predictions with scores
+ under this threshold are considered incorrect. Defaults to None.
+ """
+ super().__init__()
+ self.topk = topk
+ self.thresh = thresh
+
+ def forward(self, pred, target):
+ """Forward function to calculate accuracy.
+
+ Args:
+ pred (torch.Tensor): Prediction of models.
+ target (torch.Tensor): Target for each prediction.
+
+ Returns:
+ tuple[float]: The accuracies under different topk criterions.
+ """
+ return accuracy(pred, target, self.topk, self.thresh)
diff --git a/thirdparty/mmdetection/mmdet/models/losses/ae_loss.py b/thirdparty/mmdetection/mmdet/models/losses/ae_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..6077652cff7e3dcd13f81ce8d2a85b52ac8b99f8
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/ae_loss.py
@@ -0,0 +1,100 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ..builder import LOSSES
+
+
+def ae_loss_per_image(tl_preds, br_preds, match):
+ """Associative Embedding Loss in one image.
+
+ Associative Embedding Loss includes two parts: pull loss and push loss.
+ Pull loss makes embedding vectors from the same object closer to each
+ other. Push loss distinguishes embedding vectors from different objects,
+ and makes the gap between them large enough.
+
+ During computing, usually there are 3 cases:
+ - no object in image: both pull loss and push loss will be 0.
+ - one object in image: push loss will be 0 and pull loss is computed
+ by the two corners of the only object.
+ - more than one object in image: pull loss is computed by corner pairs
+ from each object, push loss is computed by each object with all
+ other objects. We use a confusion matrix with 0 on the diagonal to
+ compute the push loss.
+
+ Args:
+ tl_preds (tensor): Embedding feature map of the top-left corner.
+ br_preds (tensor): Embedding feature map of the bottom-right corner.
+ match (list): Downsampled coordinates pair of each ground truth box.
+ """
+
+ tl_list, br_list, me_list = [], [], []
+ if len(match) == 0: # no object in image
+ pull_loss = tl_preds.sum() * 0.
+ push_loss = tl_preds.sum() * 0.
+ else: + for m in match: + [tl_y, tl_x], [br_y, br_x] = m + tl_e = tl_preds[:, tl_y, tl_x].view(-1, 1) + br_e = br_preds[:, br_y, br_x].view(-1, 1) + tl_list.append(tl_e) + br_list.append(br_e) + me_list.append((tl_e + br_e) / 2.0) + + tl_list = torch.cat(tl_list) + br_list = torch.cat(br_list) + me_list = torch.cat(me_list) + + assert tl_list.size() == br_list.size() + + # N is object number in image, M is dimension of embedding vector + N, M = tl_list.size() + + pull_loss = (tl_list - me_list).pow(2) + (br_list - me_list).pow(2) + pull_loss = pull_loss.sum() / N + + margin = 1 # exp setting of CornerNet, details in section 3.3 of paper + + # confusion matrix of push loss + conf_mat = me_list.expand((N, N, M)).permute(1, 0, 2) - me_list + conf_weight = 1 - torch.eye(N).type_as(me_list) + conf_mat = conf_weight * (margin - conf_mat.sum(-1).abs()) + + if N > 1: # more than one object in current image + push_loss = F.relu(conf_mat).sum() / (N * (N - 1)) + else: + push_loss = tl_preds.sum() * 0. + + return pull_loss, push_loss + + +@LOSSES.register_module() +class AssociativeEmbeddingLoss(nn.Module): + """Associative Embedding Loss. + + More details can be found in + `Associative Embedding `_ and + `CornerNet `_ . + Code is modified from `kp_utils.py `_ # noqa: E501 + + Args: + pull_weight (float): Loss weight for corners from same object. + push_weight (float): Loss weight for corners from different object. + """ + + def __init__(self, pull_weight=0.25, push_weight=0.25): + super(AssociativeEmbeddingLoss, self).__init__() + self.pull_weight = pull_weight + self.push_weight = push_weight + + def forward(self, pred, target, match): + """Forward function.""" + batch = pred.size(0) + pull_all, push_all = 0.0, 0.0 + for i in range(batch): + pull, push = ae_loss_per_image(pred[i], target[i], match[i]) + + pull_all += self.pull_weight * pull + push_all += self.push_weight * push + + return pull_all, push_all diff --git a/thirdparty/mmdetection/mmdet/models/losses/balanced_l1_loss.py b/thirdparty/mmdetection/mmdet/models/losses/balanced_l1_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..3790a80b8a72e8405c068ba4097ae0046b68e7f5 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/balanced_l1_loss.py @@ -0,0 +1,118 @@ +import numpy as np +import torch +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + + +@weighted_loss +def balanced_l1_loss(pred, + target, + beta=1.0, + alpha=0.5, + gamma=1.5, + reduction='mean'): + """Calculate balanced L1 loss. + + Please see the `Libra R-CNN `_ + + Args: + pred (torch.Tensor): The prediction with shape (N, 4). + target (torch.Tensor): The learning target of the prediction with + shape (N, 4). + beta (float): The loss is a piecewise function of prediction and target + and ``beta`` serves as a threshold for the difference between the + prediction and target. Defaults to 1.0. + alpha (float): The denominator ``alpha`` in the balanced L1 loss. + Defaults to 0.5. + gamma (float): The ``gamma`` in the balanced L1 loss. + Defaults to 1.5. + reduction (str, optional): The method that reduces the loss to a + scalar. Options are "none", "mean" and "sum". 
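Going back to the pull/push computation in `ae_loss_per_image` above: pull pins each corner pair to its mean embedding, while push applies a hinge of width `margin` to the pairwise distances between object means (the diagonal is zeroed so an object never repels itself). A worked two-object example of the same arithmetic (toy embeddings, standalone, not the mmdet module):

```python
import torch

# toy embeddings for two objects (one corner pair each), embedding dim M = 1
tl = torch.tensor([[1.0], [4.0]])    # top-left embeddings, shape (N, M)
br = torch.tensor([[1.2], [3.8]])    # bottom-right embeddings
me = (tl + br) / 2.0                 # per-object mean embedding

N, M = tl.shape
pull = ((tl - me).pow(2) + (br - me).pow(2)).sum() / N   # corners hug their mean

margin = 1
conf_mat = me.expand(N, N, M).permute(1, 0, 2) - me      # pairwise mean differences
conf_weight = 1 - torch.eye(N)                           # zero out the diagonal
conf_mat = conf_weight * (margin - conf_mat.sum(-1).abs())
push = torch.relu(conf_mat).sum() / (N * (N - 1))        # means repel inside the margin

print(pull.item(), push.item())  # ~0.02 pull (corners agree), 0.0 push (means 2.8 apart)
```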
+ + Returns: + torch.Tensor: The calculated loss + """ + assert beta > 0 + assert pred.size() == target.size() and target.numel() > 0 + + diff = torch.abs(pred - target) + b = np.e**(gamma / alpha) - 1 + loss = torch.where( + diff < beta, alpha / b * + (b * diff + 1) * torch.log(b * diff / beta + 1) - alpha * diff, + gamma * diff + gamma / b - alpha * beta) + + return loss + + +@LOSSES.register_module() +class BalancedL1Loss(nn.Module): + """Balanced L1 Loss. + + arXiv: https://arxiv.org/pdf/1904.02701.pdf (CVPR 2019) + + Args: + alpha (float): The denominator ``alpha`` in the balanced L1 loss. + Defaults to 0.5. + gamma (float): The ``gamma`` in the balanced L1 loss. Defaults to 1.5. + beta (float, optional): The loss is a piecewise function of prediction + and target. ``beta`` serves as a threshold for the difference + between the prediction and target. Defaults to 1.0. + reduction (str, optional): The method that reduces the loss to a + scalar. Options are "none", "mean" and "sum". + loss_weight (float, optional): The weight of the loss. Defaults to 1.0 + """ + + def __init__(self, + alpha=0.5, + gamma=1.5, + beta=1.0, + reduction='mean', + loss_weight=1.0): + super(BalancedL1Loss, self).__init__() + self.alpha = alpha + self.gamma = gamma + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function of loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, 4). + target (torch.Tensor): The learning target of the prediction with + shape (N, 4). + weight (torch.Tensor, optional): Sample-wise loss weight with + shape (N, ). + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Options are "none", "mean" and "sum". + + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_bbox = self.loss_weight * balanced_l1_loss( + pred, + target, + weight, + alpha=self.alpha, + gamma=self.gamma, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_bbox diff --git a/thirdparty/mmdetection/mmdet/models/losses/cross_entropy_loss.py b/thirdparty/mmdetection/mmdet/models/losses/cross_entropy_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..d1134576d39e66dc316ddcbce062d8eebe47df06 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/cross_entropy_loss.py @@ -0,0 +1,200 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weight_reduce_loss + + +def cross_entropy(pred, + label, + weight=None, + reduction='mean', + avg_factor=None, + class_weight=None): + """Calculate the CrossEntropy loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + label (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + reduction (str, optional): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (list[float], optional): The weight for each class. 
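The piecewise `torch.where` above is continuous at `diff == beta` exactly because of the choice `b = e**(gamma/alpha) - 1`: substituting `ln(b + 1) = gamma/alpha` makes both branches evaluate to `gamma*beta + gamma/b - alpha*beta` at the breakpoint. A quick numeric check (numpy only, independent of mmdet):

```python
import numpy as np

alpha, gamma, beta = 0.5, 1.5, 1.0
b = np.e ** (gamma / alpha) - 1

d = beta  # evaluate both branches at the breakpoint
inner = alpha / b * (b * d + 1) * np.log(b * d / beta + 1) - alpha * d
outer = gamma * d + gamma / b - alpha * beta
print(np.isclose(inner, outer))  # True -- the loss is continuous at diff == beta
```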
+
+ Returns:
+ torch.Tensor: The calculated loss
+ """
+ # element-wise losses
+ loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none')
+
+ # apply weights and do the reduction
+ if weight is not None:
+ weight = weight.float()
+ loss = weight_reduce_loss(
+ loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
+
+ return loss
+
+
+def _expand_onehot_labels(labels, label_weights, label_channels):
+ bin_labels = labels.new_full((labels.size(0), label_channels), 0)
+ inds = torch.nonzero(
+ (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze()
+ if inds.numel() > 0:
+ bin_labels[inds, labels[inds]] = 1
+
+ if label_weights is None:
+ bin_label_weights = None
+ else:
+ bin_label_weights = label_weights.view(-1, 1).expand(
+ label_weights.size(0), label_channels)
+
+ return bin_labels, bin_label_weights
+
+
+def binary_cross_entropy(pred,
+ label,
+ weight=None,
+ reduction='mean',
+ avg_factor=None,
+ class_weight=None):
+ """Calculate the binary CrossEntropy loss.
+
+ Args:
+ pred (torch.Tensor): The prediction with shape (N, 1).
+ label (torch.Tensor): The learning label of the prediction.
+ weight (torch.Tensor, optional): Sample-wise loss weight.
+ reduction (str, optional): The method used to reduce the loss.
+ Options are "none", "mean" and "sum".
+ avg_factor (int, optional): Average factor that is used to average
+ the loss. Defaults to None.
+ class_weight (list[float], optional): The weight for each class.
+
+ Returns:
+ torch.Tensor: The calculated loss
+ """
+ if pred.dim() != label.dim():
+ label, weight = _expand_onehot_labels(label, weight, pred.size(-1))
+
+ # weighted element-wise losses
+ if weight is not None:
+ weight = weight.float()
+ loss = F.binary_cross_entropy_with_logits(
+ pred, label.float(), pos_weight=class_weight, reduction='none')
+ # do the reduction for the weighted loss
+ loss = weight_reduce_loss(
+ loss, weight, reduction=reduction, avg_factor=avg_factor)
+
+ return loss
+
+
+def mask_cross_entropy(pred,
+ target,
+ label,
+ reduction='mean',
+ avg_factor=None,
+ class_weight=None):
+ """Calculate the CrossEntropy loss for masks.
+
+ Args:
+ pred (torch.Tensor): The prediction with shape (N, C), C is the number
+ of classes.
+ target (torch.Tensor): The learning label of the prediction.
+ label (torch.Tensor): ``label`` indicates the class label of the mask's
+ corresponding object. It is used to select the mask of the class
+ the object belongs to when the mask prediction is not
+ class-agnostic.
+ reduction (str, optional): The method used to reduce the loss.
+ Options are "none", "mean" and "sum".
+ avg_factor (int, optional): Average factor that is used to average
+ the loss. Defaults to None.
+ class_weight (list[float], optional): The weight for each class.
+
+ Returns:
+ torch.Tensor: The calculated loss
+ """
+ # TODO: handle these two reserved arguments
+ assert reduction == 'mean' and avg_factor is None
+ num_rois = pred.size()[0]
+ inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device)
+ pred_slice = pred[inds, label].squeeze(1)
+ return F.binary_cross_entropy_with_logits(
+ pred_slice, target, weight=class_weight, reduction='mean')[None]
+
+
+@LOSSES.register_module()
+class CrossEntropyLoss(nn.Module):
+
+ def __init__(self,
+ use_sigmoid=False,
+ use_mask=False,
+ reduction='mean',
+ class_weight=None,
+ loss_weight=1.0):
+ """CrossEntropyLoss.
+
+ Args:
+ use_sigmoid (bool, optional): Whether the prediction uses sigmoid
+ or softmax. Defaults to False.
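`_expand_onehot_labels` above is what lets `binary_cross_entropy` accept plain class indices: valid indices are scattered into a one-hot matrix so shapes match the `(N, C)` logits, while out-of-range entries (e.g. a background placeholder equal to `num_classes`) stay all-zero and act as pure negatives. A small standalone demo of the expansion:

```python
import torch

labels = torch.tensor([0, 2, 3])  # class indices; 3 == num_classes plays background here
num_classes = 3

one_hot = labels.new_full((labels.size(0), num_classes), 0)
inds = torch.nonzero((labels >= 0) & (labels < num_classes), as_tuple=False).squeeze()
one_hot[inds, labels[inds]] = 1

print(one_hot)
# tensor([[1, 0, 0],
#         [0, 0, 1],
#         [0, 0, 0]])  <- out-of-range label stays all-zero (treated as negative)
```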
+            use_mask (bool, optional): Whether to use mask cross entropy loss.
+                Defaults to False.
+            reduction (str, optional): The method used to reduce the loss.
+                Options are "none", "mean" and "sum". Defaults to 'mean'.
+            class_weight (list[float], optional): Weight of each class.
+                Defaults to None.
+            loss_weight (float, optional): Weight of the loss. Defaults to 1.0.
+        """
+        super(CrossEntropyLoss, self).__init__()
+        assert (use_sigmoid is False) or (use_mask is False)
+        self.use_sigmoid = use_sigmoid
+        self.use_mask = use_mask
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+        self.class_weight = class_weight
+
+        if self.use_sigmoid:
+            self.cls_criterion = binary_cross_entropy
+        elif self.use_mask:
+            self.cls_criterion = mask_cross_entropy
+        else:
+            self.cls_criterion = cross_entropy
+
+    def forward(self,
+                cls_score,
+                label,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        """Forward function.
+
+        Args:
+            cls_score (torch.Tensor): The prediction.
+            label (torch.Tensor): The learning label of the prediction.
+            weight (torch.Tensor, optional): Sample-wise loss weight.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Options are "none", "mean" and "sum".
+
+        Returns:
+            torch.Tensor: The calculated loss
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if self.class_weight is not None:
+            class_weight = cls_score.new_tensor(self.class_weight)
+        else:
+            class_weight = None
+        loss_cls = self.loss_weight * self.cls_criterion(
+            cls_score,
+            label,
+            weight,
+            class_weight=class_weight,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss_cls
diff --git a/thirdparty/mmdetection/mmdet/models/losses/focal_loss.py b/thirdparty/mmdetection/mmdet/models/losses/focal_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..be252e5792a6698e9faea9f4661881fb5a66ca88
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/focal_loss.py
@@ -0,0 +1,157 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.ops import sigmoid_focal_loss as _sigmoid_focal_loss
+
+from ..builder import LOSSES
+from .utils import weight_reduce_loss
+
+
+# This method is only for debugging
+def py_sigmoid_focal_loss(pred,
+                          target,
+                          weight=None,
+                          gamma=2.0,
+                          alpha=0.25,
+                          reduction='mean',
+                          avg_factor=None):
+    """PyTorch version of `Focal Loss <https://arxiv.org/abs/1708.02002>`_.
+
+    Args:
+        pred (torch.Tensor): The prediction with shape (N, C), C is the
+            number of classes
+        target (torch.Tensor): The learning label of the prediction.
+        weight (torch.Tensor, optional): Sample-wise loss weight.
+        gamma (float, optional): The gamma for calculating the modulating
+            factor. Defaults to 2.0.
+        alpha (float, optional): A balanced form for Focal Loss.
+            Defaults to 0.25.
+        reduction (str, optional): The method used to reduce the loss into
+            a scalar. Defaults to 'mean'.
+        avg_factor (int, optional): Average factor that is used to average
+            the loss. Defaults to None.
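For orientation, the `CrossEntropyLoss` wrapper above only selects between the three criteria; the actual arithmetic of the sigmoid and softmax paths can be sketched with bare `torch` (editor's sketch, made-up shapes, not repo code):

```python
import torch
import torch.nn.functional as F

pred = torch.randn(4, 10)                 # (N, C) raw logits
label = torch.randint(0, 10, (4,))        # (N,) class indices

# use_sigmoid=False path: softmax cross entropy over classes.
softmax_ce = F.cross_entropy(pred, label, reduction='none')        # (N,)

# use_sigmoid=True path: per-class BCE on logits against one-hot targets.
onehot = F.one_hot(label, num_classes=10).float()
sigmoid_bce = F.binary_cross_entropy_with_logits(
    pred, onehot, reduction='none').sum(-1)                        # (N,)
print(softmax_ce.shape, sigmoid_bce.shape)
```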
+ """ + pred_sigmoid = pred.sigmoid() + target = target.type_as(pred) + pt = (1 - pred_sigmoid) * target + pred_sigmoid * (1 - target) + focal_weight = (alpha * target + (1 - alpha) * + (1 - target)) * pt.pow(gamma) + loss = F.binary_cross_entropy_with_logits( + pred, target, reduction='none') * focal_weight + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +def sigmoid_focal_loss(pred, + target, + weight=None, + gamma=2.0, + alpha=0.25, + reduction='mean', + avg_factor=None): + r"""A warpper of cuda version `Focal Loss + `_. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + target (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 0.25. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + """ + # Function.apply does not accept keyword arguments, so the decorator + # "weighted_loss" is not applicable + loss = _sigmoid_focal_loss(pred.contiguous(), target, gamma, alpha, None, + 'none') + if weight is not None: + if weight.shape != loss.shape: + if weight.size(0) == loss.size(0): + # For most cases, weight is of shape (num_priors, ), + # which means it does not have the second axis num_class + weight = weight.view(-1, 1) + else: + # Sometimes, weight per anchor per class is also needed. e.g. + # in FSAF. But it may be flattened of shape + # (num_priors x num_class, ), while loss is still of shape + # (num_priors, num_class). + assert weight.numel() == loss.numel() + weight = weight.view(loss.size(0), -1) + assert weight.ndim == loss.ndim + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@LOSSES.register_module() +class FocalLoss(nn.Module): + + def __init__(self, + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + reduction='mean', + loss_weight=1.0): + """`Focal Loss `_ + + Args: + use_sigmoid (bool, optional): Whether to the prediction is + used for sigmoid or softmax. Defaults to True. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 0.25. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and + "sum". + loss_weight (float, optional): Weight of loss. Defaults to 1.0. + """ + super(FocalLoss, self).__init__() + assert use_sigmoid is True, 'Only sigmoid focal loss supported now.' + self.use_sigmoid = use_sigmoid + self.gamma = gamma + self.alpha = alpha + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. 
+                Options are "none", "mean" and "sum".
+
+        Returns:
+            torch.Tensor: The calculated loss
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if self.use_sigmoid:
+            loss_cls = self.loss_weight * sigmoid_focal_loss(
+                pred,
+                target,
+                weight,
+                gamma=self.gamma,
+                alpha=self.alpha,
+                reduction=reduction,
+                avg_factor=avg_factor)
+        else:
+            raise NotImplementedError
+        return loss_cls
diff --git a/thirdparty/mmdetection/mmdet/models/losses/gaussian_focal_loss.py b/thirdparty/mmdetection/mmdet/models/losses/gaussian_focal_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..54307a1e0f542e3e49876d41f0774ceaff6b5073
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/gaussian_focal_loss.py
@@ -0,0 +1,89 @@
+import torch.nn as nn
+
+from ..builder import LOSSES
+from .utils import weighted_loss
+
+
+@weighted_loss
+def gaussian_focal_loss(pred, gaussian_target, alpha=2.0, gamma=4.0):
+    """`Focal Loss <https://arxiv.org/abs/1708.02002>`_ for targets in
+    gaussian distribution.
+
+    Args:
+        pred (torch.Tensor): The prediction.
+        gaussian_target (torch.Tensor): The learning target of the prediction
+            in gaussian distribution.
+        alpha (float, optional): A balanced form for Focal Loss.
+            Defaults to 2.0.
+        gamma (float, optional): The gamma for calculating the modulating
+            factor. Defaults to 4.0.
+    """
+    eps = 1e-12
+    pos_weights = gaussian_target.eq(1)
+    neg_weights = (1 - gaussian_target).pow(gamma)
+    pos_loss = -(pred + eps).log() * (1 - pred).pow(alpha) * pos_weights
+    neg_loss = -(1 - pred + eps).log() * pred.pow(alpha) * neg_weights
+    return pos_loss + neg_loss
+
+
+@LOSSES.register_module()
+class GaussianFocalLoss(nn.Module):
+    """GaussianFocalLoss is a variant of focal loss.
+
+    More details can be found in the `paper
+    <https://arxiv.org/abs/1808.01244>`_
+    Code is modified from `kp_utils.py
+    <https://github.com/princeton-vl/CornerNet/blob/master/models/py_utils/kp_utils.py>`_  # noqa: E501
+    Note that the target in GaussianFocalLoss is a gaussian heatmap,
+    not a 0/1 binary target.
+
+    Args:
+        alpha (float): Power of prediction.
+        gamma (float): Power of target for negative samples.
+        reduction (str): Options are "none", "mean" and "sum".
+        loss_weight (float): Loss weight of current loss.
+    """
+
+    def __init__(self,
+                 alpha=2.0,
+                 gamma=4.0,
+                 reduction='mean',
+                 loss_weight=1.0):
+        super(GaussianFocalLoss, self).__init__()
+        self.alpha = alpha
+        self.gamma = gamma
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None):
+        """Forward function.
+
+        Args:
+            pred (torch.Tensor): The prediction.
+            target (torch.Tensor): The learning target of the prediction
+                in gaussian distribution.
+            weight (torch.Tensor, optional): The weight of loss for each
+                prediction. Defaults to None.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Defaults to None.
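A tiny sketch of the heatmap case handled by `gaussian_focal_loss` above (editor's illustration with made-up values; `pred` is assumed already sigmoid-activated): only exact-1 cells count as positives, every other cell is a negative down-weighted by `(1 - target)**gamma`.

```python
import torch

eps, alpha, gamma = 1e-12, 2.0, 4.0
xs = torch.arange(5).float()
# 5x5 gaussian heatmap peaked at the center cell (2, 2).
target = torch.exp(-((xs.view(-1, 1) - 2) ** 2 + (xs.view(1, -1) - 2) ** 2) / 2.0)
target[2, 2] = 1.0
pred = torch.rand(5, 5)                   # stand-in for sigmoid outputs

pos = target.eq(1)
neg_w = (1 - target).pow(gamma)
pos_loss = -(pred + eps).log() * (1 - pred).pow(alpha) * pos
neg_loss = -(1 - pred + eps).log() * pred.pow(alpha) * neg_w
print((pos_loss + neg_loss).sum())
```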
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_reg = self.loss_weight * gaussian_focal_loss( + pred, + target, + weight, + alpha=self.alpha, + gamma=self.gamma, + reduction=reduction, + avg_factor=avg_factor) + return loss_reg diff --git a/thirdparty/mmdetection/mmdet/models/losses/gfocal_loss.py b/thirdparty/mmdetection/mmdet/models/losses/gfocal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..73102da820866d39ffa676a8e854fadf8da6f594 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/gfocal_loss.py @@ -0,0 +1,185 @@ +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weighted_loss + + +@weighted_loss +def quality_focal_loss(pred, target, beta=2.0): + r"""Quality Focal Loss (QFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted joint representation of classification + and quality (IoU) estimation with shape (N, C), C is the number of + classes. + target (tuple([torch.Tensor])): Target category label with shape (N,) + and target quality label with shape (N,). + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + + Returns: + torch.Tensor: Loss tensor with shape (N,). + """ + assert len(target) == 2, """target for QFL must be a tuple of two elements, + including category label and quality label, respectively""" + # label denotes the category id, score denotes the quality score + label, score = target + + # negatives are supervised by 0 quality score + pred_sigmoid = pred.sigmoid() + scale_factor = pred_sigmoid + zerolabel = scale_factor.new_zeros(pred.shape) + loss = F.binary_cross_entropy_with_logits( + pred, zerolabel, reduction='none') * scale_factor.pow(beta) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = pred.size(1) + pos = ((label >= 0) & (label < bg_class_ind)).nonzero().squeeze(1) + pos_label = label[pos].long() + # positives are supervised by bbox quality (IoU) score + scale_factor = score[pos] - pred_sigmoid[pos, pos_label] + loss[pos, pos_label] = F.binary_cross_entropy_with_logits( + pred[pos, pos_label], score[pos], + reduction='none') * scale_factor.abs().pow(beta) + + loss = loss.sum(dim=1, keepdim=False) + return loss + + +@weighted_loss +def distribution_focal_loss(pred, label): + r"""Distribution Focal Loss (DFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding boxes + (before softmax) with shape (N, n+1), n is the max value of the + integral set `{0, ..., n}` in paper. + label (torch.Tensor): Target distance label for bounding boxes with + shape (N,). + + Returns: + torch.Tensor: Loss tensor with shape (N,). + """ + dis_left = label.long() + dis_right = dis_left + 1 + weight_left = dis_right.float() - label + weight_right = label - dis_left.float() + loss = F.cross_entropy(pred, dis_left, reduction='none') * weight_left \ + + F.cross_entropy(pred, dis_right, reduction='none') * weight_right + return loss + + +@LOSSES.register_module() +class QualityFocalLoss(nn.Module): + r"""Quality Focal Loss (QFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. 
+ + Args: + use_sigmoid (bool): Whether sigmoid operation is conducted in QFL. + Defaults to True. + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, + use_sigmoid=True, + beta=2.0, + reduction='mean', + loss_weight=1.0): + super(QualityFocalLoss, self).__init__() + assert use_sigmoid is True, 'Only sigmoid in QFL supported now.' + self.use_sigmoid = use_sigmoid + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): Predicted joint representation of + classification and quality (IoU) estimation with shape (N, C), + C is the number of classes. + target (tuple([torch.Tensor])): Target category label with shape + (N,) and target quality label with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.use_sigmoid: + loss_cls = self.loss_weight * quality_focal_loss( + pred, + target, + weight, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor) + else: + raise NotImplementedError + return loss_cls + + +@LOSSES.register_module() +class DistributionFocalLoss(nn.Module): + r"""Distribution Focal Loss (DFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. + + Args: + reduction (str): Options are `'none'`, `'mean'` and `'sum'`. + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, reduction='mean', loss_weight=1.0): + super(DistributionFocalLoss, self).__init__() + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding + boxes (before softmax) with shape (N, n+1), n is the max value + of the integral set `{0, ..., n}` in paper. + target (torch.Tensor): Target distance label for bounding boxes + with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. 
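A worked sketch of the interpolation in `distribution_focal_loss` above (editor's illustration, made-up values): a continuous distance label `y` sits between the integer bins `floor(y)` and `floor(y) + 1`, and the two cross entropies are blended with linear weights that sum to 1.

```python
import torch
import torch.nn.functional as F

pred = torch.randn(3, 8)                  # distributions over bins 0..7
label = torch.tensor([2.3, 0.0, 6.9])     # continuous targets in [0, 7)

dis_left = label.long()
dis_right = dis_left + 1
w_left = dis_right.float() - label        # e.g. 0.7 for y = 2.3
w_right = label - dis_left.float()        # e.g. 0.3 for y = 2.3
loss = (F.cross_entropy(pred, dis_left, reduction='none') * w_left
        + F.cross_entropy(pred, dis_right, reduction='none') * w_right)
print(loss)
```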
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_cls = self.loss_weight * distribution_focal_loss( + pred, target, weight, reduction=reduction, avg_factor=avg_factor) + return loss_cls diff --git a/thirdparty/mmdetection/mmdet/models/losses/ghm_loss.py b/thirdparty/mmdetection/mmdet/models/losses/ghm_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..8969a23fd98bb746415f96ac5e4ad9e37ba3af52 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/ghm_loss.py @@ -0,0 +1,172 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES + + +def _expand_onehot_labels(labels, label_weights, label_channels): + bin_labels = labels.new_full((labels.size(0), label_channels), 0) + inds = torch.nonzero( + (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze() + if inds.numel() > 0: + bin_labels[inds, labels[inds]] = 1 + bin_label_weights = label_weights.view(-1, 1).expand( + label_weights.size(0), label_channels) + return bin_labels, bin_label_weights + + +# TODO: code refactoring to make it consistent with other losses +@LOSSES.register_module() +class GHMC(nn.Module): + """GHM Classification Loss. + + Details of the theorem can be viewed in the paper + `Gradient Harmonized Single-stage Detector + `_. + + Args: + bins (int): Number of the unit regions for distribution calculation. + momentum (float): The parameter for moving average. + use_sigmoid (bool): Can only be true for BCE based loss now. + loss_weight (float): The weight of the total GHM-C loss. + """ + + def __init__(self, bins=10, momentum=0, use_sigmoid=True, loss_weight=1.0): + super(GHMC, self).__init__() + self.bins = bins + self.momentum = momentum + edges = torch.arange(bins + 1).float() / bins + self.register_buffer('edges', edges) + self.edges[-1] += 1e-6 + if momentum > 0: + acc_sum = torch.zeros(bins) + self.register_buffer('acc_sum', acc_sum) + self.use_sigmoid = use_sigmoid + if not self.use_sigmoid: + raise NotImplementedError + self.loss_weight = loss_weight + + def forward(self, pred, target, label_weight, *args, **kwargs): + """Calculate the GHM-C loss. + + Args: + pred (float tensor of size [batch_num, class_num]): + The direct prediction of classification fc layer. + target (float tensor of size [batch_num, class_num]): + Binary class target for each sample. + label_weight (float tensor of size [batch_num, class_num]): + the value is 1 if the sample is valid and 0 if ignored. + Returns: + The gradient harmonized loss. 
+ """ + # the target should be binary class label + if pred.dim() != target.dim(): + target, label_weight = _expand_onehot_labels( + target, label_weight, pred.size(-1)) + target, label_weight = target.float(), label_weight.float() + edges = self.edges + mmt = self.momentum + weights = torch.zeros_like(pred) + + # gradient length + g = torch.abs(pred.sigmoid().detach() - target) + + valid = label_weight > 0 + tot = max(valid.float().sum().item(), 1.0) + n = 0 # n valid bins + for i in range(self.bins): + inds = (g >= edges[i]) & (g < edges[i + 1]) & valid + num_in_bin = inds.sum().item() + if num_in_bin > 0: + if mmt > 0: + self.acc_sum[i] = mmt * self.acc_sum[i] \ + + (1 - mmt) * num_in_bin + weights[inds] = tot / self.acc_sum[i] + else: + weights[inds] = tot / num_in_bin + n += 1 + if n > 0: + weights = weights / n + + loss = F.binary_cross_entropy_with_logits( + pred, target, weights, reduction='sum') / tot + return loss * self.loss_weight + + +# TODO: code refactoring to make it consistent with other losses +@LOSSES.register_module() +class GHMR(nn.Module): + """GHM Regression Loss. + + Details of the theorem can be viewed in the paper + `Gradient Harmonized Single-stage Detector + `_. + + Args: + mu (float): The parameter for the Authentic Smooth L1 loss. + bins (int): Number of the unit regions for distribution calculation. + momentum (float): The parameter for moving average. + loss_weight (float): The weight of the total GHM-R loss. + """ + + def __init__(self, mu=0.02, bins=10, momentum=0, loss_weight=1.0): + super(GHMR, self).__init__() + self.mu = mu + self.bins = bins + edges = torch.arange(bins + 1).float() / bins + self.register_buffer('edges', edges) + self.edges[-1] = 1e3 + self.momentum = momentum + if momentum > 0: + acc_sum = torch.zeros(bins) + self.register_buffer('acc_sum', acc_sum) + self.loss_weight = loss_weight + + # TODO: support reduction parameter + def forward(self, pred, target, label_weight, avg_factor=None): + """Calculate the GHM-R loss. + + Args: + pred (float tensor of size [batch_num, 4 (* class_num)]): + The prediction of box regression layer. Channel number can be 4 + or 4 * class_num depending on whether it is class-agnostic. + target (float tensor of size [batch_num, 4 (* class_num)]): + The target regression values with the same size of pred. + label_weight (float tensor of size [batch_num, 4 (* class_num)]): + The weight of each sample, 0 if ignored. + Returns: + The gradient harmonized loss. 
+ """ + mu = self.mu + edges = self.edges + mmt = self.momentum + + # ASL1 loss + diff = pred - target + loss = torch.sqrt(diff * diff + mu * mu) - mu + + # gradient length + g = torch.abs(diff / torch.sqrt(mu * mu + diff * diff)).detach() + weights = torch.zeros_like(g) + + valid = label_weight > 0 + tot = max(label_weight.float().sum().item(), 1.0) + n = 0 # n: valid bins + for i in range(self.bins): + inds = (g >= edges[i]) & (g < edges[i + 1]) & valid + num_in_bin = inds.sum().item() + if num_in_bin > 0: + n += 1 + if mmt > 0: + self.acc_sum[i] = mmt * self.acc_sum[i] \ + + (1 - mmt) * num_in_bin + weights[inds] = tot / self.acc_sum[i] + else: + weights[inds] = tot / num_in_bin + if n > 0: + weights /= n + + loss = loss * weights + loss = loss.sum() / tot + return loss * self.loss_weight diff --git a/thirdparty/mmdetection/mmdet/models/losses/iou_loss.py b/thirdparty/mmdetection/mmdet/models/losses/iou_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..dfddfe20844012d0c241044edbd6dcefb794fb1f --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/iou_loss.py @@ -0,0 +1,417 @@ +import math + +import torch +import torch.nn as nn + +from mmdet.core import bbox_overlaps +from ..builder import LOSSES +from .utils import weighted_loss + + +@weighted_loss +def iou_loss(pred, target, eps=1e-6): + """IoU loss. + + Computing the IoU loss between a set of predicted bboxes and target bboxes. + The loss is calculated as negative log of IoU. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + + Return: + torch.Tensor: Loss tensor. + """ + ious = bbox_overlaps(pred, target, is_aligned=True).clamp(min=eps) + loss = -ious.log() + return loss + + +@weighted_loss +def bounded_iou_loss(pred, target, beta=0.2, eps=1e-3): + """BIoULoss. + + This is an implementation of paper + `Improving Object Localization with Fitness NMS and Bounded IoU Loss. + `_. + + Args: + pred (torch.Tensor): Predicted bboxes. + target (torch.Tensor): Target bboxes. + beta (float): beta parameter in smoothl1. + eps (float): eps to avoid NaN. + """ + pred_ctrx = (pred[:, 0] + pred[:, 2]) * 0.5 + pred_ctry = (pred[:, 1] + pred[:, 3]) * 0.5 + pred_w = pred[:, 2] - pred[:, 0] + pred_h = pred[:, 3] - pred[:, 1] + with torch.no_grad(): + target_ctrx = (target[:, 0] + target[:, 2]) * 0.5 + target_ctry = (target[:, 1] + target[:, 3]) * 0.5 + target_w = target[:, 2] - target[:, 0] + target_h = target[:, 3] - target[:, 1] + + dx = target_ctrx - pred_ctrx + dy = target_ctry - pred_ctry + + loss_dx = 1 - torch.max( + (target_w - 2 * dx.abs()) / + (target_w + 2 * dx.abs() + eps), torch.zeros_like(dx)) + loss_dy = 1 - torch.max( + (target_h - 2 * dy.abs()) / + (target_h + 2 * dy.abs() + eps), torch.zeros_like(dy)) + loss_dw = 1 - torch.min(target_w / (pred_w + eps), pred_w / + (target_w + eps)) + loss_dh = 1 - torch.min(target_h / (pred_h + eps), pred_h / + (target_h + eps)) + loss_comb = torch.stack([loss_dx, loss_dy, loss_dw, loss_dh], + dim=-1).view(loss_dx.size(0), -1) + + loss = torch.where(loss_comb < beta, 0.5 * loss_comb * loss_comb / beta, + loss_comb - 0.5 * beta) + return loss + + +@weighted_loss +def giou_loss(pred, target, eps=1e-7): + r"""`Generalized Intersection over Union: A Metric and A Loss for Bounding + Box Regression `_. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). 
+ target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + + Return: + Tensor: Loss tensor. + """ + gious = bbox_overlaps(pred, target, mode='giou', is_aligned=True, eps=eps) + loss = 1 - gious + return loss + + +@weighted_loss +def diou_loss(pred, target, eps=1e-7): + r"""`Implementation of Distance-IoU Loss: Faster and Better + Learning for Bounding Box Regression, https://arxiv.org/abs/1911.08287`_. + + Code is modified from https://github.com/Zzh-tju/DIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw**2 + ch**2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2))**2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2))**2 / 4 + rho2 = left + right + + # DIoU + dious = ious - rho2 / c2 + loss = 1 - dious + return loss + + +@weighted_loss +def ciou_loss(pred, target, eps=1e-7): + r"""`Implementation of paper `Enhancing Geometric Factors into + Model Learning and Inference for Object Detection and Instance + Segmentation `_. + + Code is modified from https://github.com/Zzh-tju/CIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw**2 + ch**2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2))**2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2))**2 / 4 + rho2 = left + right + + factor = 4 / math.pi**2 + v = factor * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + + # CIoU + cious = ious - (rho2 / c2 + v**2 / (1 - ious + v)) + loss = 1 - cious + return loss + + +@LOSSES.register_module() +class IoULoss(nn.Module): + """IoULoss. 
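Illustrative numbers for the IoU-family losses above (hand-computed editor's sketch, not repo output): for two disjoint boxes IoU is 0, so a `-log(IoU)`-style loss saturates, while GIoU and DIoU still produce a distance-dependent penalty, which is the motivation for the variants.

```python
import torch

def iou_xyxy(a, b):
    # Plain IoU of two single boxes in (x1, y1, x2, y2) form.
    lt = torch.max(a[:2], b[:2])
    rb = torch.min(a[2:], b[2:])
    wh = (rb - lt).clamp(min=0)
    inter = wh[0] * wh[1]
    area_a = (a[2] - a[0]) * (a[3] - a[1])
    area_b = (b[2] - b[0]) * (b[3] - b[1])
    return inter / (area_a + area_b - inter)

a = torch.tensor([0., 0., 2., 2.])
b = torch.tensor([3., 0., 5., 2.])         # disjoint: IoU == 0
print(iou_xyxy(a, b))                       # tensor(0.)

# GIoU subtracts (enclosing area - union) / enclosing area: the enclosing
# box is 5x2 = 10 and the union is 8, so GIoU = 0 - 2/10 = -0.2, loss = 1.2.
```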
+ + Computing the IoU loss between a set of predicted bboxes and target bboxes. + + Args: + eps (float): Eps to avoid log(0). + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Weight of loss. + """ + + def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0): + super(IoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. Options are "none", "mean" and "sum". + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if (weight is not None) and (not torch.any(weight > 0)) and ( + reduction != 'none'): + return (pred * weight).sum() # 0 + if weight is not None and weight.dim() > 1: + # TODO: remove this in the future + # reduce the weight of shape (n, 4) to (n,) to match the + # iou_loss of shape (n,) + assert weight.shape == pred.shape + weight = weight.mean(-1) + loss = self.loss_weight * iou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class BoundedIoULoss(nn.Module): + + def __init__(self, beta=0.2, eps=1e-3, reduction='mean', loss_weight=1.0): + super(BoundedIoULoss, self).__init__() + self.beta = beta + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + if weight is not None and not torch.any(weight > 0): + return (pred * weight).sum() # 0 + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss = self.loss_weight * bounded_iou_loss( + pred, + target, + weight, + beta=self.beta, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class GIoULoss(nn.Module): + + def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0): + super(GIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + if weight is not None and not torch.any(weight > 0): + return (pred * weight).sum() # 0 + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if weight is not None and weight.dim() > 1: + # TODO: remove this in the future + # reduce the weight of shape (n, 4) to (n,) to match the + # giou_loss of shape (n,) + assert weight.shape == pred.shape + weight = weight.mean(-1) + loss = self.loss_weight * giou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class DIoULoss(nn.Module): + + def 
__init__(self, eps=1e-6, reduction='mean', loss_weight=1.0):
+        super(DIoULoss, self).__init__()
+        self.eps = eps
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        if weight is not None and not torch.any(weight > 0):
+            return (pred * weight).sum()  # 0
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if weight is not None and weight.dim() > 1:
+            # TODO: remove this in the future
+            # reduce the weight of shape (n, 4) to (n,) to match the
+            # diou_loss of shape (n,)
+            assert weight.shape == pred.shape
+            weight = weight.mean(-1)
+        loss = self.loss_weight * diou_loss(
+            pred,
+            target,
+            weight,
+            eps=self.eps,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss
+
+
+@LOSSES.register_module()
+class CIoULoss(nn.Module):
+
+    def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0):
+        super(CIoULoss, self).__init__()
+        self.eps = eps
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        if weight is not None and not torch.any(weight > 0):
+            return (pred * weight).sum()  # 0
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if weight is not None and weight.dim() > 1:
+            # TODO: remove this in the future
+            # reduce the weight of shape (n, 4) to (n,) to match the
+            # ciou_loss of shape (n,)
+            assert weight.shape == pred.shape
+            weight = weight.mean(-1)
+        loss = self.loss_weight * ciou_loss(
+            pred,
+            target,
+            weight,
+            eps=self.eps,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss
diff --git a/thirdparty/mmdetection/mmdet/models/losses/mse_loss.py b/thirdparty/mmdetection/mmdet/models/losses/mse_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..68d05752a245548862f4c9919448d4fb8dc1b8ca
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/mse_loss.py
@@ -0,0 +1,49 @@
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ..builder import LOSSES
+from .utils import weighted_loss
+
+
+@weighted_loss
+def mse_loss(pred, target):
+    """Wrapper of MSE loss."""
+    return F.mse_loss(pred, target, reduction='none')
+
+
+@LOSSES.register_module()
+class MSELoss(nn.Module):
+    """MSELoss.
+
+    Args:
+        reduction (str, optional): The method that reduces the loss to a
+            scalar. Options are "none", "mean" and "sum".
+        loss_weight (float, optional): The weight of the loss. Defaults to 1.0.
+    """
+
+    def __init__(self, reduction='mean', loss_weight=1.0):
+        super().__init__()
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self, pred, target, weight=None, avg_factor=None):
+        """Forward function of loss.
+
+        Args:
+            pred (torch.Tensor): The prediction.
+            target (torch.Tensor): The learning target of the prediction.
+            weight (torch.Tensor, optional): Weight of the loss for each
+                prediction. Defaults to None.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+
+        Returns:
+            torch.Tensor: The calculated loss
+        """
+        loss = self.loss_weight * mse_loss(
+            pred,
+            target,
+            weight,
+            reduction=self.reduction,
+            avg_factor=avg_factor)
+        return loss
diff --git a/thirdparty/mmdetection/mmdet/models/losses/pisa_loss.py b/thirdparty/mmdetection/mmdet/models/losses/pisa_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb907a2ea9a0470063d5bb1cfdf5c7c3a054995f
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/pisa_loss.py
@@ -0,0 +1,180 @@
+import torch
+
+from mmdet.core import bbox_overlaps
+
+
+def isr_p(cls_score,
+          bbox_pred,
+          bbox_targets,
+          rois,
+          sampling_results,
+          loss_cls,
+          bbox_coder,
+          k=2,
+          bias=0,
+          num_class=80):
+    """Importance-based Sample Reweighting (ISR_P), positive part.
+
+    Args:
+        cls_score (Tensor): Predicted classification scores.
+        bbox_pred (Tensor): Predicted bbox deltas.
+        bbox_targets (tuple[Tensor]): A tuple of bbox targets, which are
+            labels, label_weights, bbox_targets and bbox_weights,
+            respectively.
+        rois (Tensor): Anchors (single_stage) in shape (n, 4) or RoIs
+            (two_stage) in shape (n, 5).
+        sampling_results (obj): Sampling results.
+        loss_cls (func): Classification loss func of the head.
+        bbox_coder (obj): BBox coder of the head.
+        k (float): Power of the non-linear mapping.
+        bias (float): Shift of the non-linear mapping.
+        num_class (int): Number of classes, default: 80.
+
+    Return:
+        tuple([Tensor]): labels, imp_based_label_weights, bbox_targets,
+            bbox_target_weights
+    """
+
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+    pos_label_inds = ((labels >= 0) &
+                      (labels < num_class)).nonzero().reshape(-1)
+    pos_labels = labels[pos_label_inds]
+
+    # if no positive samples, return the original targets
+    num_pos = float(pos_label_inds.size(0))
+    if num_pos == 0:
+        return labels, label_weights, bbox_targets, bbox_weights
+
+    # merge pos_assigned_gt_inds of each image into a single tensor
+    gts = list()
+    last_max_gt = 0
+    for i in range(len(sampling_results)):
+        gt_i = sampling_results[i].pos_assigned_gt_inds
+        gts.append(gt_i + last_max_gt)
+        if len(gt_i) != 0:
+            last_max_gt = gt_i.max() + 1
+    gts = torch.cat(gts)
+    assert len(gts) == num_pos
+
+    cls_score = cls_score.detach()
+    bbox_pred = bbox_pred.detach()
+
+    # For single stage detectors, rois here indicate anchors, in shape (N, 4)
+    # For two stage detectors, rois are in shape (N, 5)
+    if rois.size(-1) == 5:
+        pos_rois = rois[pos_label_inds][:, 1:]
+    else:
+        pos_rois = rois[pos_label_inds]
+
+    if bbox_pred.size(-1) > 4:
+        bbox_pred = bbox_pred.view(bbox_pred.size(0), -1, 4)
+        pos_delta_pred = bbox_pred[pos_label_inds, pos_labels].view(-1, 4)
+    else:
+        pos_delta_pred = bbox_pred[pos_label_inds].view(-1, 4)
+
+    # compute iou of the predicted bbox and the corresponding GT
+    pos_delta_target = bbox_targets[pos_label_inds].view(-1, 4)
+    pos_bbox_pred = bbox_coder.decode(pos_rois, pos_delta_pred)
+    target_bbox_pred = bbox_coder.decode(pos_rois, pos_delta_target)
+    ious = bbox_overlaps(pos_bbox_pred, target_bbox_pred, is_aligned=True)
+
+    pos_imp_weights = label_weights[pos_label_inds]
+    # Two steps to compute IoU-HLR.
Samples are first sorted by IoU locally, + # then sorted again within the same-rank group + max_l_num = pos_labels.bincount().max() + for label in pos_labels.unique(): + l_inds = (pos_labels == label).nonzero().view(-1) + l_gts = gts[l_inds] + for t in l_gts.unique(): + t_inds = l_inds[l_gts == t] + t_ious = ious[t_inds] + _, t_iou_rank_idx = t_ious.sort(descending=True) + _, t_iou_rank = t_iou_rank_idx.sort() + ious[t_inds] += max_l_num - t_iou_rank.float() + l_ious = ious[l_inds] + _, l_iou_rank_idx = l_ious.sort(descending=True) + _, l_iou_rank = l_iou_rank_idx.sort() # IoU-HLR + # linearly map HLR to label weights + pos_imp_weights[l_inds] *= (max_l_num - l_iou_rank.float()) / max_l_num + + pos_imp_weights = (bias + pos_imp_weights * (1 - bias)).pow(k) + + # normalize to make the new weighted loss value equal to the original loss + pos_loss_cls = loss_cls( + cls_score[pos_label_inds], pos_labels, reduction_override='none') + if pos_loss_cls.dim() > 1: + ori_pos_loss_cls = pos_loss_cls * label_weights[pos_label_inds][:, + None] + new_pos_loss_cls = pos_loss_cls * pos_imp_weights[:, None] + else: + ori_pos_loss_cls = pos_loss_cls * label_weights[pos_label_inds] + new_pos_loss_cls = pos_loss_cls * pos_imp_weights + pos_loss_cls_ratio = ori_pos_loss_cls.sum() / new_pos_loss_cls.sum() + pos_imp_weights = pos_imp_weights * pos_loss_cls_ratio + label_weights[pos_label_inds] = pos_imp_weights + + bbox_targets = labels, label_weights, bbox_targets, bbox_weights + return bbox_targets + + +def carl_loss(cls_score, + labels, + bbox_pred, + bbox_targets, + loss_bbox, + k=1, + bias=0.2, + avg_factor=None, + sigmoid=False, + num_class=80): + """Classification-Aware Regression Loss (CARL). + + Args: + cls_score (Tensor): Predicted classification scores. + labels (Tensor): Targets of classification. + bbox_pred (Tensor): Predicted bbox deltas. + bbox_targets (Tensor): Target of bbox regression. + loss_bbox (func): Regression loss func of the head. + bbox_coder (obj): BBox coder of the head. + k (float): Power of the non-linear mapping. + bias (float): Shift of the non-linear mapping. + avg_factor (int): Average factor used in regression loss. + sigmoid (bool): Activation of the classification score. + num_class (int): Number of classes, default: 80. + + Return: + dict: CARL loss dict. + """ + pos_label_inds = ((labels >= 0) & + (labels < num_class)).nonzero().reshape(-1) + if pos_label_inds.numel() == 0: + return dict(loss_carl=cls_score.sum()[None] * 0.) 
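A toy numeric sketch (editor's illustration; the rank-derived weights are made up) of the non-linear mapping `(bias + w * (1 - bias)) ** k` that `isr_p` applies above: top-ranked positives (high IoU-HLR) keep weight near 1 while low-ranked ones are suppressed.

```python
import torch

k, bias = 2.0, 0.0
rank_weight = torch.tensor([1.0, 0.75, 0.5, 0.25])   # hypothetical HLR weights
imp = (bias + rank_weight * (1 - bias)).pow(k)
print(imp)    # tensor([1.0000, 0.5625, 0.2500, 0.0625])
```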
+ pos_labels = labels[pos_label_inds] + + # multiply pos_cls_score with the corresponding bbox weight + # and remain gradient + if sigmoid: + pos_cls_score = cls_score.sigmoid()[pos_label_inds, pos_labels] + else: + pos_cls_score = cls_score.softmax(-1)[pos_label_inds, pos_labels] + carl_loss_weights = (bias + (1 - bias) * pos_cls_score).pow(k) + + # normalize carl_loss_weight to make its sum equal to num positive + num_pos = float(pos_cls_score.size(0)) + weight_ratio = num_pos / carl_loss_weights.sum() + carl_loss_weights *= weight_ratio + + if avg_factor is None: + avg_factor = bbox_targets.size(0) + # if is class agnostic, bbox pred is in shape (N, 4) + # otherwise, bbox pred is in shape (N, #classes, 4) + if bbox_pred.size(-1) > 4: + bbox_pred = bbox_pred.view(bbox_pred.size(0), -1, 4) + pos_bbox_preds = bbox_pred[pos_label_inds, pos_labels] + else: + pos_bbox_preds = bbox_pred[pos_label_inds] + ori_loss_reg = loss_bbox( + pos_bbox_preds, + bbox_targets[pos_label_inds], + reduction_override='none') / avg_factor + loss_carl = (ori_loss_reg * carl_loss_weights[:, None]).sum() + return dict(loss_carl=loss_carl[None]) diff --git a/thirdparty/mmdetection/mmdet/models/losses/smooth_l1_loss.py b/thirdparty/mmdetection/mmdet/models/losses/smooth_l1_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..ad5e8a4dfbf77697ce8fefdd02731dd8b29592a8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/smooth_l1_loss.py @@ -0,0 +1,136 @@ +import torch +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + + +@weighted_loss +def smooth_l1_loss(pred, target, beta=1.0): + """Smooth L1 loss. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + beta (float, optional): The threshold in the piecewise function. + Defaults to 1.0. + + Returns: + torch.Tensor: Calculated loss + """ + assert beta > 0 + assert pred.size() == target.size() and target.numel() > 0 + diff = torch.abs(pred - target) + loss = torch.where(diff < beta, 0.5 * diff * diff / beta, + diff - 0.5 * beta) + return loss + + +@weighted_loss +def l1_loss(pred, target): + """L1 loss. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + + Returns: + torch.Tensor: Calculated loss + """ + assert pred.size() == target.size() and target.numel() > 0 + loss = torch.abs(pred - target) + return loss + + +@LOSSES.register_module() +class SmoothL1Loss(nn.Module): + """Smooth L1 loss. + + Args: + beta (float, optional): The threshold in the piecewise function. + Defaults to 1.0. + reduction (str, optional): The method to reduce the loss. + Options are "none", "mean" and "sum". Defaults to "mean". + loss_weight (float, optional): The weight of loss. + """ + + def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): + super(SmoothL1Loss, self).__init__() + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. 
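A quick check of the piecewise `smooth_l1_loss` definition above (editor's sketch, made-up values): quadratic `0.5 * d**2 / beta` below `beta`, linear `d - 0.5 * beta` above, and both branches equal `0.5 * beta` at the threshold.

```python
import torch

beta = 1.0
d = torch.tensor([0.25, 1.0, 3.0])        # |pred - target|
loss = torch.where(d < beta, 0.5 * d * d / beta, d - 0.5 * beta)
print(loss)   # tensor([0.0312, 0.5000, 2.5000])
```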
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Defaults to None.
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        loss_bbox = self.loss_weight * smooth_l1_loss(
+            pred,
+            target,
+            weight,
+            beta=self.beta,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss_bbox
+
+
+@LOSSES.register_module()
+class L1Loss(nn.Module):
+    """L1 loss.
+
+    Args:
+        reduction (str, optional): The method to reduce the loss.
+            Options are "none", "mean" and "sum".
+        loss_weight (float, optional): The weight of loss.
+    """
+
+    def __init__(self, reduction='mean', loss_weight=1.0):
+        super(L1Loss, self).__init__()
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None):
+        """Forward function.
+
+        Args:
+            pred (torch.Tensor): The prediction.
+            target (torch.Tensor): The learning target of the prediction.
+            weight (torch.Tensor, optional): The weight of loss for each
+                prediction. Defaults to None.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Defaults to None.
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        loss_bbox = self.loss_weight * l1_loss(
+            pred, target, weight, reduction=reduction, avg_factor=avg_factor)
+        return loss_bbox
diff --git a/thirdparty/mmdetection/mmdet/models/losses/utils.py b/thirdparty/mmdetection/mmdet/models/losses/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..3361c6cad198e8b8e47173bf033b58c894eda106
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/losses/utils.py
@@ -0,0 +1,98 @@
+import functools
+
+import torch.nn.functional as F
+
+
+def reduce_loss(loss, reduction):
+    """Reduce loss as specified.
+
+    Args:
+        loss (Tensor): Elementwise loss tensor.
+        reduction (str): Options are "none", "mean" and "sum".
+
+    Return:
+        Tensor: Reduced loss tensor.
+    """
+    reduction_enum = F._Reduction.get_enum(reduction)
+    # none: 0, elementwise_mean:1, sum: 2
+    if reduction_enum == 0:
+        return loss
+    elif reduction_enum == 1:
+        return loss.mean()
+    elif reduction_enum == 2:
+        return loss.sum()
+
+
+def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):
+    """Apply element-wise weight and reduce loss.
+
+    Args:
+        loss (Tensor): Element-wise loss.
+        weight (Tensor): Element-wise weights.
+        reduction (str): Same as built-in losses of PyTorch.
+        avg_factor (float): Average factor when computing the mean of losses.
+
+    Returns:
+        Tensor: Processed loss values.
+ """ + # if weight is specified, apply element-wise weight + if weight is not None: + loss = loss * weight + + # if avg_factor is not specified, just reduce the loss + if avg_factor is None: + loss = reduce_loss(loss, reduction) + else: + # if reduction is mean, then average the loss by avg_factor + if reduction == 'mean': + loss = loss.sum() / avg_factor + # if reduction is 'none', then do nothing, otherwise raise an error + elif reduction != 'none': + raise ValueError('avg_factor can not be used with reduction="sum"') + return loss + + +def weighted_loss(loss_func): + """Create a weighted version of a given loss function. + + To use this decorator, the loss function must have the signature like + `loss_func(pred, target, **kwargs)`. The function only needs to compute + element-wise loss without any reduction. This decorator will add weight + and reduction arguments to the function. The decorated function will have + the signature like `loss_func(pred, target, weight=None, reduction='mean', + avg_factor=None, **kwargs)`. + + :Example: + + >>> import torch + >>> @weighted_loss + >>> def l1_loss(pred, target): + >>> return (pred - target).abs() + + >>> pred = torch.Tensor([0, 2, 3]) + >>> target = torch.Tensor([1, 1, 1]) + >>> weight = torch.Tensor([1, 0, 1]) + + >>> l1_loss(pred, target) + tensor(1.3333) + >>> l1_loss(pred, target, weight) + tensor(1.) + >>> l1_loss(pred, target, reduction='none') + tensor([1., 1., 2.]) + >>> l1_loss(pred, target, weight, avg_factor=2) + tensor(1.5000) + """ + + @functools.wraps(loss_func) + def wrapper(pred, + target, + weight=None, + reduction='mean', + avg_factor=None, + **kwargs): + # get element-wise loss + loss = loss_func(pred, target, **kwargs) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + return wrapper diff --git a/thirdparty/mmdetection/mmdet/models/losses/varifocal_loss.py b/thirdparty/mmdetection/mmdet/models/losses/varifocal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..6a84307fe0649f06a58e2530bde9857252e71db0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/losses/varifocal_loss.py @@ -0,0 +1,131 @@ +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weight_reduce_loss + + +def varifocal_loss(pred, + target, + weight=None, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + reduction='mean', + avg_factor=None): + """`Varifocal Loss `_ + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the + number of classes + target (torch.Tensor): The learning target of the iou-aware + classification score with shape (N, C), C is the number of classes. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + alpha (float, optional): A balance factor for the negative part of + Varifocal Loss, which is different from the alpha of Focal Loss. + Defaults to 0.75. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + iou_weighted (bool, optional): Whether to weight the loss of the + positive example with the iou target. Defaults to True. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and + "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. 
+ """ + # pred and target should be of the same size + assert pred.size() == target.size() + pred_sigmoid = pred.sigmoid() + target = target.type_as(pred) + if iou_weighted: + focal_weight = target * (target > 0.0).float() + \ + alpha * (pred_sigmoid - target).abs().pow(gamma) * \ + (target <= 0.0).float() + else: + focal_weight = (target > 0.0).float() + \ + alpha * (pred_sigmoid - target).abs().pow(gamma) * \ + (target <= 0.0).float() + loss = F.binary_cross_entropy_with_logits( + pred, target, reduction='none') * focal_weight + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@LOSSES.register_module() +class VarifocalLoss(nn.Module): + + def __init__(self, + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + reduction='mean', + loss_weight=1.0): + """`Varifocal Loss `_ + + Args: + use_sigmoid (bool, optional): Whether the prediction is + used for sigmoid or softmax. Defaults to True. + alpha (float, optional): A balance factor for the negative part of + Varifocal Loss, which is different from the alpha of Focal + Loss. Defaults to 0.75. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + iou_weighted (bool, optional): Whether to weight the loss of the + positive examples with the iou target. Defaults to True. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and + "sum". + loss_weight (float, optional): Weight of loss. Defaults to 1.0. + """ + super(VarifocalLoss, self).__init__() + assert use_sigmoid is True, \ + 'Only sigmoid varifocal loss supported now.' + assert alpha >= 0.0 + self.use_sigmoid = use_sigmoid + self.alpha = alpha + self.gamma = gamma + self.iou_weighted = iou_weighted + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Options are "none", "mean" and "sum". 
+
+        Returns:
+            torch.Tensor: The calculated loss
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if self.use_sigmoid:
+            loss_cls = self.loss_weight * varifocal_loss(
+                pred,
+                target,
+                weight,
+                alpha=self.alpha,
+                gamma=self.gamma,
+                iou_weighted=self.iou_weighted,
+                reduction=reduction,
+                avg_factor=avg_factor)
+        else:
+            raise NotImplementedError
+        return loss_cls
diff --git a/thirdparty/mmdetection/mmdet/models/necks/__init__.py b/thirdparty/mmdetection/mmdet/models/necks/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d983844693400170a1cfd8a341f02cf0874ad6a
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/necks/__init__.py
@@ -0,0 +1,15 @@
+from .bfp import BFP
+from .channel_mapper import ChannelMapper
+from .fpn import FPN
+from .fpn_carafe import FPN_CARAFE
+from .hrfpn import HRFPN
+from .nas_fpn import NASFPN
+from .nasfcos_fpn import NASFCOS_FPN
+from .pafpn import PAFPN
+from .rfp import RFP
+from .yolo_neck import YOLOV3Neck
+
+__all__ = [
+    'FPN', 'BFP', 'ChannelMapper', 'HRFPN', 'NASFPN', 'FPN_CARAFE', 'PAFPN',
+    'NASFCOS_FPN', 'RFP', 'YOLOV3Neck'
+]
diff --git a/thirdparty/mmdetection/mmdet/models/necks/bfp.py b/thirdparty/mmdetection/mmdet/models/necks/bfp.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc61b094e93a548d4149b4054847ca0ea2092ed0
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/necks/bfp.py
@@ -0,0 +1,104 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn import ConvModule, xavier_init
+from mmcv.cnn.bricks import NonLocal2d
+
+from ..builder import NECKS
+
+
+@NECKS.register_module()
+class BFP(nn.Module):
+    """BFP (Balanced Feature Pyramids)
+
+    BFP takes multi-level features as inputs, gathers them into a single one,
+    then refines the gathered feature and scatters the refined results back to
+    the multi-level features. This module is used in Libra R-CNN (CVPR 2019);
+    see the paper `Libra R-CNN: Towards Balanced Learning for Object Detection
+    <https://arxiv.org/abs/1904.02701>`_ for details.
+
+    Args:
+        in_channels (int): Number of input channels (feature maps of all levels
+            should have the same channels).
+        num_levels (int): Number of input feature levels.
+        conv_cfg (dict): The config dict for convolution layers.
+        norm_cfg (dict): The config dict for normalization layers.
+        refine_level (int): Index of integration and refine level of BSF in
+            multi-level features from bottom to top.
+        refine_type (str): Type of the refine op, currently support
+            [None, 'conv', 'non_local'].
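A shape-level sketch of the gather step that `BFP.forward` below implements (editor's illustration; 4 levels, `refine_level=2`, made-up channel count): finer maps are max-pooled down and coarser maps are upsampled so everything matches the resolution of the refine level before averaging.

```python
import torch
import torch.nn.functional as F

inputs = [torch.rand(1, 8, s, s) for s in (64, 32, 16, 8)]
gather_size = inputs[2].size()[2:]
feats = [F.adaptive_max_pool2d(x, gather_size) if i < 2 else
         F.interpolate(x, size=gather_size, mode='nearest')
         for i, x in enumerate(inputs)]
bsf = sum(feats) / len(feats)             # balanced semantic feature
print(bsf.shape)                          # torch.Size([1, 8, 16, 16])
```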
+ """ + + def __init__(self, + in_channels, + num_levels, + refine_level=2, + refine_type=None, + conv_cfg=None, + norm_cfg=None): + super(BFP, self).__init__() + assert refine_type in [None, 'conv', 'non_local'] + + self.in_channels = in_channels + self.num_levels = num_levels + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.refine_level = refine_level + self.refine_type = refine_type + assert 0 <= self.refine_level < self.num_levels + + if self.refine_type == 'conv': + self.refine = ConvModule( + self.in_channels, + self.in_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + elif self.refine_type == 'non_local': + self.refine = NonLocal2d( + self.in_channels, + reduction=1, + use_scale=False, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + + def init_weights(self): + """Initialize the weights of FPN module.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == self.num_levels + + # step 1: gather multi-level features by resize and average + feats = [] + gather_size = inputs[self.refine_level].size()[2:] + for i in range(self.num_levels): + if i < self.refine_level: + gathered = F.adaptive_max_pool2d( + inputs[i], output_size=gather_size) + else: + gathered = F.interpolate( + inputs[i], size=gather_size, mode='nearest') + feats.append(gathered) + + bsf = sum(feats) / len(feats) + + # step 2: refine gathered features + if self.refine_type is not None: + bsf = self.refine(bsf) + + # step 3: scatter refined features to multi-levels by a residual path + outs = [] + for i in range(self.num_levels): + out_size = inputs[i].size()[2:] + if i < self.refine_level: + residual = F.interpolate(bsf, size=out_size, mode='nearest') + else: + residual = F.adaptive_max_pool2d(bsf, output_size=out_size) + outs.append(residual + inputs[i]) + + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/channel_mapper.py b/thirdparty/mmdetection/mmdet/models/necks/channel_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..a4f5ed44caefb1612df67785b1f4f0d9ec46ee93 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/channel_mapper.py @@ -0,0 +1,74 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, xavier_init + +from ..builder import NECKS + + +@NECKS.register_module() +class ChannelMapper(nn.Module): + r"""Channel Mapper to reduce/increase channels of backbone features. + + This is used to reduce/increase channels of backbone features. + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale). + kernel_size (int, optional): kernel_size for reducing channels (used + at each scale). Default: 3. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. + act_cfg (dict, optional): Config dict for activation layer in + ConvModule. Default: dict(type='ReLU'). + + Example: + >>> import torch + >>> in_channels = [2, 3, 5, 7] + >>> scales = [340, 170, 84, 43] + >>> inputs = [torch.rand(1, c, s, s) + ... for c, s in zip(in_channels, scales)] + >>> self = ChannelMapper(in_channels, 11, 3).eval() + >>> outputs = self.forward(inputs) + >>> for i in range(len(outputs)): + ... 
print(f'outputs[{i}].shape = {outputs[i].shape}') + outputs[0].shape = torch.Size([1, 11, 340, 340]) + outputs[1].shape = torch.Size([1, 11, 170, 170]) + outputs[2].shape = torch.Size([1, 11, 84, 84]) + outputs[3].shape = torch.Size([1, 11, 43, 43]) + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='ReLU')): + super(ChannelMapper, self).__init__() + assert isinstance(in_channels, list) + + self.convs = nn.ModuleList() + for in_channel in in_channels: + self.convs.append( + ConvModule( + in_channel, + out_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + """Initialize the weights of ChannelMapper module.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.convs) + outs = [self.convs[i](inputs[i]) for i in range(len(inputs))] + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/fpn.py b/thirdparty/mmdetection/mmdet/models/necks/fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..5dd37f5f7870ae15b39d944466812d0953399fca --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/fpn.py @@ -0,0 +1,216 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, xavier_init +from mmcv.runner import auto_fp16 + +from ..builder import NECKS + + +@NECKS.register_module() +class FPN(nn.Module): + r"""Feature Pyramid Network. + + This is an implementation of paper `Feature Pyramid Networks for Object + Detection `_. + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool | str): If bool, it decides whether to add conv + layers on top of the original feature maps. Default to False. + If True, its actual mode is specified by `extra_convs_on_inputs`. + If str, it specifies the source feature map of the extra convs. + Only the following options are allowed + + - 'on_input': Last feat map of neck inputs (i.e. backbone feature). + - 'on_lateral': Last feature map after lateral convs. + - 'on_output': The last output feature map after fpn convs. + extra_convs_on_inputs (bool, deprecated): Whether to apply extra convs + on the original feature from the backbone. If True, + it is equivalent to `add_extra_convs='on_input'`. If False, it is + equivalent to set `add_extra_convs='on_output'`. Default to True. + relu_before_extra_convs (bool): Whether to apply relu before the extra + conv. Default: False. + no_norm_on_lateral (bool): Whether to apply norm on lateral. + Default: False. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Config dict for normalization layer. Default: None. + act_cfg (str): Config dict for activation layer in ConvModule. + Default: None. + upsample_cfg (dict): Config dict for interpolate layer. 
+ Default: `dict(mode='nearest')` + + Example: + >>> import torch + >>> in_channels = [2, 3, 5, 7] + >>> scales = [340, 170, 84, 43] + >>> inputs = [torch.rand(1, c, s, s) + ... for c, s in zip(in_channels, scales)] + >>> self = FPN(in_channels, 11, len(in_channels)).eval() + >>> outputs = self.forward(inputs) + >>> for i in range(len(outputs)): + ... print(f'outputs[{i}].shape = {outputs[i].shape}') + outputs[0].shape = torch.Size([1, 11, 340, 340]) + outputs[1].shape = torch.Size([1, 11, 170, 170]) + outputs[2].shape = torch.Size([1, 11, 84, 84]) + outputs[3].shape = torch.Size([1, 11, 43, 43]) + """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + add_extra_convs=False, + extra_convs_on_inputs=True, + relu_before_extra_convs=False, + no_norm_on_lateral=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=None, + upsample_cfg=dict(mode='nearest')): + super(FPN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.relu_before_extra_convs = relu_before_extra_convs + self.no_norm_on_lateral = no_norm_on_lateral + self.fp16_enabled = False + self.upsample_cfg = upsample_cfg.copy() + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + assert isinstance(add_extra_convs, (str, bool)) + if isinstance(add_extra_convs, str): + # Extra_convs_source choices: 'on_input', 'on_lateral', 'on_output' + assert add_extra_convs in ('on_input', 'on_lateral', 'on_output') + elif add_extra_convs: # True + if extra_convs_on_inputs: + # For compatibility with previous release + # TODO: deprecate `extra_convs_on_inputs` + self.add_extra_convs = 'on_input' + else: + self.add_extra_convs = 'on_output' + + self.lateral_convs = nn.ModuleList() + self.fpn_convs = nn.ModuleList() + + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg if not self.no_norm_on_lateral else None, + act_cfg=act_cfg, + inplace=False) + fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + + self.lateral_convs.append(l_conv) + self.fpn_convs.append(fpn_conv) + + # add extra conv layers (e.g., RetinaNet) + extra_levels = num_outs - self.backbone_end_level + self.start_level + if self.add_extra_convs and extra_levels >= 1: + for i in range(extra_levels): + if i == 0 and self.add_extra_convs == 'on_input': + in_channels = self.in_channels[self.backbone_end_level - 1] + else: + in_channels = out_channels + extra_fpn_conv = ConvModule( + in_channels, + out_channels, + 3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + self.fpn_convs.append(extra_fpn_conv) + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + """Initialize the weights of FPN module.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + + @auto_fp16() + def forward(self, inputs): + """Forward function.""" + 
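+        # The code below runs in three stages: (1) 1x1 lateral convs project
+        # every used backbone level to out_channels; (2) the top-down loop
+        # upsamples each deeper level and adds it into the next shallower
+        # lateral; (3) 3x3 fpn convs smooth the merged maps, and extra
+        # levels (max-pool or extra convs) are appended when num_outs asks
+        # for more scales than the backbone provides.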
assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + # In some cases, fixing `scale factor` (e.g. 2) is preferred, but + # it cannot co-exist with `size` in `F.interpolate`. + if 'scale_factor' in self.upsample_cfg: + laterals[i - 1] += F.interpolate(laterals[i], + **self.upsample_cfg) + else: + prev_shape = laterals[i - 1].shape[2:] + laterals[i - 1] += F.interpolate( + laterals[i], size=prev_shape, **self.upsample_cfg) + + # build outputs + # part 1: from original levels + outs = [ + self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) + ] + # part 2: add extra levels + if self.num_outs > len(outs): + # use max pool to get more levels on top of outputs + # (e.g., Faster R-CNN, Mask R-CNN) + if not self.add_extra_convs: + for i in range(self.num_outs - used_backbone_levels): + outs.append(F.max_pool2d(outs[-1], 1, stride=2)) + # add conv layers on top of original feature maps (RetinaNet) + else: + if self.add_extra_convs == 'on_input': + extra_source = inputs[self.backbone_end_level - 1] + elif self.add_extra_convs == 'on_lateral': + extra_source = laterals[-1] + elif self.add_extra_convs == 'on_output': + extra_source = outs[-1] + else: + raise NotImplementedError + outs.append(self.fpn_convs[used_backbone_levels](extra_source)) + for i in range(used_backbone_levels + 1, self.num_outs): + if self.relu_before_extra_convs: + outs.append(self.fpn_convs[i](F.relu(outs[-1]))) + else: + outs.append(self.fpn_convs[i](outs[-1])) + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/fpn_carafe.py b/thirdparty/mmdetection/mmdet/models/necks/fpn_carafe.py new file mode 100644 index 0000000000000000000000000000000000000000..b97a6aa73432279e8785bf54a0579c58db46e9d4 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/fpn_carafe.py @@ -0,0 +1,267 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, build_upsample_layer, xavier_init +from mmcv.ops.carafe import CARAFEPack + +from ..builder import NECKS + + +@NECKS.register_module() +class FPN_CARAFE(nn.Module): + """FPN_CARAFE is a more flexible implementation of FPN. It allows more + choice for upsample methods during the top-down pathway. + + It can reproduce the preformance of ICCV 2019 paper + CARAFE: Content-Aware ReAssembly of FEatures + Please refer to https://arxiv.org/abs/1905.02188 for more details. + + Args: + in_channels (list[int]): Number of channels for each input feature map. + out_channels (int): Output channels of feature pyramids. + num_outs (int): Number of output stages. + start_level (int): Start level of feature pyramids. + (Default: 0) + end_level (int): End level of feature pyramids. + (Default: -1 indicates the last level). + norm_cfg (dict): Dictionary to construct and config norm layer. + activate (str): Type of activation function in ConvModule + (Default: None indicates w/o activation). + order (dict): Order of components in ConvModule. + upsample (str): Type of upsample layer. + upsample_cfg (dict): Dictionary to construct and config upsample layer. 
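+
+    Example (a minimal sketch using plain ``nearest`` upsampling so it can
+    run without the compiled CARAFE op; channel counts and feature sizes
+    are illustrative assumptions):
+        >>> import torch
+        >>> in_channels = [2, 3, 5, 7]
+        >>> scales = [40, 20, 10, 5]
+        >>> inputs = [torch.rand(1, c, s, s)
+        ...           for c, s in zip(in_channels, scales)]
+        >>> self = FPN_CARAFE(in_channels, 11, len(in_channels),
+        ...                   upsample_cfg=dict(type='nearest')).eval()
+        >>> outputs = self.forward(inputs)
+        >>> for i in range(len(outputs)):
+        ...     print(f'outputs[{i}].shape = {outputs[i].shape}')
+        outputs[0].shape = torch.Size([1, 11, 40, 40])
+        outputs[1].shape = torch.Size([1, 11, 20, 20])
+        outputs[2].shape = torch.Size([1, 11, 10, 10])
+        outputs[3].shape = torch.Size([1, 11, 5, 5])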
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1)): + super(FPN_CARAFE, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.with_bias = norm_cfg is None + self.upsample_cfg = upsample_cfg.copy() + self.upsample = self.upsample_cfg.get('type') + self.relu = nn.ReLU(inplace=False) + + self.order = order + assert order in [('conv', 'norm', 'act'), ('act', 'conv', 'norm')] + + assert self.upsample in [ + 'nearest', 'bilinear', 'deconv', 'pixel_shuffle', 'carafe', None + ] + if self.upsample in ['deconv', 'pixel_shuffle']: + assert hasattr( + self.upsample_cfg, + 'upsample_kernel') and self.upsample_cfg.upsample_kernel > 0 + self.upsample_kernel = self.upsample_cfg.pop('upsample_kernel') + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + + self.lateral_convs = nn.ModuleList() + self.fpn_convs = nn.ModuleList() + self.upsample_modules = nn.ModuleList() + + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + norm_cfg=norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + if i != self.backbone_end_level - 1: + upsample_cfg_ = self.upsample_cfg.copy() + if self.upsample == 'deconv': + upsample_cfg_.update( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=self.upsample_kernel, + stride=2, + padding=(self.upsample_kernel - 1) // 2, + output_padding=(self.upsample_kernel - 1) // 2) + elif self.upsample == 'pixel_shuffle': + upsample_cfg_.update( + in_channels=out_channels, + out_channels=out_channels, + scale_factor=2, + upsample_kernel=self.upsample_kernel) + elif self.upsample == 'carafe': + upsample_cfg_.update(channels=out_channels, scale_factor=2) + else: + # suppress warnings + align_corners = (None + if self.upsample == 'nearest' else False) + upsample_cfg_.update( + scale_factor=2, + mode=self.upsample, + align_corners=align_corners) + upsample_module = build_upsample_layer(upsample_cfg_) + self.upsample_modules.append(upsample_module) + self.lateral_convs.append(l_conv) + self.fpn_convs.append(fpn_conv) + + # add extra conv layers (e.g., RetinaNet) + extra_out_levels = ( + num_outs - self.backbone_end_level + self.start_level) + if extra_out_levels >= 1: + for i in range(extra_out_levels): + in_channels = ( + self.in_channels[self.backbone_end_level - + 1] if i == 0 else out_channels) + extra_l_conv = ConvModule( + in_channels, + out_channels, + 3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + if self.upsample == 'deconv': + upsampler_cfg_ = dict( + in_channels=out_channels, + 
out_channels=out_channels, + kernel_size=self.upsample_kernel, + stride=2, + padding=(self.upsample_kernel - 1) // 2, + output_padding=(self.upsample_kernel - 1) // 2) + elif self.upsample == 'pixel_shuffle': + upsampler_cfg_ = dict( + in_channels=out_channels, + out_channels=out_channels, + scale_factor=2, + upsample_kernel=self.upsample_kernel) + elif self.upsample == 'carafe': + upsampler_cfg_ = dict( + channels=out_channels, + scale_factor=2, + **self.upsample_cfg) + else: + # suppress warnings + align_corners = (None + if self.upsample == 'nearest' else False) + upsampler_cfg_ = dict( + scale_factor=2, + mode=self.upsample, + align_corners=align_corners) + upsampler_cfg_['type'] = self.upsample + upsample_module = build_upsample_layer(upsampler_cfg_) + extra_fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + self.upsample_modules.append(upsample_module) + self.fpn_convs.append(extra_fpn_conv) + self.lateral_convs.append(extra_l_conv) + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + """Initialize the weights of module.""" + for m in self.modules(): + if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)): + xavier_init(m, distribution='uniform') + for m in self.modules(): + if isinstance(m, CARAFEPack): + m.init_weights() + + def slice_as(self, src, dst): + """Slice ``src`` as ``dst`` + + Note: + ``src`` should have the same or larger size than ``dst``. + + Args: + src (torch.Tensor): Tensors to be sliced. + dst (torch.Tensor): ``src`` will be sliced to have the same + size as ``dst``. + + Returns: + torch.Tensor: Sliced tensor. + """ + assert (src.size(2) >= dst.size(2)) and (src.size(3) >= dst.size(3)) + if src.size(2) == dst.size(2) and src.size(3) == dst.size(3): + return src + else: + return src[:, :, :dst.size(2), :dst.size(3)] + + def tensor_add(self, a, b): + """Add tensors ``a`` and ``b`` that might have different sizes.""" + if a.size() == b.size(): + c = a + b + else: + c = a + self.slice_as(b, a) + return c + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [] + for i, lateral_conv in enumerate(self.lateral_convs): + if i <= self.backbone_end_level - self.start_level: + input = inputs[min(i + self.start_level, len(inputs) - 1)] + else: + input = laterals[-1] + lateral = lateral_conv(input) + laterals.append(lateral) + + # build top-down path + for i in range(len(laterals) - 1, 0, -1): + if self.upsample is not None: + upsample_feat = self.upsample_modules[i - 1](laterals[i]) + else: + upsample_feat = laterals[i] + laterals[i - 1] = self.tensor_add(laterals[i - 1], upsample_feat) + + # build outputs + num_conv_outs = len(self.fpn_convs) + outs = [] + for i in range(num_conv_outs): + out = self.fpn_convs[i](laterals[i]) + outs.append(out) + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/hrfpn.py b/thirdparty/mmdetection/mmdet/models/necks/hrfpn.py new file mode 100644 index 0000000000000000000000000000000000000000..cf87cfa7918c3755e22290b8a1dfdc068db6d729 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/hrfpn.py @@ -0,0 +1,102 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, caffe2_xavier_init +from torch.utils.checkpoint import checkpoint + +from ..builder import NECKS + + +@NECKS.register_module() +class HRFPN(nn.Module): + 
"""HRFPN (High Resolution Feature Pyrmamids) + + paper: `High-Resolution Representations for Labeling Pixels and Regions + `_. + + Args: + in_channels (list): number of channels for each branch. + out_channels (int): output channels of feature pyramids. + num_outs (int): number of output stages. + pooling_type (str): pooling for generating feature pyramids + from {MAX, AVG}. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + stride (int): stride of 3x3 convolutional layers + """ + + def __init__(self, + in_channels, + out_channels, + num_outs=5, + pooling_type='AVG', + conv_cfg=None, + norm_cfg=None, + with_cp=False, + stride=1): + super(HRFPN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.reduction_conv = ConvModule( + sum(in_channels), + out_channels, + kernel_size=1, + conv_cfg=self.conv_cfg, + act_cfg=None) + + self.fpn_convs = nn.ModuleList() + for i in range(self.num_outs): + self.fpn_convs.append( + ConvModule( + out_channels, + out_channels, + kernel_size=3, + padding=1, + stride=stride, + conv_cfg=self.conv_cfg, + act_cfg=None)) + + if pooling_type == 'MAX': + self.pooling = F.max_pool2d + else: + self.pooling = F.avg_pool2d + + def init_weights(self): + """Initialize the weights of module.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + caffe2_xavier_init(m) + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == self.num_ins + outs = [inputs[0]] + for i in range(1, self.num_ins): + outs.append( + F.interpolate(inputs[i], scale_factor=2**i, mode='bilinear')) + out = torch.cat(outs, dim=1) + if out.requires_grad and self.with_cp: + out = checkpoint(self.reduction_conv, out) + else: + out = self.reduction_conv(out) + outs = [out] + for i in range(1, self.num_outs): + outs.append(self.pooling(out, kernel_size=2**i, stride=2**i)) + outputs = [] + + for i in range(self.num_outs): + if outs[i].requires_grad and self.with_cp: + tmp_out = checkpoint(self.fpn_convs[i], outs[i]) + else: + tmp_out = self.fpn_convs[i](outs[i]) + outputs.append(tmp_out) + return tuple(outputs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/nas_fpn.py b/thirdparty/mmdetection/mmdet/models/necks/nas_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..8e333ce65d4d06c47c29af489526ba3142736ad7 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/nas_fpn.py @@ -0,0 +1,160 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, caffe2_xavier_init +from mmcv.ops.merge_cells import GlobalPoolingCell, SumCell + +from ..builder import NECKS + + +@NECKS.register_module() +class NASFPN(nn.Module): + """NAS-FPN. + + Implementation of `NAS-FPN: Learning Scalable Feature Pyramid Architecture + for Object Detection `_ + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + stack_times (int): The number of times the pyramid architecture will + be stacked. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. 
+ end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool): It decides whether to add conv + layers on top of the original feature maps. Default to False. + If True, its actual mode is specified by `extra_convs_on_inputs`. + """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + stack_times, + start_level=0, + end_level=-1, + add_extra_convs=False, + norm_cfg=None): + super(NASFPN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) # num of input feature levels + self.num_outs = num_outs # num of output feature levels + self.stack_times = stack_times + self.norm_cfg = norm_cfg + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + + # add lateral connections + self.lateral_convs = nn.ModuleList() + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + norm_cfg=norm_cfg, + act_cfg=None) + self.lateral_convs.append(l_conv) + + # add extra downsample layers (stride-2 pooling or conv) + extra_levels = num_outs - self.backbone_end_level + self.start_level + self.extra_downsamples = nn.ModuleList() + for i in range(extra_levels): + extra_conv = ConvModule( + out_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None) + self.extra_downsamples.append( + nn.Sequential(extra_conv, nn.MaxPool2d(2, 2))) + + # add NAS FPN connections + self.fpn_stages = nn.ModuleList() + for _ in range(self.stack_times): + stage = nn.ModuleDict() + # gp(p6, p4) -> p4_1 + stage['gp_64_4'] = GlobalPoolingCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p4_1, p4) -> p4_2 + stage['sum_44_4'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p4_2, p3) -> p3_out + stage['sum_43_3'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p3_out, p4_2) -> p4_out + stage['sum_34_4'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p5, gp(p4_out, p3_out)) -> p5_out + stage['gp_43_5'] = GlobalPoolingCell(with_out_conv=False) + stage['sum_55_5'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p7, gp(p5_out, p4_2)) -> p7_out + stage['gp_54_7'] = GlobalPoolingCell(with_out_conv=False) + stage['sum_77_7'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # gp(p7_out, p5_out) -> p6_out + stage['gp_75_6'] = GlobalPoolingCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + self.fpn_stages.append(stage) + + def init_weights(self): + """Initialize the weights of module.""" + for m in self.modules(): + if isinstance(m, nn.Conv2d): + caffe2_xavier_init(m) + + def forward(self, inputs): + """Forward function.""" + # build P3-P5 + feats = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + # build P6-P7 on top of P5 + for 
downsample in self.extra_downsamples: + feats.append(downsample(feats[-1])) + + p3, p4, p5, p6, p7 = feats + + for stage in self.fpn_stages: + # gp(p6, p4) -> p4_1 + p4_1 = stage['gp_64_4'](p6, p4, out_size=p4.shape[-2:]) + # sum(p4_1, p4) -> p4_2 + p4_2 = stage['sum_44_4'](p4_1, p4, out_size=p4.shape[-2:]) + # sum(p4_2, p3) -> p3_out + p3 = stage['sum_43_3'](p4_2, p3, out_size=p3.shape[-2:]) + # sum(p3_out, p4_2) -> p4_out + p4 = stage['sum_34_4'](p3, p4_2, out_size=p4.shape[-2:]) + # sum(p5, gp(p4_out, p3_out)) -> p5_out + p5_tmp = stage['gp_43_5'](p4, p3, out_size=p5.shape[-2:]) + p5 = stage['sum_55_5'](p5, p5_tmp, out_size=p5.shape[-2:]) + # sum(p7, gp(p5_out, p4_2)) -> p7_out + p7_tmp = stage['gp_54_7'](p5, p4_2, out_size=p7.shape[-2:]) + p7 = stage['sum_77_7'](p7, p7_tmp, out_size=p7.shape[-2:]) + # gp(p7_out, p5_out) -> p6_out + p6 = stage['gp_75_6'](p7, p5, out_size=p6.shape[-2:]) + + return p3, p4, p5, p6, p7 diff --git a/thirdparty/mmdetection/mmdet/models/necks/nasfcos_fpn.py b/thirdparty/mmdetection/mmdet/models/necks/nasfcos_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..2daf79ef591373499184c624ccd27fb7456dec06 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/nasfcos_fpn.py @@ -0,0 +1,161 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, caffe2_xavier_init +from mmcv.ops.merge_cells import ConcatCell + +from ..builder import NECKS + + +@NECKS.register_module() +class NASFCOS_FPN(nn.Module): + """FPN structure in NASFPN. + + Implementation of paper `NAS-FCOS: Fast Neural Architecture Search for + Object Detection `_ + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool): It decides whether to add conv + layers on top of the original feature maps. Default to False. + If True, its actual mode is specified by `extra_convs_on_inputs`. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. 
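+
+    Example (an illustrative, config-style sketch only; the ResNet-style
+    channel list and the level choices are assumptions, not prescribed
+    values):
+        >>> neck_cfg = dict(
+        ...     type='NASFCOS_FPN',
+        ...     in_channels=[256, 512, 1024, 2048],
+        ...     out_channels=256,
+        ...     num_outs=5,
+        ...     start_level=1)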
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=1, + end_level=-1, + add_extra_convs=False, + conv_cfg=None, + norm_cfg=None): + super(NASFCOS_FPN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.norm_cfg = norm_cfg + self.conv_cfg = conv_cfg + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + + self.adapt_convs = nn.ModuleList() + for i in range(self.start_level, self.backbone_end_level): + adapt_conv = ConvModule( + in_channels[i], + out_channels, + 1, + stride=1, + padding=0, + bias=False, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU', inplace=False)) + self.adapt_convs.append(adapt_conv) + + # C2 is omitted according to the paper + extra_levels = num_outs - self.backbone_end_level + self.start_level + + def build_concat_cell(with_input1_conv, with_input2_conv): + cell_conv_cfg = dict( + kernel_size=1, padding=0, bias=False, groups=out_channels) + return ConcatCell( + in_channels=out_channels, + out_channels=out_channels, + with_out_conv=True, + out_conv_cfg=cell_conv_cfg, + out_norm_cfg=dict(type='BN'), + out_conv_order=('norm', 'act', 'conv'), + with_input1_conv=with_input1_conv, + with_input2_conv=with_input2_conv, + input_conv_cfg=conv_cfg, + input_norm_cfg=norm_cfg, + upsample_mode='nearest') + + # Denote c3=f0, c4=f1, c5=f2 for convince + self.fpn = nn.ModuleDict() + self.fpn['c22_1'] = build_concat_cell(True, True) + self.fpn['c22_2'] = build_concat_cell(True, True) + self.fpn['c32'] = build_concat_cell(True, False) + self.fpn['c02'] = build_concat_cell(True, False) + self.fpn['c42'] = build_concat_cell(True, True) + self.fpn['c36'] = build_concat_cell(True, True) + self.fpn['c61'] = build_concat_cell(True, True) # f9 + self.extra_downsamples = nn.ModuleList() + for i in range(extra_levels): + extra_act_cfg = None if i == 0 \ + else dict(type='ReLU', inplace=False) + self.extra_downsamples.append( + ConvModule( + out_channels, + out_channels, + 3, + stride=2, + padding=1, + act_cfg=extra_act_cfg, + order=('act', 'norm', 'conv'))) + + def forward(self, inputs): + """Forward function.""" + feats = [ + adapt_conv(inputs[i + self.start_level]) + for i, adapt_conv in enumerate(self.adapt_convs) + ] + + for (i, module_name) in enumerate(self.fpn): + idx_1, idx_2 = int(module_name[1]), int(module_name[2]) + res = self.fpn[module_name](feats[idx_1], feats[idx_2]) + feats.append(res) + + ret = [] + for (idx, input_idx) in zip([9, 8, 7], [1, 2, 3]): # add P3, P4, P5 + feats1, feats2 = feats[idx], feats[5] + feats2_resize = F.interpolate( + feats2, + size=feats1.size()[2:], + mode='bilinear', + align_corners=False) + + feats_sum = feats1 + feats2_resize + ret.append( + F.interpolate( + feats_sum, + size=inputs[input_idx].size()[2:], + mode='bilinear', + align_corners=False)) + + for submodule in self.extra_downsamples: + ret.append(submodule(ret[-1])) + + return tuple(ret) + + def init_weights(self): + """Initialize the weights of module.""" + for module in self.fpn.values(): + if hasattr(module, 'conv_out'): + caffe2_xavier_init(module.out_conv.conv) + + for modules in [ + self.adapt_convs.modules(), + 
self.extra_downsamples.modules() + ]: + for module in modules: + if isinstance(module, nn.Conv2d): + caffe2_xavier_init(module) diff --git a/thirdparty/mmdetection/mmdet/models/necks/pafpn.py b/thirdparty/mmdetection/mmdet/models/necks/pafpn.py new file mode 100644 index 0000000000000000000000000000000000000000..78782763ced51248e5c3c3d1c3ffd8580e886d65 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/pafpn.py @@ -0,0 +1,137 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import auto_fp16 + +from ..builder import NECKS +from .fpn import FPN + + +@NECKS.register_module() +class PAFPN(FPN): + """Path Aggregation Network for Instance Segmentation. + + This is an implementation of the `PAFPN in Path Aggregation Network + `_. + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool): Whether to add conv layers on top of the + original feature maps. Default: False. + extra_convs_on_inputs (bool): Whether to apply extra conv on + the original feature from the backbone. Default: False. + relu_before_extra_convs (bool): Whether to apply relu before the extra + conv. Default: False. + no_norm_on_lateral (bool): Whether to apply norm on lateral. + Default: False. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Config dict for normalization layer. Default: None. + act_cfg (str): Config dict for activation layer in ConvModule. + Default: None. 
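+
+    Example (a minimal sketch; channel counts are illustrative assumptions,
+    and the feature sizes halve per level so the stride-2 bottom-up adds
+    line up):
+        >>> import torch
+        >>> in_channels = [2, 3, 5, 7]
+        >>> scales = [40, 20, 10, 5]
+        >>> inputs = [torch.rand(1, c, s, s)
+        ...           for c, s in zip(in_channels, scales)]
+        >>> self = PAFPN(in_channels, 11, len(in_channels)).eval()
+        >>> outputs = self.forward(inputs)
+        >>> for i in range(len(outputs)):
+        ...     print(f'outputs[{i}].shape = {outputs[i].shape}')
+        outputs[0].shape = torch.Size([1, 11, 40, 40])
+        outputs[1].shape = torch.Size([1, 11, 20, 20])
+        outputs[2].shape = torch.Size([1, 11, 10, 10])
+        outputs[3].shape = torch.Size([1, 11, 5, 5])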
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + add_extra_convs=False, + extra_convs_on_inputs=True, + relu_before_extra_convs=False, + no_norm_on_lateral=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=None): + super(PAFPN, + self).__init__(in_channels, out_channels, num_outs, start_level, + end_level, add_extra_convs, extra_convs_on_inputs, + relu_before_extra_convs, no_norm_on_lateral, + conv_cfg, norm_cfg, act_cfg) + # add extra bottom up pathway + self.downsample_convs = nn.ModuleList() + self.pafpn_convs = nn.ModuleList() + for i in range(self.start_level + 1, self.backbone_end_level): + d_conv = ConvModule( + out_channels, + out_channels, + 3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + pafpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + self.downsample_convs.append(d_conv) + self.pafpn_convs.append(pafpn_conv) + + @auto_fp16() + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + prev_shape = laterals[i - 1].shape[2:] + laterals[i - 1] += F.interpolate( + laterals[i], size=prev_shape, mode='nearest') + + # build outputs + # part 1: from original levels + inter_outs = [ + self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) + ] + + # part 2: add bottom-up path + for i in range(0, used_backbone_levels - 1): + inter_outs[i + 1] += self.downsample_convs[i](inter_outs[i]) + + outs = [] + outs.append(inter_outs[0]) + outs.extend([ + self.pafpn_convs[i - 1](inter_outs[i]) + for i in range(1, used_backbone_levels) + ]) + + # part 3: add extra levels + if self.num_outs > len(outs): + # use max pool to get more levels on top of outputs + # (e.g., Faster R-CNN, Mask R-CNN) + if not self.add_extra_convs: + for i in range(self.num_outs - used_backbone_levels): + outs.append(F.max_pool2d(outs[-1], 1, stride=2)) + # add conv layers on top of original feature maps (RetinaNet) + else: + if self.extra_convs_on_inputs: + orig = inputs[self.backbone_end_level - 1] + outs.append(self.fpn_convs[used_backbone_levels](orig)) + else: + outs.append(self.fpn_convs[used_backbone_levels](outs[-1])) + for i in range(used_backbone_levels + 1, self.num_outs): + if self.relu_before_extra_convs: + outs.append(self.fpn_convs[i](F.relu(outs[-1]))) + else: + outs.append(self.fpn_convs[i](outs[-1])) + return tuple(outs) diff --git a/thirdparty/mmdetection/mmdet/models/necks/rfp.py b/thirdparty/mmdetection/mmdet/models/necks/rfp.py new file mode 100644 index 0000000000000000000000000000000000000000..8a63e63bdef0094c26c17526d5ddde75bd309cea --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/rfp.py @@ -0,0 +1,128 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import constant_init, kaiming_init, xavier_init + +from ..builder import NECKS, build_backbone +from .fpn import FPN + + +class ASPP(nn.Module): + """ASPP (Atrous Spatial Pyramid Pooling) + + This is an implementation of the ASPP module used in DetectoRS + (https://arxiv.org/pdf/2006.02334.pdf) + + Args: + in_channels (int): Number of input channels. 
+ out_channels (int): Number of channels produced by this module + dilations (tuple[int]): Dilations of the four branches. + Default: (1, 3, 6, 1) + """ + + def __init__(self, in_channels, out_channels, dilations=(1, 3, 6, 1)): + super().__init__() + assert dilations[-1] == 1 + self.aspp = nn.ModuleList() + for dilation in dilations: + kernel_size = 3 if dilation > 1 else 1 + padding = dilation if dilation > 1 else 0 + conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=1, + dilation=dilation, + padding=padding, + bias=True) + self.aspp.append(conv) + self.gap = nn.AdaptiveAvgPool2d(1) + self.init_weights() + + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + + def forward(self, x): + avg_x = self.gap(x) + out = [] + for aspp_idx in range(len(self.aspp)): + inp = avg_x if (aspp_idx == len(self.aspp) - 1) else x + out.append(F.relu_(self.aspp[aspp_idx](inp))) + out[-1] = out[-1].expand_as(out[-2]) + out = torch.cat(out, dim=1) + return out + + +@NECKS.register_module() +class RFP(FPN): + """RFP (Recursive Feature Pyramid) + + This is an implementation of RFP in `DetectoRS + `_. Different from standard FPN, the + input of RFP should be multi level features along with origin input image + of backbone. + + Args: + rfp_steps (int): Number of unrolled steps of RFP. + rfp_backbone (dict): Configuration of the backbone for RFP. + aspp_out_channels (int): Number of output channels of ASPP module. + aspp_dilations (tuple[int]): Dilation rates of four branches. + Default: (1, 3, 6, 1) + """ + + def __init__(self, + rfp_steps, + rfp_backbone, + aspp_out_channels, + aspp_dilations=(1, 3, 6, 1), + **kwargs): + super().__init__(**kwargs) + self.rfp_steps = rfp_steps + self.rfp_modules = nn.ModuleList() + for rfp_idx in range(1, rfp_steps): + rfp_module = build_backbone(rfp_backbone) + self.rfp_modules.append(rfp_module) + self.rfp_aspp = ASPP(self.out_channels, aspp_out_channels, + aspp_dilations) + self.rfp_weight = nn.Conv2d( + self.out_channels, + 1, + kernel_size=1, + stride=1, + padding=0, + bias=True) + + def init_weights(self): + # Avoid using super().init_weights(), which may alter the default + # initialization of the modules in self.rfp_modules that have missing + # keys in the pretrained checkpoint. 
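+        # Hence only the FPN-side lateral/fpn convs are re-initialized here;
+        # each recursive backbone copy loads its own pretrained weights, and
+        # the fusion gate starts at sigmoid(0) = 0.5, i.e. an even blend of
+        # the recursive and the original FPN features.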
+ for convs in [self.lateral_convs, self.fpn_convs]: + for m in convs.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + for rfp_idx in range(self.rfp_steps - 1): + self.rfp_modules[rfp_idx].init_weights( + self.rfp_modules[rfp_idx].pretrained) + constant_init(self.rfp_weight, 0) + + def forward(self, inputs): + inputs = list(inputs) + assert len(inputs) == len(self.in_channels) + 1 # +1 for input image + img = inputs.pop(0) + # FPN forward + x = super().forward(tuple(inputs)) + for rfp_idx in range(self.rfp_steps - 1): + rfp_feats = [x[0]] + list( + self.rfp_aspp(x[i]) for i in range(1, len(x))) + x_idx = self.rfp_modules[rfp_idx].rfp_forward(img, rfp_feats) + # FPN forward + x_idx = super().forward(x_idx) + x_new = [] + for ft_idx in range(len(x_idx)): + add_weight = torch.sigmoid(self.rfp_weight(x_idx[ft_idx])) + x_new.append(add_weight * x_idx[ft_idx] + + (1 - add_weight) * x[ft_idx]) + x = x_new + return x diff --git a/thirdparty/mmdetection/mmdet/models/necks/yolo_neck.py b/thirdparty/mmdetection/mmdet/models/necks/yolo_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..c2f9b9ef3859796c284c16ad1a92fe41ecbed613 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/necks/yolo_neck.py @@ -0,0 +1,136 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule + +from ..builder import NECKS + + +class DetectionBlock(nn.Module): + """Detection block in YOLO neck. + + Let out_channels = n, the DetectionBlock contains: + Six ConvLayers, 1 Conv2D Layer and 1 YoloLayer. + The first 6 ConvLayers are formed the following way: + 1x1xn, 3x3x2n, 1x1xn, 3x3x2n, 1x1xn, 3x3x2n. + The Conv2D layer is 1x1x255. + Some block will have branch after the fifth ConvLayer. + The input channel is arbitrary (in_channels) + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + """ + + def __init__(self, + in_channels, + out_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)): + super(DetectionBlock, self).__init__() + double_out_channels = out_channels * 2 + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + self.conv1 = ConvModule(in_channels, out_channels, 1, **cfg) + self.conv2 = ConvModule( + out_channels, double_out_channels, 3, padding=1, **cfg) + self.conv3 = ConvModule(double_out_channels, out_channels, 1, **cfg) + self.conv4 = ConvModule( + out_channels, double_out_channels, 3, padding=1, **cfg) + self.conv5 = ConvModule(double_out_channels, out_channels, 1, **cfg) + + def forward(self, x): + tmp = self.conv1(x) + tmp = self.conv2(tmp) + tmp = self.conv3(tmp) + tmp = self.conv4(tmp) + out = self.conv5(tmp) + return out + + +@NECKS.register_module() +class YOLOV3Neck(nn.Module): + """The neck of YOLOV3. + + It can be treated as a simplified version of FPN. It + will take the result from Darknet backbone and do some upsampling and + concatenation. It will finally output the detection result. + + Note: + The input feats should be from top to bottom. 
+ i.e., from high-lvl to low-lvl + But YOLOV3Neck will process them in reversed order. + i.e., from bottom (high-lvl) to top (low-lvl) + + Args: + num_scales (int): The number of scales / stages. + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + """ + + def __init__(self, + num_scales, + in_channels, + out_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)): + super(YOLOV3Neck, self).__init__() + assert (num_scales == len(in_channels) == len(out_channels)) + self.num_scales = num_scales + self.in_channels = in_channels + self.out_channels = out_channels + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + # To support arbitrary scales, the code looks awful, but it works. + # Better solution is welcomed. + self.detect1 = DetectionBlock(in_channels[0], out_channels[0], **cfg) + for i in range(1, self.num_scales): + in_c, out_c = self.in_channels[i], self.out_channels[i] + self.add_module(f'conv{i}', ConvModule(in_c, out_c, 1, **cfg)) + # in_c + out_c : High-lvl feats will be cat with low-lvl feats + self.add_module(f'detect{i+1}', + DetectionBlock(in_c + out_c, out_c, **cfg)) + + def forward(self, feats): + assert len(feats) == self.num_scales + + # processed from bottom (high-lvl) to top (low-lvl) + outs = [] + out = self.detect1(feats[-1]) + outs.append(out) + + for i, x in enumerate(reversed(feats[:-1])): + conv = getattr(self, f'conv{i+1}') + tmp = conv(out) + + # Cat with low-lvl feats + tmp = F.interpolate(tmp, scale_factor=2) + tmp = torch.cat((tmp, x), 1) + + detect = getattr(self, f'detect{i+2}') + out = detect(tmp) + outs.append(out) + + return tuple(outs) + + def init_weights(self): + """Initialize the weights of module.""" + # init is done in ConvModule + pass diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/__init__.py b/thirdparty/mmdetection/mmdet/models/roi_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07df42bbf3b8236f4d3c8c333d4ad520d97cc196 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/__init__.py @@ -0,0 +1,26 @@ +from .base_roi_head import BaseRoIHead +from .bbox_heads import (BBoxHead, ConvFCBBoxHead, DoubleConvFCBBoxHead, + Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) +from .cascade_roi_head import CascadeRoIHead +from .double_roi_head import DoubleHeadRoIHead +from .dynamic_roi_head import DynamicRoIHead +from .grid_roi_head import GridRoIHead +from .htc_roi_head import HybridTaskCascadeRoIHead +from .mask_heads import (CoarseMaskHead, FCNMaskHead, FusedSemanticHead, + GridHead, HTCMaskHead, MaskIoUHead, MaskPointHead) +from .mask_scoring_roi_head import MaskScoringRoIHead +from .pisa_roi_head import PISARoIHead +from .point_rend_roi_head import PointRendRoIHead +from .roi_extractors import SingleRoIExtractor +from .shared_heads import ResLayer +from .standard_roi_head import StandardRoIHead + +__all__ = [ + 'BaseRoIHead', 'CascadeRoIHead', 'DoubleHeadRoIHead', 'MaskScoringRoIHead', + 'HybridTaskCascadeRoIHead', 'GridRoIHead', 'ResLayer', 'BBoxHead', + 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'StandardRoIHead', + 'Shared4Conv1FCBBoxHead', 
'DoubleConvFCBBoxHead', 'FCNMaskHead', + 'HTCMaskHead', 'FusedSemanticHead', 'GridHead', 'MaskIoUHead', + 'SingleRoIExtractor', 'PISARoIHead', 'PointRendRoIHead', 'MaskPointHead', + 'CoarseMaskHead', 'DynamicRoIHead' +] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/base_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/base_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ec027dda8c88e356d49ae3182a7cb19c93c5fe51 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/base_roi_head.py @@ -0,0 +1,106 @@ +from abc import ABCMeta, abstractmethod + +import torch.nn as nn + +from ..builder import build_shared_head + + +class BaseRoIHead(nn.Module, metaclass=ABCMeta): + """Base class for RoIHeads.""" + + def __init__(self, + bbox_roi_extractor=None, + bbox_head=None, + mask_roi_extractor=None, + mask_head=None, + shared_head=None, + train_cfg=None, + test_cfg=None): + super(BaseRoIHead, self).__init__() + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if shared_head is not None: + self.shared_head = build_shared_head(shared_head) + + if bbox_head is not None: + self.init_bbox_head(bbox_roi_extractor, bbox_head) + + if mask_head is not None: + self.init_mask_head(mask_roi_extractor, mask_head) + + self.init_assigner_sampler() + + @property + def with_bbox(self): + """bool: whether the RoI head contains a `bbox_head`""" + return hasattr(self, 'bbox_head') and self.bbox_head is not None + + @property + def with_mask(self): + """bool: whether the RoI head contains a `mask_head`""" + return hasattr(self, 'mask_head') and self.mask_head is not None + + @property + def with_shared_head(self): + """bool: whether the RoI head contains a `shared_head`""" + return hasattr(self, 'shared_head') and self.shared_head is not None + + @abstractmethod + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + pass + + @abstractmethod + def init_bbox_head(self): + """Initialize ``bbox_head``""" + pass + + @abstractmethod + def init_mask_head(self): + """Initialize ``mask_head``""" + pass + + @abstractmethod + def init_assigner_sampler(self): + """Initialize assigner and sampler.""" + pass + + @abstractmethod + def forward_train(self, + x, + img_meta, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + **kwargs): + """Forward function during training.""" + pass + + async def async_simple_test(self, x, img_meta, **kwargs): + """Asynchronized test function.""" + raise NotImplementedError + + def simple_test(self, + x, + proposal_list, + img_meta, + proposals=None, + rescale=False, + **kwargs): + """Test without augmentation.""" + pass + + def aug_test(self, x, proposal_list, img_metas, rescale=False, **kwargs): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
+ """ + pass diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/__init__.py b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..acfb71ebebe0bdd5c0601073409a77ddae1e1b1e --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/__init__.py @@ -0,0 +1,10 @@ +from .bbox_head import BBoxHead +from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, + Shared4Conv1FCBBoxHead) +from .double_bbox_head import DoubleConvFCBBoxHead +from .sabl_head import SABLHead + +__all__ = [ + 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', + 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead' +] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/bbox_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e0931e176160826ea9864b8a558e2cdf51d8d5d2 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/bbox_head.py @@ -0,0 +1,335 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.runner import auto_fp16, force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.core import build_bbox_coder, multi_apply, multiclass_nms +from mmdet.models.builder import HEADS, build_loss +from mmdet.models.losses import accuracy + + +@HEADS.register_module() +class BBoxHead(nn.Module): + """Simplest RoI head, with only two fc layers for classification and + regression respectively.""" + + def __init__(self, + with_avg_pool=False, + with_cls=True, + with_reg=True, + roi_feat_size=7, + in_channels=256, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=False, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1.0)): + super(BBoxHead, self).__init__() + assert with_cls or with_reg + self.with_avg_pool = with_avg_pool + self.with_cls = with_cls + self.with_reg = with_reg + self.roi_feat_size = _pair(roi_feat_size) + self.roi_feat_area = self.roi_feat_size[0] * self.roi_feat_size[1] + self.in_channels = in_channels + self.num_classes = num_classes + self.reg_class_agnostic = reg_class_agnostic + self.reg_decoded_bbox = reg_decoded_bbox + self.fp16_enabled = False + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + + in_channels = self.in_channels + if self.with_avg_pool: + self.avg_pool = nn.AvgPool2d(self.roi_feat_size) + else: + in_channels *= self.roi_feat_area + if self.with_cls: + # need to add background class + self.fc_cls = nn.Linear(in_channels, num_classes + 1) + if self.with_reg: + out_dim_reg = 4 if reg_class_agnostic else 4 * num_classes + self.fc_reg = nn.Linear(in_channels, out_dim_reg) + self.debug_imgs = None + + def init_weights(self): + # conv layers are already initialized by ConvModule + if self.with_cls: + nn.init.normal_(self.fc_cls.weight, 0, 0.01) + nn.init.constant_(self.fc_cls.bias, 0) + if self.with_reg: + nn.init.normal_(self.fc_reg.weight, 0, 0.001) + nn.init.constant_(self.fc_reg.bias, 0) + + @auto_fp16() + def forward(self, x): + if self.with_avg_pool: + x = self.avg_pool(x) + x = x.view(x.size(0), -1) + cls_score = self.fc_cls(x) if self.with_cls else None + 
bbox_pred = self.fc_reg(x) if self.with_reg else None + return cls_score, bbox_pred + + def _get_target_single(self, pos_bboxes, neg_bboxes, pos_gt_bboxes, + pos_gt_labels, cfg): + num_pos = pos_bboxes.size(0) + num_neg = neg_bboxes.size(0) + num_samples = num_pos + num_neg + + # original implementation uses new_zeros since BG are set to be 0 + # now use empty & fill because BG cat_id = num_classes, + # FG cat_id = [0, num_classes-1] + labels = pos_bboxes.new_full((num_samples, ), + self.num_classes, + dtype=torch.long) + label_weights = pos_bboxes.new_zeros(num_samples) + bbox_targets = pos_bboxes.new_zeros(num_samples, 4) + bbox_weights = pos_bboxes.new_zeros(num_samples, 4) + if num_pos > 0: + labels[:num_pos] = pos_gt_labels + pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight + label_weights[:num_pos] = pos_weight + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + pos_bboxes, pos_gt_bboxes) + else: + pos_bbox_targets = pos_gt_bboxes + bbox_targets[:num_pos, :] = pos_bbox_targets + bbox_weights[:num_pos, :] = 1 + if num_neg > 0: + label_weights[-num_neg:] = 1.0 + + return labels, label_weights, bbox_targets, bbox_weights + + def get_targets(self, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + concat=True): + pos_bboxes_list = [res.pos_bboxes for res in sampling_results] + neg_bboxes_list = [res.neg_bboxes for res in sampling_results] + pos_gt_bboxes_list = [res.pos_gt_bboxes for res in sampling_results] + pos_gt_labels_list = [res.pos_gt_labels for res in sampling_results] + labels, label_weights, bbox_targets, bbox_weights = multi_apply( + self._get_target_single, + pos_bboxes_list, + neg_bboxes_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + cfg=rcnn_train_cfg) + + if concat: + labels = torch.cat(labels, 0) + label_weights = torch.cat(label_weights, 0) + bbox_targets = torch.cat(bbox_targets, 0) + bbox_weights = torch.cat(bbox_weights, 0) + return labels, label_weights, bbox_targets, bbox_weights + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def loss(self, + cls_score, + bbox_pred, + rois, + labels, + label_weights, + bbox_targets, + bbox_weights, + reduction_override=None): + losses = dict() + if cls_score is not None: + avg_factor = max(torch.sum(label_weights > 0).float().item(), 1.) + if cls_score.numel() > 0: + losses['loss_cls'] = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=avg_factor, + reduction_override=reduction_override) + losses['acc'] = accuracy(cls_score, labels) + if bbox_pred is not None: + bg_class_ind = self.num_classes + # 0~self.num_classes-1 are FG, self.num_classes is BG + pos_inds = (labels >= 0) & (labels < bg_class_ind) + # do not perform bounding box regression for BG anymore. 
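+            # pos_inds marks the foreground RoIs; when reg_class_agnostic is
+            # False, each positive RoI is supervised only on the 4 deltas
+            # predicted for its own ground-truth class (the class-indexed
+            # view below).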
+ if pos_inds.any(): + if self.reg_decoded_bbox: + bbox_pred = self.bbox_coder.decode(rois[:, 1:], bbox_pred) + if self.reg_class_agnostic: + pos_bbox_pred = bbox_pred.view( + bbox_pred.size(0), 4)[pos_inds.type(torch.bool)] + else: + pos_bbox_pred = bbox_pred.view( + bbox_pred.size(0), -1, + 4)[pos_inds.type(torch.bool), + labels[pos_inds.type(torch.bool)]] + losses['loss_bbox'] = self.loss_bbox( + pos_bbox_pred, + bbox_targets[pos_inds.type(torch.bool)], + bbox_weights[pos_inds.type(torch.bool)], + avg_factor=bbox_targets.size(0), + reduction_override=reduction_override) + else: + losses['loss_bbox'] = bbox_pred[pos_inds].sum() + return losses + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def get_bboxes(self, + rois, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=False, + cfg=None): + if isinstance(cls_score, list): + cls_score = sum(cls_score) / float(len(cls_score)) + scores = F.softmax(cls_score, dim=1) if cls_score is not None else None + + if bbox_pred is not None: + bboxes = self.bbox_coder.decode( + rois[:, 1:], bbox_pred, max_shape=img_shape) + else: + bboxes = rois[:, 1:].clone() + if img_shape is not None: + bboxes[:, [0, 2]].clamp_(min=0, max=img_shape[1]) + bboxes[:, [1, 3]].clamp_(min=0, max=img_shape[0]) + + if rescale and bboxes.size(0) > 0: + if isinstance(scale_factor, float): + bboxes /= scale_factor + else: + scale_factor = bboxes.new_tensor(scale_factor) + bboxes = (bboxes.view(bboxes.size(0), -1, 4) / + scale_factor).view(bboxes.size()[0], -1) + + if cfg is None: + return bboxes, scores + else: + det_bboxes, det_labels = multiclass_nms(bboxes, scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + + return det_bboxes, det_labels + + @force_fp32(apply_to=('bbox_preds', )) + def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): + """Refine bboxes during training. + + Args: + rois (Tensor): Shape (n*bs, 5), where n is image number per GPU, + and bs is the sampled RoIs per image. The first column is + the image id and the next 4 columns are x1, y1, x2, y2. + labels (Tensor): Shape (n*bs, ). + bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class). + pos_is_gts (list[Tensor]): Flags indicating if each positive bbox + is a gt bbox. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Refined bboxes of each image in a mini-batch. + + Example: + >>> # xdoctest: +REQUIRES(module:kwarray) + >>> import kwarray + >>> import numpy as np + >>> from mmdet.core.bbox.demodata import random_boxes + >>> self = BBoxHead(reg_class_agnostic=True) + >>> n_roi = 2 + >>> n_img = 4 + >>> scale = 512 + >>> rng = np.random.RandomState(0) + >>> img_metas = [{'img_shape': (scale, scale)} + ... for _ in range(n_img)] + >>> # Create rois in the expected format + >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng) + >>> img_ids = torch.randint(0, n_img, (n_roi,)) + >>> img_ids = img_ids.float() + >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1) + >>> # Create other args + >>> labels = torch.randint(0, 2, (n_roi,)).long() + >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng) + >>> # For each image, pretend random positive boxes are gts + >>> is_label_pos = (labels.numpy() > 0).astype(np.int) + >>> lbl_per_img = kwarray.group_items(is_label_pos, + ... img_ids.numpy()) + >>> pos_per_img = [sum(lbl_per_img.get(gid, [])) + ... 
for gid in range(n_img)] + >>> pos_is_gts = [ + >>> torch.randint(0, 2, (npos,)).byte().sort( + >>> descending=True)[0] + >>> for npos in pos_per_img + >>> ] + >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds, + >>> pos_is_gts, img_metas) + >>> print(bboxes_list) + """ + img_ids = rois[:, 0].long().unique(sorted=True) + assert img_ids.numel() <= len(img_metas) + + bboxes_list = [] + for i in range(len(img_metas)): + inds = torch.nonzero( + rois[:, 0] == i, as_tuple=False).squeeze(dim=1) + num_rois = inds.numel() + + bboxes_ = rois[inds, 1:] + label_ = labels[inds] + bbox_pred_ = bbox_preds[inds] + img_meta_ = img_metas[i] + pos_is_gts_ = pos_is_gts[i] + + bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, + img_meta_) + + # filter gt bboxes + pos_keep = 1 - pos_is_gts_ + keep_inds = pos_is_gts_.new_ones(num_rois) + keep_inds[:len(pos_is_gts_)] = pos_keep + + bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) + + return bboxes_list + + @force_fp32(apply_to=('bbox_pred', )) + def regress_by_class(self, rois, label, bbox_pred, img_meta): + """Regress the bbox for the predicted class. Used in Cascade R-CNN. + + Args: + rois (Tensor): shape (n, 4) or (n, 5) + label (Tensor): shape (n, ) + bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4) + img_meta (dict): Image meta info. + + Returns: + Tensor: Regressed bboxes, the same shape as input rois. + """ + assert rois.size(1) == 4 or rois.size(1) == 5, repr(rois.shape) + + if not self.reg_class_agnostic: + label = label * 4 + inds = torch.stack((label, label + 1, label + 2, label + 3), 1) + bbox_pred = torch.gather(bbox_pred, 1, inds) + assert bbox_pred.size(1) == 4 + + if rois.size(1) == 4: + new_rois = self.bbox_coder.decode( + rois, bbox_pred, max_shape=img_meta['img_shape']) + else: + bboxes = self.bbox_coder.decode( + rois[:, 1:], bbox_pred, max_shape=img_meta['img_shape']) + new_rois = torch.cat((rois[:, [0]], bboxes), dim=1) + + return new_rois diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0e86d2ea67e154fae18dbf9d2bfde6d0a70e582c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py @@ -0,0 +1,205 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule + +from mmdet.models.builder import HEADS +from .bbox_head import BBoxHead + + +@HEADS.register_module() +class ConvFCBBoxHead(BBoxHead): + r"""More general bbox head, with shared conv and fc layers and two optional + separated branches. + + .. 
code-block:: none + + /-> cls convs -> cls fcs -> cls + shared convs -> shared fcs + \-> reg convs -> reg fcs -> reg + """ # noqa: W605 + + def __init__(self, + num_shared_convs=0, + num_shared_fcs=0, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + conv_out_channels=256, + fc_out_channels=1024, + conv_cfg=None, + norm_cfg=None, + *args, + **kwargs): + super(ConvFCBBoxHead, self).__init__(*args, **kwargs) + assert (num_shared_convs + num_shared_fcs + num_cls_convs + + num_cls_fcs + num_reg_convs + num_reg_fcs > 0) + if num_cls_convs > 0 or num_reg_convs > 0: + assert num_shared_fcs == 0 + if not self.with_cls: + assert num_cls_convs == 0 and num_cls_fcs == 0 + if not self.with_reg: + assert num_reg_convs == 0 and num_reg_fcs == 0 + self.num_shared_convs = num_shared_convs + self.num_shared_fcs = num_shared_fcs + self.num_cls_convs = num_cls_convs + self.num_cls_fcs = num_cls_fcs + self.num_reg_convs = num_reg_convs + self.num_reg_fcs = num_reg_fcs + self.conv_out_channels = conv_out_channels + self.fc_out_channels = fc_out_channels + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + # add shared convs and fcs + self.shared_convs, self.shared_fcs, last_layer_dim = \ + self._add_conv_fc_branch( + self.num_shared_convs, self.num_shared_fcs, self.in_channels, + True) + self.shared_out_channels = last_layer_dim + + # add cls specific branch + self.cls_convs, self.cls_fcs, self.cls_last_dim = \ + self._add_conv_fc_branch( + self.num_cls_convs, self.num_cls_fcs, self.shared_out_channels) + + # add reg specific branch + self.reg_convs, self.reg_fcs, self.reg_last_dim = \ + self._add_conv_fc_branch( + self.num_reg_convs, self.num_reg_fcs, self.shared_out_channels) + + if self.num_shared_fcs == 0 and not self.with_avg_pool: + if self.num_cls_fcs == 0: + self.cls_last_dim *= self.roi_feat_area + if self.num_reg_fcs == 0: + self.reg_last_dim *= self.roi_feat_area + + self.relu = nn.ReLU(inplace=True) + # reconstruct fc_cls and fc_reg since input channels are changed + if self.with_cls: + self.fc_cls = nn.Linear(self.cls_last_dim, self.num_classes + 1) + if self.with_reg: + out_dim_reg = (4 if self.reg_class_agnostic else 4 * + self.num_classes) + self.fc_reg = nn.Linear(self.reg_last_dim, out_dim_reg) + + def _add_conv_fc_branch(self, + num_branch_convs, + num_branch_fcs, + in_channels, + is_shared=False): + """Add shared or separable branch. 
+ + convs -> avg pool (optional) -> fcs + """ + last_layer_dim = in_channels + # add branch specific conv layers + branch_convs = nn.ModuleList() + if num_branch_convs > 0: + for i in range(num_branch_convs): + conv_in_channels = ( + last_layer_dim if i == 0 else self.conv_out_channels) + branch_convs.append( + ConvModule( + conv_in_channels, + self.conv_out_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + last_layer_dim = self.conv_out_channels + # add branch specific fc layers + branch_fcs = nn.ModuleList() + if num_branch_fcs > 0: + # for shared branch, only consider self.with_avg_pool + # for separated branches, also consider self.num_shared_fcs + if (is_shared + or self.num_shared_fcs == 0) and not self.with_avg_pool: + last_layer_dim *= self.roi_feat_area + for i in range(num_branch_fcs): + fc_in_channels = ( + last_layer_dim if i == 0 else self.fc_out_channels) + branch_fcs.append( + nn.Linear(fc_in_channels, self.fc_out_channels)) + last_layer_dim = self.fc_out_channels + return branch_convs, branch_fcs, last_layer_dim + + def init_weights(self): + super(ConvFCBBoxHead, self).init_weights() + # conv layers are already initialized by ConvModule + for module_list in [self.shared_fcs, self.cls_fcs, self.reg_fcs]: + for m in module_list.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + # shared part + if self.num_shared_convs > 0: + for conv in self.shared_convs: + x = conv(x) + + if self.num_shared_fcs > 0: + if self.with_avg_pool: + x = self.avg_pool(x) + + x = x.flatten(1) + + for fc in self.shared_fcs: + x = self.relu(fc(x)) + # separate branches + x_cls = x + x_reg = x + + for conv in self.cls_convs: + x_cls = conv(x_cls) + if x_cls.dim() > 2: + if self.with_avg_pool: + x_cls = self.avg_pool(x_cls) + x_cls = x_cls.flatten(1) + for fc in self.cls_fcs: + x_cls = self.relu(fc(x_cls)) + + for conv in self.reg_convs: + x_reg = conv(x_reg) + if x_reg.dim() > 2: + if self.with_avg_pool: + x_reg = self.avg_pool(x_reg) + x_reg = x_reg.flatten(1) + for fc in self.reg_fcs: + x_reg = self.relu(fc(x_reg)) + + cls_score = self.fc_cls(x_cls) if self.with_cls else None + bbox_pred = self.fc_reg(x_reg) if self.with_reg else None + return cls_score, bbox_pred + + +@HEADS.register_module() +class Shared2FCBBoxHead(ConvFCBBoxHead): + + def __init__(self, fc_out_channels=1024, *args, **kwargs): + super(Shared2FCBBoxHead, self).__init__( + num_shared_convs=0, + num_shared_fcs=2, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + fc_out_channels=fc_out_channels, + *args, + **kwargs) + + +@HEADS.register_module() +class Shared4Conv1FCBBoxHead(ConvFCBBoxHead): + + def __init__(self, fc_out_channels=1024, *args, **kwargs): + super(Shared4Conv1FCBBoxHead, self).__init__( + num_shared_convs=4, + num_shared_fcs=1, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + fc_out_channels=fc_out_channels, + *args, + **kwargs) diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6c154cb3c0d9d7639c3d4a2a1272406d3fab8acd --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py @@ -0,0 +1,172 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, normal_init, xavier_init + +from mmdet.models.backbones.resnet import Bottleneck +from 
mmdet.models.builder import HEADS
+from .bbox_head import BBoxHead
+
+
+class BasicResBlock(nn.Module):
+    """Basic residual block.
+
+    This block is a little different from the block in the ResNet backbone.
+    The kernel size of conv2 is 1 in this block, while it is 3 in the
+    ResNet BasicBlock.
+
+    Args:
+        in_channels (int): Channels of the input feature map.
+        out_channels (int): Channels of the output feature map.
+        conv_cfg (dict): The config dict for convolution layers.
+        norm_cfg (dict): The config dict for normalization layers.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN')):
+        super(BasicResBlock, self).__init__()
+
+        # main path
+        self.conv1 = ConvModule(
+            in_channels,
+            in_channels,
+            kernel_size=3,
+            padding=1,
+            bias=False,
+            conv_cfg=conv_cfg,
+            norm_cfg=norm_cfg)
+        self.conv2 = ConvModule(
+            in_channels,
+            out_channels,
+            kernel_size=1,
+            bias=False,
+            conv_cfg=conv_cfg,
+            norm_cfg=norm_cfg,
+            act_cfg=None)
+
+        # identity path
+        self.conv_identity = ConvModule(
+            in_channels,
+            out_channels,
+            kernel_size=1,
+            conv_cfg=conv_cfg,
+            norm_cfg=norm_cfg,
+            act_cfg=None)
+
+        self.relu = nn.ReLU(inplace=True)
+
+    def forward(self, x):
+        identity = x
+
+        x = self.conv1(x)
+        x = self.conv2(x)
+
+        identity = self.conv_identity(identity)
+        out = x + identity
+
+        out = self.relu(out)
+        return out
+
+
+@HEADS.register_module()
+class DoubleConvFCBBoxHead(BBoxHead):
+    r"""Bbox head used in Double-Head R-CNN
+
+    .. code-block:: none
+
+                                          /-> cls
+                      /-> shared convs ->
+                                          \-> reg
+        roi features
+                                          /-> cls
+                      \-> shared fc    ->
+                                          \-> reg
+    """  # noqa: W605
+
+    def __init__(self,
+                 num_convs=0,
+                 num_fcs=0,
+                 conv_out_channels=1024,
+                 fc_out_channels=1024,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN'),
+                 **kwargs):
+        kwargs.setdefault('with_avg_pool', True)
+        super(DoubleConvFCBBoxHead, self).__init__(**kwargs)
+        assert self.with_avg_pool
+        assert num_convs > 0
+        assert num_fcs > 0
+        self.num_convs = num_convs
+        self.num_fcs = num_fcs
+        self.conv_out_channels = conv_out_channels
+        self.fc_out_channels = fc_out_channels
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+
+        # increase the channel of input features
+        self.res_block = BasicResBlock(self.in_channels,
+                                       self.conv_out_channels)
+
+        # add conv heads
+        self.conv_branch = self._add_conv_branch()
+        # add fc heads
+        self.fc_branch = self._add_fc_branch()
+
+        out_dim_reg = 4 if self.reg_class_agnostic else 4 * self.num_classes
+        self.fc_reg = nn.Linear(self.conv_out_channels, out_dim_reg)
+
+        self.fc_cls = nn.Linear(self.fc_out_channels, self.num_classes + 1)
+        self.relu = nn.ReLU(inplace=True)
+
+    def _add_conv_branch(self):
+        """Add the conv branch which consists of a sequence of conv layers."""
+        branch_convs = nn.ModuleList()
+        for i in range(self.num_convs):
+            branch_convs.append(
+                Bottleneck(
+                    inplanes=self.conv_out_channels,
+                    planes=self.conv_out_channels // 4,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+        return branch_convs
+
+    def _add_fc_branch(self):
+        """Add the fc branch which consists of a sequence of fc layers."""
+        branch_fcs = nn.ModuleList()
+        for i in range(self.num_fcs):
+            fc_in_channels = (
+                self.in_channels *
+                self.roi_feat_area if i == 0 else self.fc_out_channels)
+            branch_fcs.append(nn.Linear(fc_in_channels, self.fc_out_channels))
+        return branch_fcs
+
+    def init_weights(self):
+        # conv layers are already initialized by ConvModule
+        normal_init(self.fc_cls, std=0.01)
+        normal_init(self.fc_reg, std=0.001)
+
+        for m in self.fc_branch.modules():
+            if isinstance(m,
nn.Linear):
+                xavier_init(m, distribution='uniform')
+
+    def forward(self, x_cls, x_reg):
+        # conv head
+        x_conv = self.res_block(x_reg)
+
+        for conv in self.conv_branch:
+            x_conv = conv(x_conv)
+
+        if self.with_avg_pool:
+            x_conv = self.avg_pool(x_conv)
+
+        x_conv = x_conv.view(x_conv.size(0), -1)
+        bbox_pred = self.fc_reg(x_conv)
+
+        # fc head
+        x_fc = x_cls.view(x_cls.size(0), -1)
+        for fc in self.fc_branch:
+            x_fc = self.relu(fc(x_fc))
+
+        cls_score = self.fc_cls(x_fc)
+
+        return cls_score, bbox_pred
diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/sabl_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/sabl_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..370a933bf3d8f3e626fe3608204383f8ced103ee
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/roi_heads/bbox_heads/sabl_head.py
@@ -0,0 +1,572 @@
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn import ConvModule, kaiming_init, normal_init, xavier_init
+from mmcv.runner import force_fp32
+
+from mmdet.core import build_bbox_coder, multi_apply, multiclass_nms
+from mmdet.models.builder import HEADS, build_loss
+from mmdet.models.losses import accuracy
+
+
+@HEADS.register_module()
+class SABLHead(nn.Module):
+    """Side-Aware Boundary Localization (SABL) for RoI-Head.
+
+    Side-Aware features are extracted by conv layers
+    with an attention mechanism.
+    Boundary Localization with Bucketing and Bucketing Guided Rescoring
+    are implemented in BucketingBBoxCoder.
+
+    Please refer to https://arxiv.org/abs/1912.04260 for more details.
+
+    Args:
+        cls_in_channels (int): Input channels of cls RoI feature. \
+            Defaults to 256.
+        reg_in_channels (int): Input channels of reg RoI feature. \
+            Defaults to 256.
+        roi_feat_size (int): Size of RoI features. Defaults to 7.
+        reg_feat_up_ratio (int): Upsample ratio of reg features. \
+            Defaults to 2.
+        reg_pre_kernel (int): Kernel of 2D conv layers before \
+            attention pooling. Defaults to 3.
+        reg_post_kernel (int): Kernel of 1D conv layers after \
+            attention pooling. Defaults to 3.
+        reg_pre_num (int): Number of pre convs. Defaults to 2.
+        reg_post_num (int): Number of post convs. Defaults to 1.
+        num_classes (int): Number of classes in dataset. Defaults to 80.
+        cls_out_channels (int): Hidden channels in cls fcs. Defaults to 1024.
+        reg_offset_out_channels (int): Hidden and output channel \
+            of reg offset branch. Defaults to 256.
+        reg_cls_out_channels (int): Hidden and output channel \
+            of reg cls branch. Defaults to 256.
+        num_cls_fcs (int): Number of fcs for cls branch. Defaults to 1.
+        num_reg_fcs (int): Number of fcs for reg branch. Defaults to 0.
+        reg_class_agnostic (bool): Class agnostic regression or not. \
+            Defaults to True.
+        norm_cfg (dict): Config of norm layers. Defaults to None.
+        bbox_coder (dict): Config of bbox coder. Defaults to
+            'BucketingBBoxCoder'.
+        loss_cls (dict): Config of classification loss.
+        loss_bbox_cls (dict): Config of classification loss for bbox branch.
+        loss_bbox_reg (dict): Config of regression loss for bbox branch.
+ """ + + def __init__(self, + num_classes, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', + num_buckets=14, + scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=0.1, loss_weight=1.0)): + super(SABLHead, self).__init__() + self.cls_in_channels = cls_in_channels + self.reg_in_channels = reg_in_channels + self.roi_feat_size = roi_feat_size + self.reg_feat_up_ratio = int(reg_feat_up_ratio) + self.num_buckets = bbox_coder['num_buckets'] + assert self.reg_feat_up_ratio // 2 >= 1 + self.up_reg_feat_size = roi_feat_size * self.reg_feat_up_ratio + assert self.up_reg_feat_size == bbox_coder['num_buckets'] + self.reg_pre_kernel = reg_pre_kernel + self.reg_post_kernel = reg_post_kernel + self.reg_pre_num = reg_pre_num + self.reg_post_num = reg_post_num + self.num_classes = num_classes + self.cls_out_channels = cls_out_channels + self.reg_offset_out_channels = reg_offset_out_channels + self.reg_cls_out_channels = reg_cls_out_channels + self.num_cls_fcs = num_cls_fcs + self.num_reg_fcs = num_reg_fcs + self.reg_class_agnostic = reg_class_agnostic + assert self.reg_class_agnostic + self.norm_cfg = norm_cfg + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox_cls = build_loss(loss_bbox_cls) + self.loss_bbox_reg = build_loss(loss_bbox_reg) + + self.cls_fcs = self._add_fc_branch(self.num_cls_fcs, + self.cls_in_channels, + self.roi_feat_size, + self.cls_out_channels) + + self.side_num = int(np.ceil(self.num_buckets / 2)) + + if self.reg_feat_up_ratio > 1: + self.upsample_x = nn.ConvTranspose1d( + reg_in_channels, + reg_in_channels, + self.reg_feat_up_ratio, + stride=self.reg_feat_up_ratio) + self.upsample_y = nn.ConvTranspose1d( + reg_in_channels, + reg_in_channels, + self.reg_feat_up_ratio, + stride=self.reg_feat_up_ratio) + + self.reg_pre_convs = nn.ModuleList() + for i in range(self.reg_pre_num): + reg_pre_conv = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=reg_pre_kernel, + padding=reg_pre_kernel // 2, + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_pre_convs.append(reg_pre_conv) + + self.reg_post_conv_xs = nn.ModuleList() + for i in range(self.reg_post_num): + reg_post_conv_x = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=(1, reg_post_kernel), + padding=(0, reg_post_kernel // 2), + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_post_conv_xs.append(reg_post_conv_x) + self.reg_post_conv_ys = nn.ModuleList() + for i in range(self.reg_post_num): + reg_post_conv_y = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=(reg_post_kernel, 1), + padding=(reg_post_kernel // 2, 0), + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_post_conv_ys.append(reg_post_conv_y) + + self.reg_conv_att_x = nn.Conv2d(reg_in_channels, 1, 1) + self.reg_conv_att_y = nn.Conv2d(reg_in_channels, 1, 1) + + self.fc_cls = nn.Linear(self.cls_out_channels, self.num_classes + 1) + self.relu = nn.ReLU(inplace=True) + + self.reg_cls_fcs = self._add_fc_branch(self.num_reg_fcs, + 
self.reg_in_channels, 1,
+                                            self.reg_cls_out_channels)
+        self.reg_offset_fcs = self._add_fc_branch(self.num_reg_fcs,
+                                                  self.reg_in_channels, 1,
+                                                  self.reg_offset_out_channels)
+        self.fc_reg_cls = nn.Linear(self.reg_cls_out_channels, 1)
+        self.fc_reg_offset = nn.Linear(self.reg_offset_out_channels, 1)
+
+    def _add_fc_branch(self, num_branch_fcs, in_channels, roi_feat_size,
+                       fc_out_channels):
+        in_channels = in_channels * roi_feat_size * roi_feat_size
+        branch_fcs = nn.ModuleList()
+        for i in range(num_branch_fcs):
+            fc_in_channels = (in_channels if i == 0 else fc_out_channels)
+            branch_fcs.append(nn.Linear(fc_in_channels, fc_out_channels))
+        return branch_fcs
+
+    def init_weights(self):
+        for module_list in [
+                self.reg_cls_fcs, self.reg_offset_fcs, self.cls_fcs
+        ]:
+            for m in module_list.modules():
+                if isinstance(m, nn.Linear):
+                    xavier_init(m, distribution='uniform')
+        if self.reg_feat_up_ratio > 1:
+            kaiming_init(self.upsample_x, distribution='normal')
+            kaiming_init(self.upsample_y, distribution='normal')
+
+        normal_init(self.reg_conv_att_x, 0, 0.01)
+        normal_init(self.reg_conv_att_y, 0, 0.01)
+        normal_init(self.fc_reg_offset, 0, 0.001)
+        normal_init(self.fc_reg_cls, 0, 0.01)
+        normal_init(self.fc_cls, 0, 0.01)
+
+    def cls_forward(self, cls_x):
+        cls_x = cls_x.view(cls_x.size(0), -1)
+        for fc in self.cls_fcs:
+            cls_x = self.relu(fc(cls_x))
+        cls_score = self.fc_cls(cls_x)
+        return cls_score
+
+    def attention_pool(self, reg_x):
+        """Extract direction-specific features fx and fy with an attention
+        mechanism."""
+        reg_fx = reg_x
+        reg_fy = reg_x
+        reg_fx_att = self.reg_conv_att_x(reg_fx).sigmoid()
+        reg_fy_att = self.reg_conv_att_y(reg_fy).sigmoid()
+        reg_fx_att = reg_fx_att / reg_fx_att.sum(dim=2).unsqueeze(2)
+        reg_fy_att = reg_fy_att / reg_fy_att.sum(dim=3).unsqueeze(3)
+        reg_fx = (reg_fx * reg_fx_att).sum(dim=2)
+        reg_fy = (reg_fy * reg_fy_att).sum(dim=3)
+        return reg_fx, reg_fy
+
+    def side_aware_feature_extractor(self, reg_x):
+        """Refine and extract side-aware features without splitting them."""
+        for reg_pre_conv in self.reg_pre_convs:
+            reg_x = reg_pre_conv(reg_x)
+        reg_fx, reg_fy = self.attention_pool(reg_x)
+
+        if self.reg_post_num > 0:
+            reg_fx = reg_fx.unsqueeze(2)
+            reg_fy = reg_fy.unsqueeze(3)
+            for i in range(self.reg_post_num):
+                reg_fx = self.reg_post_conv_xs[i](reg_fx)
+                reg_fy = self.reg_post_conv_ys[i](reg_fy)
+            reg_fx = reg_fx.squeeze(2)
+            reg_fy = reg_fy.squeeze(3)
+        if self.reg_feat_up_ratio > 1:
+            reg_fx = self.relu(self.upsample_x(reg_fx))
+            reg_fy = self.relu(self.upsample_y(reg_fy))
+        reg_fx = torch.transpose(reg_fx, 1, 2)
+        reg_fy = torch.transpose(reg_fy, 1, 2)
+        return reg_fx.contiguous(), reg_fy.contiguous()
+
+    def reg_pred(self, x, offset_fcs, cls_fcs):
+        """Predict bucketing estimation (cls_pred) and fine regression
+        (offset_pred) with side-aware features."""
+        x_offset = x.view(-1, self.reg_in_channels)
+        x_cls = x.view(-1, self.reg_in_channels)
+
+        for fc in offset_fcs:
+            x_offset = self.relu(fc(x_offset))
+        for fc in cls_fcs:
+            x_cls = self.relu(fc(x_cls))
+        offset_pred = self.fc_reg_offset(x_offset)
+        cls_pred = self.fc_reg_cls(x_cls)
+
+        offset_pred = offset_pred.view(x.size(0), -1)
+        cls_pred = cls_pred.view(x.size(0), -1)
+
+        return offset_pred, cls_pred
+
+    def side_aware_split(self, feat):
+        """Split side-aware features aligned with orders of bucketing
+        targets."""
+        l_end = int(np.ceil(self.up_reg_feat_size / 2))
+        r_start = int(np.floor(self.up_reg_feat_size / 2))
+        feat_fl = feat[:, :l_end]
+        feat_fr = feat[:, r_start:].flip(dims=(1, ))
+
feat_fl = feat_fl.contiguous() + feat_fr = feat_fr.contiguous() + feat = torch.cat([feat_fl, feat_fr], dim=-1) + return feat + + def bbox_pred_split(self, bbox_pred, num_proposals_per_img): + """Split batch bbox prediction back to each image.""" + bucket_cls_preds, bucket_offset_preds = bbox_pred + bucket_cls_preds = bucket_cls_preds.split(num_proposals_per_img, 0) + bucket_offset_preds = bucket_offset_preds.split( + num_proposals_per_img, 0) + bbox_pred = tuple(zip(bucket_cls_preds, bucket_offset_preds)) + return bbox_pred + + def reg_forward(self, reg_x): + outs = self.side_aware_feature_extractor(reg_x) + edge_offset_preds = [] + edge_cls_preds = [] + reg_fx = outs[0] + reg_fy = outs[1] + offset_pred_x, cls_pred_x = self.reg_pred(reg_fx, self.reg_offset_fcs, + self.reg_cls_fcs) + offset_pred_y, cls_pred_y = self.reg_pred(reg_fy, self.reg_offset_fcs, + self.reg_cls_fcs) + offset_pred_x = self.side_aware_split(offset_pred_x) + offset_pred_y = self.side_aware_split(offset_pred_y) + cls_pred_x = self.side_aware_split(cls_pred_x) + cls_pred_y = self.side_aware_split(cls_pred_y) + edge_offset_preds = torch.cat([offset_pred_x, offset_pred_y], dim=-1) + edge_cls_preds = torch.cat([cls_pred_x, cls_pred_y], dim=-1) + + return (edge_cls_preds, edge_offset_preds) + + def forward(self, x): + + bbox_pred = self.reg_forward(x) + cls_score = self.cls_forward(x) + + return cls_score, bbox_pred + + def get_targets(self, sampling_results, gt_bboxes, gt_labels, + rcnn_train_cfg): + pos_proposals = [res.pos_bboxes for res in sampling_results] + neg_proposals = [res.neg_bboxes for res in sampling_results] + pos_gt_bboxes = [res.pos_gt_bboxes for res in sampling_results] + pos_gt_labels = [res.pos_gt_labels for res in sampling_results] + cls_reg_targets = self.bucket_target(pos_proposals, neg_proposals, + pos_gt_bboxes, pos_gt_labels, + rcnn_train_cfg) + (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) = cls_reg_targets + return (labels, label_weights, (bucket_cls_targets, + bucket_offset_targets), + (bucket_cls_weights, bucket_offset_weights)) + + def bucket_target(self, + pos_proposals_list, + neg_proposals_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + rcnn_train_cfg, + concat=True): + (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) = multi_apply( + self._bucket_target_single, + pos_proposals_list, + neg_proposals_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + cfg=rcnn_train_cfg) + + if concat: + labels = torch.cat(labels, 0) + label_weights = torch.cat(label_weights, 0) + bucket_cls_targets = torch.cat(bucket_cls_targets, 0) + bucket_cls_weights = torch.cat(bucket_cls_weights, 0) + bucket_offset_targets = torch.cat(bucket_offset_targets, 0) + bucket_offset_weights = torch.cat(bucket_offset_weights, 0) + return (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) + + def _bucket_target_single(self, pos_proposals, neg_proposals, + pos_gt_bboxes, pos_gt_labels, cfg): + """Compute bucketing estimation targets and fine regression targets for + a single image. + + Args: + pos_proposals (Tensor): positive proposals of a single image, + Shape (n_pos, 4) + neg_proposals (Tensor): negative proposals of a single image, + Shape (n_neg, 4). + pos_gt_bboxes (Tensor): gt bboxes assigned to positive proposals + of a single image, Shape (n_pos, 4). 
+            pos_gt_labels (Tensor): gt labels assigned to positive proposals
+                of a single image, Shape (n_pos, ).
+            cfg (dict): Config of calculating targets
+
+        Returns:
+            tuple:
+
+                - labels (Tensor): Labels in a single image. \
+                    Shape (n,).
+                - label_weights (Tensor): Label weights in a single image.\
+                    Shape (n,)
+                - bucket_cls_targets (Tensor): Bucket cls targets in \
+                    a single image. Shape (n, num_buckets*2).
+                - bucket_cls_weights (Tensor): Bucket cls weights in \
+                    a single image. Shape (n, num_buckets*2).
+                - bucket_offset_targets (Tensor): Bucket offset targets \
+                    in a single image. Shape (n, num_buckets*2).
+                - bucket_offset_weights (Tensor): Bucket offset weights \
+                    in a single image. Shape (n, num_buckets*2).
+        """
+        num_pos = pos_proposals.size(0)
+        num_neg = neg_proposals.size(0)
+        num_samples = num_pos + num_neg
+        labels = pos_gt_bboxes.new_full((num_samples, ),
+                                        self.num_classes,
+                                        dtype=torch.long)
+        label_weights = pos_proposals.new_zeros(num_samples)
+        bucket_cls_targets = pos_proposals.new_zeros(num_samples,
+                                                     4 * self.side_num)
+        bucket_cls_weights = pos_proposals.new_zeros(num_samples,
+                                                     4 * self.side_num)
+        bucket_offset_targets = pos_proposals.new_zeros(
+            num_samples, 4 * self.side_num)
+        bucket_offset_weights = pos_proposals.new_zeros(
+            num_samples, 4 * self.side_num)
+        if num_pos > 0:
+            labels[:num_pos] = pos_gt_labels
+            label_weights[:num_pos] = 1.0
+            (pos_bucket_offset_targets, pos_bucket_offset_weights,
+             pos_bucket_cls_targets,
+             pos_bucket_cls_weights) = self.bbox_coder.encode(
+                 pos_proposals, pos_gt_bboxes)
+            bucket_cls_targets[:num_pos, :] = pos_bucket_cls_targets
+            bucket_cls_weights[:num_pos, :] = pos_bucket_cls_weights
+            bucket_offset_targets[:num_pos, :] = pos_bucket_offset_targets
+            bucket_offset_weights[:num_pos, :] = pos_bucket_offset_weights
+        if num_neg > 0:
+            label_weights[-num_neg:] = 1.0
+        return (labels, label_weights, bucket_cls_targets, bucket_cls_weights,
+                bucket_offset_targets, bucket_offset_weights)
+
+    def loss(self,
+             cls_score,
+             bbox_pred,
+             rois,
+             labels,
+             label_weights,
+             bbox_targets,
+             bbox_weights,
+             reduction_override=None):
+        losses = dict()
+        if cls_score is not None:
+            avg_factor = max(torch.sum(label_weights > 0).float().item(), 1.)
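+            # avg_factor counts the samples whose label weight is non-zero;
+            # the max(..., 1.) guard avoids dividing by zero when a batch
+            # happens to contain no weighted samples.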
+            losses['loss_cls'] = self.loss_cls(
+                cls_score,
+                labels,
+                label_weights,
+                avg_factor=avg_factor,
+                reduction_override=reduction_override)
+            losses['acc'] = accuracy(cls_score, labels)
+
+        if bbox_pred is not None:
+            bucket_cls_preds, bucket_offset_preds = bbox_pred
+            bucket_cls_targets, bucket_offset_targets = bbox_targets
+            bucket_cls_weights, bucket_offset_weights = bbox_weights
+            # edge cls
+            bucket_cls_preds = bucket_cls_preds.view(-1, self.side_num)
+            bucket_cls_targets = bucket_cls_targets.view(-1, self.side_num)
+            bucket_cls_weights = bucket_cls_weights.view(-1, self.side_num)
+            losses['loss_bbox_cls'] = self.loss_bbox_cls(
+                bucket_cls_preds,
+                bucket_cls_targets,
+                bucket_cls_weights,
+                avg_factor=bucket_cls_targets.size(0),
+                reduction_override=reduction_override)
+
+            losses['loss_bbox_reg'] = self.loss_bbox_reg(
+                bucket_offset_preds,
+                bucket_offset_targets,
+                bucket_offset_weights,
+                avg_factor=bucket_offset_targets.size(0),
+                reduction_override=reduction_override)
+
+        return losses
+
+    @force_fp32(apply_to=('cls_score', 'bbox_pred'))
+    def get_bboxes(self,
+                   rois,
+                   cls_score,
+                   bbox_pred,
+                   img_shape,
+                   scale_factor,
+                   rescale=False,
+                   cfg=None):
+        if isinstance(cls_score, list):
+            cls_score = sum(cls_score) / float(len(cls_score))
+        scores = F.softmax(cls_score, dim=1) if cls_score is not None else None
+
+        if bbox_pred is not None:
+            bboxes, confids = self.bbox_coder.decode(rois[:, 1:], bbox_pred,
+                                                     img_shape)
+        else:
+            bboxes = rois[:, 1:].clone()
+            confids = None
+            if img_shape is not None:
+                # strided views keep the in-place clamp effective; indexing
+                # with a list would clamp a copy and change nothing
+                bboxes[:, 0::2].clamp_(min=0, max=img_shape[1] - 1)
+                bboxes[:, 1::2].clamp_(min=0, max=img_shape[0] - 1)
+
+        if rescale and bboxes.size(0) > 0:
+            if isinstance(scale_factor, float):
+                bboxes /= scale_factor
+            else:
+                bboxes /= torch.from_numpy(scale_factor).to(bboxes.device)
+
+        if cfg is None:
+            return bboxes, scores
+        else:
+            det_bboxes, det_labels = multiclass_nms(
+                bboxes,
+                scores,
+                cfg.score_thr,
+                cfg.nms,
+                cfg.max_per_img,
+                score_factors=confids)
+
+            return det_bboxes, det_labels
+
+    @force_fp32(apply_to=('bbox_preds', ))
+    def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas):
+        """Refine bboxes during training.
+
+        Args:
+            rois (Tensor): Shape (n*bs, 5), where n is image number per GPU,
+                and bs is the sampled RoIs per image.
+            labels (Tensor): Shape (n*bs, ).
+            bbox_preds (list[Tensor]): Shape [(n*bs, num_buckets*2), \
+                (n*bs, num_buckets*2)].
+            pos_is_gts (list[Tensor]): Flags indicating if each positive bbox
+                is a gt bbox.
+            img_metas (list[dict]): Meta info of each image.
+
+        Returns:
+            list[Tensor]: Refined bboxes of each image in a mini-batch.
+ """ + img_ids = rois[:, 0].long().unique(sorted=True) + assert img_ids.numel() == len(img_metas) + + bboxes_list = [] + for i in range(len(img_metas)): + inds = torch.nonzero( + rois[:, 0] == i, as_tuple=False).squeeze(dim=1) + num_rois = inds.numel() + + bboxes_ = rois[inds, 1:] + label_ = labels[inds] + edge_cls_preds, edge_offset_preds = bbox_preds + edge_cls_preds_ = edge_cls_preds[inds] + edge_offset_preds_ = edge_offset_preds[inds] + bbox_pred_ = [edge_cls_preds_, edge_offset_preds_] + img_meta_ = img_metas[i] + pos_is_gts_ = pos_is_gts[i] + + bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, + img_meta_) + # filter gt bboxes + pos_keep = 1 - pos_is_gts_ + keep_inds = pos_is_gts_.new_ones(num_rois) + keep_inds[:len(pos_is_gts_)] = pos_keep + + bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) + + return bboxes_list + + @force_fp32(apply_to=('bbox_pred', )) + def regress_by_class(self, rois, label, bbox_pred, img_meta): + """Regress the bbox for the predicted class. Used in Cascade R-CNN. + + Args: + rois (Tensor): shape (n, 4) or (n, 5) + label (Tensor): shape (n, ) + bbox_pred (list[Tensor]): shape [(n, num_buckets *2), \ + (n, num_buckets *2)] + img_meta (dict): Image meta info. + + Returns: + Tensor: Regressed bboxes, the same shape as input rois. + """ + assert rois.size(1) == 4 or rois.size(1) == 5 + + if rois.size(1) == 4: + new_rois, _ = self.bbox_coder.decode(rois, bbox_pred, + img_meta['img_shape']) + else: + bboxes, _ = self.bbox_coder.decode(rois[:, 1:], bbox_pred, + img_meta['img_shape']) + new_rois = torch.cat((rois[:, [0]], bboxes), dim=1) + + return new_rois diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/cascade_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/cascade_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..45b6f36a386cd37c50cc43666fcc516f2e14d868 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/cascade_roi_head.py @@ -0,0 +1,507 @@ +import torch +import torch.nn as nn + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, build_assigner, + build_sampler, merge_aug_bboxes, merge_aug_masks, + multiclass_nms) +from ..builder import HEADS, build_head, build_roi_extractor +from .base_roi_head import BaseRoIHead +from .test_mixins import BBoxTestMixin, MaskTestMixin + + +@HEADS.register_module() +class CascadeRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin): + """Cascade roi head including one bbox head and one mask head. + + https://arxiv.org/abs/1712.00726 + """ + + def __init__(self, + num_stages, + stage_loss_weights, + bbox_roi_extractor=None, + bbox_head=None, + mask_roi_extractor=None, + mask_head=None, + shared_head=None, + train_cfg=None, + test_cfg=None): + assert bbox_roi_extractor is not None + assert bbox_head is not None + assert shared_head is None, \ + 'Shared head is not supported in Cascade RCNN anymore' + self.num_stages = num_stages + self.stage_loss_weights = stage_loss_weights + super(CascadeRoIHead, self).__init__( + bbox_roi_extractor=bbox_roi_extractor, + bbox_head=bbox_head, + mask_roi_extractor=mask_roi_extractor, + mask_head=mask_head, + shared_head=shared_head, + train_cfg=train_cfg, + test_cfg=test_cfg) + + def init_bbox_head(self, bbox_roi_extractor, bbox_head): + """Initialize box head and box roi extractor. + + Args: + bbox_roi_extractor (dict): Config of box roi extractor. + bbox_head (dict): Config of box in box head. 
+ """ + self.bbox_roi_extractor = nn.ModuleList() + self.bbox_head = nn.ModuleList() + if not isinstance(bbox_roi_extractor, list): + bbox_roi_extractor = [ + bbox_roi_extractor for _ in range(self.num_stages) + ] + if not isinstance(bbox_head, list): + bbox_head = [bbox_head for _ in range(self.num_stages)] + assert len(bbox_roi_extractor) == len(bbox_head) == self.num_stages + for roi_extractor, head in zip(bbox_roi_extractor, bbox_head): + self.bbox_roi_extractor.append(build_roi_extractor(roi_extractor)) + self.bbox_head.append(build_head(head)) + + def init_mask_head(self, mask_roi_extractor, mask_head): + """Initialize mask head and mask roi extractor. + + Args: + mask_roi_extractor (dict): Config of mask roi extractor. + mask_head (dict): Config of mask in mask head. + """ + self.mask_head = nn.ModuleList() + if not isinstance(mask_head, list): + mask_head = [mask_head for _ in range(self.num_stages)] + assert len(mask_head) == self.num_stages + for head in mask_head: + self.mask_head.append(build_head(head)) + if mask_roi_extractor is not None: + self.share_roi_extractor = False + self.mask_roi_extractor = nn.ModuleList() + if not isinstance(mask_roi_extractor, list): + mask_roi_extractor = [ + mask_roi_extractor for _ in range(self.num_stages) + ] + assert len(mask_roi_extractor) == self.num_stages + for roi_extractor in mask_roi_extractor: + self.mask_roi_extractor.append( + build_roi_extractor(roi_extractor)) + else: + self.share_roi_extractor = True + self.mask_roi_extractor = self.bbox_roi_extractor + + def init_assigner_sampler(self): + """Initialize assigner and sampler for each stage.""" + self.bbox_assigner = [] + self.bbox_sampler = [] + if self.train_cfg is not None: + for idx, rcnn_train_cfg in enumerate(self.train_cfg): + self.bbox_assigner.append( + build_assigner(rcnn_train_cfg.assigner)) + self.current_stage = idx + self.bbox_sampler.append( + build_sampler(rcnn_train_cfg.sampler, context=self)) + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. 
+ """ + if self.with_shared_head: + self.shared_head.init_weights(pretrained=pretrained) + for i in range(self.num_stages): + if self.with_bbox: + self.bbox_roi_extractor[i].init_weights() + self.bbox_head[i].init_weights() + if self.with_mask: + if not self.share_roi_extractor: + self.mask_roi_extractor[i].init_weights() + self.mask_head[i].init_weights() + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask heads + if self.with_mask: + mask_rois = rois[:100] + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + + def _bbox_forward(self, stage, x, rois): + """Box head forward function used in both training and testing.""" + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor(x[:bbox_roi_extractor.num_inputs], + rois) + # do not support caffe_c4 model anymore + cls_score, bbox_pred = bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, stage, x, sampling_results, gt_bboxes, + gt_labels, rcnn_train_cfg): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward(stage, x, rois) + bbox_targets = self.bbox_head[stage].get_targets( + sampling_results, gt_bboxes, gt_labels, rcnn_train_cfg) + loss_bbox = self.bbox_head[stage].loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update( + loss_bbox=loss_bbox, rois=rois, bbox_targets=bbox_targets) + return bbox_results + + def _mask_forward(self, stage, x, rois): + """Mask head forward function used in both training and testing.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + mask_feats = mask_roi_extractor(x[:mask_roi_extractor.num_inputs], + rois) + # do not support caffe_c4 model anymore + mask_pred = mask_head(mask_feats) + + mask_results = dict(mask_pred=mask_pred) + return mask_results + + def _mask_forward_train(self, + stage, + x, + sampling_results, + gt_masks, + rcnn_train_cfg, + bbox_feats=None): + """Run forward function and calculate loss for mask head in + training.""" + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_results = self._mask_forward(stage, x, pos_rois) + + mask_targets = self.mask_head[stage].get_targets( + sampling_results, gt_masks, rcnn_train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.mask_head[stage].loss(mask_results['mask_pred'], + mask_targets, pos_labels) + + mask_results.update(loss_mask=loss_mask) + return mask_results + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. 
For details on the values of these keys see
+                `mmdet/datasets/pipelines/formatting.py:Collect`.
+            proposal_list (list[Tensor]): list of region proposals.
+            gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
+                shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): class indices corresponding to each box
+            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+                boxes can be ignored when computing the loss.
+            gt_masks (None | Tensor) : true segmentation masks for each box
+                used if the architecture supports a segmentation task.
+
+        Returns:
+            dict[str, Tensor]: a dictionary of loss components
+        """
+        losses = dict()
+        for i in range(self.num_stages):
+            self.current_stage = i
+            rcnn_train_cfg = self.train_cfg[i]
+            lw = self.stage_loss_weights[i]
+
+            # assign gts and sample proposals
+            sampling_results = []
+            if self.with_bbox or self.with_mask:
+                bbox_assigner = self.bbox_assigner[i]
+                bbox_sampler = self.bbox_sampler[i]
+                num_imgs = len(img_metas)
+                if gt_bboxes_ignore is None:
+                    gt_bboxes_ignore = [None for _ in range(num_imgs)]
+
+                for j in range(num_imgs):
+                    assign_result = bbox_assigner.assign(
+                        proposal_list[j], gt_bboxes[j], gt_bboxes_ignore[j],
+                        gt_labels[j])
+                    sampling_result = bbox_sampler.sample(
+                        assign_result,
+                        proposal_list[j],
+                        gt_bboxes[j],
+                        gt_labels[j],
+                        feats=[lvl_feat[j][None] for lvl_feat in x])
+                    sampling_results.append(sampling_result)
+
+            # bbox head forward and loss
+            bbox_results = self._bbox_forward_train(i, x, sampling_results,
+                                                    gt_bboxes, gt_labels,
+                                                    rcnn_train_cfg)
+
+            for name, value in bbox_results['loss_bbox'].items():
+                losses[f's{i}.{name}'] = (
+                    value * lw if 'loss' in name else value)
+
+            # mask head forward and loss
+            if self.with_mask:
+                mask_results = self._mask_forward_train(
+                    i, x, sampling_results, gt_masks, rcnn_train_cfg,
+                    bbox_results['bbox_feats'])
+                for name, value in mask_results['loss_mask'].items():
+                    losses[f's{i}.{name}'] = (
+                        value * lw if 'loss' in name else value)
+
+            # refine bboxes
+            if i < self.num_stages - 1:
+                pos_is_gts = [res.pos_is_gt for res in sampling_results]
+                # bbox_targets is a tuple
+                roi_labels = bbox_results['bbox_targets'][0]
+                with torch.no_grad():
+                    roi_labels = torch.where(
+                        roi_labels == self.bbox_head[i].num_classes,
+                        bbox_results['cls_score'][:, :-1].argmax(1),
+                        roi_labels)
+                    proposal_list = self.bbox_head[i].refine_bboxes(
+                        bbox_results['rois'], roi_labels,
+                        bbox_results['bbox_pred'], pos_is_gts, img_metas)
+
+        return losses
+
+    def simple_test(self, x, proposal_list, img_metas, rescale=False):
+        """Test without augmentation."""
+        assert self.with_bbox, 'Bbox head must be implemented.'
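+        # Test-time cascade: the same RoIs are pushed through every stage,
+        # regressed between stages via regress_by_class, and the per-stage
+        # classification scores are averaged before the final NMS.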
+ num_imgs = len(proposal_list) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # "ms" in variable names means multi-stage + ms_bbox_result = {} + ms_segm_result = {} + ms_scores = [] + rcnn_test_cfg = self.test_cfg + + rois = bbox2roi(proposal_list) + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple( + len(proposals) for proposals in proposal_list) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + if isinstance(bbox_pred, torch.Tensor): + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + else: + bbox_pred = self.bbox_head[i].bbox_pred_split( + bbox_pred, num_proposals_per_img) + ms_scores.append(cls_score) + + if i < self.num_stages - 1: + bbox_label = [s[:, :-1].argmax(dim=1) for s in cls_score] + rois = torch.cat([ + self.bbox_head[i].regress_by_class(rois[j], bbox_label[j], + bbox_pred[j], + img_metas[j]) + for j in range(num_imgs) + ]) + + # average scores of each image by stages + cls_score = [ + sum([score[i] for score in ms_scores]) / float(len(ms_scores)) + for i in range(num_imgs) + ] + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(num_imgs): + det_bbox, det_label = self.bbox_head[-1].get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + + if torch.onnx.is_in_onnx_export(): + return det_bboxes, det_labels + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head[-1].num_classes) + for i in range(num_imgs) + ] + ms_bbox_result['ensemble'] = bbox_results + + if self.with_mask: + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + mask_classes = self.mask_head[-1].num_classes + segm_results = [[[] for _ in range(mask_classes)] + for _ in range(num_imgs)] + else: + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_rois = bbox2roi(_bboxes) + num_mask_rois_per_img = tuple( + _bbox.size(0) for _bbox in _bboxes) + aug_masks = [] + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + mask_pred = mask_results['mask_pred'] + # split batch mask prediction back to each image + mask_pred = mask_pred.split(num_mask_rois_per_img, 0) + aug_masks.append( + [m.sigmoid().cpu().numpy() for m in mask_pred]) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] + for _ in range(self.mask_head[-1].num_classes)]) + else: + aug_mask = [mask[i] for mask in aug_masks] + merged_masks = merge_aug_masks( + aug_mask, [[img_metas[i]]] * self.num_stages, + rcnn_test_cfg) + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, _bboxes[i], det_labels[i], + rcnn_test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + 
ms_segm_result['ensemble'] = segm_results + + if self.with_mask: + results = list( + zip(ms_bbox_result['ensemble'], ms_segm_result['ensemble'])) + else: + results = ms_bbox_result['ensemble'] + + return results + + def aug_test(self, features, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. + """ + rcnn_test_cfg = self.test_cfg + aug_bboxes = [] + aug_scores = [] + for x, img_meta in zip(features, img_metas): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + # "ms" in variable names means multi-stage + ms_scores = [] + + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + ms_scores.append(bbox_results['cls_score']) + + if i < self.num_stages - 1: + bbox_label = bbox_results['cls_score'][:, :-1].argmax( + dim=1) + rois = self.bbox_head[i].regress_by_class( + rois, bbox_label, bbox_results['bbox_pred'], + img_meta[0]) + + cls_score = sum(ms_scores) / float(len(ms_scores)) + bboxes, scores = self.bbox_head[-1].get_bboxes( + rois, + cls_score, + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + + bbox_result = bbox2result(det_bboxes, det_labels, + self.bbox_head[-1].num_classes) + + if self.with_mask: + if det_bboxes.shape[0] == 0: + segm_result = [[[] + for _ in range(self.mask_head[-1].num_classes)] + ] + else: + aug_masks = [] + aug_img_metas = [] + for x, img_meta in zip(features, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + aug_img_metas.append(img_meta) + merged_masks = merge_aug_masks(aug_masks, aug_img_metas, + self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + rcnn_test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return [(bbox_result, segm_result)] + else: + return [bbox_result] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/double_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/double_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a1aa6c8244a889fbbed312a89574c3e11be294f0 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/double_roi_head.py @@ -0,0 +1,33 @@ +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class DoubleHeadRoIHead(StandardRoIHead): + """RoI head for Double Head RCNN. 
+
+    https://arxiv.org/abs/1904.06493
+    """
+
+    def __init__(self, reg_roi_scale_factor, **kwargs):
+        super(DoubleHeadRoIHead, self).__init__(**kwargs)
+        self.reg_roi_scale_factor = reg_roi_scale_factor
+
+    def _bbox_forward(self, x, rois):
+        """Box head forward function used in both training and testing."""
+        bbox_cls_feats = self.bbox_roi_extractor(
+            x[:self.bbox_roi_extractor.num_inputs], rois)
+        bbox_reg_feats = self.bbox_roi_extractor(
+            x[:self.bbox_roi_extractor.num_inputs],
+            rois,
+            roi_scale_factor=self.reg_roi_scale_factor)
+        if self.with_shared_head:
+            bbox_cls_feats = self.shared_head(bbox_cls_feats)
+            bbox_reg_feats = self.shared_head(bbox_reg_feats)
+        cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats)
+
+        bbox_results = dict(
+            cls_score=cls_score,
+            bbox_pred=bbox_pred,
+            bbox_feats=bbox_cls_feats)
+        return bbox_results
diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/dynamic_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/dynamic_roi_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c5a265d9d9a6732068549cd9fa514a2be3d1932
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/roi_heads/dynamic_roi_head.py
@@ -0,0 +1,148 @@
+import numpy as np
+import torch
+
+from mmdet.core import bbox2roi
+from mmdet.models.losses import SmoothL1Loss
+from ..builder import HEADS
+from .standard_roi_head import StandardRoIHead
+
+
+@HEADS.register_module()
+class DynamicRoIHead(StandardRoIHead):
+    """RoI head for `Dynamic R-CNN <https://arxiv.org/abs/2004.06002>`_."""
+
+    def __init__(self, **kwargs):
+        super(DynamicRoIHead, self).__init__(**kwargs)
+        assert isinstance(self.bbox_head.loss_bbox, SmoothL1Loss)
+        # the IoU history of the past `update_iter_interval` iterations
+        self.iou_history = []
+        # the beta history of the past `update_iter_interval` iterations
+        self.beta_history = []
+
+    def forward_train(self,
+                      x,
+                      img_metas,
+                      proposal_list,
+                      gt_bboxes,
+                      gt_labels,
+                      gt_bboxes_ignore=None,
+                      gt_masks=None):
+        """Forward function for training.
+
+        Args:
+            x (list[Tensor]): list of multi-level img features.
+
+            img_metas (list[dict]): list of image info dict where each dict
+                has: 'img_shape', 'scale_factor', 'flip', and may also contain
+                'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
+                For details on the values of these keys see
+                `mmdet/datasets/pipelines/formatting.py:Collect`.
+
+            proposal_list (list[Tensor]): list of region proposals.
+
+            gt_bboxes (list[Tensor]): each item is the ground-truth boxes of
+                one image, in [tl_x, tl_y, br_x, br_y] format.
+
+            gt_labels (list[Tensor]): class indices corresponding to each box
+
+            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
+                boxes can be ignored when computing the loss.
+
+            gt_masks (None | Tensor) : true segmentation masks for each box
+                used if the architecture supports a segmentation task.
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + cur_iou = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + # record the `iou_topk`-th largest IoU in an image + iou_topk = min(self.train_cfg.dynamic_rcnn.iou_topk, + len(assign_result.max_overlaps)) + ious, _ = torch.topk(assign_result.max_overlaps, iou_topk) + cur_iou.append(ious[-1].item()) + sampling_results.append(sampling_result) + # average the current IoUs over images + cur_iou = np.mean(cur_iou) + self.iou_history.append(cur_iou) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + # update IoU threshold and SmoothL1 beta + update_iter_interval = self.train_cfg.dynamic_rcnn.update_iter_interval + if len(self.iou_history) % update_iter_interval == 0: + new_iou_thr, new_beta = self.update_hyperparameters() + + return losses + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + num_imgs = len(img_metas) + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + # record the `beta_topk`-th smallest target + # `bbox_targets[2]` and `bbox_targets[3]` stand for bbox_targets + # and bbox_weights, respectively + pos_inds = bbox_targets[3][:, 0].nonzero().squeeze(1) + num_pos = len(pos_inds) + cur_target = bbox_targets[2][pos_inds, :2].abs().mean(dim=1) + beta_topk = min(self.train_cfg.dynamic_rcnn.beta_topk * num_imgs, + num_pos) + cur_target = torch.kthvalue(cur_target, beta_topk)[0].item() + self.beta_history.append(cur_target) + loss_bbox = self.bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update(loss_bbox=loss_bbox) + return bbox_results + + def update_hyperparameters(self): + """Update hyperparameters like IoU thresholds for assigner and beta for + SmoothL1 loss based on the training statistics. + + Returns: + tuple[float]: the updated ``iou_thr`` and ``beta``. 
+ """ + new_iou_thr = max(self.train_cfg.dynamic_rcnn.initial_iou, + np.mean(self.iou_history)) + self.iou_history = [] + self.bbox_assigner.pos_iou_thr = new_iou_thr + self.bbox_assigner.neg_iou_thr = new_iou_thr + self.bbox_assigner.min_pos_iou = new_iou_thr + new_beta = min(self.train_cfg.dynamic_rcnn.initial_beta, + np.median(self.beta_history)) + self.beta_history = [] + self.bbox_head.loss_bbox.beta = new_beta + return new_iou_thr, new_beta diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/grid_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/grid_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4c52c79863ebaf17bd023382c7e5d4c237b4da77 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/grid_roi_head.py @@ -0,0 +1,176 @@ +import torch + +from mmdet.core import bbox2result, bbox2roi +from ..builder import HEADS, build_head, build_roi_extractor +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class GridRoIHead(StandardRoIHead): + """Grid roi head for Grid R-CNN. + + https://arxiv.org/abs/1811.12030 + """ + + def __init__(self, grid_roi_extractor, grid_head, **kwargs): + assert grid_head is not None + super(GridRoIHead, self).__init__(**kwargs) + if grid_roi_extractor is not None: + self.grid_roi_extractor = build_roi_extractor(grid_roi_extractor) + self.share_roi_extractor = False + else: + self.share_roi_extractor = True + self.grid_roi_extractor = self.bbox_roi_extractor + self.grid_head = build_head(grid_head) + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + super(GridRoIHead, self).init_weights(pretrained) + self.grid_head.init_weights() + if not self.share_roi_extractor: + self.grid_roi_extractor.init_weights() + + def _random_jitter(self, sampling_results, img_metas, amplitude=0.15): + """Ramdom jitter positive proposals for training.""" + for sampling_result, img_meta in zip(sampling_results, img_metas): + bboxes = sampling_result.pos_bboxes + random_offsets = bboxes.new_empty(bboxes.shape[0], 4).uniform_( + -amplitude, amplitude) + # before jittering + cxcy = (bboxes[:, 2:4] + bboxes[:, :2]) / 2 + wh = (bboxes[:, 2:4] - bboxes[:, :2]).abs() + # after jittering + new_cxcy = cxcy + wh * random_offsets[:, :2] + new_wh = wh * (1 + random_offsets[:, 2:]) + # xywh to xyxy + new_x1y1 = (new_cxcy - new_wh / 2) + new_x2y2 = (new_cxcy + new_wh / 2) + new_bboxes = torch.cat([new_x1y1, new_x2y2], dim=1) + # clip bboxes + max_shape = img_meta['img_shape'] + if max_shape is not None: + new_bboxes[:, 0::2].clamp_(min=0, max=max_shape[1] - 1) + new_bboxes[:, 1::2].clamp_(min=0, max=max_shape[0] - 1) + + sampling_result.pos_bboxes = new_bboxes + return sampling_results + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + bbox_results = self._bbox_forward(x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + + # grid head + grid_rois = rois[:100] + grid_feats = self.grid_roi_extractor( + x[:self.grid_roi_extractor.num_inputs], grid_rois) + if self.with_shared_head: + grid_feats = self.shared_head(grid_feats) + grid_pred = self.grid_head(grid_feats) + outs = outs + (grid_pred, ) + + # mask head + if self.with_mask: + mask_rois = rois[:100] + mask_results = self._mask_forward(x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + 
+ def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + """Run forward function and calculate loss for box head in training.""" + bbox_results = super(GridRoIHead, + self)._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + + # Grid head forward and loss + sampling_results = self._random_jitter(sampling_results, img_metas) + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + + # GN in head does not support zero shape input + if pos_rois.shape[0] == 0: + return bbox_results + + grid_feats = self.grid_roi_extractor( + x[:self.grid_roi_extractor.num_inputs], pos_rois) + if self.with_shared_head: + grid_feats = self.shared_head(grid_feats) + # Accelerate training + max_sample_num_grid = self.train_cfg.get('max_num_grid', 192) + sample_idx = torch.randperm( + grid_feats.shape[0])[:min(grid_feats.shape[0], max_sample_num_grid + )] + grid_feats = grid_feats[sample_idx] + + grid_pred = self.grid_head(grid_feats) + + grid_targets = self.grid_head.get_targets(sampling_results, + self.train_cfg) + grid_targets = grid_targets[sample_idx] + + loss_grid = self.grid_head.loss(grid_pred, grid_targets) + + bbox_results['loss_bbox'].update(loss_grid) + return bbox_results + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = self.simple_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=False) + # pack rois into bboxes + grid_rois = bbox2roi([det_bbox[:, :4] for det_bbox in det_bboxes]) + if grid_rois.shape[0] != 0: + grid_feats = self.grid_roi_extractor( + x[:len(self.grid_roi_extractor.featmap_strides)], grid_rois) + self.grid_head.test_mode = True + grid_pred = self.grid_head(grid_feats) + # split batch grid head prediction back to each image + num_roi_per_img = tuple(len(det_bbox) for det_bbox in det_bboxes) + grid_pred = { + k: v.split(num_roi_per_img, 0) + for k, v in grid_pred.items() + } + + # apply bbox post-processing to each image individually + bbox_results = [] + num_imgs = len(det_bboxes) + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + bbox_results.append(grid_rois.new_tensor([])) + else: + det_bbox = self.grid_head.get_bboxes( + det_bboxes[i], grid_pred['fused'][i], [img_metas[i]]) + if rescale: + det_bbox[:, :4] /= img_metas[i]['scale_factor'] + bbox_results.append( + bbox2result(det_bbox, det_labels[i], + self.bbox_head.num_classes)) + else: + bbox_results = [ + grid_rois.new_tensor([]) for _ in range(len(det_bboxes)) + ] + + if not self.with_mask: + return bbox_results + else: + segm_results = self.simple_test_mask( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return list(zip(bbox_results, segm_results)) diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/htc_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/htc_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e2d3d7384de585a477096e45d119d2459fb7bacb --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/htc_roi_head.py @@ -0,0 +1,589 @@ +import torch +import torch.nn.functional as F + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, merge_aug_bboxes, + merge_aug_masks, multiclass_nms) +from ..builder import HEADS, build_head, build_roi_extractor +from .cascade_roi_head import CascadeRoIHead + + +@HEADS.register_module() +class HybridTaskCascadeRoIHead(CascadeRoIHead): + """Hybrid 
task cascade roi head including one bbox head and one mask head. + + https://arxiv.org/abs/1901.07518 + """ + + def __init__(self, + num_stages, + stage_loss_weights, + semantic_roi_extractor=None, + semantic_head=None, + semantic_fusion=('bbox', 'mask'), + interleaved=True, + mask_info_flow=True, + **kwargs): + super(HybridTaskCascadeRoIHead, + self).__init__(num_stages, stage_loss_weights, **kwargs) + assert self.with_bbox and self.with_mask + assert not self.with_shared_head # shared head is not supported + + if semantic_head is not None: + self.semantic_roi_extractor = build_roi_extractor( + semantic_roi_extractor) + self.semantic_head = build_head(semantic_head) + + self.semantic_fusion = semantic_fusion + self.interleaved = interleaved + self.mask_info_flow = mask_info_flow + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + super(HybridTaskCascadeRoIHead, self).init_weights(pretrained) + if self.with_semantic: + self.semantic_head.init_weights() + + @property + def with_semantic(self): + """bool: whether the head has semantic head""" + if hasattr(self, 'semantic_head') and self.semantic_head is not None: + return True + else: + return False + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + outs = () + # semantic head + if self.with_semantic: + _, semantic_feat = self.semantic_head(x) + else: + semantic_feat = None + # bbox heads + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic_feat) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask heads + if self.with_mask: + mask_rois = rois[:100] + mask_roi_extractor = self.mask_roi_extractor[-1] + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + mask_feats += mask_semantic_feat + last_feat = None + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = mask_head(mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + outs = outs + (mask_pred, ) + return outs + + def _bbox_forward_train(self, + stage, + x, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + semantic_feat=None): + """Run forward function and calculate loss for box head in training.""" + bbox_head = self.bbox_head[stage] + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward( + stage, x, rois, semantic_feat=semantic_feat) + + bbox_targets = bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, rcnn_train_cfg) + loss_bbox = bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update( + loss_bbox=loss_bbox, + rois=rois, + bbox_targets=bbox_targets, + ) + return bbox_results + + def _mask_forward_train(self, + stage, + x, + sampling_results, + gt_masks, + rcnn_train_cfg, + semantic_feat=None): + """Run forward function and calculate loss for mask head in + training.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_feats = mask_roi_extractor(x[:mask_roi_extractor.num_inputs], + pos_rois) + + # semantic feature fusion + # 
element-wise sum for original features and pooled semantic features + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], + pos_rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + + # mask information flow + # forward all previous mask heads to obtain last_feat, and fuse it + # with the normal mask feature + if self.mask_info_flow: + last_feat = None + for i in range(stage): + last_feat = self.mask_head[i]( + mask_feats, last_feat, return_logits=False) + mask_pred = mask_head(mask_feats, last_feat, return_feat=False) + else: + mask_pred = mask_head(mask_feats, return_feat=False) + + mask_targets = mask_head.get_targets(sampling_results, gt_masks, + rcnn_train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = mask_head.loss(mask_pred, mask_targets, pos_labels) + + mask_results = dict(loss_mask=loss_mask) + return mask_results + + def _bbox_forward(self, stage, x, rois, semantic_feat=None): + """Box head forward function used in both training and testing.""" + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor( + x[:len(bbox_roi_extractor.featmap_strides)], rois) + if self.with_semantic and 'bbox' in self.semantic_fusion: + bbox_semantic_feat = self.semantic_roi_extractor([semantic_feat], + rois) + if bbox_semantic_feat.shape[-2:] != bbox_feats.shape[-2:]: + bbox_semantic_feat = F.adaptive_avg_pool2d( + bbox_semantic_feat, bbox_feats.shape[-2:]) + bbox_feats += bbox_semantic_feat + cls_score, bbox_pred = bbox_head(bbox_feats) + + bbox_results = dict(cls_score=cls_score, bbox_pred=bbox_pred) + return bbox_results + + def _mask_forward_test(self, stage, x, bboxes, semantic_feat=None): + """Mask head forward function for testing.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + mask_rois = bbox2roi([bboxes]) + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], + mask_rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + if self.mask_info_flow: + last_feat = None + last_pred = None + for i in range(stage): + mask_pred, last_feat = self.mask_head[i](mask_feats, last_feat) + if last_pred is not None: + mask_pred = mask_pred + last_pred + last_pred = mask_pred + mask_pred = mask_head(mask_feats, last_feat, return_feat=False) + if last_pred is not None: + mask_pred = mask_pred + last_pred + else: + mask_pred = mask_head(mask_feats) + return mask_pred + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + gt_semantic_seg=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + proposal_list (list[Tensors]): list of region proposals. 
+ + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None, list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None, Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + gt_semantic_seg (None, list[Tensor]): semantic segmentation masks + used if the architecture supports semantic segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # semantic segmentation part + # 2 outputs: segmentation prediction and embedded features + losses = dict() + if self.with_semantic: + semantic_pred, semantic_feat = self.semantic_head(x) + loss_seg = self.semantic_head.loss(semantic_pred, gt_semantic_seg) + losses['loss_semantic_seg'] = loss_seg + else: + semantic_feat = None + + for i in range(self.num_stages): + self.current_stage = i + rcnn_train_cfg = self.train_cfg[i] + lw = self.stage_loss_weights[i] + + # assign gts and sample proposals + sampling_results = [] + bbox_assigner = self.bbox_assigner[i] + bbox_sampler = self.bbox_sampler[i] + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + + for j in range(num_imgs): + assign_result = bbox_assigner.assign(proposal_list[j], + gt_bboxes[j], + gt_bboxes_ignore[j], + gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + # bbox head forward and loss + bbox_results = \ + self._bbox_forward_train( + i, x, sampling_results, gt_bboxes, gt_labels, + rcnn_train_cfg, semantic_feat) + roi_labels = bbox_results['bbox_targets'][0] + + for name, value in bbox_results['loss_bbox'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # mask head forward and loss + if self.with_mask: + # interleaved execution: use regressed bboxes by the box branch + # to train the mask branch + if self.interleaved: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + with torch.no_grad(): + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + # re-assign and sample 512 RoIs from 512 RoIs + sampling_results = [] + for j in range(num_imgs): + assign_result = bbox_assigner.assign( + proposal_list[j], gt_bboxes[j], + gt_bboxes_ignore[j], gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + mask_results = self._mask_forward_train( + i, x, sampling_results, gt_masks, rcnn_train_cfg, + semantic_feat) + for name, value in mask_results['loss_mask'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # refine bboxes (same as Cascade R-CNN) + if i < self.num_stages - 1 and not self.interleaved: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + with torch.no_grad(): + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + + return losses + + def simple_test(self, x, proposal_list, img_metas, rescale=False): + """Test without augmentation.""" + if self.with_semantic: + 
_, semantic_feat = self.semantic_head(x) + else: + semantic_feat = None + + num_imgs = len(proposal_list) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # "ms" in variable names means multi-stage + ms_bbox_result = {} + ms_segm_result = {} + ms_scores = [] + rcnn_test_cfg = self.test_cfg + + rois = bbox2roi(proposal_list) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic_feat) + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple(len(p) for p in proposal_list) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + ms_scores.append(cls_score) + + if i < self.num_stages - 1: + bbox_label = [s[:, :-1].argmax(dim=1) for s in cls_score] + rois = torch.cat([ + bbox_head.regress_by_class(rois[i], bbox_label[i], + bbox_pred[i], img_metas[i]) + for i in range(num_imgs) + ]) + + # average scores of each image by stages + cls_score = [ + sum([score[i] for score in ms_scores]) / float(len(ms_scores)) + for i in range(num_imgs) + ] + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(num_imgs): + det_bbox, det_label = self.bbox_head[-1].get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + bbox_result = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head[-1].num_classes) + for i in range(num_imgs) + ] + ms_bbox_result['ensemble'] = bbox_result + + if self.with_mask: + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + mask_classes = self.mask_head[-1].num_classes + segm_results = [[[] for _ in range(mask_classes)] + for _ in range(num_imgs)] + else: + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i] + for i in range(num_imgs) + ] + mask_rois = bbox2roi(_bboxes) + aug_masks = [] + mask_roi_extractor = self.mask_roi_extractor[-1] + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + mask_feats += mask_semantic_feat + last_feat = None + + num_bbox_per_img = tuple(len(_bbox) for _bbox in _bboxes) + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = mask_head(mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + + # split batch mask prediction back to each image + mask_pred = mask_pred.split(num_bbox_per_img, 0) + aug_masks.append( + [mask.sigmoid().cpu().numpy() for mask in mask_pred]) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] + for _ in range(self.mask_head[-1].num_classes)]) + else: + aug_mask = [mask[i] for mask in aug_masks] + merged_mask = 
merge_aug_masks( + aug_mask, [[img_metas[i]]] * self.num_stages, + rcnn_test_cfg) + segm_result = self.mask_head[-1].get_seg_masks( + merged_mask, _bboxes[i], det_labels[i], + rcnn_test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + ms_segm_result['ensemble'] = segm_results + + if self.with_mask: + results = list( + zip(ms_bbox_result['ensemble'], ms_segm_result['ensemble'])) + else: + results = ms_bbox_result['ensemble'] + + return results + + def aug_test(self, img_feats, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. + """ + if self.with_semantic: + semantic_feats = [ + self.semantic_head(feat)[1] for feat in img_feats + ] + else: + semantic_feats = [None] * len(img_metas) + + rcnn_test_cfg = self.test_cfg + aug_bboxes = [] + aug_scores = [] + for x, img_meta, semantic in zip(img_feats, img_metas, semantic_feats): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + # "ms" in variable names means multi-stage + ms_scores = [] + + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic) + ms_scores.append(bbox_results['cls_score']) + + if i < self.num_stages - 1: + bbox_label = bbox_results['cls_score'].argmax(dim=1) + rois = bbox_head.regress_by_class( + rois, bbox_label, bbox_results['bbox_pred'], + img_meta[0]) + + cls_score = sum(ms_scores) / float(len(ms_scores)) + bboxes, scores = self.bbox_head[-1].get_bboxes( + rois, + cls_score, + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + + bbox_result = bbox2result(det_bboxes, det_labels, + self.bbox_head[-1].num_classes) + + if self.with_mask: + if det_bboxes.shape[0] == 0: + segm_result = [[[] + for _ in range(self.mask_head[-1].num_classes - + 1)]] + else: + aug_masks = [] + aug_img_metas = [] + for x, img_meta, semantic in zip(img_feats, img_metas, + semantic_feats): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + mask_feats = self.mask_roi_extractor[-1]( + x[:len(self.mask_roi_extractor[-1].featmap_strides)], + mask_rois) + if self.with_semantic: + semantic_feat = semantic + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[ + -2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + last_feat = None + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = 
mask_head( + mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + aug_masks.append(mask_pred.sigmoid().cpu().numpy()) + aug_img_metas.append(img_meta) + merged_masks = merge_aug_masks(aug_masks, aug_img_metas, + self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + rcnn_test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return [(bbox_result, segm_result)] + else: + return [bbox_result] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/__init__.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..26c3e95a635b62e6fedcafd5d071355188b581a6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/__init__.py @@ -0,0 +1,12 @@ +from .coarse_mask_head import CoarseMaskHead +from .fcn_mask_head import FCNMaskHead +from .fused_semantic_head import FusedSemanticHead +from .grid_head import GridHead +from .htc_mask_head import HTCMaskHead +from .mask_point_head import MaskPointHead +from .maskiou_head import MaskIoUHead + +__all__ = [ + 'FCNMaskHead', 'HTCMaskHead', 'FusedSemanticHead', 'GridHead', + 'MaskIoUHead', 'CoarseMaskHead', 'MaskPointHead' +] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d665dfff83855e6db3866c681559ccdef09f9999 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py @@ -0,0 +1,91 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, Linear, constant_init, xavier_init +from mmcv.runner import auto_fp16 + +from mmdet.models.builder import HEADS +from .fcn_mask_head import FCNMaskHead + + +@HEADS.register_module() +class CoarseMaskHead(FCNMaskHead): + """Coarse mask head used in PointRend. + + Compared with standard ``FCNMaskHead``, ``CoarseMaskHead`` will downsample + the input feature map instead of upsample it. + + Args: + num_convs (int): Number of conv layers in the head. Default: 0. + num_fcs (int): Number of fc layers in the head. Default: 2. + fc_out_channels (int): Number of output channels of fc layer. + Default: 1024. + downsample_factor (int): The factor that feature map is downsampled by. + Default: 2. 
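+
+    Illustrative note (derived from the defaults above): with
+    ``roi_feat_size=14`` and ``downsample_factor=2`` the head flattens the
+    downsampled 7x7 feature map and predicts a coarse ``num_classes x 7 x 7``
+    mask logit per RoI.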
+ """ + + def __init__(self, + num_convs=0, + num_fcs=2, + fc_out_channels=1024, + downsample_factor=2, + *arg, + **kwarg): + super(CoarseMaskHead, self).__init__( + *arg, num_convs=num_convs, upsample_cfg=dict(type=None), **kwarg) + self.num_fcs = num_fcs + assert self.num_fcs > 0 + self.fc_out_channels = fc_out_channels + self.downsample_factor = downsample_factor + assert self.downsample_factor >= 1 + # remove conv_logit + delattr(self, 'conv_logits') + + if downsample_factor > 1: + downsample_in_channels = ( + self.conv_out_channels + if self.num_convs > 0 else self.in_channels) + self.downsample_conv = ConvModule( + downsample_in_channels, + self.conv_out_channels, + kernel_size=downsample_factor, + stride=downsample_factor, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + else: + self.downsample_conv = None + + self.output_size = (self.roi_feat_size[0] // downsample_factor, + self.roi_feat_size[1] // downsample_factor) + self.output_area = self.output_size[0] * self.output_size[1] + + last_layer_dim = self.conv_out_channels * self.output_area + + self.fcs = nn.ModuleList() + for i in range(num_fcs): + fc_in_channels = ( + last_layer_dim if i == 0 else self.fc_out_channels) + self.fcs.append(Linear(fc_in_channels, self.fc_out_channels)) + last_layer_dim = self.fc_out_channels + output_channels = self.num_classes * self.output_area + self.fc_logits = Linear(last_layer_dim, output_channels) + + def init_weights(self): + for m in self.fcs.modules(): + if isinstance(m, nn.Linear): + xavier_init(m) + constant_init(self.fc_logits, 0.001) + + @auto_fp16() + def forward(self, x): + for conv in self.convs: + x = conv(x) + + if self.downsample_conv is not None: + x = self.downsample_conv(x) + + x = x.flatten(1) + for fc in self.fcs: + x = self.relu(fc(x)) + mask_pred = self.fc_logits(x).view( + x.size(0), self.num_classes, *self.output_size) + return mask_pred diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0cba3cda06f1ba1622b61c7d15eb823f154ede54 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py @@ -0,0 +1,328 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Conv2d, ConvModule, build_upsample_layer +from mmcv.ops.carafe import CARAFEPack +from mmcv.runner import auto_fp16, force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.core import mask_target +from mmdet.models.builder import HEADS, build_loss + +BYTES_PER_FLOAT = 4 +# TODO: This memory limit may be too much or too little. It would be better to +# determine it based on available resources. 
+GPU_MEM_LIMIT = 1024**3 # 1 GB memory limit + + +@HEADS.register_module() +class FCNMaskHead(nn.Module): + + def __init__(self, + num_convs=4, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + conv_out_channels=256, + num_classes=80, + class_agnostic=False, + upsample_cfg=dict(type='deconv', scale_factor=2), + conv_cfg=None, + norm_cfg=None, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)): + super(FCNMaskHead, self).__init__() + self.upsample_cfg = upsample_cfg.copy() + if self.upsample_cfg['type'] not in [ + None, 'deconv', 'nearest', 'bilinear', 'carafe' + ]: + raise ValueError( + f'Invalid upsample method {self.upsample_cfg["type"]}, ' + 'accepted methods are "deconv", "nearest", "bilinear", ' + '"carafe"') + self.num_convs = num_convs + # WARN: roi_feat_size is reserved and not used + self.roi_feat_size = _pair(roi_feat_size) + self.in_channels = in_channels + self.conv_kernel_size = conv_kernel_size + self.conv_out_channels = conv_out_channels + self.upsample_method = self.upsample_cfg.get('type') + self.scale_factor = self.upsample_cfg.pop('scale_factor', None) + self.num_classes = num_classes + self.class_agnostic = class_agnostic + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.fp16_enabled = False + self.loss_mask = build_loss(loss_mask) + + self.convs = nn.ModuleList() + for i in range(self.num_convs): + in_channels = ( + self.in_channels if i == 0 else self.conv_out_channels) + padding = (self.conv_kernel_size - 1) // 2 + self.convs.append( + ConvModule( + in_channels, + self.conv_out_channels, + self.conv_kernel_size, + padding=padding, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) + upsample_in_channels = ( + self.conv_out_channels if self.num_convs > 0 else in_channels) + upsample_cfg_ = self.upsample_cfg.copy() + if self.upsample_method is None: + self.upsample = None + elif self.upsample_method == 'deconv': + upsample_cfg_.update( + in_channels=upsample_in_channels, + out_channels=self.conv_out_channels, + kernel_size=self.scale_factor, + stride=self.scale_factor) + self.upsample = build_upsample_layer(upsample_cfg_) + elif self.upsample_method == 'carafe': + upsample_cfg_.update( + channels=upsample_in_channels, scale_factor=self.scale_factor) + self.upsample = build_upsample_layer(upsample_cfg_) + else: + # suppress warnings + align_corners = (None + if self.upsample_method == 'nearest' else False) + upsample_cfg_.update( + scale_factor=self.scale_factor, + mode=self.upsample_method, + align_corners=align_corners) + self.upsample = build_upsample_layer(upsample_cfg_) + + out_channels = 1 if self.class_agnostic else self.num_classes + logits_in_channel = ( + self.conv_out_channels + if self.upsample_method == 'deconv' else upsample_in_channels) + self.conv_logits = Conv2d(logits_in_channel, out_channels, 1) + self.relu = nn.ReLU(inplace=True) + self.debug_imgs = None + + def init_weights(self): + for m in [self.upsample, self.conv_logits]: + if m is None: + continue + elif isinstance(m, CARAFEPack): + m.init_weights() + else: + nn.init.kaiming_normal_( + m.weight, mode='fan_out', nonlinearity='relu') + nn.init.constant_(m.bias, 0) + + @auto_fp16() + def forward(self, x): + for conv in self.convs: + x = conv(x) + if self.upsample is not None: + x = self.upsample(x) + if self.upsample_method == 'deconv': + x = self.relu(x) + mask_pred = self.conv_logits(x) + return mask_pred + + def get_targets(self, sampling_results, gt_masks, rcnn_train_cfg): + pos_proposals = [res.pos_bboxes for res in sampling_results] + 
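+        # Sketch of `mask_target`: each matched GT mask is cropped to its
+        # positive proposal and resized to rcnn_train_cfg.mask_size (28x28 in
+        # typical configs), so the targets align pixel-wise with `mask_pred`.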
pos_assigned_gt_inds = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + mask_targets = mask_target(pos_proposals, pos_assigned_gt_inds, + gt_masks, rcnn_train_cfg) + return mask_targets + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, mask_targets, labels): + loss = dict() + if mask_pred.size(0) == 0: + loss_mask = mask_pred.sum() + else: + if self.class_agnostic: + loss_mask = self.loss_mask(mask_pred, mask_targets, + torch.zeros_like(labels)) + else: + loss_mask = self.loss_mask(mask_pred, mask_targets, labels) + loss['loss_mask'] = loss_mask + return loss + + def get_seg_masks(self, mask_pred, det_bboxes, det_labels, rcnn_test_cfg, + ori_shape, scale_factor, rescale): + """Get segmentation masks from mask_pred and bboxes. + + Args: + mask_pred (Tensor or ndarray): shape (n, #class, h, w). + For single-scale testing, mask_pred is the direct output of + model, whose type is Tensor, while for multi-scale testing, + it will be converted to numpy array outside of this method. + det_bboxes (Tensor): shape (n, 4/5) + det_labels (Tensor): shape (n, ) + img_shape (Tensor): shape (3, ) + rcnn_test_cfg (dict): rcnn testing config + ori_shape: original image size + + Returns: + list[list]: encoded masks + """ + if isinstance(mask_pred, torch.Tensor): + mask_pred = mask_pred.sigmoid() + else: + mask_pred = det_bboxes.new_tensor(mask_pred) + + device = mask_pred.device + cls_segms = [[] for _ in range(self.num_classes) + ] # BG is not included in num_classes + bboxes = det_bboxes[:, :4] + labels = det_labels + + if rescale: + img_h, img_w = ori_shape[:2] + else: + if isinstance(scale_factor, float): + img_h = np.round(ori_shape[0] * scale_factor).astype(np.int32) + img_w = np.round(ori_shape[1] * scale_factor).astype(np.int32) + else: + w_scale, h_scale = scale_factor[0], scale_factor[1] + img_h = np.round(ori_shape[0] * h_scale.item()).astype( + np.int32) + img_w = np.round(ori_shape[1] * w_scale.item()).astype( + np.int32) + scale_factor = 1.0 + + if not isinstance(scale_factor, (float, torch.Tensor)): + scale_factor = bboxes.new_tensor(scale_factor) + bboxes = bboxes / scale_factor + + if torch.onnx.is_in_onnx_export(): + # TODO: Remove after F.grid_sample is supported. + from torchvision.models.detection.roi_heads \ + import paste_masks_in_image + masks = paste_masks_in_image(mask_pred, bboxes, ori_shape[:2]) + thr = rcnn_test_cfg.get('mask_thr_binary', 0) + if thr > 0: + masks = masks >= thr + return masks + + N = len(mask_pred) + # The actual implementation split the input into chunks, + # and paste them chunk by chunk. + if device.type == 'cpu': + # CPU is most efficient when they are pasted one by one with + # skip_empty=True, so that it performs minimal number of + # operations. 
+            num_chunks = N
+        else:
+            # GPU benefits from parallelism for larger chunks,
+            # but may have memory issue
+            num_chunks = int(
+                np.ceil(N * img_h * img_w * BYTES_PER_FLOAT / GPU_MEM_LIMIT))
+            assert (num_chunks <=
+                    N), 'Default GPU_MEM_LIMIT is too small; try increasing it'
+        chunks = torch.chunk(torch.arange(N, device=device), num_chunks)
+
+        threshold = rcnn_test_cfg.mask_thr_binary
+        im_mask = torch.zeros(
+            N,
+            img_h,
+            img_w,
+            device=device,
+            dtype=torch.bool if threshold >= 0 else torch.uint8)
+
+        if not self.class_agnostic:
+            mask_pred = mask_pred[range(N), labels][:, None]
+
+        for inds in chunks:
+            masks_chunk, spatial_inds = _do_paste_mask(
+                mask_pred[inds],
+                bboxes[inds],
+                img_h,
+                img_w,
+                skip_empty=device.type == 'cpu')
+
+            if threshold >= 0:
+                masks_chunk = (masks_chunk >= threshold).to(dtype=torch.bool)
+            else:
+                # for visualization and debugging
+                masks_chunk = (masks_chunk * 255).to(dtype=torch.uint8)
+
+            im_mask[(inds, ) + spatial_inds] = masks_chunk
+
+        for i in range(N):
+            cls_segms[labels[i]].append(im_mask[i].detach().cpu().numpy())
+        return cls_segms
+
+
+def _do_paste_mask(masks, boxes, img_h, img_w, skip_empty=True):
+    """Paste instance masks according to boxes.
+
+    This implementation is modified from
+    https://github.com/facebookresearch/detectron2/
+
+    Args:
+        masks (Tensor): N, 1, H, W
+        boxes (Tensor): N, 4
+        img_h (int): Height of the image to be pasted.
+        img_w (int): Width of the image to be pasted.
+        skip_empty (bool): Only paste masks within the region that
+            tightly bounds all boxes, and return the results for this region
+            only. An important optimization for CPU.
+
+    Returns:
+        tuple: (Tensor, tuple). The first item is mask tensor, the second one
+            is the slice object.
+        If skip_empty == False, the whole image will be pasted. It will
+            return a mask of shape (N, img_h, img_w) and an empty tuple.
+        If skip_empty == True, only area around the mask will be pasted.
+            A mask of shape (N, h', w') and its start and end coordinates
+            in the original image will be returned.
+    """
+    # On GPU, paste all masks together (up to chunk size)
+    # by using the entire image to sample the masks
+    # Compared to pasting them one by one,
+    # this has more operations but is faster on COCO-scale dataset.
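+    # Numeric illustration of the sampling grid built below (assumed values):
+    # for a box with y0=10, y1=30, image row 20 samples at
+    # (20.5 - 10) / (30 - 10) * 2 - 1 = 0.05 in the box's normalized [-1, 1]
+    # space, which is the convention F.grid_sample expects with
+    # align_corners=False.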
+ device = masks.device + if skip_empty: + x0_int, y0_int = torch.clamp( + boxes.min(dim=0).values.floor()[:2] - 1, + min=0).to(dtype=torch.int32) + x1_int = torch.clamp( + boxes[:, 2].max().ceil() + 1, max=img_w).to(dtype=torch.int32) + y1_int = torch.clamp( + boxes[:, 3].max().ceil() + 1, max=img_h).to(dtype=torch.int32) + else: + x0_int, y0_int = 0, 0 + x1_int, y1_int = img_w, img_h + x0, y0, x1, y1 = torch.split(boxes, 1, dim=1) # each is Nx1 + + N = masks.shape[0] + + img_y = torch.arange( + y0_int, y1_int, device=device, dtype=torch.float32) + 0.5 + img_x = torch.arange( + x0_int, x1_int, device=device, dtype=torch.float32) + 0.5 + img_y = (img_y - y0) / (y1 - y0) * 2 - 1 + img_x = (img_x - x0) / (x1 - x0) * 2 - 1 + # img_x, img_y have shapes (N, w), (N, h) + if torch.isinf(img_x).any(): + inds = torch.where(torch.isinf(img_x)) + img_x[inds] = 0 + if torch.isinf(img_y).any(): + inds = torch.where(torch.isinf(img_y)) + img_y[inds] = 0 + + gx = img_x[:, None, :].expand(N, img_y.size(1), img_x.size(1)) + gy = img_y[:, :, None].expand(N, img_y.size(1), img_x.size(1)) + grid = torch.stack([gx, gy], dim=3) + + if torch.onnx.is_in_onnx_export(): + raise RuntimeError( + 'Exporting F.grid_sample from Pytorch to ONNX is not supported.') + img_masks = F.grid_sample( + masks.to(dtype=torch.float32), grid, align_corners=False) + + if skip_empty: + return img_masks[:, 0], (slice(y0_int, y1_int), slice(x0_int, x1_int)) + else: + return img_masks[:, 0], () diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa6033eec17a30aeb68c0fdd218d8f0d41157e8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py @@ -0,0 +1,107 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, kaiming_init +from mmcv.runner import auto_fp16, force_fp32 + +from mmdet.models.builder import HEADS + + +@HEADS.register_module() +class FusedSemanticHead(nn.Module): + r"""Multi-level fused semantic segmentation head. + + .. 
code-block:: none + + in_1 -> 1x1 conv --- + | + in_2 -> 1x1 conv -- | + || + in_3 -> 1x1 conv - || + ||| /-> 1x1 conv (mask prediction) + in_4 -> 1x1 conv -----> 3x3 convs (*4) + | \-> 1x1 conv (feature) + in_5 -> 1x1 conv --- + """ # noqa: W605 + + def __init__(self, + num_ins, + fusion_level, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2, + conv_cfg=None, + norm_cfg=None): + super(FusedSemanticHead, self).__init__() + self.num_ins = num_ins + self.fusion_level = fusion_level + self.num_convs = num_convs + self.in_channels = in_channels + self.conv_out_channels = conv_out_channels + self.num_classes = num_classes + self.ignore_label = ignore_label + self.loss_weight = loss_weight + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.fp16_enabled = False + + self.lateral_convs = nn.ModuleList() + for i in range(self.num_ins): + self.lateral_convs.append( + ConvModule( + self.in_channels, + self.in_channels, + 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=False)) + + self.convs = nn.ModuleList() + for i in range(self.num_convs): + in_channels = self.in_channels if i == 0 else conv_out_channels + self.convs.append( + ConvModule( + in_channels, + conv_out_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.conv_embedding = ConvModule( + conv_out_channels, + conv_out_channels, + 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + self.conv_logits = nn.Conv2d(conv_out_channels, self.num_classes, 1) + + self.criterion = nn.CrossEntropyLoss(ignore_index=ignore_label) + + def init_weights(self): + kaiming_init(self.conv_logits) + + @auto_fp16() + def forward(self, feats): + x = self.lateral_convs[self.fusion_level](feats[self.fusion_level]) + fused_size = tuple(x.shape[-2:]) + for i, feat in enumerate(feats): + if i != self.fusion_level: + feat = F.interpolate( + feat, size=fused_size, mode='bilinear', align_corners=True) + x += self.lateral_convs[i](feat) + + for i in range(self.num_convs): + x = self.convs[i](x) + + mask_pred = self.conv_logits(x) + x = self.conv_embedding(x) + return mask_pred, x + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, labels): + labels = labels.squeeze(1).long() + loss_semantic_seg = self.criterion(mask_pred, labels) + loss_semantic_seg *= self.loss_weight + return loss_semantic_seg diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/grid_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/grid_head.py new file mode 100644 index 0000000000000000000000000000000000000000..83058cbdda934ebfc3a76088e1820848ac01b78b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/grid_head.py @@ -0,0 +1,359 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, kaiming_init, normal_init + +from mmdet.models.builder import HEADS, build_loss + + +@HEADS.register_module() +class GridHead(nn.Module): + + def __init__(self, + grid_points=9, + num_convs=8, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + point_feat_channels=64, + deconv_kernel_size=4, + class_agnostic=False, + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, + loss_weight=15), + conv_cfg=None, + norm_cfg=dict(type='GN', num_groups=36)): + super(GridHead, self).__init__() + self.grid_points = grid_points + self.num_convs = num_convs + self.roi_feat_size = roi_feat_size + self.in_channels = in_channels + 
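+        # With the defaults (grid_points=9, point_feat_channels=64), every
+        # conv below outputs 9 * 64 = 576 channels, one 64-channel slice per
+        # grid point; the group-wise ops later keep these slices separate.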
+        self.conv_kernel_size = conv_kernel_size
+        self.point_feat_channels = point_feat_channels
+        self.conv_out_channels = self.point_feat_channels * self.grid_points
+        self.class_agnostic = class_agnostic
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+        if isinstance(norm_cfg, dict) and norm_cfg['type'] == 'GN':
+            assert self.conv_out_channels % norm_cfg['num_groups'] == 0
+
+        assert self.grid_points >= 4
+        self.grid_size = int(np.sqrt(self.grid_points))
+        if self.grid_size * self.grid_size != self.grid_points:
+            raise ValueError('grid_points must be a square number')
+
+        # the predicted heatmap is half of whole_map_size
+        if not isinstance(self.roi_feat_size, int):
+            raise ValueError('Only square RoIs are supported in Grid R-CNN')
+        self.whole_map_size = self.roi_feat_size * 4
+
+        # compute point-wise sub-regions
+        self.sub_regions = self.calc_sub_regions()
+
+        self.convs = []
+        for i in range(self.num_convs):
+            in_channels = (
+                self.in_channels if i == 0 else self.conv_out_channels)
+            stride = 2 if i == 0 else 1
+            padding = (self.conv_kernel_size - 1) // 2
+            self.convs.append(
+                ConvModule(
+                    in_channels,
+                    self.conv_out_channels,
+                    self.conv_kernel_size,
+                    stride=stride,
+                    padding=padding,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg,
+                    bias=True))
+        self.convs = nn.Sequential(*self.convs)
+
+        self.deconv1 = nn.ConvTranspose2d(
+            self.conv_out_channels,
+            self.conv_out_channels,
+            kernel_size=deconv_kernel_size,
+            stride=2,
+            padding=(deconv_kernel_size - 2) // 2,
+            groups=grid_points)
+        self.norm1 = nn.GroupNorm(grid_points, self.conv_out_channels)
+        self.deconv2 = nn.ConvTranspose2d(
+            self.conv_out_channels,
+            grid_points,
+            kernel_size=deconv_kernel_size,
+            stride=2,
+            padding=(deconv_kernel_size - 2) // 2,
+            groups=grid_points)
+
+        # find the 4-neighbor of each grid point
+        self.neighbor_points = []
+        grid_size = self.grid_size
+        for i in range(grid_size):  # i-th column
+            for j in range(grid_size):  # j-th row
+                neighbors = []
+                if i > 0:  # left: (i - 1, j)
+                    neighbors.append((i - 1) * grid_size + j)
+                if j > 0:  # up: (i, j - 1)
+                    neighbors.append(i * grid_size + j - 1)
+                if j < grid_size - 1:  # down: (i, j + 1)
+                    neighbors.append(i * grid_size + j + 1)
+                if i < grid_size - 1:  # right: (i + 1, j)
+                    neighbors.append((i + 1) * grid_size + j)
+                self.neighbor_points.append(tuple(neighbors))
+        # total edges in the grid
+        self.num_edges = sum([len(p) for p in self.neighbor_points])
+
+        self.forder_trans = nn.ModuleList()  # first-order feature transition
+        self.sorder_trans = nn.ModuleList()  # second-order feature transition
+        for neighbors in self.neighbor_points:
+            fo_trans = nn.ModuleList()
+            so_trans = nn.ModuleList()
+            for _ in range(len(neighbors)):
+                # each transition module consists of a 5x5 depth-wise conv and
+                # 1x1 conv.
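+                # (a depthwise-separable block: the 5x5 conv with
+                # groups=point_feat_channels mixes spatially within each
+                # channel, then the 1x1 conv mixes across the 64 channels)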
+ fo_trans.append( + nn.Sequential( + nn.Conv2d( + self.point_feat_channels, + self.point_feat_channels, + 5, + stride=1, + padding=2, + groups=self.point_feat_channels), + nn.Conv2d(self.point_feat_channels, + self.point_feat_channels, 1))) + so_trans.append( + nn.Sequential( + nn.Conv2d( + self.point_feat_channels, + self.point_feat_channels, + 5, + 1, + 2, + groups=self.point_feat_channels), + nn.Conv2d(self.point_feat_channels, + self.point_feat_channels, 1))) + self.forder_trans.append(fo_trans) + self.sorder_trans.append(so_trans) + + self.loss_grid = build_loss(loss_grid) + + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear): + # TODO: compare mode = "fan_in" or "fan_out" + kaiming_init(m) + for m in self.modules(): + if isinstance(m, nn.ConvTranspose2d): + normal_init(m, std=0.001) + nn.init.constant_(self.deconv2.bias, -np.log(0.99 / 0.01)) + + def forward(self, x): + assert x.shape[-1] == x.shape[-2] == self.roi_feat_size + # RoI feature transformation, downsample 2x + x = self.convs(x) + + c = self.point_feat_channels + # first-order fusion + x_fo = [None for _ in range(self.grid_points)] + for i, points in enumerate(self.neighbor_points): + x_fo[i] = x[:, i * c:(i + 1) * c] + for j, point_idx in enumerate(points): + x_fo[i] = x_fo[i] + self.forder_trans[i][j]( + x[:, point_idx * c:(point_idx + 1) * c]) + + # second-order fusion + x_so = [None for _ in range(self.grid_points)] + for i, points in enumerate(self.neighbor_points): + x_so[i] = x[:, i * c:(i + 1) * c] + for j, point_idx in enumerate(points): + x_so[i] = x_so[i] + self.sorder_trans[i][j](x_fo[point_idx]) + + # predicted heatmap with fused features + x2 = torch.cat(x_so, dim=1) + x2 = self.deconv1(x2) + x2 = F.relu(self.norm1(x2), inplace=True) + heatmap = self.deconv2(x2) + + # predicted heatmap with original features (applicable during training) + if self.training: + x1 = x + x1 = self.deconv1(x1) + x1 = F.relu(self.norm1(x1), inplace=True) + heatmap_unfused = self.deconv2(x1) + else: + heatmap_unfused = heatmap + + return dict(fused=heatmap, unfused=heatmap_unfused) + + def calc_sub_regions(self): + """Compute point specific representation regions. + + See Grid R-CNN Plus (https://arxiv.org/abs/1906.05688) for details. + """ + # to make it consistent with the original implementation, half_size + # is computed as 2 * quarter_size, which is smaller + half_size = self.whole_map_size // 4 * 2 + sub_regions = [] + for i in range(self.grid_points): + x_idx = i // self.grid_size + y_idx = i % self.grid_size + if x_idx == 0: + sub_x1 = 0 + elif x_idx == self.grid_size - 1: + sub_x1 = half_size + else: + ratio = x_idx / (self.grid_size - 1) - 0.25 + sub_x1 = max(int(ratio * self.whole_map_size), 0) + + if y_idx == 0: + sub_y1 = 0 + elif y_idx == self.grid_size - 1: + sub_y1 = half_size + else: + ratio = y_idx / (self.grid_size - 1) - 0.25 + sub_y1 = max(int(ratio * self.whole_map_size), 0) + sub_regions.append( + (sub_x1, sub_y1, sub_x1 + half_size, sub_y1 + half_size)) + return sub_regions + + def get_targets(self, sampling_results, rcnn_train_cfg): + # mix all samples (across images) together. 
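+        # Target construction in brief: each positive proposal is expanded to
+        # 2x its size, every grid point is interpolated between the GT box
+        # corners, and a disc of radius rcnn_train_cfg.pos_radius around the
+        # projected point is marked 1 in the heatmap.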
+ pos_bboxes = torch.cat([res.pos_bboxes for res in sampling_results], + dim=0).cpu() + pos_gt_bboxes = torch.cat( + [res.pos_gt_bboxes for res in sampling_results], dim=0).cpu() + assert pos_bboxes.shape == pos_gt_bboxes.shape + + # expand pos_bboxes to 2x of original size + x1 = pos_bboxes[:, 0] - (pos_bboxes[:, 2] - pos_bboxes[:, 0]) / 2 + y1 = pos_bboxes[:, 1] - (pos_bboxes[:, 3] - pos_bboxes[:, 1]) / 2 + x2 = pos_bboxes[:, 2] + (pos_bboxes[:, 2] - pos_bboxes[:, 0]) / 2 + y2 = pos_bboxes[:, 3] + (pos_bboxes[:, 3] - pos_bboxes[:, 1]) / 2 + pos_bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + pos_bbox_ws = (pos_bboxes[:, 2] - pos_bboxes[:, 0]).unsqueeze(-1) + pos_bbox_hs = (pos_bboxes[:, 3] - pos_bboxes[:, 1]).unsqueeze(-1) + + num_rois = pos_bboxes.shape[0] + map_size = self.whole_map_size + # this is not the final target shape + targets = torch.zeros((num_rois, self.grid_points, map_size, map_size), + dtype=torch.float) + + # pre-compute interpolation factors for all grid points. + # the first item is the factor of x-dim, and the second is y-dim. + # for a 9-point grid, factors are like (1, 0), (0.5, 0.5), (0, 1) + factors = [] + for j in range(self.grid_points): + x_idx = j // self.grid_size + y_idx = j % self.grid_size + factors.append((1 - x_idx / (self.grid_size - 1), + 1 - y_idx / (self.grid_size - 1))) + + radius = rcnn_train_cfg.pos_radius + radius2 = radius**2 + for i in range(num_rois): + # ignore small bboxes + if (pos_bbox_ws[i] <= self.grid_size + or pos_bbox_hs[i] <= self.grid_size): + continue + # for each grid point, mark a small circle as positive + for j in range(self.grid_points): + factor_x, factor_y = factors[j] + gridpoint_x = factor_x * pos_gt_bboxes[i, 0] + ( + 1 - factor_x) * pos_gt_bboxes[i, 2] + gridpoint_y = factor_y * pos_gt_bboxes[i, 1] + ( + 1 - factor_y) * pos_gt_bboxes[i, 3] + + cx = int((gridpoint_x - pos_bboxes[i, 0]) / pos_bbox_ws[i] * + map_size) + cy = int((gridpoint_y - pos_bboxes[i, 1]) / pos_bbox_hs[i] * + map_size) + + for x in range(cx - radius, cx + radius + 1): + for y in range(cy - radius, cy + radius + 1): + if x >= 0 and x < map_size and y >= 0 and y < map_size: + if (x - cx)**2 + (y - cy)**2 <= radius2: + targets[i, j, y, x] = 1 + # reduce the target heatmap size by a half + # proposed in Grid R-CNN Plus (https://arxiv.org/abs/1906.05688). 
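+        # E.g. with roi_feat_size=14 the whole heatmap is 56x56 and each
+        # cropped sub-target below is 28x28 (half_size = 56 // 4 * 2).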
+ sub_targets = [] + for i in range(self.grid_points): + sub_x1, sub_y1, sub_x2, sub_y2 = self.sub_regions[i] + sub_targets.append(targets[:, [i], sub_y1:sub_y2, sub_x1:sub_x2]) + sub_targets = torch.cat(sub_targets, dim=1) + sub_targets = sub_targets.to(sampling_results[0].pos_bboxes.device) + return sub_targets + + def loss(self, grid_pred, grid_targets): + loss_fused = self.loss_grid(grid_pred['fused'], grid_targets) + loss_unfused = self.loss_grid(grid_pred['unfused'], grid_targets) + loss_grid = loss_fused + loss_unfused + return dict(loss_grid=loss_grid) + + def get_bboxes(self, det_bboxes, grid_pred, img_metas): + # TODO: refactoring + assert det_bboxes.shape[0] == grid_pred.shape[0] + det_bboxes = det_bboxes.cpu() + cls_scores = det_bboxes[:, [4]] + det_bboxes = det_bboxes[:, :4] + grid_pred = grid_pred.sigmoid().cpu() + + R, c, h, w = grid_pred.shape + half_size = self.whole_map_size // 4 * 2 + assert h == w == half_size + assert c == self.grid_points + + # find the point with max scores in the half-sized heatmap + grid_pred = grid_pred.view(R * c, h * w) + pred_scores, pred_position = grid_pred.max(dim=1) + xs = pred_position % w + ys = pred_position // w + + # get the position in the whole heatmap instead of half-sized heatmap + for i in range(self.grid_points): + xs[i::self.grid_points] += self.sub_regions[i][0] + ys[i::self.grid_points] += self.sub_regions[i][1] + + # reshape to (num_rois, grid_points) + pred_scores, xs, ys = tuple( + map(lambda x: x.view(R, c), [pred_scores, xs, ys])) + + # get expanded pos_bboxes + widths = (det_bboxes[:, 2] - det_bboxes[:, 0]).unsqueeze(-1) + heights = (det_bboxes[:, 3] - det_bboxes[:, 1]).unsqueeze(-1) + x1 = (det_bboxes[:, 0, None] - widths / 2) + y1 = (det_bboxes[:, 1, None] - heights / 2) + # map the grid point to the absolute coordinates + abs_xs = (xs.float() + 0.5) / w * widths + x1 + abs_ys = (ys.float() + 0.5) / h * heights + y1 + + # get the grid points indices that fall on the bbox boundaries + x1_inds = [i for i in range(self.grid_size)] + y1_inds = [i * self.grid_size for i in range(self.grid_size)] + x2_inds = [ + self.grid_points - self.grid_size + i + for i in range(self.grid_size) + ] + y2_inds = [(i + 1) * self.grid_size - 1 for i in range(self.grid_size)] + + # voting of all grid points on some boundary + bboxes_x1 = (abs_xs[:, x1_inds] * pred_scores[:, x1_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, x1_inds].sum(dim=1, keepdim=True)) + bboxes_y1 = (abs_ys[:, y1_inds] * pred_scores[:, y1_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, y1_inds].sum(dim=1, keepdim=True)) + bboxes_x2 = (abs_xs[:, x2_inds] * pred_scores[:, x2_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, x2_inds].sum(dim=1, keepdim=True)) + bboxes_y2 = (abs_ys[:, y2_inds] * pred_scores[:, y2_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, y2_inds].sum(dim=1, keepdim=True)) + + bbox_res = torch.cat( + [bboxes_x1, bboxes_y1, bboxes_x2, bboxes_y2, cls_scores], dim=1) + bbox_res[:, [0, 2]].clamp_(min=0, max=img_metas[0]['img_shape'][1]) + bbox_res[:, [1, 3]].clamp_(min=0, max=img_metas[0]['img_shape'][0]) + + return bbox_res diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/htc_mask_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/htc_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..330b778ebad8d48d55d09ddd42baa70ec10ae463 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/htc_mask_head.py @@ -0,0 +1,43 @@ +from mmcv.cnn import 
ConvModule
+
+from mmdet.models.builder import HEADS
+from .fcn_mask_head import FCNMaskHead
+
+
+@HEADS.register_module()
+class HTCMaskHead(FCNMaskHead):
+
+    def __init__(self, with_conv_res=True, *args, **kwargs):
+        super(HTCMaskHead, self).__init__(*args, **kwargs)
+        self.with_conv_res = with_conv_res
+        if self.with_conv_res:
+            self.conv_res = ConvModule(
+                self.conv_out_channels,
+                self.conv_out_channels,
+                1,
+                conv_cfg=self.conv_cfg,
+                norm_cfg=self.norm_cfg)
+
+    def init_weights(self):
+        super(HTCMaskHead, self).init_weights()
+        if self.with_conv_res:
+            self.conv_res.init_weights()
+
+    def forward(self, x, res_feat=None, return_logits=True, return_feat=True):
+        if res_feat is not None:
+            assert self.with_conv_res
+            res_feat = self.conv_res(res_feat)
+            x = x + res_feat
+        for conv in self.convs:
+            x = conv(x)
+        res_feat = x
+        outs = []
+        if return_logits:
+            x = self.upsample(x)
+            if self.upsample_method == 'deconv':
+                x = self.relu(x)
+            mask_pred = self.conv_logits(x)
+            outs.append(mask_pred)
+        if return_feat:
+            outs.append(res_feat)
+        return outs if len(outs) > 1 else outs[0]
diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/mask_point_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/mask_point_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..f38a5c9d7595d441776d6b38070ed75e42911fce
--- /dev/null
+++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/mask_point_head.py
@@ -0,0 +1,300 @@
+# Modified from https://github.com/facebookresearch/detectron2/tree/master/projects/PointRend/point_head/point_head.py  # noqa
+
+import torch
+import torch.nn as nn
+from mmcv.cnn import ConvModule, normal_init
+from mmcv.ops import point_sample, rel_roi_point_to_rel_img_point
+
+from mmdet.models.builder import HEADS, build_loss
+
+
+@HEADS.register_module()
+class MaskPointHead(nn.Module):
+    """A mask point head used in PointRend.
+
+    ``MaskPointHead`` uses a shared multi-layer perceptron (equivalent to
+    nn.Conv1d) to predict the logit of input points. The fine-grained feature
+    and coarse feature will be concatenated together for prediction.
+
+    Args:
+        num_fcs (int): Number of fc layers in the head. Default: 3.
+        in_channels (int): Number of input channels. Default: 256.
+        fc_channels (int): Number of fc channels. Default: 256.
+        num_classes (int): Number of classes for logits. Default: 80.
+        class_agnostic (bool): Whether use class agnostic classification.
+            If so, the output channels of logits will be 1. Default: False.
+        coarse_pred_each_layer (bool): Whether concatenate coarse feature with
+            the output of each fc layer. Default: True.
+        conv_cfg (dict | None): Dictionary to construct and config conv layer.
+            Default: dict(type='Conv1d'))
+        norm_cfg (dict | None): Dictionary to construct and config norm layer.
+            Default: None.
+        loss_point (dict): Dictionary to construct and config loss layer of
+            point head. Default: dict(type='CrossEntropyLoss', use_mask=True,
+            loss_weight=1.0).
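+
+    Example (illustrative sketch, not from upstream; shapes assumed):
+        >>> import torch
+        >>> head = MaskPointHead(num_classes=80)
+        >>> fine = torch.rand(4, 256, 14)   # (num_rois, in_channels, points)
+        >>> coarse = torch.rand(4, 80, 14)  # (num_rois, num_classes, points)
+        >>> head(fine, coarse).shape
+        torch.Size([4, 80, 14])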
+ """ + + def __init__(self, + num_classes, + num_fcs=3, + in_channels=256, + fc_channels=256, + class_agnostic=False, + coarse_pred_each_layer=True, + conv_cfg=dict(type='Conv1d'), + norm_cfg=None, + act_cfg=dict(type='ReLU'), + loss_point=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)): + super().__init__() + self.num_fcs = num_fcs + self.in_channels = in_channels + self.fc_channles = fc_channels + self.num_classes = num_classes + self.class_agnostic = class_agnostic + self.coarse_pred_each_layer = coarse_pred_each_layer + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.loss_point = build_loss(loss_point) + + fc_in_channels = in_channels + num_classes + self.fcs = nn.ModuleList() + for _ in range(num_fcs): + fc = ConvModule( + fc_in_channels, + fc_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.fcs.append(fc) + fc_in_channels = fc_channels + fc_in_channels += num_classes if self.coarse_pred_each_layer else 0 + + out_channels = 1 if self.class_agnostic else self.num_classes + self.fc_logits = nn.Conv1d( + fc_in_channels, out_channels, kernel_size=1, stride=1, padding=0) + + def init_weights(self): + """Initialize last classification layer of MaskPointHead, conv layers + are already initialized by ConvModule.""" + normal_init(self.fc_logits, std=0.001) + + def forward(self, fine_grained_feats, coarse_feats): + """Classify each point base on fine grained and coarse feats. + + Args: + fine_grained_feats (Tensor): Fine grained feature sampled from FPN, + shape (num_rois, in_channels, num_points). + coarse_feats (Tensor): Coarse feature sampled from CoarseMaskHead, + shape (num_rois, num_classes, num_points). + + Returns: + Tensor: Point classification results, + shape (num_rois, num_class, num_points). + """ + + x = torch.cat([fine_grained_feats, coarse_feats], dim=1) + for fc in self.fcs: + x = fc(x) + if self.coarse_pred_each_layer: + x = torch.cat((x, coarse_feats), dim=1) + return self.fc_logits(x) + + def get_targets(self, rois, rel_roi_points, sampling_results, gt_masks, + cfg): + """Get training targets of MaskPointHead for all images. + + Args: + rois (Tensor): Region of Interest, shape (num_rois, 5). + rel_roi_points: Points coordinates relative to RoI, shape + (num_rois, num_points, 2). + sampling_results (:obj:`SamplingResult`): Sampling result after + sampling and assignment. + gt_masks (Tensor) : Ground truth segmentation masks of + corresponding boxes, shape (num_rois, height, width). + cfg (dict): Training cfg. + + Returns: + Tensor: Point target, shape (num_rois, num_points). 
+ """ + + num_imgs = len(sampling_results) + rois_list = [] + rel_roi_points_list = [] + for batch_ind in range(num_imgs): + inds = (rois[:, 0] == batch_ind) + rois_list.append(rois[inds]) + rel_roi_points_list.append(rel_roi_points[inds]) + pos_assigned_gt_inds_list = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + cfg_list = [cfg for _ in range(num_imgs)] + + point_targets = map(self._get_target_single, rois_list, + rel_roi_points_list, pos_assigned_gt_inds_list, + gt_masks, cfg_list) + point_targets = list(point_targets) + + if len(point_targets) > 0: + point_targets = torch.cat(point_targets) + + return point_targets + + def _get_target_single(self, rois, rel_roi_points, pos_assigned_gt_inds, + gt_masks, cfg): + """Get training target of MaskPointHead for each image.""" + num_pos = rois.size(0) + num_points = cfg.num_points + if num_pos > 0: + gt_masks_th = ( + gt_masks.to_tensor(rois.dtype, rois.device).index_select( + 0, pos_assigned_gt_inds)) + gt_masks_th = gt_masks_th.unsqueeze(1) + rel_img_points = rel_roi_point_to_rel_img_point( + rois, rel_roi_points, gt_masks_th.shape[2:]) + point_targets = point_sample(gt_masks_th, + rel_img_points).squeeze(1) + else: + point_targets = rois.new_zeros((0, num_points)) + return point_targets + + def loss(self, point_pred, point_targets, labels): + """Calculate loss for MaskPointHead. + + Args: + point_pred (Tensor): Point predication result, shape + (num_rois, num_classes, num_points). + point_targets (Tensor): Point targets, shape (num_roi, num_points). + labels (Tensor): Class label of corresponding boxes, + shape (num_rois, ) + + Returns: + dict[str, Tensor]: a dictionary of point loss components + """ + + loss = dict() + if self.class_agnostic: + loss_point = self.loss_point(point_pred, point_targets, + torch.zeros_like(labels)) + else: + loss_point = self.loss_point(point_pred, point_targets, labels) + loss['loss_point'] = loss_point + return loss + + def _get_uncertainty(self, mask_pred, labels): + """Estimate uncertainty based on pred logits. + + We estimate uncertainty as L1 distance between 0.0 and the logits + prediction in 'mask_pred' for the foreground class in `classes`. + + Args: + mask_pred (Tensor): mask predication logits, shape (num_rois, + num_classes, mask_height, mask_width). + + labels (list[Tensor]): Either predicted or ground truth label for + each predicted mask, of length num_rois. + + Returns: + scores (Tensor): Uncertainty scores with the most uncertain + locations having the highest uncertainty score, + shape (num_rois, 1, mask_height, mask_width) + """ + if mask_pred.shape[1] == 1: + gt_class_logits = mask_pred.clone() + else: + inds = torch.arange(mask_pred.shape[0], device=mask_pred.device) + gt_class_logits = mask_pred[inds, labels].unsqueeze(1) + return -torch.abs(gt_class_logits) + + def get_roi_rel_points_train(self, mask_pred, labels, cfg): + """Get ``num_points`` most uncertain points with random points during + train. + + Sample points in [0, 1] x [0, 1] coordinate space based on their + uncertainty. The uncertainties are calculated for each point using + '_get_uncertainty()' function that takes point's logit prediction as + input. + + Args: + mask_pred (Tensor): A tensor of shape (num_rois, num_classes, + mask_height, mask_width) for class-specific or class-agnostic + prediction. + labels (list): The ground truth class for each instance. + cfg (dict): Training config of point head. 
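+ Expected fields are ``num_points``, ``oversample_ratio`` and + ``importance_sample_ratio`` (e.g. 196, 3 and 0.75 in the stock + PointRend configs).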
+ + Returns: + point_coords (Tensor): A tensor of shape (num_rois, num_points, 2) + that contains the coordinates of sampled points. + """ + num_points = cfg.num_points + oversample_ratio = cfg.oversample_ratio + importance_sample_ratio = cfg.importance_sample_ratio + assert oversample_ratio >= 1 + assert 0 <= importance_sample_ratio <= 1 + batch_size = mask_pred.shape[0] + num_sampled = int(num_points * oversample_ratio) + point_coords = torch.rand( + batch_size, num_sampled, 2, device=mask_pred.device) + point_logits = point_sample(mask_pred, point_coords) + # It is crucial to calculate uncertainty based on the sampled + # prediction value for the points. Calculating uncertainties of the + # coarse predictions first and sampling them for points leads to + # incorrect results. To illustrate this: assume uncertainty func( + # logits)=-abs(logits), a sampled point between two coarse + # predictions with -1 and 1 logits has 0 logits, and therefore 0 + # uncertainty value. However, if we calculate uncertainties for the + # coarse predictions first, both will have -1 uncertainty, + # and sampled point will get -1 uncertainty. + point_uncertainties = self._get_uncertainty(point_logits, labels) + num_uncertain_points = int(importance_sample_ratio * num_points) + num_random_points = num_points - num_uncertain_points + idx = torch.topk( + point_uncertainties[:, 0, :], k=num_uncertain_points, dim=1)[1] + shift = num_sampled * torch.arange( + batch_size, dtype=torch.long, device=mask_pred.device) + idx += shift[:, None] + point_coords = point_coords.view(-1, 2)[idx.view(-1), :].view( + batch_size, num_uncertain_points, 2) + if num_random_points > 0: + rand_roi_coords = torch.rand( + batch_size, num_random_points, 2, device=mask_pred.device) + point_coords = torch.cat((point_coords, rand_roi_coords), dim=1) + return point_coords + + def get_roi_rel_points_test(self, mask_pred, pred_label, cfg): + """Get ``num_points`` most uncertain points during test. + + Args: + mask_pred (Tensor): A tensor of shape (num_rois, num_classes, + mask_height, mask_width) for class-specific or class-agnostic + prediction. + pred_label (list): The predicted class for each instance. + cfg (dict): Testing config of point head. + + Returns: + point_indices (Tensor): A tensor of shape (num_rois, num_points) + that contains indices from [0, mask_height x mask_width) of the + most uncertain points. + point_coords (Tensor): A tensor of shape (num_rois, num_points, 2) + that contains [0, 1] x [0, 1] normalized coordinates of the + most uncertain points from the [mask_height, mask_width] grid.
+ """ + num_points = cfg.subdivision_num_points + uncertainty_map = self._get_uncertainty(mask_pred, pred_label) + num_rois, _, mask_height, mask_width = uncertainty_map.shape + h_step = 1.0 / mask_height + w_step = 1.0 / mask_width + + uncertainty_map = uncertainty_map.view(num_rois, + mask_height * mask_width) + num_points = min(mask_height * mask_width, num_points) + point_indices = uncertainty_map.topk(num_points, dim=1)[1] + point_coords = uncertainty_map.new_zeros(num_rois, num_points, 2) + point_coords[:, :, 0] = w_step / 2.0 + (point_indices % + mask_width).float() * w_step + point_coords[:, :, 1] = h_step / 2.0 + (point_indices // + mask_width).float() * h_step + return point_indices, point_coords diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/maskiou_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/maskiou_head.py new file mode 100644 index 0000000000000000000000000000000000000000..39bcd6a7dbdb089cd19cef811038e0b6a80ab89a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_heads/maskiou_head.py @@ -0,0 +1,186 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import Conv2d, Linear, MaxPool2d, kaiming_init, normal_init +from mmcv.runner import force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.models.builder import HEADS, build_loss + + +@HEADS.register_module() +class MaskIoUHead(nn.Module): + """Mask IoU Head. + + This head predicts the IoU of predicted masks and corresponding gt masks. + """ + + def __init__(self, + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80, + loss_iou=dict(type='MSELoss', loss_weight=0.5)): + super(MaskIoUHead, self).__init__() + self.in_channels = in_channels + self.conv_out_channels = conv_out_channels + self.fc_out_channels = fc_out_channels + self.num_classes = num_classes + self.fp16_enabled = False + + self.convs = nn.ModuleList() + for i in range(num_convs): + if i == 0: + # concatenation of mask feature and mask prediction + in_channels = self.in_channels + 1 + else: + in_channels = self.conv_out_channels + stride = 2 if i == num_convs - 1 else 1 + self.convs.append( + Conv2d( + in_channels, + self.conv_out_channels, + 3, + stride=stride, + padding=1)) + + roi_feat_size = _pair(roi_feat_size) + pooled_area = (roi_feat_size[0] // 2) * (roi_feat_size[1] // 2) + self.fcs = nn.ModuleList() + for i in range(num_fcs): + in_channels = ( + self.conv_out_channels * + pooled_area if i == 0 else self.fc_out_channels) + self.fcs.append(Linear(in_channels, self.fc_out_channels)) + + self.fc_mask_iou = Linear(self.fc_out_channels, self.num_classes) + self.relu = nn.ReLU() + self.max_pool = MaxPool2d(2, 2) + self.loss_iou = build_loss(loss_iou) + + def init_weights(self): + for conv in self.convs: + kaiming_init(conv) + for fc in self.fcs: + kaiming_init( + fc, + a=1, + mode='fan_in', + nonlinearity='leaky_relu', + distribution='uniform') + normal_init(self.fc_mask_iou, std=0.01) + + def forward(self, mask_feat, mask_pred): + mask_pred = mask_pred.sigmoid() + mask_pred_pooled = self.max_pool(mask_pred.unsqueeze(1)) + + x = torch.cat((mask_feat, mask_pred_pooled), 1) + + for conv in self.convs: + x = self.relu(conv(x)) + x = x.flatten(1) + for fc in self.fcs: + x = self.relu(fc(x)) + mask_iou = self.fc_mask_iou(x) + return mask_iou + + @force_fp32(apply_to=('mask_iou_pred', )) + def loss(self, mask_iou_pred, mask_iou_targets): + pos_inds = mask_iou_targets > 0 + if pos_inds.sum() > 
0: + loss_mask_iou = self.loss_iou(mask_iou_pred[pos_inds], + mask_iou_targets[pos_inds]) + else: + loss_mask_iou = mask_iou_pred.sum() * 0 + return dict(loss_mask_iou=loss_mask_iou) + + @force_fp32(apply_to=('mask_pred', )) + def get_targets(self, sampling_results, gt_masks, mask_pred, mask_targets, + rcnn_train_cfg): + """Compute target of mask IoU. + + Mask IoU target is the IoU of the predicted mask (inside a bbox) and + the gt mask of corresponding gt mask (the whole instance). + The intersection area is computed inside the bbox, and the gt mask area + is computed with two steps, firstly we compute the gt area inside the + bbox, then divide it by the area ratio of gt area inside the bbox and + the gt area of the whole instance. + + Args: + sampling_results (list[:obj:`SamplingResult`]): sampling results. + gt_masks (BitmapMask | PolygonMask): Gt masks (the whole instance) + of each image, with the same shape of the input image. + mask_pred (Tensor): Predicted masks of each positive proposal, + shape (num_pos, h, w). + mask_targets (Tensor): Gt mask of each positive proposal, + binary map of the shape (num_pos, h, w). + rcnn_train_cfg (dict): Training config for R-CNN part. + + Returns: + Tensor: mask iou target (length == num positive). + """ + pos_proposals = [res.pos_bboxes for res in sampling_results] + pos_assigned_gt_inds = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + + # compute the area ratio of gt areas inside the proposals and + # the whole instance + area_ratios = map(self._get_area_ratio, pos_proposals, + pos_assigned_gt_inds, gt_masks) + area_ratios = torch.cat(list(area_ratios)) + assert mask_targets.size(0) == area_ratios.size(0) + + mask_pred = (mask_pred > rcnn_train_cfg.mask_thr_binary).float() + mask_pred_areas = mask_pred.sum((-1, -2)) + + # mask_pred and mask_targets are binary maps + overlap_areas = (mask_pred * mask_targets).sum((-1, -2)) + + # compute the mask area of the whole instance + gt_full_areas = mask_targets.sum((-1, -2)) / (area_ratios + 1e-7) + + mask_iou_targets = overlap_areas / ( + mask_pred_areas + gt_full_areas - overlap_areas) + return mask_iou_targets + + def _get_area_ratio(self, pos_proposals, pos_assigned_gt_inds, gt_masks): + """Compute area ratio of the gt mask inside the proposal and the gt + mask of the corresponding instance.""" + num_pos = pos_proposals.size(0) + if num_pos > 0: + area_ratios = [] + proposals_np = pos_proposals.cpu().numpy() + pos_assigned_gt_inds = pos_assigned_gt_inds.cpu().numpy() + # compute mask areas of gt instances (batch processing for speedup) + gt_instance_mask_area = gt_masks.areas + for i in range(num_pos): + gt_mask = gt_masks[pos_assigned_gt_inds[i]] + + # crop the gt mask inside the proposal + bbox = proposals_np[i, :].astype(np.int32) + gt_mask_in_proposal = gt_mask.crop(bbox) + + ratio = gt_mask_in_proposal.areas[0] / ( + gt_instance_mask_area[pos_assigned_gt_inds[i]] + 1e-7) + area_ratios.append(ratio) + area_ratios = torch.from_numpy(np.stack(area_ratios)).float().to( + pos_proposals.device) + else: + area_ratios = pos_proposals.new_zeros((0, )) + return area_ratios + + @force_fp32(apply_to=('mask_iou_pred', )) + def get_mask_scores(self, mask_iou_pred, det_bboxes, det_labels): + """Get the mask scores. 
+ + mask_score = bbox_score * mask_iou + """ + inds = range(det_labels.size(0)) + mask_scores = mask_iou_pred[inds, det_labels] * det_bboxes[inds, -1] + mask_scores = mask_scores.cpu().numpy() + det_labels = det_labels.cpu().numpy() + return [mask_scores[det_labels == i] for i in range(self.num_classes)] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/mask_scoring_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_scoring_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c6e55c7752209cb5c15eab689ad9e8ac1fef1b66 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/mask_scoring_roi_head.py @@ -0,0 +1,122 @@ +import torch + +from mmdet.core import bbox2roi +from ..builder import HEADS, build_head +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class MaskScoringRoIHead(StandardRoIHead): + """Mask Scoring RoIHead for Mask Scoring RCNN. + + https://arxiv.org/abs/1903.00241 + """ + + def __init__(self, mask_iou_head, **kwargs): + assert mask_iou_head is not None + super(MaskScoringRoIHead, self).__init__(**kwargs) + self.mask_iou_head = build_head(mask_iou_head) + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + super(MaskScoringRoIHead, self).init_weights(pretrained) + self.mask_iou_head.init_weights() + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for Mask head in + training.""" + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + mask_results = super(MaskScoringRoIHead, + self)._mask_forward_train(x, sampling_results, + bbox_feats, gt_masks, + img_metas) + if mask_results['loss_mask'] is None: + return mask_results + + # mask iou head forward and loss + pos_mask_pred = mask_results['mask_pred'][ + range(mask_results['mask_pred'].size(0)), pos_labels] + mask_iou_pred = self.mask_iou_head(mask_results['mask_feats'], + pos_mask_pred) + pos_mask_iou_pred = mask_iou_pred[range(mask_iou_pred.size(0)), + pos_labels] + + mask_iou_targets = self.mask_iou_head.get_targets( + sampling_results, gt_masks, pos_mask_pred, + mask_results['mask_targets'], self.train_cfg) + loss_mask_iou = self.mask_iou_head.loss(pos_mask_iou_pred, + mask_iou_targets) + mask_results['loss_mask'].update(loss_mask_iou) + return mask_results + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Obtain mask prediction without augmentation.""" + # image shapes of images in the batch + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + num_classes = self.mask_head.num_classes + segm_results = [[[] for _ in range(num_classes)] + for _ in range(num_imgs)] + mask_scores = [[[] for _ in range(num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. 
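+ # e.g. with scale_factor=0.5, detections in original-image + # coordinates (rescale=True) are shrunk back to the half-size test + # image before RoI extraction.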
+ if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i] + for i in range(num_imgs) + ] + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + concat_det_labels = torch.cat(det_labels) + # get mask scores with mask iou head + mask_feats = mask_results['mask_feats'] + mask_pred = mask_results['mask_pred'] + mask_iou_pred = self.mask_iou_head( + mask_feats, mask_pred[range(concat_det_labels.size(0)), + concat_det_labels]) + # split batch mask prediction back to each image + num_bboxes_per_img = tuple(len(_bbox) for _bbox in _bboxes) + mask_preds = mask_pred.split(num_bboxes_per_img, 0) + mask_iou_preds = mask_iou_pred.split(num_bboxes_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + mask_scores = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + mask_scores.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], _bboxes[i], det_labels[i], + self.test_cfg, ori_shapes[i], scale_factors[i], + rescale) + # get mask scores with mask iou head + mask_score = self.mask_iou_head.get_mask_scores( + mask_iou_preds[i], det_bboxes[i], det_labels[i]) + segm_results.append(segm_result) + mask_scores.append(mask_score) + return list(zip(segm_results, mask_scores)) diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/pisa_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/pisa_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e01113629837eb9c065ba40cd4025899b7bd0172 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/pisa_roi_head.py @@ -0,0 +1,159 @@ +from mmdet.core import bbox2roi +from ..builder import HEADS +from ..losses.pisa_loss import carl_loss, isr_p +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class PISARoIHead(StandardRoIHead): + r"""The RoI head for `Prime Sample Attention in Object Detection + `_.""" + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """Forward function for training. + + Args: + x (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + proposals (list[Tensors]): List of region proposals. + gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + gt_bboxes_ignore (list[Tensor], optional): Specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : True segmentation masks for each box + used if the architecture supports a segmentation task. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + neg_label_weights = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + # neg label weight is obtained by sampling when using ISR-N + neg_label_weight = None + if isinstance(sampling_result, tuple): + sampling_result, neg_label_weight = sampling_result + sampling_results.append(sampling_result) + neg_label_weights.append(neg_label_weight) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train( + x, + sampling_results, + gt_bboxes, + gt_labels, + img_metas, + neg_label_weights=neg_label_weights) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + return losses + + def _bbox_forward(self, x, rois): + """Box forward function used in both training and testing.""" + # TODO: a more flexible way to decide which feature maps to use + bbox_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + if self.with_shared_head: + bbox_feats = self.shared_head(bbox_feats) + cls_score, bbox_pred = self.bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, + x, + sampling_results, + gt_bboxes, + gt_labels, + img_metas, + neg_label_weights=None): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + + bbox_results = self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + + # neg_label_weights obtained by sampler is image-wise, mapping back to + # the corresponding location in label weights + if neg_label_weights[0] is not None: + label_weights = bbox_targets[1] + cur_num_rois = 0 + for i in range(len(sampling_results)): + num_pos = sampling_results[i].pos_inds.size(0) + num_neg = sampling_results[i].neg_inds.size(0) + label_weights[cur_num_rois + num_pos:cur_num_rois + num_pos + + num_neg] = neg_label_weights[i] + cur_num_rois += num_pos + num_neg + + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + + # Apply ISR-P + isr_cfg = self.train_cfg.get('isr', None) + if isr_cfg is not None: + bbox_targets = isr_p( + cls_score, + bbox_pred, + bbox_targets, + rois, + sampling_results, + self.bbox_head.loss_cls, + self.bbox_head.bbox_coder, + **isr_cfg, + num_class=self.bbox_head.num_classes) + loss_bbox = self.bbox_head.loss(cls_score, bbox_pred, rois, + *bbox_targets) + + # Add CARL Loss + carl_cfg = self.train_cfg.get('carl', None) + if carl_cfg is not None: + loss_carl = carl_loss( + cls_score, + bbox_targets[0], + bbox_pred, + bbox_targets[2], + self.bbox_head.loss_bbox, + **carl_cfg, + num_class=self.bbox_head.num_classes) + loss_bbox.update(loss_carl) + + 
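# At this point `loss_bbox` holds the usual classification and + # regression losses, reweighted in place by ISR-P when `train_cfg.isr` + # is set, and extended with a 'loss_carl' entry when `train_cfg.carl` + # is set, e.g. isr=dict(k=2, bias=0) and carl=dict(k=1, bias=0.2) in + # the stock PISA configs. + 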
bbox_results.update(loss_bbox=loss_bbox) + return bbox_results diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/point_rend_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/point_rend_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..478cdf5bff6779e9291f94c543205289036ea2c6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/point_rend_roi_head.py @@ -0,0 +1,218 @@ +# Modified from https://github.com/facebookresearch/detectron2/tree/master/projects/PointRend # noqa + +import torch +import torch.nn.functional as F +from mmcv.ops import point_sample, rel_roi_point_to_rel_img_point + +from mmdet.core import bbox2roi, bbox_mapping, merge_aug_masks +from .. import builder +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class PointRendRoIHead(StandardRoIHead): + """`PointRend `_.""" + + def __init__(self, point_head, *args, **kwargs): + super().__init__(*args, **kwargs) + assert self.with_bbox and self.with_mask + self.init_point_head(point_head) + + def init_point_head(self, point_head): + """Initialize ``point_head``""" + self.point_head = builder.build_head(point_head) + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + """ + super().init_weights(pretrained) + self.point_head.init_weights() + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for mask head and point head + in training.""" + mask_results = super()._mask_forward_train(x, sampling_results, + bbox_feats, gt_masks, + img_metas) + if mask_results['loss_mask'] is not None: + loss_point = self._mask_point_forward_train( + x, sampling_results, mask_results['mask_pred'], gt_masks, + img_metas) + mask_results['loss_mask'].update(loss_point) + + return mask_results + + def _mask_point_forward_train(self, x, sampling_results, mask_pred, + gt_masks, img_metas): + """Run forward function and calculate loss for point head in + training.""" + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + rel_roi_points = self.point_head.get_roi_rel_points_train( + mask_pred, pos_labels, cfg=self.train_cfg) + rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + + fine_grained_point_feats = self._get_fine_grained_point_feats( + x, rois, rel_roi_points, img_metas) + coarse_point_feats = point_sample(mask_pred, rel_roi_points) + mask_point_pred = self.point_head(fine_grained_point_feats, + coarse_point_feats) + mask_point_target = self.point_head.get_targets( + rois, rel_roi_points, sampling_results, gt_masks, self.train_cfg) + loss_mask_point = self.point_head.loss(mask_point_pred, + mask_point_target, pos_labels) + + return loss_mask_point + + def _get_fine_grained_point_feats(self, x, rois, rel_roi_points, + img_metas): + """Sample fine grained feats from each level feature map and + concatenate them together.""" + num_imgs = len(img_metas) + fine_grained_feats = [] + for idx in range(self.mask_roi_extractor.num_inputs): + feats = x[idx] + spatial_scale = 1. 
/ float( + self.mask_roi_extractor.featmap_strides[idx]) + point_feats = [] + for batch_ind in range(num_imgs): + # unravel batch dim + feat = feats[batch_ind].unsqueeze(0) + inds = (rois[:, 0].long() == batch_ind) + if inds.any(): + rel_img_points = rel_roi_point_to_rel_img_point( + rois[inds], rel_roi_points[inds], feat.shape[2:], + spatial_scale).unsqueeze(0) + point_feat = point_sample(feat, rel_img_points) + point_feat = point_feat.squeeze(0).transpose(0, 1) + point_feats.append(point_feat) + fine_grained_feats.append(torch.cat(point_feats, dim=0)) + return torch.cat(fine_grained_feats, dim=1) + + def _mask_point_forward_test(self, x, rois, label_pred, mask_pred, + img_metas): + """Mask refining process with point head in testing.""" + refined_mask_pred = mask_pred.clone() + for subdivision_step in range(self.test_cfg.subdivision_steps): + refined_mask_pred = F.interpolate( + refined_mask_pred, + scale_factor=self.test_cfg.scale_factor, + mode='bilinear', + align_corners=False) + # If `subdivision_num_points` is larger or equal to the + # resolution of the next step, then we can skip this step + num_rois, channels, mask_height, mask_width = \ + refined_mask_pred.shape + if (self.test_cfg.subdivision_num_points >= + self.test_cfg.scale_factor**2 * mask_height * mask_width + and + subdivision_step < self.test_cfg.subdivision_steps - 1): + continue + point_indices, rel_roi_points = \ + self.point_head.get_roi_rel_points_test( + refined_mask_pred, label_pred, cfg=self.test_cfg) + fine_grained_point_feats = self._get_fine_grained_point_feats( + x, rois, rel_roi_points, img_metas) + coarse_point_feats = point_sample(mask_pred, rel_roi_points) + mask_point_pred = self.point_head(fine_grained_point_feats, + coarse_point_feats) + + point_indices = point_indices.unsqueeze(1).expand(-1, channels, -1) + refined_mask_pred = refined_mask_pred.reshape( + num_rois, channels, mask_height * mask_width) + refined_mask_pred = refined_mask_pred.scatter_( + 2, point_indices, mask_point_pred) + refined_mask_pred = refined_mask_pred.view(num_rois, channels, + mask_height, mask_width) + + return refined_mask_pred + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Obtain mask prediction without augmentation.""" + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.mask_head.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. 
+ if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + # split batch mask prediction back to each image + mask_pred = mask_results['mask_pred'] + num_mask_roi_per_img = [len(det_bbox) for det_bbox in det_bboxes] + mask_preds = mask_pred.split(num_mask_roi_per_img, 0) + mask_rois = mask_rois.split(num_mask_roi_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + x_i = [xx[[i]] for xx in x] + mask_rois_i = mask_rois[i] + mask_rois_i[:, 0] = 0 # TODO: remove this hack + mask_pred_i = self._mask_point_forward_test( + x_i, mask_rois_i, det_labels[i], mask_preds[i], + [img_metas]) + segm_result = self.mask_head.get_seg_masks( + mask_pred_i, _bboxes[i], det_labels[i], self.test_cfg, + ori_shapes[i], scale_factors[i], rescale) + segm_results.append(segm_result) + return segm_results + + def aug_test_mask(self, feats, img_metas, det_bboxes, det_labels): + """Test for mask head with test time augmentation.""" + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + aug_masks = [] + for x, img_meta in zip(feats, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip) + mask_rois = bbox2roi([_bboxes]) + mask_results = self._mask_forward(x, mask_rois) + mask_results['mask_pred'] = self._mask_point_forward_test( + x, mask_rois, det_labels, mask_results['mask_pred'], + img_metas) + # convert to numpy array to save memory + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + merged_masks = merge_aug_masks(aug_masks, img_metas, self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head.get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + self.test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return segm_result diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/__init__.py b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a6ec0ecc3063cd23c2463f2f53f1c2a83b04d43b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/__init__.py @@ -0,0 +1,7 @@ +from .generic_roi_extractor import GenericRoIExtractor +from .single_level_roi_extractor import SingleRoIExtractor + +__all__ = [ + 'SingleRoIExtractor', + 'GenericRoIExtractor', +] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..0e42b52f3615722ba9dd575c8f6293dd64004be8 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py @@ -0,0 +1,83 @@ +from abc import ABCMeta, abstractmethod + +import torch +import torch.nn as nn +from mmcv import ops + + +class BaseRoIExtractor(nn.Module, metaclass=ABCMeta): + 
"""Base class for RoI extractor. + + Args: + roi_layer (dict): Specify RoI layer type and arguments. + out_channels (int): Output channels of RoI layers. + featmap_strides (int): Strides of input feature maps. + """ + + def __init__(self, roi_layer, out_channels, featmap_strides): + super(BaseRoIExtractor, self).__init__() + self.roi_layers = self.build_roi_layers(roi_layer, featmap_strides) + self.out_channels = out_channels + self.featmap_strides = featmap_strides + self.fp16_enabled = False + + @property + def num_inputs(self): + """int: Number of input feature maps.""" + return len(self.featmap_strides) + + def init_weights(self): + pass + + def build_roi_layers(self, layer_cfg, featmap_strides): + """Build RoI operator to extract feature from each level feature map. + + Args: + layer_cfg (dict): Dictionary to construct and config RoI layer + operation. Options are modules under ``mmcv/ops`` such as + ``RoIAlign``. + featmap_strides (int): The stride of input feature map w.r.t to the + original image size, which would be used to scale RoI + coordinate (original image coordinate system) to feature + coordinate system. + + Returns: + nn.ModuleList: The RoI extractor modules for each level feature + map. + """ + + cfg = layer_cfg.copy() + layer_type = cfg.pop('type') + assert hasattr(ops, layer_type) + layer_cls = getattr(ops, layer_type) + roi_layers = nn.ModuleList( + [layer_cls(spatial_scale=1 / s, **cfg) for s in featmap_strides]) + return roi_layers + + def roi_rescale(self, rois, scale_factor): + """Scale RoI coordinates by scale factor. + + Args: + rois (torch.Tensor): RoI (Region of Interest), shape (n, 5) + scale_factor (float): Scale factor that RoI will be multiplied by. + + Returns: + torch.Tensor: Scaled RoI. + """ + + cx = (rois[:, 1] + rois[:, 3]) * 0.5 + cy = (rois[:, 2] + rois[:, 4]) * 0.5 + w = rois[:, 3] - rois[:, 1] + h = rois[:, 4] - rois[:, 2] + new_w = w * scale_factor + new_h = h * scale_factor + x1 = cx - new_w * 0.5 + x2 = cx + new_w * 0.5 + y1 = cy - new_h * 0.5 + y2 = cy + new_h * 0.5 + new_rois = torch.stack((rois[:, 0], x1, y1, x2, y2), dim=-1) + return new_rois + + @abstractmethod + def forward(self, feats, rois, roi_scale_factor=None): + pass diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..80c25bb8fde7844c994bfc1f4ae1a2d960cbf3d6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py @@ -0,0 +1,83 @@ +from mmcv.cnn.bricks import build_plugin_layer +from mmcv.runner import force_fp32 + +from mmdet.models.builder import ROI_EXTRACTORS +from .base_roi_extractor import BaseRoIExtractor + + +@ROI_EXTRACTORS.register_module() +class GenericRoIExtractor(BaseRoIExtractor): + """Extract RoI features from all level feature maps levels. + + This is the implementation of `A novel Region of Interest Extraction Layer + for Instance Segmentation `_. + + Args: + aggregation (str): The method to aggregate multiple feature maps. + Options are 'sum', 'concat'. Default: 'sum'. + pre_cfg (dict | None): Specify pre-processing modules. Default: None. + post_cfg (dict | None): Specify post-processing modules. Default: None. + kwargs (keyword arguments): Arguments that are the same + as :class:`BaseRoIExtractor`. 
+ """ + + def __init__(self, + aggregation='sum', + pre_cfg=None, + post_cfg=None, + **kwargs): + super(GenericRoIExtractor, self).__init__(**kwargs) + + assert aggregation in ['sum', 'concat'] + + self.aggregation = aggregation + self.with_post = post_cfg is not None + self.with_pre = pre_cfg is not None + # build pre/post processing modules + if self.with_post: + self.post_module = build_plugin_layer(post_cfg, '_post_module')[1] + if self.with_pre: + self.pre_module = build_plugin_layer(pre_cfg, '_pre_module')[1] + + @force_fp32(apply_to=('feats', ), out_fp16=True) + def forward(self, feats, rois, roi_scale_factor=None): + """Forward function.""" + if len(feats) == 1: + return self.roi_layers[0](feats[0], rois) + + out_size = self.roi_layers[0].output_size + num_levels = len(feats) + roi_feats = feats[0].new_zeros( + rois.size(0), self.out_channels, *out_size) + + # some times rois is an empty tensor + if roi_feats.shape[0] == 0: + return roi_feats + + if roi_scale_factor is not None: + rois = self.roi_rescale(rois, roi_scale_factor) + + # mark the starting channels for concat mode + start_channels = 0 + for i in range(num_levels): + roi_feats_t = self.roi_layers[i](feats[i], rois) + end_channels = start_channels + roi_feats_t.size(1) + if self.with_pre: + # apply pre-processing to a RoI extracted from each layer + roi_feats_t = self.pre_module(roi_feats_t) + if self.aggregation == 'sum': + # and sum them all + roi_feats += roi_feats_t + else: + # and concat them along channel dimension + roi_feats[:, start_channels:end_channels] = roi_feats_t + # update channels starting position + start_channels = end_channels + # check if concat channels match at the end + if self.aggregation == 'concat': + assert start_channels == self.out_channels + + if self.with_post: + # apply post-processing before return the result + roi_feats = self.post_module(roi_feats) + return roi_feats diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..c0eebc4af57bd283d4faac88a7f2af053dff1201 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py @@ -0,0 +1,99 @@ +import torch +from mmcv.runner import force_fp32 + +from mmdet.models.builder import ROI_EXTRACTORS +from .base_roi_extractor import BaseRoIExtractor + + +@ROI_EXTRACTORS.register_module() +class SingleRoIExtractor(BaseRoIExtractor): + """Extract RoI features from a single level feature map. + + If there are multiple input feature levels, each RoI is mapped to a level + according to its scale. The mapping rule is proposed in + `FPN `_. + + Args: + roi_layer (dict): Specify RoI layer type and arguments. + out_channels (int): Output channels of RoI layers. + featmap_strides (int): Strides of input feature maps. + finest_scale (int): Scale threshold of mapping to level 0. Default: 56. + """ + + def __init__(self, + roi_layer, + out_channels, + featmap_strides, + finest_scale=56): + super(SingleRoIExtractor, self).__init__(roi_layer, out_channels, + featmap_strides) + self.finest_scale = finest_scale + + def map_roi_levels(self, rois, num_levels): + """Map rois to corresponding feature levels by scales. 
+ + - scale < finest_scale * 2: level 0 + - finest_scale * 2 <= scale < finest_scale * 4: level 1 + - finest_scale * 4 <= scale < finest_scale * 8: level 2 + - scale >= finest_scale * 8: level 3 + + Args: + rois (Tensor): Input RoIs, shape (k, 5). + num_levels (int): Total level number. + + Returns: + Tensor: Level index (0-based) of each RoI, shape (k, ) + """ + scale = torch.sqrt( + (rois[:, 3] - rois[:, 1]) * (rois[:, 4] - rois[:, 2])) + target_lvls = torch.floor(torch.log2(scale / self.finest_scale + 1e-6)) + target_lvls = target_lvls.clamp(min=0, max=num_levels - 1).long() + return target_lvls + + @force_fp32(apply_to=('feats', ), out_fp16=True) + def forward(self, feats, rois, roi_scale_factor=None): + """Forward function.""" + out_size = self.roi_layers[0].output_size + num_levels = len(feats) + if torch.onnx.is_in_onnx_export(): + # Work around to export mask-rcnn to onnx + roi_feats = rois[:, :1].clone().detach() + roi_feats = roi_feats.expand( + -1, self.out_channels * out_size[0] * out_size[1]) + roi_feats = roi_feats.reshape(-1, self.out_channels, *out_size) + roi_feats = roi_feats * 0 + else: + roi_feats = feats[0].new_zeros( + rois.size(0), self.out_channels, *out_size) + # TODO: remove this when parrots supports + if torch.__version__ == 'parrots': + roi_feats.requires_grad = True + + if num_levels == 1: + if len(rois) == 0: + return roi_feats + return self.roi_layers[0](feats[0], rois) + + target_lvls = self.map_roi_levels(rois, num_levels) + if roi_scale_factor is not None: + rois = self.roi_rescale(rois, roi_scale_factor) + + for i in range(num_levels): + mask = target_lvls == i + inds = mask.nonzero(as_tuple=False).squeeze(1) + # TODO: make it nicer when exporting to onnx + if torch.onnx.is_in_onnx_export(): + # To keep all roi_align nodes exported to onnx + rois_ = rois[inds] + roi_feats_t = self.roi_layers[i](feats[i], rois_) + roi_feats[inds] = roi_feats_t + continue + if inds.numel() > 0: + rois_ = rois[inds] + roi_feats_t = self.roi_layers[i](feats[i], rois_) + roi_feats[inds] = roi_feats_t + else: + roi_feats += sum( + x.view(-1)[0] + for x in self.parameters()) * 0. + feats[i].sum() * 0. 
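+ # The zero-valued terms above touch every parameter and the unused + # feature level so that autograd still records them; without this, + # distributed training may raise unused-parameter errors on + # iterations where a level receives no RoIs.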
+ return roi_feats diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/__init__.py b/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bbe70145b8bf7c304370f725f5afa8db98666679 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/__init__.py @@ -0,0 +1,3 @@ +from .res_layer import ResLayer + +__all__ = ['ResLayer'] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/res_layer.py b/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/res_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..b5c343258b079a0dd832d4f999c18d002b06efac --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/shared_heads/res_layer.py @@ -0,0 +1,77 @@ +import torch.nn as nn +from mmcv.cnn import constant_init, kaiming_init +from mmcv.runner import auto_fp16, load_checkpoint + +from mmdet.models.backbones import ResNet +from mmdet.models.builder import SHARED_HEADS +from mmdet.models.utils import ResLayer as _ResLayer +from mmdet.utils import get_root_logger + + +@SHARED_HEADS.register_module() +class ResLayer(nn.Module): + + def __init__(self, + depth, + stage=3, + stride=2, + dilation=1, + style='pytorch', + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + with_cp=False, + dcn=None): + super(ResLayer, self).__init__() + self.norm_eval = norm_eval + self.norm_cfg = norm_cfg + self.stage = stage + self.fp16_enabled = False + block, stage_blocks = ResNet.arch_settings[depth] + stage_block = stage_blocks[stage] + planes = 64 * 2**stage + inplanes = 64 * 2**(stage - 1) * block.expansion + + res_layer = _ResLayer( + block, + inplanes, + planes, + stage_block, + stride=stride, + dilation=dilation, + style=style, + with_cp=with_cp, + norm_cfg=self.norm_cfg, + dcn=dcn) + self.add_module(f'layer{stage + 1}', res_layer) + + def init_weights(self, pretrained=None): + """Initialize the weights in the module. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. 
+ """ + if isinstance(pretrained, str): + logger = get_root_logger() + load_checkpoint(self, pretrained, strict=False, logger=logger) + elif pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, nn.BatchNorm2d): + constant_init(m, 1) + else: + raise TypeError('pretrained must be a str or None') + + @auto_fp16() + def forward(self, x): + res_layer = getattr(self, f'layer{self.stage + 1}') + out = res_layer(x) + return out + + def train(self, mode=True): + super(ResLayer, self).train(mode) + if self.norm_eval: + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/standard_roi_head.py b/thirdparty/mmdetection/mmdet/models/roi_heads/standard_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c530f2a5ce904439492de12ff7d267cc1e757d3a --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/standard_roi_head.py @@ -0,0 +1,295 @@ +import torch + +from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler +from ..builder import HEADS, build_head, build_roi_extractor +from .base_roi_head import BaseRoIHead +from .test_mixins import BBoxTestMixin, MaskTestMixin + + +@HEADS.register_module() +class StandardRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin): + """Simplest base roi head including one bbox head and one mask head.""" + + def init_assigner_sampler(self): + """Initialize assigner and sampler.""" + self.bbox_assigner = None + self.bbox_sampler = None + if self.train_cfg: + self.bbox_assigner = build_assigner(self.train_cfg.assigner) + self.bbox_sampler = build_sampler( + self.train_cfg.sampler, context=self) + + def init_bbox_head(self, bbox_roi_extractor, bbox_head): + """Initialize ``bbox_head``""" + self.bbox_roi_extractor = build_roi_extractor(bbox_roi_extractor) + self.bbox_head = build_head(bbox_head) + + def init_mask_head(self, mask_roi_extractor, mask_head): + """Initialize ``mask_head``""" + if mask_roi_extractor is not None: + self.mask_roi_extractor = build_roi_extractor(mask_roi_extractor) + self.share_roi_extractor = False + else: + self.share_roi_extractor = True + self.mask_roi_extractor = self.bbox_roi_extractor + self.mask_head = build_head(mask_head) + + def init_weights(self, pretrained): + """Initialize the weights in head. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + if self.with_shared_head: + self.shared_head.init_weights(pretrained=pretrained) + if self.with_bbox: + self.bbox_roi_extractor.init_weights() + self.bbox_head.init_weights() + if self.with_mask: + self.mask_head.init_weights() + if not self.share_roi_extractor: + self.mask_roi_extractor.init_weights() + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + bbox_results = self._bbox_forward(x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask head + if self.with_mask: + mask_rois = rois[:100] + mask_results = self._mask_forward(x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. 
+ img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + proposals (list[Tensors]): list of region proposals. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + return losses + + def _bbox_forward(self, x, rois): + """Box head forward function used in both training and testing.""" + # TODO: a more flexible way to decide which feature maps to use + bbox_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + if self.with_shared_head: + bbox_feats = self.shared_head(bbox_feats) + cls_score, bbox_pred = self.bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + loss_bbox = self.bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update(loss_bbox=loss_bbox) + return bbox_results + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for mask head in + training.""" + if not self.share_roi_extractor: + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_results = self._mask_forward(x, pos_rois) + else: + pos_inds = [] + device = bbox_feats.device + for res in sampling_results: + pos_inds.append( + torch.ones( + res.pos_bboxes.shape[0], + device=device, + dtype=torch.uint8)) + pos_inds.append( + torch.zeros( + res.neg_bboxes.shape[0], + device=device, + dtype=torch.uint8)) + pos_inds = torch.cat(pos_inds) + + 
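# `pos_inds` is now a uint8 mask over all sampled RoIs (positives + # before negatives within each image), so `bbox_feats[pos_inds]` in + # `_mask_forward` below selects the positive RoI features when the + # mask branch shares the bbox RoI extractor. + 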
mask_results = self._mask_forward( + x, pos_inds=pos_inds, bbox_feats=bbox_feats) + + mask_targets = self.mask_head.get_targets(sampling_results, gt_masks, + self.train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.mask_head.loss(mask_results['mask_pred'], + mask_targets, pos_labels) + + mask_results.update(loss_mask=loss_mask, mask_targets=mask_targets) + return mask_results + + def _mask_forward(self, x, rois=None, pos_inds=None, bbox_feats=None): + """Mask head forward function used in both training and testing.""" + assert ((rois is not None) ^ + (pos_inds is not None and bbox_feats is not None)) + if rois is not None: + mask_feats = self.mask_roi_extractor( + x[:self.mask_roi_extractor.num_inputs], rois) + if self.with_shared_head: + mask_feats = self.shared_head(mask_feats) + else: + assert bbox_feats is not None + mask_feats = bbox_feats[pos_inds] + + mask_pred = self.mask_head(mask_feats) + mask_results = dict(mask_pred=mask_pred, mask_feats=mask_feats) + return mask_results + + async def async_simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Async test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = await self.async_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=rescale) + bbox_results = bbox2result(det_bboxes, det_labels, + self.bbox_head.num_classes) + if not self.with_mask: + return bbox_results + else: + segm_results = await self.async_test_mask( + x, + img_metas, + det_bboxes, + det_labels, + rescale=rescale, + mask_test_cfg=self.test_cfg.get('mask')) + return bbox_results, segm_results + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = self.simple_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=rescale) + if torch.onnx.is_in_onnx_export(): + if self.with_mask: + segm_results = self.simple_test_mask( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return det_bboxes, det_labels, segm_results + else: + return det_bboxes, det_labels + + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head.num_classes) + for i in range(len(det_bboxes)) + ] + + if not self.with_mask: + return bbox_results + else: + segm_results = self.simple_test_mask( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return list(zip(bbox_results, segm_results)) + + def aug_test(self, x, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
+ """ + det_bboxes, det_labels = self.aug_test_bboxes(x, img_metas, + proposal_list, + self.test_cfg) + + if rescale: + _det_bboxes = det_bboxes + else: + _det_bboxes = det_bboxes.clone() + _det_bboxes[:, :4] *= det_bboxes.new_tensor( + img_metas[0][0]['scale_factor']) + bbox_results = bbox2result(_det_bboxes, det_labels, + self.bbox_head.num_classes) + + # det_bboxes always keep the original scale + if self.with_mask: + segm_results = self.aug_test_mask(x, img_metas, det_bboxes, + det_labels) + return [(bbox_results, segm_results)] + else: + return [bbox_results] diff --git a/thirdparty/mmdetection/mmdet/models/roi_heads/test_mixins.py b/thirdparty/mmdetection/mmdet/models/roi_heads/test_mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..12684c52c2bac0493f1b31e5ea91bd66c004c76b --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/roi_heads/test_mixins.py @@ -0,0 +1,268 @@ +import logging +import sys + +import torch + +from mmdet.core import (bbox2roi, bbox_mapping, merge_aug_bboxes, + merge_aug_masks, multiclass_nms) + +logger = logging.getLogger(__name__) + +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import completed + + +class BBoxTestMixin(object): + + if sys.version_info >= (3, 7): + + async def async_test_bboxes(self, + x, + img_metas, + proposals, + rcnn_test_cfg, + rescale=False, + bbox_semaphore=None, + global_lock=None): + """Asynchronized test for box head without augmentation.""" + rois = bbox2roi(proposals) + roi_feats = self.bbox_roi_extractor( + x[:len(self.bbox_roi_extractor.featmap_strides)], rois) + if self.with_shared_head: + roi_feats = self.shared_head(roi_feats) + sleep_interval = rcnn_test_cfg.get('async_sleep_interval', 0.017) + + async with completed( + __name__, 'bbox_head_forward', + sleep_interval=sleep_interval): + cls_score, bbox_pred = self.bbox_head(roi_feats) + + img_shape = img_metas[0]['img_shape'] + scale_factor = img_metas[0]['scale_factor'] + det_bboxes, det_labels = self.bbox_head.get_bboxes( + rois, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=rescale, + cfg=rcnn_test_cfg) + return det_bboxes, det_labels + + def simple_test_bboxes(self, + x, + img_metas, + proposals, + rcnn_test_cfg, + rescale=False): + """Test only det bboxes without augmentation.""" + rois = bbox2roi(proposals) + bbox_results = self._bbox_forward(x, rois) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple(len(p) for p in proposals) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + + # some detector with_reg is False, bbox_pred will be None + if bbox_pred is not None: + # the bbox prediction of some detectors like SABL is not Tensor + if isinstance(bbox_pred, torch.Tensor): + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + else: + bbox_pred = self.bbox_head.bbox_pred_split( + bbox_pred, num_proposals_per_img) + else: + bbox_pred = (None, ) * len(proposals) + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(len(proposals)): + det_bbox, det_label = self.bbox_head.get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + 
det_labels.append(det_label) + return det_bboxes, det_labels + + def aug_test_bboxes(self, feats, img_metas, proposal_list, rcnn_test_cfg): + """Test det bboxes with test time augmentation.""" + aug_bboxes = [] + aug_scores = [] + for x, img_meta in zip(feats, img_metas): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + # TODO more flexible + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + rois = bbox2roi([proposals]) + bbox_results = self._bbox_forward(x, rois) + bboxes, scores = self.bbox_head.get_bboxes( + rois, + bbox_results['cls_score'], + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + return det_bboxes, det_labels + + +class MaskTestMixin(object): + + if sys.version_info >= (3, 7): + + async def async_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False, + mask_test_cfg=None): + """Asynchronized test for mask head without augmentation.""" + # image shape of the first image in the batch (only one) + ori_shape = img_metas[0]['ori_shape'] + scale_factor = img_metas[0]['scale_factor'] + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + _bboxes = ( + det_bboxes[:, :4] * + scale_factor if rescale else det_bboxes) + mask_rois = bbox2roi([_bboxes]) + mask_feats = self.mask_roi_extractor( + x[:len(self.mask_roi_extractor.featmap_strides)], + mask_rois) + + if self.with_shared_head: + mask_feats = self.shared_head(mask_feats) + if mask_test_cfg and mask_test_cfg.get('async_sleep_interval'): + sleep_interval = mask_test_cfg['async_sleep_interval'] + else: + sleep_interval = 0.035 + async with completed( + __name__, + 'mask_head_forward', + sleep_interval=sleep_interval): + mask_pred = self.mask_head(mask_feats) + segm_result = self.mask_head.get_seg_masks( + mask_pred, _bboxes, det_labels, self.test_cfg, ori_shape, + scale_factor, rescale) + return segm_result + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Simple test for mask head without augmentation.""" + # image shapes of images in the batch + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.mask_head.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. 
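+ # `scale_factor` arrives from the data pipeline as a numpy array of + # shape (4,) ordered [w_scale, h_scale, w_scale, h_scale], hence the + # conversion to tensors on the bbox device below.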
+ if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + if torch.onnx.is_in_onnx_export(): + # avoid mask_pred.split with static number of prediction + mask_preds = [] + _bboxes = [] + for i, boxes in enumerate(det_bboxes): + boxes = boxes[:, :4] + if rescale: + boxes *= scale_factors[i] + _bboxes.append(boxes) + img_inds = boxes[:, :1].clone() * 0 + i + mask_rois = torch.cat([img_inds, boxes], dim=-1) + mask_result = self._mask_forward(x, mask_rois) + mask_preds.append(mask_result['mask_pred']) + else: + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + mask_pred = mask_results['mask_pred'] + # split batch mask prediction back to each image + num_mask_roi_per_img = [ + det_bbox.shape[0] for det_bbox in det_bboxes + ] + mask_preds = mask_pred.split(num_mask_roi_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], _bboxes[i], det_labels[i], + self.test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + return segm_results + + def aug_test_mask(self, feats, img_metas, det_bboxes, det_labels): + """Test for mask head with test time augmentation.""" + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + aug_masks = [] + for x, img_meta in zip(feats, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + mask_results = self._mask_forward(x, mask_rois) + # convert to numpy array to save memory + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + merged_masks = merge_aug_masks(aug_masks, img_metas, self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head.get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + self.test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return segm_result diff --git a/thirdparty/mmdetection/mmdet/models/utils/__init__.py b/thirdparty/mmdetection/mmdet/models/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1fc09b5e8fe34c7493203e3e9b7d9a433ed21d7c --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/__init__.py @@ -0,0 +1,16 @@ +from .builder import build_positional_encoding, build_transformer +from .gaussian_target import gaussian_radius, gen_gaussian_target +from .positional_encoding import (LearnedPositionalEncoding, + SinePositionalEncoding) +from .res_layer import ResLayer +from .transformer import (FFN, MultiheadAttention, Transformer, + TransformerDecoder, TransformerDecoderLayer, + TransformerEncoder, TransformerEncoderLayer) + +__all__ = [ + 'ResLayer', 'gaussian_radius', 'gen_gaussian_target', 'MultiheadAttention', + 'FFN', 'TransformerEncoderLayer', 'TransformerEncoder', + 'TransformerDecoderLayer', 'TransformerDecoder', 'Transformer', + 'build_transformer', 'build_positional_encoding', 
'SinePositionalEncoding', + 'LearnedPositionalEncoding' +] diff --git a/thirdparty/mmdetection/mmdet/models/utils/builder.py b/thirdparty/mmdetection/mmdet/models/utils/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..f362d1c92ca9d4ed95a2b3d28d3e6baedd14e462 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/builder.py @@ -0,0 +1,14 @@ +from mmcv.utils import Registry, build_from_cfg + +TRANSFORMER = Registry('Transformer') +POSITIONAL_ENCODING = Registry('Position encoding') + + +def build_transformer(cfg, default_args=None): + """Builder for Transformer.""" + return build_from_cfg(cfg, TRANSFORMER, default_args) + + +def build_positional_encoding(cfg, default_args=None): + """Builder for Position Encoding.""" + return build_from_cfg(cfg, POSITIONAL_ENCODING, default_args) diff --git a/thirdparty/mmdetection/mmdet/models/utils/gaussian_target.py b/thirdparty/mmdetection/mmdet/models/utils/gaussian_target.py new file mode 100644 index 0000000000000000000000000000000000000000..7bb7160cb4bf2f47876f6e8373142aa5846920a9 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/gaussian_target.py @@ -0,0 +1,185 @@ +from math import sqrt + +import torch + + +def gaussian2D(radius, sigma=1, dtype=torch.float32, device='cpu'): + """Generate 2D gaussian kernel. + + Args: + radius (int): Radius of gaussian kernel. + sigma (int): Sigma of gaussian function. Default: 1. + dtype (torch.dtype): Dtype of gaussian tensor. Default: torch.float32. + device (str): Device of gaussian tensor. Default: 'cpu'. + + Returns: + h (Tensor): Gaussian kernel with a + ``(2 * radius + 1) * (2 * radius + 1)`` shape. + """ + x = torch.arange( + -radius, radius + 1, dtype=dtype, device=device).view(1, -1) + y = torch.arange( + -radius, radius + 1, dtype=dtype, device=device).view(-1, 1) + + h = (-(x * x + y * y) / (2 * sigma * sigma)).exp() + + h[h < torch.finfo(h.dtype).eps * h.max()] = 0 + return h + + +def gen_gaussian_target(heatmap, center, radius, k=1): + """Generate 2D gaussian heatmap. + + Args: + heatmap (Tensor): Input heatmap, the gaussian kernel will cover on + it and maintain the max value. + center (list[int]): Coord of gaussian kernel's center. + radius (int): Radius of gaussian kernel. + k (int): Coefficient of gaussian kernel. Default: 1. + + Returns: + out_heatmap (Tensor): Updated heatmap covered by gaussian kernel. + """ + diameter = 2 * radius + 1 + gaussian_kernel = gaussian2D( + radius, sigma=diameter / 6, dtype=heatmap.dtype, device=heatmap.device) + + x, y = center + + height, width = heatmap.shape[:2] + + left, right = min(x, radius), min(width - x, radius + 1) + top, bottom = min(y, radius), min(height - y, radius + 1) + + masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right] + masked_gaussian = gaussian_kernel[radius - top:radius + bottom, + radius - left:radius + right] + out_heatmap = heatmap + torch.max( + masked_heatmap, + masked_gaussian * k, + out=out_heatmap[y - top:y + bottom, x - left:x + right]) + + return out_heatmap + + +def gaussian_radius(det_size, min_overlap): + r"""Generate 2D gaussian radius. + + This function is modified from the `official github repo + `_. + + Given ``min_overlap``, radius could computed by a quadratic equation + according to Vieta's formulas. + + There are 3 cases for computing gaussian radius, details are following: + + - Explanation of figure: ``lt`` and ``br`` indicates the left-top and + bottom-right corner of ground truth box. 
``x`` indicates the + generated corner at the limited position when ``radius=r``. + + - Case1: one corner is inside the gt box and the other is outside. + + .. code:: text + + |< width >| + + lt-+----------+ - + | | | ^ + +--x----------+--+ + | | | | + | | | | height + | | overlap | | + | | | | + | | | | v + +--+---------br--+ - + | | | + +----------+--x + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{(w-r)*(h-r)}{w*h+(w+h)r-r^2} \ge {iou} \quad\Rightarrow\quad + {r^2-(w+h)r+\cfrac{1-iou}{1+iou}*w*h} \ge 0 \\ + {a} = 1,\quad{b} = {-(w+h)},\quad{c} = {\cfrac{1-iou}{1+iou}*w*h} + {r} \le \cfrac{-b-\sqrt{b^2-4*a*c}}{2*a} + + - Case2: both two corners are inside the gt box. + + .. code:: text + + |< width >| + + lt-+----------+ - + | | | ^ + +--x-------+ | + | | | | + | |overlap| | height + | | | | + | +-------x--+ + | | | v + +----------+-br - + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{(w-2*r)*(h-2*r)}{w*h} \ge {iou} \quad\Rightarrow\quad + {4r^2-2(w+h)r+(1-iou)*w*h} \ge 0 \\ + {a} = 4,\quad {b} = {-2(w+h)},\quad {c} = {(1-iou)*w*h} + {r} \le \cfrac{-b-\sqrt{b^2-4*a*c}}{2*a} + + - Case3: both two corners are outside the gt box. + + .. code:: text + + |< width >| + + x--+----------------+ + | | | + +-lt-------------+ | - + | | | | ^ + | | | | + | | overlap | | height + | | | | + | | | | v + | +------------br--+ - + | | | + +----------------+--x + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{w*h}{(w+2*r)*(h+2*r)} \ge {iou} \quad\Rightarrow\quad + {4*iou*r^2+2*iou*(w+h)r+(iou-1)*w*h} \le 0 \\ + {a} = {4*iou},\quad {b} = {2*iou*(w+h)},\quad {c} = {(iou-1)*w*h} \\ + {r} \le \cfrac{-b+\sqrt{b^2-4*a*c}}{2*a} + + Args: + det_size (list[int]): Shape of object. + min_overlap (float): Min IoU with ground truth for boxes generated by + keypoints inside the gaussian kernel. + + Returns: + radius (int): Radius of gaussian kernel. + """ + height, width = det_size + + a1 = 1 + b1 = (height + width) + c1 = width * height * (1 - min_overlap) / (1 + min_overlap) + sq1 = sqrt(b1**2 - 4 * a1 * c1) + r1 = (b1 - sq1) / (2 * a1) + + a2 = 4 + b2 = 2 * (height + width) + c2 = (1 - min_overlap) * width * height + sq2 = sqrt(b2**2 - 4 * a2 * c2) + r2 = (b2 - sq2) / (2 * a2) + + a3 = 4 * min_overlap + b3 = -2 * min_overlap * (height + width) + c3 = (min_overlap - 1) * width * height + sq3 = sqrt(b3**2 - 4 * a3 * c3) + r3 = (b3 + sq3) / (2 * a3) + return min(r1, r2, r3) diff --git a/thirdparty/mmdetection/mmdet/models/utils/positional_encoding.py b/thirdparty/mmdetection/mmdet/models/utils/positional_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..9bda2bbdbfcc28ba6304b6325ae556fa02554ac1 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/positional_encoding.py @@ -0,0 +1,150 @@ +import math + +import torch +import torch.nn as nn +from mmcv.cnn import uniform_init + +from .builder import POSITIONAL_ENCODING + + +@POSITIONAL_ENCODING.register_module() +class SinePositionalEncoding(nn.Module): + """Position encoding with sine and cosine functions. + + See `End-to-End Object Detection with Transformers + `_ for details. + + Args: + num_feats (int): The feature dimension for each position + along x-axis or y-axis. Note the final returned dimension + for each position is 2 times of this value. + temperature (int, optional): The temperature used for scaling + the position embedding. Default 10000. 
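The `gaussian_radius`/`gen_gaussian_target` pair above is the CornerNet/CenterNet recipe for keypoint heatmap targets: pick the largest radius whose shifted corners still keep at least `min_overlap` IoU with the ground-truth box, then splat a gaussian of that radius with an element-wise max. A usage sketch, assuming the two functions from `gaussian_target.py` above are in scope:

```python
import torch

h_box, w_box = 24, 40  # ground-truth box size measured in heatmap cells
radius = max(0, int(gaussian_radius((h_box, w_box), min_overlap=0.7)))

heatmap = torch.zeros(96, 96)  # one class channel of the target
heatmap = gen_gaussian_target(heatmap, center=(50, 30), radius=radius)
assert heatmap[30, 50] == 1.0  # k=1 peak at the (x=50, y=30) center
```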
+ normalize (bool, optional): Whether to normalize the position + embedding. Default False. + scale (float, optional): A scale factor that scales the position + embedding. The scale will be used only when `normalize` is True. + Default 2*pi. + eps (float, optional): A value added to the denominator for + numerical stability. Default 1e-6. + """ + + def __init__(self, + num_feats, + temperature=10000, + normalize=False, + scale=2 * math.pi, + eps=1e-6): + super(SinePositionalEncoding, self).__init__() + if normalize: + assert isinstance(scale, (float, int)), 'when normalize is set,' \ + 'scale should be provided and in float or int type, ' \ + f'found {type(scale)}' + self.num_feats = num_feats + self.temperature = temperature + self.normalize = normalize + self.scale = scale + self.eps = eps + + def forward(self, mask): + """Forward function for `SinePositionalEncoding`. + + Args: + mask (Tensor): ByteTensor mask. Non-zero values representing + ignored positions, while zero values means valid positions + for this image. Shape [bs, h, w]. + + Returns: + pos (Tensor): Returned position embedding with shape + [bs, num_feats*2, h, w]. + """ + not_mask = ~mask + y_embed = not_mask.cumsum(1, dtype=torch.float32) + x_embed = not_mask.cumsum(2, dtype=torch.float32) + if self.normalize: + y_embed = y_embed / (y_embed[:, -1:, :] + self.eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + self.eps) * self.scale + dim_t = torch.arange( + self.num_feats, dtype=torch.float32, device=mask.device) + dim_t = self.temperature**(2 * (dim_t // 2) / self.num_feats) + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), + dim=4).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), + dim=4).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(num_feats={self.num_feats}, ' + repr_str += f'temperature={self.temperature}, ' + repr_str += f'normalize={self.normalize}, ' + repr_str += f'scale={self.scale}, ' + repr_str += f'eps={self.eps})' + return repr_str + + +@POSITIONAL_ENCODING.register_module() +class LearnedPositionalEncoding(nn.Module): + """Position embedding with learnable embedding weights. + + Args: + num_feats (int): The feature dimension for each position + along x-axis or y-axis. The final returned dimension for + each position is 2 times of this value. + row_num_embed (int, optional): The dictionary size of row embeddings. + Default 50. + col_num_embed (int, optional): The dictionary size of col embeddings. + Default 50. + """ + + def __init__(self, num_feats, row_num_embed=50, col_num_embed=50): + super(LearnedPositionalEncoding, self).__init__() + self.row_embed = nn.Embedding(row_num_embed, num_feats) + self.col_embed = nn.Embedding(col_num_embed, num_feats) + self.num_feats = num_feats + self.row_num_embed = row_num_embed + self.col_num_embed = col_num_embed + self.init_weights() + + def init_weights(self): + """Initialize the learnable weights.""" + uniform_init(self.row_embed) + uniform_init(self.col_embed) + + def forward(self, mask): + """Forward function for `LearnedPositionalEncoding`. + + Args: + mask (Tensor): ByteTensor mask. Non-zero values representing + ignored positions, while zero values means valid positions + for this image. Shape [bs, h, w]. 
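A shape-level usage sketch of `SinePositionalEncoding` above (assuming the class is in scope): an all-zero mask means every pixel is valid, and the output stacks `num_feats` y-channels on top of `num_feats` x-channels:

```python
import torch

pe = SinePositionalEncoding(num_feats=8, normalize=True)
mask = torch.zeros(1, 4, 6, dtype=torch.bool)  # zeros = valid positions
pos = pe(mask)
print(pos.shape)  # torch.Size([1, 16, 4, 6]) -- 2 * num_feats channels
```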
+ + Returns: + pos (Tensor): Returned position embedding with shape + [bs, num_feats*2, h, w]. + """ + h, w = mask.shape[-2:] + x = torch.arange(w, device=mask.device) + y = torch.arange(h, device=mask.device) + x_embed = self.col_embed(x) + y_embed = self.row_embed(y) + pos = torch.cat( + (x_embed.unsqueeze(0).repeat(h, 1, 1), y_embed.unsqueeze(1).repeat( + 1, w, 1)), + dim=-1).permute(2, 0, + 1).unsqueeze(0).repeat(mask.shape[0], 1, 1, 1) + return pos + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(num_feats={self.num_feats}, ' + repr_str += f'row_num_embed={self.row_num_embed}, ' + repr_str += f'col_num_embed={self.col_num_embed})' + return repr_str diff --git a/thirdparty/mmdetection/mmdet/models/utils/res_layer.py b/thirdparty/mmdetection/mmdet/models/utils/res_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..27902426469bcdab392db1a38627d852ae8e3029 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/res_layer.py @@ -0,0 +1,102 @@ +from mmcv.cnn import build_conv_layer, build_norm_layer +from torch import nn as nn + + +class ResLayer(nn.Sequential): + """ResLayer to build ResNet style backbone. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + downsample_first (bool): Downsample at the first block or last block. + False for Hourglass, True for ResNet. 
Default: True + """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + downsample_first=True, + **kwargs): + self.block = block + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = [] + conv_stride = stride + if avg_down: + conv_stride = 1 + downsample.append( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False)) + downsample.extend([ + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + if downsample_first: + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + inplanes = planes * block.expansion + for _ in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + + else: # downsample_first=False is for HourglassModule + for _ in range(num_blocks - 1): + layers.append( + block( + inplanes=inplanes, + planes=inplanes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + super(ResLayer, self).__init__(*layers) diff --git a/thirdparty/mmdetection/mmdet/models/utils/transformer.py b/thirdparty/mmdetection/mmdet/models/utils/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..f94b183b5b8825424e41dbd27c9b73532fac1322 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/models/utils/transformer.py @@ -0,0 +1,744 @@ +import torch +import torch.nn as nn +from mmcv.cnn import (Linear, build_activation_layer, build_norm_layer, + xavier_init) + +from .builder import TRANSFORMER + + +class MultiheadAttention(nn.Module): + """A warpper for torch.nn.MultiheadAttention. + + This module implements MultiheadAttention with residual connection, + and positional encoding used in DETR is also passed as input. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. Same as + `nn.MultiheadAttention`. + dropout (float): A Dropout layer on attn_output_weights. Default 0.0. + """ + + def __init__(self, embed_dims, num_heads, dropout=0.0): + super(MultiheadAttention, self).__init__() + assert embed_dims % num_heads == 0, 'embed_dims must be ' \ + f'divisible by num_heads. got {embed_dims} and {num_heads}.' + self.embed_dims = embed_dims + self.num_heads = num_heads + self.dropout = dropout + self.attn = nn.MultiheadAttention(embed_dims, num_heads, dropout) + self.dropout = nn.Dropout(dropout) + + def forward(self, + x, + key=None, + value=None, + residual=None, + query_pos=None, + key_pos=None, + attn_mask=None, + key_padding_mask=None): + """Forward function for `MultiheadAttention`. + + Args: + x (Tensor): The input query with shape [num_query, bs, + embed_dims]. Same in `nn.MultiheadAttention.forward`. + key (Tensor): The key tensor with shape [num_key, bs, + embed_dims]. Same in `nn.MultiheadAttention.forward`. + Default None. If None, the `query` will be used. + value (Tensor): The value tensor with same shape as `key`. + Same in `nn.MultiheadAttention.forward`. Default None. 
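`ResLayer` above only assumes its `block` class follows the ResNet constructor contract (`inplanes`, `planes`, `stride`, `downsample`, plus the cfg kwargs) and exposes an `expansion` attribute. A hedged sketch with a hypothetical minimal block (`ToyBasicBlock` is not part of mmdet):

```python
import torch
import torch.nn as nn

class ToyBasicBlock(nn.Module):
    expansion = 1  # ResLayer reads this to size the downsample projection

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 conv_cfg=None, norm_cfg=dict(type='BN'), **kwargs):
        super().__init__()  # conv_cfg/norm_cfg accepted but unused in this toy
        self.conv = nn.Conv2d(inplanes, planes, 3, stride, 1, bias=False)
        self.bn = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample

    def forward(self, x):
        identity = x if self.downsample is None else self.downsample(x)
        return self.relu(self.bn(self.conv(x)) + identity)

layer = ResLayer(ToyBasicBlock, inplanes=64, planes=128, num_blocks=2, stride=2)
out = layer(torch.randn(1, 64, 32, 32))  # -> torch.Size([1, 128, 16, 16])
```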
+ If None, the `key` will be used. + residual (Tensor): The tensor used for addition, with the + same shape as `x`. Default None. If None, `x` will be used. + query_pos (Tensor): The positional encoding for query, with + the same shape as `x`. Default None. If not None, it will + be added to `x` before forward function. + key_pos (Tensor): The positional encoding for `key`, with the + same shape as `key`. Default None. If not None, it will + be added to `key` before forward function. If None, and + `query_pos` has the same shape as `key`, then `query_pos` + will be used for `key_pos`. + attn_mask (Tensor): ByteTensor mask with shape [num_query, + num_key]. Same in `nn.MultiheadAttention.forward`. + Default None. + key_padding_mask (Tensor): ByteTensor with shape [bs, num_key]. + Same in `nn.MultiheadAttention.forward`. Default None. + + Returns: + Tensor: forwarded results with shape [num_query, bs, embed_dims]. + """ + query = x + if key is None: + key = query + if value is None: + value = key + if residual is None: + residual = x + if key_pos is None: + if query_pos is not None and key is not None: + if query_pos.shape == key.shape: + key_pos = query_pos + if query_pos is not None: + query = query + query_pos + if key_pos is not None: + key = key + key_pos + out = self.attn( + query, + key, + value=value, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask)[0] + + return residual + self.dropout(out) + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'dropout={self.dropout})' + return repr_str + + +class FFN(nn.Module): + """Implements feed-forward networks (FFNs) with residual connection. + + Args: + embed_dims (int): The feature dimension. Same as + `MultiheadAttention`. + feedforward_channels (int): The hidden dimension of FFNs. + num_fcs (int): The number of fully-connected layers in FFNs. + act_cfg (dict): The activation config for FFNs. + dropout (float): Probability of an element to be zeroed. Default 0.0. + """ + + def __init__(self, + embed_dims, + feedforward_channels, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + dropout=0.0, + add_residual=True): + super(FFN, self).__init__() + assert num_fcs >= 2, 'num_fcs should be no less ' \ + f'than 2. got {num_fcs}.' 
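The wrapper above bakes DETR's calling convention into one module: positional encodings are added to query/key right before attention, and the residual is taken from the raw input. A usage sketch, assuming the class above is in scope:

```python
import torch

attn = MultiheadAttention(embed_dims=256, num_heads=8, dropout=0.1)
x = torch.randn(100, 2, 256)          # [num_query, bs, embed_dims]
query_pos = torch.randn(100, 2, 256)  # e.g. DETR's learned query embedding
out = attn(x, query_pos=query_pos)    # self-attention: key/value default to x
print(out.shape)                      # torch.Size([100, 2, 256])
```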
+ self.embed_dims = embed_dims + self.feedforward_channels = feedforward_channels + self.num_fcs = num_fcs + self.act_cfg = act_cfg + self.dropout = dropout + self.activate = build_activation_layer(act_cfg) + + layers = nn.ModuleList() + in_channels = embed_dims + for _ in range(num_fcs - 1): + layers.append( + nn.Sequential( + Linear(in_channels, feedforward_channels), self.activate, + nn.Dropout(dropout))) + in_channels = feedforward_channels + layers.append(Linear(feedforward_channels, embed_dims)) + self.layers = nn.Sequential(*layers) + self.dropout = nn.Dropout(dropout) + self.add_residual = add_residual + + def forward(self, x, residual=None): + """Forward function for `FFN`.""" + out = self.layers(x) + if not self.add_residual: + return out + if residual is None: + residual = x + return residual + self.dropout(out) + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(embed_dims={self.embed_dims}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'num_fcs={self.num_fcs}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'add_residual={self.add_residual})' + return repr_str + + +class TransformerEncoderLayer(nn.Module): + """Implements one encoder layer in DETR transformer. + + Args: + embed_dims (int): The feature dimension. Same as `FFN`. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + dropout (float): Probability of an element to be zeroed. Default 0.0. + order (tuple[str]): The order for encoder layer. Valid examples are + ('selfattn', 'norm', 'ffn', 'norm') and ('norm', 'selfattn', + 'norm', 'ffn'). Default ('selfattn', 'norm', 'ffn', 'norm'). + act_cfg (dict): The activation config for FFNs. Defalut ReLU. + norm_cfg (dict): Config dict for normalization layer. Default + layer normalization. + num_fcs (int): The number of fully-connected layers for FFNs. + Default 2. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + dropout=0.0, + order=('selfattn', 'norm', 'ffn', 'norm'), + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2): + super(TransformerEncoderLayer, self).__init__() + assert isinstance(order, tuple) and len(order) == 4 + assert set(order) == set(['selfattn', 'norm', 'ffn']) + self.embed_dims = embed_dims + self.num_heads = num_heads + self.feedforward_channels = feedforward_channels + self.dropout = dropout + self.order = order + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + self.num_fcs = num_fcs + self.pre_norm = order[0] == 'norm' + self.self_attn = MultiheadAttention(embed_dims, num_heads, dropout) + self.ffn = FFN(embed_dims, feedforward_channels, num_fcs, act_cfg, + dropout) + self.norms = nn.ModuleList() + self.norms.append(build_norm_layer(norm_cfg, embed_dims)[1]) + self.norms.append(build_norm_layer(norm_cfg, embed_dims)[1]) + + def forward(self, x, pos=None, attn_mask=None, key_padding_mask=None): + """Forward function for `TransformerEncoderLayer`. + + Args: + x (Tensor): The input query with shape [num_key, bs, + embed_dims]. Same in `MultiheadAttention.forward`. + pos (Tensor): The positional encoding for query. Default None. + Same as `query_pos` in `MultiheadAttention.forward`. + attn_mask (Tensor): ByteTensor mask with shape [num_key, + num_key]. Same in `MultiheadAttention.forward`. Default None. + key_padding_mask (Tensor): ByteTensor with shape [bs, num_key]. 
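`FFN` above expands to `feedforward_channels`, applies activation and dropout `num_fcs - 1` times, projects back to `embed_dims`, and adds the residual by default. A quick shape check, assuming the class is in scope:

```python
import torch

ffn = FFN(embed_dims=256, feedforward_channels=2048)  # 256 -> 2048 -> 256
x = torch.randn(100, 2, 256)
print(ffn(x).shape)  # torch.Size([100, 2, 256]); residual added to output
out = ffn(x, residual=torch.zeros_like(x))  # an explicit residual also works
```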
+ Same in `MultiheadAttention.forward`. Default None. + + Returns: + Tensor: forwarded results with shape [num_key, bs, embed_dims]. + """ + norm_cnt = 0 + inp_residual = x + for layer in self.order: + if layer == 'selfattn': + # self attention + query = key = value = x + x = self.self_attn( + query, + key, + value, + inp_residual if self.pre_norm else None, + query_pos=pos, + key_pos=pos, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask) + inp_residual = x + elif layer == 'norm': + x = self.norms[norm_cnt](x) + norm_cnt += 1 + elif layer == 'ffn': + x = self.ffn(x, inp_residual if self.pre_norm else None) + return x + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'order={self.order}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'norm_cfg={self.norm_cfg}, ' + repr_str += f'num_fcs={self.num_fcs})' + return repr_str + + +class TransformerDecoderLayer(nn.Module): + """Implements one decoder layer in DETR transformer. + + Args: + embed_dims (int): The feature dimension. Same as + `TransformerEncoderLayer`. + num_heads (int): Parallel attention heads. + feedforward_channels (int): Same as `TransformerEncoderLayer`. + dropout (float): Same as `TransformerEncoderLayer`. Default 0.0. + order (tuple[str]): The order for decoder layer. Valid examples are + ('selfattn', 'norm', 'multiheadattn', 'norm', 'ffn', 'norm') and + ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn'). + Default the former. + act_cfg (dict): Same as `TransformerEncoderLayer`. Defalut ReLU. + norm_cfg (dict): Config dict for normalization layer. Default + layer normalization. + num_fcs (int): The number of fully-connected layers in FFNs. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + dropout=0.0, + order=('selfattn', 'norm', 'multiheadattn', 'norm', 'ffn', + 'norm'), + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2): + super(TransformerDecoderLayer, self).__init__() + assert isinstance(order, tuple) and len(order) == 6 + assert set(order) == set(['selfattn', 'norm', 'multiheadattn', 'ffn']) + self.embed_dims = embed_dims + self.num_heads = num_heads + self.feedforward_channels = feedforward_channels + self.dropout = dropout + self.order = order + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + self.num_fcs = num_fcs + self.pre_norm = order[0] == 'norm' + self.self_attn = MultiheadAttention(embed_dims, num_heads, dropout) + self.multihead_attn = MultiheadAttention(embed_dims, num_heads, + dropout) + self.ffn = FFN(embed_dims, feedforward_channels, num_fcs, act_cfg, + dropout) + self.norms = nn.ModuleList() + # 3 norm layers in official DETR's TransformerDecoderLayer + for _ in range(3): + self.norms.append(build_norm_layer(norm_cfg, embed_dims)[1]) + + def forward(self, + x, + memory, + memory_pos=None, + query_pos=None, + memory_attn_mask=None, + target_attn_mask=None, + memory_key_padding_mask=None, + target_key_padding_mask=None): + """Forward function for `TransformerDecoderLayer`. + + Args: + x (Tensor): Input query with shape [num_query, bs, embed_dims]. + memory (Tensor): Tensor got from `TransformerEncoder`, with shape + [num_key, bs, embed_dims]. + memory_pos (Tensor): The positional encoding for `memory`. Default + None. 
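The 4-tuple `order` above is what switches a layer between the post-norm (DETR default) and pre-norm residual arrangements; `pre_norm` is inferred from whether the tuple starts with `'norm'`. A configuration sketch (class assumed in scope):

```python
# post-norm (default): x -> selfattn -> LN -> FFN -> LN
post = TransformerEncoderLayer(256, 8, 2048,
                               order=('selfattn', 'norm', 'ffn', 'norm'))
# pre-norm: LN -> selfattn -> LN -> FFN, residuals bypassing the norms
pre = TransformerEncoderLayer(256, 8, 2048,
                              order=('norm', 'selfattn', 'norm', 'ffn'))
assert not post.pre_norm and pre.pre_norm
```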
Same as `key_pos` in `MultiheadAttention.forward`. + query_pos (Tensor): The positional encoding for `query`. Default + None. Same as `query_pos` in `MultiheadAttention.forward`. + memory_attn_mask (Tensor): ByteTensor mask for `memory`, with + shape [num_key, num_key]. Same as `attn_mask` in + `MultiheadAttention.forward`. Default None. + target_attn_mask (Tensor): ByteTensor mask for `x`, with shape + [num_query, num_query]. Same as `attn_mask` in + `MultiheadAttention.forward`. Default None. + memory_key_padding_mask (Tensor): ByteTensor for `memory`, with + shape [bs, num_key]. Same as `key_padding_mask` in + `MultiheadAttention.forward`. Default None. + target_key_padding_mask (Tensor): ByteTensor for `x`, with shape + [bs, num_query]. Same as `key_padding_mask` in + `MultiheadAttention.forward`. Default None. + + Returns: + Tensor: forwarded results with shape [num_query, bs, embed_dims]. + """ + norm_cnt = 0 + inp_residual = x + for layer in self.order: + if layer == 'selfattn': + query = key = value = x + x = self.self_attn( + query, + key, + value, + inp_residual if self.pre_norm else None, + query_pos, + key_pos=query_pos, + attn_mask=target_attn_mask, + key_padding_mask=target_key_padding_mask) + inp_residual = x + elif layer == 'norm': + x = self.norms[norm_cnt](x) + norm_cnt += 1 + elif layer == 'multiheadattn': + query = x + key = value = memory + x = self.multihead_attn( + query, + key, + value, + inp_residual if self.pre_norm else None, + query_pos, + key_pos=memory_pos, + attn_mask=memory_attn_mask, + key_padding_mask=memory_key_padding_mask) + inp_residual = x + elif layer == 'ffn': + x = self.ffn(x, inp_residual if self.pre_norm else None) + return x + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'order={self.order}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'norm_cfg={self.norm_cfg}, ' + repr_str += f'num_fcs={self.num_fcs})' + return repr_str + + +class TransformerEncoder(nn.Module): + """Implements the encoder in DETR transformer. + + Args: + num_layers (int): The number of `TransformerEncoderLayer`. + embed_dims (int): Same as `TransformerEncoderLayer`. + num_heads (int): Same as `TransformerEncoderLayer`. + feedforward_channels (int): Same as `TransformerEncoderLayer`. + dropout (float): Same as `TransformerEncoderLayer`. Default 0.0. + order (tuple[str]): Same as `TransformerEncoderLayer`. + act_cfg (dict): Same as `TransformerEncoderLayer`. Defalut ReLU. + norm_cfg (dict): Same as `TransformerEncoderLayer`. Default + layer normalization. + num_fcs (int): Same as `TransformerEncoderLayer`. Default 2. 
+ """ + + def __init__(self, + num_layers, + embed_dims, + num_heads, + feedforward_channels, + dropout=0.0, + order=('selfattn', 'norm', 'ffn', 'norm'), + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2): + super(TransformerEncoder, self).__init__() + assert isinstance(order, tuple) and len(order) == 4 + assert set(order) == set(['selfattn', 'norm', 'ffn']) + self.num_layers = num_layers + self.embed_dims = embed_dims + self.num_heads = num_heads + self.feedforward_channels = feedforward_channels + self.dropout = dropout + self.order = order + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + self.num_fcs = num_fcs + self.pre_norm = order[0] == 'norm' + self.layers = nn.ModuleList() + for _ in range(num_layers): + self.layers.append( + TransformerEncoderLayer(embed_dims, num_heads, + feedforward_channels, dropout, order, + act_cfg, norm_cfg, num_fcs)) + self.norm = build_norm_layer(norm_cfg, + embed_dims)[1] if self.pre_norm else None + + def forward(self, x, pos=None, attn_mask=None, key_padding_mask=None): + """Forward function for `TransformerEncoder`. + + Args: + x (Tensor): Input query. Same in `TransformerEncoderLayer.forward`. + pos (Tensor): Positional encoding for query. Default None. + Same in `TransformerEncoderLayer.forward`. + attn_mask (Tensor): ByteTensor attention mask. Default None. + Same in `TransformerEncoderLayer.forward`. + key_padding_mask (Tensor): Same in + `TransformerEncoderLayer.forward`. Default None. + + Returns: + Tensor: Results with shape [num_key, bs, embed_dims]. + """ + for layer in self.layers: + x = layer(x, pos, attn_mask, key_padding_mask) + if self.norm is not None: + x = self.norm(x) + return x + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(num_layers={self.num_layers}, ' + repr_str += f'embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'order={self.order}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'norm_cfg={self.norm_cfg}, ' + repr_str += f'num_fcs={self.num_fcs})' + return repr_str + + +class TransformerDecoder(nn.Module): + """Implements the decoder in DETR transformer. + + Args: + num_layers (int): The number of `TransformerDecoderLayer`. + embed_dims (int): Same as `TransformerDecoderLayer`. + num_heads (int): Same as `TransformerDecoderLayer`. + feedforward_channels (int): Same as `TransformerDecoderLayer`. + dropout (float): Same as `TransformerDecoderLayer`. Default 0.0. + order (tuple[str]): Same as `TransformerDecoderLayer`. + act_cfg (dict): Same as `TransformerDecoderLayer`. Defalut ReLU. + norm_cfg (dict): Same as `TransformerDecoderLayer`. Default + layer normalization. + num_fcs (int): Same as `TransformerDecoderLayer`. Default 2. 
+ """ + + def __init__(self, + num_layers, + embed_dims, + num_heads, + feedforward_channels, + dropout=0.0, + order=('selfattn', 'norm', 'multiheadattn', 'norm', 'ffn', + 'norm'), + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2, + return_intermediate=False): + super(TransformerDecoder, self).__init__() + assert isinstance(order, tuple) and len(order) == 6 + assert set(order) == set(['selfattn', 'norm', 'multiheadattn', 'ffn']) + self.num_layers = num_layers + self.embed_dims = embed_dims + self.num_heads = num_heads + self.feedforward_channels = feedforward_channels + self.dropout = dropout + self.order = order + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + self.num_fcs = num_fcs + self.return_intermediate = return_intermediate + self.layers = nn.ModuleList() + for _ in range(num_layers): + self.layers.append( + TransformerDecoderLayer(embed_dims, num_heads, + feedforward_channels, dropout, order, + act_cfg, norm_cfg, num_fcs)) + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + + def forward(self, + x, + memory, + memory_pos=None, + query_pos=None, + memory_attn_mask=None, + target_attn_mask=None, + memory_key_padding_mask=None, + target_key_padding_mask=None): + """Forward function for `TransformerDecoder`. + + Args: + x (Tensor): Input query. Same in `TransformerDecoderLayer.forward`. + memory (Tensor): Same in `TransformerDecoderLayer.forward`. + memory_pos (Tensor): Same in `TransformerDecoderLayer.forward`. + Default None. + query_pos (Tensor): Same in `TransformerDecoderLayer.forward`. + Default None. + memory_attn_mask (Tensor): Same in + `TransformerDecoderLayer.forward`. Default None. + target_attn_mask (Tensor): Same in + `TransformerDecoderLayer.forward`. Default None. + memory_key_padding_mask (Tensor): Same in + `TransformerDecoderLayer.forward`. Default None. + target_key_padding_mask (Tensor): Same in + `TransformerDecoderLayer.forward`. Default None. + + Returns: + Tensor: Results with shape [num_query, bs, embed_dims]. + """ + intermediate = [] + for layer in self.layers: + x = layer(x, memory, memory_pos, query_pos, memory_attn_mask, + target_attn_mask, memory_key_padding_mask, + target_key_padding_mask) + if self.return_intermediate: + intermediate.append(self.norm(x)) + if self.norm is not None: + x = self.norm(x) + if self.return_intermediate: + intermediate.pop() + intermediate.append(x) + if self.return_intermediate: + return torch.stack(intermediate) + return x.unsqueeze(0) + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(num_layers={self.num_layers}, ' + repr_str += f'embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'order={self.order}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'norm_cfg={self.norm_cfg}, ' + repr_str += f'num_fcs={self.num_fcs}, ' + repr_str += f'return_intermediate={self.return_intermediate})' + return repr_str + + +@TRANSFORMER.register_module() +class Transformer(nn.Module): + """Implements the DETR transformer. 
+ + Following the official DETR implementation, this module copy-paste + from torch.nn.Transformer with modifications: + + * positional encodings are passed in MultiheadAttention + * extra LN at the end of encoder is removed + * decoder returns a stack of activations from all decoding layers + + See `paper: End-to-End Object Detection with Transformers + `_ for details. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. Same as + `nn.MultiheadAttention`. + num_encoder_layers (int): Number of `TransformerEncoderLayer`. + num_decoder_layers (int): Number of `TransformerDecoderLayer`. + feedforward_channels (int): The hidden dimension for FFNs used in both + encoder and decoder. + dropout (float): Probability of an element to be zeroed. Default 0.0. + act_cfg (dict): Activation config for FFNs used in both encoder + and decoder. Defalut ReLU. + norm_cfg (dict): Config dict for normalization used in both encoder + and decoder. Default layer normalization. + num_fcs (int): The number of fully-connected layers in FFNs, which is + used for both encoder and decoder. + pre_norm (bool): Whether the normalization layer is ordered + first in the encoder and decoder. Default False. + return_intermediate_dec (bool): Whether to return the intermediate + output from each TransformerDecoderLayer or only the last + TransformerDecoderLayer. Default False. If False, the returned + `hs` has shape [num_decoder_layers, bs, num_query, embed_dims]. + If True, the returned `hs` will have shape [1, bs, num_query, + embed_dims]. + """ + + def __init__(self, + embed_dims=512, + num_heads=8, + num_encoder_layers=6, + num_decoder_layers=6, + feedforward_channels=2048, + dropout=0.0, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + num_fcs=2, + pre_norm=False, + return_intermediate_dec=False): + super(Transformer, self).__init__() + self.embed_dims = embed_dims + self.num_heads = num_heads + self.num_encoder_layers = num_encoder_layers + self.num_decoder_layers = num_decoder_layers + self.feedforward_channels = feedforward_channels + self.dropout = dropout + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + self.num_fcs = num_fcs + self.pre_norm = pre_norm + self.return_intermediate_dec = return_intermediate_dec + if self.pre_norm: + encoder_order = ('norm', 'selfattn', 'norm', 'ffn') + decoder_order = ('norm', 'selfattn', 'norm', 'multiheadattn', + 'norm', 'ffn') + else: + encoder_order = ('selfattn', 'norm', 'ffn', 'norm') + decoder_order = ('selfattn', 'norm', 'multiheadattn', 'norm', + 'ffn', 'norm') + self.encoder = TransformerEncoder(num_encoder_layers, embed_dims, + num_heads, feedforward_channels, + dropout, encoder_order, act_cfg, + norm_cfg, num_fcs) + self.decoder = TransformerDecoder(num_decoder_layers, embed_dims, + num_heads, feedforward_channels, + dropout, decoder_order, act_cfg, + norm_cfg, num_fcs, + return_intermediate_dec) + + def init_weights(self, distribution='uniform'): + """Initialize the transformer weights.""" + # follow the official DETR to init parameters + for m in self.modules(): + if hasattr(m, 'weight') and m.weight.dim() > 1: + xavier_init(m, distribution=distribution) + + def forward(self, x, mask, query_embed, pos_embed): + """Forward function for `Transformer`. + + Args: + x (Tensor): Input query with shape [bs, c, h, w] where + c = embed_dims. + mask (Tensor): The key_padding_mask used for encoder and decoder, + with shape [bs, h, w]. 
+ query_embed (Tensor): The query embedding for decoder, with shape + [num_query, c]. + pos_embed (Tensor): The positional encoding for encoder and + decoder, with the same shape as `x`. + + Returns: + tuple[Tensor]: results of decoder containing the following tensor. + + - out_dec: Output from decoder. If return_intermediate_dec \ + is True output has shape [num_dec_layers, bs, + num_query, embed_dims], else has shape [1, bs, \ + num_query, embed_dims]. + - memory: Output results from encoder, with shape \ + [bs, embed_dims, h, w]. + """ + bs, c, h, w = x.shape + x = x.flatten(2).permute(2, 0, 1) # [bs, c, h, w] -> [h*w, bs, c] + pos_embed = pos_embed.flatten(2).permute(2, 0, 1) + query_embed = query_embed.unsqueeze(1).repeat( + 1, bs, 1) # [num_query, dim] -> [num_query, bs, dim] + mask = mask.flatten(1) # [bs, h, w] -> [bs, h*w] + memory = self.encoder( + x, pos=pos_embed, attn_mask=None, key_padding_mask=mask) + target = torch.zeros_like(query_embed) + # out_dec: [num_layers, num_query, bs, dim] + out_dec = self.decoder( + target, + memory, + memory_pos=pos_embed, + query_pos=query_embed, + memory_attn_mask=None, + target_attn_mask=None, + memory_key_padding_mask=mask, + target_key_padding_mask=None) + out_dec = out_dec.transpose(1, 2) + memory = memory.permute(1, 2, 0).reshape(bs, c, h, w) + return out_dec, memory + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(embed_dims={self.embed_dims}, ' + repr_str += f'num_heads={self.num_heads}, ' + repr_str += f'num_encoder_layers={self.num_encoder_layers}, ' + repr_str += f'num_decoder_layers={self.num_decoder_layers}, ' + repr_str += f'feedforward_channels={self.feedforward_channels}, ' + repr_str += f'dropout={self.dropout}, ' + repr_str += f'act_cfg={self.act_cfg}, ' + repr_str += f'norm_cfg={self.norm_cfg}, ' + repr_str += f'num_fcs={self.num_fcs}, ' + repr_str += f'pre_norm={self.pre_norm}, ' + repr_str += f'return_intermediate_dec={self.return_intermediate_dec})' + return repr_str diff --git a/thirdparty/mmdetection/mmdet/ops/__init__.py b/thirdparty/mmdetection/mmdet/ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ae6a83687d155ba755fc7a1880181e668b18e54d --- /dev/null +++ b/thirdparty/mmdetection/mmdet/ops/__init__.py @@ -0,0 +1,32 @@ +# This file is added for back-compatibility. Thus, downstream codebase +# could still use and import mmdet.ops. 
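An end-to-end shape walkthrough for `Transformer.forward` above, using DETR-style sizes (256-dim features, 100 queries) as assumed example values; the module flattens the [bs, c, h, w] feature map into [h*w, bs, c] sequence form for the encoder:

```python
import torch

transformer = Transformer(
    embed_dims=256, num_heads=8, feedforward_channels=2048,
    dropout=0.1, return_intermediate_dec=True)  # DETR-like, not the defaults

bs, c, h, w = 2, 256, 25, 34
x = torch.randn(bs, c, h, w)                    # backbone feature map
mask = torch.zeros(bs, h, w, dtype=torch.bool)  # no padded pixels
query_embed = torch.randn(100, c)               # 100 object queries
pos_embed = torch.randn(bs, c, h, w)            # e.g. SinePositionalEncoding(128)(mask)
out_dec, memory = transformer(x, mask, query_embed, pos_embed)
print(out_dec.shape)  # [6, 2, 100, 256] -- one slice per decoder layer
print(memory.shape)   # [2, 256, 25, 34]
```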
+ +# yapf: disable +from mmcv.ops import (ContextBlock, Conv2d, ConvTranspose2d, ConvWS2d, + CornerPool, DeformConv, DeformConvPack, DeformRoIPooling, + DeformRoIPoolingPack, GeneralizedAttention, Linear, + MaskedConv2d, MaxPool2d, ModulatedDeformConv, + ModulatedDeformConvPack, ModulatedDeformRoIPoolingPack, + NonLocal2D, RoIAlign, RoIPool, SAConv2d, + SigmoidFocalLoss, SimpleRoIAlign, batched_nms, + build_plugin_layer, conv_ws_2d, deform_conv, + deform_roi_pooling, get_compiler_version, + get_compiling_cuda_version, modulated_deform_conv, nms, + nms_match, point_sample, rel_roi_point_to_rel_img_point, + roi_align, roi_pool, sigmoid_focal_loss, soft_nms) + +# yapf: enable + +__all__ = [ + 'nms', 'soft_nms', 'RoIAlign', 'roi_align', 'RoIPool', 'roi_pool', + 'DeformConv', 'DeformConvPack', 'DeformRoIPooling', 'DeformRoIPoolingPack', + 'ModulatedDeformRoIPoolingPack', 'ModulatedDeformConv', + 'ModulatedDeformConvPack', 'deform_conv', 'modulated_deform_conv', + 'deform_roi_pooling', 'SigmoidFocalLoss', 'sigmoid_focal_loss', + 'MaskedConv2d', 'ContextBlock', 'GeneralizedAttention', 'NonLocal2D', + 'get_compiler_version', 'get_compiling_cuda_version', 'ConvWS2d', + 'conv_ws_2d', 'build_plugin_layer', 'batched_nms', 'Conv2d', + 'ConvTranspose2d', 'MaxPool2d', 'Linear', 'nms_match', 'CornerPool', + 'point_sample', 'rel_roi_point_to_rel_img_point', 'SimpleRoIAlign', + 'SAConv2d' +] diff --git a/thirdparty/mmdetection/mmdet/utils/__init__.py b/thirdparty/mmdetection/mmdet/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ac489e2dbbc0e6fa87f5088b4edcc20f8cadc1a6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/__init__.py @@ -0,0 +1,4 @@ +from .collect_env import collect_env +from .logger import get_root_logger + +__all__ = ['get_root_logger', 'collect_env'] diff --git a/thirdparty/mmdetection/mmdet/utils/collect_env.py b/thirdparty/mmdetection/mmdet/utils/collect_env.py new file mode 100644 index 0000000000000000000000000000000000000000..89c064accdb10abec4a03de04f601d27aab2da70 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/collect_env.py @@ -0,0 +1,16 @@ +from mmcv.utils import collect_env as collect_base_env +from mmcv.utils import get_git_hash + +import mmdet + + +def collect_env(): + """Collect the information of the running environments.""" + env_info = collect_base_env() + env_info['MMDetection'] = mmdet.__version__ + '+' + get_git_hash()[:7] + return env_info + + +if __name__ == '__main__': + for name, val in collect_env().items(): + print(f'{name}: {val}') diff --git a/thirdparty/mmdetection/mmdet/utils/contextmanagers.py b/thirdparty/mmdetection/mmdet/utils/contextmanagers.py new file mode 100644 index 0000000000000000000000000000000000000000..38a639262d949b5754dedf12f33fa814b030ea38 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/contextmanagers.py @@ -0,0 +1,121 @@ +import asyncio +import contextlib +import logging +import os +import time +from typing import List + +import torch + +logger = logging.getLogger(__name__) + +DEBUG_COMPLETED_TIME = bool(os.environ.get('DEBUG_COMPLETED_TIME', False)) + + +@contextlib.asynccontextmanager +async def completed(trace_name='', + name='', + sleep_interval=0.05, + streams: List[torch.cuda.Stream] = None): + """Async context manager that waits for work to complete on given CUDA + streams.""" + if not torch.cuda.is_available(): + yield + return + + stream_before_context_switch = torch.cuda.current_stream() + if not streams: + streams = [stream_before_context_switch] + else: + streams = [s if s 
else stream_before_context_switch for s in streams] + + end_events = [ + torch.cuda.Event(enable_timing=DEBUG_COMPLETED_TIME) for _ in streams + ] + + if DEBUG_COMPLETED_TIME: + start = torch.cuda.Event(enable_timing=True) + stream_before_context_switch.record_event(start) + + cpu_start = time.monotonic() + logger.debug('%s %s starting, streams: %s', trace_name, name, streams) + grad_enabled_before = torch.is_grad_enabled() + try: + yield + finally: + current_stream = torch.cuda.current_stream() + assert current_stream == stream_before_context_switch + + if DEBUG_COMPLETED_TIME: + cpu_end = time.monotonic() + for i, stream in enumerate(streams): + event = end_events[i] + stream.record_event(event) + + grad_enabled_after = torch.is_grad_enabled() + + # observed change of torch.is_grad_enabled() during concurrent run of + # async_test_bboxes code + assert (grad_enabled_before == grad_enabled_after + ), 'Unexpected is_grad_enabled() value change' + + are_done = [e.query() for e in end_events] + logger.debug('%s %s completed: %s streams: %s', trace_name, name, + are_done, streams) + with torch.cuda.stream(stream_before_context_switch): + while not all(are_done): + await asyncio.sleep(sleep_interval) + are_done = [e.query() for e in end_events] + logger.debug( + '%s %s completed: %s streams: %s', + trace_name, + name, + are_done, + streams, + ) + + current_stream = torch.cuda.current_stream() + assert current_stream == stream_before_context_switch + + if DEBUG_COMPLETED_TIME: + cpu_time = (cpu_end - cpu_start) * 1000 + stream_times_ms = '' + for i, stream in enumerate(streams): + elapsed_time = start.elapsed_time(end_events[i]) + stream_times_ms += f' {stream} {elapsed_time:.2f} ms' + logger.info('%s %s %.2f ms %s', trace_name, name, cpu_time, + stream_times_ms) + + +@contextlib.asynccontextmanager +async def concurrent(streamqueue: asyncio.Queue, + trace_name='concurrent', + name='stream'): + """Run code concurrently in different streams. + + :param streamqueue: asyncio.Queue instance. + + Queue tasks define the pool of streams used for concurrent execution. + """ + if not torch.cuda.is_available(): + yield + return + + initial_stream = torch.cuda.current_stream() + + with torch.cuda.stream(initial_stream): + stream = await streamqueue.get() + assert isinstance(stream, torch.cuda.Stream) + + try: + with torch.cuda.stream(stream): + logger.debug('%s %s is starting, stream: %s', trace_name, name, + stream) + yield + current = torch.cuda.current_stream() + assert current == stream + logger.debug('%s %s has finished, stream: %s', trace_name, + name, stream) + finally: + streamqueue.task_done() + streamqueue.put_nowait(stream) diff --git a/thirdparty/mmdetection/mmdet/utils/logger.py b/thirdparty/mmdetection/mmdet/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..6fc6e6b438a73e857ba6f173594985807cb88b30 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/logger.py @@ -0,0 +1,19 @@ +import logging + +from mmcv.utils import get_logger + + +def get_root_logger(log_file=None, log_level=logging.INFO): + """Get root logger. + + Args: + log_file (str, optional): File path of log. Defaults to None. + log_level (int, optional): The level of logger. + Defaults to logging.INFO. 
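`concurrent` above hands each task a CUDA stream from a shared pool and returns it afterwards, while `completed` polls the recorded events without blocking the event loop. A minimal asyncio usage sketch (requires a CUDA device; `run_model` is a hypothetical coroutine standing in for a detector forward pass):

```python
import asyncio
import torch

async def run_model(streamqueue):
    # body executes on whichever pooled stream is free
    async with concurrent(streamqueue):
        a = torch.randn(1024, 1024, device='cuda')
        a.mm(a)  # placeholder for the model forward

async def main():
    streamqueue = asyncio.Queue()
    for _ in range(2):  # pool of two CUDA streams
        streamqueue.put_nowait(torch.cuda.Stream())
    await asyncio.gather(*(run_model(streamqueue) for _ in range(4)))

asyncio.run(main())
```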
+ + Returns: + :obj:`logging.Logger`: The obtained logger + """ + logger = get_logger(name='mmdet', log_file=log_file, log_level=log_level) + + return logger
diff --git a/thirdparty/mmdetection/mmdet/utils/profiling.py b/thirdparty/mmdetection/mmdet/utils/profiling.py new file mode 100644 index 0000000000000000000000000000000000000000..4be9222c37e922329d537f883f5587995e27efc6 --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/profiling.py @@ -0,0 +1,39 @@
+import contextlib +import sys +import time + +import torch + +if sys.version_info >= (3, 7): + + @contextlib.contextmanager + def profile_time(trace_name, + name, + enabled=True, + stream=None, + end_stream=None): + """Print time spent by CPU and GPU. + + Useful as a temporary context manager to find sweet spots of code + suitable for async implementation. + """ + if (not enabled) or not torch.cuda.is_available(): + yield + return + stream = stream if stream else torch.cuda.current_stream() + end_stream = end_stream if end_stream else stream + start = torch.cuda.Event(enable_timing=True) + end = torch.cuda.Event(enable_timing=True) + stream.record_event(start) + try: + cpu_start = time.monotonic() + yield + finally: + cpu_end = time.monotonic() + end_stream.record_event(end) + end.synchronize() + cpu_time = (cpu_end - cpu_start) * 1000 + gpu_time = start.elapsed_time(end) + msg = f'{trace_name} {name} cpu_time {cpu_time:.2f} ms ' + msg += f'gpu_time {gpu_time:.2f} ms stream {stream}' + print(msg, end_stream)
diff --git a/thirdparty/mmdetection/mmdet/utils/util_mixins.py b/thirdparty/mmdetection/mmdet/utils/util_mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..69669a3ca943eebe0f138b2784c5b61724196bbe --- /dev/null +++ b/thirdparty/mmdetection/mmdet/utils/util_mixins.py @@ -0,0 +1,104 @@
+"""This module defines the :class:`NiceRepr` mixin class, which defines a +``__repr__`` and ``__str__`` method that only depend on a custom ``__nice__`` +method, which you must define. This means you only have to overload one +function instead of two. Furthermore, if the object defines a ``__len__`` +method, then the ``__nice__`` method defaults to something sensible, otherwise +it is treated as abstract and raises ``NotImplementedError``. + +To use simply have your object inherit from :class:`NiceRepr` +(multi-inheritance should be ok). + +This code was copied from the ubelt library: https://github.com/Erotemic/ubelt + +Example: + >>> # Objects that define __nice__ have a default __str__ and __repr__ + >>> class Student(NiceRepr): + ... def __init__(self, name): + ... self.name = name + ... def __nice__(self): + ... return self.name + >>> s1 = Student('Alice') + >>> s2 = Student('Bob') + >>> print(f's1 = {s1}') + >>> print(f's2 = {s2}') + s1 = <Student(Alice)> + s2 = <Student(Bob)> + +Example: + >>> # Objects that define __len__ have a default __nice__ + >>> class Group(NiceRepr): + ... def __init__(self, data): + ... self.data = data + ... def __len__(self): + ... return len(self.data) + >>> g = Group([1, 2, 3]) + >>> print(f'g = {g}') + g = <Group(3)> +""" +import warnings + + +class NiceRepr(object): + """Inherit from this class and define ``__nice__`` to "nicely" print your + objects. + + Defines ``__str__`` and ``__repr__`` in terms of the ``__nice__`` function. + Classes that inherit from :class:`NiceRepr` should redefine ``__nice__``. + If the inheriting class has a ``__len__`` method, then the default + ``__nice__`` method will return its length. + + Example: + >>> class Foo(NiceRepr): + ... def __nice__(self): + ...
return 'info' + >>> foo = Foo() + >>> assert str(foo) == '<Foo(info)>' + >>> assert repr(foo).startswith('<Foo(info) at ') + + Example: + >>> class Bar(NiceRepr): + ... pass + >>> bar = Bar() + >>> import pytest + >>> with pytest.warns(None) as record: + >>> assert 'object at' in str(bar) + >>> assert 'object at' in repr(bar) + + Example: + >>> class Baz(NiceRepr): + ... def __len__(self): + ... return 5 + >>> baz = Baz() + >>> assert str(baz) == '<Baz(5)>' + """ + + def __nice__(self): + """str: a "nice" summary string describing this module""" + if hasattr(self, '__len__'): + # It is a common pattern for objects to use __len__ in __nice__ + # As a convenience we define a default __nice__ for these objects + return str(len(self)) + else: + # In all other cases force the subclass to overload __nice__ + raise NotImplementedError( + f'Define the __nice__ method for {self.__class__!r}') + + def __repr__(self): + """str: the string of the module""" + try: + nice = self.__nice__() + classname = self.__class__.__name__ + return f'<{classname}({nice}) at {hex(id(self))}>' + except NotImplementedError as ex: + warnings.warn(str(ex), category=RuntimeWarning) + return object.__repr__(self) + + def __str__(self): + """str: the string of the module""" + try: + classname = self.__class__.__name__ + nice = self.__nice__() + return f'<{classname}({nice})>' + except NotImplementedError as ex: + warnings.warn(str(ex), category=RuntimeWarning) + return object.__repr__(self)
diff --git a/thirdparty/mmdetection/mmdet/version.py b/thirdparty/mmdetection/mmdet/version.py new file mode 100644 index 0000000000000000000000000000000000000000..753ab3c2970cd2c39eaea21464781b892d4a39af --- /dev/null +++ b/thirdparty/mmdetection/mmdet/version.py @@ -0,0 +1,19 @@
+# Copyright (c) Open-MMLab. All rights reserved. + +__version__ = '2.7.0' +short_version = __version__ + + +def parse_version_info(version_str): + version_info = [] + for x in version_str.split('.'): + if x.isdigit(): + version_info.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + version_info.append(int(patch_version[0])) + version_info.append(f'rc{patch_version[1]}') + return tuple(version_info) + + +version_info = parse_version_info(__version__)
diff --git a/thirdparty/mmdetection/pytest.ini b/thirdparty/mmdetection/pytest.ini new file mode 100644 index 0000000000000000000000000000000000000000..9796e871e70c7c67345b1d6bcf708c0c82377a98 --- /dev/null +++ b/thirdparty/mmdetection/pytest.ini @@ -0,0 +1,7 @@
+[pytest] +addopts = --xdoctest --xdoctest-style=auto +norecursedirs = .git ignore build __pycache__ data docker docs .eggs + +filterwarnings= default + ignore:.*No cfgstr given in Cacher constructor or call.*:Warning + ignore:.*Define the __nice__ method for.*:Warning
diff --git a/thirdparty/mmdetection/resources/coco_test_12510.jpg b/thirdparty/mmdetection/resources/coco_test_12510.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f309755ccbde786c54618fb6b44916c988ef727c --- /dev/null +++ b/thirdparty/mmdetection/resources/coco_test_12510.jpg @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1 +oid sha256:05cff8452af9b6807617273756f0c36518050780c4ba239980a4f06ad4191ee2 +size 183096
diff --git a/thirdparty/mmdetection/resources/corruptions_sev_3.png b/thirdparty/mmdetection/resources/corruptions_sev_3.png new file mode 100644 index 0000000000000000000000000000000000000000..02175f6852830659f6e6e7ea5e2911c152d354d5 --- /dev/null +++ b/thirdparty/mmdetection/resources/corruptions_sev_3.png @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1 +oid 
sha256:c5dad12c12637147736fc4b2b1ac2d30d13588867b5da4f88bcd092a4e900c26 +size 1401893 diff --git a/thirdparty/mmdetection/resources/data_pipeline.png b/thirdparty/mmdetection/resources/data_pipeline.png new file mode 100644 index 0000000000000000000000000000000000000000..eb25915047d63e0d568982ca3f51ff97000c1f7d --- /dev/null +++ b/thirdparty/mmdetection/resources/data_pipeline.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e601859a72651cd534be9b9c5eb9854f65624fb78af3c803d979194f06f01472 +size 84111 diff --git a/thirdparty/mmdetection/resources/loss_curve.png b/thirdparty/mmdetection/resources/loss_curve.png new file mode 100644 index 0000000000000000000000000000000000000000..c4bd5cc8d93d2fcc9ab5a010eb13a623b7d01a1a --- /dev/null +++ b/thirdparty/mmdetection/resources/loss_curve.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:158c747c24a7e44305d545fd88bef97d0598741d99fd12948055145a20ab3c50 +size 37484 diff --git a/thirdparty/mmdetection/resources/mmdet-logo.png b/thirdparty/mmdetection/resources/mmdet-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..032553b434551aec07b84072be36954efdd3029a --- /dev/null +++ b/thirdparty/mmdetection/resources/mmdet-logo.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3699d25dc4ef12c09d07ca665f2eed9aafaee0a43a20f4789b4f8c0054e3a9ae +size 32836 diff --git a/thirdparty/mmdetection/setup.cfg b/thirdparty/mmdetection/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..873406e8f19ce243d5e010fcf34fbd6b43fced75 --- /dev/null +++ b/thirdparty/mmdetection/setup.cfg @@ -0,0 +1,13 @@ +[isort] +line_length = 79 +multi_line_output = 0 +known_standard_library = setuptools +known_first_party = mmdet +known_third_party = PIL,asynctest,cityscapesscripts,cv2,matplotlib,mmcv,numpy,onnx,onnxruntime,pycocotools,pytest,robustness_eval,scipy,seaborn,six,terminaltables,torch +no_lines_before = STDLIB,LOCALFOLDER +default_section = THIRDPARTY + +[yapf] +BASED_ON_STYLE = pep8 +BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF = true +SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN = true diff --git a/thirdparty/mmdetection/setup.py b/thirdparty/mmdetection/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..55eea6ba642e49e9a8e71e20aaecae2100542c3e --- /dev/null +++ b/thirdparty/mmdetection/setup.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python +import os +from setuptools import find_packages, setup + +import torch +from torch.utils.cpp_extension import (BuildExtension, CppExtension, + CUDAExtension) + + +def readme(): + with open('README.md', encoding='utf-8') as f: + content = f.read() + return content + + +version_file = 'mmdet/version.py' + + +def get_version(): + with open(version_file, 'r') as f: + exec(compile(f.read(), version_file, 'exec')) + return locals()['__version__'] + + +def make_cuda_ext(name, module, sources, sources_cuda=[]): + + define_macros = [] + extra_compile_args = {'cxx': []} + + if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1': + define_macros += [('WITH_CUDA', None)] + extension = CUDAExtension + extra_compile_args['nvcc'] = [ + '-D__CUDA_NO_HALF_OPERATORS__', + '-D__CUDA_NO_HALF_CONVERSIONS__', + '-D__CUDA_NO_HALF2_OPERATORS__', + ] + sources += sources_cuda + else: + print(f'Compiling {name} without CUDA') + extension = CppExtension + + return extension( + name=f'{module}.{name}', + sources=[os.path.join(*module.split('.'), p) for p in sources], + define_macros=define_macros, + 
extra_compile_args=extra_compile_args) + + +def parse_requirements(fname='requirements.txt', with_version=True): + """Parse the package dependencies listed in a requirements file, keeping + or stripping version specifiers according to ``with_version``. + + Args: + fname (str): path to requirements file + with_version (bool, default=True): if True include version specs + + Returns: + List[str]: list of requirements items + + CommandLine: + python -c "import setup; print(setup.parse_requirements())" + """ + import sys + from os.path import exists + import re + require_fpath = fname + + def parse_line(line): + """Parse information from a line in a requirements text file.""" + if line.startswith('-r '): + # Allow specifying requirements in other files + target = line.split(' ')[1] + for info in parse_require_file(target): + yield info + else: + info = {'line': line} + if line.startswith('-e '): + info['package'] = line.split('#egg=')[1] + elif '@git+' in line: + info['package'] = line + else: + # Remove versioning from the package + pat = '(' + '|'.join(['>=', '==', '>']) + ')' + parts = re.split(pat, line, maxsplit=1) + parts = [p.strip() for p in parts] + + info['package'] = parts[0] + if len(parts) > 1: + op, rest = parts[1:] + if ';' in rest: + # Handle platform specific dependencies + # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies + version, platform_deps = map(str.strip, + rest.split(';')) + info['platform_deps'] = platform_deps + else: + version = rest # NOQA + info['version'] = (op, version) + yield info + + def parse_require_file(fpath): + with open(fpath, 'r') as f: + for line in f.readlines(): + line = line.strip() + if line and not line.startswith('#'): + for info in parse_line(line): + yield info + + def gen_packages_items(): + if exists(require_fpath): + for info in parse_require_file(require_fpath): + parts = [info['package']] + if with_version and 'version' in info: + parts.extend(info['version']) + if not sys.version.startswith('3.4'): + # apparently platform_deps are broken in 3.4 + platform_deps = info.get('platform_deps') + if platform_deps is not None: + parts.append(';' + platform_deps) + item = ''.join(parts) + yield item + + packages = list(gen_packages_items()) + return packages + + +if __name__ == '__main__': + setup( + name='mmdet', + version=get_version(), + description='OpenMMLab Detection Toolbox and Benchmark', + long_description=readme(), + long_description_content_type='text/markdown', + author='OpenMMLab', + author_email='openmmlab@gmail.com', + keywords='computer vision, object detection', + url='https://github.com/open-mmlab/mmdetection', + packages=find_packages(exclude=('configs', 'tools', 'demo')), + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + ], + license='Apache License 2.0', + setup_requires=parse_requirements('requirements/build.txt'), + tests_require=parse_requirements('requirements/tests.txt'), + install_requires=parse_requirements('requirements/runtime.txt'), + extras_require={ + 'all': parse_requirements('requirements.txt'), + 'tests': parse_requirements('requirements/tests.txt'), + 'build': parse_requirements('requirements/build.txt'), + 'optional': parse_requirements('requirements/optional.txt'), + }, + ext_modules=[],
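
The version-stripping branch of `parse_requirements` above is a one-line regex split. A self-contained mirror of that branch, with illustrative requirement strings (not the actual contents of mmdet's requirements files):

```python
import re

def strip_version(line):
    # Mirrors the "Remove versioning from the package" branch above.
    pat = '(' + '|'.join(['>=', '==', '>']) + ')'
    parts = re.split(pat, line, maxsplit=1)
    return parts[0].strip()

assert strip_version('torch>=1.3') == 'torch'
assert strip_version('mmcv==1.1.5') == 'mmcv'
assert strip_version('numpy') == 'numpy'  # no specifier: passed through
```

With `with_version=True` (the actual default) the specifier is re-attached when the item is emitted, so `'torch>=1.3'` survives intact; `with_version=False` yields just `'torch'`.
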
cmdclass={'build_ext': BuildExtension}, + zip_safe=False) diff --git a/thirdparty/mmdetection/tests/async_benchmark.py b/thirdparty/mmdetection/tests/async_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..c1a661d81d72751bf728003d14e47ced1464f64f --- /dev/null +++ b/thirdparty/mmdetection/tests/async_benchmark.py @@ -0,0 +1,100 @@ +import asyncio +import os +import shutil +import urllib + +import mmcv +import torch + +from mmdet.apis import (async_inference_detector, inference_detector, + init_detector, show_result) +from mmdet.utils.contextmanagers import concurrent +from mmdet.utils.profiling import profile_time + + +async def main(): + """Benchmark between async and synchronous inference interfaces. + + Sample runs for 20 demo images on K80 GPU, model - mask_rcnn_r50_fpn_1x: + + async sync + + 7981.79 ms 9660.82 ms + 8074.52 ms 9660.94 ms + 7976.44 ms 9406.83 ms + + Async variant takes about 0.83-0.85 of the time of the synchronous + interface. + """ + project_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + + config_file = os.path.join(project_dir, + 'configs/mask_rcnn_r50_fpn_1x_coco.py') + checkpoint_file = os.path.join( + project_dir, 'checkpoints/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth') + + if not os.path.exists(checkpoint_file): + url = ('https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection' + '/models/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth') + print(f'Downloading {url} ...') + local_filename, _ = urllib.request.urlretrieve(url) + os.makedirs(os.path.dirname(checkpoint_file), exist_ok=True) + shutil.move(local_filename, checkpoint_file) + print(f'Saved as {checkpoint_file}') + else: + print(f'Using existing checkpoint {checkpoint_file}') + + device = 'cuda:0' + model = init_detector( + config_file, checkpoint=checkpoint_file, device=device) + + # queue is used for concurrent inference of multiple images + streamqueue = asyncio.Queue() + # queue size defines concurrency level + streamqueue_size = 4 + + for _ in range(streamqueue_size): + streamqueue.put_nowait(torch.cuda.Stream(device=device)) + + # test a single image and show the results + img = mmcv.imread(os.path.join(project_dir, 'demo/demo.jpg')) + + # warmup + await async_inference_detector(model, img) + + async def detect(img): + async with concurrent(streamqueue): + return await async_inference_detector(model, img) + + num_of_images = 20 + with profile_time('benchmark', 'async'): + tasks = [ + asyncio.create_task(detect(img)) for _ in range(num_of_images) + ] + async_results = await asyncio.gather(*tasks) + + with torch.cuda.stream(torch.cuda.default_stream()): + with profile_time('benchmark', 'sync'): + sync_results = [ + inference_detector(model, img) for _ in range(num_of_images) + ] + + result_dir = os.path.join(project_dir, 'demo') + show_result( + img, + async_results[0], + model.CLASSES, + score_thr=0.5, + show=False, + out_file=os.path.join(result_dir, 'result_async.jpg')) + show_result( + img, + sync_results[0], + model.CLASSES, + score_thr=0.5, + show=False, + out_file=os.path.join(result_dir, 'result_sync.jpg')) + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/thirdparty/mmdetection/tests/data/coco_sample.json b/thirdparty/mmdetection/tests/data/coco_sample.json new file mode 100644 index 0000000000000000000000000000000000000000..b66cdf309e3e2ab4bc0a0ab5bfe19560c69caa63 --- /dev/null +++ b/thirdparty/mmdetection/tests/data/coco_sample.json @@ -0,0 +1,77 @@ +{ + "images": [ + { + "file_name": "fake1.jpg", + "height": 800, + 
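
The benchmark above caps concurrency by handing out CUDA streams from an `asyncio.Queue` via `mmdet.utils.contextmanagers.concurrent`, whose implementation is not part of this diff. A minimal sketch of the pattern it implements, under the assumption that the helper simply borrows and returns queue items (this is not mmdet's actual code):

```python
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def concurrent_sketch(streamqueue: asyncio.Queue):
    # Borrow a stream; at most queue-size coroutines can hold one at a
    # time, so the rest of the detect() tasks await their turn here.
    stream = await streamqueue.get()
    try:
        yield stream  # the real helper presumably also makes the stream current
    finally:
        # Return the stream so a waiting coroutine can proceed.
        streamqueue.put_nowait(stream)
```
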
"width": 800, + "id": 0 + }, + { + "file_name": "fake2.jpg", + "height": 800, + "width": 800, + "id": 1 + }, + { + "file_name": "fake3.jpg", + "height": 800, + "width": 800, + "id": 2 + } + ], + "annotations": [ + { + "bbox": [ + 0, + 0, + 20, + 20 + ], + "area": 400.00, + "score": 1.0, + "category_id": 1, + "id": 1, + "image_id": 0 + }, + { + "bbox": [ + 0, + 0, + 20, + 20 + ], + "area": 400.00, + "score": 1.0, + "category_id": 2, + "id": 2, + "image_id": 0 + }, + { + "bbox": [ + 0, + 0, + 20, + 20 + ], + "area": 400.00, + "score": 1.0, + "category_id": 1, + "id": 3, + "image_id": 1 + } + ], + "categories": [ + { + "id": 1, + "name": "bus", + "supercategory": "none" + }, + { + "id": 2, + "name": "car", + "supercategory": "none" + } + ], + "licenses": [], + "info": null +} diff --git a/thirdparty/mmdetection/tests/data/color.jpg b/thirdparty/mmdetection/tests/data/color.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0d836ea802f953f9ec2c661264da9d0922bafc7b --- /dev/null +++ b/thirdparty/mmdetection/tests/data/color.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2eff7c1f5e8000d860ac240df63ed6c82d1e5659a5042c3a55cda536dc39833c +size 35851 diff --git a/thirdparty/mmdetection/tests/data/gray.jpg b/thirdparty/mmdetection/tests/data/gray.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ff5950e85dfbd17878a4979eed9e003be32be42e --- /dev/null +++ b/thirdparty/mmdetection/tests/data/gray.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5cec2d88e594f6de1281d90d8369582b4a7b45bc9fe7bf29f570ff94d1799577 +size 39088 diff --git a/thirdparty/mmdetection/tests/test_anchor.py b/thirdparty/mmdetection/tests/test_anchor.py new file mode 100644 index 0000000000000000000000000000000000000000..813852ea3e7c5c2f1c372bbc36edb4253e176db7 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_anchor.py @@ -0,0 +1,410 @@ +""" +CommandLine: + pytest tests/test_anchor.py + xdoctest tests/test_anchor.py zero + +""" +import torch + + +def test_standard_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + anchor_generator_cfg = dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8]) + + anchor_generator = build_anchor_generator(anchor_generator_cfg) + assert anchor_generator is not None + + +def test_strides(): + from mmdet.core import AnchorGenerator + # Square strides + self = AnchorGenerator([10], [1.], [1.], [10]) + anchors = self.grid_anchors([(2, 2)], device='cpu') + + expected_anchors = torch.tensor([[-5., -5., 5., 5.], [5., -5., 15., 5.], + [-5., 5., 5., 15.], [5., 5., 15., 15.]]) + + assert torch.equal(anchors[0], expected_anchors) + + # Different strides in x and y direction + self = AnchorGenerator([(10, 20)], [1.], [1.], [10]) + anchors = self.grid_anchors([(2, 2)], device='cpu') + + expected_anchors = torch.tensor([[-5., -5., 5., 5.], [5., -5., 15., 5.], + [-5., 15., 5., 25.], [5., 15., 15., 25.]]) + + assert torch.equal(anchors[0], expected_anchors) + + +def test_ssd_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]) + + featmap_sizes = [(38, 38), (19, 19), (10, 10), (5, 5), (3, 3), (1, 1)] + anchor_generator = 
build_anchor_generator(anchor_generator_cfg) + + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-6.5000, -6.5000, 14.5000, 14.5000], + [-11.3704, -11.3704, 19.3704, 19.3704], + [-10.8492, -3.4246, 18.8492, 11.4246], + [-3.4246, -10.8492, 11.4246, 18.8492]]), + torch.Tensor([[-14.5000, -14.5000, 30.5000, 30.5000], + [-25.3729, -25.3729, 41.3729, 41.3729], + [-23.8198, -7.9099, 39.8198, 23.9099], + [-7.9099, -23.8198, 23.9099, 39.8198], + [-30.9711, -4.9904, 46.9711, 20.9904], + [-4.9904, -30.9711, 20.9904, 46.9711]]), + torch.Tensor([[-33.5000, -33.5000, 65.5000, 65.5000], + [-45.5366, -45.5366, 77.5366, 77.5366], + [-54.0036, -19.0018, 86.0036, 51.0018], + [-19.0018, -54.0036, 51.0018, 86.0036], + [-69.7365, -12.5788, 101.7365, 44.5788], + [-12.5788, -69.7365, 44.5788, 101.7365]]), + torch.Tensor([[-44.5000, -44.5000, 108.5000, 108.5000], + [-56.9817, -56.9817, 120.9817, 120.9817], + [-76.1873, -22.0937, 140.1873, 86.0937], + [-22.0937, -76.1873, 86.0937, 140.1873], + [-100.5019, -12.1673, 164.5019, 76.1673], + [-12.1673, -100.5019, 76.1673, 164.5019]]), + torch.Tensor([[-53.5000, -53.5000, 153.5000, 153.5000], + [-66.2185, -66.2185, 166.2185, 166.2185], + [-96.3711, -23.1855, 196.3711, 123.1855], + [-23.1855, -96.3711, 123.1855, 196.3711]]), + torch.Tensor([[19.5000, 19.5000, 280.5000, 280.5000], + [6.6342, 6.6342, 293.3658, 293.3658], + [-34.5549, 57.7226, 334.5549, 242.2774], + [57.7226, -34.5549, 242.2774, 334.5549]]), + ] + base_anchors = anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check valid flags + expected_valid_pixels = [5776, 2166, 600, 150, 36, 4] + multi_level_valid_flags = anchor_generator.valid_flags( + featmap_sizes, (300, 300), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert anchor_generator.num_base_anchors == [4, 6, 6, 6, 4, 4] + + # check anchor generation + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 6 + + +def test_anchor_generator_with_tuples(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]) + + featmap_sizes = [(38, 38), (19, 19), (10, 10), (5, 5), (3, 3), (1, 1)] + anchor_generator = build_anchor_generator(anchor_generator_cfg) + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + + anchor_generator_cfg_tuples = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[(8, 8), (16, 16), (32, 32), (64, 64), (100, 100), (300, 300)], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]) + + anchor_generator_tuples = build_anchor_generator( + anchor_generator_cfg_tuples) + anchors_tuples = anchor_generator_tuples.grid_anchors( + featmap_sizes, device) + for anchor, anchor_tuples in zip(anchors, anchors_tuples): + assert torch.equal(anchor, anchor_tuples) + + +def test_yolo_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='YOLOAnchorGenerator', + 
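
The `expected_valid_pixels` list in the SSD test above is nothing more than feature-map area times base anchors per location (every location is valid for a full 300x300 input), which is easy to verify by hand:

```python
featmap_sizes = [(38, 38), (19, 19), (10, 10), (5, 5), (3, 3), (1, 1)]
num_base_anchors = [4, 6, 6, 6, 4, 4]
expected_valid_pixels = [5776, 2166, 600, 150, 36, 4]
for (h, w), n, expected in zip(featmap_sizes, num_base_anchors,
                               expected_valid_pixels):
    # e.g. 38 * 38 * 4 == 5776 for the first level
    assert h * w * n == expected
```
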
strides=[32, 16, 8], + base_sizes=[ + [(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)], + ]) + + featmap_sizes = [(14, 18), (28, 36), (56, 72)] + anchor_generator = build_anchor_generator(anchor_generator_cfg) + + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-42.0000, -29.0000, 74.0000, 61.0000], + [-62.0000, -83.0000, 94.0000, 115.0000], + [-170.5000, -147.0000, 202.5000, 179.0000]]), + torch.Tensor([[-7.0000, -22.5000, 23.0000, 38.5000], + [-23.0000, -14.5000, 39.0000, 30.5000], + [-21.5000, -51.5000, 37.5000, 67.5000]]), + torch.Tensor([[-1.0000, -2.5000, 9.0000, 10.5000], + [-4.0000, -11.0000, 12.0000, 19.0000], + [-12.5000, -7.5000, 20.5000, 15.5000]]) + ] + base_anchors = anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check number of base anchors for each level + assert anchor_generator.num_base_anchors == [3, 3, 3] + + # check anchor generation + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 3 + + +def test_retina_anchor(): + from mmdet.models import build_head + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + # head configs modified from + # configs/nas_fpn/retinanet_r50_fpn_crop640_50e.py + bbox_head = dict( + type='RetinaSepBNHead', + num_classes=4, + num_ins=5, + in_channels=4, + stacked_convs=1, + feat_channels=4, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])) + + retina_head = build_head(bbox_head) + assert retina_head.anchor_generator is not None + + # use the featmap sizes in NASFPN setting to test retina head + featmap_sizes = [(80, 80), (40, 40), (20, 20), (10, 10), (5, 5)] + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-22.6274, -11.3137, 22.6274, 11.3137], + [-28.5088, -14.2544, 28.5088, 14.2544], + [-35.9188, -17.9594, 35.9188, 17.9594], + [-16.0000, -16.0000, 16.0000, 16.0000], + [-20.1587, -20.1587, 20.1587, 20.1587], + [-25.3984, -25.3984, 25.3984, 25.3984], + [-11.3137, -22.6274, 11.3137, 22.6274], + [-14.2544, -28.5088, 14.2544, 28.5088], + [-17.9594, -35.9188, 17.9594, 35.9188]]), + torch.Tensor([[-45.2548, -22.6274, 45.2548, 22.6274], + [-57.0175, -28.5088, 57.0175, 28.5088], + [-71.8376, -35.9188, 71.8376, 35.9188], + [-32.0000, -32.0000, 32.0000, 32.0000], + [-40.3175, -40.3175, 40.3175, 40.3175], + [-50.7968, -50.7968, 50.7968, 50.7968], + [-22.6274, -45.2548, 22.6274, 45.2548], + [-28.5088, -57.0175, 28.5088, 57.0175], + [-35.9188, -71.8376, 35.9188, 71.8376]]), + torch.Tensor([[-90.5097, -45.2548, 90.5097, 45.2548], + [-114.0350, -57.0175, 114.0350, 57.0175], + [-143.6751, -71.8376, 143.6751, 71.8376], + [-64.0000, -64.0000, 64.0000, 64.0000], + [-80.6349, -80.6349, 80.6349, 80.6349], + [-101.5937, -101.5937, 101.5937, 101.5937], + [-45.2548, -90.5097, 45.2548, 90.5097], + [-57.0175, -114.0350, 57.0175, 114.0350], + [-71.8376, -143.6751, 71.8376, 143.6751]]), + torch.Tensor([[-181.0193, -90.5097, 181.0193, 90.5097], + [-228.0701, -114.0350, 228.0701, 114.0350], + [-287.3503, -143.6751, 287.3503, 143.6751], + [-128.0000, -128.0000, 128.0000, 128.0000], + [-161.2699, -161.2699, 161.2699, 161.2699], + [-203.1873, -203.1873, 203.1873, 203.1873], + [-90.5097, -181.0193, 90.5097, 
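
The YOLO expected base anchors above encode one simple rule, consistent with all nine rows: each `(w, h)` prior from `base_sizes` is centered at `(stride / 2, stride / 2)`. Checking the stride-32 level:

```python
stride = 32
cx = cy = stride / 2.0  # YOLO priors are centered half a stride in
for (w, h), expected in zip(
        [(116, 90), (156, 198), (373, 326)],
        [[-42., -29., 74., 61.],
         [-62., -83., 94., 115.],
         [-170.5, -147., 202.5, 179.]]):
    box = [cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2]
    assert box == expected
```
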
181.0193], + [-114.0350, -228.0701, 114.0350, 228.0701], + [-143.6751, -287.3503, 143.6751, 287.3503]]), + torch.Tensor([[-362.0387, -181.0193, 362.0387, 181.0193], + [-456.1401, -228.0701, 456.1401, 228.0701], + [-574.7006, -287.3503, 574.7006, 287.3503], + [-256.0000, -256.0000, 256.0000, 256.0000], + [-322.5398, -322.5398, 322.5398, 322.5398], + [-406.3747, -406.3747, 406.3747, 406.3747], + [-181.0193, -362.0387, 181.0193, 362.0387], + [-228.0701, -456.1401, 228.0701, 456.1401], + [-287.3503, -574.7006, 287.3503, 574.7006]]) + ] + base_anchors = retina_head.anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check valid flags + expected_valid_pixels = [57600, 14400, 3600, 900, 225] + multi_level_valid_flags = retina_head.anchor_generator.valid_flags( + featmap_sizes, (640, 640), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert retina_head.anchor_generator.num_base_anchors == [9, 9, 9, 9, 9] + + # check anchor generation + anchors = retina_head.anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 5 + + +def test_guided_anchor(): + from mmdet.models import build_head + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + # head configs modified from + # configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py + bbox_head = dict( + type='GARetinaHead', + num_classes=8, + in_channels=4, + stacked_convs=1, + feat_channels=4, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128])) + + ga_retina_head = build_head(bbox_head) + assert ga_retina_head.approx_anchor_generator is not None + + # use the featmap sizes in NASFPN setting to test ga_retina_head + featmap_sizes = [(100, 152), (50, 76), (25, 38), (13, 19), (7, 10)] + # check base anchors + expected_approxs = [ + torch.Tensor([[-22.6274, -11.3137, 22.6274, 11.3137], + [-28.5088, -14.2544, 28.5088, 14.2544], + [-35.9188, -17.9594, 35.9188, 17.9594], + [-16.0000, -16.0000, 16.0000, 16.0000], + [-20.1587, -20.1587, 20.1587, 20.1587], + [-25.3984, -25.3984, 25.3984, 25.3984], + [-11.3137, -22.6274, 11.3137, 22.6274], + [-14.2544, -28.5088, 14.2544, 28.5088], + [-17.9594, -35.9188, 17.9594, 35.9188]]), + torch.Tensor([[-45.2548, -22.6274, 45.2548, 22.6274], + [-57.0175, -28.5088, 57.0175, 28.5088], + [-71.8376, -35.9188, 71.8376, 35.9188], + [-32.0000, -32.0000, 32.0000, 32.0000], + [-40.3175, -40.3175, 40.3175, 40.3175], + [-50.7968, -50.7968, 50.7968, 50.7968], + [-22.6274, -45.2548, 22.6274, 45.2548], + [-28.5088, -57.0175, 28.5088, 57.0175], + [-35.9188, -71.8376, 35.9188, 71.8376]]), + torch.Tensor([[-90.5097, -45.2548, 90.5097, 45.2548], + [-114.0350, -57.0175, 114.0350, 57.0175], + [-143.6751, -71.8376, 143.6751, 71.8376], + [-64.0000, -64.0000, 64.0000, 64.0000], + [-80.6349, -80.6349, 80.6349, 80.6349], + [-101.5937, -101.5937, 101.5937, 101.5937], + [-45.2548, -90.5097, 45.2548, 90.5097], + [-57.0175, -114.0350, 57.0175, 114.0350], + [-71.8376, -143.6751, 71.8376, 143.6751]]), + torch.Tensor([[-181.0193, -90.5097, 181.0193, 90.5097], + [-228.0701, -114.0350, 228.0701, 114.0350], + [-287.3503, -143.6751, 287.3503, 143.6751], 
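
The retina (and guided-anchor) expected tensors above follow from `octave_base_scale=4` with `scales_per_octave=3` and ratios `[0.5, 1.0, 2.0]`. Assuming the standard `AnchorGenerator` parameterization, width scaled by `1/sqrt(ratio)` and height by `sqrt(ratio)`, which these numbers are consistent with, the stride-8 level reconstructs as:

```python
import math

stride, base_scale = 8, 4
for ratio in [0.5, 1.0, 2.0]:      # ratio = h / w, outer loop in the tensors
    for i in range(3):             # scales 4*2^(0/3), 4*2^(1/3), 4*2^(2/3)
        size = base_scale * 2 ** (i / 3) * stride
        half_w = size / math.sqrt(ratio) / 2
        half_h = size * math.sqrt(ratio) / 2
        print(f'[{-half_w:.4f}, {-half_h:.4f}, {half_w:.4f}, {half_h:.4f}]')
# First row printed: [-22.6274, -11.3137, 22.6274, 11.3137], matching the
# stride-8 expected tensor; the other strides scale these values linearly.
```
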
+ [-128.0000, -128.0000, 128.0000, 128.0000], + [-161.2699, -161.2699, 161.2699, 161.2699], + [-203.1873, -203.1873, 203.1873, 203.1873], + [-90.5097, -181.0193, 90.5097, 181.0193], + [-114.0350, -228.0701, 114.0350, 228.0701], + [-143.6751, -287.3503, 143.6751, 287.3503]]), + torch.Tensor([[-362.0387, -181.0193, 362.0387, 181.0193], + [-456.1401, -228.0701, 456.1401, 228.0701], + [-574.7006, -287.3503, 574.7006, 287.3503], + [-256.0000, -256.0000, 256.0000, 256.0000], + [-322.5398, -322.5398, 322.5398, 322.5398], + [-406.3747, -406.3747, 406.3747, 406.3747], + [-181.0193, -362.0387, 181.0193, 362.0387], + [-228.0701, -456.1401, 228.0701, 456.1401], + [-287.3503, -574.7006, 287.3503, 574.7006]]) + ] + approxs = ga_retina_head.approx_anchor_generator.base_anchors + for i, base_anchor in enumerate(approxs): + assert base_anchor.allclose(expected_approxs[i]) + + # check valid flags + expected_valid_pixels = [136800, 34200, 8550, 2223, 630] + multi_level_valid_flags = ga_retina_head.approx_anchor_generator \ + .valid_flags(featmap_sizes, (800, 1216), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert ga_retina_head.approx_anchor_generator.num_base_anchors == [ + 9, 9, 9, 9, 9 + ] + + # check approx generation + squares = ga_retina_head.square_anchor_generator.grid_anchors( + featmap_sizes, device) + assert len(squares) == 5 + + expected_squares = [ + torch.Tensor([[-16., -16., 16., 16.]]), + torch.Tensor([[-32., -32., 32., 32]]), + torch.Tensor([[-64., -64., 64., 64.]]), + torch.Tensor([[-128., -128., 128., 128.]]), + torch.Tensor([[-256., -256., 256., 256.]]) + ] + squares = ga_retina_head.square_anchor_generator.base_anchors + for i, base_anchor in enumerate(squares): + assert base_anchor.allclose(expected_squares[i]) + + # square_anchor_generator does not check valid flags + # check number of base anchors for each level + assert (ga_retina_head.square_anchor_generator.num_base_anchors == [ + 1, 1, 1, 1, 1 + ]) + + # check square generation + anchors = ga_retina_head.square_anchor_generator.grid_anchors( + featmap_sizes, device) + assert len(anchors) == 5 diff --git a/thirdparty/mmdetection/tests/test_assigner.py b/thirdparty/mmdetection/tests/test_assigner.py new file mode 100644 index 0000000000000000000000000000000000000000..8e2d4b7e288e9503f4de227e8bbbca7422e05a51 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_assigner.py @@ -0,0 +1,412 @@ +"""Tests the Assigner objects. 
+ +CommandLine: + pytest tests/test_assigner.py + xdoctest tests/test_assigner.py zero +""" +import torch + +from mmdet.core.bbox.assigners import (ApproxMaxIoUAssigner, + CenterRegionAssigner, HungarianAssigner, + MaxIoUAssigner, PointAssigner) + + +def test_max_iou_assigner(): + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 4 + assert len(assign_result.labels) == 4 + + expected_gt_inds = torch.LongTensor([1, 0, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_ignore(): + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [30, 32, 40, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = self.assign( + bboxes, gt_bboxes, gt_bboxes_ignore=gt_bboxes_ignore) + + expected_gt_inds = torch.LongTensor([1, 0, 2, -1]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + assign_result = self.assign(bboxes, gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_empty_boxes(): + """Test corner case where a network might predict no boxes.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + + # Test with gt_labels + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 0 + assert tuple(assign_result.labels.shape) == (0, ) + + # Test without gt_labels + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=None) + assert len(assign_result.gt_inds) == 0 + assert assign_result.labels is None + + +def test_max_iou_assigner_with_empty_boxes_and_ignore(): + """Test corner case where a network might predict no boxes and + ignore_iof_thr is on.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + gt_labels = torch.LongTensor([2, 3]) + + # Test with gt_labels + assign_result = self.assign( + bboxes, + gt_bboxes, + gt_labels=gt_labels, + gt_bboxes_ignore=gt_bboxes_ignore) + assert len(assign_result.gt_inds) == 0 + assert tuple(assign_result.labels.shape) == (0, ) + + # Test without gt_labels + assign_result = self.assign( + bboxes, gt_bboxes, gt_labels=None, gt_bboxes_ignore=gt_bboxes_ignore) + assert len(assign_result.gt_inds) == 0 + assert assign_result.labels is None + + +def 
test_max_iou_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no boxes and no gt.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.empty((0, 4)) + assign_result = self.assign(bboxes, gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_point_assigner(): + self = PointAssigner() + points = torch.FloatTensor([ # [x, y, stride] + [0, 0, 1], + [10, 10, 1], + [5, 5, 1], + [32, 32, 1], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + assign_result = self.assign(points, gt_bboxes) + expected_gt_inds = torch.LongTensor([1, 2, 1, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_point_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = PointAssigner() + points = torch.FloatTensor([ # [x, y, stride] + [0, 0, 1], + [10, 10, 1], + [5, 5, 1], + [32, 32, 1], + ]) + gt_bboxes = torch.FloatTensor([]) + assign_result = self.assign(points, gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_point_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no points and no gt.""" + self = PointAssigner() + points = torch.FloatTensor([]) + gt_bboxes = torch.FloatTensor([]) + assign_result = self.assign(points, gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_approx_iou_assigner(): + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + + expected_gt_inds = torch.LongTensor([1, 0, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_approx_iou_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_approx_iou_assigner_with_empty_boxes(): + """Test corner case where a network might predict no boxes.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_approx_iou_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no boxes and no gt.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.empty((0, 4)) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares,
approxs_per_octave, + gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_random_assign_result(): + """Test random instantiation of assign result to catch corner cases.""" + from mmdet.core.bbox.assigners.assign_result import AssignResult + AssignResult.random() + + AssignResult.random(num_gts=0, num_preds=0) + AssignResult.random(num_gts=0, num_preds=3) + AssignResult.random(num_gts=3, num_preds=3) + AssignResult.random(num_gts=0, num_preds=3) + AssignResult.random(num_gts=7, num_preds=7) + AssignResult.random(num_gts=7, num_preds=64) + AssignResult.random(num_gts=24, num_preds=3) + + +def test_center_region_assigner(): + self = CenterRegionAssigner(pos_scale=0.3, neg_scale=1) + bboxes = torch.FloatTensor([[0, 0, 10, 10], [10, 10, 20, 20], [8, 8, 9, + 9]]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 11, 11], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + [4.5, 4.5, 5.5, 5.5], # match bboxes[0] but area is too small + [0, 0, 10, 10], # match bboxes[1] and has a smaller area than gt[0] + ]) + gt_labels = torch.LongTensor([2, 3, 4, 5]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 3 + assert len(assign_result.labels) == 3 + expected_gt_inds = torch.LongTensor([4, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + shadowed_labels = assign_result.get_extra_property('shadowed_labels') + # [8, 8, 9, 9] in the shadowed region of [0, 0, 11, 11] (label: 2) + assert torch.any(shadowed_labels == torch.LongTensor([[2, 2]])) + # [8, 8, 9, 9] in the shadowed region of [0, 0, 10, 10] (label: 5) + assert torch.any(shadowed_labels == torch.LongTensor([[2, 5]])) + # [0, 0, 10, 10] is already assigned to [4.5, 4.5, 5.5, 5.5]. + # Therefore, [0, 0, 11, 11] (label: 2) is shadowed + assert torch.any(shadowed_labels == torch.LongTensor([[0, 2]])) + + +def test_center_region_assigner_with_ignore(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + ]) + gt_bboxes_ignore = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = self.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 2 + assert len(assign_result.labels) == 2 + + expected_gt_inds = torch.LongTensor([-1, 2]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_center_region_assigner_with_empty_bboxes(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.empty((0, 4)).float() + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert assign_result.gt_inds is None or assign_result.gt_inds.numel() == 0 + assert assign_result.labels is None or assign_result.labels.numel() == 0 + + +def test_center_region_assigner_with_empty_gts(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + ]) + gt_bboxes = torch.empty((0, 4)).float() + gt_labels = torch.empty((0, )).long() + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 2 + expected_gt_inds = 
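
A reading aid for the `gt_inds` values asserted throughout these assigner tests: `0` marks a negative sample, `-1` an ignored one, and a positive value is a one-based index into `gt_bboxes`. The `[1, 0, 2, 0]` expectation in `test_max_iou_assigner` then follows from two IoU computations, sketched here with a hypothetical helper (not mmdet's `bbox_overlaps`):

```python
def area(box):
    return (box[2] - box[0]) * (box[3] - box[1])

def iou(a, b):
    # Intersection-over-union of two [x1, y1, x2, y2] boxes.
    iw = max(0, min(a[2], b[2]) - max(a[0], b[0]))
    ih = max(0, min(a[3], b[3]) - max(a[1], b[1]))
    inter = iw * ih
    return inter / (area(a) + area(b) - inter)

# bboxes[0] vs gt[0]: 90 / 100 = 0.9 >= pos_iou_thr, so gt_inds[0] == 1.
assert iou([0, 0, 10, 10], [0, 0, 10, 9]) == 0.9
# bboxes[1] overlaps neither gt (IoU 0 < neg_iou_thr), so gt_inds[1] == 0.
# bboxes[2] vs gt[1]: 25 / 165 ~ 0.15, below pos_iou_thr, yet gt_inds[2] == 2
# because bboxes[2] is still gt[1]'s best overlap; MaxIoUAssigner's
# low-quality matching (on by default) assigns each gt to its argmax proposal.
assert abs(iou([5, 5, 15, 15], [0, 10, 10, 19]) - 25 / 165) < 1e-9
```
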
torch.LongTensor([0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_hungarian_match_assigner(): + self = HungarianAssigner() + assert self.iou_mode == 'giou' + + # test no gt bboxes + bbox_pred = torch.rand((10, 4)) + cls_pred = torch.rand((10, 81)) + gt_bboxes = torch.empty((0, 4)).float() + gt_labels = torch.empty((0, )).long() + img_meta = dict(img_shape=(10, 8, 3)) + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds == 0) + assert torch.all(assign_result.labels == -1) + + # test with gt bboxes + gt_bboxes = torch.FloatTensor([[0, 0, 5, 7], [3, 5, 7, 8]]) + gt_labels = torch.LongTensor([1, 20]) + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds > -1) + assert (assign_result.gt_inds > 0).sum() == gt_bboxes.size(0) + assert (assign_result.labels > -1).sum() == gt_bboxes.size(0) + + # test iou mode + self = HungarianAssigner(iou_mode='iou') + assert self.iou_mode == 'iou' + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds > -1) + assert (assign_result.gt_inds > 0).sum() == gt_bboxes.size(0) + assert (assign_result.labels > -1).sum() == gt_bboxes.size(0) diff --git a/thirdparty/mmdetection/tests/test_async.py b/thirdparty/mmdetection/tests/test_async.py new file mode 100644 index 0000000000000000000000000000000000000000..e9733f61bab0bcc97cccdd5844c7d6da6499e92a --- /dev/null +++ b/thirdparty/mmdetection/tests/test_async.py @@ -0,0 +1,82 @@ +"""Tests for async interface.""" + +import asyncio +import os +import sys + +import asynctest +import mmcv +import torch + +from mmdet.apis import async_inference_detector, init_detector + +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import concurrent + + +class AsyncTestCase(asynctest.TestCase): + use_default_loop = False + forbid_get_event_loop = True + + TEST_TIMEOUT = int(os.getenv('ASYNCIO_TEST_TIMEOUT', '30')) + + def _run_test_method(self, method): + result = method() + if asyncio.iscoroutine(result): + self.loop.run_until_complete( + asyncio.wait_for(result, timeout=self.TEST_TIMEOUT)) + + +class MaskRCNNDetector: + + def __init__(self, + model_config, + checkpoint=None, + streamqueue_size=3, + device='cuda:0'): + + self.streamqueue_size = streamqueue_size + self.device = device + # build the model and load checkpoint + self.model = init_detector( + model_config, checkpoint=None, device=self.device) + self.streamqueue = None + + async def init(self): + self.streamqueue = asyncio.Queue() + for _ in range(self.streamqueue_size): + stream = torch.cuda.Stream(device=self.device) + self.streamqueue.put_nowait(stream) + + if sys.version_info >= (3, 7): + + async def apredict(self, img): + if isinstance(img, str): + img = mmcv.imread(img) + async with concurrent(self.streamqueue): + result = await async_inference_detector(self.model, img) + return result + + +class AsyncInferenceTestCase(AsyncTestCase): + + if sys.version_info >= (3, 7): + + async def test_simple_inference(self): + if not torch.cuda.is_available(): + import pytest + + pytest.skip('test requires GPU and torch+cuda') + + ori_grad_enabled = torch.is_grad_enabled() + root_dir = os.path.dirname(os.path.dirname(__name__)) + model_config = os.path.join( + root_dir, 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py') + detector = MaskRCNNDetector(model_config) + await detector.init() + img_path = os.path.join(root_dir, 
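
`test_hungarian_match_assigner` above asserts that exactly `gt_bboxes.size(0)` predictions become positive: Hungarian matching is one-to-one. The underlying mechanism is a cost matrix minimized by the Hungarian algorithm; a toy illustration with scipy, using made-up costs rather than the classification/regression/IoU costs the assigner actually combines:

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

# rows: 4 predictions, cols: 2 gts; lower cost means a better match
cost = np.array([[0.2, 0.9],
                 [0.8, 0.1],
                 [0.5, 0.6],
                 [0.7, 0.4]])
pred_inds, gt_inds = linear_sum_assignment(cost)
# Each gt is matched to exactly one prediction (preds 0 and 1 here);
# every other prediction stays negative, mirroring the test's assertions.
assert sorted(pred_inds.tolist()) == [0, 1]
```
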
'demo/demo.jpg') + bboxes, _ = await detector.apredict(img_path) + self.assertTrue(bboxes) + # the async inference detector hacks grad_enabled, + # so restore it here to avoid influencing other tests + torch.set_grad_enabled(ori_grad_enabled) diff --git a/thirdparty/mmdetection/tests/test_coder.py b/thirdparty/mmdetection/tests/test_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..b45c16e97eabbde30fc19e41aee17dffa98d9e85 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_coder.py @@ -0,0 +1,21 @@ +import torch + +from mmdet.core.bbox.coder import YOLOBBoxCoder + + +def test_yolo_bbox_coder(): + coder = YOLOBBoxCoder() + bboxes = torch.Tensor([[-42., -29., 74., 61.], [-10., -29., 106., 61.], + [22., -29., 138., 61.], [54., -29., 170., 61.]]) + pred_bboxes = torch.Tensor([[0.4709, 0.6152, 0.1690, -0.4056], + [0.5399, 0.6653, 0.1162, -0.4162], + [0.4654, 0.6618, 0.1548, -0.4301], + [0.4786, 0.6197, 0.1896, -0.4479]]) + grid_size = 32 + expected_decode_bboxes = torch.Tensor( + [[-53.6102, -10.3096, 83.7478, 49.6824], + [-15.8700, -8.3901, 114.4236, 50.9693], + [11.1822, -8.0924, 146.6034, 50.4476], + [41.2068, -8.9232, 181.4236, 48.5840]]) + assert expected_decode_bboxes.allclose( + coder.decode(bboxes, pred_bboxes, grid_size)) diff --git a/thirdparty/mmdetection/tests/test_config.py b/thirdparty/mmdetection/tests/test_config.py new file mode 100644 index 0000000000000000000000000000000000000000..bbe046080da0cf2567690c555a73d28a02f4b910 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_config.py @@ -0,0 +1,367 @@ +from os.path import dirname, exists, join, relpath + +import pytest +import torch +from mmcv.runner import build_optimizer + +from mmdet.core import BitmapMasks, PolygonMasks + + +def _get_config_directory(): + """Find the predefined detector config directory.""" + try: + # Assume we are running in the source mmdetection repo + repo_dpath = dirname(dirname(__file__)) + except NameError: + # For IPython development when this __file__ is not defined + import mmdet + repo_dpath = dirname(dirname(mmdet.__file__)) + config_dpath = join(repo_dpath, 'configs') + if not exists(config_dpath): + raise Exception('Cannot find config path') + return config_dpath + + +def test_config_build_detector(): + """Test that all detection models defined in the configs can be + initialized.""" + from mmcv import Config + from mmdet.models import build_detector + + config_dpath = _get_config_directory() + print(f'Found config_dpath = {config_dpath}') + + import glob + config_fpaths = list(glob.glob(join(config_dpath, '**', '*.py'))) + config_fpaths = [p for p in config_fpaths if p.find('_base_') == -1] + config_names = [relpath(p, config_dpath) for p in config_fpaths] + + print(f'Using {len(config_names)} config files') + + for config_fname in config_names: + config_fpath = join(config_dpath, config_fname) + config_mod = Config.fromfile(config_fpath) + + config_mod.model + config_mod.train_cfg + config_mod.test_cfg + print(f'Building detector, config_fpath = {config_fpath}') + + # Remove pretrained keys to allow for testing in an offline environment + if 'pretrained' in config_mod.model: + config_mod.model['pretrained'] = None + + detector = build_detector( + config_mod.model, + train_cfg=config_mod.train_cfg, + test_cfg=config_mod.test_cfg) + assert detector is not None + + optimizer = build_optimizer(detector, config_mod.optimizer) + assert isinstance(optimizer, torch.optim.Optimizer) + + if 'roi_head' in config_mod.model.keys(): + # for two stage detector + # detectors
must have bbox head + assert detector.roi_head.with_bbox and detector.with_bbox + assert detector.roi_head.with_mask == detector.with_mask + + head_config = config_mod.model['roi_head'] + _check_roi_head(head_config, detector.roi_head) + # else: + # # for single stage detector + # # detectors must have bbox head + # # assert detector.with_bbox + # head_config = config_mod.model['bbox_head'] + # _check_bbox_head(head_config, detector.bbox_head) + + +def _check_roi_head(config, head): + # check consistency between head_config and roi_head + assert config['type'] == head.__class__.__name__ + + # check roi_align + bbox_roi_cfg = config.bbox_roi_extractor + bbox_roi_extractor = head.bbox_roi_extractor + _check_roi_extractor(bbox_roi_cfg, bbox_roi_extractor) + + # check bbox head infos + bbox_cfg = config.bbox_head + bbox_head = head.bbox_head + _check_bbox_head(bbox_cfg, bbox_head) + + if head.with_mask: + # check roi_align + if config.mask_roi_extractor: + mask_roi_cfg = config.mask_roi_extractor + mask_roi_extractor = head.mask_roi_extractor + _check_roi_extractor(mask_roi_cfg, mask_roi_extractor, + bbox_roi_extractor) + + # check mask head infos + mask_head = head.mask_head + mask_cfg = config.mask_head + _check_mask_head(mask_cfg, mask_head) + + # check arch specific settings, e.g., cascade/htc + if config['type'] in ['CascadeRoIHead', 'HybridTaskCascadeRoIHead']: + assert config.num_stages == len(head.bbox_head) + assert config.num_stages == len(head.bbox_roi_extractor) + + if head.with_mask: + assert config.num_stages == len(head.mask_head) + assert config.num_stages == len(head.mask_roi_extractor) + + elif config['type'] in ['MaskScoringRoIHead']: + assert (hasattr(head, 'mask_iou_head') + and head.mask_iou_head is not None) + mask_iou_cfg = config.mask_iou_head + mask_iou_head = head.mask_iou_head + assert (mask_iou_cfg.fc_out_channels == + mask_iou_head.fc_mask_iou.in_features) + + elif config['type'] in ['GridRoIHead']: + grid_roi_cfg = config.grid_roi_extractor + grid_roi_extractor = head.grid_roi_extractor + _check_roi_extractor(grid_roi_cfg, grid_roi_extractor, + bbox_roi_extractor) + + config.grid_head.grid_points = head.grid_head.grid_points + + +def _check_roi_extractor(config, roi_extractor, prev_roi_extractor=None): + import torch.nn as nn + if isinstance(roi_extractor, nn.ModuleList): + if prev_roi_extractor: + prev_roi_extractor = prev_roi_extractor[0] + roi_extractor = roi_extractor[0] + + assert (len(config.featmap_strides) == len(roi_extractor.roi_layers)) + assert (config.out_channels == roi_extractor.out_channels) + from torch.nn.modules.utils import _pair + assert (_pair(config.roi_layer.output_size) == + roi_extractor.roi_layers[0].output_size) + + if 'use_torchvision' in config.roi_layer: + assert (config.roi_layer.use_torchvision == + roi_extractor.roi_layers[0].use_torchvision) + elif 'aligned' in config.roi_layer: + assert ( + config.roi_layer.aligned == roi_extractor.roi_layers[0].aligned) + + if prev_roi_extractor: + assert (roi_extractor.roi_layers[0].aligned == + prev_roi_extractor.roi_layers[0].aligned) + assert (roi_extractor.roi_layers[0].use_torchvision == + prev_roi_extractor.roi_layers[0].use_torchvision) + + +def _check_mask_head(mask_cfg, mask_head): + import torch.nn as nn + if isinstance(mask_cfg, list): + for single_mask_cfg, single_mask_head in zip(mask_cfg, mask_head): + _check_mask_head(single_mask_cfg, single_mask_head) + elif isinstance(mask_head, nn.ModuleList): + for single_mask_head in mask_head: + _check_mask_head(mask_cfg, 
single_mask_head) + else: + assert mask_cfg['type'] == mask_head.__class__.__name__ + assert mask_cfg.in_channels == mask_head.in_channels + class_agnostic = mask_cfg.get('class_agnostic', False) + out_dim = (1 if class_agnostic else mask_cfg.num_classes) + if hasattr(mask_head, 'conv_logits'): + assert (mask_cfg.conv_out_channels == + mask_head.conv_logits.in_channels) + assert mask_head.conv_logits.out_channels == out_dim + else: + assert mask_cfg.fc_out_channels == mask_head.fc_logits.in_features + assert (mask_head.fc_logits.out_features == out_dim * + mask_head.output_area) + + +def _check_bbox_head(bbox_cfg, bbox_head): + import torch.nn as nn + if isinstance(bbox_cfg, list): + for single_bbox_cfg, single_bbox_head in zip(bbox_cfg, bbox_head): + _check_bbox_head(single_bbox_cfg, single_bbox_head) + elif isinstance(bbox_head, nn.ModuleList): + for single_bbox_head in bbox_head: + _check_bbox_head(bbox_cfg, single_bbox_head) + else: + assert bbox_cfg['type'] == bbox_head.__class__.__name__ + if bbox_cfg['type'] == 'SABLHead': + assert bbox_cfg.cls_in_channels == bbox_head.cls_in_channels + assert bbox_cfg.reg_in_channels == bbox_head.reg_in_channels + + cls_out_channels = bbox_cfg.get('cls_out_channels', 1024) + assert (cls_out_channels == bbox_head.fc_cls.in_features) + assert (bbox_cfg.num_classes + 1 == bbox_head.fc_cls.out_features) + else: + assert bbox_cfg.in_channels == bbox_head.in_channels + with_cls = bbox_cfg.get('with_cls', True) + if with_cls: + fc_out_channels = bbox_cfg.get('fc_out_channels', 2048) + assert (fc_out_channels == bbox_head.fc_cls.in_features) + assert (bbox_cfg.num_classes + + 1 == bbox_head.fc_cls.out_features) + + with_reg = bbox_cfg.get('with_reg', True) + if with_reg: + out_dim = (4 if bbox_cfg.reg_class_agnostic else 4 * + bbox_cfg.num_classes) + assert bbox_head.fc_reg.out_features == out_dim + + +def _check_anchorhead(config, head): + # check consistency between head_config and roi_head + assert config['type'] == head.__class__.__name__ + assert config.in_channels == head.in_channels + + num_classes = ( + config.num_classes - + 1 if config.loss_cls.get('use_sigmoid', False) else config.num_classes) + if config['type'] == 'ATSSHead': + assert (config.feat_channels == head.atss_cls.in_channels) + assert (config.feat_channels == head.atss_reg.in_channels) + assert (config.feat_channels == head.atss_centerness.in_channels) + elif config['type'] == 'SABLRetinaHead': + assert (config.feat_channels == head.retina_cls.in_channels) + assert (config.feat_channels == head.retina_bbox_reg.in_channels) + assert (config.feat_channels == head.retina_bbox_cls.in_channels) + else: + assert (config.in_channels == head.conv_cls.in_channels) + assert (config.in_channels == head.conv_reg.in_channels) + assert (head.conv_cls.out_channels == num_classes * head.num_anchors) + assert head.fc_reg.out_channels == 4 * head.num_anchors + + +# Only tests a representative subset of configurations +# TODO: test pipelines using Albu, current Albu throw None given empty GT +@pytest.mark.parametrize( + 'config_rpath', + [ + 'wider_face/ssd300_wider_face.py', + 'pascal_voc/ssd300_voc0712.py', + 'pascal_voc/ssd512_voc0712.py', + # 'albu_example/mask_rcnn_r50_fpn_1x.py', + 'foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py', + 'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py', + 'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py', + 'fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py' + ]) +def test_config_data_pipeline(config_rpath): + """Test whether the data 
pipeline is valid and can process corner cases. + + CommandLine: + xdoctest -m tests/test_config.py test_config_build_data_pipeline + """ + from mmcv import Config + from mmdet.datasets.pipelines import Compose + import numpy as np + + config_dpath = _get_config_directory() + print(f'Found config_dpath = {config_dpath}') + + def dummy_masks(h, w, num_obj=3, mode='bitmap'): + assert mode in ('polygon', 'bitmap') + if mode == 'bitmap': + masks = np.random.randint(0, 2, (num_obj, h, w), dtype=np.uint8) + masks = BitmapMasks(masks, h, w) + else: + masks = [] + for i in range(num_obj): + masks.append([]) + masks[-1].append( + np.random.uniform(0, min(h - 1, w - 1), (8 + 4 * i, ))) + masks[-1].append( + np.random.uniform(0, min(h - 1, w - 1), (10 + 4 * i, ))) + masks = PolygonMasks(masks, h, w) + return masks + + config_fpath = join(config_dpath, config_rpath) + cfg = Config.fromfile(config_fpath) + + # remove loading pipeline + loading_pipeline = cfg.train_pipeline.pop(0) + loading_ann_pipeline = cfg.train_pipeline.pop(0) + cfg.test_pipeline.pop(0) + + train_pipeline = Compose(cfg.train_pipeline) + test_pipeline = Compose(cfg.test_pipeline) + + print(f'Building data pipeline, config_fpath = {config_fpath}') + + print(f'Test training data pipeline: \n{train_pipeline!r}') + img = np.random.randint(0, 255, size=(888, 666, 3), dtype=np.uint8) + if loading_pipeline.get('to_float32', False): + img = img.astype(np.float32) + mode = 'bitmap' if loading_ann_pipeline.get('poly2mask', + True) else 'polygon' + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.array([[35.2, 11.7, 39.7, 15.7]], dtype=np.float32), + gt_labels=np.array([1], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = train_pipeline(results) + assert output_results is not None + + print(f'Test testing data pipeline: \n{test_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.array([[35.2, 11.7, 39.7, 15.7]], dtype=np.float32), + gt_labels=np.array([1], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = test_pipeline(results) + assert output_results is not None + + # test empty GT + print('Test empty GT with training data pipeline: ' + f'\n{train_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.zeros((0, 4), dtype=np.float32), + gt_labels=np.array([], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], num_obj=0, mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = train_pipeline(results) + assert output_results is not None + + print(f'Test empty GT with testing data pipeline: \n{test_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.zeros((0, 4), dtype=np.float32), + gt_labels=np.array([], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], num_obj=0, 
mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = test_pipeline(results) + assert output_results is not None diff --git a/thirdparty/mmdetection/tests/test_data/test_dataset.py b/thirdparty/mmdetection/tests/test_data/test_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..83de7125b55aba7d8a27e211d0521a21508307f5 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_dataset.py @@ -0,0 +1,493 @@ +import bisect +import logging +import math +import os.path as osp +import tempfile +from collections import defaultdict +from unittest.mock import MagicMock, patch + +import mmcv +import numpy as np +import pytest +import torch +import torch.nn as nn +from mmcv.runner import EpochBasedRunner +from torch.utils.data import DataLoader + +from mmdet.core.evaluation import DistEvalHook, EvalHook +from mmdet.datasets import (DATASETS, ClassBalancedDataset, CocoDataset, + ConcatDataset, CustomDataset, RepeatDataset, + build_dataset) + + +def _create_dummy_coco_json(json_name): + image = { + 'id': 0, + 'width': 640, + 'height': 640, + 'file_name': 'fake_name.jpg', + } + + annotation_1 = { + 'id': 1, + 'image_id': 0, + 'category_id': 0, + 'area': 400, + 'bbox': [50, 60, 20, 20], + 'iscrowd': 0, + } + + annotation_2 = { + 'id': 2, + 'image_id': 0, + 'category_id': 0, + 'area': 900, + 'bbox': [100, 120, 30, 30], + 'iscrowd': 0, + } + + annotation_3 = { + 'id': 3, + 'image_id': 0, + 'category_id': 0, + 'area': 1600, + 'bbox': [150, 160, 40, 40], + 'iscrowd': 0, + } + + annotation_4 = { + 'id': 4, + 'image_id': 0, + 'category_id': 0, + 'area': 10000, + 'bbox': [250, 260, 100, 100], + 'iscrowd': 0, + } + + categories = [{ + 'id': 0, + 'name': 'car', + 'supercategory': 'car', + }] + + fake_json = { + 'images': [image], + 'annotations': + [annotation_1, annotation_2, annotation_3, annotation_4], + 'categories': categories + } + + mmcv.dump(fake_json, json_name) + + +def _create_dummy_custom_pkl(pkl_name): + fake_pkl = [{ + 'filename': 'fake_name.jpg', + 'width': 640, + 'height': 640, + 'ann': { + 'bboxes': + np.array([[50, 60, 70, 80], [100, 120, 130, 150], + [150, 160, 190, 200], [250, 260, 350, 360]]), + 'labels': + np.array([0, 0, 0, 0]) + } + }] + mmcv.dump(fake_pkl, pkl_name) + + +def _create_dummy_results(): + boxes = [ + np.array([[50, 60, 70, 80, 1.0], [100, 120, 130, 150, 0.98], + [150, 160, 190, 200, 0.96], [250, 260, 350, 360, 0.95]]) + ] + return [boxes] + + +def test_dataset_evaluation(): + tmp_dir = tempfile.TemporaryDirectory() + # create dummy data + fake_json_file = osp.join(tmp_dir.name, 'fake_data.json') + _create_dummy_coco_json(fake_json_file) + + # test single coco dataset evaluation + coco_dataset = CocoDataset( + ann_file=fake_json_file, classes=('car', ), pipeline=[]) + fake_results = _create_dummy_results() + eval_results = coco_dataset.evaluate(fake_results, classwise=True) + assert eval_results['bbox_mAP'] == 1 + assert eval_results['bbox_mAP_50'] == 1 + assert eval_results['bbox_mAP_75'] == 1 + + # test concat dataset evaluation + fake_concat_results = _create_dummy_results() + _create_dummy_results() + + # build concat dataset through two config dict + coco_cfg = dict( + type='CocoDataset', + ann_file=fake_json_file, + classes=('car', ), + pipeline=[]) + concat_cfgs = [coco_cfg, coco_cfg] + concat_dataset = build_dataset(concat_cfgs) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_bbox_mAP'] == 1 + assert 
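
The dummy detections in `_create_dummy_results` above are exactly the dummy annotations converted from COCO `[x, y, w, h]` to `[x1, y1, x2, y2]` with a confidence score appended, which is why every mAP assertion in these tests expects a perfect 1:

```python
# COCO-style annotation bbox (xywh) -> detector result row (xyxy + score):
x, y, w, h = 50, 60, 20, 20             # annotation_1 above
result = [x, y, x + w, y + h, 1.0]      # -> [50, 60, 70, 80, 1.0]
assert result == [50, 60, 70, 80, 1.0]  # first row of _create_dummy_results
```
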
eval_results['0_bbox_mAP_50'] == 1 + assert eval_results['0_bbox_mAP_75'] == 1 + assert eval_results['1_bbox_mAP'] == 1 + assert eval_results['1_bbox_mAP_50'] == 1 + assert eval_results['1_bbox_mAP_75'] == 1 + + # build concat dataset through concatenated ann_file + coco_cfg = dict( + type='CocoDataset', + ann_file=[fake_json_file, fake_json_file], + classes=('car', ), + pipeline=[]) + concat_dataset = build_dataset(coco_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_bbox_mAP'] == 1 + assert eval_results['0_bbox_mAP_50'] == 1 + assert eval_results['0_bbox_mAP_75'] == 1 + assert eval_results['1_bbox_mAP'] == 1 + assert eval_results['1_bbox_mAP_50'] == 1 + assert eval_results['1_bbox_mAP_75'] == 1 + + # create dummy data + fake_pkl_file = osp.join(tmp_dir.name, 'fake_data.pkl') + _create_dummy_custom_pkl(fake_pkl_file) + + # test single custom dataset evaluation + custom_dataset = CustomDataset( + ann_file=fake_pkl_file, classes=('car', ), pipeline=[]) + fake_results = _create_dummy_results() + eval_results = custom_dataset.evaluate(fake_results) + assert eval_results['mAP'] == 1 + + # test concat dataset evaluation + fake_concat_results = _create_dummy_results() + _create_dummy_results() + + # build concat dataset through two config dict + custom_cfg = dict( + type='CustomDataset', + ann_file=fake_pkl_file, + classes=('car', ), + pipeline=[]) + concat_cfgs = [custom_cfg, custom_cfg] + concat_dataset = build_dataset(concat_cfgs) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_mAP'] == 1 + assert eval_results['1_mAP'] == 1 + + # build concat dataset through concatenated ann_file + concat_cfg = dict( + type='CustomDataset', + ann_file=[fake_pkl_file, fake_pkl_file], + classes=('car', ), + pipeline=[]) + concat_dataset = build_dataset(concat_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_mAP'] == 1 + assert eval_results['1_mAP'] == 1 + + # build concat dataset through explicit type + concat_cfg = dict( + type='ConcatDataset', + datasets=[custom_cfg, custom_cfg], + separate_eval=False) + concat_dataset = build_dataset(concat_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results, metric='mAP') + assert eval_results['mAP'] == 1 + assert len(concat_dataset.datasets[0].data_infos) == \ + len(concat_dataset.datasets[1].data_infos) + assert len(concat_dataset.datasets[0].data_infos) == 1 + tmp_dir.cleanup() + + +@patch('mmdet.datasets.CocoDataset.load_annotations', MagicMock) +@patch('mmdet.datasets.CustomDataset.load_annotations', MagicMock) +@patch('mmdet.datasets.XMLDataset.load_annotations', MagicMock) +@patch('mmdet.datasets.CityscapesDataset.load_annotations', MagicMock) +@patch('mmdet.datasets.CocoDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.CustomDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.XMLDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.CityscapesDataset._filter_imgs', MagicMock) +@pytest.mark.parametrize('dataset', + ['CocoDataset', 'VOCDataset', 'CityscapesDataset']) +def test_custom_classes_override_default(dataset): + dataset_class = DATASETS.get(dataset) + if dataset in ['CocoDataset', 'CityscapesDataset']: + dataset_class.coco = MagicMock() + dataset_class.cat_ids = MagicMock() + + original_classes = dataset_class.CLASSES + + # Test setting classes as a tuple + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=('bus', 'car'), + test_mode=True, + img_prefix='VOC2007' if dataset ==
'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ('bus', 'car') + + # Test setting classes as a list + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=['bus', 'car'], + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['bus', 'car'] + + # Test overriding not a subset + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=['foo'], + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['foo'] + + # Test default behavior + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=None, + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES == original_classes + + # Test sending file path + import tempfile + tmp_file = tempfile.NamedTemporaryFile() + with open(tmp_file.name, 'w') as f: + f.write('bus\ncar\n') + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=tmp_file.name, + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + tmp_file.close() + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['bus', 'car'] + + +def test_dataset_wrapper(): + CustomDataset.load_annotations = MagicMock() + CustomDataset.__getitem__ = MagicMock(side_effect=lambda idx: idx) + dataset_a = CustomDataset( + ann_file=MagicMock(), pipeline=[], test_mode=True, img_prefix='') + len_a = 10 + cat_ids_list_a = [ + np.random.randint(0, 80, num).tolist() + for num in np.random.randint(1, 20, len_a) + ] + dataset_a.data_infos = MagicMock() + dataset_a.data_infos.__len__.return_value = len_a + dataset_a.get_cat_ids = MagicMock( + side_effect=lambda idx: cat_ids_list_a[idx]) + dataset_b = CustomDataset( + ann_file=MagicMock(), pipeline=[], test_mode=True, img_prefix='') + len_b = 20 + cat_ids_list_b = [ + np.random.randint(0, 80, num).tolist() + for num in np.random.randint(1, 20, len_b) + ] + dataset_b.data_infos = MagicMock() + dataset_b.data_infos.__len__.return_value = len_b + dataset_b.get_cat_ids = MagicMock( + side_effect=lambda idx: cat_ids_list_b[idx]) + + concat_dataset = ConcatDataset([dataset_a, dataset_b]) + assert concat_dataset[5] == 5 + assert concat_dataset[25] == 15 + assert concat_dataset.get_cat_ids(5) == cat_ids_list_a[5] + assert concat_dataset.get_cat_ids(25) == cat_ids_list_b[15] + assert len(concat_dataset) == len(dataset_a) + len(dataset_b) + + repeat_dataset = RepeatDataset(dataset_a, 10) + assert repeat_dataset[5] == 5 + assert repeat_dataset[15] == 5 + assert repeat_dataset[27] == 7 + assert repeat_dataset.get_cat_ids(5) == cat_ids_list_a[5] + assert repeat_dataset.get_cat_ids(15) == cat_ids_list_a[5] + assert repeat_dataset.get_cat_ids(27) == cat_ids_list_a[7] + assert len(repeat_dataset) == 10 * len(dataset_a) + + category_freq = defaultdict(int) + for cat_ids in cat_ids_list_a: + cat_ids = set(cat_ids) + for cat_id in cat_ids: + category_freq[cat_id] += 1 + for k, v in category_freq.items(): + category_freq[k] = v / len(cat_ids_list_a) + + mean_freq = np.mean(list(category_freq.values())) + repeat_thr = mean_freq + + category_repeat = { + cat_id: max(1.0, math.sqrt(repeat_thr / cat_freq)) + for cat_id, cat_freq in category_freq.items() + } + + repeat_factors = [] + for cat_ids in 
cat_ids_list_a: + cat_ids = set(cat_ids) + repeat_factor = max({category_repeat[cat_id] for cat_id in cat_ids}) + repeat_factors.append(math.ceil(repeat_factor)) + repeat_factors_cumsum = np.cumsum(repeat_factors) + repeat_factor_dataset = ClassBalancedDataset(dataset_a, repeat_thr) + assert len(repeat_factor_dataset) == repeat_factors_cumsum[-1] + for idx in np.random.randint(0, len(repeat_factor_dataset), 3): + assert repeat_factor_dataset[idx] == bisect.bisect_right( + repeat_factors_cumsum, idx) + + +@patch('mmdet.apis.single_gpu_test', MagicMock) +@patch('mmdet.apis.multi_gpu_test', MagicMock) +@pytest.mark.parametrize('EvalHookParam', (EvalHook, DistEvalHook)) +def test_evaluation_hook(EvalHookParam): + # create dummy data + dataloader = DataLoader(torch.ones((5, 2))) + + # 0.1. dataloader is not a DataLoader object + with pytest.raises(TypeError): + EvalHookParam(dataloader=MagicMock(), interval=-1) + + # 0.2. negative interval + with pytest.raises(ValueError): + EvalHookParam(dataloader, interval=-1) + + # 1. start=None, interval=1: perform evaluation after each epoch. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, interval=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 2 + + # 2. start=1, interval=1: perform evaluation after each epoch. + runner = _build_demo_runner() + + evalhook = EvalHookParam(dataloader, start=1, interval=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 2 + + # 3. start=None, interval=2: perform evaluation after epoch 2, 4, 6, etc + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, interval=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 1 # after epoch 2 + + # 4. start=1, interval=2: perform evaluation after epoch 1, 3, 5, etc + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=1, interval=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 3 + + # 5. start=0/negative, interval=1: perform evaluation after each epoch and + # before epoch 1. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=0) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 3 # before epoch1 and after e1 & e2 + + runner = _build_demo_runner() + with pytest.warns(UserWarning): + evalhook = EvalHookParam(dataloader, start=-2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 3 # before epoch1 and after e1 & e2 + + # 6. resuming from epoch i, start = x (x<=i), interval =1: perform + # evaluation after each epoch and before the first epoch. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner._epoch = 2 + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # before & after epoch 3 + + # 7. 
resuming from epoch i, start = i+1/None, interval =1: perform + # evaluation after each epoch. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner._epoch = 1 + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # after epoch 2 & 3 + + +def _build_demo_runner(): + + class Model(nn.Module): + + def __init__(self): + super().__init__() + self.linear = nn.Linear(2, 1) + + def forward(self, x): + return self.linear(x) + + def train_step(self, x, optimizer, **kwargs): + return dict(loss=self(x)) + + def val_step(self, x, optimizer, **kwargs): + return dict(loss=self(x)) + + model = Model() + tmp_dir = tempfile.mkdtemp() + + runner = EpochBasedRunner( + model=model, work_dir=tmp_dir, logger=logging.getLogger()) + return runner + + +@pytest.mark.parametrize('classes, expected_length', [(['bus'], 2), + (['car'], 1), + (['bus', 'car'], 2)]) +def test_allow_empty_images(classes, expected_length): + dataset_class = DATASETS.get('CocoDataset') + # Filter empty images + filtered_dataset = dataset_class( + ann_file='tests/data/coco_sample.json', + img_prefix='tests/data', + pipeline=[], + classes=classes, + filter_empty_gt=True) + + # Get all + full_dataset = dataset_class( + ann_file='tests/data/coco_sample.json', + img_prefix='tests/data', + pipeline=[], + classes=classes, + filter_empty_gt=False) + + assert len(filtered_dataset) == expected_length + assert len(filtered_dataset.img_ids) == expected_length + assert len(full_dataset) == 3 + assert len(full_dataset.img_ids) == 3 + assert filtered_dataset.CLASSES == classes + assert full_dataset.CLASSES == classes diff --git a/thirdparty/mmdetection/tests/test_data/test_formatting.py b/thirdparty/mmdetection/tests/test_data/test_formatting.py new file mode 100644 index 0000000000000000000000000000000000000000..1e8ab252c3b1f5b5711afa28877d4b8f17cea71c --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_formatting.py @@ -0,0 +1,23 @@ +import os.path as osp + +from mmcv.utils import build_from_cfg + +from mmdet.datasets.builder import PIPELINES + + +def test_default_format_bundle(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + bundle = dict(type='DefaultFormatBundle') + bundle = build_from_cfg(bundle, PIPELINES) + results = load(results) + assert 'pad_shape' not in results + assert 'scale_factor' not in results + assert 'img_norm_cfg' not in results + results = bundle(results) + assert 'pad_shape' in results + assert 'scale_factor' in results + assert 'img_norm_cfg' in results diff --git a/thirdparty/mmdetection/tests/test_data/test_img_augment.py b/thirdparty/mmdetection/tests/test_data/test_img_augment.py new file mode 100644 index 0000000000000000000000000000000000000000..8f7dd9eb027fc04f300f920c4b80de750373500d --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_img_augment.py @@ -0,0 +1,203 @@ +import copy + +import mmcv +import numpy as np +from mmcv.utils import build_from_cfg +from numpy.testing import assert_array_equal + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + 
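# the toy image is a 2x4 intensity ramp replicated across 3 channels +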
results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, :, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([1, 0, 2, 0, 2, 1, 1, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def test_adjust_color(): + results = construct_toy_data() + # test without aug + transform = dict(type='ColorTransform', prob=0, level=10) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test with factor 1 + img = results['img'] + transform = dict(type='ColorTransform', prob=1, level=10) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img) + + # test with factor 0 + transform_module.factor = 0 + img_gray = mmcv.bgr2gray(img.copy()) + img_r = np.stack([img_gray, img_gray, img_gray], axis=-1) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img_r) + + # test with factor 0.5 + transform_module.factor = 0.5 + results_transformed = transform_module(copy.deepcopy(results)) + img = results['img'] + assert_array_equal( + results_transformed['img'], + np.round(np.clip((img * 0.5 + img_r * 0.5), 0, 255)).astype(img.dtype)) + + +def test_imequalize(nb_rand_test=100): + + def _imequalize(img): + # equalize the image using PIL.ImageOps.equalize + from PIL import ImageOps, Image + img = Image.fromarray(img) + equalized_img = np.asarray(ImageOps.equalize(img)) + return equalized_img + + results = construct_toy_data() + # test without aug + transform = dict(type='EqualizeTransform', prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test equalize with case step=0 + transform = dict(type='EqualizeTransform', prob=1.) + transform_module = build_from_cfg(transform, PIPELINES) + img = np.array([[0, 0, 0], [120, 120, 120], [255, 255, 255]], + dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img) + + # test equalize with randomly sampled image.
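+ # each random uint8 image must match the PIL.ImageOps.equalize reference + # defined above, pixel for pixel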
+ for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1000, 1200, 3)) * 260, 0, + 255).astype(np.uint8) + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], _imequalize(img)) + + +def test_adjust_brightness(nb_rand_test=100): + + def _adjust_brightness(img, factor): + # adjust the brightness of image using + # PIL.ImageEnhance.Brightness + from PIL.ImageEnhance import Brightness + from PIL import Image + img = Image.fromarray(img) + brightened_img = Brightness(img).enhance(factor) + return np.asarray(brightened_img) + + results = construct_toy_data() + # test without aug + transform = dict(type='BrightnessTransform', level=10, prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 1.0 + transform = dict(type='BrightnessTransform', level=10, prob=1.) + transform_module = build_from_cfg(transform, PIPELINES) + transform_module.factor = 1.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 0.0 + transform_module.factor = 0.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], + np.zeros_like(results['img'])) + + # test with randomly sampled images and factors. + for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1000, 1200, 3)) * 260, 0, + 255).astype(np.uint8) + factor = np.random.uniform() + transform_module.factor = factor + results['img'] = img + np.testing.assert_allclose( + transform_module(copy.deepcopy(results))['img'].astype(np.int32), + _adjust_brightness(img, factor).astype(np.int32), + rtol=0, + atol=1) + + +def test_adjust_contrast(nb_rand_test=100): + + def _adjust_contrast(img, factor): + from PIL.ImageEnhance import Contrast + from PIL import Image + # Image.fromarray expects RGB by default, not BGR, + # so convert from BGR to RGB + img = Image.fromarray(img[..., ::-1], mode='RGB') + contrasted_img = Contrast(img).enhance(factor) + # convert from RGB to BGR + return np.asarray(contrasted_img)[..., ::-1] + + results = construct_toy_data() + # test without aug + transform = dict(type='ContrastTransform', level=10, prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 1.0 + transform = dict(type='ContrastTransform', level=10, prob=1.) + transform_module = build_from_cfg(transform, PIPELINES) + transform_module.factor = 1.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 0.0 + transform_module.factor = 0.0 + results_transformed = transform_module(copy.deepcopy(results)) + np.testing.assert_allclose( + results_transformed['img'], + _adjust_contrast(results['img'], 0.), + rtol=0, + atol=1) + + # test adjust_contrast with randomly sampled images and factors.
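+ # both sides are widened to int32 below so the atol=1 comparison is not + # affected by uint8 wrap-around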
+ for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1200, 1000, 3)) * 260, 0, + 255).astype(np.uint8) + factor = np.random.uniform() + transform_module.factor = factor + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + # Note: the difference (at most 1) between PIL.ImageEnhance.Contrast + # and mmcv.adjust_contrast comes from their slightly different + # color-to-gray conversions. + np.testing.assert_allclose( + transform_module(copy.deepcopy(results))['img'].astype(np.int32), + _adjust_contrast(results['img'], factor).astype(np.int32), + rtol=0, + atol=1) diff --git a/thirdparty/mmdetection/tests/test_data/test_loading.py b/thirdparty/mmdetection/tests/test_data/test_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..41d125b2e57c1090662b3abe85a1c801c5699301 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_loading.py @@ -0,0 +1,90 @@ +import copy +import os.path as osp + +import mmcv +import numpy as np + +from mmdet.datasets.pipelines import (LoadImageFromFile, LoadImageFromWebcam, + LoadMultiChannelImageFromFiles) + + +class TestLoading(object): + + @classmethod + def setup_class(cls): + cls.data_prefix = osp.join(osp.dirname(__file__), '../data') + + def test_load_img(self): + results = dict( + img_prefix=self.data_prefix, img_info=dict(filename='color.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['filename'] == osp.join(self.data_prefix, 'color.jpg') + assert results['ori_filename'] == 'color.jpg' + assert results['img'].shape == (288, 512, 3) + assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3) + assert results['ori_shape'] == (288, 512, 3) + assert repr(transform) == transform.__class__.__name__ + \ + "(to_float32=False, color_type='color', " + \ + "file_client_args={'backend': 'disk'})" + + # no img_prefix + results = dict( + img_prefix=None, img_info=dict(filename='tests/data/color.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['filename'] == 'tests/data/color.jpg' + assert results['ori_filename'] == 'tests/data/color.jpg' + assert results['img'].shape == (288, 512, 3) + + # to_float32 + transform = LoadImageFromFile(to_float32=True) + results = transform(copy.deepcopy(results)) + assert results['img'].dtype == np.float32 + + # gray image + results = dict( + img_prefix=self.data_prefix, img_info=dict(filename='gray.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['img'].shape == (288, 512, 3) + assert results['img'].dtype == np.uint8 + + transform = LoadImageFromFile(color_type='unchanged') + results = transform(copy.deepcopy(results)) + assert results['img'].shape == (288, 512) + assert results['img'].dtype == np.uint8 + + def test_load_multi_channel_img(self): + results = dict( + img_prefix=self.data_prefix, + img_info=dict(filename=['color.jpg', 'color.jpg'])) + transform = LoadMultiChannelImageFromFiles() + results = transform(copy.deepcopy(results)) + assert results['filename'] == [ + osp.join(self.data_prefix, 'color.jpg'), + osp.join(self.data_prefix, 'color.jpg') + ] + assert results['ori_filename'] == ['color.jpg', 'color.jpg'] + assert results['img'].shape == (288, 512, 3, 2) + assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3, 2) + assert results['ori_shape'] == (288, 512, 3, 2) + assert results['pad_shape'] ==
(288, 512, 3, 2) + assert results['scale_factor'] == 1.0 + assert repr(transform) == transform.__class__.__name__ + \ + "(to_float32=False, color_type='unchanged', " + \ + "file_client_args={'backend': 'disk'})" + + def test_load_webcam_img(self): + img = mmcv.imread(osp.join(self.data_prefix, 'color.jpg')) + results = dict(img=img) + transform = LoadImageFromWebcam() + results = transform(copy.deepcopy(results)) + assert results['filename'] is None + assert results['ori_filename'] is None + assert results['img'].shape == (288, 512, 3) + assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3) + assert results['ori_shape'] == (288, 512, 3) diff --git a/thirdparty/mmdetection/tests/test_data/test_models_aug_test.py b/thirdparty/mmdetection/tests/test_data/test_models_aug_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8db70439b4b5b144f5ce645e0af9586bc9163718 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_models_aug_test.py @@ -0,0 +1,118 @@ +import os.path as osp + +import mmcv +import torch +from mmcv.parallel import collate +from mmcv.utils import build_from_cfg + +from mmdet.datasets.builder import PIPELINES +from mmdet.models import build_detector + + +def model_aug_test_template(cfg_file): + # get config + cfg = mmcv.Config.fromfile(cfg_file) + # init model + cfg.model.pretrained = None + model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) + + # init test pipeline and set aug test + load_cfg, multi_scale_cfg = cfg.test_pipeline + multi_scale_cfg['flip'] = True + multi_scale_cfg['img_scale'] = [(1333, 800), (800, 600), (640, 480)] + + load = build_from_cfg(load_cfg, PIPELINES) + transform = build_from_cfg(multi_scale_cfg, PIPELINES) + + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + results = transform(load(results)) + assert len(results['img']) == 6 + assert len(results['img_metas']) == 6 + + results['img'] = [collate([x]) for x in results['img']] + results['img_metas'] = [collate([x]).data[0] for x in results['img_metas']] + # aug test the model + model.eval() + with torch.no_grad(): + aug_result = model(return_loss=False, rescale=True, **results) + return aug_result + + +def test_aug_test_size(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + + # Define simple pipeline + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + + # get config + transform = dict( + type='MultiScaleFlipAug', + transforms=[], + img_scale=[(1333, 800), (800, 600), (640, 480)], + flip=True, + flip_direction=['horizontal', 'vertical']) + multi_aug_test_module = build_from_cfg(transform, PIPELINES) + + results = load(results) + results = multi_aug_test_module(load(results)) + # len(["original", "horizontal", "vertical"]) * + # len([(1333, 800), (800, 600), (640, 480)]) + assert len(results['img']) == 9 + + +def test_cascade_rcnn_aug_test(): + aug_result = model_aug_test_template( + 'configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 80 + + +def test_mask_rcnn_aug_test(): + aug_result = model_aug_test_template( + 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 2 + assert len(aug_result[0][0]) == 80 + assert len(aug_result[0][1]) == 80 + + +def test_htc_aug_test(): + aug_result = model_aug_test_template('configs/htc/htc_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 2 + assert 
len(aug_result[0][0]) == 80 + assert len(aug_result[0][1]) == 80 + + +def test_cornernet_aug_test(): + # get config + cfg = mmcv.Config.fromfile( + 'configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py') + # init model + cfg.model.pretrained = None + model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) + + # init test pipeline and set aug test + load_cfg, multi_scale_cfg = cfg.test_pipeline + multi_scale_cfg['flip'] = True + multi_scale_cfg['scale_factor'] = [0.5, 1.0, 2.0] + + load = build_from_cfg(load_cfg, PIPELINES) + transform = build_from_cfg(multi_scale_cfg, PIPELINES) + + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + results = transform(load(results)) + assert len(results['img']) == 6 + assert len(results['img_metas']) == 6 + + results['img'] = [collate([x]) for x in results['img']] + results['img_metas'] = [collate([x]).data[0] for x in results['img_metas']] + # aug test the model + model.eval() + with torch.no_grad(): + aug_result = model(return_loss=False, rescale=True, **results) + assert len(aug_result[0]) == 80 diff --git a/thirdparty/mmdetection/tests/test_data/test_rotate.py b/thirdparty/mmdetection/tests/test_data/test_rotate.py new file mode 100644 index 0000000000000000000000000000000000000000..c440451ade7c1cc5dc33df825f350d3c091b42cd --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_rotate.py @@ -0,0 +1,224 @@ +import copy + +import numpy as np +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, :, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([0, 0, 2, 0, 2, 1, 0, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def _check_fields(results, results_rotated, keys): + for key in keys: + if isinstance(results[key], (BitmapMasks, PolygonMasks)): + assert np.equal(results[key].to_ndarray(), + results_rotated[key].to_ndarray()).all() + else: + assert np.equal(results[key], results_rotated[key]).all() + + +def check_rotate(results, results_rotated): + # check image + _check_fields(results, results_rotated, results.get('img_fields', ['img'])) + # check bboxes + _check_fields(results, results_rotated, results.get('bbox_fields', [])) + # check masks + _check_fields(results, results_rotated, results.get('mask_fields', [])) + # check segmentations + _check_fields(results, results_rotated, results.get('seg_fields', [])) + # _check gt_labels + if 'gt_labels' in results: + assert np.equal(results['gt_labels'], + results_rotated['gt_labels']).all() + + +def test_rotate(): + # test 
assertion for invalid type of max_rotate_angle + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=1, max_rotate_angle=(30, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of scale + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, scale=(1.2, )) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = dict( + type='Rotate', level=2, img_fill_val=[ + 128, + ]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid number of elements in center + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, center=(0.5, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of center + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, center=[0, 0]) + build_from_cfg(transform, PIPELINES) + + # test case when no rotate aug (level=0) + results = construct_toy_data() + img_fill_val = (104, 116, 124) + seg_ignore_label = 255 + transform = dict( + type='Rotate', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + ) + rotate_module = build_from_cfg(transform, PIPELINES) + results_wo_rotate = rotate_module(copy.deepcopy(results)) + check_rotate(results, results_wo_rotate) + + # test case when no rotate aug (prob<=0) + transform = dict( + type='Rotate', level=10, prob=0., img_fill_val=img_fill_val, scale=0.6) + rotate_module = build_from_cfg(transform, PIPELINES) + results_wo_rotate = rotate_module(copy.deepcopy(results)) + check_rotate(results, results_wo_rotate) + + # test clockwise rotation with angle 90 + results = construct_toy_data() + img_fill_val = 128 + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + img_fill_val=img_fill_val, + # set random_negative_prob to 0 for clockwise rotation + random_negative_prob=0., + prob=1.) + rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + img_r = np.array([[img_fill_val, 6, 2, img_fill_val], + [img_fill_val, 7, 3, img_fill_val]]).astype(np.uint8) + img_r = np.stack([img_r, img_r, img_r], axis=-1) + results_gt = copy.deepcopy(results) + results_gt['img'] = img_r + results_gt['gt_bboxes'] = np.array([[1., 0., 2., 1.]], dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + gt_masks = np.array([[0, 1, 1, 0], [0, 0, 1, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[255, 6, 2, 255], [255, 7, 3, + 255]]).astype(results['gt_semantic_seg'].dtype) + check_rotate(results_gt, results_rotated) + + # test clockwise rotation with angle 90, PolygonMasks + results = construct_toy_data(poly2mask=False) + results_rotated = rotate_module(copy.deepcopy(results)) + gt_masks = [[np.array([2, 0, 2, 1, 1, 1, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_rotate(results_gt, results_rotated) + + # test counter-clockwise rotation with angle 90, + # and specify the rotation center + img_fill_val = (104, 116, 124) + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + center=(0, 0), + img_fill_val=img_fill_val, + # set random_negative_prob to 1 for counter-clockwise rotation + random_negative_prob=1., + prob=1.)
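+ # rotating 90 degrees about the top-left corner keeps only the original + # first column (pixels 1 and 5) inside the canvas, so the expected boxes, + # labels and masks below are all empty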
+ results = construct_toy_data() + rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + h, w = results['img'].shape[:2] + img_r = np.stack([ + np.ones((h, w)) * img_fill_val[0], + np.ones((h, w)) * img_fill_val[1], + np.ones((h, w)) * img_fill_val[2] + ], + axis=-1).astype(np.uint8) + img_r[0, 0, :] = 1 + img_r[0, 1, :] = 5 + results_gt['img'] = img_r + results_gt['gt_bboxes'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_labels'] = np.empty((0, ), dtype=np.int64) + gt_masks = np.empty((0, h, w), dtype=np.uint8) + results_gt['gt_masks'] = BitmapMasks(gt_masks, h, w) + gt_seg = (np.ones((h, w)) * 255).astype(results['gt_semantic_seg'].dtype) + gt_seg[0, 0], gt_seg[0, 1] = 1, 5 + results_gt['gt_semantic_seg'] = gt_seg + check_rotate(results_gt, results_rotated) + + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + center=(0), + img_fill_val=img_fill_val, + random_negative_prob=1., + prob=1.) + rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + check_rotate(results_gt, results_rotated) + + # test counter-clockwise rotation with angle 90, + # and specify the rotation center, PolygonMasks + results = construct_toy_data(poly2mask=False) + results_rotated = rotate_module(copy.deepcopy(results)) + gt_masks = [[np.array([0, 0, 0, 0, 1, 0, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_rotate(results_gt, results_rotated) + + # test AutoAugment equipped with Rotate + policies = [[dict(type='Rotate', level=10, prob=1.)]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) + + policies = [[ + dict(type='Rotate', level=10, prob=1.), + dict( + type='Rotate', + level=8, + max_rotate_angle=90, + center=(0), + img_fill_val=img_fill_val) + ]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) diff --git a/thirdparty/mmdetection/tests/test_data/test_sampler.py b/thirdparty/mmdetection/tests/test_data/test_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..957d6f7455a8f6dc8efd669fc839be31784783d6 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_sampler.py @@ -0,0 +1,329 @@ +import torch + +from mmdet.core.bbox.assigners import MaxIoUAssigner +from mmdet.core.bbox.samplers import (OHEMSampler, RandomSampler, + ScoreHLRSampler) + + +def test_random_sampler(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) ==
len(sample_result.neg_inds) + + +def test_random_sampler_empty_gt(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.empty(0, ).long() + assign_result = assigner.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_random_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = assigner.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def _context_for_ohem(): + import sys + from os.path import dirname + sys.path.insert(0, dirname(dirname(dirname(__file__)))) + from test_forward import _get_detector_cfg + + model, train_cfg, test_cfg = _get_detector_cfg( + 'faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + context = build_detector( + model, train_cfg=train_cfg, test_cfg=test_cfg).roi_head + return context + + +def test_ohem_sampler(): + + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_ohem_sampler_empty_gt(): + + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.LongTensor([]) + gt_bboxes_ignore = torch.Tensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + 
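# OHEM ranks candidates by loss computed through the detector's roi_head, + # supplied here as context +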
context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_ohem_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + gt_bboxes_ignore = torch.Tensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_random_sample_result(): + from mmdet.core.bbox.samplers.sampling_result import SamplingResult + SamplingResult.random(num_gts=0, num_preds=0) + SamplingResult.random(num_gts=0, num_preds=3) + SamplingResult.random(num_gts=3, num_preds=3) + SamplingResult.random(num_gts=0, num_preds=3) + SamplingResult.random(num_gts=7, num_preds=7) + SamplingResult.random(num_gts=7, num_preds=64) + SamplingResult.random(num_gts=24, num_preds=3) + + for i in range(3): + SamplingResult.random(rng=i) + + +def test_score_hlr_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + context = _context_for_ohem() + sampler = ScoreHLRSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + gt_bboxes_ignore = torch.Tensor([]) + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + # empty bbox + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.neg_inds) == 0 + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + # empty gt + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.LongTensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.pos_inds) == 0 + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + # non-empty input + bboxes = torch.FloatTensor([ + 
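# the candidate boxes below coincide exactly with the GT boxes +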
[0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) diff --git a/thirdparty/mmdetection/tests/test_data/test_shear.py b/thirdparty/mmdetection/tests/test_data/test_shear.py new file mode 100644 index 0000000000000000000000000000000000000000..3d63812521492182833fc16478ad6f44e6d1308f --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_shear.py @@ -0,0 +1,217 @@ +import copy + +import numpy as np +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, :, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([1, 0, 2, 0, 2, 1, 1, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def _check_fields(results, results_sheared, keys): + for key in keys: + if isinstance(results[key], (BitmapMasks, PolygonMasks)): + assert np.equal(results[key].to_ndarray(), + results_sheared[key].to_ndarray()).all() + else: + assert np.equal(results[key], results_sheared[key]).all() + + +def check_shear(results, results_sheared): + # _check_keys(results, results_sheared) + # check image + _check_fields(results, results_sheared, results.get('img_fields', ['img'])) + # check bboxes + _check_fields(results, results_sheared, results.get('bbox_fields', [])) + # check masks + _check_fields(results, results_sheared, results.get('mask_fields', [])) + # check segmentations + _check_fields(results, results_sheared, results.get('seg_fields', [])) + # check gt_labels + if 'gt_labels' in results: + assert np.equal(results['gt_labels'], + results_sheared['gt_labels']).all() + + +def test_shear(): + # test assertion for invalid type of max_shear_magnitude + with pytest.raises(AssertionError): + transform = dict(type='Shear', level=1, max_shear_magnitude=(0.5, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of max_shear_magnitude + with pytest.raises(AssertionError): + transform = dict(type='Shear', level=2, max_shear_magnitude=1.2) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = 
dict(type='Shear', level=2, img_fill_val=[128]) + build_from_cfg(transform, PIPELINES) + + results = construct_toy_data() + # test case when no shear aug (level=0, direction='horizontal') + img_fill_val = (104, 116, 124) + seg_ignore_label = 255 + transform = dict( + type='Shear', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + direction='horizontal') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test case when no shear aug (level=0, direction='vertical') + transform = dict( + type='Shear', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + direction='vertical') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test case when no shear aug (prob<=0) + transform = dict( + type='Shear', + level=10, + prob=0., + img_fill_val=img_fill_val, + direction='vertical') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test shear horizontally, magnitude=1 + transform = dict( + type='Shear', + level=10, + prob=1., + img_fill_val=img_fill_val, + direction='horizontal', + max_shear_magnitude=1., + random_negative_prob=0.) + shear_module = build_from_cfg(transform, PIPELINES) + results_sheared = shear_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + img_s = np.array([[1, 2, 3, 4], [0, 5, 6, 7]], dtype=np.uint8) + img_s = np.stack([img_s, img_s, img_s], axis=-1) + img_s[1, 0, :] = np.array(img_fill_val) + results_gt['img'] = img_s + results_gt['gt_bboxes'] = np.array([[0., 0., 3., 1.]], dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.array([[2., 0., 4., 1.]], + dtype=np.float32) + gt_masks = np.array([[0, 1, 1, 0], [0, 0, 1, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[1, 2, 3, 4], [255, 5, 6, 7]], dtype=results['gt_semantic_seg'].dtype) + check_shear(results_gt, results_sheared) + + # test PolygonMasks with shear horizontally, magnitude=1 + results = construct_toy_data(poly2mask=False) + results_sheared = shear_module(copy.deepcopy(results)) + gt_masks = [[np.array([1, 0, 2, 0, 3, 1, 2, 1], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_shear(results_gt, results_sheared) + + # test shear vertically, magnitude=-1 + img_fill_val = 128 + results = construct_toy_data() + transform = dict( + type='Shear', + level=10, + prob=1., + img_fill_val=img_fill_val, + direction='vertical', + max_shear_magnitude=1., + random_negative_prob=1.) 
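+ # random_negative_prob=1. negates the magnitude, so each column j shifts up + # by j pixels; only columns 0 and 1 keep any image content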
+ shear_module = build_from_cfg(transform, PIPELINES) + results_sheared = shear_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + img_s = np.array([[1, 6, img_fill_val, img_fill_val], + [5, img_fill_val, img_fill_val, img_fill_val]], + dtype=np.uint8) + img_s = np.stack([img_s, img_s, img_s], axis=-1) + results_gt['img'] = img_s + results_gt['gt_bboxes'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_labels'] = np.empty((0, ), dtype=np.int64) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + gt_masks = np.array([[0, 1, 0, 0], [0, 0, 0, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[1, 6, 255, 255], [5, 255, 255, 255]], + dtype=results['gt_semantic_seg'].dtype) + check_shear(results_gt, results_sheared) + + # test PolygonMasks with shear vertically, magnitude=-1 + results = construct_toy_data(poly2mask=False) + results_sheared = shear_module(copy.deepcopy(results)) + gt_masks = [[np.array([1, 0, 2, 0, 2, 0, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_shear(results_gt, results_sheared) + + results = construct_toy_data() + # same mask for BitmapMasks and PolygonMasks + results['gt_masks'] = BitmapMasks( + np.array([[0, 1, 1, 0], [0, 1, 1, 0]], dtype=np.uint8)[None, :, :], 2, + 4) + results['gt_bboxes'] = np.array([[1., 0., 2., 1.]], dtype=np.float32) + results_sheared_bitmap = shear_module(copy.deepcopy(results)) + check_shear(results_sheared_bitmap, results_sheared) + + # test AutoAugment equipped with Shear + policies = [[dict(type='Shear', level=10, prob=1.)]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) + + policies = [[ + dict(type='Shear', level=10, prob=1.), + dict( + type='Shear', + level=8, + img_fill_val=img_fill_val, + direction='vertical', + max_shear_magnitude=1.) 
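+ # transforms within a single AutoAugment policy are applied in sequence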
+ ]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) diff --git a/thirdparty/mmdetection/tests/test_data/test_transform.py b/thirdparty/mmdetection/tests/test_data/test_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..90a11ea36823c24a642adf519083c8c9cb1b97e3 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_transform.py @@ -0,0 +1,752 @@ +import copy +import os.path as osp + +import mmcv +import numpy as np +import pytest +import torch +from mmcv.utils import build_from_cfg + +from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps +from mmdet.datasets.builder import PIPELINES + + +def test_resize(): + # test assertion if img_scale is a list + with pytest.raises(AssertionError): + transform = dict(type='Resize', img_scale=[1333, 800], keep_ratio=True) + build_from_cfg(transform, PIPELINES) + + # test assertion if len(img_scale) > 1 while ratio_range is not None + with pytest.raises(AssertionError): + transform = dict( + type='Resize', + img_scale=[(1333, 800), (1333, 600)], + ratio_range=(0.9, 1.1), + keep_ratio=True) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid multiscale_mode + with pytest.raises(AssertionError): + transform = dict( + type='Resize', + img_scale=[(1333, 800), (1333, 600)], + keep_ratio=True, + multiscale_mode='2333') + build_from_cfg(transform, PIPELINES) + + # test assertion if both scale and scale_factor are set + with pytest.raises(AssertionError): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + transform = dict(type='Resize', img_scale=(1333, 800), keep_ratio=True) + transform = build_from_cfg(transform, PIPELINES) + results = load(results) + results['scale'] = (1333, 800) + results['scale_factor'] = 1.0 + results = transform(results) + + transform = dict(type='Resize', img_scale=(1333, 800), keep_ratio=True) + resize_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color') + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['img_fields'] = ['img', 'img2'] + + results = resize_module(results) + assert np.equal(results['img'], results['img2']).all() + + results.pop('scale') + results.pop('scale_factor') + transform = dict( + type='Resize', + img_scale=(1280, 800), + multiscale_mode='value', + keep_ratio=False) + resize_module = build_from_cfg(transform, PIPELINES) + results = resize_module(results) + assert np.equal(results['img'], results['img2']).all() + assert results['img_shape'] == (800, 1280, 3) + + +def test_flip(): + # test assertion for invalid flip_ratio + with pytest.raises(AssertionError): + transform = dict(type='RandomFlip', flip_ratio=1.5) + build_from_cfg(transform, PIPELINES) + # test assertion for 0 <= sum(flip_ratio) <= 1 + with pytest.raises(AssertionError): + transform = dict( + type='RandomFlip', + flip_ratio=[0.7, 0.8], + direction=['horizontal', 'vertical']) + build_from_cfg(transform, PIPELINES) + + # test assertion for mismatch between number of flip_ratio and direction + with pytest.raises(AssertionError): + transform = dict(type='RandomFlip',
flip_ratio=[0.4, 0.5]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid direction + with pytest.raises(AssertionError): + transform = dict( + type='RandomFlip', flip_ratio=1., direction='horizonta') + build_from_cfg(transform, PIPELINES) + + transform = dict(type='RandomFlip', flip_ratio=1.) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img', 'img2'] + + results = flip_module(results) + assert np.equal(results['img'], results['img2']).all() + + flip_module = build_from_cfg(transform, PIPELINES) + results = flip_module(results) + assert np.equal(results['img'], results['img2']).all() + assert np.equal(original_img, results['img']).all() + + # test flip_ratio is float, direction is list + transform = dict( + type='RandomFlip', + flip_ratio=0.9, + direction=['horizontal', 'vertical', 'diagonal']) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img'] + results = flip_module(results) + if results['flip']: + assert np.array_equal( + mmcv.imflip(original_img, results['flip_direction']), + results['img']) + else: + assert np.array_equal(original_img, results['img']) + + # test flip_ratio is list, direction is list + transform = dict( + type='RandomFlip', + flip_ratio=[0.3, 0.3, 0.2], + direction=['horizontal', 'vertical', 'diagonal']) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img'] + results = flip_module(results) + if results['flip']: + assert np.array_equal( + mmcv.imflip(original_img, results['flip_direction']), + results['img']) + else: + assert np.array_equal(original_img, results['img']) + + +def test_random_crop(): + # test assertion for invalid random crop + with pytest.raises(AssertionError): + transform = dict(type='RandomCrop', crop_size=(-1, 0)) + build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color') + results['img'] = img + + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # TODO: add img_fields test + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + + def create_random_bboxes(num_bboxes, img_w, img_h): + bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2)) + bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2)) + bboxes = 
np.concatenate((bboxes_left_top, bboxes_right_bottom), 1)
+        bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype(
+            np.int64)
+        return bboxes
+
+    h, w, _ = img.shape
+    gt_bboxes = create_random_bboxes(8, w, h)
+    gt_bboxes_ignore = create_random_bboxes(2, w, h)
+    results['gt_bboxes'] = gt_bboxes
+    results['gt_bboxes_ignore'] = gt_bboxes_ignore
+    transform = dict(type='RandomCrop', crop_size=(h - 20, w - 20))
+    crop_module = build_from_cfg(transform, PIPELINES)
+    results = crop_module(results)
+    assert results['img'].shape[:2] == (h - 20, w - 20)
+    # All bboxes should be preserved after crop
+    assert results['img_shape'][:2] == (h - 20, w - 20)
+    assert results['gt_bboxes'].shape[0] == 8
+    assert results['gt_bboxes_ignore'].shape[0] == 2
+
+    def area(bboxes):
+        return np.prod(bboxes[:, 2:4] - bboxes[:, 0:2], axis=1)
+
+    assert (area(results['gt_bboxes']) <= area(gt_bboxes)).all()
+    assert (area(results['gt_bboxes_ignore']) <= area(gt_bboxes_ignore)).all()
+
+    # test assertion for invalid crop_type
+    with pytest.raises(ValueError):
+        transform = dict(
+            type='RandomCrop', crop_size=(1, 1), crop_type='unknown')
+        build_from_cfg(transform, PIPELINES)
+
+    # test assertion for invalid crop_size
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='RandomCrop', crop_type='relative', crop_size=(0, 0))
+        build_from_cfg(transform, PIPELINES)
+
+    def _construct_toy_data():
+        img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8)
+        img = np.stack([img, img, img], axis=-1)
+        results = dict()
+        # image
+        results['img'] = img
+        results['img_shape'] = img.shape
+        results['img_fields'] = ['img']
+        # bboxes
+        results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore']
+        results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32)
+        results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]],
+                                               dtype=np.float32)
+        # labels
+        results['gt_labels'] = np.array([1], dtype=np.int64)
+        return results
+
+    # test crop_type "relative_range"
+    results = _construct_toy_data()
+    transform = dict(
+        type='RandomCrop',
+        crop_type='relative_range',
+        crop_size=(0.3, 0.7),
+        allow_negative_crop=True)
+    transform_module = build_from_cfg(transform, PIPELINES)
+    results_transformed = transform_module(copy.deepcopy(results))
+    h, w = results_transformed['img_shape'][:2]
+    assert int(2 * 0.3 + 0.5) <= h <= int(2 * 1 + 0.5)
+    assert int(4 * 0.7 + 0.5) <= w <= int(4 * 1 + 0.5)
+
+    # test crop_type "relative"
+    transform = dict(
+        type='RandomCrop',
+        crop_type='relative',
+        crop_size=(0.3, 0.7),
+        allow_negative_crop=True)
+    transform_module = build_from_cfg(transform, PIPELINES)
+    results_transformed = transform_module(copy.deepcopy(results))
+    h, w = results_transformed['img_shape'][:2]
+    assert h == int(2 * 0.3 + 0.5) and w == int(4 * 0.7 + 0.5)
+
+    # test crop_type "absolute"
+    transform = dict(
+        type='RandomCrop',
+        crop_type='absolute',
+        crop_size=(1, 2),
+        allow_negative_crop=True)
+    transform_module = build_from_cfg(transform, PIPELINES)
+    results_transformed = transform_module(copy.deepcopy(results))
+    h, w = results_transformed['img_shape'][:2]
+    assert h == 1 and w == 2
+
+    # test crop_type "absolute_range"
+    transform = dict(
+        type='RandomCrop',
+        crop_type='absolute_range',
+        crop_size=(1, 20),
+        allow_negative_crop=True)
+    transform_module = build_from_cfg(transform, PIPELINES)
+    results_transformed = transform_module(copy.deepcopy(results))
+    h, w = results_transformed['img_shape'][:2]
+    assert 1 <= h <= 2 and 1 <= w <= 4
+
+
+def test_min_iou_random_crop():
+
+    def create_random_bboxes(num_bboxes, img_w, img_h):
+        bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2))
+        bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2))
+        bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1)
+        bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype(
+            np.int64)
+        return bboxes
+
+    results = dict()
+    img = mmcv.imread(
+        osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color')
+    results['img'] = img
+
+    results['img_shape'] = img.shape
+    results['ori_shape'] = img.shape
+    results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore']
+    # Set initial values for default meta_keys
+    results['pad_shape'] = img.shape
+    results['scale_factor'] = 1.0
+    h, w, _ = img.shape
+    gt_bboxes = create_random_bboxes(1, w, h)
+    gt_bboxes_ignore = create_random_bboxes(1, w, h)
+    results['gt_bboxes'] = gt_bboxes
+    results['gt_bboxes_ignore'] = gt_bboxes_ignore
+    transform = dict(type='MinIoURandomCrop')
+    crop_module = build_from_cfg(transform, PIPELINES)
+
+    # Test for img_fields
+    results_test = copy.deepcopy(results)
+    results_test['img1'] = results_test['img']
+    results_test['img_fields'] = ['img', 'img1']
+    with pytest.raises(AssertionError):
+        crop_module(results_test)
+    results = crop_module(results)
+    patch = np.array([0, 0, results['img_shape'][1], results['img_shape'][0]])
+    ious = bbox_overlaps(patch.reshape(-1, 4),
+                         results['gt_bboxes']).reshape(-1)
+    ious_ignore = bbox_overlaps(
+        patch.reshape(-1, 4), results['gt_bboxes_ignore']).reshape(-1)
+    mode = crop_module.mode
+    if mode == 1:
+        assert np.equal(results['gt_bboxes'], gt_bboxes).all()
+        assert np.equal(results['gt_bboxes_ignore'], gt_bboxes_ignore).all()
+    else:
+        assert (ious >= mode).all()
+        assert (ious_ignore >= mode).all()
+
+
+def test_pad():
+    # test assertion if both size_divisor and size are None
+    with pytest.raises(AssertionError):
+        transform = dict(type='Pad')
+        build_from_cfg(transform, PIPELINES)
+
+    transform = dict(type='Pad', size_divisor=32)
+    transform = build_from_cfg(transform, PIPELINES)
+    results = dict()
+    img = mmcv.imread(
+        osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color')
+    original_img = copy.deepcopy(img)
+    results['img'] = img
+    results['img2'] = copy.deepcopy(img)
+    results['img_shape'] = img.shape
+    results['ori_shape'] = img.shape
+    # Set initial values for default meta_keys
+    results['pad_shape'] = img.shape
+    results['scale_factor'] = 1.0
+    results['img_fields'] = ['img', 'img2']
+
+    results = transform(results)
+    assert np.equal(results['img'], results['img2']).all()
+    # original img already divisible by 32
+    assert np.equal(results['img'], original_img).all()
+    img_shape = results['img'].shape
+    assert img_shape[0] % 32 == 0
+    assert img_shape[1] % 32 == 0
+
+    resize_transform = dict(
+        type='Resize', img_scale=(1333, 800), keep_ratio=True)
+    resize_module = build_from_cfg(resize_transform, PIPELINES)
+    results = resize_module(results)
+    results = transform(results)
+    img_shape = results['img'].shape
+    assert np.equal(results['img'], results['img2']).all()
+    assert img_shape[0] % 32 == 0
+    assert img_shape[1] % 32 == 0
+
+
+def test_normalize():
+    img_norm_cfg = dict(
+        mean=[123.675, 116.28, 103.53],
+        std=[58.395, 57.12, 57.375],
+        to_rgb=True)
+    transform = dict(type='Normalize', **img_norm_cfg)
+    transform = build_from_cfg(transform, PIPELINES)
+    results = dict()
+    img = mmcv.imread(
+        osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color')
+    original_img = copy.deepcopy(img)
+    results['img'] = img
+
results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img', 'img2'] + + results = transform(results) + assert np.equal(results['img'], results['img2']).all() + + mean = np.array(img_norm_cfg['mean']) + std = np.array(img_norm_cfg['std']) + converted_img = (original_img[..., ::-1] - mean) / std + assert np.allclose(results['img'], converted_img) + + +def test_albu_transform(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../data'), + img_info=dict(filename='color.jpg')) + + # Define simple pipeline + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + + albu_transform = dict( + type='Albu', transforms=[dict(type='ChannelShuffle', p=1)]) + albu_transform = build_from_cfg(albu_transform, PIPELINES) + + normalize = dict(type='Normalize', mean=[0] * 3, std=[0] * 3, to_rgb=True) + normalize = build_from_cfg(normalize, PIPELINES) + + # Execute transforms + results = load(results) + results = albu_transform(results) + results = normalize(results) + + assert results['img'].dtype == np.float32 + + +def test_random_center_crop_pad(): + # test assertion for invalid crop_size while test_mode=False + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(-1, 0), + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid ratios while test_mode=False + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(1.0), + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid mean, std and to_rgb + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + mean=None, + std=None, + to_rgb=None, + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid crop_size while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=None, + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid ratios while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=(0.9, 1.0, 1.1), + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid border while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=128, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid test_pad_mode while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('do_nothing', 
100))
+        build_from_cfg(transform, PIPELINES)
+
+    results = dict(
+        img_prefix=osp.join(osp.dirname(__file__), '../data'),
+        img_info=dict(filename='color.jpg'))
+
+    load = dict(type='LoadImageFromFile', to_float32=True)
+    load = build_from_cfg(load, PIPELINES)
+    results = load(results)
+    test_results = copy.deepcopy(results)
+
+    def create_random_bboxes(num_bboxes, img_w, img_h):
+        bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2))
+        bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2))
+        bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1)
+        bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype(
+            np.int64)
+        return bboxes
+
+    h, w, _ = results['img_shape']
+    gt_bboxes = create_random_bboxes(8, w, h)
+    gt_bboxes_ignore = create_random_bboxes(2, w, h)
+    results['gt_bboxes'] = gt_bboxes
+    results['gt_bboxes_ignore'] = gt_bboxes_ignore
+    train_transform = dict(
+        type='RandomCenterCropPad',
+        crop_size=(h - 20, w - 20),
+        ratios=(1.0, ),
+        border=128,
+        mean=[123.675, 116.28, 103.53],
+        std=[58.395, 57.12, 57.375],
+        to_rgb=True,
+        test_mode=False,
+        test_pad_mode=None)
+    crop_module = build_from_cfg(train_transform, PIPELINES)
+    train_results = crop_module(results)
+    assert train_results['img'].shape[:2] == (h - 20, w - 20)
+    # All bboxes should be preserved after crop
+    assert train_results['pad_shape'][:2] == (h - 20, w - 20)
+    assert train_results['gt_bboxes'].shape[0] == 8
+    assert train_results['gt_bboxes_ignore'].shape[0] == 2
+
+    test_transform = dict(
+        type='RandomCenterCropPad',
+        crop_size=None,
+        ratios=None,
+        border=None,
+        mean=[123.675, 116.28, 103.53],
+        std=[58.395, 57.12, 57.375],
+        to_rgb=True,
+        test_mode=True,
+        test_pad_mode=('logical_or', 127))
+    crop_module = build_from_cfg(test_transform, PIPELINES)
+
+    test_results = crop_module(test_results)
+    assert test_results['img'].shape[:2] == (h | 127, w | 127)
+    assert test_results['pad_shape'][:2] == (h | 127, w | 127)
+    assert 'border' in test_results
+
+
+def test_multi_scale_flip_aug():
+    # test assertion if both scale_factor and img_scale are given
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='MultiScaleFlipAug',
+            scale_factor=1.0,
+            img_scale=[(1333, 800)],
+            transforms=[dict(type='Resize')])
+        build_from_cfg(transform, PIPELINES)
+
+    # test assertion if both scale_factor and img_scale are None
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='MultiScaleFlipAug',
+            scale_factor=None,
+            img_scale=None,
+            transforms=[dict(type='Resize')])
+        build_from_cfg(transform, PIPELINES)
+
+    # test assertion if img_scale is not tuple or list of tuple
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='MultiScaleFlipAug',
+            img_scale=[1333, 800],
+            transforms=[dict(type='Resize')])
+        build_from_cfg(transform, PIPELINES)
+
+    # test assertion if flip_direction is not str or list of str
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='MultiScaleFlipAug',
+            img_scale=[(1333, 800)],
+            flip_direction=1,
+            transforms=[dict(type='Resize')])
+        build_from_cfg(transform, PIPELINES)
+
+    scale_transform = dict(
+        type='MultiScaleFlipAug',
+        img_scale=[(1333, 800), (1333, 640)],
+        transforms=[dict(type='Resize', keep_ratio=True)])
+    transform = build_from_cfg(scale_transform, PIPELINES)
+
+    results = dict()
+    img = mmcv.imread(
+        osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color')
+    results['img'] = img
+    results['img_shape'] = img.shape
+    results['ori_shape'] = img.shape
+    # Set initial values for default meta_keys
+
results['pad_shape'] = img.shape
+    results['img_fields'] = ['img']
+
+    scale_results = transform(copy.deepcopy(results))
+    assert len(scale_results['img']) == 2
+    assert scale_results['img'][0].shape == (750, 1333, 3)
+    assert scale_results['img_shape'][0] == (750, 1333, 3)
+    assert scale_results['img'][1].shape == (640, 1138, 3)
+    assert scale_results['img_shape'][1] == (640, 1138, 3)
+
+    scale_factor_transform = dict(
+        type='MultiScaleFlipAug',
+        scale_factor=[0.8, 1.0, 1.2],
+        transforms=[dict(type='Resize', keep_ratio=False)])
+    transform = build_from_cfg(scale_factor_transform, PIPELINES)
+    scale_factor_results = transform(copy.deepcopy(results))
+    assert len(scale_factor_results['img']) == 3
+    assert scale_factor_results['img'][0].shape == (230, 409, 3)
+    assert scale_factor_results['img_shape'][0] == (230, 409, 3)
+    assert scale_factor_results['img'][1].shape == (288, 512, 3)
+    assert scale_factor_results['img_shape'][1] == (288, 512, 3)
+    assert scale_factor_results['img'][2].shape == (345, 614, 3)
+    assert scale_factor_results['img_shape'][2] == (345, 614, 3)
+
+    # test pipeline of coco_detection
+    results = dict(
+        img_prefix=osp.join(osp.dirname(__file__), '../data'),
+        img_info=dict(filename='color.jpg'))
+    load_cfg, multi_scale_cfg = mmcv.Config.fromfile(
+        'configs/_base_/datasets/coco_detection.py').test_pipeline
+    load = build_from_cfg(load_cfg, PIPELINES)
+    transform = build_from_cfg(multi_scale_cfg, PIPELINES)
+    results = transform(load(results))
+    assert len(results['img']) == 1
+    assert len(results['img_metas']) == 1
+    assert isinstance(results['img'][0], torch.Tensor)
+    assert isinstance(results['img_metas'][0], mmcv.parallel.DataContainer)
+    assert results['img_metas'][0].data['ori_shape'] == (288, 512, 3)
+    assert results['img_metas'][0].data['img_shape'] == (750, 1333, 3)
+    assert results['img_metas'][0].data['pad_shape'] == (768, 1344, 3)
+    assert results['img_metas'][0].data['scale_factor'].tolist() == [
+        2.603515625, 2.6041667461395264, 2.603515625, 2.6041667461395264
+    ]
+
+
+def test_cutout():
+    # test n_holes
+    with pytest.raises(AssertionError):
+        transform = dict(type='CutOut', n_holes=(5, 3), cutout_shape=(8, 8))
+        build_from_cfg(transform, PIPELINES)
+    with pytest.raises(AssertionError):
+        transform = dict(type='CutOut', n_holes=(3, 4, 5), cutout_shape=(8, 8))
+        build_from_cfg(transform, PIPELINES)
+    # test cutout_shape and cutout_ratio
+    with pytest.raises(AssertionError):
+        transform = dict(type='CutOut', n_holes=1, cutout_shape=8)
+        build_from_cfg(transform, PIPELINES)
+    with pytest.raises(AssertionError):
+        transform = dict(type='CutOut', n_holes=1, cutout_ratio=0.2)
+        build_from_cfg(transform, PIPELINES)
+    # exactly one of cutout_shape and cutout_ratio should be given
+    with pytest.raises(AssertionError):
+        transform = dict(type='CutOut', n_holes=1)
+        build_from_cfg(transform, PIPELINES)
+    with pytest.raises(AssertionError):
+        transform = dict(
+            type='CutOut',
+            n_holes=1,
+            cutout_shape=(2, 2),
+            cutout_ratio=(0.4, 0.4))
+        build_from_cfg(transform, PIPELINES)
+
+    results = dict()
+    img = mmcv.imread(
+        osp.join(osp.dirname(__file__), '../data/color.jpg'), 'color')
+
+    results['img'] = img
+    results['img_shape'] = img.shape
+    results['ori_shape'] = img.shape
+    results['pad_shape'] = img.shape
+    results['img_fields'] = ['img']
+
+    transform = dict(type='CutOut', n_holes=1, cutout_shape=(10, 10))
+    cutout_module = build_from_cfg(transform, PIPELINES)
+    cutout_result = cutout_module(copy.deepcopy(results))
+    assert cutout_result['img'].sum() < img.sum()
+
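+    # cutout_ratio sizes the holes as a fraction of the image; with the
+    # default fill value (0) the pixel sum should still drop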
+ transform = dict(type='CutOut', n_holes=1, cutout_ratio=(0.8, 0.8)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() < img.sum() + + transform = dict( + type='CutOut', + n_holes=(2, 4), + cutout_shape=[(10, 10), (15, 15)], + fill_in=(255, 255, 255)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() > img.sum() + + transform = dict( + type='CutOut', + n_holes=1, + cutout_ratio=(0.8, 0.8), + fill_in=(255, 255, 255)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() > img.sum() diff --git a/thirdparty/mmdetection/tests/test_data/test_translate.py b/thirdparty/mmdetection/tests/test_data/test_translate.py new file mode 100644 index 0000000000000000000000000000000000000000..87f37d0d8fc6aeda4200e8b94f7b23d1a6069444 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_translate.py @@ -0,0 +1,515 @@ +import copy + +import numpy as np +import pycocotools.mask as maskUtils +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def _check_keys(results, results_translated): + assert len(set(results.keys()).difference(set( + results_translated.keys()))) == 0 + assert len(set(results_translated.keys()).difference(set( + results.keys()))) == 0 + + +def _pad(h, w, c, pad_val, axis=-1, dtype=np.float32): + assert isinstance(pad_val, (int, float, tuple)) + if isinstance(pad_val, (int, float)): + pad_val = tuple([pad_val] * c) + assert len(pad_val) == c + pad_data = np.stack([np.ones((h, w)) * pad_val[i] for i in range(c)], + axis=axis).astype(dtype) + return pad_data + + +def _construct_img(results): + h, w = results['img_info']['height'], results['img_info']['width'] + img = np.random.uniform(0, 1, (h, w, 3)) * 255 + img = img.astype(np.uint8) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + + +def _construct_ann_info(h=427, w=640, c=3): + bboxes = np.array( + [[222.62, 217.82, 241.81, 238.93], [50.5, 329.7, 130.23, 384.96], + [175.47, 331.97, 254.8, 389.26]], + dtype=np.float32) + labels = np.array([9, 2, 2], dtype=np.int64) + bboxes_ignore = np.array([[59., 253., 311., 337.]], dtype=np.float32) + masks = [ + [[222.62, 217.82, 222.62, 238.93, 241.81, 238.93, 240.85, 218.78]], + [[ + 69.19, 332.17, 82.39, 330.25, 97.24, 329.7, 114.01, 331.35, 116.76, + 337.39, 119.78, 343.17, 128.03, 344.54, 128.86, 347.84, 124.18, + 350.59, 129.96, 358.01, 130.23, 366.54, 129.13, 377.81, 125.28, + 382.48, 119.78, 381.93, 117.31, 377.54, 116.21, 379.46, 114.83, + 382.21, 107.14, 383.31, 105.49, 378.36, 77.99, 377.54, 75.79, + 381.11, 69.74, 381.93, 66.72, 378.91, 65.07, 377.81, 63.15, 379.19, + 62.32, 383.31, 52.7, 384.96, 50.5, 379.46, 51.32, 375.61, 51.6, + 370.11, 51.6, 364.06, 53.52, 354.99, 56.27, 344.54, 59.57, 336.29, + 66.45, 332.72 + ]], + [[ + 175.47, 386.86, 175.87, 376.44, 177.08, 351.2, 189.1, 332.77, + 194.31, 331.97, 236.37, 332.77, 244.79, 342.39, 246.79, 346.79, + 248.39, 345.99, 251.6, 345.59, 254.8, 348.0, 254.8, 351.6, 250.0, + 352.0, 250.0, 354.81, 251.6, 358.41, 251.6, 364.42, 251.6, 370.03, + 252.8, 378.04, 252.8, 384.05, 250.8, 387.26, 246.39, 387.66, + 245.19, 386.46, 242.38, 388.86, 233.97, 389.26, 232.77, 388.06, 
+ 232.77, 383.65, 195.91, 381.25, 195.91, 384.86, 191.1, 384.86, + 187.49, 385.26, 186.69, 382.85, 184.29, 382.45, 183.09, 387.26, + 178.68, 388.46, 176.28, 387.66 + ]] + ] + return dict( + bboxes=bboxes, labels=labels, bboxes_ignore=bboxes_ignore, masks=masks) + + +def _load_bboxes(results): + ann_info = results['ann_info'] + results['gt_bboxes'] = ann_info['bboxes'].copy() + results['bbox_fields'] = ['gt_bboxes'] + gt_bboxes_ignore = ann_info.get('bboxes_ignore', None) + if gt_bboxes_ignore is not None: + results['gt_bboxes_ignore'] = gt_bboxes_ignore.copy() + results['bbox_fields'].append('gt_bboxes_ignore') + + +def _load_labels(results): + results['gt_labels'] = results['ann_info']['labels'].copy() + + +def _poly2mask(mask_ann, img_h, img_w): + if isinstance(mask_ann, list): + # polygon -- a single object might consist of multiple parts + # we merge all parts into one mask rle code + rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) + rle = maskUtils.merge(rles) + elif isinstance(mask_ann['counts'], list): + # uncompressed RLE + rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) + else: + # rle + rle = mask_ann + mask = maskUtils.decode(rle) + return mask + + +def _process_polygons(polygons): + polygons = [np.array(p) for p in polygons] + valid_polygons = [] + for polygon in polygons: + if len(polygon) % 2 == 0 and len(polygon) >= 6: + valid_polygons.append(polygon) + return valid_polygons + + +def _load_masks(results, poly2mask=True): + h, w = results['img_info']['height'], results['img_info']['width'] + gt_masks = results['ann_info']['masks'] + if poly2mask: + gt_masks = BitmapMasks([_poly2mask(mask, h, w) for mask in gt_masks], + h, w) + else: + gt_masks = PolygonMasks( + [_process_polygons(polygons) for polygons in gt_masks], h, w) + results['gt_masks'] = gt_masks + results['mask_fields'] = ['gt_masks'] + + +def _construct_semantic_seg(results): + h, w = results['img_info']['height'], results['img_info']['width'] + seg_toy = (np.random.uniform(0, 1, (h, w)) * 255).astype(np.uint8) + results['gt_semantic_seg'] = seg_toy + results['seg_fields'] = ['gt_semantic_seg'] + + +def construct_toy_data(poly2mask=True): + img_info = dict(height=427, width=640) + ann_info = _construct_ann_info(h=img_info['height'], w=img_info['width']) + results = dict(img_info=img_info, ann_info=ann_info) + # construct image, similar to 'LoadImageFromFile' + _construct_img(results) + # 'LoadAnnotations' (bboxes, labels, masks, semantic_seg) + _load_bboxes(results) + _load_labels(results) + _load_masks(results, poly2mask) + _construct_semantic_seg(results) + return results + + +def test_translate(): + # test assertion for invalid value of level + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=-1) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of level + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=[1]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid prob + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=1, prob=-0.5) + build_from_cfg(transform, PIPELINES) + + # test assertion for the num of elements in tuple img_fill_val + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=(128, 128, 128, 128)) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = dict( + type='Translate', level=1, img_fill_val=[128, 128, 128]) + 
build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of img_fill_val + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=(128, -1, 256)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of direction + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=128, direction='diagonal') + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of max_translate_offset + with pytest.raises(AssertionError): + transform = dict( + type='Translate', + level=1, + img_fill_val=128, + max_translate_offset=(250., )) + build_from_cfg(transform, PIPELINES) + + # construct toy data example for unit test + results = construct_toy_data() + + def _check_bbox_mask(results, + results_translated, + offset, + direction, + min_size=0.): + # The key correspondence from bboxes to labels and masks. + bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def _translate_bbox(bboxes, offset, direction, max_h, max_w): + if direction == 'horizontal': + bboxes[:, 0::2] = bboxes[:, 0::2] + offset + elif direction == 'vertical': + bboxes[:, 1::2] = bboxes[:, 1::2] + offset + else: + raise ValueError + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, max_w) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, max_h) + return bboxes + + h, w, c = results_translated['img'].shape + for key in results_translated.get('bbox_fields', []): + label_key, mask_key = bbox2label[key], bbox2mask[key] + # check length of key + if label_key in results: + assert len(results_translated[key]) == len( + results_translated[label_key]) + if mask_key in results: + assert len(results_translated[key]) == len( + results_translated[mask_key]) + # construct gt_bboxes + gt_bboxes = _translate_bbox( + copy.deepcopy(results[key]), offset, direction, h, w) + valid_inds = (gt_bboxes[:, 2] - gt_bboxes[:, 0] > min_size) & ( + gt_bboxes[:, 3] - gt_bboxes[:, 1] > min_size) + gt_bboxes = gt_bboxes[valid_inds] + # check bbox + assert np.equal(gt_bboxes, results_translated[key]).all() + + # construct gt_masks + if mask_key not in results: + # e.g. 
'gt_masks_ignore' + continue + masks, masks_translated = results[mask_key].to_ndarray( + ), results_translated[mask_key].to_ndarray() + assert masks.dtype == masks_translated.dtype + if direction == 'horizontal': + masks_pad = _pad( + h, + abs(offset), + masks.shape[0], + 0, + axis=0, + dtype=masks.dtype) + if offset <= 0: + # left shift + gt_masks = np.concatenate( + (masks[:, :, -offset:], masks_pad), axis=-1) + else: + # right shift + gt_masks = np.concatenate( + (masks_pad, masks[:, :, :-offset]), axis=-1) + else: + masks_pad = _pad( + abs(offset), + w, + masks.shape[0], + 0, + axis=0, + dtype=masks.dtype) + if offset <= 0: + # top shift + gt_masks = np.concatenate( + (masks[:, -offset:, :], masks_pad), axis=1) + else: + # bottom shift + gt_masks = np.concatenate( + (masks_pad, masks[:, :-offset, :]), axis=1) + gt_masks = gt_masks[valid_inds] + # check masks + assert np.equal(gt_masks, masks_translated).all() + + def _check_img_seg(results, results_translated, keys, offset, fill_val, + direction): + for key in keys: + assert isinstance(results_translated[key], type(results[key])) + # assert type(results[key]) == type(results_translated[key]) + data, data_translated = results[key], results_translated[key] + if 'mask' in key: + data, data_translated = data.to_ndarray( + ), data_translated.to_ndarray() + assert data.dtype == data_translated.dtype + if 'img' in key: + data, data_translated = data.transpose( + (2, 0, 1)), data_translated.transpose((2, 0, 1)) + elif 'seg' in key: + data, data_translated = data[None, :, :], data_translated[ + None, :, :] + c, h, w = data.shape + if direction == 'horizontal': + data_pad = _pad( + h, abs(offset), c, fill_val, axis=0, dtype=data.dtype) + if offset <= 0: + # left shift + data_gt = np.concatenate((data[:, :, -offset:], data_pad), + axis=-1) + else: + # right shift + data_gt = np.concatenate((data_pad, data[:, :, :-offset]), + axis=-1) + else: + data_pad = _pad( + abs(offset), w, c, fill_val, axis=0, dtype=data.dtype) + if offset <= 0: + # top shift + data_gt = np.concatenate((data[:, -offset:, :], data_pad), + axis=1) + else: + # bottom shift + data_gt = np.concatenate((data_pad, data[:, :-offset, :]), + axis=1) + if 'mask' in key: + # TODO assertion here. ``data_translated`` must be a subset + # (or equal) of ``data_gt`` + pass + else: + assert np.equal(data_gt, data_translated).all() + + def check_translate(results, + results_translated, + offset, + img_fill_val, + seg_ignore_label, + direction, + min_size=0): + # check keys + _check_keys(results, results_translated) + # check image + _check_img_seg(results, results_translated, + results.get('img_fields', ['img']), offset, + img_fill_val, direction) + # check segmentation map + _check_img_seg(results, results_translated, + results.get('seg_fields', []), offset, seg_ignore_label, + direction) + # check masks and bboxes + _check_bbox_mask(results, results_translated, offset, direction, + min_size) + + # test case when level=0 (without translate aug) + img_fill_val = (104, 116, 124) + seg_ignore_label = 255 + transform = dict( + type='Translate', + level=0, + prob=1.0, + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label) + translate_module = build_from_cfg(transform, PIPELINES) + results_wo_translate = translate_module(copy.deepcopy(results)) + check_translate( + copy.deepcopy(results), + results_wo_translate, + 0, + img_fill_val, + seg_ignore_label, + 'horizontal', + ) + + # test case when level>0 and translate horizontally (left shift). 
+ transform = dict( + type='Translate', + level=8, + prob=1.0, + img_fill_val=img_fill_val, + random_negative_prob=1.0, + seg_ignore_label=seg_ignore_label) + translate_module = build_from_cfg(transform, PIPELINES) + offset = translate_module.offset + results_translated = translate_module(copy.deepcopy(results)) + check_translate( + copy.deepcopy(results), + results_translated, + -offset, + img_fill_val, + seg_ignore_label, + 'horizontal', + ) + + # test case when level>0 and translate horizontally (right shift). + translate_module.random_negative_prob = 0.0 + results_translated = translate_module(copy.deepcopy(results)) + check_translate( + copy.deepcopy(results), + results_translated, + offset, + img_fill_val, + seg_ignore_label, + 'horizontal', + ) + + # test case when level>0 and translate vertically (top shift). + transform = dict( + type='Translate', + level=10, + prob=1.0, + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + random_negative_prob=1.0, + direction='vertical') + translate_module = build_from_cfg(transform, PIPELINES) + offset = translate_module.offset + results_translated = translate_module(copy.deepcopy(results)) + check_translate( + copy.deepcopy(results), results_translated, -offset, img_fill_val, + seg_ignore_label, 'vertical') + + # test case when level>0 and translate vertically (bottom shift). + translate_module.random_negative_prob = 0.0 + results_translated = translate_module(copy.deepcopy(results)) + check_translate( + copy.deepcopy(results), results_translated, offset, img_fill_val, + seg_ignore_label, 'vertical') + + # test case when no translation is called (prob<=0) + transform = dict( + type='Translate', + level=8, + prob=0.0, + img_fill_val=img_fill_val, + random_negative_prob=0.0, + seg_ignore_label=seg_ignore_label) + translate_module = build_from_cfg(transform, PIPELINES) + results_translated = translate_module(copy.deepcopy(results)) + + # test translate vertically with PolygonMasks (top shift) + results = construct_toy_data(False) + transform = dict( + type='Translate', + level=10, + prob=1.0, + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + direction='vertical') + translate_module = build_from_cfg(transform, PIPELINES) + offset = translate_module.offset + translate_module.random_negative_prob = 1.0 + results_translated = translate_module(copy.deepcopy(results)) + + def _translated_gt(masks, direction, offset, out_shape): + translated_masks = [] + for poly_per_obj in masks: + translated_poly_per_obj = [] + for p in poly_per_obj: + p = p.copy() + if direction == 'horizontal': + p[0::2] = np.clip(p[0::2] + offset, 0, out_shape[1]) + elif direction == 'vertical': + p[1::2] = np.clip(p[1::2] + offset, 0, out_shape[0]) + if PolygonMasks([[p]], *out_shape).areas[0] > 0: + # filter invalid (area=0) + translated_poly_per_obj.append(p) + if len(translated_poly_per_obj): + translated_masks.append(translated_poly_per_obj) + translated_masks = PolygonMasks(translated_masks, *out_shape) + return translated_masks + + h, w = results['img_shape'][:2] + for key in results.get('mask_fields', []): + masks = results[key] + translated_gt = _translated_gt(masks, 'vertical', -offset, (h, w)) + assert np.equal(results_translated[key].to_ndarray(), + translated_gt.to_ndarray()).all() + + # test translate horizontally with PolygonMasks (right shift) + results = construct_toy_data(False) + transform = dict( + type='Translate', + level=8, + prob=1.0, + img_fill_val=img_fill_val, + random_negative_prob=0.0, + seg_ignore_label=seg_ignore_label) 
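+    # random_negative_prob=0.0 above means the sampled offset is never
+    # negated, i.e. this exercises a right shift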
+ translate_module = build_from_cfg(transform, PIPELINES) + offset = translate_module.offset + results_translated = translate_module(copy.deepcopy(results)) + h, w = results['img_shape'][:2] + for key in results.get('mask_fields', []): + masks = results[key] + translated_gt = _translated_gt(masks, 'horizontal', offset, (h, w)) + assert np.equal(results_translated[key].to_ndarray(), + translated_gt.to_ndarray()).all() + + # test AutoAugment equipped with Translate + policies = [[dict(type='Translate', level=10, prob=1.)]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) + + policies = [[ + dict(type='Translate', level=10, prob=1.), + dict( + type='Translate', + level=8, + img_fill_val=img_fill_val, + direction='vertical') + ]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) diff --git a/thirdparty/mmdetection/tests/test_data/test_utils.py b/thirdparty/mmdetection/tests/test_data/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2a8a23f787e4bc52c13d2ce67bfdcfae79f50c07 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_data/test_utils.py @@ -0,0 +1,61 @@ +import pytest + +from mmdet.datasets import replace_ImageToTensor + + +def test_replace_ImageToTensor(): + # with MultiScaleFlipAug + pipelines = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize'), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) + ] + expected_pipelines = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize'), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) + ] + with pytest.warns(UserWarning): + assert expected_pipelines == replace_ImageToTensor(pipelines) + + # without MultiScaleFlipAug + pipelines = [ + dict(type='LoadImageFromFile'), + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize'), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ] + expected_pipelines = [ + dict(type='LoadImageFromFile'), + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize'), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ] + with pytest.warns(UserWarning): + assert expected_pipelines == replace_ImageToTensor(pipelines) diff --git a/thirdparty/mmdetection/tests/test_eval_hook.py b/thirdparty/mmdetection/tests/test_eval_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..2231aa723258d7bc594d92bb8921121b433d5c26 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_eval_hook.py @@ -0,0 +1,263 @@ +import os.path as osp +import tempfile +import unittest.mock as mock +from collections import OrderedDict +from unittest.mock import MagicMock, patch + +import pytest +import torch +import torch.nn as nn +from mmcv.runner import EpochBasedRunner, build_optimizer +from mmcv.utils import get_logger +from torch.utils.data 
import DataLoader, Dataset
+
+from mmdet.core import DistEvalHook, EvalHook
+
+
+class ExampleDataset(Dataset):
+
+    def __init__(self):
+        self.index = 0
+        self.eval_result = [0.1, 0.4, 0.3, 0.7, 0.2, 0.05, 0.4, 0.6]
+
+    def __getitem__(self, idx):
+        results = dict(imgs=torch.tensor([1]))
+        return results
+
+    def __len__(self):
+        return 1
+
+    @mock.create_autospec
+    def evaluate(self, results, logger=None):
+        pass
+
+
+class EvalDataset(ExampleDataset):
+
+    def evaluate(self, results, logger=None):
+        mean_ap = self.eval_result[self.index]
+        output = OrderedDict(mAP=mean_ap, index=self.index, score=mean_ap)
+        self.index += 1
+        return output
+
+
+class ExampleModel(nn.Module):
+
+    def __init__(self):
+        super().__init__()
+        self.conv = nn.Linear(1, 1)
+        self.test_cfg = None
+
+    def forward(self, imgs, rescale=False, return_loss=False):
+        return imgs
+
+    def train_step(self, data_batch, optimizer, **kwargs):
+        outputs = {
+            'loss': 0.5,
+            'log_vars': {
+                'accuracy': 0.98
+            },
+            'num_samples': 1
+        }
+        return outputs
+
+
+@pytest.mark.skipif(
+    not torch.cuda.is_available(), reason='requires CUDA support')
+@patch('mmdet.apis.single_gpu_test', MagicMock)
+@patch('mmdet.apis.multi_gpu_test', MagicMock)
+@pytest.mark.parametrize('EvalHookCls', (EvalHook, DistEvalHook))
+def test_eval_hook(EvalHookCls):
+    with pytest.raises(TypeError):
+        # dataloader must be a pytorch DataLoader
+        test_dataset = ExampleDataset()
+        data_loader = [
+            DataLoader(
+                test_dataset,
+                batch_size=1,
+                sampler=None,
+                num_workers=0,
+                shuffle=False)
+        ]
+        EvalHookCls(data_loader)
+
+    with pytest.raises(KeyError):
+        # rule must be in keys of rule_map
+        test_dataset = ExampleDataset()
+        data_loader = DataLoader(
+            test_dataset,
+            batch_size=1,
+            sampler=None,
+            num_workers=0,
+            shuffle=False)
+        EvalHookCls(data_loader, save_best='auto', rule='unsupport')
+
+    with pytest.raises(ValueError):
+        # key_indicator must be valid when rule_map is None
+        test_dataset = ExampleDataset()
+        data_loader = DataLoader(
+            test_dataset,
+            batch_size=1,
+            sampler=None,
+            num_workers=0,
+            shuffle=False)
+        EvalHookCls(data_loader, save_best='unsupport')
+
+    optimizer_cfg = dict(
+        type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+
+    test_dataset = ExampleDataset()
+    loader = DataLoader(test_dataset, batch_size=1)
+    model = ExampleModel()
+    optimizer = build_optimizer(model, optimizer_cfg)
+
+    data_loader = DataLoader(test_dataset, batch_size=1)
+    eval_hook = EvalHookCls(data_loader, save_best=None)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        logger = get_logger('test_eval')
+        runner = EpochBasedRunner(
+            model=model,
+            batch_processor=None,
+            optimizer=optimizer,
+            work_dir=tmpdir,
+            logger=logger)
+        runner.register_hook(eval_hook)
+        runner.run([loader], [('train', 1)], 1)
+        assert runner.meta is None or 'best_score' not in runner.meta[
+            'hook_msgs']
+        assert runner.meta is None or 'best_ckpt' not in runner.meta[
+            'hook_msgs']
+
+    # when `save_best` is set to 'auto', first metric will be used.
+ loader = DataLoader(EvalDataset(), batch_size=1) + model = ExampleModel() + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, interval=1, save_best='auto') + + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'epoch_4.pth') + link_path = osp.join(tmpdir, 'best_mAP.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + loader = DataLoader(EvalDataset(), batch_size=1) + model = ExampleModel() + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, interval=1, save_best='mAP') + + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'epoch_4.pth') + link_path = osp.join(tmpdir, 'best_mAP.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls( + data_loader, interval=1, save_best='score', rule='greater') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'epoch_4.pth') + link_path = osp.join(tmpdir, 'best_score.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP', rule='less') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'epoch_6.pth') + link_path = osp.join(tmpdir, 'best_mAP.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.05 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 2) + + real_path = osp.join(tmpdir, 
'epoch_2.pth') + link_path = osp.join(tmpdir, 'best_mAP.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.4 + + resume_from = osp.join(tmpdir, 'latest.pth') + loader = DataLoader(ExampleDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.resume(resume_from) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'epoch_4.pth') + link_path = osp.join(tmpdir, 'best_mAP.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert osp.exists(link_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 diff --git a/thirdparty/mmdetection/tests/test_fp16.py b/thirdparty/mmdetection/tests/test_fp16.py new file mode 100644 index 0000000000000000000000000000000000000000..afcfe260e553111bba922219bb8fdf7868fec0e2 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_fp16.py @@ -0,0 +1,300 @@ +import numpy as np +import pytest +import torch +import torch.nn as nn +from mmcv.runner import auto_fp16, force_fp32 +from mmcv.runner.fp16_utils import cast_tensor_type + + +def test_cast_tensor_type(): + inputs = torch.FloatTensor([5.]) + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, torch.Tensor) + assert outputs.dtype == dst_type + + inputs = 'tensor' + src_type = str + dst_type = str + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, str) + + inputs = np.array([5.]) + src_type = np.ndarray + dst_type = np.ndarray + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, np.ndarray) + + inputs = dict( + tensor_a=torch.FloatTensor([1.]), tensor_b=torch.FloatTensor([2.])) + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, dict) + assert outputs['tensor_a'].dtype == dst_type + assert outputs['tensor_b'].dtype == dst_type + + inputs = [torch.FloatTensor([1.]), torch.FloatTensor([2.])] + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, list) + assert outputs[0].dtype == dst_type + assert outputs[1].dtype == dst_type + + inputs = 5 + outputs = cast_tensor_type(inputs, None, None) + assert isinstance(outputs, int) + + +def test_auto_fp16(): + + with pytest.raises(TypeError): + # ExampleObject is not a subclass of nn.Module + + class ExampleObject(object): + + @auto_fp16() + def __call__(self, x): + return x + + model = ExampleObject() + input_x = torch.ones(1, dtype=torch.float32) + model(input_x) + + # apply to all input args + class ExampleModule(nn.Module): + + @auto_fp16() + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, 
output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + # apply to specified input args + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', )) + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + + # apply to optional input args + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', 'y')) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + input_z = torch.ones(1, dtype=torch.float32) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.float32 + + # out_fp32=True + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', 'y'), out_fp32=True) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.float32) + input_z = torch.ones(1, dtype=torch.float32) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + +def test_force_fp32(): + + with pytest.raises(TypeError): + # ExampleObject is not a subclass of nn.Module + + class ExampleObject(object): + + @force_fp32() + def __call__(self, x): + return x + + model = ExampleObject() + input_x = torch.ones(1, dtype=torch.float32) + model(input_x) + + # apply to all input args + class ExampleModule(nn.Module): + + @force_fp32() + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + model.fp16_enabled = 
True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + # apply to specified input args + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', )) + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + + # apply to optional input args + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', 'y')) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + input_z = torch.ones(1, dtype=torch.half) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.half + + # out_fp16=True + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', 'y'), out_fp16=True) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.half) + input_z = torch.ones(1, dtype=torch.half) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half diff --git a/thirdparty/mmdetection/tests/test_iou2d_calculator.py b/thirdparty/mmdetection/tests/test_iou2d_calculator.py new file mode 100644 index 0000000000000000000000000000000000000000..94c6400f3360d9cd8df172f74a37f729324ec163 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_iou2d_calculator.py @@ -0,0 +1,105 @@ +import numpy as np +import pytest +import torch + +from mmdet.core import BboxOverlaps2D, bbox_overlaps + + +def test_bbox_overlaps_2d(eps=1e-7): + + def _construct_bbox(num_bbox=None): + img_h = 
int(np.random.randint(3, 1000)) + img_w = int(np.random.randint(3, 1000)) + if num_bbox is None: + num_bbox = np.random.randint(1, 10) + x1y1 = torch.rand((num_bbox, 2)) + x2y2 = torch.max(torch.rand((num_bbox, 2)), x1y1) + bboxes = torch.cat((x1y1, x2y2), -1) + bboxes[:, 0::2] *= img_w + bboxes[:, 1::2] *= img_h + return bboxes, num_bbox + + # is_aligned is True, bboxes.size(-1) == 5 (include score) + self = BboxOverlaps2D() + bboxes1, num_bbox = _construct_bbox() + bboxes2, _ = _construct_bbox(num_bbox) + bboxes1 = torch.cat((bboxes1, torch.rand((num_bbox, 1))), 1) + bboxes2 = torch.cat((bboxes2, torch.rand((num_bbox, 1))), 1) + gious = self(bboxes1, bboxes2, 'giou', True) + assert gious.size() == (num_bbox, ), gious.size() + assert torch.all(gious >= -1) and torch.all(gious <= 1) + + # is_aligned is True, bboxes1.size(-2) == 0 + bboxes1 = torch.empty((0, 4)) + bboxes2 = torch.empty((0, 4)) + gious = self(bboxes1, bboxes2, 'giou', True) + assert gious.size() == (0, ), gious.size() + assert torch.all(gious == torch.empty((0, ))) + assert torch.all(gious >= -1) and torch.all(gious <= 1) + + # is_aligned is True, and bboxes.ndims > 2 + bboxes1, num_bbox = _construct_bbox() + bboxes2, _ = _construct_bbox(num_bbox) + bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1) + # test assertion when batch dim is not the same + with pytest.raises(AssertionError): + self(bboxes1, bboxes2.unsqueeze(0).repeat(3, 1, 1), 'giou', True) + bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1) + gious = self(bboxes1, bboxes2, 'giou', True) + assert torch.all(gious >= -1) and torch.all(gious <= 1) + assert gious.size() == (2, num_bbox) + bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1, 1) + bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1, 1) + gious = self(bboxes1, bboxes2, 'giou', True) + assert torch.all(gious >= -1) and torch.all(gious <= 1) + assert gious.size() == (2, 2, num_bbox) + + # is_aligned is False + bboxes1, num_bbox1 = _construct_bbox() + bboxes2, num_bbox2 = _construct_bbox() + gious = self(bboxes1, bboxes2, 'giou') + assert torch.all(gious >= -1) and torch.all(gious <= 1) + assert gious.size() == (num_bbox1, num_bbox2) + + # is_aligned is False, and bboxes.ndims > 2 + bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1) + bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1) + gious = self(bboxes1, bboxes2, 'giou') + assert torch.all(gious >= -1) and torch.all(gious <= 1) + assert gious.size() == (2, num_bbox1, num_bbox2) + bboxes1 = bboxes1.unsqueeze(0) + bboxes2 = bboxes2.unsqueeze(0) + gious = self(bboxes1, bboxes2, 'giou') + assert torch.all(gious >= -1) and torch.all(gious <= 1) + assert gious.size() == (1, 2, num_bbox1, num_bbox2) + + # is_aligned is False, bboxes1.size(-2) == 0 + gious = self(torch.empty(1, 2, 0, 4), bboxes2, 'giou') + assert torch.all(gious == torch.empty(1, 2, 0, bboxes2.size(-2))) + assert torch.all(gious >= -1) and torch.all(gious <= 1) + + # test allclose between bbox_overlaps and the original official + # implementation. + bboxes1 = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [32, 32, 38, 42], + ]) + bboxes2 = torch.FloatTensor([ + [0, 0, 10, 20], + [0, 10, 10, 19], + [10, 10, 20, 20], + ]) + gious = bbox_overlaps(bboxes1, bboxes2, 'giou', is_aligned=True, eps=eps) + gious = gious.numpy().round(4) + # the gt is got with four decimal precision. 
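+ # For the first aligned pair this can be checked by hand: bboxes1[0] is
+ # [0, 0, 10, 10] and bboxes2[0] is [0, 0, 10, 20], so the intersection
+ # area is 10 * 10 = 100, the union is 100 + 200 - 100 = 200 and
+ # IoU = 0.5; the smallest enclosing box is [0, 0, 10, 20] with area 200,
+ # hence GIoU = IoU - (enclose - union) / enclose = 0.5 - 0 / 200 = 0.5.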
+ expected_gious = np.array([0.5000, -0.0500, -0.8214])
+ assert np.allclose(gious, expected_gious, rtol=0, atol=eps)
+
+ # test mode 'iof'
+ ious = bbox_overlaps(bboxes1, bboxes2, 'iof', is_aligned=True, eps=eps)
+ assert torch.all(ious >= -1) and torch.all(ious <= 1)
+ assert ious.size() == (bboxes1.size(0), )
+ ious = bbox_overlaps(bboxes1, bboxes2, 'iof', eps=eps)
+ assert torch.all(ious >= -1) and torch.all(ious <= 1)
+ assert ious.size() == (bboxes1.size(0), bboxes2.size(0))
diff --git a/thirdparty/mmdetection/tests/test_masks.py b/thirdparty/mmdetection/tests/test_masks.py
new file mode 100644
index 0000000000000000000000000000000000000000..acc4f6fe778ca7d6c0d5022d65cd79cba7e53a68
--- /dev/null
+++ b/thirdparty/mmdetection/tests/test_masks.py
@@ -0,0 +1,630 @@
+import numpy as np
+import pytest
+import torch
+
+from mmdet.core import BitmapMasks, PolygonMasks
+
+
+def dummy_raw_bitmap_masks(size):
+ """
+ Args:
+ size (tuple): expected shape of dummy masks, (H, W) or (N, H, W)
+
+ Returns:
+ ndarray: dummy mask
+ """
+ return np.random.randint(0, 2, size, dtype=np.uint8)
+
+
+def dummy_raw_polygon_masks(size):
+ """
+ Args:
+ size (tuple): expected shape of dummy masks, (N, H, W)
+
+ Returns:
+ list[list[ndarray]]: dummy mask
+ """
+ num_obj, height, width = size
+ polygons = []
+ for _ in range(num_obj):
+ num_points = np.random.randint(5) * 2 + 6
+ polygons.append([np.random.uniform(0, min(height, width), num_points)])
+ return polygons
+
+
+def dummy_bboxes(num, max_height, max_width):
+ x1y1 = np.random.randint(0, min(max_height // 2, max_width // 2), (num, 2))
+ wh = np.random.randint(0, min(max_height // 2, max_width // 2), (num, 2))
+ x2y2 = x1y1 + wh
+ return np.concatenate([x1y1, x2y2], axis=1).squeeze().astype(np.float32)
+
+
+def test_bitmap_mask_init():
+ # init with empty ndarray masks
+ raw_masks = np.empty((0, 28, 28), dtype=np.uint8)
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert len(bitmap_masks) == 0
+ assert bitmap_masks.height == 28
+ assert bitmap_masks.width == 28
+
+ # init with empty list masks
+ raw_masks = []
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert len(bitmap_masks) == 0
+ assert bitmap_masks.height == 28
+ assert bitmap_masks.width == 28
+
+ # init with ndarray masks containing 3 instances
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert len(bitmap_masks) == 3
+ assert bitmap_masks.height == 28
+ assert bitmap_masks.width == 28
+
+ # init with list masks containing 3 instances
+ raw_masks = [dummy_raw_bitmap_masks((28, 28)) for _ in range(3)]
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert len(bitmap_masks) == 3
+ assert bitmap_masks.height == 28
+ assert bitmap_masks.width == 28
+
+ # init with raw masks of unsupported type
+ with pytest.raises(AssertionError):
+ raw_masks = [[dummy_raw_bitmap_masks((28, 28))]]
+ BitmapMasks(raw_masks, 28, 28)
+
+
+def test_bitmap_mask_rescale():
+ # rescale with empty bitmap masks
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ rescaled_masks = bitmap_masks.rescale((56, 72))
+ assert len(rescaled_masks) == 0
+ assert rescaled_masks.height == 56
+ assert rescaled_masks.width == 56
+
+ # rescale with bitmap masks containing 1 instance
+ raw_masks = np.array([[[1, 0, 0, 0], [0, 1, 0, 1]]])
+ bitmap_masks = BitmapMasks(raw_masks, 2, 4)
+ rescaled_masks = bitmap_masks.rescale((8, 8))
+ assert len(rescaled_masks) == 1
+ assert rescaled_masks.height == 4
+ assert rescaled_masks.width == 8
+ truth
= np.array([[[1, 1, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 0, 0, 1, 1], [0, 0, 1, 1, 0, 0, 1, 1]]]) + assert (rescaled_masks.masks == truth).all() + + +def test_bitmap_mask_resize(): + # resize with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + resized_masks = bitmap_masks.resize((56, 72)) + assert len(resized_masks) == 0 + assert resized_masks.height == 56 + assert resized_masks.width == 72 + + # resize with bitmap masks contain 1 instances + raw_masks = np.diag(np.ones(4, dtype=np.uint8))[np.newaxis, ...] + bitmap_masks = BitmapMasks(raw_masks, 4, 4) + resized_masks = bitmap_masks.resize((8, 8)) + assert len(resized_masks) == 1 + assert resized_masks.height == 8 + assert resized_masks.width == 8 + truth = np.array([[[1, 1, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 0, 1, 1]]]) + assert (resized_masks.masks == truth).all() + + +def test_bitmap_mask_flip(): + # flip with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='horizontal') + assert len(flipped_masks) == 0 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + + # horizontally flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='horizontal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='horizontal') + assert flipped_masks.masks.shape == (3, 28, 28) + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, :, ::-1]).all() + + # vertically flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='vertical') + flipped_flipped_masks = flipped_masks.flip(flip_direction='vertical') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, ::-1, :]).all() + + # diagonal flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='diagonal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='diagonal') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, ::-1, ::-1]).all() + + +def test_bitmap_mask_pad(): + # pad with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + padded_masks = bitmap_masks.pad((56, 56)) + assert len(padded_masks) == 0 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + + # pad with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + padded_masks = bitmap_masks.pad((56, 56)) + assert len(padded_masks) == 3 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + 
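# BitmapMasks.pad is expected to keep the original content in the
+ # top-left corner and zero-fill only the newly added bottom/right
+ # region, which is what the final check below pins down; e.g. padding a
+ # 2x2 all-ones mask to (4, 4) should leave exactly the top-left 2x2
+ # block set.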
+ assert (padded_masks.masks[:, 28:, 28:] == 0).all()
+
+
+def test_bitmap_mask_crop():
+ # crop with empty bitmap masks
+ dummy_bbox = np.array([0, 10, 10, 27], dtype=int)
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ cropped_masks = bitmap_masks.crop(dummy_bbox)
+ assert len(cropped_masks) == 0
+ assert cropped_masks.height == 17
+ assert cropped_masks.width == 10
+
+ # crop with bitmap masks containing 3 instances
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ cropped_masks = bitmap_masks.crop(dummy_bbox)
+ assert len(cropped_masks) == 3
+ assert cropped_masks.height == 17
+ assert cropped_masks.width == 10
+ x1, y1, x2, y2 = dummy_bbox
+ assert (cropped_masks.masks == raw_masks[:, y1:y2, x1:x2]).all()
+
+ # crop with invalid bbox
+ with pytest.raises(AssertionError):
+ dummy_bbox = dummy_bboxes(2, 28, 28)
+ bitmap_masks.crop(dummy_bbox)
+
+
+def test_bitmap_mask_crop_and_resize():
+ dummy_bbox = dummy_bboxes(5, 28, 28)
+ inds = np.random.randint(0, 3, (5, ))
+
+ # crop and resize with empty bitmap masks
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ cropped_resized_masks = bitmap_masks.crop_and_resize(
+ dummy_bbox, (56, 56), inds)
+ assert len(cropped_resized_masks) == 0
+ assert cropped_resized_masks.height == 56
+ assert cropped_resized_masks.width == 56
+
+ # crop and resize with bitmap masks containing 3 instances
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ cropped_resized_masks = bitmap_masks.crop_and_resize(
+ dummy_bbox, (56, 56), inds)
+ assert len(cropped_resized_masks) == 5
+ assert cropped_resized_masks.height == 56
+ assert cropped_resized_masks.width == 56
+
+
+def test_bitmap_mask_expand():
+ # expand with empty bitmap masks
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ expanded_masks = bitmap_masks.expand(56, 56, 12, 14)
+ assert len(expanded_masks) == 0
+ assert expanded_masks.height == 56
+ assert expanded_masks.width == 56
+
+ # expand with bitmap masks containing 3 instances
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ expanded_masks = bitmap_masks.expand(56, 56, 12, 14)
+ assert len(expanded_masks) == 3
+ assert expanded_masks.height == 56
+ assert expanded_masks.width == 56
+ assert (expanded_masks.masks[:, :12, :14] == 0).all()
+ assert (expanded_masks.masks[:, 12 + 28:, 14 + 28:] == 0).all()
+
+
+def test_bitmap_mask_area():
+ # area of empty bitmap mask
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert bitmap_masks.areas.sum() == 0
+
+ # area of bitmap masks containing 3 instances
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ areas = bitmap_masks.areas
+ assert len(areas) == 3
+ assert (areas == raw_masks.sum((1, 2))).all()
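+
+ # As a concrete illustration of the semantics checked above (areas are
+ # simply per-instance pixel counts), a single 2x2 mask with three set
+ # pixels would be expected to give:
+ #   BitmapMasks(np.array([[[1, 1], [1, 0]]], np.uint8), 2, 2).areas
+ #   -> array([3])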
+
+
+def test_bitmap_mask_to_ndarray():
+ # empty bitmap masks to ndarray
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ ndarray_masks = bitmap_masks.to_ndarray()
+ assert isinstance(ndarray_masks, np.ndarray)
+ assert ndarray_masks.shape == (0, 28, 28)
+
+ # bitmap masks containing 3 instances to ndarray
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ ndarray_masks = bitmap_masks.to_ndarray()
+ assert isinstance(ndarray_masks, np.ndarray)
+ assert ndarray_masks.shape == (3, 28, 28)
+ assert (ndarray_masks == raw_masks).all()
+
+
+def test_bitmap_mask_to_tensor():
+ # empty bitmap masks to tensor
+ raw_masks = dummy_raw_bitmap_masks((0, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ tensor_masks = bitmap_masks.to_tensor(dtype=torch.uint8, device='cpu')
+ assert isinstance(tensor_masks, torch.Tensor)
+ assert tensor_masks.shape == (0, 28, 28)
+
+ # bitmap masks containing 3 instances to tensor
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ tensor_masks = bitmap_masks.to_tensor(dtype=torch.uint8, device='cpu')
+ assert isinstance(tensor_masks, torch.Tensor)
+ assert tensor_masks.shape == (3, 28, 28)
+ assert (tensor_masks.numpy() == raw_masks).all()
+
+
+def test_bitmap_mask_index():
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ assert (bitmap_masks[0].masks == raw_masks[0]).all()
+ assert (bitmap_masks[range(2)].masks == raw_masks[range(2)]).all()
+
+
+def test_bitmap_mask_iter():
+ raw_masks = dummy_raw_bitmap_masks((3, 28, 28))
+ bitmap_masks = BitmapMasks(raw_masks, 28, 28)
+ for i, bitmap_mask in enumerate(bitmap_masks):
+ assert bitmap_mask.shape == (28, 28)
+ assert (bitmap_mask == raw_masks[i]).all()
+
+
+def test_polygon_mask_init():
+ # init with empty masks
+ raw_masks = []
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ assert len(polygon_masks) == 0
+ assert polygon_masks.height == 28
+ assert polygon_masks.width == 28
+
+ # init with masks containing 3 instances
+ raw_masks = dummy_raw_polygon_masks((3, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ assert isinstance(polygon_masks.masks, list)
+ assert isinstance(polygon_masks.masks[0], list)
+ assert isinstance(polygon_masks.masks[0][0], np.ndarray)
+ assert len(polygon_masks) == 3
+ assert polygon_masks.height == 28
+ assert polygon_masks.width == 28
+ assert polygon_masks.to_ndarray().shape == (3, 28, 28)
+
+ # init with raw masks of unsupported type
+ with pytest.raises(AssertionError):
+ raw_masks = [[[]]]
+ PolygonMasks(raw_masks, 28, 28)
+
+ raw_masks = [dummy_raw_polygon_masks((3, 28, 28))]
+ PolygonMasks(raw_masks, 28, 28)
+
+
+def test_polygon_mask_rescale():
+ # rescale with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ rescaled_masks = polygon_masks.rescale((56, 72))
+ assert len(rescaled_masks) == 0
+ assert rescaled_masks.height == 56
+ assert rescaled_masks.width == 56
+ assert rescaled_masks.to_ndarray().shape == (0, 56, 56)
+
+ # rescale with polygon masks containing 1 instance
+ raw_masks = [[np.array([1, 1, 3, 1, 4, 3, 2, 4, 1, 3], dtype=float)]]
+ polygon_masks = PolygonMasks(raw_masks, 5, 5)
+ rescaled_masks = polygon_masks.rescale((12, 10))
+ assert len(rescaled_masks) == 1
+ assert rescaled_masks.height == 10
+ assert rescaled_masks.width == 10
+ assert rescaled_masks.to_ndarray().shape == (1, 10, 10)
+ truth = np.array(
+ [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
+ [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
+ [0, 0, 0, 1, 1, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
+ np.uint8)
+ assert (rescaled_masks.to_ndarray() == truth).all()
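+
+ # Note that rescale() appears to keep the aspect ratio: for a (5, 5)
+ # mask and a (12, 10) scale the factor is min(12 / 5, 10 / 5) = 2, which
+ # is why the rescaled size above is (10, 10) rather than (12, 10), while
+ # resize() below maps to the requested output size exactly.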
+
+
+def test_polygon_mask_resize():
+ # resize with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ resized_masks = polygon_masks.resize((56, 72))
+ assert len(resized_masks) == 0
+ assert resized_masks.height == 56
+ assert resized_masks.width == 72
+ assert resized_masks.to_ndarray().shape == (0, 56, 72)
+
+ # resize with polygon masks containing 1 instance with 1 part
+ raw_masks1 = [[np.array([1, 1, 3, 1, 4, 3, 2, 4, 1, 3], dtype=float)]]
+ polygon_masks1 = PolygonMasks(raw_masks1, 5, 5)
+ resized_masks1 = polygon_masks1.resize((10, 10))
+ assert len(resized_masks1) == 1
+ assert resized_masks1.height == 10
+ assert resized_masks1.width == 10
+ assert resized_masks1.to_ndarray().shape == (1, 10, 10)
+ truth1 = np.array(
+ [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
+ [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
+ [0, 0, 0, 1, 1, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
+ np.uint8)
+ assert (resized_masks1.to_ndarray() == truth1).all()
+
+ # resize with polygon masks containing 1 instance with 2 parts
+ raw_masks2 = [[
+ np.array([0., 0., 1., 0., 1., 1.]),
+ np.array([1., 1., 2., 1., 2., 2., 1., 2.])
+ ]]
+ polygon_masks2 = PolygonMasks(raw_masks2, 3, 3)
+ resized_masks2 = polygon_masks2.resize((6, 6))
+ assert len(resized_masks2) == 1
+ assert resized_masks2.height == 6
+ assert resized_masks2.width == 6
+ assert resized_masks2.to_ndarray().shape == (1, 6, 6)
+ truth2 = np.array(
+ [[0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0],
+ [0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]], np.uint8)
+ assert (resized_masks2.to_ndarray() == truth2).all()
+
+ # resize with polygon masks containing 2 instances
+ raw_masks3 = [raw_masks1[0], raw_masks2[0]]
+ polygon_masks3 = PolygonMasks(raw_masks3, 5, 5)
+ resized_masks3 = polygon_masks3.resize((10, 10))
+ assert len(resized_masks3) == 2
+ assert resized_masks3.height == 10
+ assert resized_masks3.width == 10
+ assert resized_masks3.to_ndarray().shape == (2, 10, 10)
+ truth3 = np.stack([truth1, np.pad(truth2, ((0, 4), (0, 4)), 'constant')])
+ assert (resized_masks3.to_ndarray() == truth3).all()
+
+
+def test_polygon_mask_flip():
+ # flip with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ flipped_masks = polygon_masks.flip(flip_direction='horizontal')
+ assert len(flipped_masks) == 0
+ assert flipped_masks.height == 28
+ assert flipped_masks.width == 28
+ assert flipped_masks.to_ndarray().shape == (0, 28, 28)
+
+ # TODO: fix the flip correctness check after v2.0_coord is merged
+ # horizontally flip with polygon masks containing 3 instances
+ raw_masks = dummy_raw_polygon_masks((3, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ flipped_masks = polygon_masks.flip(flip_direction='horizontal')
+ flipped_flipped_masks = flipped_masks.flip(flip_direction='horizontal')
+ assert len(flipped_masks) == 3
+ assert flipped_masks.height == 28
+ assert flipped_masks.width == 28
+ assert flipped_masks.to_ndarray().shape == (3, 28, 28)
+ assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray()
+ ).all()
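+
+ # The double-flip checks in this test rely on flipping being an
+ # involution: flipping twice along the same axis must reproduce the
+ # original polygons exactly, whichever coordinate convention the TODO
+ # above settles on.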
+
+ # vertically flip with polygon masks containing 3 instances
+ raw_masks = dummy_raw_polygon_masks((3, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ flipped_masks = polygon_masks.flip(flip_direction='vertical')
+ flipped_flipped_masks = flipped_masks.flip(flip_direction='vertical')
+ assert len(flipped_masks) == 3
+ assert flipped_masks.height == 28
+ assert flipped_masks.width == 28
+ assert flipped_masks.to_ndarray().shape == (3, 28, 28)
+ assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray()
+ ).all()
+
+ # diagonal flip with polygon masks containing 3 instances
+ raw_masks = dummy_raw_polygon_masks((3, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ flipped_masks = polygon_masks.flip(flip_direction='diagonal')
+ flipped_flipped_masks = flipped_masks.flip(flip_direction='diagonal')
+ assert len(flipped_masks) == 3
+ assert flipped_masks.height == 28
+ assert flipped_masks.width == 28
+ assert flipped_masks.to_ndarray().shape == (3, 28, 28)
+ assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray()
+ ).all()
+
+
+def test_polygon_mask_crop():
+ dummy_bbox = np.array([0, 10, 10, 27], dtype=int)
+ # crop with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ cropped_masks = polygon_masks.crop(dummy_bbox)
+ assert len(cropped_masks) == 0
+ assert cropped_masks.height == 17
+ assert cropped_masks.width == 10
+ assert cropped_masks.to_ndarray().shape == (0, 17, 10)
+
+ # crop with polygon masks containing 1 instance
+ raw_masks = [[np.array([1., 3., 5., 1., 5., 6., 1, 6])]]
+ polygon_masks = PolygonMasks(raw_masks, 7, 7)
+ bbox = np.array([0, 0, 3, 4])
+ cropped_masks = polygon_masks.crop(bbox)
+ assert len(cropped_masks) == 1
+ assert cropped_masks.height == 4
+ assert cropped_masks.width == 3
+ assert cropped_masks.to_ndarray().shape == (1, 4, 3)
+ truth = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 1], [0, 1, 1]])
+ assert (cropped_masks.to_ndarray() == truth).all()
+
+ # crop with invalid bbox
+ with pytest.raises(AssertionError):
+ dummy_bbox = dummy_bboxes(2, 28, 28)
+ polygon_masks.crop(dummy_bbox)
+
+
+def test_polygon_mask_pad():
+ # pad with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ padded_masks = polygon_masks.pad((56, 56))
+ assert len(padded_masks) == 0
+ assert padded_masks.height == 56
+ assert padded_masks.width == 56
+ assert padded_masks.to_ndarray().shape == (0, 56, 56)
+
+ # pad with polygon masks containing 3 instances
+ raw_masks = dummy_raw_polygon_masks((3, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ padded_masks = polygon_masks.pad((56, 56))
+ assert len(padded_masks) == 3
+ assert padded_masks.height == 56
+ assert padded_masks.width == 56
+ assert padded_masks.to_ndarray().shape == (3, 56, 56)
+ assert (padded_masks.to_ndarray()[:, 28:, 28:] == 0).all()
+
+
+def test_polygon_mask_expand():
+ with pytest.raises(NotImplementedError):
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ polygon_masks.expand(56, 56, 10, 17)
+
+
+def test_polygon_mask_crop_and_resize():
+ dummy_bbox = dummy_bboxes(5, 28, 28)
+ inds = np.random.randint(0, 3, (5, ))
+
+ # crop and resize with empty polygon masks
+ raw_masks = dummy_raw_polygon_masks((0, 28, 28))
+ polygon_masks = PolygonMasks(raw_masks, 28, 28)
+ cropped_resized_masks = polygon_masks.crop_and_resize(
+ dummy_bbox, (56, 56), inds)
+ assert len(cropped_resized_masks) == 0
+ assert cropped_resized_masks.height == 56
+ assert cropped_resized_masks.width == 56
+ assert cropped_resized_masks.to_ndarray().shape == (0, 56, 56)
+
+ # crop and resize with polygon masks containing 3 instances
+ raw_masks =
dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + cropped_resized_masks = polygon_masks.crop_and_resize( + dummy_bbox, (56, 56), inds) + assert len(cropped_resized_masks) == 5 + assert cropped_resized_masks.height == 56 + assert cropped_resized_masks.width == 56 + assert cropped_resized_masks.to_ndarray().shape == (5, 56, 56) + + +def test_polygon_mask_area(): + # area of empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + assert polygon_masks.areas.sum() == 0 + + # area of polygon masks contain 1 instance + # here we hack a case that the gap between the area of bitmap and polygon + # is minor + raw_masks = [[np.array([1, 1, 5, 1, 3, 4])]] + polygon_masks = PolygonMasks(raw_masks, 6, 6) + polygon_area = polygon_masks.areas + bitmap_area = polygon_masks.to_bitmap().areas + assert len(polygon_area) == 1 + assert np.isclose(polygon_area, bitmap_area).all() + + +def test_polygon_mask_to_bitmap(): + # polygon masks contain 3 instances to bitmap + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + bitmap_masks = polygon_masks.to_bitmap() + assert (polygon_masks.to_ndarray() == bitmap_masks.to_ndarray()).all() + + +def test_polygon_mask_to_ndarray(): + # empty polygon masks to ndarray + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + ndarray_masks = polygon_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (0, 28, 28) + + # polygon masks contain 3 instances to ndarray + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + ndarray_masks = polygon_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (3, 28, 28) + + +def test_polygon_to_tensor(): + # empty polygon masks to tensor + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + tensor_masks = polygon_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (0, 28, 28) + + # polygon masks contain 3 instances to tensor + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + tensor_masks = polygon_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (3, 28, 28) + assert (tensor_masks.numpy() == polygon_masks.to_ndarray()).all() + + +def test_polygon_mask_index(): + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + # index by integer + polygon_masks[0] + # index by list + polygon_masks[[0, 1]] + # index by ndarray + polygon_masks[np.asarray([0, 1])] + with pytest.raises(ValueError): + # invalid index + polygon_masks[torch.Tensor([1, 2])] + + +def test_polygon_mask_iter(): + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + for i, polygon_mask in enumerate(polygon_masks): + assert np.equal(polygon_mask, raw_masks[i]).all() diff --git a/thirdparty/mmdetection/tests/test_models/test_backbones.py b/thirdparty/mmdetection/tests/test_models/test_backbones.py new file mode 100644 index 0000000000000000000000000000000000000000..eb7071f86be1070aada2f874d84224118ca31267 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_backbones.py @@ -0,0 +1,896 @@ +import 
pytest +import torch +from mmcv.ops import DeformConv2dPack +from torch.nn.modules import AvgPool2d, GroupNorm +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.models.backbones import (RegNet, Res2Net, ResNeSt, ResNet, + ResNetV1d, ResNeXt) +from mmdet.models.backbones.hourglass import HourglassNet +from mmdet.models.backbones.res2net import Bottle2neck +from mmdet.models.backbones.resnest import Bottleneck as BottleneckS +from mmdet.models.backbones.resnet import BasicBlock, Bottleneck +from mmdet.models.backbones.resnext import Bottleneck as BottleneckX +from mmdet.models.utils import ResLayer + + +def is_block(modules): + """Check if is ResNet building block.""" + if isinstance(modules, (BasicBlock, Bottleneck, BottleneckX, Bottle2neck)): + return True + return False + + +def is_norm(modules): + """Check if is one of the norms.""" + if isinstance(modules, (GroupNorm, _BatchNorm)): + return True + return False + + +def all_zeros(modules): + """Check if the weight(and bias) is all zero.""" + weight_zero = torch.allclose(modules.weight.data, + torch.zeros_like(modules.weight.data)) + if hasattr(modules, 'bias'): + bias_zero = torch.allclose(modules.bias.data, + torch.zeros_like(modules.bias.data)) + else: + bias_zero = True + + return weight_zero and bias_zero + + +def check_norm_state(modules, train_state): + """Check if norm layer is in correct train state.""" + for mod in modules: + if isinstance(mod, _BatchNorm): + if mod.training != train_state: + return False + return True + + +def test_resnet_basic_block(): + + with pytest.raises(AssertionError): + # Not implemented yet. + dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False) + BasicBlock(64, 64, dcn=dcn) + + with pytest.raises(AssertionError): + # Not implemented yet. + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3') + ] + BasicBlock(64, 64, plugins=plugins) + + with pytest.raises(AssertionError): + # Not implemented yet + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + position='after_conv2') + ] + BasicBlock(64, 64, plugins=plugins) + + # test BasicBlock structure and forward + block = BasicBlock(64, 64) + assert block.conv1.in_channels == 64 + assert block.conv1.out_channels == 64 + assert block.conv1.kernel_size == (3, 3) + assert block.conv2.in_channels == 64 + assert block.conv2.out_channels == 64 + assert block.conv2.kernel_size == (3, 3) + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test BasicBlock with checkpoint forward + block = BasicBlock(64, 64, with_cp=True) + assert block.with_cp + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + +def test_resnet_bottleneck(): + + with pytest.raises(AssertionError): + # Style must be in ['pytorch', 'caffe'] + Bottleneck(64, 64, style='tensorflow') + + with pytest.raises(AssertionError): + # Allowed positions are 'after_conv1', 'after_conv2', 'after_conv3' + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv4') + ] + Bottleneck(64, 16, plugins=plugins) + + with pytest.raises(AssertionError): + # Need to specify different postfix to avoid duplicate plugin name + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3'), + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + position='after_conv3') + ] + Bottleneck(64, 16, plugins=plugins) + + with pytest.raises(KeyError): + # Plugin type is not supported + plugins = [dict(cfg=dict(type='WrongPlugin'), position='after_conv3')] + Bottleneck(64, 16, plugins=plugins) + + # Test Bottleneck with checkpoint forward + block = Bottleneck(64, 16, with_cp=True) + assert block.with_cp + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test Bottleneck style + block = Bottleneck(64, 64, stride=2, style='pytorch') + assert block.conv1.stride == (1, 1) + assert block.conv2.stride == (2, 2) + block = Bottleneck(64, 64, stride=2, style='caffe') + assert block.conv1.stride == (2, 2) + assert block.conv2.stride == (1, 1) + + # Test Bottleneck DCN + dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False) + with pytest.raises(AssertionError): + Bottleneck(64, 64, dcn=dcn, conv_cfg=dict(type='Conv')) + block = Bottleneck(64, 64, dcn=dcn) + assert isinstance(block.conv2, DeformConv2dPack) + + # Test Bottleneck forward + block = Bottleneck(64, 16) + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test Bottleneck with 1 ContextBlock after conv3 + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3') + ] + block = Bottleneck(64, 16, plugins=plugins) + assert block.context_block.in_channels == 64 + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test Bottleneck with 1 GeneralizedAttention after conv2 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + position='after_conv2') + ] + block = Bottleneck(64, 16, plugins=plugins) + assert block.gen_attention_block.in_channels == 16 + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test Bottleneck with 1 GeneralizedAttention after conv2, 1 NonLocal2D + # after conv2, 1 ContextBlock after conv3 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + position='after_conv2'), + dict(cfg=dict(type='NonLocal2d'), position='after_conv2'), + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3') + ] + block = Bottleneck(64, 16, plugins=plugins) + assert block.gen_attention_block.in_channels == 16 + assert block.nonlocal_block.in_channels == 16 + assert block.context_block.in_channels == 64 + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([1, 64, 56, 56]) + + # Test Bottleneck with 1 ContextBlock after conv2, 2 ContextBlock after + # conv3 + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=1), + position='after_conv2'), + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=2), + position='after_conv3'), + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16, postfix=3),
+ position='after_conv3')
+ ]
+ block = Bottleneck(64, 16, plugins=plugins)
+ assert block.context_block1.in_channels == 16
+ assert block.context_block2.in_channels == 64
+ assert block.context_block3.in_channels == 64
+ x = torch.randn(1, 64, 56, 56)
+ x_out = block(x)
+ assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_resnet_res_layer():
+ # Test ResLayer of 3 Bottleneck w/o downsample
+ layer = ResLayer(Bottleneck, 64, 16, 3)
+ assert len(layer) == 3
+ assert layer[0].conv1.in_channels == 64
+ assert layer[0].conv1.out_channels == 16
+ for i in range(1, len(layer)):
+ assert layer[i].conv1.in_channels == 64
+ assert layer[i].conv1.out_channels == 16
+ for i in range(len(layer)):
+ assert layer[i].downsample is None
+ x = torch.randn(1, 64, 56, 56)
+ x_out = layer(x)
+ assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+ # Test ResLayer of 3 Bottleneck with downsample
+ layer = ResLayer(Bottleneck, 64, 64, 3)
+ assert layer[0].downsample[0].out_channels == 256
+ for i in range(1, len(layer)):
+ assert layer[i].downsample is None
+ x = torch.randn(1, 64, 56, 56)
+ x_out = layer(x)
+ assert x_out.shape == torch.Size([1, 256, 56, 56])
+
+ # Test ResLayer of 3 Bottleneck with stride=2
+ layer = ResLayer(Bottleneck, 64, 64, 3, stride=2)
+ assert layer[0].downsample[0].out_channels == 256
+ assert layer[0].downsample[0].stride == (2, 2)
+ for i in range(1, len(layer)):
+ assert layer[i].downsample is None
+ x = torch.randn(1, 64, 56, 56)
+ x_out = layer(x)
+ assert x_out.shape == torch.Size([1, 256, 28, 28])
+
+ # Test ResLayer of 3 Bottleneck with stride=2 and average downsample
+ layer = ResLayer(Bottleneck, 64, 64, 3, stride=2, avg_down=True)
+ assert isinstance(layer[0].downsample[0], AvgPool2d)
+ assert layer[0].downsample[1].out_channels == 256
+ assert layer[0].downsample[1].stride == (1, 1)
+ for i in range(1, len(layer)):
+ assert layer[i].downsample is None
+ x = torch.randn(1, 64, 56, 56)
+ x_out = layer(x)
+ assert x_out.shape == torch.Size([1, 256, 28, 28])
+
+ # Test ResLayer of 3 BasicBlock with stride=2 and downsample_first=False
+ layer = ResLayer(BasicBlock, 64, 64, 3, stride=2, downsample_first=False)
+ assert layer[2].downsample[0].out_channels == 64
+ assert layer[2].downsample[0].stride == (2, 2)
+ for i in range(len(layer) - 1):
+ assert layer[i].downsample is None
+ x = torch.randn(1, 64, 56, 56)
+ x_out = layer(x)
+ assert x_out.shape == torch.Size([1, 64, 28, 28])
+
+
+def test_resnet_stem():
+ # Test default stem_channels
+ model = ResNet(50)
+ assert model.stem_channels == 64
+ assert model.conv1.out_channels == 64
+ assert model.norm1.num_features == 64
+
+ # Test default stem_channels, with base_channels=32
+ model = ResNet(50, base_channels=32)
+ assert model.stem_channels == 32
+ assert model.conv1.out_channels == 32
+ assert model.norm1.num_features == 32
+ assert model.layer1[0].conv1.in_channels == 32
+
+ # Test stem_channels=64
+ model = ResNet(50, stem_channels=64)
+ assert model.stem_channels == 64
+ assert model.conv1.out_channels == 64
+ assert model.norm1.num_features == 64
+ assert model.layer1[0].conv1.in_channels == 64
+
+ # Test stem_channels=64, with base_channels=32
+ model = ResNet(50, stem_channels=64, base_channels=32)
+ assert model.stem_channels == 64
+ assert model.conv1.out_channels == 64
+ assert model.norm1.num_features == 64
+ assert model.layer1[0].conv1.in_channels == 64
+
+ # Test stem_channels=128
+ model = ResNet(depth=50, stem_channels=128)
+ model.init_weights()
+ model.train()
+ assert
model.conv1.out_channels == 128 + assert model.layer1[0].conv1.in_channels == 128 + + # Test V1d stem_channels + model = ResNetV1d(depth=50, stem_channels=128) + model.init_weights() + model.train() + assert model.stem[0].out_channels == 64 + assert model.stem[1].num_features == 64 + assert model.stem[3].out_channels == 64 + assert model.stem[4].num_features == 64 + assert model.stem[6].out_channels == 128 + assert model.stem[7].num_features == 128 + assert model.layer1[0].conv1.in_channels == 128 + + +def test_resnet_backbone(): + """Test resnet backbone.""" + with pytest.raises(KeyError): + # ResNet depth should be in [18, 34, 50, 101, 152] + ResNet(20) + + with pytest.raises(AssertionError): + # In ResNet: 1 <= num_stages <= 4 + ResNet(50, num_stages=0) + + with pytest.raises(AssertionError): + # len(stage_with_dcn) == num_stages + dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False) + ResNet(50, dcn=dcn, stage_with_dcn=(True, )) + + with pytest.raises(AssertionError): + # len(stage_with_plugin) == num_stages + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True), + position='after_conv3') + ] + ResNet(50, plugins=plugins) + + with pytest.raises(AssertionError): + # In ResNet: 1 <= num_stages <= 4 + ResNet(50, num_stages=5) + + with pytest.raises(AssertionError): + # len(strides) == len(dilations) == num_stages + ResNet(50, strides=(1, ), dilations=(1, 1), num_stages=3) + + with pytest.raises(TypeError): + # pretrained must be a string path + model = ResNet(50) + model.init_weights(pretrained=0) + + with pytest.raises(AssertionError): + # Style must be in ['pytorch', 'caffe'] + ResNet(50, style='tensorflow') + + # Test ResNet50 norm_eval=True + model = ResNet(50, norm_eval=True) + model.init_weights() + model.train() + assert check_norm_state(model.modules(), False) + + # Test ResNet50 with torchvision pretrained weight + model = ResNet(depth=50, norm_eval=True) + model.init_weights('torchvision://resnet50') + model.train() + assert check_norm_state(model.modules(), False) + + # Test ResNet50 with first stage frozen + frozen_stages = 1 + model = ResNet(50, frozen_stages=frozen_stages) + model.init_weights() + model.train() + assert model.norm1.training is False + for layer in [model.conv1, model.norm1]: + for param in layer.parameters(): + assert param.requires_grad is False + for i in range(1, frozen_stages + 1): + layer = getattr(model, f'layer{i}') + for mod in layer.modules(): + if isinstance(mod, _BatchNorm): + assert mod.training is False + for param in layer.parameters(): + assert param.requires_grad is False + + # Test ResNet50V1d with first stage frozen + model = ResNetV1d(depth=50, frozen_stages=frozen_stages) + assert len(model.stem) == 9 + model.init_weights() + model.train() + check_norm_state(model.stem, False) + for param in model.stem.parameters(): + assert param.requires_grad is False + for i in range(1, frozen_stages + 1): + layer = getattr(model, f'layer{i}') + for mod in layer.modules(): + if isinstance(mod, _BatchNorm): + assert mod.training is False + for param in layer.parameters(): + assert param.requires_grad is False + + # Test ResNet18 forward + model = ResNet(18) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 64, 56, 56]) + assert feat[1].shape == torch.Size([1, 128, 28, 28]) + assert feat[2].shape == torch.Size([1, 256, 14, 14]) + assert feat[3].shape == torch.Size([1, 512, 7, 7]) + + # 
Test ResNet18 with checkpoint forward + model = ResNet(18, with_cp=True) + for m in model.modules(): + if is_block(m): + assert m.with_cp + + # Test ResNet50 with BatchNorm forward + model = ResNet(50) + for m in model.modules(): + if is_norm(m): + assert isinstance(m, _BatchNorm) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with layers 1, 2, 3 out forward + model = ResNet(50, out_indices=(0, 1, 2)) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 3 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + + # Test ResNet50 with checkpoint forward + model = ResNet(50, with_cp=True) + for m in model.modules(): + if is_block(m): + assert m.with_cp + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with GroupNorm forward + model = ResNet( + 50, norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)) + for m in model.modules(): + if is_norm(m): + assert isinstance(m, GroupNorm) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with 1 GeneralizedAttention after conv2, 1 NonLocal2D + # after conv2, 1 ContextBlock after conv3 in layers 2, 3, 4 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, True, True, True), + position='after_conv2'), + dict(cfg=dict(type='NonLocal2d'), position='after_conv2'), + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, False), + position='after_conv3') + ] + model = ResNet(50, plugins=plugins) + for m in model.layer1.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'gen_attention_block') + assert m.nonlocal_block.in_channels == 64 + for m in model.layer2.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 128 + assert m.gen_attention_block.in_channels == 128 + assert m.context_block.in_channels == 512 + + for m in model.layer3.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 256 + assert m.gen_attention_block.in_channels == 256 + assert m.context_block.in_channels == 1024 + + for m in model.layer4.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 512 + assert m.gen_attention_block.in_channels == 512 + assert not hasattr(m, 'context_block') + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with 1 ContextBlock after conv2, 1 ContextBlock after + # conv3 in layers 2, 3, 4 + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=1), + stages=(False, True, True, False), + position='after_conv3'), + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=2), + stages=(False, True, True, False), + position='after_conv3') + ] + + model = ResNet(50, plugins=plugins) + for m in model.layer1.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'context_block1') + assert not hasattr(m, 'context_block2') + for m in model.layer2.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert m.context_block1.in_channels == 512 + assert m.context_block2.in_channels == 512 + + for m in model.layer3.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert m.context_block1.in_channels == 1024 + assert m.context_block2.in_channels == 1024 + + for m in model.layer4.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'context_block1') + assert not hasattr(m, 'context_block2') + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 zero initialization of residual + model = ResNet(50, zero_init_residual=True) + model.init_weights() + for m in model.modules(): + if isinstance(m, Bottleneck): + assert all_zeros(m.norm3) + elif isinstance(m, BasicBlock): + assert all_zeros(m.norm2) + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNetV1d forward + model = ResNetV1d(depth=50) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert 
feat[1].shape == torch.Size([1, 512, 28, 28])
+ assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+ assert feat[3].shape == torch.Size([1, 2048, 7, 7])
+
+
+def test_resnext_bottleneck():
+ with pytest.raises(AssertionError):
+ # Style must be in ['pytorch', 'caffe']
+ BottleneckX(64, 64, groups=32, base_width=4, style='tensorflow')
+
+ # Test ResNeXt Bottleneck structure
+ block = BottleneckX(
+ 64, 64, groups=32, base_width=4, stride=2, style='pytorch')
+ assert block.conv2.stride == (2, 2)
+ assert block.conv2.groups == 32
+ assert block.conv2.out_channels == 128
+
+ # Test ResNeXt Bottleneck with DCN
+ dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+ with pytest.raises(AssertionError):
+ # conv_cfg must be None if dcn is not None
+ BottleneckX(
+ 64,
+ 64,
+ groups=32,
+ base_width=4,
+ dcn=dcn,
+ conv_cfg=dict(type='Conv'))
+ BottleneckX(64, 64, dcn=dcn)
+
+ # Test ResNeXt Bottleneck forward
+ block = BottleneckX(64, 16, groups=32, base_width=4)
+ x = torch.randn(1, 64, 56, 56)
+ x_out = block(x)
+ assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_resnext_backbone():
+ with pytest.raises(KeyError):
+ # ResNeXt depth should be in [50, 101, 152]
+ ResNeXt(depth=18)
+
+ # Test ResNeXt with group 32, base_width 4
+ model = ResNeXt(depth=50, groups=32, base_width=4)
+ for m in model.modules():
+ if is_block(m):
+ assert m.conv2.groups == 32
+ model.init_weights()
+ model.train()
+
+ imgs = torch.randn(1, 3, 224, 224)
+ feat = model(imgs)
+ assert len(feat) == 4
+ assert feat[0].shape == torch.Size([1, 256, 56, 56])
+ assert feat[1].shape == torch.Size([1, 512, 28, 28])
+ assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+ assert feat[3].shape == torch.Size([1, 2048, 7, 7])
+
+
+regnet_test_data = [
+ ('regnetx_400mf',
+ dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22,
+ bot_mul=1.0), [32, 64, 160, 384]),
+ ('regnetx_800mf',
+ dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16,
+ bot_mul=1.0), [64, 128, 288, 672]),
+ ('regnetx_1.6gf',
+ dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18,
+ bot_mul=1.0), [72, 168, 408, 912]),
+ ('regnetx_3.2gf',
+ dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25,
+ bot_mul=1.0), [96, 192, 432, 1008]),
+ ('regnetx_4.0gf',
+ dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23,
+ bot_mul=1.0), [80, 240, 560, 1360]),
+ ('regnetx_6.4gf',
+ dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17,
+ bot_mul=1.0), [168, 392, 784, 1624]),
+ ('regnetx_8.0gf',
+ dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23,
+ bot_mul=1.0), [80, 240, 720, 1920]),
+ ('regnetx_12gf',
+ dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19,
+ bot_mul=1.0), [224, 448, 896, 2240]),
+]
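+
+# Each tuple above pairs a RegNet generation config (initial width w0,
+# width slope wa, width multiplier wm, group width, depth and bottleneck
+# multiplier) with the per-stage output channels the width generator is
+# expected to produce for that architecture.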
+
+
+@pytest.mark.parametrize('arch_name,arch,out_channels', regnet_test_data)
+def test_regnet_backbone(arch_name, arch, out_channels):
+ with pytest.raises(AssertionError):
+ # arch_name with an extra suffix is not a valid RegNet arch name
+ RegNet(arch_name + '233')
+
+ # Test RegNet with arch_name
+ model = RegNet(arch_name)
+ model.init_weights()
+ model.train()
+
+ imgs = torch.randn(1, 3, 224, 224)
+ feat = model(imgs)
+ assert len(feat) == 4
+ assert feat[0].shape == torch.Size([1, out_channels[0], 56, 56])
+ assert feat[1].shape == torch.Size([1, out_channels[1], 28, 28])
+ assert feat[2].shape == torch.Size([1, out_channels[2], 14, 14])
+ assert feat[3].shape == torch.Size([1, out_channels[3], 7, 7])
+
+ # Test RegNet with arch
+ model = RegNet(arch)
+ model.init_weights()
+ model.train()
+ feat = model(imgs)
+ assert feat[0].shape == torch.Size([1, out_channels[0], 56, 56])
+ assert feat[1].shape == torch.Size([1, out_channels[1], 28, 28])
+ assert feat[2].shape == torch.Size([1, out_channels[2], 14, 14])
+ assert feat[3].shape == torch.Size([1, out_channels[3], 7, 7])
+
+
+def test_res2net_bottle2neck():
+ with pytest.raises(AssertionError):
+ # Style must be in ['pytorch', 'caffe']
+ Bottle2neck(64, 64, base_width=26, scales=4, style='tensorflow')
+
+ with pytest.raises(AssertionError):
+ # Scale must be larger than 1
+ Bottle2neck(64, 64, base_width=26, scales=1, style='pytorch')
+
+ # Test Res2Net Bottle2neck structure
+ block = Bottle2neck(
+ 64, 64, base_width=26, stride=2, scales=4, style='pytorch')
+ assert block.scales == 4
+
+ # Test Res2Net Bottle2neck with DCN
+ dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+ with pytest.raises(AssertionError):
+ # conv_cfg must be None if dcn is not None
+ Bottle2neck(
+ 64,
+ 64,
+ base_width=26,
+ scales=4,
+ dcn=dcn,
+ conv_cfg=dict(type='Conv'))
+ Bottle2neck(64, 64, dcn=dcn)
+
+ # Test Res2Net Bottle2neck forward
+ block = Bottle2neck(64, 16, base_width=26, scales=4)
+ x = torch.randn(1, 64, 56, 56)
+ x_out = block(x)
+ assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_res2net_backbone():
+ with pytest.raises(KeyError):
+ # Res2Net depth should be in [50, 101, 152]
+ Res2Net(depth=18)
+
+ # Test Res2Net with scales 4, base_width 26
+ model = Res2Net(depth=50, scales=4, base_width=26)
+ for m in model.modules():
+ if is_block(m):
+ assert m.scales == 4
+ model.init_weights()
+ model.train()
+
+ imgs = torch.randn(1, 3, 224, 224)
+ feat = model(imgs)
+ assert len(feat) == 4
+ assert feat[0].shape == torch.Size([1, 256, 56, 56])
+ assert feat[1].shape == torch.Size([1, 512, 28, 28])
+ assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+ assert feat[3].shape == torch.Size([1, 2048, 7, 7])
+
+
+def test_hourglass_backbone():
+ with pytest.raises(AssertionError):
+ # HourglassNet's num_stacks should be larger than 0
+ HourglassNet(num_stacks=0)
+
+ with pytest.raises(AssertionError):
+ # len(stage_channels) should equal len(stage_blocks)
+ HourglassNet(
+ stage_channels=[256, 256, 384, 384, 384],
+ stage_blocks=[2, 2, 2, 2, 2, 4])
+
+ with pytest.raises(AssertionError):
+ # len(stage_channels) should be larger than downsample_times
+ HourglassNet(
+ downsample_times=5,
+ stage_channels=[256, 256, 384, 384, 384],
+ stage_blocks=[2, 2, 2, 2, 2])
+
+ # Test HourglassNet-52
+ model = HourglassNet(num_stacks=1)
+ model.init_weights()
+ model.train()
+
+ imgs = torch.randn(1, 3, 511, 511)
+ feat = model(imgs)
+ assert len(feat) == 1
+ assert feat[0].shape == torch.Size([1, 256, 128, 128])
+
+ # Test HourglassNet-104
+ model = HourglassNet(num_stacks=2)
+ model.init_weights()
+ model.train()
+
+ imgs = torch.randn(1, 3, 511, 511)
+ feat = model(imgs)
+ assert len(feat) == 2
+ assert feat[0].shape == torch.Size([1, 256, 128, 128])
+ assert feat[1].shape == torch.Size([1, 256, 128, 128])
+
+
+def test_resnest_bottleneck():
+ with
pytest.raises(AssertionError): + # Style must be in ['pytorch', 'caffe'] + BottleneckS(64, 64, radix=2, reduction_factor=4, style='tensorflow') + + # Test ResNeSt Bottleneck structure + block = BottleneckS( + 64, 256, radix=2, reduction_factor=4, stride=2, style='pytorch') + assert block.avd_layer.stride == 2 + assert block.conv2.channels == 256 + + # Test ResNeSt Bottleneck forward + block = BottleneckS(64, 16, radix=2, reduction_factor=4) + x = torch.randn(2, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([2, 64, 56, 56]) + + +def test_resnest_backbone(): + with pytest.raises(KeyError): + # ResNeSt depth should be in [50, 101, 152, 200] + ResNeSt(depth=18) + + # Test ResNeSt with radix 2, reduction_factor 4 + model = ResNeSt( + depth=50, radix=2, reduction_factor=4, out_indices=(0, 1, 2, 3)) + model.init_weights() + model.train() + + imgs = torch.randn(2, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([2, 256, 56, 56]) + assert feat[1].shape == torch.Size([2, 512, 28, 28]) + assert feat[2].shape == torch.Size([2, 1024, 14, 14]) + assert feat[3].shape == torch.Size([2, 2048, 7, 7]) diff --git a/thirdparty/mmdetection/tests/test_models/test_forward.py b/thirdparty/mmdetection/tests/test_models/test_forward.py new file mode 100644 index 0000000000000000000000000000000000000000..0244f02e0adf92ca05f5721ae1d4b189ede4ea8d --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_forward.py @@ -0,0 +1,437 @@ +"""pytest tests/test_forward.py.""" +import copy +from os.path import dirname, exists, join + +import numpy as np +import pytest +import torch + + +def _get_config_directory(): + """Find the predefined detector config directory.""" + try: + # Assume we are running in the source mmdetection repo + repo_dpath = dirname(dirname(dirname(__file__))) + except NameError: + # For IPython development when this __file__ is not defined + import mmdet + repo_dpath = dirname(dirname(mmdet.__file__)) + config_dpath = join(repo_dpath, 'configs') + if not exists(config_dpath): + raise Exception('Cannot find config path') + return config_dpath + + +def _get_config_module(fname): + """Load a configuration as a python module.""" + from mmcv import Config + config_dpath = _get_config_directory() + config_fpath = join(config_dpath, fname) + config_mod = Config.fromfile(config_fpath) + return config_mod + + +def _get_detector_cfg(fname): + """Grab configs necessary to create a detector. + + These are deep copied to allow for safe modification of parameters without + influencing other tests. 
+ """ + import mmcv + config = _get_config_module(fname) + model = copy.deepcopy(config.model) + train_cfg = mmcv.Config(copy.deepcopy(config.train_cfg)) + test_cfg = mmcv.Config(copy.deepcopy(config.test_cfg)) + return model, train_cfg, test_cfg + + +def test_rpn_forward(): + model, train_cfg, test_cfg = _get_detector_cfg( + 'rpn/rpn_r50_fpn_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 224, 224) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + gt_bboxes = mm_inputs['gt_bboxes'] + losses = detector.forward( + imgs, img_metas, gt_bboxes=gt_bboxes, return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +@pytest.mark.parametrize( + 'cfg_file', + [ + 'retinanet/retinanet_r50_fpn_1x_coco.py', + 'guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py', + 'ghm/retinanet_ghm_r50_fpn_1x_coco.py', + 'fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py', + 'foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py', + # 'free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py', + # 'atss/atss_r50_fpn_1x_coco.py', # not ready for topk + 'reppoints/reppoints_moment_r50_fpn_1x_coco.py', + 'yolo/yolov3_d53_mstrain-608_273e_coco.py' + ]) +def test_single_stage_forward_gpu(cfg_file): + if not torch.cuda.is_available(): + import pytest + pytest.skip('test requires GPU and torch+cuda') + + model, train_cfg, test_cfg = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (2, 3, 224, 224) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + detector = detector.cuda() + imgs = imgs.cuda() + # Test forward train + gt_bboxes = [b.cuda() for b in mm_inputs['gt_bboxes']] + gt_labels = [g.cuda() for g in mm_inputs['gt_labels']] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +def test_faster_rcnn_ohem_forward(): + model, train_cfg, test_cfg = _get_detector_cfg( + 'faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 256, 256) + + # Test forward train with a non-empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[10]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) 
> 0 + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + +# HTC is not ready yet +@pytest.mark.parametrize('cfg_file', [ + 'cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py', + 'mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py', + 'grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py', + 'ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py' +]) +def test_two_stage_forward(cfg_file): + model, train_cfg, test_cfg = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 256, 256) + + # Test forward train with a non-empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[10]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + gt_masks = mm_inputs['gt_masks'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + gt_masks=gt_masks, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + loss.requires_grad_(True) + assert float(loss.item()) > 0 + loss.backward() + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + gt_masks = mm_inputs['gt_masks'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + gt_masks=gt_masks, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + loss.requires_grad_(True) + assert float(loss.item()) > 0 + loss.backward() + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +@pytest.mark.parametrize( + 'cfg_file', ['ghm/retinanet_ghm_r50_fpn_1x_coco.py', 'ssd/ssd300_coco.py']) +def test_single_stage_forward_cpu(cfg_file): + model, train_cfg, test_cfg = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 300, 300) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +def _demo_mm_inputs(input_shape=(1, 3, 300, 300), + 
num_items=None, num_classes=10): # yapf: disable + """Create a superset of inputs needed to run test or train batches. + + Args: + input_shape (tuple): + input batch dimensions + + num_items (None | List[int]): + specifies the number of boxes in each batch item + + num_classes (int): + number of different labels a box might have + """ + from mmdet.core import BitmapMasks + + (N, C, H, W) = input_shape + + rng = np.random.RandomState(0) + + imgs = rng.rand(*input_shape) + + img_metas = [{ + 'img_shape': (H, W, C), + 'ori_shape': (H, W, C), + 'pad_shape': (H, W, C), + 'filename': '.png', + 'scale_factor': 1.0, + 'flip': False, + } for _ in range(N)] + + gt_bboxes = [] + gt_labels = [] + gt_masks = [] + + for batch_idx in range(N): + if num_items is None: + num_boxes = rng.randint(1, 10) + else: + num_boxes = num_items[batch_idx] + + cx, cy, bw, bh = rng.rand(num_boxes, 4).T + + tl_x = ((cx * W) - (W * bw / 2)).clip(0, W) + tl_y = ((cy * H) - (H * bh / 2)).clip(0, H) + br_x = ((cx * W) + (W * bw / 2)).clip(0, W) + br_y = ((cy * H) + (H * bh / 2)).clip(0, H) + + boxes = np.vstack([tl_x, tl_y, br_x, br_y]).T + class_idxs = rng.randint(1, num_classes, size=num_boxes) + + gt_bboxes.append(torch.FloatTensor(boxes)) + gt_labels.append(torch.LongTensor(class_idxs)) + + mask = np.random.randint(0, 2, (len(boxes), H, W), dtype=np.uint8) + gt_masks.append(BitmapMasks(mask, H, W)) + + mm_inputs = { + 'imgs': torch.FloatTensor(imgs).requires_grad_(True), + 'img_metas': img_metas, + 'gt_bboxes': gt_bboxes, + 'gt_labels': gt_labels, + 'gt_bboxes_ignore': None, + 'gt_masks': gt_masks, + } + return mm_inputs + + +def test_yolact_forward(): + model, train_cfg, test_cfg = _get_detector_cfg( + 'yolact/yolact_r50_1x8_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 550, 550) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + gt_masks = mm_inputs['gt_masks'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + gt_masks=gt_masks, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) + + +def test_detr_forward(): + model, train_cfg, test_cfg = _get_detector_cfg( + 'detr/detr_r50_8x4_150e_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg) + + input_shape = (1, 3, 550, 550) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train with non-empty truth batch + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) 
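+    # num_items=[0] gives the single batch item zero boxes, so the Hungarian
+    # matcher should assign every query to background; the summed loss must
+    # still be positive because the classification term never vanishes.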
+ imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) diff --git a/thirdparty/mmdetection/tests/test_models/test_heads.py b/thirdparty/mmdetection/tests/test_models/test_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..2632b6690b2846399a5b6dfb7e2a2981b76722a2 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_heads.py @@ -0,0 +1,1313 @@ +import mmcv +import numpy as np +import torch + +from mmdet.core import bbox2roi, build_assigner, build_sampler +from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps +from mmdet.models.dense_heads import (AnchorHead, CornerHead, FCOSHead, + FSAFHead, GuidedAnchorHead, PAAHead, + SABLRetinaHead, TransformerHead, + VFNetHead, YOLACTHead, YOLACTProtonet, + YOLACTSegmHead, paa_head) +from mmdet.models.dense_heads.paa_head import levels_to_images +from mmdet.models.roi_heads.bbox_heads import BBoxHead, SABLHead +from mmdet.models.roi_heads.mask_heads import FCNMaskHead, MaskIoUHead + + +def test_paa_head_loss(): + """Tests paa head loss when truth is empty and non-empty.""" + + class mock_skm(object): + + def GaussianMixture(self, *args, **kwargs): + return self + + def fit(self, loss): + pass + + def predict(self, loss): + components = np.zeros_like(loss, dtype=np.long) + return components.reshape(-1) + + def score_samples(self, loss): + scores = np.random.random(len(loss)) + return scores + + paa_head.skm = mock_skm() + + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = PAAHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + self.init_weights() + cls_scores, bbox_preds, iou_preds = self(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, iou_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
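+    # (sklearn's GMM was monkey-patched with mock_skm above, so the
+    # score-based positive/negative split inside PAAHead.loss is
+    # deterministic and runs on CPU; the IoU branch is supervised on
+    # positives only, so loss_iou is expected to vanish as well.)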
+ empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + empty_iou_loss = empty_gt_losses['loss_iou'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + assert empty_iou_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, iou_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + onegt_iou_loss = one_gt_losses['loss_iou'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + assert onegt_iou_loss.item() > 0, 'box loss should be non-zero' + n, c, h, w = 10, 4, 20, 20 + mlvl_tensor = [torch.ones(n, c, h, w) for i in range(5)] + results = levels_to_images(mlvl_tensor) + assert len(results) == n + assert results[0].size() == (h * w * 5, c) + assert self.with_score_voting + cls_scores = [torch.ones(4, 5, 5)] + bbox_preds = [torch.ones(4, 5, 5)] + iou_preds = [torch.ones(1, 5, 5)] + mlvl_anchors = [torch.ones(5 * 5, 4)] + img_shape = None + scale_factor = [0.5, 0.5] + cfg = mmcv.Config( + dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + rescale = False + self._get_bboxes_single( + cls_scores, + bbox_preds, + iou_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=rescale) + + +def test_fcos_head_loss(): + """Tests fcos head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = FCOSHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds, centerness = self.forward(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, centerness, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
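+    # (The centerness branch is supervised only on positive points, so it is
+    # omitted from the empty-GT checks below.)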
+ empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, centerness, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_vfnet_head_loss(): + """Tests vfnet head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = VFNetHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict(type='VarifocalLoss', use_sigmoid=True, loss_weight=1.0)) + if torch.cuda.is_available(): + self.cuda() + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size).cuda() + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds, bbox_preds_refine = self.forward(feat) + # Test that empty ground truth encourages the network to predict + # background + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, bbox_preds_refine, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
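+        # (VarifocalLoss still penalises the all-background targets produced
+        # by an empty GT, which is why the cls loss stays positive below.)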
+ empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = self.loss(cls_scores, bbox_preds, bbox_preds_refine, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_anchor_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)) + self = AnchorHead(num_classes=4, in_channels=1, train_cfg=cfg) + + # Anchor head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))) + for i in range(len(self.anchor_generator.strides)) + ] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
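+    # AnchorHead returns one loss tensor per feature level, hence the sum()
+    # over levels before the scalar comparisons below.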
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_fsaf_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = dict( + reg_decoded_bbox=True, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + type='IoULoss', eps=1e-6, loss_weight=1.0, reduction='none')) + + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False)) + head = FSAFHead(num_classes=4, in_channels=1, train_cfg=train_cfg, **cfg) + if torch.cuda.is_available(): + head.cuda() + # FSAF head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))).cuda() + for i in range(len(head.anchor_generator.strides)) + ] + cls_scores, bbox_preds = head.forward(feat) + gt_bboxes_ignore = None + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + # Test that empty ground truth encourages the network to predict bkg + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + + empty_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
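+        # (Both FSAF losses were built with reduction='none' above so the
+        # head can re-weight individual anchors before reducing them itself.)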
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + +def test_ga_anchor_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5, + pos_weight=-1, + debug=False)) + head = GuidedAnchorHead(num_classes=4, in_channels=4, train_cfg=cfg) + + # Anchor head expects a multiple levels of features per image + if torch.cuda.is_available(): + head.cuda() + feat = [ + torch.rand(1, 4, s // (2**(i + 2)), s // (2**(i + 2))).cuda() + for i in range(len(head.approx_anchor_generator.base_anchors)) + ] + cls_scores, bbox_preds, shape_preds, loc_preds = head.forward(feat) + + # Test that empty ground truth encourages the network to predict + # background + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + + gt_bboxes_ignore = None + + empty_gt_losses = head.loss(cls_scores, bbox_preds, shape_preds, + loc_preds, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
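+        # (GuidedAnchorHead additionally returns loss_shape and loss_loc for
+        # its anchor-shape and anchor-location branches; only the cls and
+        # bbox terms are checked here.)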
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, shape_preds, + loc_preds, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_bbox_head_loss(): + """Tests bbox head loss when truth is empty and non-empty.""" + self = BBoxHead(in_channels=8, roi_feat_size=3) + + # Dummy proposals + proposal_list = [ + torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]), + ] + + target_cfg = mmcv.Config(dict(pos_weight=1)) + + # Test bbox loss when truth is empty + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes, + gt_labels) + + bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels, + target_cfg) + labels, label_weights, bbox_targets, bbox_weights = bbox_targets + + # Create dummy features "extracted" for each sampled bbox + num_sampled = sum(len(res.bboxes) for res in sampling_results) + rois = bbox2roi([res.bboxes for res in sampling_results]) + dummy_feats = torch.rand(num_sampled, 8 * 3 * 3) + cls_scores, bbox_preds = self.forward(dummy_feats) + + losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights, + bbox_targets, bbox_weights) + assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero' + assert losses.get('loss_bbox', 0) == 0, 'empty gt loss should be zero' + + # Test bbox loss when truth is non-empty + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + + sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes, + gt_labels) + rois = bbox2roi([res.bboxes for res in sampling_results]) + + bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels, + target_cfg) + labels, label_weights, bbox_targets, bbox_weights = bbox_targets + + # Create dummy features "extracted" for each sampled bbox + num_sampled = sum(len(res.bboxes) for res in sampling_results) + dummy_feats = torch.rand(num_sampled, 8 * 3 * 3) + cls_scores, bbox_preds = self.forward(dummy_feats) + + losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights, + bbox_targets, bbox_weights) + assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero' + assert losses.get('loss_bbox', 0) > 0, 'box-loss should be non-zero' + + +def test_sabl_bbox_head_loss(): + """Tests bbox head loss when truth is empty and non-empty.""" + self = SABLHead( + num_classes=4, + cls_in_channels=3, + reg_in_channels=3, + cls_out_channels=3, + reg_offset_out_channels=3, + reg_cls_out_channels=3, + roi_feat_size=7) + + # Dummy proposals + proposal_list = [ + torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]), + ] + + target_cfg = mmcv.Config(dict(pos_weight=1)) + + # Test bbox loss when truth is empty + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + 
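+    # With no GT boxes every sampled proposal is a negative, so the bucket
+    # classification and offset regression targets below are zero-weighted.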
sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes,
+                                            gt_labels)
+
+    bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels,
+                                    target_cfg)
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    rois = bbox2roi([res.bboxes for res in sampling_results])
+    dummy_feats = torch.rand(num_sampled, 3, 7, 7)
+    cls_scores, bbox_preds = self.forward(dummy_feats)
+
+    losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights,
+                       bbox_targets, bbox_weights)
+    assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero'
+    assert losses.get('loss_bbox_cls',
+                      0) == 0, 'empty gt bbox-cls-loss should be zero'
+    assert losses.get('loss_bbox_reg',
+                      0) == 0, 'empty gt bbox-reg-loss should be zero'
+
+    # Test bbox loss when truth is non-empty
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+
+    sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes,
+                                            gt_labels)
+    rois = bbox2roi([res.bboxes for res in sampling_results])
+
+    bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels,
+                                    target_cfg)
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    dummy_feats = torch.rand(num_sampled, 3, 7, 7)
+    cls_scores, bbox_preds = self.forward(dummy_feats)
+
+    losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights,
+                       bbox_targets, bbox_weights)
+    assert losses.get('loss_bbox_cls',
+                      0) > 0, 'bbox-cls-loss should be non-zero'
+    assert losses.get('loss_bbox_reg',
+                      0) > 0, 'bbox-reg-loss should be non-zero'
+
+
+def test_sabl_retina_head_loss():
+    """Tests SABL retina head loss when truth is empty and non-empty."""
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3)
+    }]
+
+    cfg = mmcv.Config(
+        dict(
+            assigner=dict(
+                type='ApproxMaxIoUAssigner',
+                pos_iou_thr=0.5,
+                neg_iou_thr=0.4,
+                min_pos_iou=0.0,
+                ignore_iof_thr=-1),
+            allowed_border=-1,
+            pos_weight=-1,
+            debug=False))
+    head = SABLRetinaHead(
+        num_classes=4,
+        in_channels=3,
+        feat_channels=10,
+        loss_cls=dict(
+            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
+        train_cfg=cfg)
+    if torch.cuda.is_available():
+        head.cuda()
+        # SABL retina head expects multiple levels of features per image
+        feat = [
+            torch.rand(1, 3, s // (2**(i + 2)), s // (2**(i + 2))).cuda()
+            for i in range(len(head.approx_anchor_generator.base_anchors))
+        ]
+        cls_scores, bbox_preds = head.forward(feat)
+
+        # Test that empty ground truth encourages the network
+        # to predict background
+        gt_bboxes = [torch.empty((0, 4)).cuda()]
+        gt_labels = [torch.LongTensor([]).cuda()]
+
+        gt_bboxes_ignore = None
+        empty_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes,
+                                    gt_labels, img_metas, gt_bboxes_ignore)
+        # When there is no truth, the cls loss should be nonzero but there
+        # should be no box loss.
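+        # (SABL splits localisation into a bucket-classification term and a
+        # within-bucket regression term, hence the two bbox losses below.)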
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_cls_loss = sum(empty_gt_losses['loss_bbox_cls']) + empty_box_reg_loss = sum(empty_gt_losses['loss_bbox_reg']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_cls_loss.item() == 0, ( + 'there should be no box cls loss when there are no true boxes') + assert empty_box_reg_loss.item() == 0, ( + 'there should be no box reg loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should + # be nonzero for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_cls_loss = sum(one_gt_losses['loss_bbox_cls']) + onegt_box_reg_loss = sum(one_gt_losses['loss_bbox_reg']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_cls_loss.item() > 0, 'box loss cls should be non-zero' + assert onegt_box_reg_loss.item() > 0, 'box loss reg should be non-zero' + + +def test_refine_boxes(): + """Mirrors the doctest in + ``mmdet.models.bbox_heads.bbox_head.BBoxHead.refine_boxes`` but checks for + multiple values of n_roi / n_img.""" + self = BBoxHead(reg_class_agnostic=True) + + test_settings = [ + + # Corner case: less rois than images + { + 'n_roi': 2, + 'n_img': 4, + 'rng': 34285940 + }, + + # Corner case: no images + { + 'n_roi': 0, + 'n_img': 0, + 'rng': 52925222 + }, + + # Corner cases: few images / rois + { + 'n_roi': 1, + 'n_img': 1, + 'rng': 1200281 + }, + { + 'n_roi': 2, + 'n_img': 1, + 'rng': 1200282 + }, + { + 'n_roi': 2, + 'n_img': 2, + 'rng': 1200283 + }, + { + 'n_roi': 1, + 'n_img': 2, + 'rng': 1200284 + }, + + # Corner case: no rois few images + { + 'n_roi': 0, + 'n_img': 1, + 'rng': 23955860 + }, + { + 'n_roi': 0, + 'n_img': 2, + 'rng': 25830516 + }, + + # Corner case: no rois many images + { + 'n_roi': 0, + 'n_img': 10, + 'rng': 671346 + }, + { + 'n_roi': 0, + 'n_img': 20, + 'rng': 699807 + }, + + # Corner case: cal_similarity num rois and images + { + 'n_roi': 20, + 'n_img': 20, + 'rng': 1200238 + }, + { + 'n_roi': 10, + 'n_img': 20, + 'rng': 1200238 + }, + { + 'n_roi': 5, + 'n_img': 5, + 'rng': 1200238 + }, + + # ---------------------------------- + # Common case: more rois than images + { + 'n_roi': 100, + 'n_img': 1, + 'rng': 337156 + }, + { + 'n_roi': 150, + 'n_img': 2, + 'rng': 275898 + }, + { + 'n_roi': 500, + 'n_img': 5, + 'rng': 4903221 + }, + ] + + for demokw in test_settings: + try: + n_roi = demokw['n_roi'] + n_img = demokw['n_img'] + rng = demokw['rng'] + + print(f'Test refine_boxes case: {demokw!r}') + tup = _demodata_refine_boxes(n_roi, n_img, rng=rng) + rois, labels, bbox_preds, pos_is_gts, img_metas = tup + bboxes_list = self.refine_bboxes(rois, labels, bbox_preds, + pos_is_gts, img_metas) + assert len(bboxes_list) == n_img + assert sum(map(len, bboxes_list)) <= n_roi + assert all(b.shape[1] == 4 for b in bboxes_list) + except Exception: + print(f'Test failed with demokw={demokw!r}') + raise + + +def _demodata_refine_boxes(n_roi, n_img, rng=0): + """Create random test data for the + ``mmdet.models.bbox_heads.bbox_head.BBoxHead.refine_boxes`` method.""" + import numpy as np + from mmdet.core.bbox.demodata import random_boxes + from mmdet.core.bbox.demodata import ensure_rng + try: + import kwarray + except ImportError: + import pytest + pytest.skip('kwarray is 
required for this test') + scale = 512 + rng = ensure_rng(rng) + img_metas = [{'img_shape': (scale, scale)} for _ in range(n_img)] + # Create rois in the expected format + roi_boxes = random_boxes(n_roi, scale=scale, rng=rng) + if n_img == 0: + assert n_roi == 0, 'cannot have any rois if there are no images' + img_ids = torch.empty((0, ), dtype=torch.long) + roi_boxes = torch.empty((0, 4), dtype=torch.float32) + else: + img_ids = rng.randint(0, n_img, (n_roi, )) + img_ids = torch.from_numpy(img_ids) + rois = torch.cat([img_ids[:, None].float(), roi_boxes], dim=1) + # Create other args + labels = rng.randint(0, 2, (n_roi, )) + labels = torch.from_numpy(labels).long() + bbox_preds = random_boxes(n_roi, scale=scale, rng=rng) + # For each image, pretend random positive boxes are gts + is_label_pos = (labels.numpy() > 0).astype(np.int) + lbl_per_img = kwarray.group_items(is_label_pos, img_ids.numpy()) + pos_per_img = [sum(lbl_per_img.get(gid, [])) for gid in range(n_img)] + # randomly generate with numpy then sort with torch + _pos_is_gts = [ + rng.randint(0, 2, (npos, )).astype(np.uint8) for npos in pos_per_img + ] + pos_is_gts = [ + torch.from_numpy(p).sort(descending=True)[0] for p in _pos_is_gts + ] + return rois, labels, bbox_preds, pos_is_gts, img_metas + + +def test_mask_head_loss(): + """Test mask head loss when mask target is empty.""" + self = FCNMaskHead( + num_convs=1, + roi_feat_size=6, + in_channels=8, + conv_out_channels=8, + num_classes=8) + + # Dummy proposals + proposal_list = [ + torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]), + ] + + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes, + gt_labels) + + # create dummy mask + import numpy as np + from mmdet.core import BitmapMasks + dummy_mask = np.random.randint(0, 2, (1, 160, 240), dtype=np.uint8) + gt_masks = [BitmapMasks(dummy_mask, 160, 240)] + + # create dummy train_cfg + train_cfg = mmcv.Config(dict(mask_size=12, mask_thr_binary=0.5)) + + # Create dummy features "extracted" for each sampled bbox + num_sampled = sum(len(res.bboxes) for res in sampling_results) + dummy_feats = torch.rand(num_sampled, 8, 6, 6) + + mask_pred = self.forward(dummy_feats) + mask_targets = self.get_targets(sampling_results, gt_masks, train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.loss(mask_pred, mask_targets, pos_labels) + + onegt_mask_loss = sum(loss_mask['loss_mask']) + assert onegt_mask_loss.item() > 0, 'mask loss should be non-zero' + + # test mask_iou_head + mask_iou_head = MaskIoUHead( + num_convs=1, + num_fcs=1, + roi_feat_size=6, + in_channels=8, + conv_out_channels=8, + fc_out_channels=8, + num_classes=8) + + pos_mask_pred = mask_pred[range(mask_pred.size(0)), pos_labels] + mask_iou_pred = mask_iou_head(dummy_feats, pos_mask_pred) + pos_mask_iou_pred = mask_iou_pred[range(mask_iou_pred.size(0)), pos_labels] + + mask_iou_targets = mask_iou_head.get_targets(sampling_results, gt_masks, + pos_mask_pred, mask_targets, + train_cfg) + loss_mask_iou = mask_iou_head.loss(pos_mask_iou_pred, mask_iou_targets) + onegt_mask_iou_loss = loss_mask_iou['loss_mask_iou'].sum() + assert onegt_mask_iou_loss.item() >= 0 + + +def _dummy_bbox_sampling(proposal_list, gt_bboxes, gt_labels): + """Create sample results that can be passed to BBoxHead.get_targets.""" + num_imgs = 1 + feat = torch.rand(1, 1, 3, 3) + assign_config = dict( + type='MaxIoUAssigner', + 
pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1) + sampler_config = dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True) + bbox_assigner = build_assigner(assign_config) + bbox_sampler = build_sampler(sampler_config) + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + for i in range(num_imgs): + assign_result = bbox_assigner.assign(proposal_list[i], gt_bboxes[i], + gt_bboxes_ignore[i], gt_labels[i]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=feat) + sampling_results.append(sampling_result) + + return sampling_results + + +def test_corner_head_loss(): + """Tests corner head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + self = CornerHead(num_classes=4, in_channels=1) + + # Corner head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // 4, s // 4) for _ in range(self.num_feat_levels) + ] + tl_heats, br_heats, tl_embs, br_embs, tl_offs, br_offs = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + empty_det_loss = sum(empty_gt_losses['det_loss']) + empty_push_loss = sum(empty_gt_losses['push_loss']) + empty_pull_loss = sum(empty_gt_losses['pull_loss']) + empty_off_loss = sum(empty_gt_losses['off_loss']) + assert empty_det_loss.item() > 0, 'det loss should be non-zero' + assert empty_push_loss.item() == 0, ( + 'there should be no push loss when there are no true boxes') + assert empty_pull_loss.item() == 0, ( + 'there should be no pull loss when there are no true boxes') + assert empty_off_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_det_loss = sum(one_gt_losses['det_loss']) + onegt_push_loss = sum(one_gt_losses['push_loss']) + onegt_pull_loss = sum(one_gt_losses['pull_loss']) + onegt_off_loss = sum(one_gt_losses['off_loss']) + assert onegt_det_loss.item() > 0, 'det loss should be non-zero' + assert onegt_push_loss.item() == 0, ( + 'there should be no push loss when there are only one true box') + assert onegt_pull_loss.item() > 0, 'pull loss should be non-zero' + assert onegt_off_loss.item() > 0, 'off loss should be non-zero' + + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874], + [123.6667, 123.8757, 138.6326, 251.8874]]), + ] + gt_labels = [torch.LongTensor([2, 3])] + + # equalize the corners' embedding value of different objects to make the + # push_loss larger than 0 + gt_bboxes_ind = (gt_bboxes[0] // 4).int().tolist() + for tl_emb_feat, br_emb_feat in zip(tl_embs, br_embs): + tl_emb_feat[:, :, gt_bboxes_ind[0][1], + gt_bboxes_ind[0][0]] = tl_emb_feat[:, :, + gt_bboxes_ind[1][1], + gt_bboxes_ind[1][0]] + br_emb_feat[:, :, gt_bboxes_ind[0][3], + gt_bboxes_ind[0][2]] = br_emb_feat[:, 
:, + gt_bboxes_ind[1][3], + gt_bboxes_ind[1][2]] + + two_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + twogt_det_loss = sum(two_gt_losses['det_loss']) + twogt_push_loss = sum(two_gt_losses['push_loss']) + twogt_pull_loss = sum(two_gt_losses['pull_loss']) + twogt_off_loss = sum(two_gt_losses['off_loss']) + assert twogt_det_loss.item() > 0, 'det loss should be non-zero' + assert twogt_push_loss.item() > 0, 'push loss should be non-zero' + assert twogt_pull_loss.item() > 0, 'pull loss should be non-zero' + assert twogt_off_loss.item() > 0, 'off loss should be non-zero' + + +def test_corner_head_encode_and_decode_heatmap(): + """Tests corner head generating and decoding the heatmap.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3), + 'border': (0, 0, 0, 0) + }] + + gt_bboxes = [ + torch.Tensor([[10, 20, 200, 240], [40, 50, 100, 200], + [10, 20, 200, 240]]) + ] + gt_labels = [torch.LongTensor([1, 1, 2])] + + self = CornerHead(num_classes=4, in_channels=1, corner_emb_channels=1) + + feat = [ + torch.rand(1, 1, s // 4, s // 4) for _ in range(self.num_feat_levels) + ] + + targets = self.get_targets( + gt_bboxes, + gt_labels, + feat[0].shape, + img_metas[0]['pad_shape'], + with_corner_emb=self.with_corner_emb) + + gt_tl_heatmap = targets['topleft_heatmap'] + gt_br_heatmap = targets['bottomright_heatmap'] + gt_tl_offset = targets['topleft_offset'] + gt_br_offset = targets['bottomright_offset'] + embedding = targets['corner_embedding'] + [top, left], [bottom, right] = embedding[0][0] + gt_tl_embedding_heatmap = torch.zeros([1, 1, s // 4, s // 4]) + gt_br_embedding_heatmap = torch.zeros([1, 1, s // 4, s // 4]) + gt_tl_embedding_heatmap[0, 0, top, left] = 1 + gt_br_embedding_heatmap[0, 0, bottom, right] = 1 + + batch_bboxes, batch_scores, batch_clses = self.decode_heatmap( + tl_heat=gt_tl_heatmap, + br_heat=gt_br_heatmap, + tl_off=gt_tl_offset, + br_off=gt_br_offset, + tl_emb=gt_tl_embedding_heatmap, + br_emb=gt_br_embedding_heatmap, + img_meta=img_metas[0], + k=100, + kernel=3, + distance_threshold=0.5) + + bboxes = batch_bboxes.view(-1, 4) + scores = batch_scores.view(-1, 1) + clses = batch_clses.view(-1, 1) + + idx = scores.argsort(dim=0, descending=True) + bboxes = bboxes[idx].view(-1, 4) + scores = scores[idx].view(-1) + clses = clses[idx].view(-1) + + valid_bboxes = bboxes[torch.where(scores > 0.05)] + valid_labels = clses[torch.where(scores > 0.05)] + max_coordinate = valid_bboxes.max() + offsets = valid_labels.to(valid_bboxes) * (max_coordinate + 1) + gt_offsets = gt_labels[0].to(gt_bboxes[0]) * (max_coordinate + 1) + + offset_bboxes = valid_bboxes + offsets[:, None] + offset_gtbboxes = gt_bboxes[0] + gt_offsets[:, None] + + iou_matrix = bbox_overlaps(offset_bboxes.numpy(), offset_gtbboxes.numpy()) + assert (iou_matrix == 1).sum() == 3 + + +def test_yolact_head_loss(): + """Tests yolact head losses when truth is empty and non-empty.""" + s = 550 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False, + min_gt_box_wh=[4.0, 4.0])) + bbox_head = YOLACTHead( + num_classes=80, + in_channels=256, + feat_channels=256, + anchor_generator=dict( + 
type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + base_sizes=[8, 16, 32, 64, 128], + ratios=[0.5, 1.0, 2.0], + strides=[550.0 / x for x in [69, 35, 18, 9, 5]], + centers=[(550 * 0.5 / x, 550 * 0.5 / x) + for x in [69, 35, 18, 9, 5]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True, + train_cfg=train_cfg) + segm_head = YOLACTSegmHead( + in_channels=256, + num_classes=80, + loss_segm=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)) + mask_head = YOLACTProtonet( + num_classes=80, + in_channels=256, + num_protos=32, + max_masks_to_train=100, + loss_mask_weight=6.125) + feat = [ + torch.rand(1, 256, feat_size, feat_size) + for feat_size in [69, 35, 18, 9, 5] + ] + cls_score, bbox_pred, coeff_pred = bbox_head.forward(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_masks = [torch.empty((0, 550, 550))] + gt_bboxes_ignore = None + empty_gt_losses, sampling_results = bbox_head.loss( + cls_score, + bbox_pred, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # Test segm head and mask head + segm_head_outs = segm_head(feat[0]) + empty_segm_loss = segm_head.loss(segm_head_outs, gt_masks, gt_labels) + mask_pred = mask_head(feat[0], coeff_pred, gt_bboxes, img_metas, + sampling_results) + empty_mask_loss = mask_head.loss(mask_pred, gt_masks, gt_bboxes, img_metas, + sampling_results) + # When there is no truth, the segm and mask loss should be zero. + empty_segm_loss = sum(empty_segm_loss['loss_segm']) + empty_mask_loss = sum(empty_mask_loss['loss_mask']) + assert empty_segm_loss.item() == 0, ( + 'there should be no segm loss when there are no true boxes') + assert empty_mask_loss == 0, ( + 'there should be no mask loss when there are no true boxes') + + # When truth is non-empty then cls, box, mask, segm loss should be + # nonzero for random inputs. 
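+    # (YOLACTProtonet consumes raw float mask tensors here rather than the
+    # BitmapMasks wrapper used by the two-stage mask heads above.)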
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+    gt_masks = [(torch.rand((1, 550, 550)) > 0.5).float()]
+
+    one_gt_losses, sampling_results = bbox_head.loss(
+        cls_score,
+        bbox_pred,
+        gt_bboxes,
+        gt_labels,
+        img_metas,
+        gt_bboxes_ignore=gt_bboxes_ignore)
+    one_gt_cls_loss = sum(one_gt_losses['loss_cls'])
+    one_gt_box_loss = sum(one_gt_losses['loss_bbox'])
+    assert one_gt_cls_loss.item() > 0, 'cls loss should be non-zero'
+    assert one_gt_box_loss.item() > 0, 'box loss should be non-zero'
+
+    one_gt_segm_loss = segm_head.loss(segm_head_outs, gt_masks, gt_labels)
+    mask_pred = mask_head(feat[0], coeff_pred, gt_bboxes, img_metas,
+                          sampling_results)
+    one_gt_mask_loss = mask_head.loss(mask_pred, gt_masks, gt_bboxes,
+                                      img_metas, sampling_results)
+    one_gt_segm_loss = sum(one_gt_segm_loss['loss_segm'])
+    one_gt_mask_loss = sum(one_gt_mask_loss['loss_mask'])
+    assert one_gt_segm_loss.item() > 0, 'segm loss should be non-zero'
+    assert one_gt_mask_loss.item() > 0, 'mask loss should be non-zero'
+
+
+def test_transformer_head_loss():
+    """Tests transformer head loss when truth is empty and non-empty."""
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3),
+        'batch_input_shape': (s, s)
+    }]
+    train_cfg = dict(
+        assigner=dict(
+            type='HungarianAssigner',
+            cls_weight=1.,
+            bbox_weight=5.,
+            iou_weight=2.,
+            iou_calculator=dict(type='BboxOverlaps2D'),
+            iou_mode='giou'))
+    transformer_cfg = dict(
+        type='Transformer',
+        embed_dims=4,
+        num_heads=1,
+        num_encoder_layers=1,
+        num_decoder_layers=1,
+        feedforward_channels=1,
+        dropout=0.1,
+        act_cfg=dict(type='ReLU', inplace=True),
+        norm_cfg=dict(type='LN'),
+        num_fcs=2,
+        pre_norm=False,
+        return_intermediate_dec=True)
+    positional_encoding_cfg = dict(
+        type='SinePositionalEncoding', num_feats=2, normalize=True)
+    self = TransformerHead(
+        num_classes=4,
+        in_channels=1,
+        num_fcs=2,
+        train_cfg=train_cfg,
+        transformer=transformer_cfg,
+        positional_encoding=positional_encoding_cfg)
+    self.init_weights()
+    feat = [
+        torch.rand(1, 1, s // feat_size, s // feat_size)
+        for feat_size in [4, 8, 16, 32, 64]
+    ]
+    cls_scores, bbox_preds = self.forward(feat, img_metas)
+    # Test that empty ground truth encourages the network to predict background
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+    gt_bboxes_ignore = None
+    empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
+                                img_metas, gt_bboxes_ignore)
+    # When there is no truth, the cls loss should be nonzero but there should
+    # be no box loss.
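+    # With return_intermediate_dec=True the head also emits per-decoder-layer
+    # auxiliary losses (keys like 'd0.loss_cls'), so the checks below match
+    # keys by substring rather than by exact name.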
+ for key, loss in empty_gt_losses.items(): + if 'cls' in key: + assert loss.item() > 0, 'cls loss should be non-zero' + elif 'bbox' in key: + assert loss.item( + ) == 0, 'there should be no box loss when there are no true boxes' + elif 'iou' in key: + assert loss.item( + ) == 0, 'there should be no iou loss when there are no true boxes' + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + for loss in one_gt_losses.values(): + assert loss.item( + ) > 0, 'cls loss, or box loss, or iou loss should be non-zero' + + # test forward_train + self.forward_train(feat, img_metas, gt_bboxes, gt_labels) + + # test inference mode + self.get_bboxes(cls_scores, bbox_preds, img_metas, rescale=True) diff --git a/thirdparty/mmdetection/tests/test_models/test_losses.py b/thirdparty/mmdetection/tests/test_models/test_losses.py new file mode 100644 index 0000000000000000000000000000000000000000..8a85cee43cda5d803f0c91d0736f7bdf78ab2f4b --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_losses.py @@ -0,0 +1,136 @@ +import pytest +import torch + +from mmdet.models import Accuracy, build_loss + + +def test_ce_loss(): + # use_mask and use_sigmoid cannot be true at the same time + with pytest.raises(AssertionError): + loss_cfg = dict( + type='CrossEntropyLoss', + use_mask=True, + use_sigmoid=True, + loss_weight=1.0) + build_loss(loss_cfg) + + # test loss with class weights + loss_cls_cfg = dict( + type='CrossEntropyLoss', + use_sigmoid=False, + class_weight=[0.8, 0.2], + loss_weight=1.0) + loss_cls = build_loss(loss_cls_cfg) + fake_pred = torch.Tensor([[100, -100]]) + fake_label = torch.Tensor([1]).long() + assert torch.allclose(loss_cls(fake_pred, fake_label), torch.tensor(40.)) + + loss_cls_cfg = dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0) + loss_cls = build_loss(loss_cls_cfg) + assert torch.allclose(loss_cls(fake_pred, fake_label), torch.tensor(200.)) + + +def test_varifocal_loss(): + # only sigmoid version of VarifocalLoss is implemented + with pytest.raises(AssertionError): + loss_cfg = dict( + type='VarifocalLoss', use_sigmoid=False, loss_weight=1.0) + build_loss(loss_cfg) + + # test that alpha should be greater than 0 + with pytest.raises(AssertionError): + loss_cfg = dict( + type='VarifocalLoss', + alpha=-0.75, + gamma=2.0, + use_sigmoid=True, + loss_weight=1.0) + build_loss(loss_cfg) + + # test that pred and target should be of the same size + loss_cls_cfg = dict( + type='VarifocalLoss', + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + reduction='mean', + loss_weight=1.0) + loss_cls = build_loss(loss_cls_cfg) + with pytest.raises(AssertionError): + fake_pred = torch.Tensor([[100.0, -100.0]]) + fake_target = torch.Tensor([[1.0]]) + loss_cls(fake_pred, fake_target) + + # test the calculation + loss_cls = build_loss(loss_cls_cfg) + fake_pred = torch.Tensor([[100.0, -100.0]]) + fake_target = torch.Tensor([[1.0, 0.0]]) + assert torch.allclose(loss_cls(fake_pred, fake_target), torch.tensor(0.0)) + + # test the loss with weights + loss_cls = build_loss(loss_cls_cfg) + fake_pred = torch.Tensor([[0.0, 100.0]]) + fake_target = torch.Tensor([[1.0, 1.0]]) + fake_weight = torch.Tensor([0.0, 1.0]) + assert torch.allclose( + loss_cls(fake_pred, fake_target, fake_weight), torch.tensor(0.0)) + + +def 
test_accuracy():
+    # test for empty pred
+    pred = torch.empty(0, 4)
+    label = torch.empty(0)
+    accuracy = Accuracy(topk=1)
+    acc = accuracy(pred, label)
+    assert acc.item() == 0
+
+    pred = torch.Tensor([[0.2, 0.3, 0.6, 0.5], [0.1, 0.1, 0.2, 0.6],
+                         [0.9, 0.0, 0.0, 0.1], [0.4, 0.7, 0.1, 0.1],
+                         [0.0, 0.0, 0.99, 0]])
+    # test for top1
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    accuracy = Accuracy(topk=1)
+    acc = accuracy(pred, true_label)
+    assert acc.item() == 100
+
+    # test for top1 with score thresh=0.8
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    accuracy = Accuracy(topk=1, thresh=0.8)
+    acc = accuracy(pred, true_label)
+    assert acc.item() == 40
+
+    # test for top2
+    accuracy = Accuracy(topk=2)
+    label = torch.Tensor([3, 2, 0, 0, 2]).long()
+    acc = accuracy(pred, label)
+    assert acc.item() == 100
+
+    # test for both top1 and top2
+    accuracy = Accuracy(topk=(1, 2))
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    acc = accuracy(pred, true_label)
+    for a in acc:
+        assert a.item() == 100
+
+    # topk is larger than pred class number
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy(topk=5)
+        accuracy(pred, true_label)
+
+    # wrong topk type
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy(topk='wrong type')
+        accuracy(pred, true_label)
+
+    # label size is larger than required
+    with pytest.raises(AssertionError):
+        label = torch.Tensor([2, 3, 0, 1, 2, 0]).long()  # size mismatch
+        accuracy = Accuracy()
+        accuracy(pred, label)
+
+    # wrong pred dimension
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy()
+        accuracy(pred[:, :, None], true_label)
diff --git a/thirdparty/mmdetection/tests/test_models/test_necks.py b/thirdparty/mmdetection/tests/test_models/test_necks.py
new file mode 100644
index 0000000000000000000000000000000000000000..56885477fd7791070b0238a417c8d8ff34a93691
--- /dev/null
+++ b/thirdparty/mmdetection/tests/test_models/test_necks.py
@@ -0,0 +1,238 @@
+import pytest
+import torch
+from torch.nn.modules.batchnorm import _BatchNorm
+
+from mmdet.models.necks import FPN, ChannelMapper
+
+
+def test_fpn():
+    """Tests fpn."""
+    s = 64
+    in_channels = [8, 16, 32, 64]
+    feat_sizes = [s // 2**i for i in range(4)]  # [64, 32, 16, 8]
+    out_channels = 8
+    # `num_outs` is not equal to len(in_channels) - start_level
+    with pytest.raises(AssertionError):
+        FPN(in_channels=in_channels,
+            out_channels=out_channels,
+            start_level=1,
+            num_outs=2)
+
+    # `end_level` is larger than len(in_channels) - 1
+    with pytest.raises(AssertionError):
+        FPN(in_channels=in_channels,
+            out_channels=out_channels,
+            start_level=1,
+            end_level=4,
+            num_outs=2)
+
+    # `num_outs` is not equal to end_level - start_level
+    with pytest.raises(AssertionError):
+        FPN(in_channels=in_channels,
+            out_channels=out_channels,
+            start_level=1,
+            end_level=3,
+            num_outs=1)
+
+    # Invalid `add_extra_convs` option
+    with pytest.raises(AssertionError):
+        FPN(in_channels=in_channels,
+            out_channels=out_channels,
+            start_level=1,
+            add_extra_convs='on_xxx',
+            num_outs=5)
+
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        start_level=1,
+        add_extra_convs=True,
+        num_outs=5)
+
+    # FPN expects multiple levels of features per image
+    feats = [
+        torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i])
+        for i in range(len(in_channels))
+    ]
+    outs = fpn_model(feats)
+    assert fpn_model.add_extra_convs == 'on_input'
+    assert len(outs) == fpn_model.num_outs
+    # with start_level=1, output level i has spatial size s // 2**(i + 1)
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Tests for fpn with no extra convs (pooling is used instead)
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        start_level=1,
+        add_extra_convs=False,
+        num_outs=5)
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    assert not fpn_model.add_extra_convs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Tests for fpn with lateral bns
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        start_level=1,
+        add_extra_convs=True,
+        no_norm_on_lateral=False,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        num_outs=5)
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    assert fpn_model.add_extra_convs == 'on_input'
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+    bn_exist = False
+    for m in fpn_model.modules():
+        if isinstance(m, _BatchNorm):
+            bn_exist = True
+    assert bn_exist
+
+    # Bilinear upsample
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        start_level=1,
+        add_extra_convs=True,
+        upsample_cfg=dict(mode='bilinear', align_corners=True),
+        num_outs=5)
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    assert fpn_model.add_extra_convs == 'on_input'
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Use a scale factor instead of a fixed upsample size
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        start_level=1,
+        add_extra_convs=True,
+        upsample_cfg=dict(scale_factor=2),
+        num_outs=5)
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Extra convs source is 'on_input'
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        add_extra_convs='on_input',
+        start_level=1,
+        num_outs=5)
+    assert fpn_model.add_extra_convs == 'on_input'
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Extra convs source is 'on_lateral'
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        add_extra_convs='on_lateral',
+        start_level=1,
+        num_outs=5)
+    assert fpn_model.add_extra_convs == 'on_lateral'
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # Extra convs source is 'on_output'
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        add_extra_convs='on_output',
+        start_level=1,
+        num_outs=5)
+    assert fpn_model.add_extra_convs == 'on_output'
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # extra_convs_on_inputs=False is equivalent to add_extra_convs='on_output'
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        add_extra_convs=True,
+        extra_convs_on_inputs=False,
+        start_level=1,
+        num_outs=5,
+    )
+    assert fpn_model.add_extra_convs == 'on_output'
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+    # extra_convs_on_inputs=True is equivalent to add_extra_convs='on_input'
+    fpn_model = FPN(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        add_extra_convs=True,
+        extra_convs_on_inputs=True,
+        start_level=1,
+        num_outs=5,
+    )
+    assert fpn_model.add_extra_convs == 'on_input'
+    outs = fpn_model(feats)
+    assert len(outs) == fpn_model.num_outs
+    for i in range(fpn_model.num_outs):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1))
+
+
+def test_channel_mapper():
+    """Tests ChannelMapper."""
+    s = 64
+    in_channels = [8, 16, 32, 64]
+    feat_sizes = [s // 2**i for i in range(4)]  # [64, 32, 16, 8]
+    out_channels = 8
+    kernel_size = 3
+    feats = [
+        torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i])
+        for i in range(len(in_channels))
+    ]
+
+    # in_channels must be a list
+    with pytest.raises(AssertionError):
+        channel_mapper = ChannelMapper(
+            in_channels=10, out_channels=out_channels, kernel_size=kernel_size)
+    # the length of channel_mapper's inputs must be equal to the length of
+    # in_channels
+    with pytest.raises(AssertionError):
+        channel_mapper = ChannelMapper(
+            in_channels=in_channels[:-1],
+            out_channels=out_channels,
+            kernel_size=kernel_size)
+        channel_mapper(feats)
+
+    channel_mapper = ChannelMapper(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        kernel_size=kernel_size)
+
+    outs = channel_mapper(feats)
+    assert len(outs) == len(feats)
+    # ChannelMapper keeps each level's spatial size, so level i stays s // 2**i
+    for i in range(len(feats)):
+        assert outs[i].shape[1] == out_channels
+        assert outs[i].shape[2] == outs[i].shape[3] == s // (2**i)
diff --git a/thirdparty/mmdetection/tests/test_models/test_pisa_heads.py b/thirdparty/mmdetection/tests/test_models/test_pisa_heads.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b1d42db49c498aca59b154b18d59794749643bf
--- /dev/null
+++ b/thirdparty/mmdetection/tests/test_models/test_pisa_heads.py
@@ -0,0 +1,244 @@
+import mmcv
+import torch
+
+from mmdet.models.dense_heads import PISARetinaHead, PISASSDHead
+from mmdet.models.roi_heads import PISARoIHead
+
+
+def test_pisa_retinanet_head_loss():
+    """Tests pisa retinanet head loss when truth is empty and non-empty."""
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3)
+    }]
+
+    cfg = mmcv.Config(
+        dict(
+            assigner=dict(
+                type='MaxIoUAssigner',
+                pos_iou_thr=0.7,
+                neg_iou_thr=0.3,
+                min_pos_iou=0.3,
+                match_low_quality=True,
+                ignore_iof_thr=-1),
+            sampler=dict(
+                type='RandomSampler',
+                num=256,
+                pos_fraction=0.5,
+                neg_pos_ub=-1,
+                add_gt_as_proposals=False),
+            isr=dict(k=2., bias=0.),
+            carl=dict(k=1., bias=0.2),
+            allowed_border=0,
+            pos_weight=-1,
+            debug=False))
+    self = PISARetinaHead(num_classes=4, in_channels=1, train_cfg=cfg)
+
+    # Anchor head expects multiple levels of features per image
+    feat = [
+        torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2)))
+        for i in range(len(self.anchor_generator.strides))
+    ]
+    cls_scores, bbox_preds = self.forward(feat)
+
+    # Test that empty ground truth encourages the network to predict background
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+
+    gt_bboxes_ignore = None
+    empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
+                                img_metas, gt_bboxes_ignore)
+    # When there is no truth, the cls loss should be nonzero but there should
+    # be no box loss.
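+    # (A sketch of why, assuming the usual anchor-head wiring: with no gt
+    # boxes every anchor is assigned to background, so the classification
+    # loss stays positive, while the bbox regression loss has no positive
+    # samples to average over and reduces to 0.)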
+    empty_cls_loss = empty_gt_losses['loss_cls'].sum()
+    empty_box_loss = empty_gt_losses['loss_bbox'].sum()
+    assert empty_cls_loss.item() > 0, 'cls loss should be non-zero'
+    assert empty_box_loss.item() == 0, (
+        'there should be no box loss when there are no true boxes')
+
+    # When truth is non-empty then both cls and box loss should be nonzero for
+    # random inputs
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+    one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
+                              img_metas, gt_bboxes_ignore)
+    onegt_cls_loss = one_gt_losses['loss_cls'].sum()
+    onegt_box_loss = one_gt_losses['loss_bbox'].sum()
+    assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero'
+    assert onegt_box_loss.item() > 0, 'box loss should be non-zero'
+
+
+def test_pisa_ssd_head_loss():
+    """Tests pisa ssd head loss when truth is empty and non-empty."""
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3)
+    }]
+
+    cfg = mmcv.Config(
+        dict(
+            assigner=dict(
+                type='MaxIoUAssigner',
+                pos_iou_thr=0.5,
+                neg_iou_thr=0.5,
+                min_pos_iou=0.,
+                ignore_iof_thr=-1,
+                gt_max_assign_all=False),
+            isr=dict(k=2., bias=0.),
+            carl=dict(k=1., bias=0.2),
+            smoothl1_beta=1.,
+            allowed_border=-1,
+            pos_weight=-1,
+            neg_pos_ratio=3,
+            debug=False))
+    ssd_anchor_generator = dict(
+        type='SSDAnchorGenerator',
+        scale_major=False,
+        input_size=300,
+        strides=[1],
+        ratios=([2], ),
+        basesize_ratio_range=(0.15, 0.9))
+    self = PISASSDHead(
+        num_classes=4,
+        in_channels=(1, ),
+        train_cfg=cfg,
+        anchor_generator=ssd_anchor_generator)
+
+    # Anchor head expects multiple levels of features per image
+    feat = [
+        torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2)))
+        for i in range(len(self.anchor_generator.strides))
+    ]
+    cls_scores, bbox_preds = self.forward(feat)
+
+    # Test that empty ground truth encourages the network to predict background
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+
+    gt_bboxes_ignore = None
+    empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
+                                img_metas, gt_bboxes_ignore)
+    # When there is no truth, there should be no cls loss and no box loss:
+    # see the note on SSD's hard negative mining below.
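+    # The SSD head returns its losses as lists of tensors, hence Python's
+    # sum() below rather than Tensor.sum().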
+    empty_cls_loss = sum(empty_gt_losses['loss_cls'])
+    empty_box_loss = sum(empty_gt_losses['loss_bbox'])
+    # SSD is special: hard negative mining keeps #pos:#neg = 1:3, so an empty
+    # gt also leads to a cls loss of 0
+    assert empty_cls_loss.item() == 0, (
+        'there should be no cls loss when there are no true boxes')
+    assert empty_box_loss.item() == 0, (
+        'there should be no box loss when there are no true boxes')
+
+    # When truth is non-empty then both cls and box loss should be nonzero for
+    # random inputs
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+    one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
+                              img_metas, gt_bboxes_ignore)
+    onegt_cls_loss = sum(one_gt_losses['loss_cls'])
+    onegt_box_loss = sum(one_gt_losses['loss_bbox'])
+    assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero'
+    assert onegt_box_loss.item() > 0, 'box loss should be non-zero'
+
+
+def test_pisa_roi_head_loss():
+    """Tests pisa roi head loss when truth is empty and non-empty."""
+    train_cfg = mmcv.Config(
+        dict(
+            assigner=dict(
+                type='MaxIoUAssigner',
+                pos_iou_thr=0.7,
+                neg_iou_thr=0.3,
+                min_pos_iou=0.3,
+                match_low_quality=True,
+                ignore_iof_thr=-1),
+            sampler=dict(
+                type='ScoreHLRSampler',
+                num=4,
+                pos_fraction=0.25,
+                neg_pos_ub=-1,
+                add_gt_as_proposals=True,
+                k=0.5,
+                bias=0.),
+            isr=dict(k=2., bias=0.),
+            carl=dict(k=1., bias=0.2),
+            allowed_border=0,
+            pos_weight=-1,
+            debug=False))
+
+    bbox_roi_extractor = dict(
+        type='SingleRoIExtractor',
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
+        out_channels=1,
+        featmap_strides=[1])
+
+    bbox_head = dict(
+        type='Shared2FCBBoxHead',
+        in_channels=1,
+        fc_out_channels=2,
+        roi_feat_size=7,
+        num_classes=4,
+        bbox_coder=dict(
+            type='DeltaXYWHBBoxCoder',
+            target_means=[0., 0., 0., 0.],
+            target_stds=[0.1, 0.1, 0.2, 0.2]),
+        reg_class_agnostic=False,
+        loss_cls=dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
+        loss_bbox=dict(type='L1Loss', loss_weight=1.0))
+
+    self = PISARoIHead(bbox_roi_extractor, bbox_head, train_cfg=train_cfg)
+
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3)
+    }]
+
+    # The RoI head expects multiple levels of features per image
+    feat = [
+        torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2)))
+        for i in range(1)
+    ]
+
+    proposal_list = [
+        torch.Tensor([[22.6667, 22.8757, 238.6326, 151.8874], [0, 3, 5, 7]])
+    ]
+
+    # Test that empty ground truth encourages the network to predict background
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+    gt_bboxes_ignore = None
+
+    empty_gt_losses = self.forward_train(feat, img_metas, proposal_list,
+                                         gt_bboxes, gt_labels,
+                                         gt_bboxes_ignore)
+
+    # When there is no truth, the cls loss should be nonzero but there should
+    # be no box loss.
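+    # (Here the RoI head still classifies every sampled proposal, all of
+    # them as background, so a positive CE cls loss is expected; without
+    # positive samples there are no regression targets and the box loss
+    # stays 0.)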
+ empty_cls_loss = empty_gt_losses['loss_cls'].sum() + empty_box_loss = empty_gt_losses['loss_bbox'].sum() + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + + one_gt_losses = self.forward_train(feat, img_metas, proposal_list, + gt_bboxes, gt_labels, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'].sum() + onegt_box_loss = one_gt_losses['loss_bbox'].sum() + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/thirdparty/mmdetection/tests/test_models/test_position_encoding.py b/thirdparty/mmdetection/tests/test_models/test_position_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..94fdd479a4775d47ae0d0f069c7a37e862b84b2f --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_position_encoding.py @@ -0,0 +1,38 @@ +import pytest +import torch + +from mmdet.models.utils import (LearnedPositionalEncoding, + SinePositionalEncoding) + + +def test_sine_positional_encoding(num_feats=16, batch_size=2): + # test invalid type of scale + with pytest.raises(AssertionError): + module = SinePositionalEncoding( + num_feats, scale=(3., ), normalize=True) + + module = SinePositionalEncoding(num_feats) + h, w = 10, 6 + mask = torch.rand(batch_size, h, w) > 0.5 + assert not module.normalize + out = module(mask) + assert out.shape == (batch_size, num_feats * 2, h, w) + + # set normalize + module = SinePositionalEncoding(num_feats, normalize=True) + assert module.normalize + out = module(mask) + assert out.shape == (batch_size, num_feats * 2, h, w) + + +def test_learned_positional_encoding(num_feats=16, + row_num_embed=10, + col_num_embed=10, + batch_size=2): + module = LearnedPositionalEncoding(num_feats, row_num_embed, col_num_embed) + assert module.row_embed.weight.shape == (row_num_embed, num_feats) + assert module.col_embed.weight.shape == (col_num_embed, num_feats) + h, w = 10, 6 + mask = torch.rand(batch_size, h, w) > 0.5 + out = module(mask) + assert out.shape == (batch_size, num_feats * 2, h, w) diff --git a/thirdparty/mmdetection/tests/test_models/test_roi_extractor.py b/thirdparty/mmdetection/tests/test_models/test_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..22743f2d3be3b4be82bc46699b76fecf7af60020 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_models/test_roi_extractor.py @@ -0,0 +1,113 @@ +import pytest +import torch + +from mmdet.models.roi_heads.roi_extractors import GenericRoIExtractor + + +def test_groie(): + # test with pre/post + cfg = dict( + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False)) + + groie = GenericRoIExtractor(**cfg) + + feats = ( + torch.rand((1, 256, 200, 336)), + torch.rand((1, 256, 100, 168)), + torch.rand((1, 256, 50, 84)), + torch.rand((1, 256, 25, 42)), + ) + + rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]]) + + res = 
groie(feats, rois)
+    assert res.shape == torch.Size([1, 256, 7, 7])
+
+    # test w/o pre/post
+    cfg = dict(
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256,
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    res = groie(feats, rois)
+    assert res.shape == torch.Size([1, 256, 7, 7])
+
+    # test w/o pre/post concat
+    cfg = dict(
+        aggregation='concat',
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256 * 4,
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    res = groie(feats, rois)
+    assert res.shape == torch.Size([1, 1024, 7, 7])
+
+    # test unsupported aggregation method
+    with pytest.raises(AssertionError):
+        cfg = dict(
+            aggregation='not support',
+            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+            out_channels=1024,
+            featmap_strides=[4, 8, 16, 32])
+        _ = GenericRoIExtractor(**cfg)
+
+    # test concat channels number
+    cfg = dict(
+        aggregation='concat',
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256 * 5,  # 256*5 != 256*4
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    # out_channels does not equal the sum of the feature channels
+    with pytest.raises(AssertionError):
+        _ = groie(feats, rois)
diff --git a/thirdparty/mmdetection/tests/test_models/test_transformer.py b/thirdparty/mmdetection/tests/test_models/test_transformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e21549ae8ba7fed3a9b8bfe152e528cc9b2e38f
--- /dev/null
+++ b/thirdparty/mmdetection/tests/test_models/test_transformer.py
@@ -0,0 +1,523 @@
+from unittest.mock import patch
+
+import pytest
+import torch
+
+from mmdet.models.utils import (FFN, MultiheadAttention, Transformer,
+                                TransformerDecoder, TransformerDecoderLayer,
+                                TransformerEncoder, TransformerEncoderLayer)
+
+
+def _ffn_forward(self, x, residual=None):
+    if residual is None:
+        residual = x
+    residual_str = residual.split('_')[-1]
+    if '(residual' in residual_str:
+        residual_str = residual_str.split('(residual')[0]
+    return x + '_ffn(residual={})'.format(residual_str)
+
+
+def _multihead_attention_forward(self,
+                                 x,
+                                 key=None,
+                                 value=None,
+                                 residual=None,
+                                 query_pos=None,
+                                 key_pos=None,
+                                 attn_mask=None,
+                                 key_padding_mask=None,
+                                 selfattn=True):
+    if residual is None:
+        residual = x
+    residual_str = residual.split('_')[-1]
+    if '(residual' in residual_str:
+        residual_str = residual_str.split('(residual')[0]
+    attn_str = 'selfattn' if selfattn else 'multiheadattn'
+    return x + '_{}(residual={})'.format(attn_str, residual_str)
+
+
+def _encoder_layer_forward(self,
+                           x,
+                           pos=None,
+                           attn_mask=None,
+                           key_padding_mask=None):
+    norm_cnt = 0
+    inp_residual = x
+    for layer in self.order:
+        if layer == 'selfattn':
+            x = self.self_attn(
+                x,
+                x,
+                x,
+                inp_residual if self.pre_norm else
None, + query_pos=pos, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask) + inp_residual = x + elif layer == 'norm': + x = x + '_norm{}'.format(norm_cnt) + norm_cnt += 1 + elif layer == 'ffn': + x = self.ffn(x, inp_residual if self.pre_norm else None) + else: + raise ValueError(f'Unsupported layer type {layer}.') + return x + + +def _decoder_layer_forward(self, + x, + memory, + memory_pos=None, + query_pos=None, + memory_attn_mask=None, + target_attn_mask=None, + memory_key_padding_mask=None, + target_key_padding_mask=None): + norm_cnt = 0 + inp_residual = x + for layer in self.order: + if layer == 'selfattn': + x = self.self_attn( + x, + x, + x, + inp_residual if self.pre_norm else None, + query_pos, + attn_mask=target_attn_mask, + key_padding_mask=target_key_padding_mask) + inp_residual = x + elif layer == 'norm': + x = x + '_norm{}'.format(norm_cnt) + norm_cnt += 1 + elif layer == 'multiheadattn': + x = self.multihead_attn( + x, + memory, + memory, + inp_residual if self.pre_norm else None, + query_pos, + key_pos=memory_pos, + attn_mask=memory_attn_mask, + key_padding_mask=memory_key_padding_mask, + selfattn=False) + inp_residual = x + elif layer == 'ffn': + x = self.ffn(x, inp_residual if self.pre_norm else None) + else: + raise ValueError(f'Unsupported layer type {layer}.') + return x + + +def test_multihead_attention(embed_dims=8, + num_heads=2, + dropout=0.1, + num_query=5, + num_key=10, + batch_size=1): + module = MultiheadAttention(embed_dims, num_heads, dropout) + # self attention + query = torch.rand(num_query, batch_size, embed_dims) + out = module(query) + assert out.shape == (num_query, batch_size, embed_dims) + + # set key + key = torch.rand(num_key, batch_size, embed_dims) + out = module(query, key) + assert out.shape == (num_query, batch_size, embed_dims) + + # set residual + residual = torch.rand(num_query, batch_size, embed_dims) + out = module(query, key, key, residual) + assert out.shape == (num_query, batch_size, embed_dims) + + # set query_pos and key_pos + query_pos = torch.rand(num_query, batch_size, embed_dims) + key_pos = torch.rand(num_key, batch_size, embed_dims) + out = module(query, key, None, residual, query_pos, key_pos) + assert out.shape == (num_query, batch_size, embed_dims) + + # set key_padding_mask + key_padding_mask = torch.rand(batch_size, num_key) > 0.5 + out = module(query, key, None, residual, query_pos, key_pos, None, + key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + # set attn_mask + attn_mask = torch.rand(num_query, num_key) > 0.5 + out = module(query, key, key, residual, query_pos, key_pos, attn_mask, + key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + +def test_ffn(embed_dims=8, feedforward_channels=8, num_fcs=2, batch_size=1): + # test invalid num_fcs + with pytest.raises(AssertionError): + module = FFN(embed_dims, feedforward_channels, 1) + + module = FFN(embed_dims, feedforward_channels, num_fcs) + x = torch.rand(batch_size, embed_dims) + out = module(x) + assert out.shape == (batch_size, embed_dims) + # set residual + residual = torch.rand(batch_size, embed_dims) + out = module(x, residual) + assert out.shape == (batch_size, embed_dims) + + # test case with no residual + module = FFN(embed_dims, feedforward_channels, num_fcs, add_residual=False) + x = torch.rand(batch_size, embed_dims) + out = module(x) + assert out.shape == (batch_size, embed_dims) + + +def test_transformer_encoder_layer(embed_dims=8, + num_heads=2, + feedforward_channels=8, + num_key=10, + 
batch_size=1): + x = torch.rand(num_key, batch_size, embed_dims) + # test invalid number of order + with pytest.raises(AssertionError): + order = ('norm', 'selfattn', 'norm', 'ffn', 'norm') + module = TransformerEncoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + + # test invalid value of order + with pytest.raises(AssertionError): + order = ('norm', 'selfattn', 'norm', 'unknown') + module = TransformerEncoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + + module = TransformerEncoderLayer(embed_dims, num_heads, + feedforward_channels) + + key_padding_mask = torch.rand(batch_size, num_key) > 0.5 + out = module(x, key_padding_mask=key_padding_mask) + assert not module.pre_norm + assert out.shape == (num_key, batch_size, embed_dims) + + # set pos + pos = torch.rand(num_key, batch_size, embed_dims) + out = module(x, pos, key_padding_mask=key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + # set attn_mask + attn_mask = torch.rand(num_key, num_key) > 0.5 + out = module(x, pos, attn_mask, key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + # set pre_norm + order = ('norm', 'selfattn', 'norm', 'ffn') + module = TransformerEncoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + assert module.pre_norm + out = module(x, pos, attn_mask, key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + @patch('mmdet.models.utils.TransformerEncoderLayer.forward', + _encoder_layer_forward) + @patch('mmdet.models.utils.FFN.forward', _ffn_forward) + @patch('mmdet.models.utils.MultiheadAttention.forward', + _multihead_attention_forward) + def test_order(): + module = TransformerEncoderLayer(embed_dims, num_heads, + feedforward_channels) + out = module('input') + assert out == 'input_selfattn(residual=input)_norm0_ffn' \ + '(residual=norm0)_norm1' + + # pre_norm + order = ('norm', 'selfattn', 'norm', 'ffn') + module = TransformerEncoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + out = module('input') + assert out == 'input_norm0_selfattn(residual=input)_' \ + 'norm1_ffn(residual=selfattn)' + + test_order() + + +def test_transformer_decoder_layer(embed_dims=8, + num_heads=2, + feedforward_channels=8, + num_key=10, + num_query=5, + batch_size=1): + query = torch.rand(num_query, batch_size, embed_dims) + # test invalid number of order + with pytest.raises(AssertionError): + order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn', + 'norm') + module = TransformerDecoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + + # test invalid value of order + with pytest.raises(AssertionError): + order = ('norm', 'selfattn', 'unknown', 'multiheadattn', 'norm', 'ffn') + module = TransformerDecoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + + module = TransformerDecoderLayer(embed_dims, num_heads, + feedforward_channels) + memory = torch.rand(num_key, batch_size, embed_dims) + assert not module.pre_norm + out = module(query, memory) + assert out.shape == (num_query, batch_size, embed_dims) + + # set query_pos + query_pos = torch.rand(num_query, batch_size, embed_dims) + out = module(query, memory, memory_pos=None, query_pos=query_pos) + assert out.shape == (num_query, batch_size, embed_dims) + + # set memory_pos + memory_pos = torch.rand(num_key, batch_size, embed_dims) + out = module(query, memory, memory_pos, query_pos) + assert out.shape == (num_query, batch_size, embed_dims) + + # set memory_key_padding_mask + 
memory_key_padding_mask = torch.rand(batch_size, num_key) > 0.5 + out = module( + query, + memory, + memory_pos, + query_pos, + memory_key_padding_mask=memory_key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + # set target_key_padding_mask + target_key_padding_mask = torch.rand(batch_size, num_query) > 0.5 + out = module( + query, + memory, + memory_pos, + query_pos, + memory_key_padding_mask=memory_key_padding_mask, + target_key_padding_mask=target_key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + # set memory_attn_mask + memory_attn_mask = torch.rand(num_query, num_key) + out = module( + query, + memory, + memory_pos, + query_pos, + memory_attn_mask, + memory_key_padding_mask=memory_key_padding_mask, + target_key_padding_mask=target_key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + # set target_attn_mask + target_attn_mask = torch.rand(num_query, num_query) + out = module(query, memory, memory_pos, query_pos, memory_attn_mask, + target_attn_mask, memory_key_padding_mask, + target_key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + # pre_norm + order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn') + module = TransformerDecoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + assert module.pre_norm + out = module( + query, + memory, + memory_pos, + query_pos, + memory_attn_mask, + memory_key_padding_mask=memory_key_padding_mask, + target_key_padding_mask=target_key_padding_mask) + assert out.shape == (num_query, batch_size, embed_dims) + + @patch('mmdet.models.utils.TransformerDecoderLayer.forward', + _decoder_layer_forward) + @patch('mmdet.models.utils.FFN.forward', _ffn_forward) + @patch('mmdet.models.utils.MultiheadAttention.forward', + _multihead_attention_forward) + def test_order(): + module = TransformerDecoderLayer(embed_dims, num_heads, + feedforward_channels) + out = module('input', 'memory') + assert out == 'input_selfattn(residual=input)_norm0_multiheadattn' \ + '(residual=norm0)_norm1_ffn(residual=norm1)_norm2' + + # pre_norm + order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn') + module = TransformerDecoderLayer( + embed_dims, num_heads, feedforward_channels, order=order) + out = module('input', 'memory') + assert out == 'input_norm0_selfattn(residual=input)_norm1_' \ + 'multiheadattn(residual=selfattn)_norm2_ffn(residual=' \ + 'multiheadattn)' + + test_order() + + +def test_transformer_encoder(num_layers=2, + embed_dims=8, + num_heads=2, + feedforward_channels=8, + num_key=10, + batch_size=1): + module = TransformerEncoder(num_layers, embed_dims, num_heads, + feedforward_channels) + assert not module.pre_norm + assert module.norm is None + x = torch.rand(num_key, batch_size, embed_dims) + out = module(x) + assert out.shape == (num_key, batch_size, embed_dims) + + # set pos + pos = torch.rand(num_key, batch_size, embed_dims) + out = module(x, pos) + assert out.shape == (num_key, batch_size, embed_dims) + + # set key_padding_mask + key_padding_mask = torch.rand(batch_size, num_key) > 0.5 + out = module(x, pos, None, key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + # set attn_mask + attn_mask = torch.rand(num_key, num_key) > 0.5 + out = module(x, pos, attn_mask, key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + # pre_norm + order = ('norm', 'selfattn', 'norm', 'ffn') + module = TransformerEncoder( + num_layers, embed_dims, num_heads, feedforward_channels, 
order=order) + assert module.pre_norm + assert module.norm is not None + out = module(x, pos, attn_mask, key_padding_mask) + assert out.shape == (num_key, batch_size, embed_dims) + + +def test_transformer_decoder(num_layers=2, + embed_dims=8, + num_heads=2, + feedforward_channels=8, + num_key=10, + num_query=5, + batch_size=1): + module = TransformerDecoder(num_layers, embed_dims, num_heads, + feedforward_channels) + query = torch.rand(num_query, batch_size, embed_dims) + memory = torch.rand(num_key, batch_size, embed_dims) + out = module(query, memory) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set query_pos + query_pos = torch.rand(num_query, batch_size, embed_dims) + out = module(query, memory, query_pos=query_pos) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set memory_pos + memory_pos = torch.rand(num_key, batch_size, embed_dims) + out = module(query, memory, memory_pos, query_pos) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set memory_key_padding_mask + memory_key_padding_mask = torch.rand(batch_size, num_key) > 0.5 + out = module( + query, + memory, + memory_pos, + query_pos, + memory_key_padding_mask=memory_key_padding_mask) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set target_key_padding_mask + target_key_padding_mask = torch.rand(batch_size, num_query) > 0.5 + out = module( + query, + memory, + memory_pos, + query_pos, + memory_key_padding_mask=memory_key_padding_mask, + target_key_padding_mask=target_key_padding_mask) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set memory_attn_mask + memory_attn_mask = torch.rand(num_query, num_key) > 0.5 + out = module(query, memory, memory_pos, query_pos, memory_attn_mask, None, + memory_key_padding_mask, target_key_padding_mask) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # set target_attn_mask + target_attn_mask = torch.rand(num_query, num_query) > 0.5 + out = module(query, memory, memory_pos, query_pos, memory_attn_mask, + target_attn_mask, memory_key_padding_mask, + target_key_padding_mask) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # pre_norm + order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn') + module = TransformerDecoder( + num_layers, embed_dims, num_heads, feedforward_channels, order=order) + out = module(query, memory, memory_pos, query_pos, memory_attn_mask, + target_attn_mask, memory_key_padding_mask, + target_key_padding_mask) + assert out.shape == (1, num_query, batch_size, embed_dims) + + # return_intermediate + module = TransformerDecoder( + num_layers, + embed_dims, + num_heads, + feedforward_channels, + order=order, + return_intermediate=True) + out = module(query, memory, memory_pos, query_pos, memory_attn_mask, + target_attn_mask, memory_key_padding_mask, + target_key_padding_mask) + assert out.shape == (num_layers, num_query, batch_size, embed_dims) + + +def test_transformer(num_enc_layers=2, + num_dec_layers=2, + embed_dims=8, + num_heads=2, + num_query=5, + batch_size=1): + module = Transformer(embed_dims, num_heads, num_enc_layers, num_dec_layers) + height, width = 8, 6 + x = torch.rand(batch_size, embed_dims, height, width) + mask = torch.rand(batch_size, height, width) > 0.5 + query_embed = torch.rand(num_query, embed_dims) + pos_embed = torch.rand(batch_size, embed_dims, height, width) + hs, mem = module(x, mask, query_embed, pos_embed) + assert hs.shape == (1, batch_size, num_query, embed_dims) + assert mem.shape == (batch_size, embed_dims, 
height, width) + + # pre_norm + module = Transformer( + embed_dims, num_heads, num_enc_layers, num_dec_layers, pre_norm=True) + hs, mem = module(x, mask, query_embed, pos_embed) + assert hs.shape == (1, batch_size, num_query, embed_dims) + assert mem.shape == (batch_size, embed_dims, height, width) + + # return_intermediate + module = Transformer( + embed_dims, + num_heads, + num_enc_layers, + num_dec_layers, + return_intermediate_dec=True) + hs, mem = module(x, mask, query_embed, pos_embed) + assert hs.shape == (num_dec_layers, batch_size, num_query, embed_dims) + assert mem.shape == (batch_size, embed_dims, height, width) + + # pre_norm and return_intermediate + module = Transformer( + embed_dims, + num_heads, + num_enc_layers, + num_dec_layers, + pre_norm=True, + return_intermediate_dec=True) + hs, mem = module(x, mask, query_embed, pos_embed) + assert hs.shape == (num_dec_layers, batch_size, num_query, embed_dims) + assert mem.shape == (batch_size, embed_dims, height, width) + + # test init_weights + module.init_weights() diff --git a/thirdparty/mmdetection/tests/test_version.py b/thirdparty/mmdetection/tests/test_version.py new file mode 100644 index 0000000000000000000000000000000000000000..6ddf45c0e2854cb64006281363afe5547aa886c2 --- /dev/null +++ b/thirdparty/mmdetection/tests/test_version.py @@ -0,0 +1,15 @@ +from mmdet import digit_version + + +def test_version_check(): + assert digit_version('1.0.5') > digit_version('1.0.5rc0') + assert digit_version('1.0.5') > digit_version('1.0.4rc0') + assert digit_version('1.0.5') > digit_version('1.0rc0') + assert digit_version('1.0.0') > digit_version('0.6.2') + assert digit_version('1.0.0') > digit_version('0.2.16') + assert digit_version('1.0.5rc0') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc1') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc2') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc2') > digit_version('1.0.0rc1') + assert digit_version('1.0.1rc1') > digit_version('1.0.0rc1') + assert digit_version('1.0.0') > digit_version('1.0.0rc1') diff --git a/thirdparty/mmdetection/tools/analyze_logs.py b/thirdparty/mmdetection/tools/analyze_logs.py new file mode 100644 index 0000000000000000000000000000000000000000..83464f76ef3155be80289431188492c911f5b482 --- /dev/null +++ b/thirdparty/mmdetection/tools/analyze_logs.py @@ -0,0 +1,179 @@ +import argparse +import json +from collections import defaultdict + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns + + +def cal_train_time(log_dicts, args): + for i, log_dict in enumerate(log_dicts): + print(f'{"-" * 5}Analyze train time of {args.json_logs[i]}{"-" * 5}') + all_times = [] + for epoch in log_dict.keys(): + if args.include_outliers: + all_times.append(log_dict[epoch]['time']) + else: + all_times.append(log_dict[epoch]['time'][1:]) + all_times = np.array(all_times) + epoch_ave_time = all_times.mean(-1) + slowest_epoch = epoch_ave_time.argmax() + fastest_epoch = epoch_ave_time.argmin() + std_over_epoch = epoch_ave_time.std() + print(f'slowest epoch {slowest_epoch + 1}, ' + f'average time is {epoch_ave_time[slowest_epoch]:.4f}') + print(f'fastest epoch {fastest_epoch + 1}, ' + f'average time is {epoch_ave_time[fastest_epoch]:.4f}') + print(f'time std over epochs is {std_over_epoch:.4f}') + print(f'average iter time: {np.mean(all_times):.4f} s/iter') + print() + + +def plot_curve(log_dicts, args): + if args.backend is not None: + plt.switch_backend(args.backend) + sns.set_style(args.style) + # if legend is None, use 
{filename}_{key} as legend + legend = args.legend + if legend is None: + legend = [] + for json_log in args.json_logs: + for metric in args.keys: + legend.append(f'{json_log}_{metric}') + assert len(legend) == (len(args.json_logs) * len(args.keys)) + metrics = args.keys + + num_metrics = len(metrics) + for i, log_dict in enumerate(log_dicts): + epochs = list(log_dict.keys()) + for j, metric in enumerate(metrics): + print(f'plot curve of {args.json_logs[i]}, metric is {metric}') + if metric not in log_dict[epochs[0]]: + raise KeyError( + f'{args.json_logs[i]} does not contain metric {metric}') + + if 'mAP' in metric: + xs = np.arange(1, max(epochs) + 1) + ys = [] + for epoch in epochs: + ys += log_dict[epoch][metric] + ax = plt.gca() + ax.set_xticks(xs) + plt.xlabel('epoch') + plt.plot(xs, ys, label=legend[i * num_metrics + j], marker='o') + else: + xs = [] + ys = [] + num_iters_per_epoch = log_dict[epochs[0]]['iter'][-1] + for epoch in epochs: + iters = log_dict[epoch]['iter'] + if log_dict[epoch]['mode'][-1] == 'val': + iters = iters[:-1] + xs.append( + np.array(iters) + (epoch - 1) * num_iters_per_epoch) + ys.append(np.array(log_dict[epoch][metric][:len(iters)])) + xs = np.concatenate(xs) + ys = np.concatenate(ys) + plt.xlabel('iter') + plt.plot( + xs, ys, label=legend[i * num_metrics + j], linewidth=0.5) + plt.legend() + if args.title is not None: + plt.title(args.title) + if args.out is None: + plt.show() + else: + print(f'save curve to: {args.out}') + plt.savefig(args.out) + plt.cla() + + +def add_plot_parser(subparsers): + parser_plt = subparsers.add_parser( + 'plot_curve', help='parser for plotting curves') + parser_plt.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_plt.add_argument( + '--keys', + type=str, + nargs='+', + default=['bbox_mAP'], + help='the metric that you want to plot') + parser_plt.add_argument('--title', type=str, help='title of figure') + parser_plt.add_argument( + '--legend', + type=str, + nargs='+', + default=None, + help='legend of each plot') + parser_plt.add_argument( + '--backend', type=str, default=None, help='backend of plt') + parser_plt.add_argument( + '--style', type=str, default='dark', help='style of plt') + parser_plt.add_argument('--out', type=str, default=None) + + +def add_time_parser(subparsers): + parser_time = subparsers.add_parser( + 'cal_train_time', + help='parser for computing the average time per training iteration') + parser_time.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_time.add_argument( + '--include-outliers', + action='store_true', + help='include the first value of every epoch when computing ' + 'the average time') + + +def parse_args(): + parser = argparse.ArgumentParser(description='Analyze Json Log') + # currently only support plot curve and calculate average train time + subparsers = parser.add_subparsers(dest='task', help='task parser') + add_plot_parser(subparsers) + add_time_parser(subparsers) + args = parser.parse_args() + return args + + +def load_json_logs(json_logs): + # load and convert json_logs to log_dict, key is epoch, value is a sub dict + # keys of sub dict is different metrics, e.g. 
memory, bbox_mAP
+    # value of sub dict is a list of corresponding values of all iterations
+    log_dicts = [dict() for _ in json_logs]
+    for json_log, log_dict in zip(json_logs, log_dicts):
+        with open(json_log, 'r') as log_file:
+            for line in log_file:
+                log = json.loads(line.strip())
+                # skip lines without `epoch` field
+                if 'epoch' not in log:
+                    continue
+                epoch = log.pop('epoch')
+                if epoch not in log_dict:
+                    log_dict[epoch] = defaultdict(list)
+                for k, v in log.items():
+                    log_dict[epoch][k].append(v)
+    return log_dicts
+
+
+def main():
+    args = parse_args()
+
+    json_logs = args.json_logs
+    for json_log in json_logs:
+        assert json_log.endswith('.json')
+
+    log_dicts = load_json_logs(json_logs)
+
+    # dispatch to the chosen sub-command: plot_curve or cal_train_time
+    eval(args.task)(log_dicts, args)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/thirdparty/mmdetection/tools/benchmark.py b/thirdparty/mmdetection/tools/benchmark.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4c451a9683be63950a0d3d8829d9c5f8788c390
--- /dev/null
+++ b/thirdparty/mmdetection/tools/benchmark.py
@@ -0,0 +1,100 @@
+import argparse
+import time
+
+import torch
+from mmcv import Config
+from mmcv.cnn import fuse_conv_bn
+from mmcv.parallel import MMDataParallel
+from mmcv.runner import load_checkpoint, wrap_fp16_model
+
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+from mmdet.models import build_detector
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='MMDet benchmark a model')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument('checkpoint', help='checkpoint file')
+    parser.add_argument(
+        '--log-interval', type=int, default=50, help='interval of logging')
+    parser.add_argument(
+        '--fuse-conv-bn',
+        action='store_true',
+        help='Whether to fuse conv and bn. This will slightly increase '
+        'the inference speed')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+    # import modules from string list.
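+    # e.g. a config could carry (module names purely illustrative):
+    #     custom_imports = dict(
+    #         imports=['my_project.my_custom_detector'],
+    #         allow_failed_imports=False)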
+    if cfg.get('custom_imports', None):
+        from mmcv.utils import import_modules_from_strings
+        import_modules_from_strings(**cfg['custom_imports'])
+    # set cudnn_benchmark
+    if cfg.get('cudnn_benchmark', False):
+        torch.backends.cudnn.benchmark = True
+    cfg.model.pretrained = None
+    cfg.data.test.test_mode = True
+
+    # build the dataloader
+    samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1)
+    if samples_per_gpu > 1:
+        # Replace 'ImageToTensor' with 'DefaultFormatBundle'
+        cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline)
+    dataset = build_dataset(cfg.data.test)
+    data_loader = build_dataloader(
+        dataset,
+        samples_per_gpu=1,
+        workers_per_gpu=cfg.data.workers_per_gpu,
+        dist=False,
+        shuffle=False)
+
+    # build the model and load checkpoint
+    model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
+    fp16_cfg = cfg.get('fp16', None)
+    if fp16_cfg is not None:
+        wrap_fp16_model(model)
+    load_checkpoint(model, args.checkpoint, map_location='cpu')
+    if args.fuse_conv_bn:
+        model = fuse_conv_bn(model)
+
+    model = MMDataParallel(model, device_ids=[0])
+
+    model.eval()
+
+    # the first several iterations may be very slow so skip them
+    num_warmup = 5
+    pure_inf_time = 0
+
+    # benchmark with 2000 images and take the average
+    for i, data in enumerate(data_loader):
+
+        torch.cuda.synchronize()
+        start_time = time.perf_counter()
+
+        with torch.no_grad():
+            model(return_loss=False, rescale=True, **data)
+
+        torch.cuda.synchronize()
+        elapsed = time.perf_counter() - start_time
+
+        if i >= num_warmup:
+            pure_inf_time += elapsed
+            if (i + 1) % args.log_interval == 0:
+                fps = (i + 1 - num_warmup) / pure_inf_time
+                print(f'Done image [{i + 1:<3}/ 2000], fps: {fps:.1f} img / s')
+
+        if (i + 1) == 2000:
+            pure_inf_time += elapsed
+            fps = (i + 1 - num_warmup) / pure_inf_time
+            print(f'Overall fps: {fps:.1f} img / s')
+            break
+
+
+if __name__ == '__main__':
+    main()
diff --git a/thirdparty/mmdetection/tools/browse_dataset.py b/thirdparty/mmdetection/tools/browse_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..55110e8949ca998f346a95e8e21629f8e5474bef
--- /dev/null
+++ b/thirdparty/mmdetection/tools/browse_dataset.py
@@ -0,0 +1,68 @@
+import argparse
+import os
+from pathlib import Path
+
+import mmcv
+from mmcv import Config
+
+from mmdet.datasets.builder import build_dataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Browse a dataset')
+    parser.add_argument('config', help='train config file path')
+    parser.add_argument(
+        '--skip-type',
+        type=str,
+        nargs='+',
+        default=['DefaultFormatBundle', 'Normalize', 'Collect'],
+        help='skip some useless pipeline steps')
+    parser.add_argument(
+        '--output-dir',
+        default=None,
+        type=str,
+        help='If there is no display interface, save the visualizations '
+        'to this directory')
+    parser.add_argument('--not-show', default=False, action='store_true')
+    parser.add_argument(
+        '--show-interval',
+        type=int,
+        default=999,
+        help='the interval of show (ms)')
+    args = parser.parse_args()
+    return args
+
+
+def retrieve_data_cfg(config_path, skip_type):
+    cfg = Config.fromfile(config_path)
+    train_data_cfg = cfg.data.train
+    train_data_cfg['pipeline'] = [
+        x for x in train_data_cfg.pipeline if x['type'] not in skip_type
+    ]
+
+    return cfg
+
+
+def main():
+    args = parse_args()
+    cfg = retrieve_data_cfg(args.config, args.skip_type)
+
+    dataset = build_dataset(cfg.data.train)
+
+    progress_bar = mmcv.ProgressBar(len(dataset))
+    for item in dataset:
+        filename = os.path.join(args.output_dir,
+ 
Path(item['filename']).name + ) if args.output_dir is not None else None + mmcv.imshow_det_bboxes( + item['img'], + item['gt_bboxes'], + item['gt_labels'], + class_names=dataset.CLASSES, + show=not args.not_show, + out_file=filename, + wait_time=args.show_interval) + progress_bar.update() + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/coco_error_analysis.py b/thirdparty/mmdetection/tools/coco_error_analysis.py new file mode 100644 index 0000000000000000000000000000000000000000..fba96cafd2e818afacc43b120793dbb1dd228705 --- /dev/null +++ b/thirdparty/mmdetection/tools/coco_error_analysis.py @@ -0,0 +1,171 @@ +import copy +import os +from argparse import ArgumentParser +from multiprocessing import Pool + +import matplotlib.pyplot as plt +import numpy as np +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval + + +def makeplot(rs, ps, outDir, class_name, iou_type): + cs = np.vstack([ + np.ones((2, 3)), + np.array([.31, .51, .74]), + np.array([.75, .31, .30]), + np.array([.36, .90, .38]), + np.array([.50, .39, .64]), + np.array([1, .6, 0]) + ]) + areaNames = ['allarea', 'small', 'medium', 'large'] + types = ['C75', 'C50', 'Loc', 'Sim', 'Oth', 'BG', 'FN'] + for i in range(len(areaNames)): + area_ps = ps[..., i, 0] + figure_tile = iou_type + '-' + class_name + '-' + areaNames[i] + aps = [ps_.mean() for ps_ in area_ps] + ps_curve = [ + ps_.mean(axis=1) if ps_.ndim > 1 else ps_ for ps_ in area_ps + ] + ps_curve.insert(0, np.zeros(ps_curve[0].shape)) + fig = plt.figure() + ax = plt.subplot(111) + for k in range(len(types)): + ax.plot(rs, ps_curve[k + 1], color=[0, 0, 0], linewidth=0.5) + ax.fill_between( + rs, + ps_curve[k], + ps_curve[k + 1], + color=cs[k], + label=str(f'[{aps[k]:.3f}]' + types[k])) + plt.xlabel('recall') + plt.ylabel('precision') + plt.xlim(0, 1.) + plt.ylim(0, 1.) 
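+        # The stacked bands follow the COCO error-analysis decomposition:
+        # C75/C50 are precision at IoU 0.75/0.50, Loc additionally ignores
+        # localization errors (IoU 0.1), Sim ignores supercategory confusion,
+        # Oth ignores all class confusion, BG removes background false
+        # positives and FN removes the remaining missed detections.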
+ plt.title(figure_tile) + plt.legend() + # plt.show() + fig.savefig(outDir + f'/{figure_tile}.png') + plt.close(fig) + + +def analyze_individual_category(k, cocoDt, cocoGt, catId, iou_type): + nm = cocoGt.loadCats(catId)[0] + print(f'--------------analyzing {k + 1}-{nm["name"]}---------------') + ps_ = {} + dt = copy.deepcopy(cocoDt) + nm = cocoGt.loadCats(catId)[0] + imgIds = cocoGt.getImgIds() + dt_anns = dt.dataset['annotations'] + select_dt_anns = [] + for ann in dt_anns: + if ann['category_id'] == catId: + select_dt_anns.append(ann) + dt.dataset['annotations'] = select_dt_anns + dt.createIndex() + # compute precision but ignore superclass confusion + gt = copy.deepcopy(cocoGt) + child_catIds = gt.getCatIds(supNms=[nm['supercategory']]) + for idx, ann in enumerate(gt.dataset['annotations']): + if (ann['category_id'] in child_catIds + and ann['category_id'] != catId): + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [.1] + cocoEval.params.useCats = 1 + cocoEval.evaluate() + cocoEval.accumulate() + ps_supercategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_supercategory'] = ps_supercategory + # compute precision but ignore any class confusion + gt = copy.deepcopy(cocoGt) + for idx, ann in enumerate(gt.dataset['annotations']): + if ann['category_id'] != catId: + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [.1] + cocoEval.params.useCats = 1 + cocoEval.evaluate() + cocoEval.accumulate() + ps_allcategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_allcategory'] = ps_allcategory + return k, ps_ + + +def analyze_results(res_file, ann_file, res_types, out_dir): + for res_type in res_types: + assert res_type in ['bbox', 'segm'] + + directory = os.path.dirname(out_dir + '/') + if not os.path.exists(directory): + print(f'-------------create {out_dir}-----------------') + os.makedirs(directory) + + cocoGt = COCO(ann_file) + cocoDt = cocoGt.loadRes(res_file) + imgIds = cocoGt.getImgIds() + for res_type in res_types: + res_out_dir = out_dir + '/' + res_type + '/' + res_directory = os.path.dirname(res_out_dir) + if not os.path.exists(res_directory): + print(f'-------------create {res_out_dir}-----------------') + os.makedirs(res_directory) + iou_type = res_type + cocoEval = COCOeval( + copy.deepcopy(cocoGt), copy.deepcopy(cocoDt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.iouThrs = [.75, .5, .1] + cocoEval.params.maxDets = [100] + cocoEval.evaluate() + cocoEval.accumulate() + ps = cocoEval.eval['precision'] + ps = np.vstack([ps, np.zeros((4, *ps.shape[1:]))]) + catIds = cocoGt.getCatIds() + recThrs = cocoEval.params.recThrs + with Pool(processes=48) as pool: + args = [(k, cocoDt, cocoGt, catId, iou_type) + for k, catId in enumerate(catIds)] + analyze_results = pool.starmap(analyze_individual_category, args) + for k, catId in enumerate(catIds): + nm = cocoGt.loadCats(catId)[0] + print(f'--------------saving {k + 1}-{nm["name"]}---------------') + analyze_result = analyze_results[k] + assert k == analyze_result[0] + ps_supercategory = 
analyze_result[1]['ps_supercategory'] + ps_allcategory = analyze_result[1]['ps_allcategory'] + # compute precision but ignore superclass confusion + ps[3, :, k, :, :] = ps_supercategory + # compute precision but ignore any class confusion + ps[4, :, k, :, :] = ps_allcategory + # fill in background and false negative errors and plot + ps[ps == -1] = 0 + ps[5, :, k, :, :] = (ps[4, :, k, :, :] > 0) + ps[6, :, k, :, :] = 1.0 + makeplot(recThrs, ps[:, :, k], res_out_dir, nm['name'], iou_type) + makeplot(recThrs, ps, res_out_dir, 'allclass', iou_type) + + +def main(): + parser = ArgumentParser(description='COCO Error Analysis Tool') + parser.add_argument('result', help='result file (json format) path') + parser.add_argument('out_dir', help='dir to save analyze result images') + parser.add_argument( + '--ann', + default='data/coco/annotations/instances_val2017.json', + help='annotation file path') + parser.add_argument( + '--types', type=str, nargs='+', default=['bbox'], help='result types') + args = parser.parse_args() + analyze_results(args.result, args.ann, args.types, out_dir=args.out_dir) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/convert_datasets/cityscapes.py b/thirdparty/mmdetection/tools/convert_datasets/cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..86ef84fa2ce760dea76ed66b1d48a6b909b6ae02 --- /dev/null +++ b/thirdparty/mmdetection/tools/convert_datasets/cityscapes.py @@ -0,0 +1,151 @@ +import argparse +import glob +import os.path as osp + +import cityscapesscripts.helpers.labels as CSLabels +import mmcv +import numpy as np +import pycocotools.mask as maskUtils + + +def collect_files(img_dir, gt_dir): + suffix = 'leftImg8bit.png' + files = [] + for img_file in glob.glob(osp.join(img_dir, '**/*.png')): + assert img_file.endswith(suffix), img_file + inst_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_instanceIds.png' + # Note that labelIds are not converted to trainId for seg map + segm_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_labelIds.png' + files.append((img_file, inst_file, segm_file)) + assert len(files), f'No images found in {img_dir}' + print(f'Loaded {len(files)} images from {img_dir}') + + return files + + +def collect_annotations(files, nproc=1): + print('Loading annotation images') + if nproc > 1: + images = mmcv.track_parallel_progress( + load_img_info, files, nproc=nproc) + else: + images = mmcv.track_progress(load_img_info, files) + + return images + + +def load_img_info(files): + img_file, inst_file, segm_file = files + inst_img = mmcv.imread(inst_file, 'unchanged') + # ids < 24 are stuff labels (filtering them first is about 5% faster) + unique_inst_ids = np.unique(inst_img[inst_img >= 24]) + anno_info = [] + for inst_id in unique_inst_ids: + # For non-crowd annotations, inst_id // 1000 is the label_id + # Crowd annotations have <1000 instance ids + label_id = inst_id // 1000 if inst_id >= 1000 else inst_id + label = CSLabels.id2label[label_id] + if not label.hasInstances or label.ignoreInEval: + continue + + category_id = label.id + iscrowd = int(inst_id < 1000) + mask = np.asarray(inst_img == inst_id, dtype=np.uint8, order='F') + mask_rle = maskUtils.encode(mask[:, :, None])[0] + + area = maskUtils.area(mask_rle) + # convert to COCO style XYWH format + bbox = maskUtils.toBbox(mask_rle) + + # for json encoding + mask_rle['counts'] = mask_rle['counts'].decode() + + anno = dict( + iscrowd=iscrowd, + category_id=category_id, + bbox=bbox.tolist(), + 
area=area.tolist(),
+            segmentation=mask_rle)
+        anno_info.append(anno)
+    video_name = osp.basename(osp.dirname(img_file))
+    img_info = dict(
+        # remove img_prefix for filename
+        file_name=osp.join(video_name, osp.basename(img_file)),
+        height=inst_img.shape[0],
+        width=inst_img.shape[1],
+        anno_info=anno_info,
+        segm_file=osp.join(video_name, osp.basename(segm_file)))
+
+    return img_info
+
+
+def cvt_annotations(image_infos, out_json_name):
+    out_json = dict()
+    img_id = 0
+    ann_id = 0
+    out_json['images'] = []
+    out_json['categories'] = []
+    out_json['annotations'] = []
+    for image_info in image_infos:
+        image_info['id'] = img_id
+        anno_infos = image_info.pop('anno_info')
+        out_json['images'].append(image_info)
+        for anno_info in anno_infos:
+            anno_info['image_id'] = img_id
+            anno_info['id'] = ann_id
+            out_json['annotations'].append(anno_info)
+            ann_id += 1
+        img_id += 1
+    for label in CSLabels.labels:
+        if label.hasInstances and not label.ignoreInEval:
+            cat = dict(id=label.id, name=label.name)
+            out_json['categories'].append(cat)
+
+    if len(out_json['annotations']) == 0:
+        out_json.pop('annotations')
+
+    mmcv.dump(out_json, out_json_name)
+    return out_json
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Convert Cityscapes annotations to COCO format')
+    parser.add_argument('cityscapes_path', help='cityscapes data path')
+    parser.add_argument('--img-dir', default='leftImg8bit', type=str)
+    parser.add_argument('--gt-dir', default='gtFine', type=str)
+    parser.add_argument('-o', '--out-dir', help='output path')
+    parser.add_argument(
+        '--nproc', default=1, type=int, help='number of processes')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    cityscapes_path = args.cityscapes_path
+    out_dir = args.out_dir if args.out_dir else cityscapes_path
+    mmcv.mkdir_or_exist(out_dir)
+
+    img_dir = osp.join(cityscapes_path, args.img_dir)
+    gt_dir = osp.join(cityscapes_path, args.gt_dir)
+
+    set_name = dict(
+        train='instancesonly_filtered_gtFine_train.json',
+        val='instancesonly_filtered_gtFine_val.json',
+        test='instancesonly_filtered_gtFine_test.json')
+
+    for split, json_name in set_name.items():
+        print(f'Converting {split} into {json_name}')
+        with mmcv.Timer(
+                print_tmpl='It took {}s to convert Cityscapes annotations'):
+            files = collect_files(
+                osp.join(img_dir, split), osp.join(gt_dir, split))
+            image_infos = collect_annotations(files, nproc=args.nproc)
+            cvt_annotations(image_infos, osp.join(out_dir, json_name))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/thirdparty/mmdetection/tools/convert_datasets/pascal_voc.py b/thirdparty/mmdetection/tools/convert_datasets/pascal_voc.py
new file mode 100644
index 0000000000000000000000000000000000000000..307c93cbae9d652bc6e66867730d982402a77053
--- /dev/null
+++ b/thirdparty/mmdetection/tools/convert_datasets/pascal_voc.py
@@ -0,0 +1,139 @@
+import argparse
+import os.path as osp
+import xml.etree.ElementTree as ET
+
+import mmcv
+import numpy as np
+
+from mmdet.core import voc_classes
+
+label_ids = {name: i for i, name in enumerate(voc_classes())}
+
+
+def parse_xml(args):
+    xml_path, img_path = args
+    tree = ET.parse(xml_path)
+    root = tree.getroot()
+    size = root.find('size')
+    w = int(size.find('width').text)
+    h = int(size.find('height').text)
+    bboxes = []
+    labels = []
+    bboxes_ignore = []
+    labels_ignore = []
+    for obj in root.findall('object'):
+        name = obj.find('name').text
+        label = label_ids[name]
+        difficult = int(obj.find('difficult').text)
+        bnd_box = obj.find('bndbox')
+        # VOC stores 1-based inclusive pixel coordinates; the `- 1` applied
+        # when building the numpy arrays below shifts them to 0-based
+ 
bbox = [ + int(bnd_box.find('xmin').text), + int(bnd_box.find('ymin').text), + int(bnd_box.find('xmax').text), + int(bnd_box.find('ymax').text) + ] + if difficult: + bboxes_ignore.append(bbox) + labels_ignore.append(label) + else: + bboxes.append(bbox) + labels.append(label) + if not bboxes: + bboxes = np.zeros((0, 4)) + labels = np.zeros((0, )) + else: + bboxes = np.array(bboxes, ndmin=2) - 1 + labels = np.array(labels) + if not bboxes_ignore: + bboxes_ignore = np.zeros((0, 4)) + labels_ignore = np.zeros((0, )) + else: + bboxes_ignore = np.array(bboxes_ignore, ndmin=2) - 1 + labels_ignore = np.array(labels_ignore) + annotation = { + 'filename': img_path, + 'width': w, + 'height': h, + 'ann': { + 'bboxes': bboxes.astype(np.float32), + 'labels': labels.astype(np.int64), + 'bboxes_ignore': bboxes_ignore.astype(np.float32), + 'labels_ignore': labels_ignore.astype(np.int64) + } + } + return annotation + + +def cvt_annotations(devkit_path, years, split, out_file): + if not isinstance(years, list): + years = [years] + annotations = [] + for year in years: + filelist = osp.join(devkit_path, + f'VOC{year}/ImageSets/Main/{split}.txt') + if not osp.isfile(filelist): + print(f'filelist does not exist: {filelist}, ' + f'skip voc{year} {split}') + return + img_names = mmcv.list_from_file(filelist) + xml_paths = [ + osp.join(devkit_path, f'VOC{year}/Annotations/{img_name}.xml') + for img_name in img_names + ] + img_paths = [ + f'VOC{year}/JPEGImages/{img_name}.jpg' for img_name in img_names + ] + part_annotations = mmcv.track_progress(parse_xml, + list(zip(xml_paths, img_paths))) + annotations.extend(part_annotations) + mmcv.dump(annotations, out_file) + return annotations + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert PASCAL VOC annotations to mmdetection format') + parser.add_argument('devkit_path', help='pascal voc devkit path') + parser.add_argument('-o', '--out-dir', help='output path') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + devkit_path = args.devkit_path + out_dir = args.out_dir if args.out_dir else devkit_path + mmcv.mkdir_or_exist(out_dir) + + years = [] + if osp.isdir(osp.join(devkit_path, 'VOC2007')): + years.append('2007') + if osp.isdir(osp.join(devkit_path, 'VOC2012')): + years.append('2012') + if '2007' in years and '2012' in years: + years.append(['2007', '2012']) + if not years: + raise IOError(f'The devkit path {devkit_path} contains neither ' + '"VOC2007" nor "VOC2012" subfolder') + for year in years: + if year == '2007': + prefix = 'voc07' + elif year == '2012': + prefix = 'voc12' + elif year == ['2007', '2012']: + prefix = 'voc0712' + for split in ['train', 'val', 'trainval']: + dataset_name = prefix + '_' + split + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, split, + osp.join(out_dir, dataset_name + '.pkl')) + if not isinstance(year, list): + dataset_name = prefix + '_test' + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, 'test', + osp.join(out_dir, dataset_name + '.pkl')) + print('Done!') + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/detectron2pytorch.py b/thirdparty/mmdetection/tools/detectron2pytorch.py new file mode 100644 index 0000000000000000000000000000000000000000..961e6f571b785f01236a660651323cc6372e8189 --- /dev/null +++ b/thirdparty/mmdetection/tools/detectron2pytorch.py @@ -0,0 +1,82 @@ +import argparse +from collections import OrderedDict + +import mmcv +import torch + +arch_settings = {50: 
(3, 4, 6, 3), 101: (3, 4, 23, 3)} + + +def convert_bn(blobs, state_dict, caffe_name, torch_name, converted_names): + # detectron replace bn with affine channel layer + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_s']) + bn_size = state_dict[torch_name + '.weight'].size() + state_dict[torch_name + '.running_mean'] = torch.zeros(bn_size) + state_dict[torch_name + '.running_var'] = torch.ones(bn_size) + converted_names.add(caffe_name + '_b') + converted_names.add(caffe_name + '_s') + + +def convert_conv_fc(blobs, state_dict, caffe_name, torch_name, + converted_names): + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_w']) + converted_names.add(caffe_name + '_w') + if caffe_name + '_b' in blobs: + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + converted_names.add(caffe_name + '_b') + + +def convert(src, dst, depth): + """Convert keys in detectron pretrained ResNet models to pytorch style.""" + # load arch_settings + if depth not in arch_settings: + raise ValueError('Only support ResNet-50 and ResNet-101 currently') + block_nums = arch_settings[depth] + # load caffe model + caffe_model = mmcv.load(src, encoding='latin1') + blobs = caffe_model['blobs'] if 'blobs' in caffe_model else caffe_model + # convert to pytorch style + state_dict = OrderedDict() + converted_names = set() + convert_conv_fc(blobs, state_dict, 'conv1', 'conv1', converted_names) + convert_bn(blobs, state_dict, 'res_conv1_bn', 'bn1', converted_names) + for i in range(1, len(block_nums) + 1): + for j in range(block_nums[i - 1]): + if j == 0: + convert_conv_fc(blobs, state_dict, f'res{i + 1}_{j}_branch1', + f'layer{i}.{j}.downsample.0', converted_names) + convert_bn(blobs, state_dict, f'res{i + 1}_{j}_branch1_bn', + f'layer{i}.{j}.downsample.1', converted_names) + for k, letter in enumerate(['a', 'b', 'c']): + convert_conv_fc(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}', + f'layer{i}.{j}.conv{k+1}', converted_names) + convert_bn(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}_bn', + f'layer{i}.{j}.bn{k + 1}', converted_names) + # check if all layers are converted + for key in blobs: + if key not in converted_names: + print(f'Not Convert: {key}') + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src detectron model path') + parser.add_argument('dst', help='save path') + parser.add_argument('depth', type=int, help='ResNet model depth') + args = parser.parse_args() + convert(args.src, args.dst, args.depth) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/dist_test.sh b/thirdparty/mmdetection/tools/dist_test.sh new file mode 100644 index 0000000000000000000000000000000000000000..3c74ec6ecd1f08049a3234f2562f8be7107ed6ec --- /dev/null +++ b/thirdparty/mmdetection/tools/dist_test.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +CONFIG=$1 +CHECKPOINT=$2 +GPUS=$3 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4} diff --git a/thirdparty/mmdetection/tools/dist_train.sh b/thirdparty/mmdetection/tools/dist_train.sh new file mode 100644 index 
0000000000000000000000000000000000000000..5b43fffbf28fc9b8ba7c14efcd5e4f8b19279470 --- /dev/null +++ b/thirdparty/mmdetection/tools/dist_train.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +CONFIG=$1 +GPUS=$2 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/train.py $CONFIG --launcher pytorch ${@:3} diff --git a/thirdparty/mmdetection/tools/eval_metric.py b/thirdparty/mmdetection/tools/eval_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..6dfc434242d091512696a0f5745499d0282d6c67 --- /dev/null +++ b/thirdparty/mmdetection/tools/eval_metric.py @@ -0,0 +1,75 @@ +import argparse + +import mmcv +from mmcv import Config, DictAction + +from mmdet.datasets import build_dataset + + +def parse_args(): + parser = argparse.ArgumentParser(description='Evaluate metric of the ' + 'results saved in pkl format') + parser.add_argument('config', help='Config of the model') + parser.add_argument('pkl_results', help='Results in pickle format') + parser.add_argument( + '--format-only', + action='store_true', + help='Format the output results without performing evaluation. It is ' + 'useful when you want to format the result to a specific format and ' + 'submit it to the test server') + parser.add_argument( + '--eval', + type=str, + nargs='+', + help='Evaluation metrics, which depends on the dataset, e.g., "bbox",' + ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + assert args.eval or args.format_only, ( + 'Please specify at least one operation (eval/format the results) with ' + 'the argument "--eval", "--format-only"') + if args.eval and args.format_only: + raise ValueError('--eval and --format-only cannot be both specified') + + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + cfg.data.test.test_mode = True + + dataset = build_dataset(cfg.data.test) + outputs = mmcv.load(args.pkl_results) + + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + print(dataset.evaluate(outputs, **eval_kwargs)) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/get_flops.py b/thirdparty/mmdetection/tools/get_flops.py new file mode 100644 index 0000000000000000000000000000000000000000..4d34bcd847447a0c58270a686f6da299982c50bc --- /dev/null +++ b/thirdparty/mmdetection/tools/get_flops.py @@ -0,0 +1,67 @@ +import argparse + +import torch +from mmcv import Config + +from mmdet.models import build_detector + +try: + from mmcv.cnn import get_model_complexity_info +except ImportError: + raise ImportError('Please upgrade mmcv to >0.6.2') + + +def
parse_args(): + parser = argparse.ArgumentParser(description='Get the FLOPs of a detector') + parser.add_argument('config', help='train config file path') + parser.add_argument( + '--shape', + type=int, + nargs='+', + default=[1280, 800], + help='input image size') + args = parser.parse_args() + return args + + +def main(): + + args = parse_args() + + if len(args.shape) == 1: + input_shape = (3, args.shape[0], args.shape[0]) + elif len(args.shape) == 2: + input_shape = (3, ) + tuple(args.shape) + else: + raise ValueError('invalid input shape') + + cfg = Config.fromfile(args.config) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + + model = build_detector( + cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg) + if torch.cuda.is_available(): + model.cuda() + model.eval() + + if hasattr(model, 'forward_dummy'): + model.forward = model.forward_dummy + else: + raise NotImplementedError( + 'FLOPs counter is currently not supported with {}'. + format(model.__class__.__name__)) + + flops, params = get_model_complexity_info(model, input_shape) + split_line = '=' * 30 + print(f'{split_line}\nInput shape: {input_shape}\n' + f'Flops: {flops}\nParams: {params}\n{split_line}') + print('!!!Please be cautious if you use the results in papers. ' + 'You may need to check if all ops are supported and verify that the ' + 'flops computation is correct.') + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/print_config.py b/thirdparty/mmdetection/tools/print_config.py new file mode 100644 index 0000000000000000000000000000000000000000..2ba994fb388477c4d1cc4d174b4af0d84731b475 --- /dev/null +++ b/thirdparty/mmdetection/tools/print_config.py @@ -0,0 +1,26 @@ +import argparse + +from mmcv import Config, DictAction + + +def parse_args(): + parser = argparse.ArgumentParser(description='Print the whole config') + parser.add_argument('config', help='config file path') + parser.add_argument( + '--options', nargs='+', action=DictAction, help='arguments in dict') + args = parser.parse_args() + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.options is not None: + cfg.merge_from_dict(args.options) + print(f'Config:\n{cfg.pretty_text}') + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/publish_model.py b/thirdparty/mmdetection/tools/publish_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c20e7e38b6461bd1e0697eece6f128824189ff5f --- /dev/null +++ b/thirdparty/mmdetection/tools/publish_model.py @@ -0,0 +1,39 @@ +import argparse +import subprocess + +import torch + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Process a checkpoint to be published') + parser.add_argument('in_file', help='input checkpoint filename') + parser.add_argument('out_file', help='output checkpoint filename') + args = parser.parse_args() + return args + + +def process_checkpoint(in_file, out_file): + checkpoint = torch.load(in_file, map_location='cpu') + # remove optimizer for smaller file size + if 'optimizer' in checkpoint: + del checkpoint['optimizer'] + # if it is necessary to remove some sensitive data in checkpoint['meta'], + # add the code here.
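+ # e.g. checkpoint['meta'].pop('env_info', None) would drop logged environment details (illustrative only)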
+ torch.save(checkpoint, out_file) + sha = subprocess.check_output(['sha256sum', out_file]).decode() + if out_file.endswith('.pth'): + out_file_name = out_file[:-4] + else: + out_file_name = out_file + final_file = out_file_name + f'-{sha[:8]}.pth' + subprocess.Popen(['mv', out_file, final_file]) + + +def main(): + args = parse_args() + process_checkpoint(args.in_file, args.out_file) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/pytorch2onnx.py b/thirdparty/mmdetection/tools/pytorch2onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..6271098cb1b46d66e91c0668ad0fd7b976cde98d --- /dev/null +++ b/thirdparty/mmdetection/tools/pytorch2onnx.py @@ -0,0 +1,203 @@ +import argparse +import os.path as osp + +import numpy as np +import onnx +import onnxruntime as rt +import torch + +from mmdet.core import (build_model_from_cfg, generate_inputs_and_wrap_model, + preprocess_example_input) + + +def pytorch2onnx(config_path, + checkpoint_path, + input_img, + input_shape, + opset_version=11, + show=False, + output_file='tmp.onnx', + verify=False, + normalize_cfg=None, + dataset='coco', + test_img=None): + + input_config = { + 'input_shape': input_shape, + 'input_path': input_img, + 'normalize_cfg': normalize_cfg + } + + # prepare original model and meta for verifying the onnx model + orig_model = build_model_from_cfg(config_path, checkpoint_path) + one_img, one_meta = preprocess_example_input(input_config) + model, tensor_data = generate_inputs_and_wrap_model( + config_path, checkpoint_path, input_config) + output_names = ['boxes'] + if model.with_bbox: + output_names.append('labels') + if model.with_mask: + output_names.append('masks') + + torch.onnx.export( + model, + tensor_data, + output_file, + input_names=['input'], + output_names=output_names, + export_params=True, + keep_initializers_as_inputs=True, + do_constant_folding=True, + verbose=show, + opset_version=opset_version) + + model.forward = orig_model.forward + print(f'Successfully exported ONNX model: {output_file}') + if verify: + from mmdet.core import get_classes + from mmdet.apis import show_result_pyplot + model.CLASSES = get_classes(dataset) + num_classes = len(model.CLASSES) + # check by onnx + onnx_model = onnx.load(output_file) + onnx.checker.check_model(onnx_model) + if test_img is not None: + input_config['input_path'] = test_img + one_img, one_meta = preprocess_example_input(input_config) + tensor_data = [one_img] + # check the numerical value + # get pytorch output + pytorch_results = model(tensor_data, [[one_meta]], return_loss=False) + pytorch_results = pytorch_results[0] + # get onnx output + input_all = [node.name for node in onnx_model.graph.input] + input_initializer = [ + node.name for node in onnx_model.graph.initializer + ] + net_feed_input = list(set(input_all) - set(input_initializer)) + assert (len(net_feed_input) == 1) + sess = rt.InferenceSession(output_file) + from mmdet.core import bbox2result + onnx_outputs = sess.run(None, + {net_feed_input[0]: one_img.detach().numpy()}) + output_names = [_.name for _ in sess.get_outputs()] + output_shapes = [_.shape for _ in onnx_outputs] + print(f'onnxruntime output names: {output_names}, \ + output shapes: {output_shapes}') + nrof_out = len(onnx_outputs) + assert nrof_out > 0, 'Must have output' + with_mask = nrof_out == 3 + if nrof_out == 1: + onnx_results = onnx_outputs[0] + else: + det_bboxes, det_labels = onnx_outputs[:2] + onnx_results = bbox2result(det_bboxes, det_labels, num_classes) + if with_mask: + 
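# the third ONNX output holds the mask scores; they are squeezed and regrouped per class below +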
segm_results = onnx_outputs[2].squeeze(1) + cls_segms = [[] for _ in range(num_classes)] + for i in range(det_bboxes.shape[0]): + cls_segms[det_labels[i]].append(segm_results[i]) + onnx_results = (onnx_results, cls_segms) + # visualize predictions + + if show: + show_result_pyplot( + model, + one_meta['show_img'], + pytorch_results, + title='PyTorch', + block=False) + show_result_pyplot( + model, one_meta['show_img'], onnx_results, title='ONNX') + + # compare a part of result + + if with_mask: + compare_pairs = list(zip(onnx_results, pytorch_results)) + else: + compare_pairs = [(onnx_results, pytorch_results)] + for onnx_res, pytorch_res in compare_pairs: + for o_res, p_res in zip(onnx_res, pytorch_res): + np.testing.assert_allclose( + o_res, + p_res, + rtol=1e-03, + atol=1e-05, + ) + print('The numerical values are the same between PyTorch and ONNX') + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert MMDetection models to ONNX') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--input-img', type=str, help='Images for input') + parser.add_argument('--show', action='store_true', help='show onnx graph') + parser.add_argument('--output-file', type=str, default='tmp.onnx') + parser.add_argument('--opset-version', type=int, default=11) + parser.add_argument( + '--test-img', type=str, default=None, help='Images for test') + parser.add_argument( + '--dataset', type=str, default='coco', help='Dataset name') + parser.add_argument( + '--view', action='store_true', help='Visualize results') + parser.add_argument( + '--verify', + action='store_true', + help='verify the onnx model output against pytorch output') + parser.add_argument( + '--shape', + type=int, + nargs='+', + default=[800, 1216], + help='input image size') + parser.add_argument( + '--mean', + type=float, + nargs='+', + default=[123.675, 116.28, 103.53], + help='mean values used to preprocess input data') + parser.add_argument( + '--std', + type=float, + nargs='+', + default=[58.395, 57.12, 57.375], + help='standard deviation values used to preprocess input data') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + + assert args.opset_version == 11, 'MMDet only supports opset 11 now' + + if not args.input_img: + args.input_img = osp.join( + osp.dirname(__file__), '../tests/data/color.jpg') + + if len(args.shape) == 1: + input_shape = (1, 3, args.shape[0], args.shape[0]) + elif len(args.shape) == 2: + input_shape = (1, 3) + tuple(args.shape) + else: + raise ValueError('invalid input shape') + + assert len(args.mean) == 3 + assert len(args.std) == 3 + + normalize_cfg = {'mean': args.mean, 'std': args.std} + + # convert model to onnx file + pytorch2onnx( + args.config, + args.checkpoint, + args.input_img, + input_shape, + opset_version=args.opset_version, + show=args.show, + output_file=args.output_file, + verify=args.verify, + normalize_cfg=normalize_cfg, + dataset=args.dataset) diff --git a/thirdparty/mmdetection/tools/regnet2mmdet.py b/thirdparty/mmdetection/tools/regnet2mmdet.py new file mode 100644 index 0000000000000000000000000000000000000000..9f4e316d37569a6fbeb6329bd36abaa822b20ccf --- /dev/null +++ b/thirdparty/mmdetection/tools/regnet2mmdet.py @@ -0,0 +1,89 @@ +import argparse +from collections import OrderedDict + +import torch + + +def convert_stem(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('stem.conv', 'conv1') + new_key =
new_key.replace('stem.bn', 'bn1') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_head(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('head.fc', 'fc') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_reslayer(model_key, model_weight, state_dict, converted_names): + split_keys = model_key.split('.') + layer, block, module = split_keys[:3] + block_id = int(block[1:]) + layer_name = f'layer{int(layer[1:])}' + block_name = f'{block_id - 1}' + + if block_id == 1 and module == 'bn': + new_key = f'{layer_name}.{block_name}.downsample.1.{split_keys[-1]}' + elif block_id == 1 and module == 'proj': + new_key = f'{layer_name}.{block_name}.downsample.0.{split_keys[-1]}' + elif module == 'f': + if split_keys[3] == 'a_bn': + module_name = 'bn1' + elif split_keys[3] == 'b_bn': + module_name = 'bn2' + elif split_keys[3] == 'c_bn': + module_name = 'bn3' + elif split_keys[3] == 'a': + module_name = 'conv1' + elif split_keys[3] == 'b': + module_name = 'conv2' + elif split_keys[3] == 'c': + module_name = 'conv3' + new_key = f'{layer_name}.{block_name}.{module_name}.{split_keys[-1]}' + else: + raise ValueError(f'Unsupported conversion of key {model_key}') + print(f'Convert {model_key} to {new_key}') + state_dict[new_key] = model_weight + converted_names.add(model_key) + + +def convert(src, dst): + """Convert keys in pycls pretrained RegNet models to mmdet style.""" + # load pycls model + regnet_model = torch.load(src) + blobs = regnet_model['model_state'] + # convert to pytorch style + state_dict = OrderedDict() + converted_names = set() + for key, weight in blobs.items(): + if 'stem' in key: + convert_stem(key, weight, state_dict, converted_names) + elif 'head' in key: + convert_head(key, weight, state_dict, converted_names) + elif key.startswith('s'): + convert_reslayer(key, weight, state_dict, converted_names) + + # check if all layers are converted + for key in blobs: + if key not in converted_names: + print(f'not converted: {key}') + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src pycls model path') + parser.add_argument('dst', help='save path') + args = parser.parse_args() + convert(args.src, args.dst) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/robustness_eval.py b/thirdparty/mmdetection/tools/robustness_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..a1b4ce88e174196d6580b272ae77328a4260610a --- /dev/null +++ b/thirdparty/mmdetection/tools/robustness_eval.py @@ -0,0 +1,250 @@ +import os.path as osp +from argparse import ArgumentParser + +import mmcv +import numpy as np + + +def print_coco_results(results): + + def _print(result, ap=1, iouThr=None, areaRng='all', maxDets=100): + titleStr = 'Average Precision' if ap == 1 else 'Average Recall' + typeStr = '(AP)' if ap == 1 else '(AR)' + iouStr = '0.50:0.95' \ + if iouThr is None else f'{iouThr:0.2f}' + iStr = f' {titleStr:<18} {typeStr} @[ IoU={iouStr:<9} | ' + iStr += f'area={areaRng:>6s} | maxDets={maxDets:>3d} ] = {result:0.3f}' + print(iStr) + + stats = np.zeros((12, )) + stats[0] = _print(results[0], 1) + stats[1] = _print(results[1], 1, iouThr=.5) + stats[2] = _print(results[2], 1, iouThr=.75) +
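# the twelve entries follow pycocotools' summarize() order: AP, AP50, AP75, APs, APm, APl, AR1, AR10, AR100, ARs, ARm, ARl +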
stats[3] = _print(results[3], 1, areaRng='small') + stats[4] = _print(results[4], 1, areaRng='medium') + stats[5] = _print(results[5], 1, areaRng='large') + stats[6] = _print(results[6], 0, maxDets=1) + stats[7] = _print(results[7], 0, maxDets=10) + stats[8] = _print(results[8], 0) + stats[9] = _print(results[9], 0, areaRng='small') + stats[10] = _print(results[10], 0, areaRng='medium') + stats[11] = _print(results[11], 0, areaRng='large') + + +def get_coco_style_results(filename, + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + if metric is None: + metrics = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + elif isinstance(metric, list): + metrics = metric + else: + metrics = [metric] + + for metric_name in metrics: + assert metric_name in [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, len(metrics)), dtype='float32') + + for corr_i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + for metric_j, metric_name in enumerate(metrics): + mAP = eval_output[distortion][severity][task][metric_name] + results[corr_i, severity, metric_j] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + + print(f'\nmodel: {osp.basename(filename)}') + if metric is None: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + print_coco_results(P) + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + print_coco_results(mPC) + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + print_coco_results(rPC) + else: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {P[metric_i]:0.3f}') + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {mPC[metric_i]:0.3f}') + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} => {rPC[metric_i] * 100:0.1f} %') + + return results + + +def get_voc_style_results(filename, prints='mPC', aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, 20), dtype='float32') + + for i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + mAP = [ + eval_output[distortion][severity][j]['ap'] + for j in range(len(eval_output[distortion][severity])) + ] + results[i, severity, :] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + +
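# P: clean-data results (severity 0); mPC: mean over corruptions at severities 1-5; rPC: mPC relative to P +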
print(f'\nmodel: {osp.basename(filename)}') + if 'P' in prints: + print(f'Performance on Clean Data [P] in AP50 = {np.mean(P):0.3f}') + if 'mPC' in prints: + print('Mean Performance under Corruption [mPC] in AP50 = ' + f'{np.mean(mPC):0.3f}') + if 'rPC' in prints: + print('Relative Performance under Corruption [rPC] in % = ' + f'{np.mean(rPC) * 100:0.1f}') + + return np.mean(results, axis=2, keepdims=True) + + +def get_results(filename, + dataset='coco', + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + assert dataset in ['coco', 'voc', 'cityscapes'] + + if dataset in ['coco', 'cityscapes']: + results = get_coco_style_results( + filename, + task=task, + metric=metric, + prints=prints, + aggregate=aggregate) + elif dataset == 'voc': + if task != 'bbox': + print('Only bbox analysis is supported for Pascal VOC') + print('Will report bbox results\n') + if metric not in [None, ['AP'], ['AP50']]: + print('Only the AP50 metric is supported for Pascal VOC') + print('Will report AP50 metric\n') + results = get_voc_style_results( + filename, prints=prints, aggregate=aggregate) + + return results + + +def get_distortions_from_file(filename): + + eval_output = mmcv.load(filename) + + return get_distortions_from_results(eval_output) + + +def get_distortions_from_results(eval_output): + distortions = [] + for i, distortion in enumerate(eval_output): + distortions.append(distortion.replace('_', ' ')) + return distortions + + +def main(): + parser = ArgumentParser(description='Corruption Result Analysis') + parser.add_argument('filename', help='result file path') + parser.add_argument( + '--dataset', + type=str, + choices=['coco', 'voc', 'cityscapes'], + default='coco', + help='dataset type') + parser.add_argument( + '--task', + type=str, + nargs='+', + choices=['bbox', 'segm'], + default=['bbox'], + help='task to report') + parser.add_argument( + '--metric', + nargs='+', + choices=[ + None, 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ], + default=None, + help='metric to report') + parser.add_argument( + '--prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print') + parser.add_argument( + '--aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those ' + 'for benchmark corruptions') + + args = parser.parse_args() + + for task in args.task: + get_results( + args.filename, + dataset=args.dataset, + task=task, + metric=args.metric, + prints=args.prints, + aggregate=args.aggregate) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/slurm_test.sh b/thirdparty/mmdetection/tools/slurm_test.sh new file mode 100644 index 0000000000000000000000000000000000000000..6dd67e57442b741fc30f26102eb5afe16139edb1 --- /dev/null +++ b/thirdparty/mmdetection/tools/slurm_test.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +CHECKPOINT=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +PY_ARGS=${@:5} +SRUN_ARGS=${SRUN_ARGS:-""} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/test.py ${CONFIG} ${CHECKPOINT} --launcher="slurm" ${PY_ARGS} diff --git
a/thirdparty/mmdetection/tools/slurm_train.sh b/thirdparty/mmdetection/tools/slurm_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..b3feb3d9c7a6c33d82739cdf5ee10365673aaded --- /dev/null +++ b/thirdparty/mmdetection/tools/slurm_train.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +WORK_DIR=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +SRUN_ARGS=${SRUN_ARGS:-""} +PY_ARGS=${@:5} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/train.py ${CONFIG} --work-dir=${WORK_DIR} --launcher="slurm" ${PY_ARGS} diff --git a/thirdparty/mmdetection/tools/test.py b/thirdparty/mmdetection/tools/test.py new file mode 100644 index 0000000000000000000000000000000000000000..8dcd305e155858a89d6da5c054e796ef2c419daa --- /dev/null +++ b/thirdparty/mmdetection/tools/test.py @@ -0,0 +1,208 @@ +import argparse +import os +import warnings + +import mmcv +import torch +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) + +from mmdet.apis import multi_gpu_test, single_gpu_test +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.models import build_detector + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet test (and eval) a model') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--out', help='output result file in pickle format') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase ' + 'the inference speed') + parser.add_argument( + '--format-only', + action='store_true', + help='Format the output results without performing evaluation.
It is ' + 'useful when you want to format the result to a specific format and ' + 'submit it to the test server') + parser.add_argument( + '--eval', + type=str, + nargs='+', + help='evaluation metrics, which depends on the dataset, e.g., "bbox",' + ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument( + '--gpu-collect', + action='store_true', + help='whether to use gpu to collect results.') + parser.add_argument( + '--tmpdir', + help='tmp directory used for collecting results from multiple ' + 'workers, available when gpu-collect is not specified') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecated), ' + 'change to --eval-options instead.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format-only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list.
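+ # e.g. a config may set custom_imports = dict(imports=['my_package.my_module'], allow_failed_imports=False); the module name here is only illustrative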
+ if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + + # init distributed env first, since logger depends on the dist info. + if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + + # build the dataloader + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' with 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline) + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu') + if args.fuse_conv_bn: + model = fuse_conv_bn(model) + # old versions did not save class info in checkpoints, this workaround is + # for backward compatibility + if 'CLASSES' in checkpoint['meta']: + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + + if not distributed: + model = MMDataParallel(model, device_ids=[0]) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + else: + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir, + args.gpu_collect) + + rank, _ = get_dist_info() + if rank == 0: + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + print(dataset.evaluate(outputs, **eval_kwargs)) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/test_robustness.py b/thirdparty/mmdetection/tools/test_robustness.py new file mode 100644 index 0000000000000000000000000000000000000000..0f1e801bb5872a23dc04f38d16615d2d5273a27e --- /dev/null +++ b/thirdparty/mmdetection/tools/test_robustness.py @@ -0,0 +1,377 @@ +import argparse +import copy +import os +import os.path as osp + +import mmcv +import torch +from mmcv.parallel import MMDataParallel,
MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from robustness_eval import get_results + +from mmdet import datasets +from mmdet.apis import multi_gpu_test, set_random_seed, single_gpu_test +from mmdet.core import eval_map +from mmdet.datasets import build_dataloader, build_dataset +from mmdet.models import build_detector + + +def coco_eval_with_return(result_files, + result_types, + coco, + max_dets=(100, 300, 1000)): + for res_type in result_types: + assert res_type in ['proposal', 'bbox', 'segm', 'keypoints'] + + if mmcv.is_str(coco): + coco = COCO(coco) + assert isinstance(coco, COCO) + + eval_results = {} + for res_type in result_types: + result_file = result_files[res_type] + assert result_file.endswith('.json') + + coco_dets = coco.loadRes(result_file) + img_ids = coco.getImgIds() + iou_type = 'bbox' if res_type == 'proposal' else res_type + cocoEval = COCOeval(coco, coco_dets, iou_type) + cocoEval.params.imgIds = img_ids + if res_type == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.params.maxDets = list(max_dets) + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if res_type == 'segm' or res_type == 'bbox': + metric_names = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ] + eval_results[res_type] = { + metric_names[i]: cocoEval.stats[i] + for i in range(len(metric_names)) + } + else: + eval_results[res_type] = cocoEval.stats + + return eval_results + + +def voc_eval_with_return(result_file, + dataset, + iou_thr=0.5, + logger='print', + only_ap=True): + det_results = mmcv.load(result_file) + annotations = [dataset.get_ann_info(i) for i in range(len(dataset))] + if hasattr(dataset, 'year') and dataset.year == 2007: + dataset_name = 'voc07' + else: + dataset_name = dataset.CLASSES + mean_ap, eval_results = eval_map( + det_results, + annotations, + scale_ranges=None, + iou_thr=iou_thr, + dataset=dataset_name, + logger=logger) + + if only_ap: + eval_results = [{ + 'ap': eval_results[i]['ap'] + } for i in range(len(eval_results))] + + return mean_ap, eval_results + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDet test detector') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--out', help='output result file') + parser.add_argument( + '--corruptions', + type=str, + nargs='+', + default='benchmark', + choices=[ + 'all', 'benchmark', 'noise', 'blur', 'weather', 'digital', + 'holdout', 'None', 'gaussian_noise', 'shot_noise', 'impulse_noise', + 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur', 'snow', + 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', + 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur', + 'spatter', 'saturate' + ], + help='corruptions') + parser.add_argument( + '--severities', + type=int, + nargs='+', + default=[0, 1, 2, 3, 4, 5], + help='corruption severity levels') + parser.add_argument( + '--eval', + type=str, + nargs='+', + choices=['proposal', 'proposal_fast', 'bbox', 'segm', 'keypoints'], + help='eval types') + parser.add_argument( + '--iou-thr', + type=float, + default=0.5, + help='IoU threshold for pascal voc evaluation') + parser.add_argument( + '--summaries', + type=bool, + default=False, + help='Print summaries for every corruption and severity') + parser.add_argument( + '--workers', 
type=int, default=32, help='workers per gpu') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument('--tmpdir', help='tmp dir for writing some results') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + parser.add_argument( + '--final-prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print at the end') + parser.add_argument( + '--final-prints-aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those for benchmark corruptions') + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + return args + + +def main(): + args = parse_args() + + assert args.out or args.show or args.show_dir, \ + ('Please specify at least one operation (save or show the results) ' + 'with the argument "--out", "--show" or "--show-dir"') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = mmcv.Config.fromfile(args.config) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + cfg.model.pretrained = None + cfg.data.test.test_mode = True + if args.workers == 0: + args.workers = cfg.data.workers_per_gpu + + # init distributed env first, since logger depends on the dist info.
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + + # set random seeds + if args.seed is not None: + set_random_seed(args.seed) + + if 'all' in args.corruptions: + corruptions = [ + 'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur', + 'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog', + 'brightness', 'contrast', 'elastic_transform', 'pixelate', + 'jpeg_compression', 'speckle_noise', 'gaussian_blur', 'spatter', + 'saturate' + ] + elif 'benchmark' in args.corruptions: + corruptions = [ + 'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur', + 'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog', + 'brightness', 'contrast', 'elastic_transform', 'pixelate', + 'jpeg_compression' + ] + elif 'noise' in args.corruptions: + corruptions = ['gaussian_noise', 'shot_noise', 'impulse_noise'] + elif 'blur' in args.corruptions: + corruptions = [ + 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur' + ] + elif 'weather' in args.corruptions: + corruptions = ['snow', 'frost', 'fog', 'brightness'] + elif 'digital' in args.corruptions: + corruptions = [ + 'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression' + ] + elif 'holdout' in args.corruptions: + corruptions = ['speckle_noise', 'gaussian_blur', 'spatter', 'saturate'] + elif 'None' in args.corruptions: + corruptions = ['None'] + args.severities = [0] + else: + corruptions = args.corruptions + + rank, _ = get_dist_info() + aggregated_results = {} + for corr_i, corruption in enumerate(corruptions): + aggregated_results[corruption] = {} + for sev_i, corruption_severity in enumerate(args.severities): + # evaluate severity 0 (= no corruption) only once + if corr_i > 0 and corruption_severity == 0: + aggregated_results[corruption][0] = \ + aggregated_results[corruptions[0]][0] + continue + + test_data_cfg = copy.deepcopy(cfg.data.test) + # assign corruption and severity + if corruption_severity > 0: + corruption_trans = dict( + type='Corrupt', + corruption=corruption, + severity=corruption_severity) + # TODO: hard coded "1", we assume that the first step is + # loading images, which needs to be fixed in the future + test_data_cfg['pipeline'].insert(1, corruption_trans) + + # print info + print(f'\nTesting {corruption} at severity {corruption_severity}') + + # build the dataloader + # TODO: support multiple images per gpu + # (only minor changes are needed) + dataset = build_dataset(test_data_cfg) + data_loader = build_dataloader( + dataset, + samples_per_gpu=1, + workers_per_gpu=args.workers, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + model = build_detector( + cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint( + model, args.checkpoint, map_location='cpu') + # old versions did not save class info in checkpoints, + # this workaround is for backward compatibility + if 'CLASSES' in checkpoint['meta']: + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + + if not distributed: + model = MMDataParallel(model, device_ids=[0]) + show_dir = args.show_dir + if show_dir is not None: + show_dir = osp.join(show_dir, corruption) + show_dir = osp.join(show_dir, str(corruption_severity)) + if not osp.exists(show_dir): + os.makedirs(show_dir) + outputs = single_gpu_test(model, data_loader, args.show, + show_dir, args.show_score_thr) + else: +
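# distributed path: wrap with MMDistributedDataParallel and collect predictions from all ranks via tmpdir +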
model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir) + + if args.out and rank == 0: + eval_results_filename = ( + osp.splitext(args.out)[0] + '_results' + + osp.splitext(args.out)[1]) + mmcv.dump(outputs, args.out) + eval_types = args.eval + if cfg.dataset_type == 'VOCDataset': + if eval_types: + for eval_type in eval_types: + if eval_type == 'bbox': + test_dataset = mmcv.runner.obj_from_dict( + cfg.data.test, datasets) + logger = 'print' if args.summaries else None + mean_ap, eval_results = \ + voc_eval_with_return( + args.out, test_dataset, + args.iou_thr, logger) + aggregated_results[corruption][ + corruption_severity] = eval_results + else: + print('\nOnly "bbox" evaluation ' + 'is supported for Pascal VOC') + else: + if eval_types: + print(f'Starting to evaluate {" and ".join(eval_types)}') + if eval_types == ['proposal_fast']: + result_file = args.out + else: + if not isinstance(outputs[0], dict): + result_files = dataset.results2json( + outputs, args.out) + else: + for name in outputs[0]: + print(f'\nEvaluating {name}') + outputs_ = [out[name] for out in outputs] + result_file = args.out + + f'.{name}' + result_files = dataset.results2json( + outputs_, result_file) + eval_results = coco_eval_with_return( + result_files, eval_types, dataset.coco) + aggregated_results[corruption][ + corruption_severity] = eval_results + else: + print('\nNo task was selected for evaluation;' + '\nUse --eval to select a task') + + # save results after each evaluation + mmcv.dump(aggregated_results, eval_results_filename) + + if rank == 0: + # print final results + print('\nAggregated results:') + prints = args.final_prints + aggregate = args.final_prints_aggregate + + if cfg.dataset_type == 'VOCDataset': + get_results( + eval_results_filename, + dataset='voc', + prints=prints, + aggregate=aggregate) + else: + get_results( + eval_results_filename, + dataset='coco', + prints=prints, + aggregate=aggregate) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/train.py b/thirdparty/mmdetection/tools/train.py new file mode 100644 index 0000000000000000000000000000000000000000..91a59b802cf2a1781ba6251c9b269bb377cefdde --- /dev/null +++ b/thirdparty/mmdetection/tools/train.py @@ -0,0 +1,181 @@ +import argparse +import copy +import os +import os.path as osp +import time +import warnings + +import mmcv +import torch +from mmcv import Config, DictAction +from mmcv.runner import get_dist_info, init_dist +from mmcv.utils import get_git_hash + +from mmdet import __version__ +from mmdet.apis import set_random_seed, train_detector +from mmdet.datasets import build_dataset +from mmdet.models import build_detector +from mmdet.utils import collect_env, get_root_logger + + +def parse_args(): + parser = argparse.ArgumentParser(description='Train a detector') + parser.add_argument('config', help='train config file path') + parser.add_argument('--work-dir', help='the dir to save logs and models') + parser.add_argument( + '--resume-from', help='the checkpoint file to resume from') + parser.add_argument( + '--no-validate', + action='store_true', + help='whether not to evaluate the checkpoint during training') + group_gpus = parser.add_mutually_exclusive_group() + group_gpus.add_argument( + '--gpus', + type=int, + help='number of gpus to use ' + '(only applicable to non-distributed training)') + group_gpus.add_argument( + '--gpu-ids', + type=int, + nargs='+', + help='ids of
gpus to use ' + '(only applicable to non-distributed training)') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--deterministic', + action='store_true', + help='whether to set deterministic options for CUDNN backend.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file (deprecated), ' + 'change to --cfg-options instead.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.cfg_options: + raise ValueError( + '--options and --cfg-options cannot be both ' + 'specified, --options is deprecated in favor of --cfg-options') + if args.options: + warnings.warn('--options is deprecated in favor of --cfg-options') + args.cfg_options = args.options + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + # work_dir is determined in this priority: CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + if args.resume_from is not None: + cfg.resume_from = args.resume_from + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids + else: + cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus) + + # init distributed env first, since logger depends on the dist info.
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + # re-set gpu_ids with distributed training mode + _, world_size = get_dist_info() + cfg.gpu_ids = range(world_size) + + # create work_dir + mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir)) + # dump config + cfg.dump(osp.join(cfg.work_dir, osp.basename(args.config))) + # init the logger before other steps + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + log_file = osp.join(cfg.work_dir, f'{timestamp}.log') + logger = get_root_logger(log_file=log_file, log_level=cfg.log_level) + + # init the meta dict to record some important information such as + # environment info and seed, which will be logged + meta = dict() + # log env info + env_info_dict = collect_env() + env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()]) + dash_line = '-' * 60 + '\n' + logger.info('Environment info:\n' + dash_line + env_info + '\n' + + dash_line) + meta['env_info'] = env_info + meta['config'] = cfg.pretty_text + # log some basic info + logger.info(f'Distributed training: {distributed}') + logger.info(f'Config:\n{cfg.pretty_text}') + + # set random seeds + if args.seed is not None: + logger.info(f'Set random seed to {args.seed}, ' + f'deterministic: {args.deterministic}') + set_random_seed(args.seed, deterministic=args.deterministic) + cfg.seed = args.seed + meta['seed'] = args.seed + meta['exp_name'] = osp.basename(args.config) + + model = build_detector( + cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg) + + datasets = [build_dataset(cfg.data.train)] + if len(cfg.workflow) == 2: + val_dataset = copy.deepcopy(cfg.data.val) + val_dataset.pipeline = cfg.data.train.pipeline + datasets.append(build_dataset(val_dataset)) + if cfg.checkpoint_config is not None: + # save mmdet version, config file content and class names in + # checkpoints as meta data + cfg.checkpoint_config.meta = dict( + mmdet_version=__version__ + get_git_hash()[:7], + CLASSES=datasets[0].CLASSES) + # add an attribute for visualization convenience + model.CLASSES = datasets[0].CLASSES + train_detector( + model, + datasets, + cfg, + distributed=distributed, + validate=(not args.no_validate), + timestamp=timestamp, + meta=meta) + + +if __name__ == '__main__': + main() diff --git a/thirdparty/mmdetection/tools/upgrade_model_version.py b/thirdparty/mmdetection/tools/upgrade_model_version.py new file mode 100644 index 0000000000000000000000000000000000000000..a8e15d4b7bf3e9ba7d4452192816de74d8c466bb --- /dev/null +++ b/thirdparty/mmdetection/tools/upgrade_model_version.py @@ -0,0 +1,209 @@ +import argparse +import re +import tempfile +from collections import OrderedDict + +import torch +from mmcv import Config + + +def is_head(key): + valid_head_list = [ + 'bbox_head', 'mask_head', 'semantic_head', 'grid_head', 'mask_iou_head' + ] + + return any(key.startswith(h) for h in valid_head_list) + + +def parse_config(config_strings): + temp_file = tempfile.NamedTemporaryFile() + config_path = f'{temp_file.name}.py' + with open(config_path, 'w') as f: + f.write(config_strings) + + config = Config.fromfile(config_path) + is_two_stage = True + is_ssd = False + is_retina = False + reg_cls_agnostic = False + if 'rpn_head' not in config.model: + is_two_stage = False + # check whether it is SSD + if config.model.bbox_head.type == 'SSDHead': + is_ssd = True + elif config.model.bbox_head.type == 'RetinaHead': + is_retina = True + elif isinstance(config.model['bbox_head'], list): + reg_cls_agnostic = 
True + elif 'reg_class_agnostic' in config.model.bbox_head: + reg_cls_agnostic = config.model.bbox_head \ + .reg_class_agnostic + temp_file.close() + return is_two_stage, is_ssd, is_retina, reg_cls_agnostic + + +def reorder_cls_channel(val, num_classes=81): + # bias + if val.dim() == 1: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_cls for softmax output + if out_channels != num_classes and out_channels % num_classes == 0: + new_val = val.reshape(-1, num_classes, in_channels, *val.shape[2:]) + new_val = torch.cat((new_val[:, 1:], new_val[:, :1]), dim=1) + new_val = new_val.reshape(val.size()) + # fc_cls + elif out_channels == num_classes: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # agnostic | retina_cls | rpn_cls + else: + new_val = val + + return new_val + + +def truncate_cls_channel(val, num_classes=81): + + # bias + if val.dim() == 1: + if val.size(0) % num_classes == 0: + new_val = val[:num_classes - 1] + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_logits + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, in_channels, *val.shape[2:])[1:] + new_val = new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def truncate_reg_channel(val, num_classes=81): + # bias + if val.dim() == 1: + # fc_reg | rpn_reg + if val.size(0) % num_classes == 0: + new_val = val.reshape(num_classes, -1)[:num_classes - 1] + new_val = new_val.reshape(-1) + # agnostic + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # fc_reg | rpn_reg + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, -1, in_channels, + *val.shape[2:])[1:] + new_val = new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def convert(in_file, out_file, num_classes): + """Convert keys in checkpoints. + + There can be some breaking changes during the development of mmdetection, + and this tool is used for upgrading checkpoints trained with old versions + to the latest one. 
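+ + Example (hypothetical checkpoint paths): + python tools/upgrade_model_version.py old.pth upgraded.pth --num-classes 81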
+ """ + checkpoint = torch.load(in_file) + in_state_dict = checkpoint.pop('state_dict') + out_state_dict = OrderedDict() + meta_info = checkpoint['meta'] + is_two_stage, is_ssd, is_retina, reg_cls_agnostic = parse_config( + meta_info['config']) + if meta_info['mmdet_version'] <= '0.5.3' and is_retina: + upgrade_retina = True + else: + upgrade_retina = False + + # MMDetection v2.5.0 unifies the class order in RPN + # if the model is trained in version=2.5.0 + if meta_info['mmdet_version'] < '2.5.0': + upgrade_rpn = True + else: + upgrade_rpn = False + + for key, val in in_state_dict.items(): + new_key = key + new_val = val + if is_two_stage and is_head(key): + new_key = 'roi_head.{}'.format(key) + + # classification + if upgrade_rpn: + m = re.search( + r'(conv_cls|retina_cls|rpn_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + else: + m = re.search( + r'(conv_cls|retina_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + if m is not None: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + # regression + if upgrade_rpn: + m = re.search(r'(fc_reg).(weight|bias)', new_key) + else: + m = re.search(r'(fc_reg|rpn_reg).(weight|bias)', new_key) + if m is not None and not reg_cls_agnostic: + print(f'truncate regression channels of {new_key}') + new_val = truncate_reg_channel(val, num_classes) + + # mask head + m = re.search(r'(conv_logits).(weight|bias)', new_key) + if m is not None: + print(f'truncate mask prediction channels of {new_key}') + new_val = truncate_cls_channel(val, num_classes) + + m = re.search(r'(cls_convs|reg_convs).\d.(weight|bias)', key) + # Legacy issues in RetinaNet since V1.x + # Use ConvModule instead of nn.Conv2d in RetinaNet + # cls_convs.0.weight -> cls_convs.0.conv.weight + if m is not None and upgrade_retina: + param = m.groups()[1] + new_key = key.replace(param, f'conv.{param}') + out_state_dict[new_key] = val + print(f'rename the name of {key} to {new_key}') + continue + + m = re.search(r'(cls_convs).\d.(weight|bias)', key) + if m is not None and is_ssd: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + out_state_dict[new_key] = new_val + checkpoint['state_dict'] = out_state_dict + torch.save(checkpoint, out_file) + + +def main(): + parser = argparse.ArgumentParser(description='Upgrade model version') + parser.add_argument('in_file', help='input checkpoint file') + parser.add_argument('out_file', help='output checkpoint file') + parser.add_argument( + '--num-classes', + type=int, + default=81, + help='number of classes of the original model') + args = parser.parse_args() + convert(args.in_file, args.out_file, args.num_classes) + + +if __name__ == '__main__': + main() diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fa9f7c7960360bf4d892def657f9ba4cc85f8521 --- /dev/null +++ b/utils/__init__.py @@ -0,0 +1,2 @@ +def datasets(): + return None \ No newline at end of file diff --git a/utils/asserts.py b/utils/asserts.py new file mode 100644 index 0000000000000000000000000000000000000000..59a73cc04025762d6490fcd2945a747d963def32 --- /dev/null +++ b/utils/asserts.py @@ -0,0 +1,13 @@ +from os import environ + + +def assert_in(file, files_to_check): + if file not in files_to_check: + raise AssertionError("{} does not exist in the list".format(str(file))) + return True + + +def assert_in_env(check_list: list): + for item in check_list: + assert_in(item, environ.keys()) + return True diff 
--git a/utils/draw.py b/utils/draw.py new file mode 100644 index 0000000000000000000000000000000000000000..817ce65c540d4e7f39a7071ea6d916210ddc8c21 --- /dev/null +++ b/utils/draw.py @@ -0,0 +1,51 @@ +import numpy as np +import cv2 + +palette = (2 ** 11 - 1, 2 ** 15 - 1, 2 ** 20 - 1) + + +def compute_color_for_labels(label): + """ + Simple function that adds fixed color depending on the class + """ + color = [int((p * (label ** 2 - label + 1)) % 255) for p in palette] + return tuple(color) + + +def draw_masks(image, mask, color, thresh: float = 0.7, alpha: float = 0.5): + np_image = np.asarray(image) + mask = mask > thresh + + color = np.asarray(color) + img_to_draw = np.copy(np_image) + # TODO: There might be a way to vectorize this + img_to_draw[mask] = color + + out = np_image * (1 - alpha) + img_to_draw * alpha + return out.astype(np.uint8) + + +def draw_boxes(img, bbox, names=None, identities=None, masks=None, offset=(0, 0)): + for i, box in enumerate(bbox): + x1, y1, x2, y2 = [int(i) for i in box] + x1 += offset[0] + x2 += offset[0] + y1 += offset[1] + y2 += offset[1] + # box text and bar + id = int(identities[i]) if identities is not None else 0 + color = compute_color_for_labels(id) + label = '{:}{:d}'.format(names[i], id) + t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 2, 2)[0] + if masks is not None: + mask = masks[i] + img = draw_masks(img, mask, color) + cv2.rectangle(img, (x1, y1), (x2, y2), color, 3) + cv2.rectangle(img, (x1, y1), (x1 + t_size[0] + 3, y1 + t_size[1] + 4), color, -1) + cv2.putText(img, label, (x1, y1 + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 2, [255, 255, 255], 2) + return img + + +if __name__ == '__main__': + for i in range(82): + print(compute_color_for_labels(i)) diff --git a/utils/evaluation.py b/utils/evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..100179407181933d59809b25400d115cfa789867 --- /dev/null +++ b/utils/evaluation.py @@ -0,0 +1,103 @@ +import os +import numpy as np +import copy +import motmetrics as mm +mm.lap.default_solver = 'lap' +from utils.io import read_results, unzip_objs + + +class Evaluator(object): + + def __init__(self, data_root, seq_name, data_type): + self.data_root = data_root + self.seq_name = seq_name + self.data_type = data_type + + self.load_annotations() + self.reset_accumulator() + + def load_annotations(self): + assert self.data_type == 'mot' + + gt_filename = os.path.join(self.data_root, self.seq_name, 'gt', 'gt.txt') + self.gt_frame_dict = read_results(gt_filename, self.data_type, is_gt=True) + self.gt_ignore_frame_dict = read_results(gt_filename, self.data_type, is_ignore=True) + + def reset_accumulator(self): + self.acc = mm.MOTAccumulator(auto_id=True) + + def eval_frame(self, frame_id, trk_tlwhs, trk_ids, rtn_events=False): + # results + trk_tlwhs = np.copy(trk_tlwhs) + trk_ids = np.copy(trk_ids) + + # gts + gt_objs = self.gt_frame_dict.get(frame_id, []) + gt_tlwhs, gt_ids = unzip_objs(gt_objs)[:2] + + # ignore boxes + ignore_objs = self.gt_ignore_frame_dict.get(frame_id, []) + ignore_tlwhs = unzip_objs(ignore_objs)[0] + + + # remove ignored results + keep = np.ones(len(trk_tlwhs), dtype=bool) + iou_distance = mm.distances.iou_matrix(ignore_tlwhs, trk_tlwhs, max_iou=0.5) + if len(iou_distance) > 0: + match_is, match_js = mm.lap.linear_sum_assignment(iou_distance) + match_is, match_js = map(lambda a: np.asarray(a, dtype=int), [match_is, match_js]) + match_ious = iou_distance[match_is, match_js] + + match_js = np.asarray(match_js, dtype=int) + match_js = 
match_js[np.logical_not(np.isnan(match_ious))] + keep[match_js] = False + trk_tlwhs = trk_tlwhs[keep] + trk_ids = trk_ids[keep] + + # get distance matrix + iou_distance = mm.distances.iou_matrix(gt_tlwhs, trk_tlwhs, max_iou=0.5) + + # acc + self.acc.update(gt_ids, trk_ids, iou_distance) + + if rtn_events and iou_distance.size > 0 and hasattr(self.acc, 'last_mot_events'): + events = self.acc.last_mot_events # only supported by https://github.com/longcw/py-motmetrics + else: + events = None + return events + + def eval_file(self, filename): + self.reset_accumulator() + + result_frame_dict = read_results(filename, self.data_type, is_gt=False) + frames = sorted(list(set(self.gt_frame_dict.keys()) | set(result_frame_dict.keys()))) + for frame_id in frames: + trk_objs = result_frame_dict.get(frame_id, []) + trk_tlwhs, trk_ids = unzip_objs(trk_objs)[:2] + self.eval_frame(frame_id, trk_tlwhs, trk_ids, rtn_events=False) + + return self.acc + + @staticmethod + def get_summary(accs, names, metrics=('mota', 'num_switches', 'idp', 'idr', 'idf1', 'precision', 'recall')): + names = copy.deepcopy(names) + if metrics is None: + metrics = mm.metrics.motchallenge_metrics + metrics = copy.deepcopy(metrics) + + mh = mm.metrics.create() + summary = mh.compute_many( + accs, + metrics=metrics, + names=names, + generate_overall=True + ) + + return summary + + @staticmethod + def save_summary(summary, filename): + import pandas as pd + writer = pd.ExcelWriter(filename) + summary.to_excel(writer) + writer.save() diff --git a/utils/io.py b/utils/io.py new file mode 100644 index 0000000000000000000000000000000000000000..35bec6293f5c51627cc8cff861ab36361b18d56d --- /dev/null +++ b/utils/io.py @@ -0,0 +1,133 @@ +import os +from typing import Dict +import numpy as np + +# from utils.log import get_logger + + +def write_results(filename, results, data_type): + if data_type == 'mot': + save_format = '{frame},{id},{cls},{x1},{y1},{w},{h},-1,-1,-1,-1\n' + elif data_type == 'kitti': + save_format = '{frame} {id} pedestrian 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n' + else: + raise ValueError(data_type) + + with open(filename, 'w') as f: + for frame_id, tlwhs, track_ids, classes in results: + if data_type == 'kitti': + frame_id -= 1 + for tlwh, track_id, cls_id in zip(tlwhs, track_ids, classes): + if track_id < 0: + continue + x1, y1, w, h = tlwh + x2, y2 = x1 + w, y1 + h + line = save_format.format(frame=frame_id, id=track_id, cls=cls_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h) + f.write(line) + + +# def write_results(filename, results_dict: Dict, data_type: str): +# if not filename: +# return +# path = os.path.dirname(filename) +# if not os.path.exists(path): +# os.makedirs(path) + +# if data_type in ('mot', 'mcmot', 'lab'): +# save_format = '{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n' +# elif data_type == 'kitti': +# save_format = '{frame} {id} pedestrian -1 -1 -10 {x1} {y1} {x2} {y2} -1 -1 -1 -1000 -1000 -1000 -10 {score}\n' +# else: +# raise ValueError(data_type) + +# with open(filename, 'w') as f: +# for frame_id, frame_data in results_dict.items(): +# if data_type == 'kitti': +# frame_id -= 1 +# for tlwh, track_id in frame_data: +# if track_id < 0: +# continue +# x1, y1, w, h = tlwh +# x2, y2 = x1 + w, y1 + h +# line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h, score=1.0) +# f.write(line) +# logger.info('Save results to {}'.format(filename)) + + +def read_results(filename, data_type: str, is_gt=False, is_ignore=False): + if data_type in ('mot', 'lab'): + 
read_fun = read_mot_results + else: + raise ValueError('Unknown data type: {}'.format(data_type)) + + return read_fun(filename, is_gt, is_ignore) + + +""" +labels={'ped', ... % 1 +'person_on_vhcl', ... % 2 +'car', ... % 3 +'bicycle', ... % 4 +'mbike', ... % 5 +'non_mot_vhcl', ... % 6 +'static_person', ... % 7 +'distractor', ... % 8 +'occluder', ... % 9 +'occluder_on_grnd', ... %10 +'occluder_full', ... % 11 +'reflection', ... % 12 +'crowd' ... % 13 +}; +""" + + +def read_mot_results(filename, is_gt, is_ignore): + valid_labels = {1} + ignore_labels = {2, 7, 8, 12} + results_dict = dict() + if os.path.isfile(filename): + with open(filename, 'r') as f: + for line in f.readlines(): + linelist = line.split(',') + if len(linelist) < 7: + continue + fid = int(linelist[0]) + if fid < 1: + continue + results_dict.setdefault(fid, list()) + + if is_gt: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + mark = int(float(linelist[6])) + if mark == 0 or label not in valid_labels: + continue + score = 1 + elif is_ignore: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + vis_ratio = float(linelist[8]) + if label not in ignore_labels and vis_ratio >= 0: + continue + else: + continue + score = 1 + else: + score = float(linelist[6]) + + tlwh = tuple(map(float, linelist[2:6])) + target_id = int(linelist[1]) + + results_dict[fid].append((tlwh, target_id, score)) + + return results_dict + + +def unzip_objs(objs): + if len(objs) > 0: + tlwhs, ids, scores = zip(*objs) + else: + tlwhs, ids, scores = [], [], [] + tlwhs = np.asarray(tlwhs, dtype=float).reshape(-1, 4) + + return tlwhs, ids, scores \ No newline at end of file diff --git a/utils/json_logger.py b/utils/json_logger.py new file mode 100644 index 0000000000000000000000000000000000000000..0afd0b45df736866c49473db78286685d77660ac --- /dev/null +++ b/utils/json_logger.py @@ -0,0 +1,383 @@ +""" +References: + https://medium.com/analytics-vidhya/creating-a-custom-logging-mechanism-for-real-time-object-detection-using-tdd-4ca2cfcd0a2f +""" +import json +from os import makedirs +from os.path import exists, join +from datetime import datetime + + +class JsonMeta(object): + HOURS = 3 + MINUTES = 59 + SECONDS = 59 + PATH_TO_SAVE = 'LOGS' + DEFAULT_FILE_NAME = 'remaining' + + +class BaseJsonLogger(object): + """ + Base class whose dic() method returns the object's __dict__; attributes + that are list instances are converted element-wise as well + + """ + + def dic(self): + # returns dicts of objects + out = {} + for k, v in self.__dict__.items(): + if hasattr(v, 'dic'): + out[k] = v.dic() + elif isinstance(v, list): + out[k] = self.list(v) + else: + out[k] = v + return out + + @staticmethod + def list(values): + # applies the dic method on items in the list + return [v.dic() if hasattr(v, 'dic') else v for v in values] + + +class Label(BaseJsonLogger): + """ + For each bounding box there are various categories with confidences. The Label class keeps track of that information. + """ + + def __init__(self, category: str, confidence: float): + self.category = category + self.confidence = confidence + + +class Bbox(BaseJsonLogger): + """ + Stores the information of a single bounding box within a frame. + Attributes: + labels (list): list of Label objects. + top (int): + left (int): + width (int): + height (int): + + Args: + bbox_id (int): + top (int): + left (int): + width (int): + height (int): + + References: + Check Label module for better understanding.
+ + + """ + + def __init__(self, bbox_id, top, left, width, height): + self.labels = [] + self.bbox_id = bbox_id + self.top = top + self.left = left + self.width = width + self.height = height + + def add_label(self, category, confidence): + # appends the label; callers check labels_full() so top_k is not exceeded. + self.labels.append(Label(category, confidence)) + + def labels_full(self, value): + return len(self.labels) == value + + +class Frame(BaseJsonLogger): + """ + Stores the information for each frame and is used by BboxToJsonLogger + Attributes: + timestamp (float): The elapsed time of the captured frame + frame_id (int): The frame number of the captured video + bboxes (list of Bbox objects): Stores the list of bbox objects. + + References: + Check the Bbox class for more information + + Args: + timestamp (float): + frame_id (int): + + """ + + def __init__(self, frame_id: int, timestamp: float = None): + self.frame_id = frame_id + self.timestamp = timestamp + self.bboxes = [] + + def add_bbox(self, bbox_id: int, top: int, left: int, width: int, height: int): + bboxes_ids = [bbox.bbox_id for bbox in self.bboxes] + if bbox_id not in bboxes_ids: + self.bboxes.append(Bbox(bbox_id, top, left, width, height)) + else: + raise ValueError("Frame with id: {} already has a Bbox with id: {}".format(self.frame_id, bbox_id)) + + def add_label_to_bbox(self, bbox_id: int, category: str, confidence: float): + bboxes = {bbox.bbox_id: bbox for bbox in self.bboxes} + if bbox_id in bboxes.keys(): + res = bboxes.get(bbox_id) + res.add_label(category, confidence) + else: + raise ValueError('the bbox with id: {} does not exist!'.format(bbox_id)) + + +class BboxToJsonLogger(BaseJsonLogger): + """ + This module is designed to automate the task of logging jsons. An example json is shown + below to illustrate the output format + Example: + { + "video_details": { + "frame_width": 1920, + "frame_height": 1080, + "frame_rate": 20, + "video_name": "/home/gpu/codes/MSD/pedestrian_2/project/public/camera1.avi" + }, + "frames": [ + { + "frame_id": 329, + "timestamp": 3365.1254, + "bboxes": [ + { + "labels": [ + { + "category": "pedestrian", + "confidence": 0.9 + } + ], + "bbox_id": 0, + "top": 1257, + "left": 138, + "width": 68, + "height": 109 + } + ] + }] + } + + Attributes: + frames (dict): It's a dictionary that maps each frame_id to json attributes. + video_details (dict): information about the video file. + top_k_labels (int): the allowed number of labels per bbox + start_time (datetime object): we use it to automate the json output by time.
+ + Args: + top_k_labels (int): the allowed number of labels per bbox + + """ + + def __init__(self, top_k_labels: int = 1): + self.frames = {} + self.video_details = dict(frame_width=None, frame_height=None, frame_rate=None, + video_name=None) + self.top_k_labels = top_k_labels + self.start_time = datetime.now() + + def set_top_k(self, value): + self.top_k_labels = value + + def frame_exists(self, frame_id: int) -> bool: + """ + Args: + frame_id (int): + + Returns: + bool: true if frame_id is recognized + """ + return frame_id in self.frames.keys() + + def add_frame(self, frame_id: int, timestamp: float = None) -> None: + """ + Args: + frame_id (int): + timestamp (float): opencv captured frame time property + + Raises: + ValueError: if frame_id already exists in the frames attribute + + Returns: + None + + """ + if not self.frame_exists(frame_id): + self.frames[frame_id] = Frame(frame_id, timestamp) + else: + raise ValueError("Frame id: {} already exists".format(frame_id)) + + def bbox_exists(self, frame_id: int, bbox_id: int) -> bool: + """ + Args: + frame_id: + bbox_id: + + Returns: + bool: if bbox exists in frame bboxes list + """ + bboxes = [] + if self.frame_exists(frame_id=frame_id): + bboxes = [bbox.bbox_id for bbox in self.frames[frame_id].bboxes] + return bbox_id in bboxes + + def find_bbox(self, frame_id: int, bbox_id: int): + """ + + Args: + frame_id: + bbox_id: + + Returns: + Bbox: the bbox object with the given id + + Raises: + ValueError: if bbox_id does not exist in the bbox list of the specific frame. + """ + if not self.bbox_exists(frame_id, bbox_id): + raise ValueError("frame with id: {} does not contain bbox with id: {}".format(frame_id, bbox_id)) + bboxes = {bbox.bbox_id: bbox for bbox in self.frames[frame_id].bboxes} + return bboxes.get(bbox_id) + + def add_bbox_to_frame(self, frame_id: int, bbox_id: int, top: int, left: int, width: int, height: int) -> None: + """ + + Args: + frame_id (int): + bbox_id (int): + top (int): + left (int): + width (int): + height (int): + + Returns: + None + + Raises: + ValueError: if bbox_id already exists in the frame information with frame_id + ValueError: if frame_id does not exist in frames attribute + """ + if self.frame_exists(frame_id): + frame = self.frames[frame_id] + if not self.bbox_exists(frame_id, bbox_id): + frame.add_bbox(bbox_id, top, left, width, height) + else: + raise ValueError( + "frame with frame_id: {} already contains the bbox with id: {} ".format(frame_id, bbox_id)) + else: + raise ValueError("frame with frame_id: {} does not exist".format(frame_id)) + + def add_label_to_bbox(self, frame_id: int, bbox_id: int, category: str, confidence: float): + """ + Args: + frame_id: + bbox_id: + category: + confidence: the confidence value returned from yolo detection + + Returns: + None + + Raises: + ValueError: if the labels quota (top_k_labels) is exceeded.
+ """ + bbox = self.find_bbox(frame_id, bbox_id) + if not bbox.labels_full(self.top_k_labels): + bbox.add_label(category, confidence) + else: + raise ValueError("labels in frame_id: {}, bbox_id: {} is fulled".format(frame_id, bbox_id)) + + def add_video_details(self, frame_width: int = None, frame_height: int = None, frame_rate: int = None, + video_name: str = None): + self.video_details['frame_width'] = frame_width + self.video_details['frame_height'] = frame_height + self.video_details['frame_rate'] = frame_rate + self.video_details['video_name'] = video_name + + def output(self): + output = {'video_details': self.video_details} + result = list(self.frames.values()) + output['frames'] = [item.dic() for item in result] + return output + + def json_output(self, output_name): + """ + Args: + output_name: + + Returns: + None + + Notes: + It creates the json output with `output_name` name. + """ + if not output_name.endswith('.json'): + output_name += '.json' + with open(output_name, 'w') as file: + json.dump(self.output(), file) + file.close() + + def set_start(self): + self.start_time = datetime.now() + + def schedule_output_by_time(self, output_dir=JsonMeta.PATH_TO_SAVE, hours: int = 0, minutes: int = 0, + seconds: int = 60) -> None: + """ + Notes: + Creates folder and then periodically stores the jsons on that address. + + Args: + output_dir (str): the directory where output files will be stored + hours (int): + minutes (int): + seconds (int): + + Returns: + None + + """ + end = datetime.now() + interval = 0 + interval += abs(min([hours, JsonMeta.HOURS]) * 3600) + interval += abs(min([minutes, JsonMeta.MINUTES]) * 60) + interval += abs(min([seconds, JsonMeta.SECONDS])) + diff = (end - self.start_time).seconds + + if diff > interval: + output_name = self.start_time.strftime('%Y-%m-%d %H-%M-%S') + '.json' + if not exists(output_dir): + makedirs(output_dir) + output = join(output_dir, output_name) + self.json_output(output_name=output) + self.frames = {} + self.start_time = datetime.now() + + def schedule_output_by_frames(self, frames_quota, frame_counter, output_dir=JsonMeta.PATH_TO_SAVE): + """ + saves as the number of frames quota increases higher. + :param frames_quota: + :param frame_counter: + :param output_dir: + :return: + """ + pass + + def flush(self, output_dir): + """ + Notes: + We use this function to output jsons whenever possible. + like the time that we exit the while loop of opencv. 
+ + Args: + output_dir: + + Returns: + None + + """ + filename = self.start_time.strftime('%Y-%m-%d %H-%M-%S') + '-remaining.json' + output = join(output_dir, filename) + self.json_output(output_name=output) diff --git a/utils/log.py b/utils/log.py new file mode 100644 index 0000000000000000000000000000000000000000..5b8c940900f1e3f262dc314695467f97988912c0 --- /dev/null +++ b/utils/log.py @@ -0,0 +1,17 @@ +import logging + + +def get_logger(name='root'): + formatter = logging.Formatter( + # fmt='%(asctime)s [%(levelname)s]: %(filename)s(%(funcName)s:%(lineno)s) >> %(message)s') + fmt='%(asctime)s [%(levelname)s]: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') + + handler = logging.StreamHandler() + handler.setFormatter(formatter) + + logger = logging.getLogger(name) + logger.setLevel(logging.INFO) + logger.addHandler(handler) + return logger + + diff --git a/utils/parser.py b/utils/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..c9b13d8fdd397416d8648d722307b427b85d4c75 --- /dev/null +++ b/utils/parser.py @@ -0,0 +1,38 @@ +import os +import yaml +from easydict import EasyDict as edict + +class YamlParser(edict): + """ + This is yaml parser based on EasyDict. + """ + def __init__(self, cfg_dict=None, config_file=None): + if cfg_dict is None: + cfg_dict = {} + + if config_file is not None: + assert (os.path.isfile(config_file)) + with open(config_file, 'r') as fo: + cfg_dict.update(yaml.safe_load(fo.read())) + + super(YamlParser, self).__init__(cfg_dict) + + + def merge_from_file(self, config_file): + with open(config_file, 'r') as fo: + self.update(yaml.safe_load(fo.read())) + + + def merge_from_dict(self, config_dict): + self.update(config_dict) + + +def get_config(config_file=None): + return YamlParser(config_file=config_file) + + +if __name__ == "__main__": + cfg = YamlParser(config_file="../configs/yolov3.yaml") + cfg.merge_from_file("../configs/deep_sort.yaml") + + import ipdb; ipdb.set_trace() diff --git a/utils/tools.py b/utils/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..965fb69c2df41510fd740a4ab57d8fc7b81012de --- /dev/null +++ b/utils/tools.py @@ -0,0 +1,39 @@ +from functools import wraps +from time import time + + +def is_video(ext: str): + """ + Returns true if ext exists in + allowed_exts for video files. + + Args: + ext: + + Returns: + + """ + + allowed_exts = ('.mp4', '.webm', '.ogg', '.avi', '.wmv', '.mkv', '.3gp') + return any((ext.endswith(x) for x in allowed_exts)) + + +def tik_tok(func): + """ + keep track of time for each process. 
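+ Prints the elapsed time and fps of the wrapped call even when it raises, via the finally block.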
+ Args: + func: + + Returns: + + """ + @wraps(func) + def _time_it(*args, **kwargs): + start = time() + try: + return func(*args, **kwargs) + finally: + end_ = time() + print("time: {:.03f}s, fps: {:.03f}".format(end_ - start, 1 / (end_ - start))) + + return _time_it diff --git a/webserver/.env b/webserver/.env new file mode 100644 index 0000000000000000000000000000000000000000..de09f70aff16d30662056f8217045bba86a1d811 --- /dev/null +++ b/webserver/.env @@ -0,0 +1,13 @@ +project_root="C:\Users\ZQ_deep_sort_pytorch" +model_type="yolov3" +output_dir="public/" +json_output="json_output/" # ignored for the moment in ped_det_online_server.py +reid_ckpt="deep_sort/deep/checkpoint/ckpt.t7" +yolov3_cfg="detector/YOLOv3/cfg/yolo_v3.cfg" +yolov3_weight="detector/YOLOv3/weight/yolov3.weights" +yolov3_tiny_cfg="detector/YOLOv3/cfg/yolov3-tiny.cfg" +yolov3_tiny_weight="detector/YOLOv3/weight/yolov3-tiny.weights" +yolov3_class_names="detector/YOLOv3/cfg/coco.names" +analysis_output="video_analysis/" +app="flask_stream_server.py" +camera_stream= "rtsp://user@111.222.333.444:somesecretcode" diff --git a/webserver/__init__.py b/webserver/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/webserver/config/config.py b/webserver/config/config.py new file mode 100644 index 0000000000000000000000000000000000000000..115b369b5cbde14605eebcddfc33ec2e70681ee1 --- /dev/null +++ b/webserver/config/config.py @@ -0,0 +1,21 @@ +import os + +app_dir = os.path.abspath(os.path.dirname(__file__)) + + +class BaseConfig: + SECRET_KEY = os.environ.get('SECRET_KEY') or 'Sm9obiBTY2hyb20ga2lja3MgYXNz' + SERVER_NAME = '127.0.0.1:8888' + + +class DevelopmentConfig(BaseConfig): + ENV = 'development' + DEBUG = True + + +class TestingConfig(BaseConfig): + DEBUG = True + + +class ProductionConfig(BaseConfig): + DEBUG = False diff --git a/webserver/images/Thumbs.db b/webserver/images/Thumbs.db new file mode 100644 index 0000000000000000000000000000000000000000..f63bfef3065be4a8e534ea7d920c8ed44746537a Binary files /dev/null and b/webserver/images/Thumbs.db differ diff --git a/webserver/images/arc.png b/webserver/images/arc.png new file mode 100644 index 0000000000000000000000000000000000000000..4cfcfeda45644ef870a0f95a1aa9dd43939612f7 --- /dev/null +++ b/webserver/images/arc.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5a56ed5a4d8021e2bb5fe686f8efdc2adf0f33fad4038350185b1c0eefa5fe2 +size 37822 diff --git a/webserver/images/request.png b/webserver/images/request.png new file mode 100644 index 0000000000000000000000000000000000000000..26d48fce669f94918fab03b01cf41688a9c8f5a1 --- /dev/null +++ b/webserver/images/request.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d28e4aae3ff8164205031995438e22c465ecb348738b6a8cb897b88bf2c59758 +size 26030 diff --git a/webserver/readme.md b/webserver/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..37d1996ee381aa56a9d205ed4352015d2e7bc667 --- /dev/null +++ b/webserver/readme.md @@ -0,0 +1,45 @@ +# Stream pedestrian detection web server + +### Requirements + +- python = 3.7 +- redis +- flask +- opencv +- pytorch +- dotenv + +Please note that you need to install redis on your system. + +### The architecture. 
+ +![web server architecture](images/arc.png) + +1 - `RealTimeTracking` reads frames from the rtsp link using threads +(using threads makes the web server robust against network packet loss) + +2 - In each iteration of `RealTimeTracking.run`, the current frame is stored in the redis cache on the server. + +3 - The frames cached in redis can then be served to clients. + +4 - To start the pedestrian detection, after running +`rtsp_webserver.py`, send a GET request to `127.0.0.1:8888/run` +with these query parameters + +| Param | Value | Description | +| :-------------: | :-------------: | :-------------: | +| run | 1/0 | set 1 to start the tracking service, 0 to stop it | +| camera_stream | 'rtsp://ip:port/admin...' | a valid rtsp link to the camera | + +for example: + + (to start the service) 127.0.0.1:8888/run?run=1 + (to stop the service) 127.0.0.1:8888/run?run=0 + (to change the camera) + 1- 127.0.0.1:8888/run?run=0 (first stop the current service) + 2- 127.0.0.1:8888/run?run=1&camera_stream=rtsp://ip:port/admin... (then start it with another rtsp link) + +![web server architecture](images/request.png) + + +5 - get the pedestrian detection stream at `127.0.0.1:8888/stream` diff --git a/webserver/rtsp_threaded_tracker.py b/webserver/rtsp_threaded_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..aa10b69837477bd27f4448977c284737d01a7f5b --- /dev/null +++ b/webserver/rtsp_threaded_tracker.py @@ -0,0 +1,108 @@ +import warnings +from os import getenv +import sys +from os.path import dirname, abspath + +sys.path.append(dirname(dirname(abspath(__file__)))) + +import torch +from deep_sort import build_tracker +from detector import build_detector +import cv2 +from utils.draw import compute_color_for_labels +from concurrent.futures import ThreadPoolExecutor +from redis import Redis + +redis_cache = Redis('127.0.0.1') + + +class RealTimeTracking(object): + """ + This class reads frames from an rtsp link and continuously assigns + the latest frame to its frame attribute in order to compensate + for network packet loss. Flask then serves the cached frames + to clients.
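+ + Frames processed in run() are cached in the local redis instance under the + 'frame' key, from which the flask app serves them. +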
+ Args: + args: parse_args inputs + cfg: deepsort dict and yolo-model cfg from server_cfg file + + """ + + def __init__(self, cfg, args): + # Create a VideoCapture object + self.cfg = cfg + self.args = args + use_cuda = self.args.use_cuda and torch.cuda.is_available() + + if not use_cuda: + warnings.warn(UserWarning("Running in cpu mode!")) + + self.detector = build_detector(cfg, use_cuda=use_cuda) + self.deepsort = build_tracker(cfg, use_cuda=use_cuda) + self.class_names = self.detector.class_names + + self.vdo = cv2.VideoCapture(self.args.input) + self.status, self.frame = None, None + self.total_frames = int(cv2.VideoCapture.get(self.vdo, cv2.CAP_PROP_FRAME_COUNT)) + self.im_width = int(self.vdo.get(cv2.CAP_PROP_FRAME_WIDTH)) + self.im_height = int(self.vdo.get(cv2.CAP_PROP_FRAME_HEIGHT)) + + self.output_frame = None + + self.thread = ThreadPoolExecutor(max_workers=1) + self.thread.submit(self.update) + + def update(self): + while True: + if self.vdo.isOpened(): + (self.status, self.frame) = self.vdo.read() + + def run(self): + print('streaming started ...') + while getenv('in_progress') != 'off': + try: + frame = self.frame.copy() + self.detection(frame=frame) + frame_to_bytes = cv2.imencode('.jpg', frame)[1].tobytes() + redis_cache.set('frame', frame_to_bytes) + except AttributeError: + pass + print('streaming stopped ...') + + + def detection(self, frame): + im = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + # do detection + bbox_xywh, cls_conf, cls_ids = self.detector(im) + if bbox_xywh is not None: + # select person class + mask = cls_ids == 0 + + bbox_xywh = bbox_xywh[mask] + bbox_xywh[:, 3:] *= 1.2 # bbox dilation just in case bbox too small + cls_conf = cls_conf[mask] + + # do tracking + outputs = self.deepsort.update(bbox_xywh, cls_conf, im) + + # draw boxes for visualization + if len(outputs) > 0: + self.draw_boxes(img=frame, output=outputs) + + @staticmethod + def draw_boxes(img, output, offset=(0, 0)): + for i, box in enumerate(output): + x1, y1, x2, y2, identity = [int(ii) for ii in box] + x1 += offset[0] + x2 += offset[0] + y1 += offset[1] + y2 += offset[1] + + # box text and bar + color = compute_color_for_labels(identity) + label = '{}{:d}'.format("", identity) + t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 2, 2)[0] + cv2.rectangle(img, (x1, y1), (x2, y2), color, 3) + cv2.rectangle(img, (x1, y1), (x1 + t_size[0] + 3, y1 + t_size[1] + 4), color, -1) + cv2.putText(img, label, (x1, y1 + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 2, [255, 255, 255], 2) + return img diff --git a/webserver/rtsp_webserver.py b/webserver/rtsp_webserver.py new file mode 100644 index 0000000000000000000000000000000000000000..6a164ca354c73b757c71a19053514ced09a2f838 --- /dev/null +++ b/webserver/rtsp_webserver.py @@ -0,0 +1,163 @@ +""" + +# TODO: Load ML model with redis and keep it for sometime. 
+ 1- detector/yolov3/detector.py |=> yolov3 weightfile -> redis cache + 2- deepsort/deep/feature_extractor |=> model_path -> redis cache + 3- Use tmpfs (Insert RAM as a virtual disk and store model state): https://pypi.org/project/memory-tempfile/ + +""" +from os.path import join +from os import getenv, environ +from dotenv import load_dotenv +import argparse +from threading import Thread + +from redis import Redis +from flask import Response, Flask, jsonify, request, abort + +from rtsp_threaded_tracker import RealTimeTracking +from server_cfg import model, deep_sort_dict +from config.config import DevelopmentConfig +from utils.parser import get_config + +redis_cache = Redis('127.0.0.1') +app = Flask(__name__) +environ['in_progress'] = 'off' + + +def parse_args(): + """ + Parses the arguments + Returns: + argparse Namespace + """ + assert 'project_root' in environ.keys() + project_root = getenv('project_root') + parser = argparse.ArgumentParser() + + parser.add_argument("--input", + type=str, + default=getenv('camera_stream')) + + parser.add_argument("--model", + type=str, + default=join(project_root, + getenv('model_type'))) + + parser.add_argument("--cpu", + dest="use_cuda", + action="store_false", default=True) + args = parser.parse_args() + + return args + + +def gen(): + """ + + Returns: video frames from redis cache + + """ + while True: + frame = redis_cache.get('frame') + if frame is not None: + yield b'--frame\r\n'b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n' + + +def pedestrian_tracking(cfg, args): + """ + starts the pedestrian detection on the rtsp link + Args: + cfg: + args: + + Returns: + + """ + tracker = RealTimeTracking(cfg, args) + tracker.run() + + +def trigger_process(cfg, args): + """ + triggers the pedestrian_tracking process on the rtsp link using a thread + Args: + cfg: + args: + + Returns: + """ + try: + t = Thread(target=pedestrian_tracking, args=(cfg, args)) + t.start() + return jsonify({"message": "Pedestrian detection started successfully"}) + except Exception: + return jsonify({'message': "Unexpected exception occurred in process"}) + + +@app.errorhandler(400) +def bad_argument(error): + return jsonify({'message': error.description['message']}) + + +# Routes +@app.route('/stream', methods=['GET']) +def stream(): + """ + Provides video frames over http + Returns: + + """ + return Response(gen(), + mimetype='multipart/x-mixed-replace; boundary=frame') + + +@app.route("/run", methods=['GET']) +def process_manager(): + """ + request parameters: + run (bool): 1 -> start the pedestrian tracking + 0 -> stop it + camera_stream: str -> rtsp link to security camera + + :return: + """ + data = request.args + status = data['run'] + status = int(status) if status.isnumeric() else abort(400, {'message': f"bad argument for run {data['run']}"}) + if status == 1: + # if pedestrian tracking is not running, start it off!
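+ # 'in_progress' is the shared on/off flag: RealTimeTracking.run keeps streaming until it reads 'off'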
+ try: + if environ.get('in_progress', 'off') == 'off': + global cfg, args + vdo = data.get('camera_stream') + if vdo is not None: + args.input = vdo # the rtsp link is a string; do not cast it to int + environ['in_progress'] = 'on' + return trigger_process(cfg, args) + elif environ.get('in_progress') == 'on': + # if pedestrian tracking is running, don't start another one (we are short of gpu resources) + return jsonify({"message": "Pedestrian detection is already in progress."}) + except Exception: + environ['in_progress'] = 'off' + return abort(503) + elif status == 0: + if environ.get('in_progress', 'off') == 'off': + return jsonify({"message": "Pedestrian detection is already terminated!"}) + else: + environ['in_progress'] = 'off' + return jsonify({"message": "Pedestrian detection terminated!"}) + + +if __name__ == '__main__': + load_dotenv() + app.config.from_object(DevelopmentConfig) + + # Background process initialization + args = parse_args() + cfg = get_config() + cfg.merge_from_dict(model) + cfg.merge_from_dict(deep_sort_dict) + # Start the flask app + app.run() diff --git a/webserver/server_cfg.py b/webserver/server_cfg.py new file mode 100644 index 0000000000000000000000000000000000000000..64950fe81ee34c06d2a6a7bf8b55d5458abc5e91 --- /dev/null +++ b/webserver/server_cfg.py @@ -0,0 +1,62 @@ +"""Model and deep sort configurations assembled from the .env file.""" +import sys +from os.path import dirname, abspath, isfile + +sys.path.append(dirname(dirname(abspath(__file__)))) + +from dotenv import load_dotenv +from utils.asserts import assert_in_env +from os import getenv +from os.path import join + +load_dotenv('.env') +# Configure deep sort info +deep_sort_info = dict(REID_CKPT=join(getenv('project_root'), getenv('reid_ckpt')), + MAX_DIST=0.2, + MIN_CONFIDENCE=.3, + NMS_MAX_OVERLAP=0.5, + MAX_IOU_DISTANCE=0.7, + N_INIT=3, + MAX_AGE=70, + NN_BUDGET=100) +deep_sort_dict = {'DEEPSORT': deep_sort_info} + +# Configure yolov3 info + +yolov3_info = dict(CFG=join(getenv('project_root'), getenv('yolov3_cfg')), + WEIGHT=join(getenv('project_root'), getenv('yolov3_weight')), + CLASS_NAMES=join(getenv('project_root'), getenv('yolov3_class_names')), + SCORE_THRESH=0.5, + NMS_THRESH=0.4 + ) +yolov3_dict = {'YOLOV3': yolov3_info} + +# Configure yolov3-tiny info + +yolov3_tiny_info = dict(CFG=join(getenv('project_root'), getenv('yolov3_tiny_cfg')), + WEIGHT=join(getenv('project_root'), getenv('yolov3_tiny_weight')), + CLASS_NAMES=join(getenv('project_root'), getenv('yolov3_class_names')), + SCORE_THRESH=0.5, + NMS_THRESH=0.4 + ) +yolov3_tiny_dict = {'YOLOV3': yolov3_tiny_info} + + +check_list = ['project_root', 'reid_ckpt', 'yolov3_class_names', 'model_type', 'yolov3_cfg', 'yolov3_weight', + 'yolov3_tiny_cfg', 'yolov3_tiny_weight'] + +if assert_in_env(check_list): + assert isfile(deep_sort_info['REID_CKPT']) + if getenv('model_type') == 'yolov3': + assert isfile(yolov3_info['WEIGHT']) + assert isfile(yolov3_info['CFG']) + assert isfile(yolov3_info['CLASS_NAMES']) + model = yolov3_dict.copy() + + elif getenv('model_type') == 'yolov3_tiny': + assert isfile(yolov3_tiny_info['WEIGHT']) + assert isfile(yolov3_tiny_info['CFG']) + assert isfile(yolov3_tiny_info['CLASS_NAMES']) + model = yolov3_tiny_dict.copy() + else: + raise ValueError("Value '{}' for model_type is not valid".format(getenv('model_type'))) diff --git a/webserver/templates/index.html b/webserver/templates/index.html new file mode 100644 index 0000000000000000000000000000000000000000..f329e71d6c42205371940a26f0e6c28e035cdb80 --- /dev/null +++ b/webserver/templates/index.html @@ -0,0 +1,9 @@ + + + Floor 2 camera: corridor 2 + + +

Floor 2

+ + + \ No newline at end of file diff --git a/yolov3_deepsort_eval.py b/yolov3_deepsort_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..f0036d2889338da2c35c760ae3b9ee3d430d231a --- /dev/null +++ b/yolov3_deepsort_eval.py @@ -0,0 +1,87 @@ +import os +import os.path as osp +import logging +import argparse +from pathlib import Path + +from utils.log import get_logger +from yolov3_deepsort import VideoTracker +from utils.parser import get_config + +import motmetrics as mm +mm.lap.default_solver = 'lap' +from utils.evaluation import Evaluator + +def mkdir_if_missing(dir): + os.makedirs(dir, exist_ok=True) + +def main(data_root='', seqs=('',), args=""): + logger = get_logger() + logger.setLevel(logging.INFO) + data_type = 'mot' + result_root = os.path.join(Path(data_root), "mot_results") + mkdir_if_missing(result_root) + + cfg = get_config() + cfg.merge_from_file(args.config_detection) + cfg.merge_from_file(args.config_deepsort) + + # run tracking + accs = [] + for seq in seqs: + logger.info('start seq: {}'.format(seq)) + result_filename = os.path.join(result_root, '{}.txt'.format(seq)) + video_path = data_root+"/"+seq+"/video/video.mp4" + + with VideoTracker(cfg, args, video_path, result_filename) as vdo_trk: + vdo_trk.run() + + # eval + logger.info('Evaluate seq: {}'.format(seq)) + evaluator = Evaluator(data_root, seq, data_type) + accs.append(evaluator.eval_file(result_filename)) + + # get summary + metrics = mm.metrics.motchallenge_metrics + mh = mm.metrics.create() + summary = Evaluator.get_summary(accs, seqs, metrics) + strsummary = mm.io.render_summary( + summary, + formatters=mh.formatters, + namemap=mm.io.motchallenge_metric_names + ) + print(strsummary) + Evaluator.save_summary(summary, os.path.join(result_root, 'summary_global.xlsx')) + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--config_detection", type=str, default="./configs/yolov3.yaml") + parser.add_argument("--config_deepsort", type=str, default="./configs/deep_sort.yaml") + parser.add_argument("--ignore_display", dest="display", action="store_false", default=False) + parser.add_argument("--frame_interval", type=int, default=1) + parser.add_argument("--display_width", type=int, default=800) + parser.add_argument("--display_height", type=int, default=600) + parser.add_argument("--save_path", type=str, default="./demo/demo.avi") + parser.add_argument("--cpu", dest="use_cuda", action="store_false", default=True) + parser.add_argument("--camera", action="store", dest="cam", type=int, default="-1") + return parser.parse_args() + +if __name__ == '__main__': + args = parse_args() + + seqs_str = '''MOT16-02 + MOT16-04 + MOT16-05 + MOT16-09 + MOT16-10 + MOT16-11 + MOT16-13 + ''' + data_root = 'data/dataset/MOT16/train/' + + seqs = [seq.strip() for seq in seqs_str.split()] + + main(data_root=data_root, + seqs=seqs, + args=args) \ No newline at end of file
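A minimal way to launch this evaluation, assuming the MOT16 train split has been extracted to `data/dataset/MOT16/train/` as hard-coded in `main` above:

```bash
# the defaults already point at the bundled yolov3 / deep_sort configs
python yolov3_deepsort_eval.py \
    --config_detection ./configs/yolov3.yaml \
    --config_deepsort ./configs/deep_sort.yaml
```

Per-sequence result files and `summary_global.xlsx` are written to `data/dataset/MOT16/train/mot_results/`.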